import {isReadableStream} from 'is-stream';
import {asyncIterator} from '@sec-ant/readable-stream/ponyfill';

// Loaded by the Node entrypoint, but not by the browser one.
// This prevents using dynamic imports.
export const nodeImports = {};

const {toString: objectToString} = Object.prototype;

// Normalize the input into something `for await...of` can consume:
// a Node.js Readable, a web ReadableStream, or any async iterable.
// Throws a TypeError for anything else.
export const getAsyncIterable = stream => {
	// Node.js streams get a custom iterable so several readers can consume
	// the same stream concurrently (see `getStreamIterable` below).
	if (isReadableStream(stream, {checkOpen: false}) && nodeImports.on !== undefined) {
		return getStreamIterable(stream);
	}

	// Anything already async-iterable is used as-is.
	if (typeof stream?.[Symbol.asyncIterator] === 'function') {
		return stream;
	}

	// `ReadableStream[Symbol.asyncIterator]` support is missing in multiple browsers, so we ponyfill it
	if (objectToString.call(stream) === '[object ReadableStream]') {
		return asyncIterator.call(stream);
	}

	throw new TypeError('The first argument must be a Readable, a ReadableStream, or an async iterable.');
};

// The default iterable for Node.js streams does not allow for multiple readers at once, so we re-implement it
// by listening to `data` events instead of calling `stream.read()` directly.
const getStreamIterable = async function * (stream) {
	const abortController = new AbortController();
	const endState = {};
	// Fire-and-forget watcher: aborts the signal once the stream ends or errors.
	handleStreamEnd(stream, abortController, endState);

	try {
		for await (const [chunk] of nodeImports.on(stream, 'data', {signal: abortController.signal})) {
			yield chunk;
		}
	} catch (error) {
		// Stream failure, for example due to `stream.destroy(error)`
		if (endState.error !== undefined) {
			throw endState.error;
		}

		// `error` event directly emitted on stream
		if (!abortController.signal.aborted) {
			throw error;
		}

		// Otherwise, the abort came from `handleStreamEnd`: the stream completed
		// successfully, so the `AbortError` is expected and swallowed.
	} finally {
		// Also runs when the caller throws, for example due to the `maxBuffer` option
		stream.destroy();
	}
};

// Waits for the stream to end (or fail), records any failure on `endState`,
// then aborts the signal so the `data`-event loop above stops.
const handleStreamEnd = async (stream, abortController, endState) => {
	try {
		await nodeImports.finished(stream, {
			cleanup: true,
			readable: true,
			writable: false,
			error: false,
		});
	} catch (error) {
		endState.error = error;
	} finally {
		abortController.abort();
	}
};