2025-05-12 05:38:44 +09:00
parent dced21c3f8
commit 6d78bfa46e
8120 changed files with 1161564 additions and 0 deletions

book/node_modules/get-stream/license generated vendored Normal file

@@ -0,0 +1,9 @@
MIT License
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

book/node_modules/get-stream/package.json generated vendored Normal file

@@ -0,0 +1,60 @@
{
"name": "get-stream",
"version": "9.0.1",
"description": "Get a stream as a string, Buffer, ArrayBuffer or array",
"license": "MIT",
"repository": "sindresorhus/get-stream",
"funding": "https://github.com/sponsors/sindresorhus",
"author": {
"name": "Sindre Sorhus",
"email": "sindresorhus@gmail.com",
"url": "https://sindresorhus.com"
},
"type": "module",
"exports": {
"types": "./source/index.d.ts",
"browser": "./source/exports.js",
"default": "./source/index.js"
},
"sideEffects": false,
"engines": {
"node": ">=18"
},
"scripts": {
"benchmark": "node benchmarks/index.js",
"test": "xo && ava && tsd --typings=source/index.d.ts --files=source/index.test-d.ts"
},
"files": [
"source",
"!*.test-d.ts"
],
"keywords": [
"get",
"stream",
"promise",
"concat",
"string",
"text",
"buffer",
"read",
"data",
"consume",
"readable",
"readablestream",
"object",
"concat"
],
"dependencies": {
"@sec-ant/readable-stream": "^0.4.1",
"is-stream": "^4.0.1"
},
"devDependencies": {
"@types/node": "^20.8.9",
"ava": "^6.1.2",
"onetime": "^7.0.0",
"precise-now": "^3.0.0",
"stream-json": "^1.8.0",
"tsd": "^0.29.0",
"xo": "^0.58.0"
}
}

book/node_modules/get-stream/readme.md generated vendored Normal file

@@ -0,0 +1,303 @@
# get-stream
> Get a stream as a string, Buffer, ArrayBuffer or array
## Features
- Works in any JavaScript environment ([Node.js](#nodejs-streams), [browsers](#browser-support), etc.).
- Supports [text streams](#getstreamstream-options), [binary streams](#getstreamasbufferstream-options) and [object streams](#getstreamasarraystream-options).
- Supports [async iterables](#async-iterables).
- Can set a [maximum stream size](#maxbuffer).
- Returns [partially read data](#errors) when the stream errors.
- [Fast](#benchmarks).
## Install
```sh
npm install get-stream
```
## Usage
### Node.js streams
```js
import fs from 'node:fs';
import getStream from 'get-stream';
const stream = fs.createReadStream('unicorn.txt');
console.log(await getStream(stream));
/*
,,))))))));,
__)))))))))))))),
\|/ -\(((((''''((((((((.
-*-==//////(('' . `)))))),
/|\ ))| o ;-. '((((( ,(,
( `| / ) ;))))' ,_))^;(~
| | | ,))((((_ _____------~~~-. %,;(;(>';'~
o_); ; )))(((` ~---~ `:: \ %%~~)(v;(`('~
; ''''```` `: `:::|\,__,%% );`'; ~
| _ ) / `:|`----' `-'
______/\/~ | / /
/~;;.____/;;' / ___--,-( `;;;/
/ // _;______;'------~~~~~ /;;/\ /
// | | / ; \;;,\
(<_ | ; /',/-----' _>
\_| ||_ //~;~~~~~~~~~
`\_| (,~~
\~\
~~
*/
```
### Web streams
```js
import getStream from 'get-stream';
const {body: readableStream} = await fetch('https://example.com');
console.log(await getStream(readableStream));
```
This works in any browser, even [the ones](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream#browser_compatibility) not supporting `ReadableStream.values()` yet.
### Async iterables
```js
import {opendir} from 'node:fs/promises';
import {getStreamAsArray} from 'get-stream';
const asyncIterable = await opendir(directory);
console.log(await getStreamAsArray(asyncIterable));
```
## API
The following methods read the stream's contents and return them as a promise.
### getStream(stream, options?)
`stream`: [`stream.Readable`](https://nodejs.org/api/stream.html#class-streamreadable), [`ReadableStream`](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream), or [`AsyncIterable<string | Buffer | ArrayBuffer | DataView | TypedArray>`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols#the_async_iterator_and_async_iterable_protocols)\
`options`: [`Options`](#options)
Get the given `stream` as a string.
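A minimal sketch, mirroring the [usage](#usage) section above (`unicorn.txt` is an assumed local file):
```js
import fs from 'node:fs';
import getStream from 'get-stream';

const stream = fs.createReadStream('unicorn.txt');
console.log(await getStream(stream));
//=> the file's contents as a string
```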
### getStreamAsBuffer(stream, options?)
Get the given `stream` as a Node.js [`Buffer`](https://nodejs.org/api/buffer.html#class-buffer).
```js
import {getStreamAsBuffer} from 'get-stream';
const stream = fs.createReadStream('unicorn.png');
console.log(await getStreamAsBuffer(stream));
```
### getStreamAsArrayBuffer(stream, options?)
Get the given `stream` as an [`ArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer).
```js
import {getStreamAsArrayBuffer} from 'get-stream';
const {body: readableStream} = await fetch('https://example.com');
console.log(await getStreamAsArrayBuffer(readableStream));
```
### getStreamAsArray(stream, options?)
Get the given `stream` as an array. Unlike [other methods](#api), this supports [streams of objects](https://nodejs.org/api/stream.html#object-mode).
```js
import {getStreamAsArray} from 'get-stream';
const {body: readableStream} = await fetch('https://example.com');
console.log(await getStreamAsArray(readableStream));
```
#### options
Type: `object`
##### maxBuffer
Type: `number`\
Default: `Infinity`
Maximum length of the stream. If exceeded, the promise will be rejected with a `MaxBufferError`.
Depending on the [method](#api), the length is measured with [`string.length`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/length), [`buffer.length`](https://nodejs.org/api/buffer.html#buflength), [`arrayBuffer.byteLength`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer/byteLength) or [`array.length`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/length).
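A sketch of handling the rejection (the `1e6` limit is an illustrative value, and `stream` stands for any supported stream):
```js
import getStream, {MaxBufferError} from 'get-stream';

try {
	await getStream(stream, {maxBuffer: 1e6});
} catch (error) {
	if (error instanceof MaxBufferError) {
		console.log(error.bufferedData); // The truncated contents read so far
	} else {
		throw error;
	}
}
```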
## Errors
If the stream errors, the returned promise will be rejected with the `error`. Any contents already read from the stream are available on `error.bufferedData`, which is a `string`, a `Buffer`, an `ArrayBuffer` or an array depending on the [method used](#api).
```js
import getStream from 'get-stream';
try {
await getStream(streamThatErrorsAtTheEnd('unicorn'));
} catch (error) {
console.log(error.bufferedData);
//=> 'unicorn'
}
```
## Browser support
For this module to work in browsers, a bundler must be used that either:
- Supports the [`exports.browser`](https://nodejs.org/api/packages.html#community-conditions-definitions) field in `package.json`
- Strips or ignores `node:*` imports
Most bundlers (such as [Webpack](https://webpack.js.org/guides/package-exports/#target-environment)) support either of these.
Additionally, browsers support [web streams](#web-streams) and [async iterables](#async-iterables), but not [Node.js streams](#nodejs-streams).
## Tips
### Alternatives
If you do not need the [`maxBuffer`](#maxbuffer) option, [`error.bufferedData`](#errors), nor browser support, you can use the following methods instead of this package.
#### [`streamConsumers.text()`](https://nodejs.org/api/webstreams.html#streamconsumerstextstream)
```js
import fs from 'node:fs';
import {text} from 'node:stream/consumers';
const stream = fs.createReadStream('unicorn.txt', {encoding: 'utf8'});
console.log(await text(stream));
```
#### [`streamConsumers.buffer()`](https://nodejs.org/api/webstreams.html#streamconsumersbufferstream)
```js
import {buffer} from 'node:stream/consumers';
console.log(await buffer(stream));
```
#### [`streamConsumers.arrayBuffer()`](https://nodejs.org/api/webstreams.html#streamconsumersarraybufferstream)
```js
import {arrayBuffer} from 'node:stream/consumers';
console.log(await arrayBuffer(stream));
```
#### [`readable.toArray()`](https://nodejs.org/api/stream.html#readabletoarrayoptions)
```js
console.log(await stream.toArray());
```
#### [`Array.fromAsync()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/fromAsync)
If your [environment supports it](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/fromAsync#browser_compatibility):
```js
console.log(await Array.fromAsync(stream));
```
### Non-UTF-8 encoding
When all of the following conditions apply:
- [`getStream()`](#getstreamstream-options) is used (as opposed to [`getStreamAsBuffer()`](#getstreamasbufferstream-options) or [`getStreamAsArrayBuffer()`](#getstreamasarraybufferstream-options))
- The stream is binary (not text)
- The stream's encoding is not UTF-8 (for example, it is UTF-16, hexadecimal, or Base64)
Then the stream must be decoded using a transform stream like [`TextDecoderStream`](https://developer.mozilla.org/en-US/docs/Web/API/TextDecoderStream) or [`b64`](https://github.com/hapijs/b64).
```js
import getStream from 'get-stream';
const textDecoderStream = new TextDecoderStream('utf-16le');
const {body: readableStream} = await fetch('https://example.com');
console.log(await getStream(readableStream.pipeThrough(textDecoderStream)));
```
### Blobs
[`getStreamAsArrayBuffer()`](#getstreamasarraybufferstream-options) can be used to create [Blobs](https://developer.mozilla.org/en-US/docs/Web/API/Blob).
```js
import {getStreamAsArrayBuffer} from 'get-stream';
const stream = fs.createReadStream('unicorn.txt');
console.log(new Blob([await getStreamAsArrayBuffer(stream)]));
```
### JSON streaming
[`getStreamAsArray()`](#getstreamasarraystream-options) can be combined with JSON streaming utilities to parse JSON incrementally.
```js
import fs from 'node:fs';
import {compose as composeStreams} from 'node:stream';
import {getStreamAsArray} from 'get-stream';
import streamJson from 'stream-json';
import streamJsonArray from 'stream-json/streamers/StreamArray.js';
const stream = fs.createReadStream('big-array-of-objects.json');
console.log(await getStreamAsArray(
composeStreams(stream, streamJson.parser(), streamJsonArray.streamArray()),
));
```
## Benchmarks
### Node.js stream (100 MB, binary)
- `getStream()`: 142ms
- `text()`: 139ms
- `getStreamAsBuffer()`: 106ms
- `buffer()`: 83ms
- `getStreamAsArrayBuffer()`: 105ms
- `arrayBuffer()`: 81ms
- `getStreamAsArray()`: 24ms
- `stream.toArray()`: 21ms
### Node.js stream (100 MB, text)
- `getStream()`: 90ms
- `text()`: 89ms
- `getStreamAsBuffer()`: 127ms
- `buffer()`: 192ms
- `getStreamAsArrayBuffer()`: 129ms
- `arrayBuffer()`: 195ms
- `getStreamAsArray()`: 89ms
- `stream.toArray()`: 90ms
### Web ReadableStream (100 MB, binary)
- `getStream()`: 223ms
- `text()`: 221ms
- `getStreamAsBuffer()`: 182ms
- `buffer()`: 153ms
- `getStreamAsArrayBuffer()`: 171ms
- `arrayBuffer()`: 155ms
- `getStreamAsArray()`: 83ms
### Web ReadableStream (100 MB, text)
- `getStream()`: 141ms
- `text()`: 139ms
- `getStreamAsBuffer()`: 91ms
- `buffer()`: 80ms
- `getStreamAsArrayBuffer()`: 89ms
- `arrayBuffer()`: 81ms
- `getStreamAsArray()`: 21ms
[Benchmarks' source file](benchmarks/index.js).
## FAQ
### How is this different from [`concat-stream`](https://github.com/maxogden/concat-stream)?
This module accepts a stream instead of being one, and returns a promise instead of using a callback. The API is simpler: it only supports returning a string, a `Buffer`, an `ArrayBuffer` or an array, and instead of fragile type inference you explicitly choose what you want. It also doesn't depend on the huge `readable-stream` package.
## Related
- [get-stdin](https://github.com/sindresorhus/get-stdin) - Get stdin as a string or buffer
- [into-stream](https://github.com/sindresorhus/into-stream) - The opposite of this package

book/node_modules/get-stream/source/array-buffer.js generated vendored Normal file

@@ -0,0 +1,84 @@
import {getStreamContents} from './contents.js';
import {noop, throwObjectStream, getLengthProperty} from './utils.js';
export async function getStreamAsArrayBuffer(stream, options) {
return getStreamContents(stream, arrayBufferMethods, options);
}
const initArrayBuffer = () => ({contents: new ArrayBuffer(0)});
const useTextEncoder = chunk => textEncoder.encode(chunk);
const textEncoder = new TextEncoder();
const useUint8Array = chunk => new Uint8Array(chunk);
const useUint8ArrayWithOffset = chunk => new Uint8Array(chunk.buffer, chunk.byteOffset, chunk.byteLength);
const truncateArrayBufferChunk = (convertedChunk, chunkSize) => convertedChunk.slice(0, chunkSize);
// `contents` is an increasingly growing `Uint8Array`.
const addArrayBufferChunk = (convertedChunk, {contents, length: previousLength}, length) => {
const newContents = hasArrayBufferResize() ? resizeArrayBuffer(contents, length) : resizeArrayBufferSlow(contents, length);
new Uint8Array(newContents).set(convertedChunk, previousLength);
return newContents;
};
// Without `ArrayBuffer.resize()`, `contents` size is always a power of 2.
// This means its last bytes are zeroes (not stream data), which need to be
// trimmed at the end with `ArrayBuffer.slice()`.
const resizeArrayBufferSlow = (contents, length) => {
if (length <= contents.byteLength) {
return contents;
}
const arrayBuffer = new ArrayBuffer(getNewContentsLength(length));
new Uint8Array(arrayBuffer).set(new Uint8Array(contents), 0);
return arrayBuffer;
};
// With `ArrayBuffer.resize()`, `contents` size matches exactly the size of
// the stream data. It does not include extraneous zeroes to trim at the end.
// The underlying `ArrayBuffer` does allocate a number of bytes that is a power
// of 2, but those bytes are only visible after calling `ArrayBuffer.resize()`.
const resizeArrayBuffer = (contents, length) => {
if (length <= contents.maxByteLength) {
contents.resize(length);
return contents;
}
const arrayBuffer = new ArrayBuffer(length, {maxByteLength: getNewContentsLength(length)});
new Uint8Array(arrayBuffer).set(new Uint8Array(contents), 0);
return arrayBuffer;
};
// Retrieve the closest `length` that is both >= and a power of 2
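// For example, a `length` of 5 yields 2 ** 3 === 8.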
const getNewContentsLength = length => SCALE_FACTOR ** Math.ceil(Math.log(length) / Math.log(SCALE_FACTOR));
const SCALE_FACTOR = 2;
const finalizeArrayBuffer = ({contents, length}) => hasArrayBufferResize() ? contents : contents.slice(0, length);
// `ArrayBuffer.slice()` is slow. When `ArrayBuffer.resize()` is available
// (Node >=20.0.0, Safari >=16.4 and Chrome), we can use it instead.
// eslint-disable-next-line no-warning-comments
// TODO: remove after dropping support for Node 20.
// eslint-disable-next-line no-warning-comments
// TODO: use `ArrayBuffer.transferToFixedLength()` instead once it is available
const hasArrayBufferResize = () => 'resize' in ArrayBuffer.prototype;
const arrayBufferMethods = {
init: initArrayBuffer,
convertChunk: {
string: useTextEncoder,
buffer: useUint8Array,
arrayBuffer: useUint8Array,
dataView: useUint8ArrayWithOffset,
typedArray: useUint8ArrayWithOffset,
others: throwObjectStream,
},
getSize: getLengthProperty,
truncateChunk: truncateArrayBufferChunk,
addChunk: addArrayBufferChunk,
getFinalChunk: noop,
finalize: finalizeArrayBuffer,
};

book/node_modules/get-stream/source/array.js generated vendored Normal file

@@ -0,0 +1,32 @@
import {getStreamContents} from './contents.js';
import {identity, noop, getContentsProperty} from './utils.js';
export async function getStreamAsArray(stream, options) {
return getStreamContents(stream, arrayMethods, options);
}
const initArray = () => ({contents: []});
const increment = () => 1;
const addArrayChunk = (convertedChunk, {contents}) => {
contents.push(convertedChunk);
return contents;
};
const arrayMethods = {
init: initArray,
convertChunk: {
string: identity,
buffer: identity,
arrayBuffer: identity,
dataView: identity,
typedArray: identity,
others: identity,
},
getSize: increment,
truncateChunk: noop,
addChunk: addArrayChunk,
getFinalChunk: noop,
finalize: getContentsProperty,
};

book/node_modules/get-stream/source/buffer.js generated vendored Normal file

@@ -0,0 +1,19 @@
import {getStreamAsArrayBuffer} from './array-buffer.js';
export async function getStreamAsBuffer(stream, options) {
if (!('Buffer' in globalThis)) {
throw new Error('getStreamAsBuffer() is only supported in Node.js');
}
try {
return arrayBufferToNodeBuffer(await getStreamAsArrayBuffer(stream, options));
} catch (error) {
if (error.bufferedData !== undefined) {
error.bufferedData = arrayBufferToNodeBuffer(error.bufferedData);
}
throw error;
}
}
const arrayBufferToNodeBuffer = arrayBuffer => globalThis.Buffer.from(arrayBuffer);

book/node_modules/get-stream/source/contents.js generated vendored Normal file

@@ -0,0 +1,121 @@
import {getAsyncIterable} from './stream.js';
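// Generic driver shared by the four public methods. The `methods` object
// supplies the type-specific behavior: `init()` creates the accumulator,
// `convertChunk` maps each chunk type to a converter, `getSize()` measures a
// converted chunk, `truncateChunk()` cuts a chunk that would overflow
// `maxBuffer`, `addChunk()` appends it, `getFinalChunk()` flushes any
// trailing state, and `finalize()` produces the value to return.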
export const getStreamContents = async (stream, {init, convertChunk, getSize, truncateChunk, addChunk, getFinalChunk, finalize}, {maxBuffer = Number.POSITIVE_INFINITY} = {}) => {
const asyncIterable = getAsyncIterable(stream);
const state = init();
state.length = 0;
try {
for await (const chunk of asyncIterable) {
const chunkType = getChunkType(chunk);
const convertedChunk = convertChunk[chunkType](chunk, state);
appendChunk({
convertedChunk,
state,
getSize,
truncateChunk,
addChunk,
maxBuffer,
});
}
appendFinalChunk({
state,
convertChunk,
getSize,
truncateChunk,
addChunk,
getFinalChunk,
maxBuffer,
});
return finalize(state);
} catch (error) {
const normalizedError = typeof error === 'object' && error !== null ? error : new Error(error);
normalizedError.bufferedData = finalize(state);
throw normalizedError;
}
};
const appendFinalChunk = ({state, getSize, truncateChunk, addChunk, getFinalChunk, maxBuffer}) => {
const convertedChunk = getFinalChunk(state);
if (convertedChunk !== undefined) {
appendChunk({
convertedChunk,
state,
getSize,
truncateChunk,
addChunk,
maxBuffer,
});
}
};
const appendChunk = ({convertedChunk, state, getSize, truncateChunk, addChunk, maxBuffer}) => {
const chunkSize = getSize(convertedChunk);
const newLength = state.length + chunkSize;
if (newLength <= maxBuffer) {
addNewChunk(convertedChunk, state, addChunk, newLength);
return;
}
const truncatedChunk = truncateChunk(convertedChunk, maxBuffer - state.length);
if (truncatedChunk !== undefined) {
addNewChunk(truncatedChunk, state, addChunk, maxBuffer);
}
throw new MaxBufferError();
};
const addNewChunk = (convertedChunk, state, addChunk, newLength) => {
state.contents = addChunk(convertedChunk, state, newLength);
state.length = newLength;
};
const getChunkType = chunk => {
const typeOfChunk = typeof chunk;
if (typeOfChunk === 'string') {
return 'string';
}
if (typeOfChunk !== 'object' || chunk === null) {
return 'others';
}
if (globalThis.Buffer?.isBuffer(chunk)) {
return 'buffer';
}
const prototypeName = objectToString.call(chunk);
if (prototypeName === '[object ArrayBuffer]') {
return 'arrayBuffer';
}
if (prototypeName === '[object DataView]') {
return 'dataView';
}
if (
Number.isInteger(chunk.byteLength)
&& Number.isInteger(chunk.byteOffset)
&& objectToString.call(chunk.buffer) === '[object ArrayBuffer]'
) {
return 'typedArray';
}
return 'others';
};
const {toString: objectToString} = Object.prototype;
export class MaxBufferError extends Error {
name = 'MaxBufferError';
constructor() {
super('maxBuffer exceeded');
}
}

book/node_modules/get-stream/source/exports.js generated vendored Normal file

@@ -0,0 +1,5 @@
export {getStreamAsArray} from './array.js';
export {getStreamAsArrayBuffer} from './array-buffer.js';
export {getStreamAsBuffer} from './buffer.js';
export {getStreamAsString as default} from './string.js';
export {MaxBufferError} from './contents.js';

book/node_modules/get-stream/source/index.d.ts generated vendored Normal file

@@ -0,0 +1,121 @@
import {type Readable} from 'node:stream';
import {type Buffer} from 'node:buffer';
export class MaxBufferError extends Error {
readonly name: 'MaxBufferError';
constructor();
}
// eslint-disable-next-line @typescript-eslint/ban-types
type TextStreamItem = string | Buffer | ArrayBuffer | ArrayBufferView;
export type AnyStream<StreamItem = TextStreamItem> = Readable | ReadableStream<StreamItem> | AsyncIterable<StreamItem>;
export type Options = {
/**
Maximum length of the stream. If exceeded, the promise will be rejected with a `MaxBufferError`.
Depending on the [method](#api), the length is measured with [`string.length`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/length), [`buffer.length`](https://nodejs.org/api/buffer.html#buflength), [`arrayBuffer.byteLength`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer/byteLength) or [`array.length`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/length).
@default Infinity
*/
readonly maxBuffer?: number;
};
/**
Get the given `stream` as a string.
@returns The stream's contents as a promise.
@example
```
import fs from 'node:fs';
import getStream from 'get-stream';
const stream = fs.createReadStream('unicorn.txt');
console.log(await getStream(stream));
// ,,))))))));,
// __)))))))))))))),
// \|/ -\(((((''''((((((((.
// -*-==//////(('' . `)))))),
// /|\ ))| o ;-. '((((( ,(,
// ( `| / ) ;))))' ,_))^;(~
// | | | ,))((((_ _____------~~~-. %,;(;(>';'~
// o_); ; )))(((` ~---~ `:: \ %%~~)(v;(`('~
// ; ''''```` `: `:::|\,__,%% );`'; ~
// | _ ) / `:|`----' `-'
// ______/\/~ | / /
// /~;;.____/;;' / ___--,-( `;;;/
// / // _;______;'------~~~~~ /;;/\ /
// // | | / ; \;;,\
// (<_ | ; /',/-----' _>
// \_| ||_ //~;~~~~~~~~~
// `\_| (,~~
// \~\
// ~~
```
@example
```
import getStream from 'get-stream';
const {body: readableStream} = await fetch('https://example.com');
console.log(await getStream(readableStream));
```
@example
```
import {opendir} from 'node:fs/promises';
import {getStreamAsArray} from 'get-stream';
const asyncIterable = await opendir(directory);
console.log(await getStreamAsArray(asyncIterable));
```
*/
export default function getStream(stream: AnyStream, options?: Options): Promise<string>;
/**
Get the given `stream` as a Node.js [`Buffer`](https://nodejs.org/api/buffer.html#class-buffer).
@returns The stream's contents as a promise.
@example
```
import {getStreamAsBuffer} from 'get-stream';
const stream = fs.createReadStream('unicorn.png');
console.log(await getStreamAsBuffer(stream));
```
*/
// eslint-disable-next-line @typescript-eslint/ban-types
export function getStreamAsBuffer(stream: AnyStream, options?: Options): Promise<Buffer>;
/**
Get the given `stream` as an [`ArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer).
@returns The stream's contents as a promise.
@example
```
import {getStreamAsArrayBuffer} from 'get-stream';
const {body: readableStream} = await fetch('https://example.com');
console.log(await getStreamAsArrayBuffer(readableStream));
```
*/
export function getStreamAsArrayBuffer(stream: AnyStream, options?: Options): Promise<ArrayBuffer>;
/**
Get the given `stream` as an array. Unlike [other methods](#api), this supports [streams of objects](https://nodejs.org/api/stream.html#object-mode).
@returns The stream's contents as a promise.
@example
```
import {getStreamAsArray} from 'get-stream';
const {body: readableStream} = await fetch('https://example.com');
console.log(await getStreamAsArray(readableStream));
```
*/
export function getStreamAsArray<Item>(stream: AnyStream<Item>, options?: Options): Promise<Item[]>;

book/node_modules/get-stream/source/index.js generated vendored Normal file

@@ -0,0 +1,13 @@
import {on} from 'node:events';
import {finished} from 'node:stream/promises';
import {nodeImports} from './stream.js';
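// Inject the Node-only helpers into the shared `nodeImports` object so
// `stream.js` can use them without statically importing `node:*` modules,
// which would break the browser entrypoint (`exports.js`).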
Object.assign(nodeImports, {on, finished});
export {
default,
getStreamAsArray,
getStreamAsArrayBuffer,
getStreamAsBuffer,
MaxBufferError,
} from './exports.js';

book/node_modules/get-stream/source/stream.js generated vendored Normal file

@@ -0,0 +1,65 @@
import {isReadableStream} from 'is-stream';
import {asyncIterator} from '@sec-ant/readable-stream/ponyfill';
export const getAsyncIterable = stream => {
if (isReadableStream(stream, {checkOpen: false}) && nodeImports.on !== undefined) {
return getStreamIterable(stream);
}
if (typeof stream?.[Symbol.asyncIterator] === 'function') {
return stream;
}
// `ReadableStream[Symbol.asyncIterator]` support is missing in multiple browsers, so we ponyfill it
if (toString.call(stream) === '[object ReadableStream]') {
return asyncIterator.call(stream);
}
throw new TypeError('The first argument must be a Readable, a ReadableStream, or an async iterable.');
};
const {toString} = Object.prototype;
// The default iterable for Node.js streams does not allow for multiple readers at once, so we re-implement it
const getStreamIterable = async function * (stream) {
const controller = new AbortController();
const state = {};
handleStreamEnd(stream, controller, state);
try {
for await (const [chunk] of nodeImports.on(stream, 'data', {signal: controller.signal})) {
yield chunk;
}
} catch (error) {
// Stream failure, for example due to `stream.destroy(error)`
if (state.error !== undefined) {
throw state.error;
// `error` event directly emitted on stream
} else if (!controller.signal.aborted) {
throw error;
// Otherwise, stream completed successfully
}
// The `finally` block also runs when the caller throws, for example due to the `maxBuffer` option
} finally {
stream.destroy();
}
};
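// Waits for the stream to end or fail (storing any failure on `state`), then
// aborts the controller so the `on()` iteration above stops waiting for chunks.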
const handleStreamEnd = async (stream, controller, state) => {
try {
await nodeImports.finished(stream, {
cleanup: true,
readable: true,
writable: false,
error: false,
});
} catch (error) {
state.error = error;
} finally {
controller.abort();
}
};
// Loaded by the Node entrypoint, but not by the browser one.
// This prevents using dynamic imports.
export const nodeImports = {};

book/node_modules/get-stream/source/string.js generated vendored Normal file

@@ -0,0 +1,41 @@
import {getStreamContents} from './contents.js';
import {
identity,
getContentsProperty,
throwObjectStream,
getLengthProperty,
} from './utils.js';
export async function getStreamAsString(stream, options) {
return getStreamContents(stream, stringMethods, options);
}
const initString = () => ({contents: '', textDecoder: new TextDecoder()});
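// `{stream: true}` makes the decoder buffer any trailing partial multi-byte
// sequence until the next chunk (or the final flush below) completes it.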
const useTextDecoder = (chunk, {textDecoder}) => textDecoder.decode(chunk, {stream: true});
const addStringChunk = (convertedChunk, {contents}) => contents + convertedChunk;
const truncateStringChunk = (convertedChunk, chunkSize) => convertedChunk.slice(0, chunkSize);
const getFinalStringChunk = ({textDecoder}) => {
const finalChunk = textDecoder.decode();
return finalChunk === '' ? undefined : finalChunk;
};
const stringMethods = {
init: initString,
convertChunk: {
string: identity,
buffer: useTextDecoder,
arrayBuffer: useTextDecoder,
dataView: useTextDecoder,
typedArray: useTextDecoder,
others: throwObjectStream,
},
getSize: getLengthProperty,
truncateChunk: truncateStringChunk,
addChunk: addStringChunk,
getFinalChunk: getFinalStringChunk,
finalize: getContentsProperty,
};

book/node_modules/get-stream/source/utils.js generated vendored Normal file

@@ -0,0 +1,11 @@
export const identity = value => value;
export const noop = () => undefined;
export const getContentsProperty = ({contents}) => contents;
export const throwObjectStream = chunk => {
throw new Error(`Streams in object mode are not supported: ${String(chunk)}`);
};
export const getLengthProperty = convertedChunk => convertedChunk.length;