Skip to content

Commit fabf6e6

Browse files
committed
feat: add full NodeJS.ReadableStream support
see also: microsoft/TypeScript#420 (comment). Thanks: @nthypes, @ericmasiello
1 parent 35e3106 commit fabf6e6

File tree

13 files changed

+233
-165
lines changed

13 files changed

+233
-165
lines changed

.github/workflows/ci.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -16,8 +16,8 @@ jobs:
1616
strategy:
1717
matrix:
1818
nodejs: [
19-
# 14,
20-
# 16,
19+
14,
20+
16,
2121
18,
2222
# "lts/*"
2323
]

README.md

Lines changed: 11 additions & 99 deletions
Original file line numberDiff line numberDiff line change
@@ -1,106 +1,18 @@
11
# `yield-stream`
22

3-
- **Docs: https://yield-stream.vercel.app**
3+
[**Github**](https://github.com/gptlabs/yield-stream) |
4+
[**NPM**](https://npmjs.com/package/yield-stream) |
5+
[**Docs**](https://yield-stream.vercel.app)
46

5-
A small library for switching between streams, generators, and arrays.
7+
A small library for switching between streams, generators, and arrays. See docs
8+
for details.
69

10+
### Note: Using `NodeJS.ReadableStream`
711

8-
```ts
9-
/**
10-
* `compose(f, g, h, ...)` returns a generator function `G(data)` that yields
11-
* all `(f · g · h · ...)(data)`.
12-
*
13-
* @note Used to compose multiple transforms into a `pipeline`.
14-
*/
15-
export const compose = <T>(
16-
...generators: GeneratorFn<T>[]
17-
): GeneratorFn<T> => {
18-
return generators.reduce(
19-
(prev, next) => async function* (data) {
20-
for await (const chunk of prev(data)) {
21-
yield* next(chunk);
22-
}
23-
},
24-
);
25-
};
26-
27-
/**
28-
* Accepts a stream and transforms and returns a stream of the transformed
29-
* chunks. Transforms can yield multiple chunks per input chunk.
30-
*/
31-
export const pipeline = <T>(
32-
stream: ReadableStream<T>,
33-
...transforms: GeneratorFn<T>[]
34-
): ReadableStream<T> => {
35-
const composed = compose(...transforms);
36-
return generateStream(
37-
async function* () {
38-
for await (const chunk of yieldStream(stream)) {
39-
yield* composed(chunk);
40-
}
41-
}
42-
);
43-
};
44-
45-
/**
46-
* Accepts a stream and yields all of its chunks.
47-
*/
48-
export const yieldStream = async function* <T>(
49-
stream: ReadableStream<T>,
50-
controller?: AbortController
51-
) {
52-
const reader = stream.getReader();
53-
while (true) {
54-
if (controller?.signal.aborted) {
55-
break;
56-
}
57-
58-
const { done, value } = await reader.read();
59-
if (done) {
60-
break;
61-
}
62-
63-
yield value;
64-
}
65-
};
12+
By default, this library uses WHATWG `ReadableStream`, which is only available
13+
on Node 18+. If you are on an older version of Node or otherwise need to use
14+
`NodeJS.ReadableStream`, import from:
6615

67-
/**
68-
* Accepts a generator function and streams its outputs.
69-
*/
70-
export const generateStream = <T, TReturn, D>(
71-
G: StreamGenerator<D, T, TReturn>,
72-
data?: D
73-
): ReadableStream<T> => {
74-
return new ReadableStream<T>({
75-
async start(controller) {
76-
for await (const chunk of G(data)) {
77-
controller.enqueue(chunk);
78-
}
79-
controller.close();
80-
},
81-
});
82-
};
83-
84-
/**
85-
* Accepts an array and returns a stream of its items.
86-
*/
87-
export const streamArray = <T>(array: T[]): ReadableStream<T> => {
88-
return generateStream(function* () {
89-
for (const item of array) {
90-
yield item;
91-
}
92-
});
93-
};
94-
95-
/**
96-
* Accepts a stream and yields a growing buffer of all chunks received.
97-
*/
98-
export const buffer = async function* <T>(stream: ReadableStream<T>) {
99-
const buffer: T[] = [];
100-
101-
for await (const chunk of yieldStream(stream)) {
102-
buffer.push(chunk);
103-
yield buffer;
104-
}
105-
};
16+
```ts
17+
import { yieldStream } from "yield-stream/node";
10618
```

node.d.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
export * from "./dist/platforms/node";

node.js

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
export * from "./dist/platforms/node.js";

package.json

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -9,10 +9,13 @@
99
},
1010
"types": "dist/index.d.ts",
1111
"files": [
12-
"dist"
12+
"dist",
13+
"node.d.ts",
14+
"node.js"
1315
],
1416
"exports": {
1517
"./package.json": "./package.json",
18+
"./node": "./node.js",
1619
".": "./dist/index.js",
1720
"./*": "./dist/*/index.js"
1821
},
@@ -41,4 +44,4 @@
4144
"shim-streams": "^0.0.2",
4245
"web-streams-polyfill": "^3.2.1"
4346
}
44-
}
47+
}

src/index.ts

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,5 @@
1-
export * from "./lib";
2-
export * from "./types";
1+
/**
2+
* `.` entrypoint is Edge by default. NodeJS.ReadableStream version available at
3+
* `./node`.
4+
*/
5+
export * from "./platforms/edge";

src/lib.ts renamed to src/lib/edge.ts

Lines changed: 22 additions & 43 deletions
Original file line numberDiff line numberDiff line change
@@ -1,21 +1,21 @@
1-
import { GeneratorFn, StreamGenerator } from "./types";
1+
import { GeneratorFn, StreamGenerator } from "../types";
2+
import { compose, generateArray } from "./shared";
23

34
/**
4-
* `compose(f, g, h, ...)` returns a generator function `G(data)` that yields
5-
* all `(f · g · h · ...)(data)`.
6-
*
7-
* @note Used to compose multiple transforms into a `pipeline`.
5+
* Accepts a generator function and streams its outputs.
86
*/
9-
export const compose = <Chunk>(
10-
...generators: GeneratorFn<Chunk>[]
11-
): GeneratorFn<Chunk> => {
12-
return generators.reduce(
13-
(prev, next) => async function* (data) {
14-
for await (const chunk of prev(data)) {
15-
yield* next(chunk);
7+
export const generateStream = <Chunk, Return, Data>(
8+
G: StreamGenerator<Data, Chunk, Return>,
9+
data?: Data
10+
): ReadableStream<Chunk> => {
11+
return new ReadableStream<Chunk>({
12+
async start(controller) {
13+
for await (const chunk of G(data)) {
14+
controller.enqueue(chunk);
1615
}
16+
controller.close();
1717
},
18-
);
18+
});
1919
};
2020

2121
/**
@@ -36,6 +36,15 @@ export const pipeline = <Chunk>(
3636
);
3737
};
3838

39+
/**
40+
* Accepts an array and returns a stream of its items.
41+
*/
42+
export const streamArray = <Chunk>(
43+
array: Chunk[]
44+
): ReadableStream<Chunk> => {
45+
return generateStream(generateArray(array));
46+
};
47+
3948
/**
4049
* Accepts a stream and yields all of its chunks.
4150
*/
@@ -58,36 +67,6 @@ export const yieldStream = async function* <Chunk>(
5867
}
5968
};
6069

61-
/**
62-
* Accepts a generator function and streams its outputs.
63-
*/
64-
export const generateStream = <Chunk, Return, Data>(
65-
G: StreamGenerator<Data, Chunk, Return>,
66-
data?: Data
67-
): ReadableStream<Chunk> => {
68-
return new ReadableStream<Chunk>({
69-
async start(controller) {
70-
for await (const chunk of G(data)) {
71-
controller.enqueue(chunk);
72-
}
73-
controller.close();
74-
},
75-
});
76-
};
77-
78-
/**
79-
* Accepts an array and returns a stream of its items.
80-
*/
81-
export const streamArray = <Chunk>(
82-
array: Chunk[]
83-
): ReadableStream<Chunk> => {
84-
return generateStream(function* () {
85-
for (const item of array) {
86-
yield item;
87-
}
88-
});
89-
};
90-
9170
/**
9271
* Accepts a stream and yields a growing buffer of all chunks received.
9372
*/

src/lib/node.ts

Lines changed: 73 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,73 @@
1+
import { Readable } from "stream";
2+
import { GeneratorFn, StreamGenerator } from "../types";
3+
import { compose, generateArray } from "./shared";
4+
5+
/**
6+
* Accepts a generator function and returns a NodeJS.ReadableStream of its
7+
* outputs.
8+
*/
9+
export const generateStream = <Chunk, Return, Data>(
10+
G: StreamGenerator<Data, Chunk, Return>,
11+
data?: Data
12+
): NodeJS.ReadableStream => {
13+
const readable = Readable.from(G(data));
14+
return readable;
15+
};
16+
17+
18+
/**
19+
* Accepts a stream and yields all of its chunks.
20+
*/
21+
export const yieldStream = async function* <Chunk>(
22+
stream: NodeJS.ReadableStream,
23+
controller?: AbortController
24+
): AsyncGenerator<Chunk> {
25+
for await (const chunk of stream) {
26+
if (controller?.signal.aborted) {
27+
break;
28+
}
29+
30+
yield chunk as Chunk;
31+
}
32+
};
33+
34+
/**
35+
* Accepts a stream and transforms, and returns a stream of the transformed
36+
* chunks. Transforms can yield multiple chunks per input chunk.
37+
*/
38+
export const pipeline = <Chunk>(
39+
stream: NodeJS.ReadableStream,
40+
...transforms: GeneratorFn<Chunk>[]
41+
): NodeJS.ReadableStream => {
42+
const composed = compose(...transforms);
43+
return generateStream(
44+
async function* () {
45+
for await (const chunk of yieldStream<Chunk>(stream)) {
46+
yield* composed(chunk);
47+
}
48+
}
49+
);
50+
};
51+
52+
/**
53+
* Accepts an array and returns a stream of its items.
54+
*/
55+
export const streamArray = <Chunk>(
56+
array: Chunk[]
57+
): NodeJS.ReadableStream => {
58+
return generateStream(generateArray(array));
59+
};
60+
61+
/**
62+
* Accepts a stream and yields a growing buffer of all chunks received.
63+
*/
64+
export const buffer = async function* <Chunk>(
65+
stream: NodeJS.ReadableStream
66+
) {
67+
const buffer: Chunk[] = [];
68+
69+
for await (const chunk of yieldStream<Chunk>(stream)) {
70+
buffer.push(chunk);
71+
yield buffer;
72+
}
73+
};

src/lib/shared.ts

Lines changed: 32 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,32 @@
1+
import { GeneratorFn } from "../types";
2+
3+
/**
4+
* `compose(f, g, h, ...)` returns a generator function `G(data)` that yields
5+
* all `(f · g · h · ...)(data)`.
6+
*
7+
* @note Used to compose multiple transforms into a `pipeline`.
8+
*/
9+
export const compose = <Chunk>(
10+
...generators: GeneratorFn<Chunk>[]
11+
): GeneratorFn<Chunk> => {
12+
return generators.reduce(
13+
(prev, next) => async function* (data) {
14+
for await (const chunk of prev(data)) {
15+
yield* next(chunk);
16+
}
17+
},
18+
);
19+
};
20+
21+
/**
22+
* Accepts an array and returns a generator function that yields its items.
23+
*/
24+
export const generateArray = <Chunk>(
25+
array: Chunk[]
26+
) => {
27+
return function* () {
28+
for (const item of array) {
29+
yield item;
30+
}
31+
};
32+
};

0 commit comments

Comments
 (0)