Commit a112a6f
refactor!: set up initial public API
mod.ts only publicly exports JsonSequenceDecoderStream. The two individual Transformer-creating functions could be exported publicly later, but I'm keeping it minimal for now. The utility functions in mod_test.ts are now in a separate _test_utils.ts module.
1 parent: 3db47b1
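For context, the import surface after this change is minimal; a rough sketch of what consumers now see (module specifiers are illustrative):

import { JsonSequenceDecoderStream } from "./mod.ts"; // the public API
// _jsonSeqDelimiterTransformer and _stringToJSONTransformer are now
// underscore-prefixed, marking them internal by convention.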

4 files changed (+205 -105 lines)
_test_utils.ts

Lines changed: 34 additions & 0 deletions
@@ -0,0 +1,34 @@
+import { assert, assertEquals } from "./dev_deps.ts";
+import { JSON_SEQ_END, JSON_SEQ_START } from "./mod.ts";
+
+export function chunkify(str: string, size: number): string[] {
+  assert(size > 0);
+  const chunks: string[] = [];
+  for (let i = 0; i < str.length; i += size) {
+    chunks.push(str.substring(i, Math.min(str.length, i + size)));
+  }
+  return chunks;
+}
+
+export function enlargen(chunk: string, length: number): string {
+  return chunk.repeat(Math.ceil(length / chunk.length)).substring(0, length);
+}
+
+export function jsonSeqChunk<Content extends string = string>(
+  content: Content,
+): `${typeof JSON_SEQ_START}${Content}${typeof JSON_SEQ_END}` {
+  return `${JSON_SEQ_START}${content}${JSON_SEQ_END}`;
+}
+
+export async function assertStreamContainsChunks<T>(
+  stream: ReadableStream<T>,
+  chunks: ReadonlyArray<T>,
+): Promise<void> {
+  const reader = stream.getReader();
+  for (const chunk of chunks) {
+    const result = await reader.read();
+    assertEquals(result, { done: false, value: chunk });
+  }
+  const result = await reader.read();
+  assertEquals(result, { done: true, value: undefined });
+}
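Taken together these helpers keep stream tests declarative: chunkify re-splits input to exercise arbitrary chunk boundaries, jsonSeqChunk wraps a JSON text in the RFC 7464 delimiters, and assertStreamContainsChunks drains a stream and checks its output. A sketch of how they might combine in a test (not from this commit; assumes a runtime where ReadableStream.from() is available):

import { JsonSequenceDecoderStream } from "./mod.ts";
import {
  assertStreamContainsChunks,
  chunkify,
  jsonSeqChunk,
} from "./_test_utils.ts";

Deno.test("decodes values split across arbitrary chunk boundaries", async () => {
  const input = jsonSeqChunk('{"n":1}') + jsonSeqChunk('{"n":2}');
  // Re-split the encoded text into 3-byte chunks to stress the decoder.
  const chunks = chunkify(input, 3).map((s) => new TextEncoder().encode(s));
  const stream = ReadableStream.from(chunks)
    .pipeThrough(new JsonSequenceDecoderStream());
  await assertStreamContainsChunks(stream, [{ n: 1 }, { n: 2 }]);
});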

_test_utils_test.ts

Lines changed: 18 additions & 0 deletions
@@ -0,0 +1,18 @@
+import { assertEquals } from "./dev_deps.ts";
+import { chunkify, enlargen } from "./_test_utils.ts";
+
+Deno.test("chunkify", () => {
+  assertEquals(chunkify("", 1), []);
+  assertEquals(chunkify("", 10), []);
+  assertEquals(chunkify("foo", 10), ["foo"]);
+  assertEquals(chunkify("foo", 1), ["f", "o", "o"]);
+  assertEquals(chunkify("foo", 2), ["fo", "o"]);
+});
+
+Deno.test("enlargen", () => {
+  assertEquals(enlargen("a", 3), "aaa");
+  assertEquals(enlargen("abc", 5), "abcab");
+  assertEquals(enlargen("abc", 1), "a");
+  assertEquals(enlargen("abc", 2), "ab");
+  assertEquals(enlargen("abc", 3), "abc");
+});

mod.ts

Lines changed: 41 additions & 10 deletions
@@ -13,10 +13,14 @@ enum State {
   BEFORE_CHUNK_END,
 }
 
-export function jsonSeqDelimiterTransformer(options?: {
+export interface _JsonSeqDelimiterTransformerOptions {
   strict?: boolean;
-}): Transformer<string, string> {
-  const strict = options?.strict === undefined ? true : options?.strict;
+}
+
+export function _jsonSeqDelimiterTransformer(
+  options?: _JsonSeqDelimiterTransformerOptions,
+): Transformer<string, string> {
+  const strict = options?.strict === undefined ? false : options?.strict;
   let state: State = State.BEFORE_CHUNK_START;
   let unDelimitedChunks: string[] = [];
 
@@ -29,7 +33,7 @@ export function jsonSeqDelimiterTransformer(options?: {
       throw new Error(
         `leading content before chunk start: ${
           start < 0 ? chunk : chunk.substring(0, start)
-        }`
+        }`,
       );
     }
     if (start < 0) {
@@ -63,25 +67,52 @@ export function jsonSeqDelimiterTransformer(options?: {
   };
 }
 
-export function stringToJSONTransformer(): Transformer<string, unknown> {
+export function _stringToJSONTransformer(): Transformer<string, unknown> {
   return {
     transform(chunk, controller) {
      controller.enqueue(JSON.parse(chunk));
    },
  };
 }
 
+export interface JsonSequenceDecoderStreamOptions {
+  /** If `true`, raise errors instead of recovering when parsing malformed
+   * streams.
+   *
+   * The default is `false`, as the json-seq spec (RFC 7464) encourages decoders
+   * to automatically handle stream errors, such as truncated JSON texts.
+   *
+   * * When `true`, the decoder behaves as if the stream format **MUST** exactly
+   *   match the [JSON Text Sequence Encoding] format.
+   * * When `false` the decoder follows the more permissive
+   *   [JSON Text Sequence Parsing] format and other permissive behaviour
+   *   described in the spec.
+   *
+   * [JSON Text Sequence Encoding]: https://datatracker.ietf.org/doc/html/rfc7464#section-2.2
+   * [JSON Text Sequence Parsing]: https://datatracker.ietf.org/doc/html/rfc7464#section-2.1
+   */
+  strict?: boolean;
+}
+
+/** A streaming decoder that decodes RFC 7464 JSON Sequences to JSON values.
+ *
+ * The stream consumes UTF-8-encoded bytes. The byte stream consists of zero or
+ * more JSON-encoded values, each of which is preceded by a `'\x1E'` character,
+ * and followed by a `'\n'` character.
+ *
+ * The stream produces the values resulting from parsing each individual JSON
+ * text in the stream.
+ */
 export class JsonSequenceDecoderStream
-  implements TransformStream<Uint8Array, unknown>
-{
+  implements TransformStream<Uint8Array, unknown> {
   readonly readable: ReadableStream<unknown>;
   readonly writable: WritableStream<Uint8Array>;
 
-  constructor() {
+  constructor(options?: JsonSequenceDecoderStreamOptions) {
     const decoder = new TextDecoderStream();
     this.readable = decoder.readable
-      .pipeThrough(new TransformStream(jsonSeqDelimiterTransformer()))
-      .pipeThrough(new TransformStream(stringToJSONTransformer()));
+      .pipeThrough(new TransformStream(_jsonSeqDelimiterTransformer(options)))
+      .pipeThrough(new TransformStream(_stringToJSONTransformer()));
     this.writable = decoder.writable;
   }
 }
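Since the class is now the entire public surface, typical consumption is a single pipeThrough. A rough sketch (the URL is hypothetical, and async iteration over ReadableStream is assumed to be supported by the runtime):

import { JsonSequenceDecoderStream } from "./mod.ts";

// Illustrative endpoint; any ReadableStream<Uint8Array> of json-seq bytes works.
const response = await fetch("https://example.com/events.json-seq");
const values = response.body!
  .pipeThrough(new JsonSequenceDecoderStream({ strict: true })); // throw on malformed input

for await (const value of values) {
  console.log(value); // each parsed JSON value, in sequence order
}

Omitting the options object (or passing { strict: false }) keeps the permissive RFC 7464 parsing behaviour described in the doc comment above.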
