
Commit 68c3e44

Optimize large strings
1 parent 194d544 commit 68c3e44

File tree

11 files changed (+184, -45 lines)

packages/react-client/src/ReactFlightClient.js

Lines changed: 69 additions & 35 deletions
@@ -179,7 +179,7 @@ export type Response = {
   _rowID: number, // parts of a row ID parsed so far
   _rowTag: number, // 0 indicates that we're currently parsing the row ID
   _rowLength: number, // remaining bytes in the row. 0 indicates that we're looking for a newline.
-  _buffer: Array<string | Uint8Array>, // chunks received so far as part of this row
+  _buffer: Array<Uint8Array>, // chunks received so far as part of this row
 };

 function readChunk<T>(chunk: SomeChunk<T>): T {
@@ -288,6 +288,14 @@ function createResolvedModuleChunk<T>(
   return new Chunk(RESOLVED_MODULE, value, null, response);
 }

+function createInitializedTextChunk(
+  response: Response,
+  value: string,
+): InitializedChunk<string> {
+  // $FlowFixMe[invalid-constructor] Flow doesn't support functions as constructors
+  return new Chunk(INITIALIZED, value, null, response);
+}
+
 function resolveModelChunk<T>(
   chunk: SomeChunk<T>,
   value: UninitializedModel,
@@ -704,6 +712,13 @@ function resolveModel(
   }
 }

+function resolveText(response: Response, id: number, text: string): void {
+  const chunks = response._chunks;
+  // We assume that we always reference large strings after they've been
+  // emitted.
+  chunks.set(id, createInitializedTextChunk(response, text));
+}
+
 function resolveModule(
   response: Response,
   id: number,
@@ -818,26 +833,22 @@ function resolveHint(
   code: string,
   model: UninitializedModel,
 ): void {
-  const hintModel = parseModel<HintModel>(response, model);
+  const hintModel: HintModel = parseModel(response, model);
   dispatchHint(code, hintModel);
 }

 function processFullRow(
   response: Response,
   id: number,
   tag: number,
-  buffer: Array<string | Uint8Array>,
+  buffer: Array<Uint8Array>,
   lastChunk: string | Uint8Array,
 ): void {
   let row = '';
   const stringDecoder = response._stringDecoder;
   for (let i = 0; i < buffer.length; i++) {
     const chunk = buffer[i];
-    if (typeof chunk === 'string') {
-      row += chunk;
-    } else {
-      row += readPartialStringChunk(stringDecoder, chunk);
-    }
+    row += readPartialStringChunk(stringDecoder, chunk);
   }
   if (typeof lastChunk === 'string') {
     row += lastChunk;
@@ -869,6 +880,10 @@ function processFullRow(
       }
       return;
     }
+    case 84 /* "T" */: {
+      resolveText(response, id, row);
+      return;
+    }
     default: {
       // We assume anything else is JSON.
       resolveModel(response, id, row);
@@ -882,33 +897,50 @@ export function processBinaryChunk(
   chunk: Uint8Array,
 ): void {
   let i = 0;
-  while (i < chunk.length) {
+  let rowState = response._rowState;
+  let rowID = response._rowID;
+  let rowTag = response._rowTag;
+  let rowLength = response._rowLength;
+  const buffer = response._buffer;
+  const chunkLength = chunk.length;
+  while (i < chunkLength) {
     let lastIdx = -1;
-    switch (response._rowState) {
+    switch (rowState) {
       case ROW_ID: {
         const byte = chunk[i++];
         if (byte === 58 /* ":" */) {
           // Finished the rowID, next we'll parse the tag.
-          response._rowState = ROW_TAG;
+          rowState = ROW_TAG;
         } else {
-          response._rowID =
-            (response._rowID << 4) | (byte > 96 ? byte - 87 : byte - 48);
+          rowID = (rowID << 4) | (byte > 96 ? byte - 87 : byte - 48);
         }
         continue;
       }
       case ROW_TAG: {
         const resolvedRowTag = chunk[i];
-        if (resolvedRowTag > 64 && resolvedRowTag < 91) {
-          response._rowTag = resolvedRowTag;
+        if (resolvedRowTag === 84 /* "T" */) {
+          rowTag = resolvedRowTag;
+          rowState = ROW_LENGTH;
+          i++;
+        } else if (resolvedRowTag > 64 && resolvedRowTag < 91 /* "A"-"Z" */) {
+          rowTag = resolvedRowTag;
+          rowState = ROW_CHUNK_BY_NEWLINE;
           i++;
         } else {
+          rowTag = 0;
+          rowState = ROW_CHUNK_BY_NEWLINE;
           // This was an unknown tag so it was probably part of the data.
         }
-        response._rowState = ROW_CHUNK_BY_NEWLINE;
         continue;
       }
       case ROW_LENGTH: {
-        // TODO
+        const byte = chunk[i++];
+        if (byte === 44 /* "," */) {
+          // Finished the rowLength, next we'll buffer up to that length.
+          rowState = ROW_CHUNK_BY_LENGTH;
+        } else {
+          rowLength = (rowLength << 4) | (byte > 96 ? byte - 87 : byte - 48);
+        }
         continue;
       }
       case ROW_CHUNK_BY_NEWLINE: {
@@ -918,7 +950,6 @@ export function processBinaryChunk(
       }
       case ROW_CHUNK_BY_LENGTH: {
         // We're looking for the remaining byte length
-        const rowLength = response._rowLength;
         if (i + rowLength <= chunk.length) {
           lastIdx = i + rowLength;
         }
@@ -927,31 +958,34 @@
     }
     if (lastIdx > -1) {
       // We found the last chunk of the row
-      const lastChunk = chunk.slice(i, lastIdx);
-      processFullRow(
-        response,
-        response._rowID,
-        response._rowTag,
-        response._buffer,
-        lastChunk,
-      );
+      const offset = chunk.byteOffset + i;
+      const length = lastIdx - i;
+      const lastChunk = new Uint8Array(chunk.buffer, offset, length);
+      processFullRow(response, rowID, rowTag, buffer, lastChunk);
       // Reset state machine for a new row
-      response._rowState = ROW_ID;
-      response._rowTag = 0;
-      response._rowID = 0;
-      response._rowLength = 0;
-      response._buffer.length = 0;
+      rowState = ROW_ID;
+      rowTag = 0;
+      rowID = 0;
+      rowLength = 0;
+      buffer.length = 0;
       i = lastIdx + 1;
     } else {
       // The rest of this row is in a future chunk. We stash the rest of the
       // current chunk until we can process the full row.
-      const remainingSlice = chunk.slice(i);
-      response._buffer.push(remainingSlice);
-      // Update how many bytes we're still waiting for.
-      response._rowLength -= remainingSlice.length;
+      const offset = chunk.byteOffset + i;
+      const length = chunk.byteLength - i;
+      const remainingSlice = new Uint8Array(chunk.buffer, offset, length);
+      buffer.push(remainingSlice);
+      // Update how many bytes we're still waiting for. If we're looking for
+      // a newline, this doesn't hurt since we'll just ignore it.
+      rowLength -= remainingSlice.byteLength;
      break;
    }
  }
+  response._rowState = rowState;
+  response._rowID = rowID;
+  response._rowTag = rowTag;
+  response._rowLength = rowLength;
 }

 function parseModel<T>(response: Response, json: UninitializedModel): T {
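Read together, the parser changes above imply a framing for large-string rows along these lines: a hex row ID, ':', the 'T' tag, a hex byte length, ',', then that many raw UTF-8 bytes with no JSON escaping. Below is a minimal sketch of how a producer could frame such a row so that this state machine (ROW_ID → ROW_TAG → ROW_LENGTH → ROW_CHUNK_BY_LENGTH) can consume it; encodeTextRow is a hypothetical helper for illustration, not an API added by this commit.

// Illustrative sketch only; the actual server-side encoding lives in the Flight
// server changes accompanying this commit, which are not shown in this excerpt.
const encoder = new TextEncoder();

function encodeTextRow(id, text) {
  const payload = encoder.encode(text); // raw UTF-8, no JSON escaping
  // "<id in hex>:T<byte length in hex>," followed by the payload bytes.
  const header = encoder.encode(
    id.toString(16) + ':T' + payload.byteLength.toString(16) + ',',
  );
  const row = new Uint8Array(header.byteLength + payload.byteLength);
  row.set(header, 0);
  row.set(payload, header.byteLength);
  return row;
}

// encodeTextRow(1, 'a'.repeat(4096)) yields the bytes of "1:T1000," followed by
// 4096 raw bytes. The client buffers by byte length instead of scanning for a
// newline, so the text never has to be escaped or re-parsed as JSON.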

packages/react-dom-bindings/src/server/ReactDOMLegacyServerStreamConfig.js

Lines changed: 4 additions & 0 deletions
@@ -57,6 +57,10 @@ export function clonePrecomputedChunk(
   return chunk;
 }

+export function byteLengthOfChunk(chunk: Chunk | PrecomputedChunk): number {
+  throw new Error('Not implemented.');
+}
+
 export function closeWithError(destination: Destination, error: mixed): void {
   // $FlowFixMe[incompatible-call]: This is an Error object or the destination accepts other types.
   destination.destroy(error);

packages/react-server-dom-fb/src/ReactServerStreamConfigFB.js

Lines changed: 4 additions & 0 deletions
@@ -63,6 +63,10 @@ export function clonePrecomputedChunk(
   return chunk;
 }

+export function byteLengthOfChunk(chunk: Chunk | PrecomputedChunk): number {
+  throw new Error('Not implemented.');
+}
+
 export function closeWithError(destination: Destination, error: mixed): void {
   destination.done = true;
   destination.fatal = true;
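Both stream configs above only add a stub that throws. Presumably the Flight server calls byteLengthOfChunk to compute the byte-length prefix for 'T' rows, and these targets don't support that yet. As a hypothetical sketch (not part of this commit), a config whose chunks are strings or Uint8Arrays could implement it roughly like this:

// Assumed implementation for a config where a Chunk is a string or a Uint8Array.
// For strings this must be the UTF-8 byte count, not the JavaScript string length.
const chunkEncoder = new TextEncoder();

export function byteLengthOfChunk(chunk: Chunk | PrecomputedChunk): number {
  return typeof chunk === 'string'
    ? chunkEncoder.encode(chunk).byteLength
    : chunk.byteLength;
}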

packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMEdge-test.js

Lines changed: 21 additions & 0 deletions
@@ -98,4 +98,25 @@ describe('ReactFlightDOMEdge', () => {
     const result = await readResult(ssrStream);
     expect(result).toEqual('<span>Client Component</span>');
   });
+
+  it('should encode long string in a compact format', async () => {
+    const testString = '"\n\t'.repeat(500) + '🙃';
+
+    const stream = ReactServerDOMServer.renderToReadableStream({
+      text: testString,
+    });
+    const [stream1, stream2] = stream.tee();
+
+    const serializedContent = await readResult(stream1);
+    // The content should be compact and unescaped
+    expect(serializedContent.length).toBeLessThan(2000);
+    expect(serializedContent).not.toContain('\\n');
+    expect(serializedContent).not.toContain('\\t');
+    expect(serializedContent).not.toContain('\\"');
+    expect(serializedContent).toContain('\t');
+
+    const result = await ReactServerDOMClient.createFromReadableStream(stream2);
+    // Should still match the result when parsed
+    expect(result.text).toBe(testString);
+  });
 });

packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMNode-test.js

Lines changed: 27 additions & 0 deletions
@@ -104,4 +104,31 @@ describe('ReactFlightDOMNode', () => {
     const result = await readResult(ssrStream);
     expect(result).toEqual('<span>Client Component</span>');
   });
+
+  it('should encode long string in a compact format', async () => {
+    const testString = '"\n\t'.repeat(500) + '🙃';
+
+    const stream = ReactServerDOMServer.renderToPipeableStream({
+      text: testString,
+    });
+
+    const readable = new Stream.PassThrough();
+
+    const stringResult = readResult(readable);
+    const parsedResult = ReactServerDOMClient.createFromNodeStream(readable);
+
+    stream.pipe(readable);
+
+    const serializedContent = await stringResult;
+    // The content should be compact and unescaped
+    expect(serializedContent.length).toBeLessThan(2000);
+    expect(serializedContent).not.toContain('\\n');
+    expect(serializedContent).not.toContain('\\t');
+    expect(serializedContent).not.toContain('\\"');
+    expect(serializedContent).toContain('\t');
+
+    const result = await parsedResult;
+    // Should still match the result when parsed
+    expect(result.text).toBe(testString);
+  });
 });
