From 5c61bad142cef060d68b2617ca581eacc12e4978 Mon Sep 17 00:00:00 2001 From: Sebastian Markbage Date: Tue, 15 Aug 2023 21:03:33 -0400 Subject: [PATCH 1/4] Inline all the error chunk emitting into a single helper --- .../react-server/src/ReactFlightServer.js | 151 ++++-------------- 1 file changed, 28 insertions(+), 123 deletions(-) diff --git a/packages/react-server/src/ReactFlightServer.js b/packages/react-server/src/ReactFlightServer.js index e8e18d758dd3d..20a171ff283fe 100644 --- a/packages/react-server/src/ReactFlightServer.js +++ b/packages/react-server/src/ReactFlightServer.js @@ -298,12 +298,7 @@ function serializeThenable(request: Request, thenable: Thenable): number { case 'rejected': { const x = thenable.reason; const digest = logRecoverableError(request, x); - if (__DEV__) { - const {message, stack} = getErrorMessageAndStackDev(x); - emitErrorChunkDev(request, newTask.id, digest, message, stack); - } else { - emitErrorChunkProd(request, newTask.id, digest); - } + emitErrorChunk(request, newTask.id, digest, x); return newTask.id; } default: { @@ -344,12 +339,7 @@ function serializeThenable(request: Request, thenable: Thenable): number { newTask.status = ERRORED; // TODO: We should ideally do this inside performWork so it's scheduled const digest = logRecoverableError(request, reason); - if (__DEV__) { - const {message, stack} = getErrorMessageAndStackDev(reason); - emitErrorChunkDev(request, newTask.id, digest, message, stack); - } else { - emitErrorChunkProd(request, newTask.id, digest); - } + emitErrorChunk(request, newTask.id, digest, reason); if (request.destination !== null) { flushCompletedChunks(request, request.destination); } @@ -681,12 +671,7 @@ function serializeClientReference( request.pendingChunks++; const errorId = request.nextChunkId++; const digest = logRecoverableError(request, x); - if (__DEV__) { - const {message, stack} = getErrorMessageAndStackDev(x); - emitErrorChunkDev(request, errorId, digest, message, stack); - } else { - emitErrorChunkProd(request, errorId, digest); - } + emitErrorChunk(request, errorId, digest, x); return serializeByValueID(errorId); } } @@ -920,12 +905,7 @@ function resolveModelToJSON( request.pendingChunks++; const errorId = request.nextChunkId++; const digest = logRecoverableError(request, x); - if (__DEV__) { - const {message, stack} = getErrorMessageAndStackDev(x); - emitErrorChunkDev(request, errorId, digest, message, stack); - } else { - emitErrorChunkProd(request, errorId, digest); - } + emitErrorChunk(request, errorId, digest, x); return serializeLazyID(errorId); } } @@ -1169,10 +1149,24 @@ function logRecoverableError(request: Request, error: mixed): string { return errorDigest || ''; } -function getErrorMessageAndStackDev(error: mixed): { - message: string, - stack: string, -} { +function fatalError(request: Request, error: mixed): void { + // This is called outside error handling code such as if an error happens in React internals. 
+ if (request.destination !== null) { + request.status = CLOSED; + closeWithError(request.destination, error); + } else { + request.status = CLOSING; + request.fatalError = error; + } +} + +function emitErrorChunk( + request: Request, + id: number, + digest: string, + error: mixed, +): void { + let errorInfo: any; if (__DEV__) { let message; let stack = ''; @@ -1188,53 +1182,12 @@ function getErrorMessageAndStackDev(error: mixed): { } catch (x) { message = 'An error occurred but serializing the error message failed.'; } - return { - message, - stack, - }; + errorInfo = {digest, message, stack}; } else { - // These errors should never make it into a build so we don't need to encode them in codes.json - // eslint-disable-next-line react-internal/prod-error-codes - throw new Error( - 'getErrorMessageAndStackDev should never be called from production mode. This is a bug in React.', - ); + errorInfo = {digest}; } -} - -function fatalError(request: Request, error: mixed): void { - // This is called outside error handling code such as if an error happens in React internals. - if (request.destination !== null) { - request.status = CLOSED; - closeWithError(request.destination, error); - } else { - request.status = CLOSING; - request.fatalError = error; - } -} - -function emitErrorChunkProd( - request: Request, - id: number, - digest: string, -): void { - const processedChunk = processErrorChunkProd(request, id, digest); - request.completedErrorChunks.push(processedChunk); -} - -function emitErrorChunkDev( - request: Request, - id: number, - digest: string, - message: string, - stack: string, -): void { - const processedChunk = processErrorChunkDev( - request, - id, - digest, - message, - stack, - ); + const row = serializeRowHeader('E', id) + stringify(errorInfo) + '\n'; + const processedChunk = stringToChunk(row); request.completedErrorChunks.push(processedChunk); } @@ -1362,12 +1315,7 @@ function retryTask(request: Request, task: Task): void { request.abortableTasks.delete(task); task.status = ERRORED; const digest = logRecoverableError(request, x); - if (__DEV__) { - const {message, stack} = getErrorMessageAndStackDev(x); - emitErrorChunkDev(request, task.id, digest, message, stack); - } else { - emitErrorChunkProd(request, task.id, digest); - } + emitErrorChunk(request, task.id, digest, x); } } } @@ -1547,12 +1495,7 @@ export function abort(request: Request, reason: mixed): void { const digest = logRecoverableError(request, error); request.pendingChunks++; const errorId = request.nextChunkId++; - if (__DEV__) { - const {message, stack} = getErrorMessageAndStackDev(error); - emitErrorChunkDev(request, errorId, digest, message, stack); - } else { - emitErrorChunkProd(request, errorId, digest); - } + emitErrorChunk(request, errorId, digest, error); abortableTasks.forEach(task => abortTask(task, request, errorId)); abortableTasks.clear(); } @@ -1587,44 +1530,6 @@ function serializeRowHeader(tag: string, id: number) { return id.toString(16) + ':' + tag; } -function processErrorChunkProd( - request: Request, - id: number, - digest: string, -): Chunk { - if (__DEV__) { - // These errors should never make it into a build so we don't need to encode them in codes.json - // eslint-disable-next-line react-internal/prod-error-codes - throw new Error( - 'processErrorChunkProd should never be called while in development mode. Use processErrorChunkDev instead. 
This is a bug in React.', - ); - } - - const errorInfo: any = {digest}; - const row = serializeRowHeader('E', id) + stringify(errorInfo) + '\n'; - return stringToChunk(row); -} - -function processErrorChunkDev( - request: Request, - id: number, - digest: string, - message: string, - stack: string, -): Chunk { - if (!__DEV__) { - // These errors should never make it into a build so we don't need to encode them in codes.json - // eslint-disable-next-line react-internal/prod-error-codes - throw new Error( - 'processErrorChunkDev should never be called while in production mode. Use processErrorChunkProd instead. This is a bug in React.', - ); - } - - const errorInfo: any = {digest, message, stack}; - const row = serializeRowHeader('E', id) + stringify(errorInfo) + '\n'; - return stringToChunk(row); -} - function processModelChunk( request: Request, id: number, From 7025fe876b0b4f208ddafaeb046978dfca3bdb4e Mon Sep 17 00:00:00 2001 From: Sebastian Markbage Date: Tue, 15 Aug 2023 21:12:21 -0400 Subject: [PATCH 2/4] Inline single calls to processX functions --- .../react-server/src/ReactFlightServer.js | 78 ++++--------------- 1 file changed, 15 insertions(+), 63 deletions(-) diff --git a/packages/react-server/src/ReactFlightServer.js b/packages/react-server/src/ReactFlightServer.js index 20a171ff283fe..b53400597bd4b 100644 --- a/packages/react-server/src/ReactFlightServer.js +++ b/packages/react-server/src/ReactFlightServer.js @@ -719,11 +719,9 @@ function serializeLargeTextString(request: Request, text: string): string { request.pendingChunks += 2; const textId = request.nextChunkId++; const textChunk = stringToChunk(text); - const headerChunk = processTextHeader( - request, - textId, - byteLengthOfChunk(textChunk), - ); + const binaryLength = byteLengthOfChunk(textChunk); + const row = textId.toString(16) + ':T' + binaryLength.toString(16) + ','; + const headerChunk = stringToChunk(row); request.completedRegularChunks.push(headerChunk, textChunk); return serializeByValueID(textId); } @@ -750,12 +748,10 @@ function serializeTypedArray( const bufferId = request.nextChunkId++; // TODO: Convert to little endian if that's not the server default. 
const binaryChunk = typedArrayToBinaryChunk(typedArray); - const headerChunk = processBufferHeader( - request, - tag, - bufferId, - byteLengthOfBinaryChunk(binaryChunk), - ); + const binaryLength = byteLengthOfBinaryChunk(binaryChunk); + const row = + bufferId.toString(16) + ':' + tag + binaryLength.toString(16) + ','; + const headerChunk = stringToChunk(row); request.completedRegularChunks.push(headerChunk, binaryChunk); return serializeByValueID(bufferId); } @@ -1196,21 +1192,18 @@ function emitImportChunk( id: number, clientReferenceMetadata: ClientReferenceMetadata, ): void { - const processedChunk = processImportChunk( - request, - id, - clientReferenceMetadata, - ); + // $FlowFixMe[incompatible-type] stringify can return null + const json: string = stringify(clientReferenceMetadata); + const row = serializeRowHeader('I', id) + json + '\n'; + const processedChunk = stringToChunk(row); request.completedImportChunks.push(processedChunk); } function emitHintChunk(request: Request, code: string, model: HintModel): void { - const processedChunk = processHintChunk( - request, - request.nextChunkId++, - code, - model, - ); + const json: string = stringify(model); + const id = request.nextChunkId++; + const row = serializeRowHeader('H' + code, id) + json + '\n'; + const processedChunk = stringToChunk(row); request.completedHintChunks.push(processedChunk); } @@ -1550,44 +1543,3 @@ function processReferenceChunk( const row = id.toString(16) + ':' + json + '\n'; return stringToChunk(row); } - -function processImportChunk( - request: Request, - id: number, - clientReferenceMetadata: ReactClientValue, -): Chunk { - // $FlowFixMe[incompatible-type] stringify can return null - const json: string = stringify(clientReferenceMetadata); - const row = serializeRowHeader('I', id) + json + '\n'; - return stringToChunk(row); -} - -function processHintChunk( - request: Request, - id: number, - code: string, - model: JSONValue, -): Chunk { - const json: string = stringify(model); - const row = serializeRowHeader('H' + code, id) + json + '\n'; - return stringToChunk(row); -} - -function processTextHeader( - request: Request, - id: number, - binaryLength: number, -): Chunk { - const row = id.toString(16) + ':T' + binaryLength.toString(16) + ','; - return stringToChunk(row); -} - -function processBufferHeader( - request: Request, - tag: string, - id: number, - binaryLength: number, -): Chunk { - const row = id.toString(16) + ':' + tag + binaryLength.toString(16) + ','; - return stringToChunk(row); -} From c17329440c783c2f94d8c0a353f1349429e9c563 Mon Sep 17 00:00:00 2001 From: Sebastian Markbage Date: Tue, 15 Aug 2023 21:15:44 -0400 Subject: [PATCH 3/4] processModelChunk to emitModelChunk This pattern is more inline with the other reuses. --- .../react-server/src/ReactFlightServer.js | 29 +++++++++---------- 1 file changed, 14 insertions(+), 15 deletions(-) diff --git a/packages/react-server/src/ReactFlightServer.js b/packages/react-server/src/ReactFlightServer.js index b53400597bd4b..fd7cf074c5895 100644 --- a/packages/react-server/src/ReactFlightServer.js +++ b/packages/react-server/src/ReactFlightServer.js @@ -680,8 +680,7 @@ function outlineModel(request: Request, value: any): number { request.pendingChunks++; const outlinedId = request.nextChunkId++; // We assume that this object doesn't suspend, but a child might. 
- const processedChunk = processModelChunk(request, outlinedId, value); - request.completedRegularChunks.push(processedChunk); + emitModelChunk(request, outlinedId, value); return outlinedId; } @@ -1223,6 +1222,18 @@ function emitProviderChunk( request.completedRegularChunks.push(processedChunk); } +function emitModelChunk( + request: Request, + id: number, + model: ReactClientValue, +): void { + // $FlowFixMe[incompatible-type] stringify can return null + const json: string = stringify(model, request.toJSON); + const row = id.toString(16) + ':' + json + '\n'; + const processedChunk = stringToChunk(row); + request.completedRegularChunks.push(processedChunk); +} + function retryTask(request: Request, task: Task): void { if (task.status !== PENDING) { // We completed this by other means before we had a chance to retry it. @@ -1283,8 +1294,7 @@ function retryTask(request: Request, task: Task): void { } } - const processedChunk = processModelChunk(request, task.id, value); - request.completedRegularChunks.push(processedChunk); + emitModelChunk(request, task.id, value); request.abortableTasks.delete(task); task.status = COMPLETED; } catch (thrownValue) { @@ -1523,17 +1533,6 @@ function serializeRowHeader(tag: string, id: number) { return id.toString(16) + ':' + tag; } -function processModelChunk( - request: Request, - id: number, - model: ReactClientValue, -): Chunk { - // $FlowFixMe[incompatible-type] stringify can return null - const json: string = stringify(model, request.toJSON); - const row = id.toString(16) + ':' + json + '\n'; - return stringToChunk(row); -} - function processReferenceChunk( request: Request, id: number, From d520b82006051205deec17323113ed6d39569484 Mon Sep 17 00:00:00 2001 From: Sebastian Markbage Date: Tue, 15 Aug 2023 21:19:22 -0400 Subject: [PATCH 4/4] Rename processReferenceChunk to encodeReferenceChunk This one still is around and is kind of a special case. 
--- .../react-server/src/ReactFlightServer.js | 34 +++++++++---------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/packages/react-server/src/ReactFlightServer.js b/packages/react-server/src/ReactFlightServer.js index fd7cf074c5895..200316550734d 100644 --- a/packages/react-server/src/ReactFlightServer.js +++ b/packages/react-server/src/ReactFlightServer.js @@ -628,6 +628,20 @@ function serializeBigInt(n: bigint): string { return '$n' + n.toString(10); } +function serializeRowHeader(tag: string, id: number) { + return id.toString(16) + ':' + tag; +} + +function encodeReferenceChunk( + request: Request, + id: number, + reference: string, +): Chunk { + const json = stringify(reference); + const row = id.toString(16) + ':' + json + '\n'; + return stringToChunk(row); +} + function serializeClientReference( request: Request, parent: @@ -1208,7 +1222,7 @@ function emitHintChunk(request: Request, code: string, model: HintModel): void { function emitSymbolChunk(request: Request, id: number, name: string): void { const symbolReference = serializeSymbolReference(name); - const processedChunk = processReferenceChunk(request, id, symbolReference); + const processedChunk = encodeReferenceChunk(request, id, symbolReference); request.completedImportChunks.push(processedChunk); } @@ -1218,7 +1232,7 @@ function emitProviderChunk( contextName: string, ): void { const contextReference = serializeProviderReference(contextName); - const processedChunk = processReferenceChunk(request, id, contextReference); + const processedChunk = encodeReferenceChunk(request, id, contextReference); request.completedRegularChunks.push(processedChunk); } @@ -1355,7 +1369,7 @@ function abortTask(task: Task, request: Request, errorId: number): void { // Instead of emitting an error per task.id, we emit a model that only // has a single value referencing the error. const ref = serializeByValueID(errorId); - const processedChunk = processReferenceChunk(request, task.id, ref); + const processedChunk = encodeReferenceChunk(request, task.id, ref); request.completedErrorChunks.push(processedChunk); } @@ -1528,17 +1542,3 @@ function importServerContexts( } return rootContextSnapshot; } - -function serializeRowHeader(tag: string, id: number) { - return id.toString(16) + ':' + tag; -} - -function processReferenceChunk( - request: Request, - id: number, - reference: string, -): Chunk { - const json = stringify(reference); - const row = id.toString(16) + ':' + json + '\n'; - return stringToChunk(row); -}
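For reference, a minimal standalone sketch of the row format that the consolidated emit*/encode* helpers in this series write: a hex chunk id, an optional one-letter tag ('E' for errors, 'I' for imports, 'H' + code for hints, 'T' for text headers, no tag for model rows), and a JSON payload terminated by a newline. The encodeErrorRow/encodeModelRow/encodeTextHeader names below are illustrative only (they are not in the React source), and plain JSON.stringify stands in for the request-aware stringify and stringToChunk used in the actual file; per the emitErrorChunk change in patch 1, error rows carry {digest, message, stack} in DEV builds and only {digest} in prod.

    // Illustrative sketch of the Flight row encoding produced by the helpers above.
    function serializeRowHeader(tag, id) {
      // Hex id followed by a one-letter tag, e.g. "a:E" for an error row with id 10.
      return id.toString(16) + ':' + tag;
    }

    function encodeErrorRow(id, digest, error, dev) {
      // DEV rows include message/stack for debugging; prod rows expose only the digest.
      const errorInfo = dev
        ? {digest, message: String(error.message), stack: String(error.stack)}
        : {digest};
      return serializeRowHeader('E', id) + JSON.stringify(errorInfo) + '\n';
    }

    function encodeModelRow(id, model) {
      // Model rows have no tag: hex id, colon, JSON payload, newline.
      return id.toString(16) + ':' + JSON.stringify(model) + '\n';
    }

    function encodeTextHeader(id, byteLength) {
      // Text rows are length-prefixed (hex byte count) with a trailing comma instead of JSON + newline.
      return id.toString(16) + ':T' + byteLength.toString(16) + ',';
    }

    console.log(encodeErrorRow(3, 'abc123', new Error('boom'), false)); // 3:E{"digest":"abc123"}
    console.log(encodeModelRow(16, {hello: 'world'}));                  // 10:{"hello":"world"}
    console.log(encodeTextHeader(5, 1024));                             // 5:T400,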