-
Notifications
You must be signed in to change notification settings - Fork 50.3k
[Flight] Keep a separate ref count for debug chunks #33717
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -274,7 +274,7 @@ function patchConsole(consoleInst: typeof console, methodName: string) { | |
| request, | ||
| parseStackTrace(new Error('react-stack-top-frame'), 1), | ||
| ); | ||
| request.pendingChunks++; | ||
| request.pendingDebugChunks++; | ||
| const owner: null | ReactComponentInfo = resolveOwner(); | ||
| const args = Array.from(arguments); | ||
| // Extract the env if this is a console log that was replayed from another env. | ||
|
|
@@ -464,6 +464,7 @@ export type Request = { | |
| timeOrigin: number, | ||
| abortTime: number, | ||
| // DEV-only | ||
| pendingDebugChunks: number, | ||
| completedDebugChunks: Array<Chunk | BinaryChunk>, | ||
| environmentName: () => string, | ||
| filterStackFrame: (url: string, functionName: string) => boolean, | ||
|
|
@@ -578,6 +579,7 @@ function RequestInstance( | |
| this.onFatalError = onFatalError; | ||
|
|
||
| if (__DEV__) { | ||
| this.pendingDebugChunks = 0; | ||
| this.completedDebugChunks = ([]: Array<Chunk>); | ||
| this.environmentName = | ||
| environmentName === undefined | ||
|
|
@@ -716,7 +718,7 @@ function serializeDebugThenable( | |
| thenable: Thenable<any>, | ||
| ): string { | ||
| // Like serializeThenable but for renderDebugModel | ||
| request.pendingChunks++; | ||
| request.pendingDebugChunks++; | ||
| const id = request.nextChunkId++; | ||
| const ref = serializePromiseID(id); | ||
| request.writtenDebugObjects.set(thenable, ref); | ||
|
|
@@ -728,20 +730,9 @@ function serializeDebugThenable( | |
| } | ||
| case 'rejected': { | ||
| const x = thenable.reason; | ||
| if ( | ||
| enablePostpone && | ||
| typeof x === 'object' && | ||
| x !== null && | ||
| (x: any).$$typeof === REACT_POSTPONE_TYPE | ||
| ) { | ||
| const postponeInstance: Postpone = (x: any); | ||
| // We don't log this postpone. | ||
| emitPostponeChunk(request, id, postponeInstance); | ||
| } else { | ||
| // We don't log these errors since they didn't actually throw into Flight. | ||
| const digest = ''; | ||
| emitErrorChunk(request, id, digest, x, true); | ||
| } | ||
| // We don't log these errors since they didn't actually throw into Flight. | ||
| const digest = ''; | ||
| emitErrorChunk(request, id, digest, x, true); | ||
| return ref; | ||
| } | ||
| } | ||
|
|
@@ -778,20 +769,9 @@ function serializeDebugThenable( | |
| enqueueFlush(request); | ||
| return; | ||
| } | ||
| if ( | ||
| enablePostpone && | ||
| typeof reason === 'object' && | ||
| reason !== null && | ||
| (reason: any).$$typeof === REACT_POSTPONE_TYPE | ||
| ) { | ||
| const postponeInstance: Postpone = (reason: any); | ||
| // We don't log this postpone. | ||
| emitPostponeChunk(request, id, postponeInstance); | ||
| } else { | ||
| // We don't log these errors since they didn't actually throw into Flight. | ||
| const digest = ''; | ||
| emitErrorChunk(request, id, digest, reason, true); | ||
| } | ||
| // We don't log these errors since they didn't actually throw into Flight. | ||
| const digest = ''; | ||
| emitErrorChunk(request, id, digest, reason, true); | ||
| enqueueFlush(request); | ||
| }, | ||
| ); | ||
|
|
@@ -1436,7 +1416,7 @@ function renderFunctionComponent<Props>( | |
| const componentName = | ||
| (Component: any).displayName || Component.name || ''; | ||
| const componentEnv = (0, request.environmentName)(); | ||
| request.pendingChunks++; | ||
| request.pendingDebugChunks++; | ||
| componentDebugInfo = ({ | ||
| name: componentName, | ||
| env: componentEnv, | ||
|
|
@@ -2206,7 +2186,7 @@ function visitAsyncNode( | |
| const env = (0, request.environmentName)(); | ||
| advanceTaskTime(request, task, startTime); | ||
| // Then emit a reference to us awaiting it in the current task. | ||
| request.pendingChunks++; | ||
| request.pendingDebugChunks++; | ||
| emitDebugChunk(request, task.id, { | ||
| awaited: ((ioNode: any): ReactIOInfo), // This is deduped by this reference. | ||
| env: env, | ||
|
|
@@ -2263,7 +2243,7 @@ function emitAsyncSequence( | |
| } else if (awaitedNode !== null) { | ||
| // Nothing in user space (unfiltered stack) awaited this. | ||
| serializeIONode(request, awaitedNode, awaitedNode.promise); | ||
| request.pendingChunks++; | ||
| request.pendingDebugChunks++; | ||
| // We log the environment at the time when we ping which may be later than what the | ||
| // environment was when we actually started awaiting. | ||
| const env = (0, request.environmentName)(); | ||
|
|
@@ -2444,7 +2424,7 @@ function serializeDeferredObject( | |
| // This client supports a long lived connection. We can assign this object | ||
| // an ID to be lazy loaded later. | ||
| // This keeps the connection alive until we ask for it or release it. | ||
| request.pendingChunks++; | ||
| request.pendingDebugChunks++; | ||
| const id = request.nextChunkId++; | ||
| deferredDebugObjects.existing.set(value, id); | ||
| deferredDebugObjects.retained.set(id, value); | ||
|
|
@@ -2581,7 +2561,7 @@ function serializeDebugClientReference( | |
| try { | ||
| const clientReferenceMetadata: ClientReferenceMetadata = | ||
| resolveClientReferenceMetadata(request.bundlerConfig, clientReference); | ||
| request.pendingChunks++; | ||
| request.pendingDebugChunks++; | ||
| const importId = request.nextChunkId++; | ||
| emitImportChunk(request, importId, clientReferenceMetadata, true); | ||
| if (parent[0] === REACT_ELEMENT_TYPE && parentPropertyName === '1') { | ||
|
|
@@ -2594,7 +2574,7 @@ function serializeDebugClientReference( | |
| } | ||
| return serializeByValueID(importId); | ||
| } catch (x) { | ||
| request.pendingChunks++; | ||
| request.pendingDebugChunks++; | ||
| const errorId = request.nextChunkId++; | ||
| const digest = logRecoverableError(request, x, null); | ||
| emitErrorChunk(request, errorId, digest, x, true); | ||
|
|
@@ -2697,7 +2677,7 @@ function serializeLargeTextString(request: Request, text: string): string { | |
| } | ||
|
|
||
| function serializeDebugLargeTextString(request: Request, text: string): string { | ||
| request.pendingChunks++; | ||
| request.pendingDebugChunks++; | ||
| const textId = request.nextChunkId++; | ||
| emitTextChunk(request, textId, text, true); | ||
| return serializeByValueID(textId); | ||
|
|
@@ -2806,7 +2786,7 @@ function serializeDebugTypedArray( | |
| tag: string, | ||
| typedArray: $ArrayBufferView, | ||
| ): string { | ||
| request.pendingChunks++; | ||
| request.pendingDebugChunks++; | ||
| const bufferId = request.nextChunkId++; | ||
| emitTypedArrayChunk(request, bufferId, tag, typedArray, true); | ||
| return serializeByValueID(bufferId); | ||
|
|
@@ -2815,6 +2795,7 @@ function serializeDebugTypedArray( | |
| function serializeDebugBlob(request: Request, blob: Blob): string { | ||
| const model: Array<string | Uint8Array> = [blob.type]; | ||
| const reader = blob.stream().getReader(); | ||
| request.pendingDebugChunks++; | ||
| const id = request.nextChunkId++; | ||
| function progress( | ||
| entry: {done: false, value: Uint8Array} | {done: true, value: void}, | ||
|
|
@@ -4082,7 +4063,7 @@ function outlineIOInfo(request: Request, ioInfo: ReactIOInfo): void { | |
| return; | ||
| } | ||
| // We can't serialize the ConsoleTask/Error objects so we need to omit them before serializing. | ||
| request.pendingChunks++; | ||
| request.pendingDebugChunks++; | ||
| const id = request.nextChunkId++; | ||
| const owner = ioInfo.owner; | ||
| // Ensure the owner is already outlined. | ||
|
|
@@ -4160,7 +4141,7 @@ function serializeIONode( | |
| request.abortTime | ||
| : ioNode.end; | ||
|
|
||
| request.pendingChunks++; | ||
| request.pendingDebugChunks++; | ||
| const id = request.nextChunkId++; | ||
| emitIOInfoChunk( | ||
| request, | ||
|
|
@@ -4197,7 +4178,11 @@ function emitTypedArrayChunk( | |
| } | ||
| } | ||
| } | ||
| request.pendingChunks++; // Extra chunk for the header. | ||
| if (debug) { | ||
| request.pendingDebugChunks++; | ||
| } else { | ||
| request.pendingChunks++; // Extra chunk for the header. | ||
| } | ||
| // TODO: Convert to little endian if that's not the server default. | ||
| const binaryChunk = typedArrayToBinaryChunk(typedArray); | ||
| const binaryLength = byteLengthOfBinaryChunk(binaryChunk); | ||
|
|
@@ -4222,7 +4207,11 @@ function emitTextChunk( | |
| 'Existence of byteLengthOfChunk should have already been checked. This is a bug in React.', | ||
| ); | ||
| } | ||
| request.pendingChunks++; // Extra chunk for the header. | ||
| if (debug) { | ||
| request.pendingDebugChunks++; | ||
| } else { | ||
| request.pendingChunks++; // Extra chunk for the header. | ||
| } | ||
|
Comment on lines
+4210
to
+4214
Collaborator
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Should we co-locate this with the if/else below?
Collaborator
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. It might be misleading. It’s unfortunate the way it’s set up now that we add counts separately from pushing the chunks. We have to sometimes, since they’re not always resolved immediately. In this case, this increment is not an exact pair with the push like it is in some other places — it is actually just one of two increments, so co-locating it could be misleading. It’s more co-located with the header than with the push. |
||
| const textChunk = stringToChunk(text); | ||
| const binaryLength = byteLengthOfChunk(textChunk); | ||
| const row = id.toString(16) + ':T' + binaryLength.toString(16) + ','; | ||
|
|
@@ -4607,7 +4596,7 @@ function renderDebugModel( | |
| // $FlowFixMe[method-unbinding] | ||
| '(' + Function.prototype.toString.call(value) + ')', | ||
| ); | ||
| request.pendingChunks++; | ||
| request.pendingDebugChunks++; | ||
| const id = request.nextChunkId++; | ||
| const processedChunk = encodeReferenceChunk(request, id, serializedValue); | ||
| request.completedDebugChunks.push(processedChunk); | ||
|
|
@@ -4765,7 +4754,7 @@ function outlineDebugModel( | |
| } | ||
|
|
||
| const id = request.nextChunkId++; | ||
| request.pendingChunks++; | ||
| request.pendingDebugChunks++; | ||
| emitOutlinedDebugModelChunk(request, id, counter, model); | ||
| return id; | ||
| } | ||
|
|
@@ -4814,7 +4803,7 @@ function emitTimeOriginChunk(request: Request, timeOrigin: number): void { | |
| // We emit the time origin once. All ReactTimeInfo timestamps later in the stream | ||
| // are relative to this time origin. This allows for more compact number encoding | ||
| // and lower precision loss. | ||
| request.pendingChunks++; | ||
| request.pendingDebugChunks++; | ||
| const row = ':N' + timeOrigin + '\n'; | ||
| const processedChunk = stringToChunk(row); | ||
| // TODO: Move to its own priority queue. | ||
|
|
@@ -4841,7 +4830,7 @@ function forwardDebugInfo( | |
| // being no references to this as an owner. | ||
| outlineComponentInfo(request, (info: any)); | ||
| // Emit a reference to the outlined one. | ||
| request.pendingChunks++; | ||
| request.pendingDebugChunks++; | ||
| emitDebugChunk(request, id, info); | ||
| } else if (info.awaited) { | ||
| const ioInfo = info.awaited; | ||
|
|
@@ -4882,11 +4871,11 @@ function forwardDebugInfo( | |
| // $FlowFixMe[cannot-write] | ||
| debugAsyncInfo.stack = debugStack; | ||
| } | ||
| request.pendingChunks++; | ||
| request.pendingDebugChunks++; | ||
| emitDebugChunk(request, id, debugAsyncInfo); | ||
| } | ||
| } else { | ||
| request.pendingChunks++; | ||
| request.pendingDebugChunks++; | ||
| emitDebugChunk(request, id, info); | ||
| } | ||
| } | ||
|
|
@@ -4988,7 +4977,7 @@ function forwardDebugInfoFromAbortedTask(request: Request, task: Task): void { | |
| // complete in time before aborting. | ||
| // The best we can do is try to emit the stack of where this Promise was created. | ||
| serializeIONode(request, node, null); | ||
| request.pendingChunks++; | ||
| request.pendingDebugChunks++; | ||
| const env = (0, request.environmentName)(); | ||
| const asyncInfo: ReactAsyncInfo = { | ||
| awaited: ((node: any): ReactIOInfo), // This is deduped by this reference. | ||
|
|
@@ -5017,7 +5006,7 @@ function emitTimingChunk( | |
| if (!enableProfilerTimer || !enableComponentPerformanceTrack) { | ||
| return; | ||
| } | ||
| request.pendingChunks++; | ||
| request.pendingDebugChunks++; | ||
| const relativeTimestamp = timestamp - request.timeOrigin; | ||
| const row = | ||
| serializeRowHeader('D', id) + '{"time":' + relativeTimestamp + '}\n'; | ||
|
|
@@ -5229,7 +5218,7 @@ function retryTask(request: Request, task: Task): void { | |
| if (__DEV__) { | ||
| const currentEnv = (0, request.environmentName)(); | ||
| if (currentEnv !== task.environmentName) { | ||
| request.pendingChunks++; | ||
| request.pendingDebugChunks++; | ||
| // The environment changed since we last emitted any debug information for this | ||
| // task. We emit an entry that just includes the environment name change. | ||
| emitDebugChunk(request, task.id, {env: currentEnv}); | ||
|
|
@@ -5449,7 +5438,7 @@ function flushCompletedChunks( | |
| const debugChunks = request.completedDebugChunks; | ||
| i = 0; | ||
| for (; i < debugChunks.length; i++) { | ||
| request.pendingChunks--; | ||
| request.pendingDebugChunks--; | ||
| const chunk = debugChunks[i]; | ||
| const keepWriting: boolean = writeChunkAndReturn(destination, chunk); | ||
| if (!keepWriting) { | ||
|
|
@@ -5497,7 +5486,10 @@ function flushCompletedChunks( | |
| completeWriting(destination); | ||
| } | ||
| flushBuffered(destination); | ||
| if (request.pendingChunks === 0) { | ||
| if ( | ||
| request.pendingChunks === 0 && | ||
| (!__DEV__ || request.pendingDebugChunks === 0) | ||
| ) { | ||
| // We're done. | ||
| if (enableTaint) { | ||
| cleanupTaintQueue(request); | ||
|
|
@@ -5716,7 +5708,7 @@ export function resolveDebugMessage(request: Request, message: string): void { | |
| const retainedValue = deferredDebugObjects.retained.get(id); | ||
| if (retainedValue !== undefined) { | ||
| // We're no longer blocked on this. We won't emit it. | ||
| request.pendingChunks--; | ||
| request.pendingDebugChunks--; | ||
| deferredDebugObjects.retained.delete(id); | ||
| deferredDebugObjects.existing.delete(retainedValue); | ||
| enqueueFlush(request); | ||
|
|
@@ -5759,7 +5751,7 @@ export function closeDebugChannel(request: Request): void { | |
| ); | ||
| } | ||
| deferredDebugObjects.retained.forEach((value, id) => { | ||
| request.pendingChunks--; | ||
| request.pendingDebugChunks--; | ||
| deferredDebugObjects.retained.delete(id); | ||
| deferredDebugObjects.existing.delete(value); | ||
| }); | ||
|
|
||
Uh oh!
There was an error while loading. Please reload this page.