Skip to content

Commit

Permalink
[Flight] model halted references explicitly
Browse files Browse the repository at this point in the history
Using infinitely suspending promises isn't right, because a halted value would parse as a promise — which is only appropriate if the value we're halting at actually is a promise. Instead, we need a special marker type that says this reference will never resolve. Additionally, the Flight client must not error any halted references when the stream closes, because they would otherwise appear as errors.
  • Loading branch information
gnoff committed Aug 18, 2024
1 parent 7954db9 commit 5b582d3
Show file tree
Hide file tree
Showing 3 changed files with 168 additions and 14 deletions.
31 changes: 31 additions & 0 deletions packages/react-client/src/ReactFlightClient.js
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,7 @@ import {
enableRefAsProp,
enableFlightReadableStream,
enableOwnerStacks,
enableHalt,
} from 'shared/ReactFeatureFlags';

import {
Expand Down Expand Up @@ -860,6 +861,25 @@ function getChunk(response: Response, id: number): SomeChunk<any> {
return chunk;
}

/**
 * Fork of waitForReference for references that will never resolve.
 * Registers one extra dependency on the current initializing handler
 * (creating a fresh handler if none is active) without ever scheduling a
 * fulfillment, so the chunk being initialized stays blocked forever.
 */
function waitForever() {
  if (!initializingHandler) {
    // No handler active for the chunk being parsed — start one with a
    // single dependency that will never be satisfied.
    initializingHandler = {
      parent: null,
      chunk: null,
      value: null,
      deps: 1,
      errored: false,
    };
  } else {
    initializingHandler.deps++;
  }

  // Placeholder value; the real value never arrives.
  return null;
}

function waitForReference<T>(
referencedChunk: SomeChunk<T>,
parentObject: Object,
Expand Down Expand Up @@ -1184,6 +1204,10 @@ function parseModelString(
}
case 'L': {
// Lazy node
if (enableHalt && value.length === 2) {
// Lazy Infinitely Blocked Reference.
return createLazyChunkWrapper(createBlockedChunk<mixed>(response));
}
const id = parseInt(value.slice(2), 16);
const chunk = getChunk(response, id);
// We create a React.lazy wrapper around any lazy values.
Expand Down Expand Up @@ -1227,6 +1251,13 @@ function parseModelString(
}
return readTemporaryReference(temporaryReferences, reference);
}
case '#': {
// Infinitely Blocked Reference
if (enableHalt) {
return waitForever();
}
// fallthrough
}
case 'Q': {
// Map
const ref = value.slice(2);
Expand Down
101 changes: 101 additions & 0 deletions packages/react-server-dom-webpack/src/__tests__/ReactFlightDOM-test.js
Original file line number Diff line number Diff line change
Expand Up @@ -2856,4 +2856,105 @@ describe('ReactFlightDOM', () => {
jest.advanceTimersByTime('100');
expect(await race).toBe('timeout');
});

// @gate enableHalt
// Verifies that aborting a prerender emits halted (never-resolving)
// references for unfinished chunks instead of errors, and that the Flight
// client treats them as still-pending so Suspense fallbacks render.
it('will halt unfinished chunks inside Suspense when aborting a prerender', async () => {
  const controller = new AbortController();
  // Aborts the prerender from within the render pass itself.
  function ComponentThatAborts() {
    controller.abort();
    return null;
  }

  // Suspends once (microtask) before resolving — left unfinished by the abort.
  async function Greeting() {
    await 1;
    return 'hello world';
  }

  async function Farewell() {
    return 'goodbye world';
  }

  async function Wrapper() {
    return (
      <Suspense fallback="loading too...">
        <ComponentThatAborts />
      </Suspense>
    );
  }

  function App() {
    return (
      <div>
        <Suspense fallback="loading...">
          <Greeting />
        </Suspense>
        <Wrapper />
        <Suspense fallback="loading three...">
          <Farewell />
        </Suspense>
      </div>
    );
  }

  const errors = [];
  const {pendingResult} = await serverAct(() => {
    return {
      pendingResult: ReactServerDOMStaticServer.prerenderToNodeStream(
        <App />,
        {},
        {
          onError(x) {
            errors.push(x);
          },
          signal: controller.signal,
        },
      ),
    };
  });

  controller.abort();

  // Halting a prerender must not report any errors on the server side.
  const {prelude} = await pendingResult;
  expect(errors).toEqual([]);

  const response = ReactServerDOMClient.createFromReadableStream(
    Readable.toWeb(prelude),
  );

  const {writable: fizzWritable, readable: fizzReadable} = getTestStream();

  function ClientApp() {
    return use(response);
  }
  let abortFizz;
  await serverAct(async () => {
    const {pipe, abort} = ReactDOMFizzServer.renderToPipeableStream(
      React.createElement(ClientApp),
      {
        onError(error, errorInfo) {
          errors.push(error);
        },
      },
    );
    pipe(fizzWritable);
    abortFizz = abort;
  });

  // Abort the Fizz SSR pass; each still-pending (halted) boundary errors here.
  await serverAct(() => {
    abortFizz('boom');
  });

  // one error per boundary
  expect(errors).toEqual(['boom', 'boom', 'boom']);

  // Every halted boundary client-renders its fallback rather than erroring.
  const container = document.createElement('div');
  await readInto(container, fizzReadable);
  expect(getMeaningfulChildren(container)).toEqual(
    <div>
      {'loading...'}
      {'loading too...'}
      {'loading three...'}
    </div>,
  );
});
});
50 changes: 36 additions & 14 deletions packages/react-server/src/ReactFlightServer.js
Original file line number Diff line number Diff line change
Expand Up @@ -615,7 +615,7 @@ function serializeThenable(
request.abortableTasks.delete(newTask);
newTask.status = ABORTED;
if (enableHalt && request.fatalError === haltSymbol) {
emitModelChunk(request, newTask.id, reusableInfinitePromiseModel);
emitModelChunk(request, newTask.id, reusableBlockedReferenceModel);
} else {
const errorId: number = (request.fatalError: any);
const model = stringify(serializeByValueID(errorId));
Expand Down Expand Up @@ -1815,10 +1815,13 @@ function serializeLazyID(id: number): string {
return '$L' + id.toString(16);
}

function serializeLazyBlockedReference(): string {
return '$L';
}

function serializeInfinitePromise(): string {
return '$@';
}
const reusableInfinitePromiseModel = stringify(serializeInfinitePromise());

function serializePromiseID(id: number): string {
return '$@' + id.toString(16);
Expand All @@ -1836,6 +1839,11 @@ function serializeLimitedObject(): string {
return '$Y';
}

function serializeBlockedReference(): string {
return '$#';
}
const reusableBlockedReferenceModel = '"$#"';

function serializeNumber(number: number): string | number {
if (Number.isFinite(number)) {
if (number === 0 && 1 / number === -Infinity) {
Expand Down Expand Up @@ -2177,7 +2185,10 @@ function renderModel(
if (request.status === ABORTING) {
task.status = ABORTED;
if (enableHalt && request.fatalError === haltSymbol) {
return serializeInfinitePromise();
if (wasReactNode) {
return serializeLazyBlockedReference();
}
return serializeBlockedReference();
}
const errorId: number = (request.fatalError: any);
if (wasReactNode) {
Expand Down Expand Up @@ -2233,7 +2244,10 @@ function renderModel(
if (request.status === ABORTING) {
task.status = ABORTED;
if (enableHalt && request.fatalError === haltSymbol) {
return serializeInfinitePromise();
if (wasReactNode) {
return serializeLazyBlockedReference();
}
return serializeBlockedReference();
}
const errorId: number = (request.fatalError: any);
if (wasReactNode) {
Expand Down Expand Up @@ -3725,7 +3739,7 @@ function retryTask(request: Request, task: Task): void {
request.abortableTasks.delete(task);
task.status = ABORTED;
if (enableHalt && request.fatalError === haltSymbol) {
emitModelChunk(request, task.id, reusableInfinitePromiseModel);
emitModelChunk(request, task.id, reusableBlockedReferenceModel);
} else {
const errorId: number = (request.fatalError: any);
const model = stringify(serializeByValueID(errorId));
Expand Down Expand Up @@ -3753,7 +3767,7 @@ function retryTask(request: Request, task: Task): void {
request.abortableTasks.delete(task);
task.status = ABORTED;
if (enableHalt && request.fatalError === haltSymbol) {
emitModelChunk(request, task.id, reusableInfinitePromiseModel);
emitModelChunk(request, task.id, reusableBlockedReferenceModel);
} else {
const errorId: number = (request.fatalError: any);
const model = stringify(serializeByValueID(errorId));
Expand Down Expand Up @@ -3798,6 +3812,7 @@ function performWork(request: Request): void {
currentRequest = request;
prepareToUseHooksForRequest(request);

const hadAbortableTasks = request.abortableTasks.size > 0;
try {
const pingedTasks = request.pingedTasks;
request.pingedTasks = [];
Expand All @@ -3808,10 +3823,11 @@ function performWork(request: Request): void {
if (request.destination !== null) {
flushCompletedChunks(request, request.destination);
}
if (request.abortableTasks.size === 0) {
// we're done rendering
const onAllReady = request.onAllReady;
onAllReady();
if (hadAbortableTasks && request.abortableTasks.size === 0) {
// We can ping after completing but if this happens there already
// wouldn't be any abortable tasks. So we only call allReady after
// the work which actually completed the last pending task
allReady(request);
}
} catch (error) {
logRecoverableError(request, error, null);
Expand Down Expand Up @@ -3842,7 +3858,7 @@ function haltTask(task: Task, request: Request): void {
return;
}
task.status = ABORTED;
emitModelChunk(request, task.id, reusableInfinitePromiseModel);
emitModelChunk(request, task.id, reusableBlockedReferenceModel);
}

function flushCompletedChunks(
Expand Down Expand Up @@ -3986,7 +4002,7 @@ export function stopFlowing(request: Request): void {
// This is called to early terminate a request. It creates an error at all pending tasks.
export function abort(request: Request, reason: mixed): void {
try {
if (request.status === OPEN) {
if (request.status === PENDING) {
request.status = ABORTING;
}
const abortableTasks = request.abortableTasks;
Expand Down Expand Up @@ -4023,6 +4039,7 @@ export function abort(request: Request, reason: mixed): void {
}
abortableTasks.forEach(task => abortTask(task, request, errorId));
abortableTasks.clear();
allReady(request);
}
const abortListeners = request.abortListeners;
if (abortListeners.size > 0) {
Expand Down Expand Up @@ -4069,17 +4086,17 @@ const haltSymbol = Symbol('halt');
// that never resolve.
export function halt(request: Request, reason: mixed): void {
try {
if (request.status === OPEN) {
if (request.status === PENDING) {
request.status = ABORTING;
}
request.fatalError = haltSymbol;
const abortableTasks = request.abortableTasks;
// We have tasks to abort. We'll emit one error row and then emit a reference
// to that row from every row that's still remaining.
if (abortableTasks.size > 0) {
request.pendingChunks++;
abortableTasks.forEach(task => haltTask(task, request));
abortableTasks.clear();
allReady(request);
}
const abortListeners = request.abortListeners;
if (abortListeners.size > 0) {
Expand All @@ -4094,3 +4111,8 @@ export function halt(request: Request, reason: mixed): void {
fatalError(request, error);
}
}

function allReady(request: Request) {
const onAllReady = request.onAllReady;
onAllReady();
}

0 comments on commit 5b582d3

Please sign in to comment.