[Flight] Patch Promise cycles and toString on Server Functions (#35345)

Server Functions can be stringified (sometimes implicitly) when passed
as data. This adds a `toString` override that hides the source code in
that case, just in case someone puts sensitive information in there.

Note that this still preserves the `name` field, but that is also
available on the export and in practice is likely minified anyway.
There's nothing else on these references we'd consider unsafe unless you
explicitly expose expandos that are part of the `"use server"` export.
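
As a concrete illustration, here's a minimal sketch of the override. `registerServerReferenceSketch` is a simplified stand-in for the real `registerServerReference`, not its actual signature:

```js
// Minimal sketch: only the stringified source is hidden; identity, `name`
// and call behavior are untouched.
const serverReferenceToString = {
  value: () => 'function () { [omitted code] }',
  configurable: true,
  writable: true,
};

function registerServerReferenceSketch(fn, id) {
  // Simplified: the real implementation also defines $$typeof, $$bound and bind.
  return Object.defineProperties(fn, {
    $$id: {value: id, configurable: true},
    toString: serverReferenceToString,
  });
}

async function updateUser(formData) {
  const apiKey = 'do-not-leak-me'; // pretend something sensitive lives in here
  return apiKey;
}
registerServerReferenceSketch(updateUser, 'chunk#updateUser');

console.log(String(updateUser)); // "function () { [omitted code] }"
console.log(updateUser.name); // "updateUser" (still preserved)
```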

This adds a safety check to ensure you don't encode cyclic Promises.
This isn't a parser bug per se: Promises have a built-in safety
mechanism that keeps them from looping infinitely. However, since we use
custom Thenables, every time a native Promise awaits one, another
Promise wrapper is created around the Thenable. That foils the
ECMAScript Promise cycle detection and can lead to an infinite loop.
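
To make that concrete, here's a hedged, simplified illustration (plain JS, not React's actual code) of why the built-in detection stops helping once a custom Thenable is involved:

```js
// Native Promises reject direct self-resolution with a TypeError
// ("Chaining cycle detected for promise").
const p = new Promise(resolve => setTimeout(() => resolve(p)));
p.catch(err => console.log(err.name)); // TypeError

// A custom thenable has no such identity check. Every time a native Promise
// adopts it, the engine calls `then` with a fresh internal resolver, so the
// same object is never compared against itself and the chain never settles.
const cyclicThenable = {
  then(resolve) {
    resolve(cyclicThenable); // hands itself back on every adoption
  },
};
// `await cyclicThenable` (or `Promise.resolve(cyclicThenable)`) would spin in
// the microtask queue forever. The new check in ReactPromise.prototype.then
// rejects such chunks with "Cannot have cyclic thenables." instead.
```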

This also ensures that embedded `ReadableStream` and `AsyncIterable`
streams are properly closed if the source stream closes early, both on
the Server and the Client. This doesn't cause an infinite loop; it just
makes sure resource clean-up can proceed properly.
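
The pattern, roughly (hypothetical `createGuardedStream` helper, not the actual Flight controller): each embedded stream controller carries a `closed` flag so a late close or error can't drive an underlying `ReadableStream` controller that was already shut down when the source connection ended early.

```js
// Hypothetical helper sketching the guard; the real code lives in
// startReadableStream / parseReadableStream and friends in the diff below.
function createGuardedStream() {
  let controller;
  let closed = false;
  const stream = new ReadableStream({
    start(c) {
      controller = c;
    },
  });
  return {
    stream,
    close() {
      if (closed) return; // already closed or errored (e.g. via reportGlobalError)
      closed = true;
      controller.close();
    },
    error(reason) {
      if (closed) return;
      closed = true;
      controller.error(reason);
    },
  };
}

// A late close after the source already errored is now a no-op instead of
// throwing from an already-errored controller:
const row = createGuardedStream();
row.error(new Error('Connection closed.')); // source stream ended early
row.close(); // ignored, so resource clean-up can continue
```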

We're also adding more explicit, clear errors for invalid payloads,
since we no longer need to obfuscate the original issue.
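
For instance, path references in a reply now only drill through own properties of plain objects and arrays; anything else throws `Invalid reference.` up front. A sketch with a hypothetical `followPath` helper, mirroring the `fulfillReference`/`getOutlinedModel` change below:

```js
// Hypothetical helper; the real checks live inside fulfillReference and
// getOutlinedModel on the server reply parser.
const ObjectPrototype = Object.prototype;
const ArrayPrototype = Array.prototype;

function followPath(value, path) {
  for (let i = 1; i < path.length; i++) {
    const name = path[i];
    if (
      typeof value === 'object' &&
      value !== null &&
      (Object.getPrototypeOf(value) === ObjectPrototype ||
        Object.getPrototypeOf(value) === ArrayPrototype) &&
      Object.prototype.hasOwnProperty.call(value, name)
    ) {
      value = value[name];
    } else {
      throw new Error('Invalid reference.');
    }
  }
  return value;
}

console.log(followPath({list: [1, 2, 3]}, ['', 'list', '1'])); // 2
try {
  followPath({list: [1, 2, 3]}, ['', 'list', 'toString']); // inherited property
} catch (err) {
  console.log(err.message); // "Invalid reference."
}
```
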
Sebastian Markbåge
2025-12-11 15:24:24 -05:00
committed by GitHub
parent d3eb566291
commit 894bc73cb4
14 changed files with 442 additions and 241 deletions

View File

@@ -894,6 +894,7 @@ function resolveModuleChunk<T>(
const resolvedChunk: ResolvedModuleChunk<T> = (chunk: any);
resolvedChunk.status = RESOLVED_MODULE;
resolvedChunk.value = value;
resolvedChunk.reason = null;
if (__DEV__) {
const debugInfo = getModuleDebugInfo(value);
if (debugInfo !== null) {
@@ -1114,6 +1115,8 @@ export function reportGlobalError(
// because we won't be getting any new data to resolve it.
if (chunk.status === PENDING) {
triggerErrorOnChunk(response, chunk, error);
} else if (chunk.status === INITIALIZED && chunk.reason !== null) {
chunk.reason.error(error);
}
});
if (__DEV__) {
@@ -1462,15 +1465,95 @@ function fulfillReference(
): void {
const {handler, parentObject, key, map, path} = reference;
for (let i = 1; i < path.length; i++) {
try {
for (let i = 1; i < path.length; i++) {
while (
typeof value === 'object' &&
value !== null &&
value.$$typeof === REACT_LAZY_TYPE
) {
// We never expect to see a Lazy node on this path because we encode those as
// separate models. This must mean that we have inserted an extra lazy node
// e.g. to replace a blocked element. We must instead look for it inside.
const referencedChunk: SomeChunk<any> = value._payload;
if (referencedChunk === handler.chunk) {
// This is a reference to the thing we're currently blocking. We can peek
// inside of it to get the value.
value = handler.value;
continue;
} else {
switch (referencedChunk.status) {
case RESOLVED_MODEL:
initializeModelChunk(referencedChunk);
break;
case RESOLVED_MODULE:
initializeModuleChunk(referencedChunk);
break;
}
switch (referencedChunk.status) {
case INITIALIZED: {
value = referencedChunk.value;
continue;
}
case BLOCKED: {
// It is possible that we're blocked on our own chunk if it's a cycle.
// Before adding the listener to the inner chunk, let's check if it would
// result in a cycle.
const cyclicHandler = resolveBlockedCycle(
referencedChunk,
reference,
);
if (cyclicHandler !== null) {
// This reference points back to this chunk. We can resolve the cycle by
// using the value from that handler.
value = cyclicHandler.value;
continue;
}
// Fallthrough
}
case PENDING: {
// If we're not yet initialized we need to skip what we've already drilled
// through and then wait for the next value to become available.
path.splice(0, i - 1);
// Add "listener" to our new chunk dependency.
if (referencedChunk.value === null) {
referencedChunk.value = [reference];
} else {
referencedChunk.value.push(reference);
}
if (referencedChunk.reason === null) {
referencedChunk.reason = [reference];
} else {
referencedChunk.reason.push(reference);
}
return;
}
case HALTED: {
// Do nothing. We couldn't fulfill.
// TODO: Mark downstreams as halted too.
return;
}
default: {
rejectReference(
response,
reference.handler,
referencedChunk.reason,
);
return;
}
}
}
}
value = value[path[i]];
}
while (
typeof value === 'object' &&
value !== null &&
value.$$typeof === REACT_LAZY_TYPE
) {
// We never expect to see a Lazy node on this path because we encode those as
// separate models. This must mean that we have inserted an extra lazy node
// e.g. to replace a blocked element. We must instead look for it inside.
// If what we're referencing is a Lazy it must be because we inserted one as a virtual node
// while it was blocked by other data. If it's no longer blocked, we can unwrap it.
const referencedChunk: SomeChunk<any> = value._payload;
if (referencedChunk === handler.chunk) {
// This is a reference to the thing we're currently blocking. We can peek
@@ -1491,132 +1574,57 @@ function fulfillReference(
value = referencedChunk.value;
continue;
}
case BLOCKED: {
// It is possible that we're blocked on our own chunk if it's a cycle.
// Before adding the listener to the inner chunk, let's check if it would
// result in a cycle.
const cyclicHandler = resolveBlockedCycle(
referencedChunk,
reference,
);
if (cyclicHandler !== null) {
// This reference points back to this chunk. We can resolve the cycle by
// using the value from that handler.
value = cyclicHandler.value;
continue;
}
// Fallthrough
}
case PENDING: {
// If we're not yet initialized we need to skip what we've already drilled
// through and then wait for the next value to become available.
path.splice(0, i - 1);
// Add "listener" to our new chunk dependency.
if (referencedChunk.value === null) {
referencedChunk.value = [reference];
} else {
referencedChunk.value.push(reference);
}
if (referencedChunk.reason === null) {
referencedChunk.reason = [reference];
} else {
referencedChunk.reason.push(reference);
}
return;
}
case HALTED: {
// Do nothing. We couldn't fulfill.
// TODO: Mark downstreams as halted too.
return;
}
default: {
rejectReference(
response,
reference.handler,
referencedChunk.reason,
);
return;
}
}
}
break;
}
value = value[path[i]];
}
while (
typeof value === 'object' &&
value !== null &&
value.$$typeof === REACT_LAZY_TYPE
) {
// If what we're referencing is a Lazy it must be because we inserted one as a virtual node
// while it was blocked by other data. If it's no longer blocked, we can unwrap it.
const referencedChunk: SomeChunk<any> = value._payload;
if (referencedChunk === handler.chunk) {
// This is a reference to the thing we're currently blocking. We can peek
// inside of it to get the value.
value = handler.value;
continue;
} else {
switch (referencedChunk.status) {
case RESOLVED_MODEL:
initializeModelChunk(referencedChunk);
const mappedValue = map(response, value, parentObject, key);
parentObject[key] = mappedValue;
// If this is the root object for a model reference, where `handler.value`
// is a stale `null`, the resolved value can be used directly.
if (key === '' && handler.value === null) {
handler.value = mappedValue;
}
// If the parent object is an unparsed React element tuple, we also need to
// update the props and owner of the parsed element object (i.e.
// handler.value).
if (
parentObject[0] === REACT_ELEMENT_TYPE &&
typeof handler.value === 'object' &&
handler.value !== null &&
handler.value.$$typeof === REACT_ELEMENT_TYPE
) {
const element: any = handler.value;
switch (key) {
case '3':
transferReferencedDebugInfo(handler.chunk, fulfilledChunk);
element.props = mappedValue;
break;
case RESOLVED_MODULE:
initializeModuleChunk(referencedChunk);
case '4':
// This path doesn't call transferReferencedDebugInfo because this reference is to a debug chunk.
if (__DEV__) {
element._owner = mappedValue;
}
break;
case '5':
// This path doesn't call transferReferencedDebugInfo because this reference is to a debug chunk.
if (__DEV__) {
element._debugStack = mappedValue;
}
break;
default:
transferReferencedDebugInfo(handler.chunk, fulfilledChunk);
break;
}
switch (referencedChunk.status) {
case INITIALIZED: {
value = referencedChunk.value;
continue;
}
}
} else if (__DEV__ && !reference.isDebug) {
transferReferencedDebugInfo(handler.chunk, fulfilledChunk);
}
break;
}
const mappedValue = map(response, value, parentObject, key);
parentObject[key] = mappedValue;
// If this is the root object for a model reference, where `handler.value`
// is a stale `null`, the resolved value can be used directly.
if (key === '' && handler.value === null) {
handler.value = mappedValue;
}
// If the parent object is an unparsed React element tuple, we also need to
// update the props and owner of the parsed element object (i.e.
// handler.value).
if (
parentObject[0] === REACT_ELEMENT_TYPE &&
typeof handler.value === 'object' &&
handler.value !== null &&
handler.value.$$typeof === REACT_ELEMENT_TYPE
) {
const element: any = handler.value;
switch (key) {
case '3':
transferReferencedDebugInfo(handler.chunk, fulfilledChunk);
element.props = mappedValue;
break;
case '4':
// This path doesn't call transferReferencedDebugInfo because this reference is to a debug chunk.
if (__DEV__) {
element._owner = mappedValue;
}
break;
case '5':
// This path doesn't call transferReferencedDebugInfo because this reference is to a debug chunk.
if (__DEV__) {
element._debugStack = mappedValue;
}
break;
default:
transferReferencedDebugInfo(handler.chunk, fulfilledChunk);
break;
}
} else if (__DEV__ && !reference.isDebug) {
transferReferencedDebugInfo(handler.chunk, fulfilledChunk);
} catch (error) {
rejectReference(response, reference.handler, error);
return;
}
handler.deps--;
@@ -1882,6 +1890,7 @@ function loadServerReference<A: Iterable<any>, T>(
const initializedChunk: InitializedChunk<T> = (chunk: any);
initializedChunk.status = INITIALIZED;
initializedChunk.value = handler.value;
initializedChunk.reason = null;
if (resolveListeners !== null) {
wakeChunk(response, resolveListeners, handler.value, initializedChunk);
} else {
@@ -2359,7 +2368,7 @@ function parseModelString(
// Symbol
return Symbol.for(value.slice(2));
}
case 'F': {
case 'h': {
// Server Reference
const ref = value.slice(2);
return getOutlinedModel(
@@ -3138,6 +3147,7 @@ function startReadableStream<T>(
streamState: StreamState,
): void {
let controller: ReadableStreamController = (null: any);
let closed = false;
const stream = new ReadableStream({
type: type,
start(c) {
@@ -3195,6 +3205,10 @@ function startReadableStream<T>(
}
},
close(json: UninitializedModel): void {
if (closed) {
return;
}
closed = true;
if (previousBlockedChunk === null) {
controller.close();
} else {
@@ -3205,6 +3219,10 @@ function startReadableStream<T>(
}
},
error(error: mixed): void {
if (closed) {
return;
}
closed = true;
if (previousBlockedChunk === null) {
// $FlowFixMe[incompatible-call]
controller.error(error);
@@ -3265,6 +3283,7 @@ function startAsyncIterable<T>(
(chunk: any);
initializedChunk.status = INITIALIZED;
initializedChunk.value = {done: false, value: value};
initializedChunk.reason = null;
if (resolveListeners !== null) {
wakeChunkIfInitialized(
response,
@@ -3294,6 +3313,9 @@ function startAsyncIterable<T>(
nextWriteIndex++;
},
close(value: UninitializedModel): void {
if (closed) {
return;
}
closed = true;
if (nextWriteIndex === buffer.length) {
buffer[nextWriteIndex] = createResolvedIteratorResultChunk(
@@ -3321,6 +3343,9 @@ function startAsyncIterable<T>(
}
},
error(error: Error): void {
if (closed) {
return;
}
closed = true;
if (nextWriteIndex === buffer.length) {
buffer[nextWriteIndex] =

View File

@@ -104,7 +104,7 @@ function serializePromiseID(id: number): string {
}
function serializeServerReferenceID(id: number): string {
return '$F' + id.toString(16);
return '$h' + id.toString(16);
}
function serializeTemporaryReferenceMarker(): string {
@@ -112,7 +112,6 @@ function serializeTemporaryReferenceMarker(): string {
}
function serializeFormDataReference(id: number): string {
// Why K? F is "Function". D is "Date". What else?
return '$K' + id.toString(16);
}
@@ -474,8 +473,22 @@ export function processReply(
}
}
const existingReference = writtenObjects.get(value);
// $FlowFixMe[method-unbinding]
if (typeof value.then === 'function') {
if (existingReference !== undefined) {
if (modelRoot === value) {
// This is the ID we're currently emitting so we need to write it
// once but if we discover it again, we refer to it by id.
modelRoot = null;
} else {
// We've already emitted this as an outlined object, so we can
// just refer to that by its existing ID.
return existingReference;
}
}
// We assume that any object with a .then property is a "Thenable" type,
// or a Promise type. Either of which can be represented by a Promise.
if (formData === null) {
@@ -484,11 +497,19 @@ export function processReply(
}
pendingParts++;
const promiseId = nextPartId++;
const promiseReference = serializePromiseID(promiseId);
writtenObjects.set(value, promiseReference);
const thenable: Thenable<any> = (value: any);
thenable.then(
partValue => {
try {
const partJSON = serializeModel(partValue, promiseId);
const previousReference = writtenObjects.get(partValue);
let partJSON;
if (previousReference !== undefined) {
partJSON = JSON.stringify(previousReference);
} else {
partJSON = serializeModel(partValue, promiseId);
}
// $FlowFixMe[incompatible-type] We know it's not null because we assigned it above.
const data: FormData = formData;
data.append(formFieldPrefix + promiseId, partJSON);
@@ -504,10 +525,9 @@ export function processReply(
// that throws on the server instead.
reject,
);
return serializePromiseID(promiseId);
return promiseReference;
}
const existingReference = writtenObjects.get(value);
if (existingReference !== undefined) {
if (modelRoot === value) {
// This is the ID we're currently emitting so we need to write it

View File

@@ -88,6 +88,12 @@ function bind(this: ServerReference<any>): any {
return newFn;
}
const serverReferenceToString = {
value: () => 'function () { [omitted code] }',
configurable: true,
writable: true,
};
export function registerServerReference<T: Function>(
reference: T,
id: string,
@@ -111,12 +117,14 @@ export function registerServerReference<T: Function>(
configurable: true,
},
bind: {value: bind, configurable: true},
toString: serverReferenceToString,
}
: {
$$typeof,
$$id,
$$bound,
bind: {value: bind, configurable: true},
toString: serverReferenceToString,
}) as PropertyDescriptorMap,
);
}

View File

@@ -95,6 +95,12 @@ function bind(this: ServerReference<any>): any {
return newFn;
}
const serverReferenceToString = {
value: () => 'function () { [omitted code] }',
configurable: true,
writable: true,
};
export function registerServerReference<T>(
reference: ServerReference<T>,
id: string,
@@ -118,12 +124,14 @@ export function registerServerReference<T>(
configurable: true,
},
bind: {value: bind, configurable: true},
toString: serverReferenceToString,
}
: {
$$typeof,
$$id,
$$bound,
bind: {value: bind, configurable: true},
toString: serverReferenceToString,
}) as PropertyDescriptorMap,
);
}

View File

@@ -102,6 +102,12 @@ function bind(this: ServerReference<any>): any {
return newFn;
}
const serverReferenceToString = {
value: () => 'function () { [omitted code] }',
configurable: true,
writable: true,
};
export function registerServerReference<T: Function>(
reference: T,
id: string,
@@ -125,12 +131,14 @@ export function registerServerReference<T: Function>(
configurable: true,
},
bind: {value: bind, configurable: true},
toString: serverReferenceToString,
}
: {
$$typeof,
$$id,
$$bound,
bind: {value: bind, configurable: true},
toString: serverReferenceToString,
}) as PropertyDescriptorMap,
);
}

View File

@@ -102,6 +102,12 @@ function bind(this: ServerReference<any>): any {
return newFn;
}
const serverReferenceToString = {
value: () => 'function () { [omitted code] }',
configurable: true,
writable: true,
};
export function registerServerReference<T: Function>(
reference: T,
id: string,
@@ -125,12 +131,14 @@ export function registerServerReference<T: Function>(
configurable: true,
},
bind: {value: bind, configurable: true},
toString: serverReferenceToString,
} as PropertyDescriptorMap)
: ({
$$typeof,
$$id,
$$bound,
bind: {value: bind, configurable: true},
toString: serverReferenceToString,
} as PropertyDescriptorMap),
);
}

View File

@@ -102,6 +102,12 @@ function bind(this: ServerReference<any>): any {
return newFn;
}
const serverReferenceToString = {
value: () => 'function () { [omitted code] }',
configurable: true,
writable: true,
};
export function registerServerReference<T: Function>(
reference: T,
id: string,
@@ -125,12 +131,14 @@ export function registerServerReference<T: Function>(
configurable: true,
},
bind: {value: bind, configurable: true},
toString: serverReferenceToString,
} as PropertyDescriptorMap)
: ({
$$typeof,
$$id,
$$bound,
bind: {value: bind, configurable: true},
toString: serverReferenceToString,
} as PropertyDescriptorMap),
);
}

View File

@@ -156,6 +156,23 @@ describe('ReactFlightDOM', () => {
};
}
function createUnclosingStream(
stream: ReadableStream<Uint8Array>,
): ReadableStream<Uint8Array> {
const reader = stream.getReader();
const s = new ReadableStream({
async pull(controller) {
const {done, value} = await reader.read();
if (!done) {
controller.enqueue(value);
}
},
});
return s;
}
const theInfinitePromise = new Promise(() => {});
function InfiniteSuspend() {
throw theInfinitePromise;
@@ -2970,7 +2987,7 @@ describe('ReactFlightDOM', () => {
const {prelude} = await pendingResult;
const result = await ReactServerDOMClient.createFromReadableStream(
Readable.toWeb(prelude),
createUnclosingStream(Readable.toWeb(prelude)),
);
const iterator = result.multiShotIterable[Symbol.asyncIterator]();

View File

@@ -228,7 +228,7 @@ describe('ReactFlightDOMEdge', () => {
async function createBufferedUnclosingStream(
stream: ReadableStream<Uint8Array>,
): ReadableStream<Uint8Array> {
): Promise<ReadableStream<Uint8Array>> {
const chunks: Array<Uint8Array> = [];
const reader = stream.getReader();
while (true) {
@@ -2309,4 +2309,34 @@ describe('ReactFlightDOMEdge', () => {
const result = await response;
expect(result).toEqual({obj: obj, node: 'hi'});
});
it('does not leak the server reference code', async () => {
function foo() {
return 'foo';
}
const bar = () => {
return 'bar';
};
const anonymous = (
() => () =>
'anonymous'
)();
expect(
ReactServerDOMServer.registerServerReference(foo, 'foo-id').toString(),
).toBe('function () { [omitted code] }');
expect(
ReactServerDOMServer.registerServerReference(bar, 'bar-id').toString(),
).toBe('function () { [omitted code] }');
expect(
ReactServerDOMServer.registerServerReference(
anonymous,
'anonymous-id',
).toString(),
).toBe('function () { [omitted code] }');
});
});

View File

@@ -140,7 +140,7 @@ describe('ReactFlightDOMNode', () => {
async function createBufferedUnclosingStream(
stream: ReadableStream<Uint8Array>,
): ReadableStream<Uint8Array> {
): Promise<ReadableStream<Uint8Array>> {
const chunks: Array<Uint8Array> = [];
const reader = stream.getReader();
while (true) {
@@ -407,7 +407,7 @@ describe('ReactFlightDOMNode', () => {
);
});
it('should cancels the underlying ReadableStream when we are cancelled', async () => {
it('should cancel the underlying and transported ReadableStreams when we are cancelled', async () => {
let controller;
let cancelReason;
const s = new ReadableStream({
@@ -431,16 +431,30 @@ describe('ReactFlightDOMNode', () => {
),
);
const writable = new Stream.PassThrough(streamOptions);
rscStream.pipe(writable);
const readable = new Stream.PassThrough(streamOptions);
rscStream.pipe(readable);
const result = await ReactServerDOMClient.createFromNodeStream(readable, {
moduleMap: {},
moduleLoading: webpackModuleLoading,
});
const reader = result.getReader();
controller.enqueue('hi');
await serverAct(async () => {
// We should be able to read the part we already emitted before the abort
expect(await reader.read()).toEqual({
value: 'hi',
done: false,
});
});
const reason = new Error('aborted');
writable.destroy(reason);
readable.destroy(reason);
await new Promise(resolve => {
writable.on('error', () => {
readable.on('error', () => {
resolve();
});
});
@@ -448,9 +462,17 @@ describe('ReactFlightDOMNode', () => {
expect(cancelReason.message).toBe(
'The destination stream errored while writing data.',
);
let error = null;
try {
await reader.read();
} catch (x) {
error = x;
}
expect(error).toBe(reason);
});
it('should cancels the underlying ReadableStream when we abort', async () => {
it('should cancel the underlying and transported ReadableStreams when we abort', async () => {
const errors = [];
let controller;
let cancelReason;
@@ -1342,12 +1364,12 @@ describe('ReactFlightDOMNode', () => {
'\n' +
' in Dynamic' +
(gate(flags => flags.enableAsyncDebugInfo)
? ' (file://ReactFlightDOMNode-test.js:1216:27)\n'
? ' (file://ReactFlightDOMNode-test.js:1238:27)\n'
: '\n') +
' in body\n' +
' in html\n' +
' in App (file://ReactFlightDOMNode-test.js:1229:25)\n' +
' in ClientRoot (ReactFlightDOMNode-test.js:1304:16)',
' in App (file://ReactFlightDOMNode-test.js:1251:25)\n' +
' in ClientRoot (ReactFlightDOMNode-test.js:1326:16)',
);
} else {
expect(
@@ -1356,7 +1378,7 @@ describe('ReactFlightDOMNode', () => {
'\n' +
' in body\n' +
' in html\n' +
' in ClientRoot (ReactFlightDOMNode-test.js:1304:16)',
' in ClientRoot (ReactFlightDOMNode-test.js:1326:16)',
);
}
@@ -1366,8 +1388,8 @@ describe('ReactFlightDOMNode', () => {
normalizeCodeLocInfo(ownerStack, {preserveLocation: true}),
).toBe(
'\n' +
' in Dynamic (file://ReactFlightDOMNode-test.js:1216:27)\n' +
' in App (file://ReactFlightDOMNode-test.js:1229:25)',
' in Dynamic (file://ReactFlightDOMNode-test.js:1238:27)\n' +
' in App (file://ReactFlightDOMNode-test.js:1251:25)',
);
} else {
expect(
@@ -1375,7 +1397,7 @@ describe('ReactFlightDOMNode', () => {
).toBe(
'' +
'\n' +
' in App (file://ReactFlightDOMNode-test.js:1229:25)',
' in App (file://ReactFlightDOMNode-test.js:1251:25)',
);
}
} else {

View File

@@ -135,7 +135,7 @@ describe('ReactFlightDOMReplyEdge', () => {
expect(await resultBlob.arrayBuffer()).toEqual(await blob.arrayBuffer());
});
it('should supports ReadableStreams with typed arrays', async () => {
it('should support ReadableStreams with typed arrays', async () => {
const buffer = new Uint8Array([
123, 4, 10, 5, 100, 255, 244, 45, 56, 67, 43, 124, 67, 89, 100, 20,
]).buffer;
@@ -239,6 +239,53 @@ describe('ReactFlightDOMReplyEdge', () => {
expect(streamedBuffers.flatMap(t => Array.from(t))).toEqual(expectedBytes);
});
it('should cancel the transported ReadableStream when we are cancelled', async () => {
const s = new ReadableStream({
start(controller) {
controller.enqueue('hi');
controller.close();
},
});
const body = await ReactServerDOMClient.encodeReply(s);
const iterable = {
async *[Symbol.asyncIterator]() {
// eslint-disable-next-line no-for-of-loops/no-for-of-loops
for (const entry of body) {
if (entry[1] === 'C') {
// Return before finishing the stream.
return;
}
yield entry;
}
},
};
const result = await ReactServerDOMServer.decodeReplyFromAsyncIterable(
iterable,
webpackServerMap,
);
const reader = result.getReader();
// We should be able to read the part we already emitted before the abort
expect(await reader.read()).toEqual({
value: 'hi',
done: false,
});
let error = null;
try {
await reader.read();
} catch (x) {
error = x;
}
expect(error).not.toBe(null);
expect(error.message).toBe('Connection closed.');
});
it('should abort when parsing an incomplete payload', async () => {
const infinitePromise = new Promise(() => {});
const controller = new AbortController();

View File

@@ -33,6 +33,7 @@ import {
import {ASYNC_ITERATOR} from 'shared/ReactSymbols';
import hasOwnProperty from 'shared/hasOwnProperty';
import getPrototypeOf from 'shared/getPrototypeOf';
interface FlightStreamController {
enqueueModel(json: string): void;
@@ -128,6 +129,24 @@ ReactPromise.prototype.then = function <T>(
switch (chunk.status) {
case INITIALIZED:
if (typeof resolve === 'function') {
let inspectedValue = chunk.value;
// Recursively check if the value is itself a ReactPromise and if so if it points
// back to itself. This helps us catch recursive thenables early and error.
while (inspectedValue instanceof ReactPromise) {
if (inspectedValue === chunk) {
if (typeof reject === 'function') {
reject(new Error('Cannot have cyclic thenables.'));
}
return;
}
if (inspectedValue.status === INITIALIZED) {
inspectedValue = inspectedValue.value;
} else {
// If this is lazily resolved, pending or blocked, it'll eventually become
// initialized and break the loop. Rejected also breaks it.
break;
}
}
resolve(chunk.value);
}
break;
@@ -156,6 +175,9 @@ ReactPromise.prototype.then = function <T>(
}
};
const ObjectPrototype = Object.prototype;
const ArrayPrototype = Array.prototype;
export type Response = {
_bundlerConfig: ServerManifest,
_prefix: string,
@@ -506,6 +528,7 @@ function loadServerReference<A: Iterable<any>, T>(
const initializedChunk: InitializedChunk<T> = (chunk: any);
initializedChunk.status = INITIALIZED;
initializedChunk.value = handler.value;
initializedChunk.reason = null;
if (resolveListeners !== null) {
wakeChunk(response, resolveListeners, handler.value);
}
@@ -674,6 +697,7 @@ function initializeModelChunk<T>(chunk: ResolvedModelChunk<T>): void {
const initializedChunk: InitializedChunk<T> = (chunk: any);
initializedChunk.status = INITIALIZED;
initializedChunk.value = value;
initializedChunk.reason = null;
} catch (error) {
const erroredChunk: ErroredChunk<T> = (chunk: any);
erroredChunk.status = ERRORED;
@@ -694,6 +718,8 @@ export function reportGlobalError(response: Response, error: Error): void {
// because we won't be getting any new data to resolve it.
if (chunk.status === PENDING) {
triggerErrorOnChunk(response, chunk, error);
} else if (chunk.status === INITIALIZED && chunk.reason !== null) {
chunk.reason.error(error);
}
});
}
@@ -728,57 +754,34 @@ function fulfillReference(
): void {
const {handler, parentObject, key, map, path} = reference;
for (let i = 1; i < path.length; i++) {
// The server doesn't have any lazy references but we unwrap Chunks here in the same way as the client.
while (value instanceof ReactPromise) {
const referencedChunk: SomeChunk<any> = value;
switch (referencedChunk.status) {
case RESOLVED_MODEL:
initializeModelChunk(referencedChunk);
break;
}
switch (referencedChunk.status) {
case INITIALIZED: {
value = referencedChunk.value;
continue;
}
case BLOCKED:
case PENDING: {
// If we're not yet initialized we need to skip what we've already drilled
// through and then wait for the next value to become available.
path.splice(0, i - 1);
// Add "listener" to our new chunk dependency.
if (referencedChunk.value === null) {
referencedChunk.value = [reference];
} else {
referencedChunk.value.push(reference);
}
if (referencedChunk.reason === null) {
referencedChunk.reason = [reference];
} else {
referencedChunk.reason.push(reference);
}
return;
}
default: {
rejectReference(response, reference.handler, referencedChunk.reason);
return;
}
try {
for (let i = 1; i < path.length; i++) {
// The server doesn't have any lazy references so we don't expect to go through a Promise.
const name = path[i];
if (
typeof value === 'object' &&
value !== null &&
(getPrototypeOf(value) === ObjectPrototype ||
getPrototypeOf(value) === ArrayPrototype) &&
hasOwnProperty.call(value, name)
) {
value = value[name];
} else {
throw new Error('Invalid reference.');
}
}
const name = path[i];
if (typeof value === 'object' && hasOwnProperty.call(value, name)) {
value = value[name];
const mappedValue = map(response, value, parentObject, key);
parentObject[key] = mappedValue;
// If this is the root object for a model reference, where `handler.value`
// is a stale `null`, the resolved value can be used directly.
if (key === '' && handler.value === null) {
handler.value = mappedValue;
}
}
const mappedValue = map(response, value, parentObject, key);
parentObject[key] = mappedValue;
// If this is the root object for a model reference, where `handler.value`
// is a stale `null`, the resolved value can be used directly.
if (key === '' && handler.value === null) {
handler.value = mappedValue;
} catch (error) {
rejectReference(response, reference.handler, error);
return;
}
// There are no Elements or Debug Info to transfer here.
@@ -889,53 +892,17 @@ function getOutlinedModel<T>(
case INITIALIZED:
let value = chunk.value;
for (let i = 1; i < path.length; i++) {
// The server doesn't have any lazy references but we unwrap Chunks here in the same way as the client.
while (value instanceof ReactPromise) {
const referencedChunk: SomeChunk<any> = value;
switch (referencedChunk.status) {
case RESOLVED_MODEL:
initializeModelChunk(referencedChunk);
break;
}
switch (referencedChunk.status) {
case INITIALIZED: {
value = referencedChunk.value;
break;
}
case BLOCKED:
case PENDING: {
return waitForReference(
referencedChunk,
parentObject,
key,
response,
map,
path.slice(i - 1),
);
}
default: {
// This is an error. Instead of erroring directly, we're going to encode this on
// an initialization handler so that we can catch it at the nearest Element.
if (initializingHandler) {
initializingHandler.errored = true;
initializingHandler.value = null;
initializingHandler.reason = referencedChunk.reason;
} else {
initializingHandler = {
chunk: null,
value: null,
reason: referencedChunk.reason,
deps: 0,
errored: true,
};
}
return (null: any);
}
}
}
const name = path[i];
if (typeof value === 'object' && hasOwnProperty.call(value, name)) {
if (
typeof value === 'object' &&
value !== null &&
(getPrototypeOf(value) === ObjectPrototype ||
getPrototypeOf(value) === ArrayPrototype) &&
hasOwnProperty.call(value, name)
) {
value = value[name];
} else {
throw new Error('Invalid reference.');
}
}
const chunkValue = map(response, value, parentObject, key);
@@ -1006,6 +973,11 @@ function parseTypedArray<T: $ArrayBufferView | ArrayBuffer>(
const id = parseInt(reference.slice(2), 16);
const prefix = response._prefix;
const key = prefix + id;
const chunks = response._chunks;
if (chunks.has(id)) {
throw new Error('Already initialized typed array.');
}
// We should have this backingEntry in the store already because we emitted
// it before referencing it. It should be a Blob.
// TODO: Use getOutlinedModel to allow us to emit the Blob later. We should be able to do that now.
@@ -1055,6 +1027,7 @@ function parseTypedArray<T: $ArrayBufferView | ArrayBuffer>(
const initializedChunk: InitializedChunk<T> = (chunk: any);
initializedChunk.status = INITIALIZED;
initializedChunk.value = handler.value;
initializedChunk.reason = null;
if (resolveListeners !== null) {
wakeChunk(response, resolveListeners, handler.value);
}
@@ -1116,8 +1089,13 @@ function parseReadableStream<T>(
parentKey: string,
): ReadableStream {
const id = parseInt(reference.slice(2), 16);
const chunks = response._chunks;
if (chunks.has(id)) {
throw new Error('Already initialized stream.');
}
let controller: ReadableStreamController = (null: any);
let closed = false;
const stream = new ReadableStream({
type: type,
start(c) {
@@ -1166,6 +1144,10 @@ function parseReadableStream<T>(
}
},
close(json: string): void {
if (closed) {
return;
}
closed = true;
if (previousBlockedChunk === null) {
controller.close();
} else {
@@ -1176,6 +1158,10 @@ function parseReadableStream<T>(
}
},
error(error: mixed): void {
if (closed) {
return;
}
closed = true;
if (previousBlockedChunk === null) {
// $FlowFixMe[incompatible-call]
controller.error(error);
@@ -1218,6 +1204,10 @@ function parseAsyncIterable<T>(
parentKey: string,
): $AsyncIterable<T, T, void> | $AsyncIterator<T, T, void> {
const id = parseInt(reference.slice(2), 16);
const chunks = response._chunks;
if (chunks.has(id)) {
throw new Error('Already initialized stream.');
}
const buffer: Array<SomeChunk<IteratorResult<T, T>>> = [];
let closed = false;
@@ -1241,6 +1231,9 @@ function parseAsyncIterable<T>(
nextWriteIndex++;
},
close(value: string): void {
if (closed) {
return;
}
closed = true;
if (nextWriteIndex === buffer.length) {
buffer[nextWriteIndex] = createResolvedIteratorResultChunk(
@@ -1268,6 +1261,9 @@ function parseAsyncIterable<T>(
}
},
error(error: Error): void {
if (closed) {
return;
}
closed = true;
if (nextWriteIndex === buffer.length) {
buffer[nextWriteIndex] =
@@ -1329,7 +1325,7 @@ function parseModelString(
const chunk = getChunk(response, id);
return chunk;
}
case 'F': {
case 'h': {
// Server Reference
const ref = value.slice(2);
return getOutlinedModel(response, ref, obj, key, loadServerReference);

View File

@@ -2797,7 +2797,7 @@ function serializePromiseID(id: number): string {
}
function serializeServerReferenceID(id: number): string {
return '$F' + id.toString(16);
return '$h' + id.toString(16);
}
function serializeSymbolReference(name: string): string {

View File

@@ -551,5 +551,9 @@
"563": "This render completed successfully. All cacheSignals are now aborted to allow clean up of any unused resources.",
"564": "Unknown command. The debugChannel was not wired up properly.",
"565": "resolveDebugMessage/closeDebugChannel should not be called for a Request that wasn't kept alive. This is a bug in React.",
"566": "FragmentInstance.scrollIntoView() does not support scrollIntoViewOptions. Use the alignToTop boolean instead."
"566": "FragmentInstance.scrollIntoView() does not support scrollIntoViewOptions. Use the alignToTop boolean instead.",
"567": "Already initialized stream.",
"568": "Already initialized typed array.",
"569": "Cannot have cyclic thenables.",
"570": "Invalid reference."
}