diff --git a/packages/react-dom/src/server/ReactPartialRendererHooks.js b/packages/react-dom/src/server/ReactPartialRendererHooks.js index 3a543aa337b6c..50edb72c2844a 100644 --- a/packages/react-dom/src/server/ReactPartialRendererHooks.js +++ b/packages/react-dom/src/server/ReactPartialRendererHooks.js @@ -489,6 +489,10 @@ function useOpaqueIdentifier(): OpaqueIDType { ); } +function useCacheRefresh(): (?() => T, ?T) => void { + invariant(false, 'Not implemented.'); +} + function noop(): void {} export let currentPartialRenderer: PartialRenderer = (null: any); @@ -520,4 +524,5 @@ export const Dispatcher: DispatcherType = { if (enableCache) { Dispatcher.getCacheForType = getCacheForType; + Dispatcher.useCacheRefresh = useCacheRefresh; } diff --git a/packages/react-reconciler/src/ReactFiber.new.js b/packages/react-reconciler/src/ReactFiber.new.js index 9d55634deab95..3b3d9d4bc2e7b 100644 --- a/packages/react-reconciler/src/ReactFiber.new.js +++ b/packages/react-reconciler/src/ReactFiber.new.js @@ -27,6 +27,7 @@ import { enableProfilerTimer, enableFundamentalAPI, enableScopeAPI, + enableCache, } from 'shared/ReactFeatureFlags'; import {NoFlags, Placement, StaticMask} from './ReactFiberFlags'; import {ConcurrentRoot, BlockingRoot} from './ReactRootTags'; @@ -54,6 +55,7 @@ import { ScopeComponent, OffscreenComponent, LegacyHiddenComponent, + CacheComponent, } from './ReactWorkTags'; import getComponentName from 'shared/getComponentName'; @@ -88,6 +90,7 @@ import { REACT_SCOPE_TYPE, REACT_OFFSCREEN_TYPE, REACT_LEGACY_HIDDEN_TYPE, + REACT_CACHE_TYPE, } from 'shared/ReactSymbols'; export type {Fiber}; @@ -501,6 +504,11 @@ export function createFiberFromTypeAndProps( return createFiberFromScope(type, pendingProps, mode, lanes, key); } // eslint-disable-next-line no-fallthrough + case REACT_CACHE_TYPE: + if (enableCache) { + return createFiberFromCache(pendingProps, mode, lanes, key); + } + // eslint-disable-next-line no-fallthrough default: { if (typeof type === 'object' && type !== null) { switch (type.$$typeof) { @@ -745,6 +753,24 @@ export function createFiberFromLegacyHidden( return fiber; } +export function createFiberFromCache( + pendingProps: any, + mode: TypeOfMode, + lanes: Lanes, + key: null | string, +) { + const fiber = createFiber(CacheComponent, pendingProps, key, mode); + // TODO: The Cache fiber shouldn't have a type. It has a tag. + // This needs to be fixed in getComponentName so that it relies on the tag + // instead. 
+ if (__DEV__) { + fiber.type = REACT_CACHE_TYPE; + } + fiber.elementType = REACT_CACHE_TYPE; + fiber.lanes = lanes; + return fiber; +} + export function createFiberFromText( content: string, mode: TypeOfMode, diff --git a/packages/react-reconciler/src/ReactFiber.old.js b/packages/react-reconciler/src/ReactFiber.old.js index 76001dd757e74..1e20db2851007 100644 --- a/packages/react-reconciler/src/ReactFiber.old.js +++ b/packages/react-reconciler/src/ReactFiber.old.js @@ -27,6 +27,7 @@ import { enableProfilerTimer, enableFundamentalAPI, enableScopeAPI, + enableCache, } from 'shared/ReactFeatureFlags'; import {NoFlags, Placement, StaticMask} from './ReactFiberFlags'; import {ConcurrentRoot, BlockingRoot} from './ReactRootTags'; @@ -54,6 +55,7 @@ import { ScopeComponent, OffscreenComponent, LegacyHiddenComponent, + CacheComponent, } from './ReactWorkTags'; import getComponentName from 'shared/getComponentName'; @@ -88,6 +90,7 @@ import { REACT_SCOPE_TYPE, REACT_OFFSCREEN_TYPE, REACT_LEGACY_HIDDEN_TYPE, + REACT_CACHE_TYPE, } from 'shared/ReactSymbols'; export type {Fiber}; @@ -501,6 +504,11 @@ export function createFiberFromTypeAndProps( return createFiberFromScope(type, pendingProps, mode, lanes, key); } // eslint-disable-next-line no-fallthrough + case REACT_CACHE_TYPE: + if (enableCache) { + return createFiberFromCache(pendingProps, mode, lanes, key); + } + // eslint-disable-next-line no-fallthrough default: { if (typeof type === 'object' && type !== null) { switch (type.$$typeof) { @@ -745,6 +753,24 @@ export function createFiberFromLegacyHidden( return fiber; } +export function createFiberFromCache( + pendingProps: any, + mode: TypeOfMode, + lanes: Lanes, + key: null | string, +) { + const fiber = createFiber(CacheComponent, pendingProps, key, mode); + // TODO: The Cache fiber shouldn't have a type. It has a tag. + // This needs to be fixed in getComponentName so that it relies on the tag + // instead. 
+ if (__DEV__) { + fiber.type = REACT_CACHE_TYPE; + } + fiber.elementType = REACT_CACHE_TYPE; + fiber.lanes = lanes; + return fiber; +} + export function createFiberFromText( content: string, mode: TypeOfMode, diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.new.js b/packages/react-reconciler/src/ReactFiberBeginWork.new.js index 056d9c8b1ca7f..52e6216d442eb 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.new.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.new.js @@ -23,6 +23,12 @@ import type { OffscreenProps, OffscreenState, } from './ReactFiberOffscreenComponent'; +import type { + Cache, + CacheComponentState, + SpawnedCachePool, +} from './ReactFiberCacheComponent.new'; +import type {UpdateQueue} from './ReactUpdateQueue.new'; import checkPropTypes from 'shared/checkPropTypes'; @@ -50,6 +56,7 @@ import { ScopeComponent, OffscreenComponent, LegacyHiddenComponent, + CacheComponent, } from './ReactWorkTags'; import { NoFlags, @@ -76,6 +83,7 @@ import { enableFundamentalAPI, warnAboutDefaultPropsOnFunctionComponents, enableScopeAPI, + enableCache, } from 'shared/ReactFeatureFlags'; import invariant from 'shared/invariant'; import shallowEqual from 'shared/shallowEqual'; @@ -129,6 +137,7 @@ import { isSuspenseInstanceFallback, registerSuspenseInstanceRetry, supportsHydration, + isPrimaryRenderer, } from './ReactFiberHostConfig'; import type {SuspenseInstance} from './ReactFiberHostConfig'; import {shouldSuspend} from './ReactFiberReconciler'; @@ -201,6 +210,16 @@ import { } from './ReactFiberWorkLoop.new'; import {unstable_wrap as Schedule_tracing_wrap} from 'scheduler/tracing'; import {setWorkInProgressVersion} from './ReactMutableSource.new'; +import { + requestCacheFromPool, + pushCacheProvider, + pushRootCachePool, + CacheContext, + getSuspendedCachePool, + restoreSpawnedCachePool, + getOffscreenDeferredCachePool, +} from './ReactFiberCacheComponent.new'; +import {MAX_SIGNED_31_BIT_INT} from './MaxInts'; import {disableLogs, reenableLogs} from 'shared/ConsolePatchingDev'; @@ -576,23 +595,39 @@ function updateOffscreenComponent( const prevState: OffscreenState | null = current !== null ? current.memoizedState : null; + // If this is not null, this is a cache pool that was carried over from the + // previous render. We will push this to the cache pool context so that we can + // resume in-flight requests. + let spawnedCachePool: SpawnedCachePool | null = null; + if ( nextProps.mode === 'hidden' || nextProps.mode === 'unstable-defer-without-hiding' ) { + // Rendering a hidden tree. if ((workInProgress.mode & ConcurrentMode) === NoMode) { // In legacy sync mode, don't defer the subtree. Render it now. // TODO: Figure out what we should do in Blocking mode. const nextState: OffscreenState = { baseLanes: NoLanes, + cachePool: null, }; workInProgress.memoizedState = nextState; pushRenderLanes(workInProgress, renderLanes); } else if (!includesSomeLane(renderLanes, (OffscreenLane: Lane))) { + // We're hidden, and we're not rendering at Offscreen. We will bail out + // and resume this tree later. let nextBaseLanes; if (prevState !== null) { const prevBaseLanes = prevState.baseLanes; nextBaseLanes = mergeLanes(prevBaseLanes, renderLanes); + if (enableCache) { + // Save the cache pool so we can resume later. + spawnedCachePool = getOffscreenDeferredCachePool(); + // We don't need to push to the cache pool because we're about to + // bail out. There won't be a context mismatch because we only pop + // the cache pool if `updateQueue` is non-null. 
+ } } else { nextBaseLanes = renderLanes; } @@ -606,16 +641,35 @@ function updateOffscreenComponent( ); const nextState: OffscreenState = { baseLanes: nextBaseLanes, + cachePool: spawnedCachePool, }; workInProgress.memoizedState = nextState; + workInProgress.updateQueue = null; // We're about to bail out, but we need to push this to the stack anyway // to avoid a push/pop misalignment. pushRenderLanes(workInProgress, nextBaseLanes); return null; } else { + // This is the second render. The surrounding visible content has already + // committed. Now we resume rendering the hidden tree. + + if (enableCache && prevState !== null) { + // If the render that spawned this one accessed the cache pool, resume + // using the same cache. Unless the parent changed, since that means + // there was a refresh. + const prevCachePool = prevState.cachePool; + if (prevCachePool !== null) { + spawnedCachePool = restoreSpawnedCachePool( + workInProgress, + prevCachePool, + ); + } + } + // Rendering at offscreen, so we can clear the base lanes. const nextState: OffscreenState = { baseLanes: NoLanes, + cachePool: null, }; workInProgress.memoizedState = nextState; // Push the lanes that were skipped when we bailed out. @@ -624,9 +678,26 @@ function updateOffscreenComponent( pushRenderLanes(workInProgress, subtreeRenderLanes); } } else { + // Rendering a visible tree. let subtreeRenderLanes; if (prevState !== null) { + // We're going from hidden -> visible. + subtreeRenderLanes = mergeLanes(prevState.baseLanes, renderLanes); + + if (enableCache) { + // If the render that spawned this one accessed the cache pool, resume + // using the same cache. Unless the parent changed, since that means + // there was a refresh. + const prevCachePool = prevState.cachePool; + if (prevCachePool !== null) { + spawnedCachePool = restoreSpawnedCachePool( + workInProgress, + prevCachePool, + ); + } + } + // Since we're not hidden anymore, reset the state workInProgress.memoizedState = null; } else { @@ -638,6 +709,12 @@ function updateOffscreenComponent( pushRenderLanes(workInProgress, subtreeRenderLanes); } + if (enableCache) { + // If we have a cache pool from a previous render attempt, then this will be + // non-null. We use this to infer whether to push/pop the cache context. + workInProgress.updateQueue = spawnedCachePool; + } + reconcileChildren(current, workInProgress, nextChildren, renderLanes); return workInProgress.child; } @@ -647,6 +724,79 @@ function updateOffscreenComponent( // fork the function. const updateLegacyHiddenComponent = updateOffscreenComponent; +function updateCacheComponent( + current: Fiber | null, + workInProgress: Fiber, + updateLanes: Lanes, + renderLanes: Lanes, +) { + if (!enableCache) { + return null; + } + + prepareToReadContext(workInProgress, renderLanes); + const parentCache = readContext(CacheContext); + + if (current === null) { + // Initial mount. Request a fresh cache from the pool. 
+ const freshCache = requestCacheFromPool(renderLanes); + const initialState: CacheComponentState = { + parent: parentCache, + cache: freshCache, + }; + workInProgress.memoizedState = initialState; + initializeUpdateQueue(workInProgress); + pushCacheProvider(workInProgress, freshCache); + } else { + // Check for updates + if (includesSomeLane(renderLanes, updateLanes)) { + cloneUpdateQueue(current, workInProgress); + processUpdateQueue(workInProgress, null, null, renderLanes); + } + const prevState: CacheComponentState = current.memoizedState; + const nextState: CacheComponentState = workInProgress.memoizedState; + + // Compare the new parent cache to the previous to see detect there was + // a refresh. + if (prevState.parent !== parentCache) { + // Refresh in parent. Update the parent. + const derivedState: CacheComponentState = { + parent: parentCache, + cache: parentCache, + }; + + // Copied from getDerivedStateFromProps implementation. Once the update + // queue is empty, persist the derived state onto the base state. + workInProgress.memoizedState = derivedState; + if (workInProgress.lanes === NoLanes) { + const updateQueue: UpdateQueue = (workInProgress.updateQueue: any); + workInProgress.memoizedState = updateQueue.baseState = derivedState; + } + + pushCacheProvider(workInProgress, parentCache); + // No need to propagate a context change because the refreshed parent + // already did. + } else { + // The parent didn't refresh. Now check if this cache did. + const nextCache = nextState.cache; + pushCacheProvider(workInProgress, nextCache); + if (nextCache !== prevState.cache) { + // This cache refreshed. Propagate a context change. + propagateContextChange( + workInProgress, + CacheContext, + MAX_SIGNED_31_BIT_INT, + renderLanes, + ); + } + } + } + + const nextChildren = workInProgress.pendingProps.children; + reconcileChildren(current, workInProgress, nextChildren, renderLanes); + return workInProgress.child; +} + function updateFragment( current: Fiber | null, workInProgress: Fiber, @@ -991,10 +1141,28 @@ function updateHostRoot(current, workInProgress, renderLanes) { ); const nextProps = workInProgress.pendingProps; const prevState = workInProgress.memoizedState; - const prevChildren = prevState !== null ? prevState.element : null; + const prevChildren = prevState.element; cloneUpdateQueue(current, workInProgress); processUpdateQueue(workInProgress, nextProps, null, renderLanes); const nextState = workInProgress.memoizedState; + + const root: FiberRoot = workInProgress.stateNode; + + if (enableCache) { + const nextCache: Cache = nextState.cache; + pushRootCachePool(root); + pushCacheProvider(workInProgress, nextCache); + if (nextCache !== prevState.cache) { + // The root cache refreshed. + propagateContextChange( + workInProgress, + CacheContext, + MAX_SIGNED_31_BIT_INT, + renderLanes, + ); + } + } + // Caution: React DevTools currently depends on this property // being called "element". const nextChildren = nextState.element; @@ -1002,7 +1170,6 @@ function updateHostRoot(current, workInProgress, renderLanes) { resetHydrationState(); return bailoutOnAlreadyFinishedWork(current, workInProgress, renderLanes); } - const root: FiberRoot = workInProgress.stateNode; if (root.hydrate && enterHydrationState(workInProgress)) { // If we don't have any current children this might be the first pass. // We always try to hydrate. 
If this isn't a hydration pass there won't @@ -1562,6 +1729,7 @@ const SUSPENDED_MARKER: SuspenseState = { function mountSuspenseOffscreenState(renderLanes: Lanes): OffscreenState { return { baseLanes: renderLanes, + cachePool: getSuspendedCachePool(), }; } @@ -1569,8 +1737,33 @@ function updateSuspenseOffscreenState( prevOffscreenState: OffscreenState, renderLanes: Lanes, ): OffscreenState { + let cachePool: SpawnedCachePool | null = null; + if (enableCache) { + const prevCachePool: SpawnedCachePool | null = prevOffscreenState.cachePool; + if (prevCachePool !== null) { + const parentCache = isPrimaryRenderer + ? CacheContext._currentValue + : CacheContext._currentValue2; + if (prevCachePool.parent !== parentCache) { + // Detected a refresh in the parent. This overrides any previously + // suspended cache. + cachePool = { + parent: parentCache, + pool: parentCache, + }; + } else { + // We can reuse the cache from last time. The only thing that would have + // overridden it is a parent refresh, which we checked for above. + cachePool = prevCachePool; + } + } else { + // If there's no previous cache pool, grab the current one. + cachePool = getSuspendedCachePool(); + } + } return { baseLanes: mergeLanes(prevOffscreenState.baseLanes, renderLanes), + cachePool, }; } @@ -2801,7 +2994,7 @@ function updateContextProvider( } } - pushProvider(workInProgress, newValue); + pushProvider(workInProgress, context, newValue); if (oldProps !== null) { const oldValue = oldProps.value; @@ -3068,6 +3261,12 @@ function beginWork( switch (workInProgress.tag) { case HostRoot: pushHostRootContext(workInProgress); + if (enableCache) { + const root: FiberRoot = workInProgress.stateNode; + const cache: Cache = current.memoizedState.cache; + pushCacheProvider(workInProgress, cache); + pushRootCachePool(root); + } resetHydrationState(); break; case HostComponent: @@ -3088,7 +3287,8 @@ function beginWork( break; case ContextProvider: { const newValue = workInProgress.memoizedProps.value; - pushProvider(workInProgress, newValue); + const context: ReactContext = workInProgress.type._context; + pushProvider(workInProgress, context, newValue); break; } case Profiler: @@ -3234,6 +3434,13 @@ function beginWork( workInProgress.lanes = NoLanes; return updateOffscreenComponent(current, workInProgress, renderLanes); } + case CacheComponent: { + if (enableCache) { + const cache: Cache = current.memoizedState.cache; + pushCacheProvider(workInProgress, cache); + } + break; + } } return bailoutOnAlreadyFinishedWork(current, workInProgress, renderLanes); } else { @@ -3418,6 +3625,17 @@ function beginWork( case LegacyHiddenComponent: { return updateLegacyHiddenComponent(current, workInProgress, renderLanes); } + case CacheComponent: { + if (enableCache) { + return updateCacheComponent( + current, + workInProgress, + updateLanes, + renderLanes, + ); + } + break; + } } invariant( false, diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.old.js b/packages/react-reconciler/src/ReactFiberBeginWork.old.js index 3f462e198aaf5..5a25d0a9cd65f 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.old.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.old.js @@ -23,6 +23,12 @@ import type { OffscreenProps, OffscreenState, } from './ReactFiberOffscreenComponent'; +import type { + Cache, + CacheComponentState, + SpawnedCachePool, +} from './ReactFiberCacheComponent.old'; +import type {UpdateQueue} from './ReactUpdateQueue.old'; import checkPropTypes from 'shared/checkPropTypes'; @@ -50,6 +56,7 @@ import { 
ScopeComponent, OffscreenComponent, LegacyHiddenComponent, + CacheComponent, } from './ReactWorkTags'; import { NoFlags, @@ -76,6 +83,7 @@ import { enableFundamentalAPI, warnAboutDefaultPropsOnFunctionComponents, enableScopeAPI, + enableCache, } from 'shared/ReactFeatureFlags'; import invariant from 'shared/invariant'; import shallowEqual from 'shared/shallowEqual'; @@ -129,6 +137,7 @@ import { isSuspenseInstanceFallback, registerSuspenseInstanceRetry, supportsHydration, + isPrimaryRenderer, } from './ReactFiberHostConfig'; import type {SuspenseInstance} from './ReactFiberHostConfig'; import {shouldSuspend} from './ReactFiberReconciler'; @@ -201,6 +210,16 @@ import { } from './ReactFiberWorkLoop.old'; import {unstable_wrap as Schedule_tracing_wrap} from 'scheduler/tracing'; import {setWorkInProgressVersion} from './ReactMutableSource.old'; +import { + requestCacheFromPool, + pushCacheProvider, + pushRootCachePool, + CacheContext, + getSuspendedCachePool, + restoreSpawnedCachePool, + getOffscreenDeferredCachePool, +} from './ReactFiberCacheComponent.old'; +import {MAX_SIGNED_31_BIT_INT} from './MaxInts'; import {disableLogs, reenableLogs} from 'shared/ConsolePatchingDev'; @@ -576,23 +595,39 @@ function updateOffscreenComponent( const prevState: OffscreenState | null = current !== null ? current.memoizedState : null; + // If this is not null, this is a cache pool that was carried over from the + // previous render. We will push this to the cache pool context so that we can + // resume in-flight requests. + let spawnedCachePool: SpawnedCachePool | null = null; + if ( nextProps.mode === 'hidden' || nextProps.mode === 'unstable-defer-without-hiding' ) { + // Rendering a hidden tree. if ((workInProgress.mode & ConcurrentMode) === NoMode) { // In legacy sync mode, don't defer the subtree. Render it now. // TODO: Figure out what we should do in Blocking mode. const nextState: OffscreenState = { baseLanes: NoLanes, + cachePool: null, }; workInProgress.memoizedState = nextState; pushRenderLanes(workInProgress, renderLanes); } else if (!includesSomeLane(renderLanes, (OffscreenLane: Lane))) { + // We're hidden, and we're not rendering at Offscreen. We will bail out + // and resume this tree later. let nextBaseLanes; if (prevState !== null) { const prevBaseLanes = prevState.baseLanes; nextBaseLanes = mergeLanes(prevBaseLanes, renderLanes); + if (enableCache) { + // Save the cache pool so we can resume later. + spawnedCachePool = getOffscreenDeferredCachePool(); + // We don't need to push to the cache pool because we're about to + // bail out. There won't be a context mismatch because we only pop + // the cache pool if `updateQueue` is non-null. + } } else { nextBaseLanes = renderLanes; } @@ -606,16 +641,35 @@ function updateOffscreenComponent( ); const nextState: OffscreenState = { baseLanes: nextBaseLanes, + cachePool: spawnedCachePool, }; workInProgress.memoizedState = nextState; + workInProgress.updateQueue = null; // We're about to bail out, but we need to push this to the stack anyway // to avoid a push/pop misalignment. pushRenderLanes(workInProgress, nextBaseLanes); return null; } else { + // This is the second render. The surrounding visible content has already + // committed. Now we resume rendering the hidden tree. + + if (enableCache && prevState !== null) { + // If the render that spawned this one accessed the cache pool, resume + // using the same cache. Unless the parent changed, since that means + // there was a refresh. 
+ const prevCachePool = prevState.cachePool; + if (prevCachePool !== null) { + spawnedCachePool = restoreSpawnedCachePool( + workInProgress, + prevCachePool, + ); + } + } + // Rendering at offscreen, so we can clear the base lanes. const nextState: OffscreenState = { baseLanes: NoLanes, + cachePool: null, }; workInProgress.memoizedState = nextState; // Push the lanes that were skipped when we bailed out. @@ -624,9 +678,26 @@ function updateOffscreenComponent( pushRenderLanes(workInProgress, subtreeRenderLanes); } } else { + // Rendering a visible tree. let subtreeRenderLanes; if (prevState !== null) { + // We're going from hidden -> visible. + subtreeRenderLanes = mergeLanes(prevState.baseLanes, renderLanes); + + if (enableCache) { + // If the render that spawned this one accessed the cache pool, resume + // using the same cache. Unless the parent changed, since that means + // there was a refresh. + const prevCachePool = prevState.cachePool; + if (prevCachePool !== null) { + spawnedCachePool = restoreSpawnedCachePool( + workInProgress, + prevCachePool, + ); + } + } + // Since we're not hidden anymore, reset the state workInProgress.memoizedState = null; } else { @@ -638,6 +709,12 @@ function updateOffscreenComponent( pushRenderLanes(workInProgress, subtreeRenderLanes); } + if (enableCache) { + // If we have a cache pool from a previous render attempt, then this will be + // non-null. We use this to infer whether to push/pop the cache context. + workInProgress.updateQueue = spawnedCachePool; + } + reconcileChildren(current, workInProgress, nextChildren, renderLanes); return workInProgress.child; } @@ -647,6 +724,79 @@ function updateOffscreenComponent( // fork the function. const updateLegacyHiddenComponent = updateOffscreenComponent; +function updateCacheComponent( + current: Fiber | null, + workInProgress: Fiber, + updateLanes: Lanes, + renderLanes: Lanes, +) { + if (!enableCache) { + return null; + } + + prepareToReadContext(workInProgress, renderLanes); + const parentCache = readContext(CacheContext); + + if (current === null) { + // Initial mount. Request a fresh cache from the pool. + const freshCache = requestCacheFromPool(renderLanes); + const initialState: CacheComponentState = { + parent: parentCache, + cache: freshCache, + }; + workInProgress.memoizedState = initialState; + initializeUpdateQueue(workInProgress); + pushCacheProvider(workInProgress, freshCache); + } else { + // Check for updates + if (includesSomeLane(renderLanes, updateLanes)) { + cloneUpdateQueue(current, workInProgress); + processUpdateQueue(workInProgress, null, null, renderLanes); + } + const prevState: CacheComponentState = current.memoizedState; + const nextState: CacheComponentState = workInProgress.memoizedState; + + // Compare the new parent cache to the previous to see detect there was + // a refresh. + if (prevState.parent !== parentCache) { + // Refresh in parent. Update the parent. + const derivedState: CacheComponentState = { + parent: parentCache, + cache: parentCache, + }; + + // Copied from getDerivedStateFromProps implementation. Once the update + // queue is empty, persist the derived state onto the base state. + workInProgress.memoizedState = derivedState; + if (workInProgress.lanes === NoLanes) { + const updateQueue: UpdateQueue = (workInProgress.updateQueue: any); + workInProgress.memoizedState = updateQueue.baseState = derivedState; + } + + pushCacheProvider(workInProgress, parentCache); + // No need to propagate a context change because the refreshed parent + // already did. 
+ } else { + // The parent didn't refresh. Now check if this cache did. + const nextCache = nextState.cache; + pushCacheProvider(workInProgress, nextCache); + if (nextCache !== prevState.cache) { + // This cache refreshed. Propagate a context change. + propagateContextChange( + workInProgress, + CacheContext, + MAX_SIGNED_31_BIT_INT, + renderLanes, + ); + } + } + } + + const nextChildren = workInProgress.pendingProps.children; + reconcileChildren(current, workInProgress, nextChildren, renderLanes); + return workInProgress.child; +} + function updateFragment( current: Fiber | null, workInProgress: Fiber, @@ -991,10 +1141,28 @@ function updateHostRoot(current, workInProgress, renderLanes) { ); const nextProps = workInProgress.pendingProps; const prevState = workInProgress.memoizedState; - const prevChildren = prevState !== null ? prevState.element : null; + const prevChildren = prevState.element; cloneUpdateQueue(current, workInProgress); processUpdateQueue(workInProgress, nextProps, null, renderLanes); const nextState = workInProgress.memoizedState; + + const root: FiberRoot = workInProgress.stateNode; + + if (enableCache) { + const nextCache: Cache = nextState.cache; + pushRootCachePool(root); + pushCacheProvider(workInProgress, nextCache); + if (nextCache !== prevState.cache) { + // The root cache refreshed. + propagateContextChange( + workInProgress, + CacheContext, + MAX_SIGNED_31_BIT_INT, + renderLanes, + ); + } + } + // Caution: React DevTools currently depends on this property // being called "element". const nextChildren = nextState.element; @@ -1002,7 +1170,6 @@ function updateHostRoot(current, workInProgress, renderLanes) { resetHydrationState(); return bailoutOnAlreadyFinishedWork(current, workInProgress, renderLanes); } - const root: FiberRoot = workInProgress.stateNode; if (root.hydrate && enterHydrationState(workInProgress)) { // If we don't have any current children this might be the first pass. // We always try to hydrate. If this isn't a hydration pass there won't @@ -1562,6 +1729,7 @@ const SUSPENDED_MARKER: SuspenseState = { function mountSuspenseOffscreenState(renderLanes: Lanes): OffscreenState { return { baseLanes: renderLanes, + cachePool: getSuspendedCachePool(), }; } @@ -1569,8 +1737,33 @@ function updateSuspenseOffscreenState( prevOffscreenState: OffscreenState, renderLanes: Lanes, ): OffscreenState { + let cachePool: SpawnedCachePool | null = null; + if (enableCache) { + const prevCachePool: SpawnedCachePool | null = prevOffscreenState.cachePool; + if (prevCachePool !== null) { + const parentCache = isPrimaryRenderer + ? CacheContext._currentValue + : CacheContext._currentValue2; + if (prevCachePool.parent !== parentCache) { + // Detected a refresh in the parent. This overrides any previously + // suspended cache. + cachePool = { + parent: parentCache, + pool: parentCache, + }; + } else { + // We can reuse the cache from last time. The only thing that would have + // overridden it is a parent refresh, which we checked for above. + cachePool = prevCachePool; + } + } else { + // If there's no previous cache pool, grab the current one. 
+ cachePool = getSuspendedCachePool(); + } + } return { baseLanes: mergeLanes(prevOffscreenState.baseLanes, renderLanes), + cachePool, }; } @@ -2801,7 +2994,7 @@ function updateContextProvider( } } - pushProvider(workInProgress, newValue); + pushProvider(workInProgress, context, newValue); if (oldProps !== null) { const oldValue = oldProps.value; @@ -3068,6 +3261,12 @@ function beginWork( switch (workInProgress.tag) { case HostRoot: pushHostRootContext(workInProgress); + if (enableCache) { + const root: FiberRoot = workInProgress.stateNode; + const cache: Cache = current.memoizedState.cache; + pushCacheProvider(workInProgress, cache); + pushRootCachePool(root); + } resetHydrationState(); break; case HostComponent: @@ -3088,7 +3287,8 @@ function beginWork( break; case ContextProvider: { const newValue = workInProgress.memoizedProps.value; - pushProvider(workInProgress, newValue); + const context: ReactContext = workInProgress.type._context; + pushProvider(workInProgress, context, newValue); break; } case Profiler: @@ -3234,6 +3434,13 @@ function beginWork( workInProgress.lanes = NoLanes; return updateOffscreenComponent(current, workInProgress, renderLanes); } + case CacheComponent: { + if (enableCache) { + const cache: Cache = current.memoizedState.cache; + pushCacheProvider(workInProgress, cache); + } + break; + } } return bailoutOnAlreadyFinishedWork(current, workInProgress, renderLanes); } else { @@ -3418,6 +3625,17 @@ function beginWork( case LegacyHiddenComponent: { return updateLegacyHiddenComponent(current, workInProgress, renderLanes); } + case CacheComponent: { + if (enableCache) { + return updateCacheComponent( + current, + workInProgress, + updateLanes, + renderLanes, + ); + } + break; + } } invariant( false, diff --git a/packages/react-reconciler/src/ReactFiberCacheComponent.new.js b/packages/react-reconciler/src/ReactFiberCacheComponent.new.js new file mode 100644 index 0000000000000..b5ae3ccc611a8 --- /dev/null +++ b/packages/react-reconciler/src/ReactFiberCacheComponent.new.js @@ -0,0 +1,204 @@ +/** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @flow + */ + +import type {ReactContext} from 'shared/ReactTypes'; +import type {FiberRoot} from './ReactInternalTypes'; +import type {Lanes} from './ReactFiberLane.new'; +import type {StackCursor} from './ReactFiberStack.new'; + +import {enableCache} from 'shared/ReactFeatureFlags'; +import {REACT_CONTEXT_TYPE} from 'shared/ReactSymbols'; + +import {isPrimaryRenderer} from './ReactFiberHostConfig'; +import {createCursor, push, pop} from './ReactFiberStack.new'; +import {pushProvider, popProvider} from './ReactFiberNewContext.new'; + +export type Cache = Map<() => mixed, mixed>; + +export type CacheComponentState = {| + +parent: Cache, + +cache: Cache, +|}; + +export type SpawnedCachePool = {| + +parent: Cache, + +pool: Cache, +|}; + +export const CacheContext: ReactContext = enableCache + ? { + $$typeof: REACT_CONTEXT_TYPE, + // We don't use Consumer/Provider for Cache components. So we'll cheat. + Consumer: (null: any), + Provider: (null: any), + _calculateChangedBits: null, + // We'll initialize these at the root. 
+ _currentValue: (null: any), + _currentValue2: (null: any), + _threadCount: 0, + } + : (null: any); + +if (__DEV__ && enableCache) { + CacheContext._currentRenderer = null; + CacheContext._currentRenderer2 = null; +} + +// The cache that newly mounted Cache boundaries should use. It's either +// retrieved from the cache pool, or the result of a refresh. +let pooledCache: Cache | null = null; + +// When retrying a Suspense/Offscreen boundary, we override pooledCache with the +// cache from the render that suspended. +const prevFreshCacheOnStack: StackCursor = createCursor(null); + +export function pushCacheProvider(workInProgress: Fiber, cache: Cache) { + if (!enableCache) { + return; + } + pushProvider(workInProgress, CacheContext, cache); +} + +export function popCacheProvider(workInProgress: Fiber, cache: Cache) { + if (!enableCache) { + return; + } + popProvider(CacheContext, workInProgress); +} + +export function requestCacheFromPool(renderLanes: Lanes): Cache { + if (!enableCache) { + return (null: any); + } + if (pooledCache !== null) { + return pooledCache; + } + // Create a fresh cache. + pooledCache = new Map(); + return pooledCache; +} + +export function pushRootCachePool(root: FiberRoot) { + if (!enableCache) { + return; + } + // When we start rendering a tree, read the pooled cache for this render + // from `root.pooledCache`. If it's currently `null`, we will lazily + // initialize it the first type it's requested. However, we only mutate + // the root itself during the complete/unwind phase of the HostRoot. + pooledCache = root.pooledCache; +} + +export function popRootCachePool(root: FiberRoot, renderLanes: Lanes) { + if (!enableCache) { + return; + } + // The `pooledCache` variable points to the cache that was used for new + // cache boundaries during this render, if any. Stash it on the root so that + // parallel transitions may share the same cache. We will clear this field + // once all the transitions that depend on it (which we track with + // `pooledCacheLanes`) have committed. + root.pooledCache = pooledCache; + if (pooledCache !== null) { + root.pooledCacheLanes |= renderLanes; + } +} + +export function restoreSpawnedCachePool( + offscreenWorkInProgress: Fiber, + prevCachePool: SpawnedCachePool, +): SpawnedCachePool | null { + if (!enableCache) { + return (null: any); + } + const nextParentCache = isPrimaryRenderer + ? CacheContext._currentValue + : CacheContext._currentValue2; + if (nextParentCache !== prevCachePool.parent) { + // There was a refresh. Don't bother restoring anything since the refresh + // will override it. + return null; + } else { + // No refresh. Resume with the previous cache. This will override the cache + // pool so that any new Cache boundaries in the subtree use this one instead + // of requesting a fresh one. + push(prevFreshCacheOnStack, pooledCache, offscreenWorkInProgress); + pooledCache = prevCachePool.pool; + + // Return the cache pool to signal that we did in fact push it. We will + // assign this to the field on the fiber so we know to pop the context. + return prevCachePool; + } +} + +// Note: Ideally, `popCachePool` would return this value, and then we would pass +// it to `getSuspendedCachePool`. But factoring reasons, those two functions are +// in different phases/files. They are always called in sequence, though, so we +// can stash the value here temporarily. 
+let _suspendedPooledCache: Cache | null = null; + +export function popCachePool(workInProgress: Fiber) { + if (!enableCache) { + return; + } + _suspendedPooledCache = pooledCache; + pooledCache = prevFreshCacheOnStack.current; + pop(prevFreshCacheOnStack, workInProgress); +} + +export function getSuspendedCachePool(): SpawnedCachePool | null { + if (!enableCache) { + return null; + } + + // We check the cache on the stack first, since that's the one any new Caches + // would have accessed. + let pool = pooledCache; + if (pool === null) { + // There's no pooled cache above us in the stack. However, a child in the + // suspended tree may have requested a fresh cache pool. If so, we would + // have unwound it with `popCachePool`. + if (_suspendedPooledCache !== null) { + pool = _suspendedPooledCache; + _suspendedPooledCache = null; + } else { + // There's no suspended cache pool. + return null; + } + } + + return { + // We must also save the parent, so that when we resume we can detect + // a refresh. + parent: isPrimaryRenderer + ? CacheContext._currentValue + : CacheContext._currentValue2, + pool, + }; +} + +export function getOffscreenDeferredCachePool(): SpawnedCachePool | null { + if (!enableCache) { + return null; + } + + if (pooledCache === null) { + // There's no deferred cache pool. + return null; + } + + return { + // We must also store the parent, so that when we resume we can detect + // a refresh. + parent: isPrimaryRenderer + ? CacheContext._currentValue + : CacheContext._currentValue2, + pool: pooledCache, + }; +} diff --git a/packages/react-reconciler/src/ReactFiberCacheComponent.old.js b/packages/react-reconciler/src/ReactFiberCacheComponent.old.js new file mode 100644 index 0000000000000..8882f7dbd2d48 --- /dev/null +++ b/packages/react-reconciler/src/ReactFiberCacheComponent.old.js @@ -0,0 +1,204 @@ +/** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @flow + */ + +import type {ReactContext} from 'shared/ReactTypes'; +import type {FiberRoot} from './ReactInternalTypes'; +import type {Lanes} from './ReactFiberLane.old'; +import type {StackCursor} from './ReactFiberStack.old'; + +import {enableCache} from 'shared/ReactFeatureFlags'; +import {REACT_CONTEXT_TYPE} from 'shared/ReactSymbols'; + +import {isPrimaryRenderer} from './ReactFiberHostConfig'; +import {createCursor, push, pop} from './ReactFiberStack.old'; +import {pushProvider, popProvider} from './ReactFiberNewContext.old'; + +export type Cache = Map<() => mixed, mixed>; + +export type CacheComponentState = {| + +parent: Cache, + +cache: Cache, +|}; + +export type SpawnedCachePool = {| + +parent: Cache, + +pool: Cache, +|}; + +export const CacheContext: ReactContext = enableCache + ? { + $$typeof: REACT_CONTEXT_TYPE, + // We don't use Consumer/Provider for Cache components. So we'll cheat. + Consumer: (null: any), + Provider: (null: any), + _calculateChangedBits: null, + // We'll initialize these at the root. + _currentValue: (null: any), + _currentValue2: (null: any), + _threadCount: 0, + } + : (null: any); + +if (__DEV__ && enableCache) { + CacheContext._currentRenderer = null; + CacheContext._currentRenderer2 = null; +} + +// The cache that newly mounted Cache boundaries should use. It's either +// retrieved from the cache pool, or the result of a refresh. 
+let pooledCache: Cache | null = null; + +// When retrying a Suspense/Offscreen boundary, we override pooledCache with the +// cache from the render that suspended. +const prevFreshCacheOnStack: StackCursor = createCursor(null); + +export function pushCacheProvider(workInProgress: Fiber, cache: Cache) { + if (!enableCache) { + return; + } + pushProvider(workInProgress, CacheContext, cache); +} + +export function popCacheProvider(workInProgress: Fiber, cache: Cache) { + if (!enableCache) { + return; + } + popProvider(CacheContext, workInProgress); +} + +export function requestCacheFromPool(renderLanes: Lanes): Cache { + if (!enableCache) { + return (null: any); + } + if (pooledCache !== null) { + return pooledCache; + } + // Create a fresh cache. + pooledCache = new Map(); + return pooledCache; +} + +export function pushRootCachePool(root: FiberRoot) { + if (!enableCache) { + return; + } + // When we start rendering a tree, read the pooled cache for this render + // from `root.pooledCache`. If it's currently `null`, we will lazily + // initialize it the first type it's requested. However, we only mutate + // the root itself during the complete/unwind phase of the HostRoot. + pooledCache = root.pooledCache; +} + +export function popRootCachePool(root: FiberRoot, renderLanes: Lanes) { + if (!enableCache) { + return; + } + // The `pooledCache` variable points to the cache that was used for new + // cache boundaries during this render, if any. Stash it on the root so that + // parallel transitions may share the same cache. We will clear this field + // once all the transitions that depend on it (which we track with + // `pooledCacheLanes`) have committed. + root.pooledCache = pooledCache; + if (pooledCache !== null) { + root.pooledCacheLanes |= renderLanes; + } +} + +export function restoreSpawnedCachePool( + offscreenWorkInProgress: Fiber, + prevCachePool: SpawnedCachePool, +): SpawnedCachePool | null { + if (!enableCache) { + return (null: any); + } + const nextParentCache = isPrimaryRenderer + ? CacheContext._currentValue + : CacheContext._currentValue2; + if (nextParentCache !== prevCachePool.parent) { + // There was a refresh. Don't bother restoring anything since the refresh + // will override it. + return null; + } else { + // No refresh. Resume with the previous cache. This will override the cache + // pool so that any new Cache boundaries in the subtree use this one instead + // of requesting a fresh one. + push(prevFreshCacheOnStack, pooledCache, offscreenWorkInProgress); + pooledCache = prevCachePool.pool; + + // Return the cache pool to signal that we did in fact push it. We will + // assign this to the field on the fiber so we know to pop the context. + return prevCachePool; + } +} + +// Note: Ideally, `popCachePool` would return this value, and then we would pass +// it to `getSuspendedCachePool`. But factoring reasons, those two functions are +// in different phases/files. They are always called in sequence, though, so we +// can stash the value here temporarily. +let _suspendedPooledCache: Cache | null = null; + +export function popCachePool(workInProgress: Fiber) { + if (!enableCache) { + return; + } + _suspendedPooledCache = pooledCache; + pooledCache = prevFreshCacheOnStack.current; + pop(prevFreshCacheOnStack, workInProgress); +} + +export function getSuspendedCachePool(): SpawnedCachePool | null { + if (!enableCache) { + return null; + } + + // We check the cache on the stack first, since that's the one any new Caches + // would have accessed. 
+ let pool = pooledCache; + if (pool === null) { + // There's no pooled cache above us in the stack. However, a child in the + // suspended tree may have requested a fresh cache pool. If so, we would + // have unwound it with `popCachePool`. + if (_suspendedPooledCache !== null) { + pool = _suspendedPooledCache; + _suspendedPooledCache = null; + } else { + // There's no suspended cache pool. + return null; + } + } + + return { + // We must also save the parent, so that when we resume we can detect + // a refresh. + parent: isPrimaryRenderer + ? CacheContext._currentValue + : CacheContext._currentValue2, + pool, + }; +} + +export function getOffscreenDeferredCachePool(): SpawnedCachePool | null { + if (!enableCache) { + return null; + } + + if (pooledCache === null) { + // There's no deferred cache pool. + return null; + } + + return { + // We must also store the parent, so that when we resume we can detect + // a refresh. + parent: isPrimaryRenderer + ? CacheContext._currentValue + : CacheContext._currentValue2, + pool: pooledCache, + }; +} diff --git a/packages/react-reconciler/src/ReactFiberCompleteWork.new.js b/packages/react-reconciler/src/ReactFiberCompleteWork.new.js index 53a328121051f..92fa3eeacb603 100644 --- a/packages/react-reconciler/src/ReactFiberCompleteWork.new.js +++ b/packages/react-reconciler/src/ReactFiberCompleteWork.new.js @@ -12,6 +12,7 @@ import type {Lanes, Lane} from './ReactFiberLane.new'; import type { ReactFundamentalComponentInstance, ReactScopeInstance, + ReactContext, } from 'shared/ReactTypes'; import type {FiberRoot} from './ReactInternalTypes'; import type { @@ -27,6 +28,7 @@ import type { } from './ReactFiberSuspenseComponent.new'; import type {SuspenseContext} from './ReactFiberSuspenseContext.new'; import type {OffscreenState} from './ReactFiberOffscreenComponent'; +import type {Cache, SpawnedCachePool} from './ReactFiberCacheComponent.new'; import {resetWorkInProgressVersions as resetMutableSourceWorkInProgressVersions} from './ReactMutableSource.new'; @@ -56,6 +58,7 @@ import { ScopeComponent, OffscreenComponent, LegacyHiddenComponent, + CacheComponent, } from './ReactWorkTags'; import { NoMode, @@ -132,6 +135,7 @@ import { enableFundamentalAPI, enableScopeAPI, enableProfilerTimer, + enableCache, } from 'shared/ReactFeatureFlags'; import { markSpawnedWork, @@ -153,6 +157,11 @@ import { import {resetChildFibers} from './ReactChildFiber.new'; import {createScopeInstance} from './ReactFiberScope.new'; import {transferActualDuration} from './ReactProfilerTimer.new'; +import { + popCacheProvider, + popRootCachePool, + popCachePool, +} from './ReactFiberCacheComponent.new'; function markUpdate(workInProgress: Fiber) { // Tag the fiber with an update effect. 
This turns a Placement into @@ -808,10 +817,16 @@ function completeWork( return null; } case HostRoot: { + const fiberRoot = (workInProgress.stateNode: FiberRoot); + if (enableCache) { + popRootCachePool(fiberRoot, renderLanes); + + const cache: Cache = workInProgress.memoizedState.cache; + popCacheProvider(workInProgress, cache); + } popHostContainer(workInProgress); popTopLevelLegacyContextObject(workInProgress); resetMutableSourceWorkInProgressVersions(); - const fiberRoot = (workInProgress.stateNode: FiberRoot); if (fiberRoot.pendingContext) { fiberRoot.context = fiberRoot.pendingContext; fiberRoot.pendingContext = null; @@ -1135,7 +1150,8 @@ function completeWork( return null; case ContextProvider: // Pop provider fiber - popProvider(workInProgress); + const context: ReactContext = workInProgress.type._context; + popProvider(context, workInProgress); bubbleProperties(workInProgress); return null; case IncompleteClassComponent: { @@ -1479,8 +1495,23 @@ function completeWork( bubbleProperties(workInProgress); } + if (enableCache) { + const spawnedCachePool: SpawnedCachePool | null = (workInProgress.updateQueue: any); + if (spawnedCachePool !== null) { + popCachePool(workInProgress); + } + } + return null; } + case CacheComponent: { + if (enableCache) { + const cache: Cache = workInProgress.memoizedState.cache; + popCacheProvider(workInProgress, cache); + bubbleProperties(workInProgress); + return null; + } + } } invariant( false, diff --git a/packages/react-reconciler/src/ReactFiberCompleteWork.old.js b/packages/react-reconciler/src/ReactFiberCompleteWork.old.js index 8f0e68353d90c..a44029c8347c4 100644 --- a/packages/react-reconciler/src/ReactFiberCompleteWork.old.js +++ b/packages/react-reconciler/src/ReactFiberCompleteWork.old.js @@ -12,6 +12,7 @@ import type {Lanes, Lane} from './ReactFiberLane.old'; import type { ReactFundamentalComponentInstance, ReactScopeInstance, + ReactContext, } from 'shared/ReactTypes'; import type {FiberRoot} from './ReactInternalTypes'; import type { @@ -27,6 +28,7 @@ import type { } from './ReactFiberSuspenseComponent.old'; import type {SuspenseContext} from './ReactFiberSuspenseContext.old'; import type {OffscreenState} from './ReactFiberOffscreenComponent'; +import type {Cache, SpawnedCachePool} from './ReactFiberCacheComponent.old'; import {resetWorkInProgressVersions as resetMutableSourceWorkInProgressVersions} from './ReactMutableSource.old'; @@ -56,6 +58,7 @@ import { ScopeComponent, OffscreenComponent, LegacyHiddenComponent, + CacheComponent, } from './ReactWorkTags'; import { NoMode, @@ -132,6 +135,7 @@ import { enableFundamentalAPI, enableScopeAPI, enableProfilerTimer, + enableCache, } from 'shared/ReactFeatureFlags'; import { markSpawnedWork, @@ -153,6 +157,11 @@ import { import {resetChildFibers} from './ReactChildFiber.old'; import {createScopeInstance} from './ReactFiberScope.old'; import {transferActualDuration} from './ReactProfilerTimer.old'; +import { + popCacheProvider, + popRootCachePool, + popCachePool, +} from './ReactFiberCacheComponent.old'; function markUpdate(workInProgress: Fiber) { // Tag the fiber with an update effect. 
This turns a Placement into @@ -808,10 +817,16 @@ function completeWork( return null; } case HostRoot: { + const fiberRoot = (workInProgress.stateNode: FiberRoot); + if (enableCache) { + popRootCachePool(fiberRoot, renderLanes); + + const cache: Cache = workInProgress.memoizedState.cache; + popCacheProvider(workInProgress, cache); + } popHostContainer(workInProgress); popTopLevelLegacyContextObject(workInProgress); resetMutableSourceWorkInProgressVersions(); - const fiberRoot = (workInProgress.stateNode: FiberRoot); if (fiberRoot.pendingContext) { fiberRoot.context = fiberRoot.pendingContext; fiberRoot.pendingContext = null; @@ -1135,7 +1150,8 @@ function completeWork( return null; case ContextProvider: // Pop provider fiber - popProvider(workInProgress); + const context: ReactContext = workInProgress.type._context; + popProvider(context, workInProgress); bubbleProperties(workInProgress); return null; case IncompleteClassComponent: { @@ -1479,8 +1495,23 @@ function completeWork( bubbleProperties(workInProgress); } + if (enableCache) { + const spawnedCachePool: SpawnedCachePool | null = (workInProgress.updateQueue: any); + if (spawnedCachePool !== null) { + popCachePool(workInProgress); + } + } + return null; } + case CacheComponent: { + if (enableCache) { + const cache: Cache = workInProgress.memoizedState.cache; + popCacheProvider(workInProgress, cache); + bubbleProperties(workInProgress); + return null; + } + } } invariant( false, diff --git a/packages/react-reconciler/src/ReactFiberHooks.new.js b/packages/react-reconciler/src/ReactFiberHooks.new.js index 66c85b274df0e..d5ad5048926ea 100644 --- a/packages/react-reconciler/src/ReactFiberHooks.new.js +++ b/packages/react-reconciler/src/ReactFiberHooks.new.js @@ -19,6 +19,7 @@ import type {HookFlags} from './ReactHookEffectTags'; import type {ReactPriorityLevel} from './ReactInternalTypes'; import type {FiberRoot} from './ReactInternalTypes'; import type {OpaqueIDType} from './ReactFiberHostConfig'; +import type {Cache} from './ReactFiberCacheComponent.new'; import ReactSharedInternals from 'shared/ReactSharedInternals'; import { @@ -46,6 +47,7 @@ import { DefaultLanePriority, } from './ReactFiberLane.new'; import {readContext} from './ReactFiberNewContext.new'; +import {HostRoot, CacheComponent} from './ReactWorkTags'; import { Update as UpdateEffect, Passive as PassiveEffect, @@ -92,6 +94,8 @@ import { import {getIsRendering} from './ReactCurrentFiber'; import {logStateUpdateScheduled} from './DebugTracing'; import {markStateUpdateScheduled} from './SchedulingProfiler'; +import {CacheContext} from './ReactFiberCacheComponent.new'; +import {createUpdate, enqueueUpdate} from './ReactUpdateQueue.new'; const {ReactCurrentDispatcher, ReactCurrentBatchConfig} = ReactSharedInternals; @@ -1707,6 +1711,55 @@ function rerenderOpaqueIdentifier(): OpaqueIDType | void { return id; } +function mountRefresh() { + const hook = mountWorkInProgressHook(); + const refresh = (hook.memoizedState = refreshCache.bind( + null, + currentlyRenderingFiber, + )); + return refresh; +} + +function updateRefresh() { + const hook = updateWorkInProgressHook(); + return hook.memoizedState; +} + +function refreshCache(fiber: Fiber, seedKey: ?() => T, seedValue: T) { + // TODO: Does Cache work in legacy mode? Should decide and write a test. + // TODO: Consider warning if the refresh is at discrete priority, or if we + // otherwise suspect that it wasn't batched properly. 
+ let provider = fiber.return; + while (provider !== null) { + switch (provider.tag) { + case CacheComponent: + case HostRoot: { + const lane = requestUpdateLane(provider); + const eventTime = requestEventTime(); + const root = scheduleUpdateOnFiber(provider, lane, eventTime); + + const seededCache = new Map(); + if (seedKey !== null && seedKey !== undefined && root !== null) { + // Seed the cache with the value passed by the caller. This could be + // from a server mutation, or it could be a streaming response. + seededCache.set(seedKey, seedValue); + } + + // Schedule an update on the cache boundary to trigger a refresh. + const refreshUpdate = createUpdate(eventTime, lane); + const payload = { + cache: seededCache, + }; + refreshUpdate.payload = payload; + enqueueUpdate(provider, refreshUpdate); + return; + } + } + provider = provider.return; + } + // TODO: Warn if unmounted? +} + function dispatchAction( fiber: Fiber, queue: UpdateQueue, @@ -1818,7 +1871,16 @@ function dispatchAction( } function getCacheForType(resourceType: () => T): T { - invariant(false, 'Not implemented.'); + if (!enableCache) { + invariant(false, 'Not implemented.'); + } + const cache: Cache = readContext(CacheContext); + let cacheForType: T | void = (cache.get(resourceType): any); + if (cacheForType === undefined) { + cacheForType = resourceType(); + cache.set(resourceType, cacheForType); + } + return cacheForType; } export const ContextOnlyDispatcher: Dispatcher = { @@ -1843,6 +1905,7 @@ export const ContextOnlyDispatcher: Dispatcher = { }; if (enableCache) { (ContextOnlyDispatcher: Dispatcher).getCacheForType = getCacheForType; + (ContextOnlyDispatcher: Dispatcher).useCacheRefresh = throwInvalidHookError; } const HooksDispatcherOnMount: Dispatcher = { @@ -1865,6 +1928,10 @@ const HooksDispatcherOnMount: Dispatcher = { unstable_isNewReconciler: enableNewReconciler, }; +if (enableCache) { + (HooksDispatcherOnMount: Dispatcher).getCacheForType = getCacheForType; + (HooksDispatcherOnMount: Dispatcher).useCacheRefresh = mountRefresh; +} const HooksDispatcherOnUpdate: Dispatcher = { readContext, @@ -1888,6 +1955,7 @@ const HooksDispatcherOnUpdate: Dispatcher = { }; if (enableCache) { (HooksDispatcherOnUpdate: Dispatcher).getCacheForType = getCacheForType; + (HooksDispatcherOnUpdate: Dispatcher).useCacheRefresh = updateRefresh; } const HooksDispatcherOnRerender: Dispatcher = { @@ -1912,6 +1980,7 @@ const HooksDispatcherOnRerender: Dispatcher = { }; if (enableCache) { (HooksDispatcherOnRerender: Dispatcher).getCacheForType = getCacheForType; + (HooksDispatcherOnRerender: Dispatcher).useCacheRefresh = updateRefresh; } let HooksDispatcherOnMountInDEV: Dispatcher | null = null; @@ -2069,6 +2138,11 @@ if (__DEV__) { }; if (enableCache) { (HooksDispatcherOnMountInDEV: Dispatcher).getCacheForType = getCacheForType; + (HooksDispatcherOnMountInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() { + currentHookNameInDev = 'useCacheRefresh'; + mountHookTypesDev(); + return mountRefresh(); + }; } HooksDispatcherOnMountWithHookTypesInDEV = { @@ -2194,6 +2268,11 @@ if (__DEV__) { }; if (enableCache) { (HooksDispatcherOnMountWithHookTypesInDEV: Dispatcher).getCacheForType = getCacheForType; + (HooksDispatcherOnMountWithHookTypesInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() { + currentHookNameInDev = 'useCacheRefresh'; + updateHookTypesDev(); + return mountRefresh(); + }; } HooksDispatcherOnUpdateInDEV = { @@ -2319,6 +2398,11 @@ if (__DEV__) { }; if (enableCache) { (HooksDispatcherOnUpdateInDEV: 
Dispatcher).getCacheForType = getCacheForType; + (HooksDispatcherOnUpdateInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() { + currentHookNameInDev = 'useCacheRefresh'; + updateHookTypesDev(); + return updateRefresh(); + }; } HooksDispatcherOnRerenderInDEV = { @@ -2445,6 +2529,11 @@ if (__DEV__) { }; if (enableCache) { (HooksDispatcherOnRerenderInDEV: Dispatcher).getCacheForType = getCacheForType; + (HooksDispatcherOnRerenderInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() { + currentHookNameInDev = 'useCacheRefresh'; + updateHookTypesDev(); + return updateRefresh(); + }; } InvalidNestedHooksDispatcherOnMountInDEV = { @@ -2585,6 +2674,11 @@ if (__DEV__) { }; if (enableCache) { (InvalidNestedHooksDispatcherOnMountInDEV: Dispatcher).getCacheForType = getCacheForType; + (InvalidNestedHooksDispatcherOnMountInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() { + currentHookNameInDev = 'useCacheRefresh'; + updateHookTypesDev(); + return mountRefresh(); + }; } InvalidNestedHooksDispatcherOnUpdateInDEV = { @@ -2725,6 +2819,11 @@ if (__DEV__) { }; if (enableCache) { (InvalidNestedHooksDispatcherOnUpdateInDEV: Dispatcher).getCacheForType = getCacheForType; + (InvalidNestedHooksDispatcherOnUpdateInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() { + currentHookNameInDev = 'useCacheRefresh'; + updateHookTypesDev(); + return updateRefresh(); + }; } InvalidNestedHooksDispatcherOnRerenderInDEV = { @@ -2866,5 +2965,10 @@ if (__DEV__) { }; if (enableCache) { (InvalidNestedHooksDispatcherOnRerenderInDEV: Dispatcher).getCacheForType = getCacheForType; + (InvalidNestedHooksDispatcherOnRerenderInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() { + currentHookNameInDev = 'useCacheRefresh'; + updateHookTypesDev(); + return updateRefresh(); + }; } } diff --git a/packages/react-reconciler/src/ReactFiberHooks.old.js b/packages/react-reconciler/src/ReactFiberHooks.old.js index 49e222fa6735f..45ff2b50665cc 100644 --- a/packages/react-reconciler/src/ReactFiberHooks.old.js +++ b/packages/react-reconciler/src/ReactFiberHooks.old.js @@ -19,6 +19,7 @@ import type {HookFlags} from './ReactHookEffectTags'; import type {ReactPriorityLevel} from './ReactInternalTypes'; import type {FiberRoot} from './ReactInternalTypes'; import type {OpaqueIDType} from './ReactFiberHostConfig'; +import type {Cache} from './ReactFiberCacheComponent.old'; import ReactSharedInternals from 'shared/ReactSharedInternals'; import { @@ -46,6 +47,7 @@ import { DefaultLanePriority, } from './ReactFiberLane.old'; import {readContext} from './ReactFiberNewContext.old'; +import {HostRoot, CacheComponent} from './ReactWorkTags'; import { Update as UpdateEffect, Passive as PassiveEffect, @@ -92,6 +94,8 @@ import { import {getIsRendering} from './ReactCurrentFiber'; import {logStateUpdateScheduled} from './DebugTracing'; import {markStateUpdateScheduled} from './SchedulingProfiler'; +import {CacheContext} from './ReactFiberCacheComponent.old'; +import {createUpdate, enqueueUpdate} from './ReactUpdateQueue.old'; const {ReactCurrentDispatcher, ReactCurrentBatchConfig} = ReactSharedInternals; @@ -1707,6 +1711,55 @@ function rerenderOpaqueIdentifier(): OpaqueIDType | void { return id; } +function mountRefresh() { + const hook = mountWorkInProgressHook(); + const refresh = (hook.memoizedState = refreshCache.bind( + null, + currentlyRenderingFiber, + )); + return refresh; +} + +function updateRefresh() { + const hook = updateWorkInProgressHook(); + return hook.memoizedState; +} + +function 
refreshCache(fiber: Fiber, seedKey: ?() => T, seedValue: T) { + // TODO: Does Cache work in legacy mode? Should decide and write a test. + // TODO: Consider warning if the refresh is at discrete priority, or if we + // otherwise suspect that it wasn't batched properly. + let provider = fiber.return; + while (provider !== null) { + switch (provider.tag) { + case CacheComponent: + case HostRoot: { + const lane = requestUpdateLane(provider); + const eventTime = requestEventTime(); + const root = scheduleUpdateOnFiber(provider, lane, eventTime); + + const seededCache = new Map(); + if (seedKey !== null && seedKey !== undefined && root !== null) { + // Seed the cache with the value passed by the caller. This could be + // from a server mutation, or it could be a streaming response. + seededCache.set(seedKey, seedValue); + } + + // Schedule an update on the cache boundary to trigger a refresh. + const refreshUpdate = createUpdate(eventTime, lane); + const payload = { + cache: seededCache, + }; + refreshUpdate.payload = payload; + enqueueUpdate(provider, refreshUpdate); + return; + } + } + provider = provider.return; + } + // TODO: Warn if unmounted? +} + function dispatchAction( fiber: Fiber, queue: UpdateQueue, @@ -1818,7 +1871,16 @@ function dispatchAction( } function getCacheForType(resourceType: () => T): T { - invariant(false, 'Not implemented.'); + if (!enableCache) { + invariant(false, 'Not implemented.'); + } + const cache: Cache = readContext(CacheContext); + let cacheForType: T | void = (cache.get(resourceType): any); + if (cacheForType === undefined) { + cacheForType = resourceType(); + cache.set(resourceType, cacheForType); + } + return cacheForType; } export const ContextOnlyDispatcher: Dispatcher = { @@ -1843,6 +1905,7 @@ export const ContextOnlyDispatcher: Dispatcher = { }; if (enableCache) { (ContextOnlyDispatcher: Dispatcher).getCacheForType = getCacheForType; + (ContextOnlyDispatcher: Dispatcher).useCacheRefresh = throwInvalidHookError; } const HooksDispatcherOnMount: Dispatcher = { @@ -1865,6 +1928,10 @@ const HooksDispatcherOnMount: Dispatcher = { unstable_isNewReconciler: enableNewReconciler, }; +if (enableCache) { + (HooksDispatcherOnMount: Dispatcher).getCacheForType = getCacheForType; + (HooksDispatcherOnMount: Dispatcher).useCacheRefresh = mountRefresh; +} const HooksDispatcherOnUpdate: Dispatcher = { readContext, @@ -1888,6 +1955,7 @@ const HooksDispatcherOnUpdate: Dispatcher = { }; if (enableCache) { (HooksDispatcherOnUpdate: Dispatcher).getCacheForType = getCacheForType; + (HooksDispatcherOnUpdate: Dispatcher).useCacheRefresh = updateRefresh; } const HooksDispatcherOnRerender: Dispatcher = { @@ -1912,6 +1980,7 @@ const HooksDispatcherOnRerender: Dispatcher = { }; if (enableCache) { (HooksDispatcherOnRerender: Dispatcher).getCacheForType = getCacheForType; + (HooksDispatcherOnRerender: Dispatcher).useCacheRefresh = updateRefresh; } let HooksDispatcherOnMountInDEV: Dispatcher | null = null; @@ -2069,6 +2138,11 @@ if (__DEV__) { }; if (enableCache) { (HooksDispatcherOnMountInDEV: Dispatcher).getCacheForType = getCacheForType; + (HooksDispatcherOnMountInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() { + currentHookNameInDev = 'useCacheRefresh'; + mountHookTypesDev(); + return mountRefresh(); + }; } HooksDispatcherOnMountWithHookTypesInDEV = { @@ -2194,6 +2268,11 @@ if (__DEV__) { }; if (enableCache) { (HooksDispatcherOnMountWithHookTypesInDEV: Dispatcher).getCacheForType = getCacheForType; + (HooksDispatcherOnMountWithHookTypesInDEV: 
Dispatcher).useCacheRefresh = function useCacheRefresh() { + currentHookNameInDev = 'useCacheRefresh'; + updateHookTypesDev(); + return mountRefresh(); + }; } HooksDispatcherOnUpdateInDEV = { @@ -2319,6 +2398,11 @@ if (__DEV__) { }; if (enableCache) { (HooksDispatcherOnUpdateInDEV: Dispatcher).getCacheForType = getCacheForType; + (HooksDispatcherOnUpdateInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() { + currentHookNameInDev = 'useCacheRefresh'; + updateHookTypesDev(); + return updateRefresh(); + }; } HooksDispatcherOnRerenderInDEV = { @@ -2445,6 +2529,11 @@ if (__DEV__) { }; if (enableCache) { (HooksDispatcherOnRerenderInDEV: Dispatcher).getCacheForType = getCacheForType; + (HooksDispatcherOnRerenderInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() { + currentHookNameInDev = 'useCacheRefresh'; + updateHookTypesDev(); + return updateRefresh(); + }; } InvalidNestedHooksDispatcherOnMountInDEV = { @@ -2585,6 +2674,11 @@ if (__DEV__) { }; if (enableCache) { (InvalidNestedHooksDispatcherOnMountInDEV: Dispatcher).getCacheForType = getCacheForType; + (InvalidNestedHooksDispatcherOnMountInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() { + currentHookNameInDev = 'useCacheRefresh'; + updateHookTypesDev(); + return mountRefresh(); + }; } InvalidNestedHooksDispatcherOnUpdateInDEV = { @@ -2725,6 +2819,11 @@ if (__DEV__) { }; if (enableCache) { (InvalidNestedHooksDispatcherOnUpdateInDEV: Dispatcher).getCacheForType = getCacheForType; + (InvalidNestedHooksDispatcherOnUpdateInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() { + currentHookNameInDev = 'useCacheRefresh'; + updateHookTypesDev(); + return updateRefresh(); + }; } InvalidNestedHooksDispatcherOnRerenderInDEV = { @@ -2866,5 +2965,10 @@ if (__DEV__) { }; if (enableCache) { (InvalidNestedHooksDispatcherOnRerenderInDEV: Dispatcher).getCacheForType = getCacheForType; + (InvalidNestedHooksDispatcherOnRerenderInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() { + currentHookNameInDev = 'useCacheRefresh'; + updateHookTypesDev(); + return updateRefresh(); + }; } } diff --git a/packages/react-reconciler/src/ReactFiberLane.new.js b/packages/react-reconciler/src/ReactFiberLane.new.js index b5d8846ec2002..351035d0c341a 100644 --- a/packages/react-reconciler/src/ReactFiberLane.new.js +++ b/packages/react-reconciler/src/ReactFiberLane.new.js @@ -36,6 +36,7 @@ export type Lane = number; export type LaneMap = Array; import invariant from 'shared/invariant'; +import {enableCache} from 'shared/ReactFeatureFlags'; import { ImmediatePriority as ImmediateSchedulerPriority, @@ -737,6 +738,15 @@ export function markRootFinished(root: FiberRoot, remainingLanes: Lanes) { root.entangledLanes &= remainingLanes; + if (enableCache) { + const pooledCacheLanes = (root.pooledCacheLanes &= remainingLanes); + if (pooledCacheLanes === NoLanes) { + // None of the remaining work relies on the cache pool. Clear it so + // subsequent requests get a new cache. 
+ root.pooledCache = null; + } + } + const entanglements = root.entanglements; const eventTimes = root.eventTimes; const expirationTimes = root.expirationTimes; diff --git a/packages/react-reconciler/src/ReactFiberLane.old.js b/packages/react-reconciler/src/ReactFiberLane.old.js index f58ab20531740..7a00fe095e6c6 100644 --- a/packages/react-reconciler/src/ReactFiberLane.old.js +++ b/packages/react-reconciler/src/ReactFiberLane.old.js @@ -36,6 +36,7 @@ export type Lane = number; export type LaneMap = Array; import invariant from 'shared/invariant'; +import {enableCache} from 'shared/ReactFeatureFlags'; import { ImmediatePriority as ImmediateSchedulerPriority, @@ -737,6 +738,15 @@ export function markRootFinished(root: FiberRoot, remainingLanes: Lanes) { root.entangledLanes &= remainingLanes; + if (enableCache) { + const pooledCacheLanes = (root.pooledCacheLanes &= remainingLanes); + if (pooledCacheLanes === NoLanes) { + // None of the remaining work relies on the cache pool. Clear it so + // subsequent requests get a new cache. + root.pooledCache = null; + } + } + const entanglements = root.entanglements; const eventTimes = root.eventTimes; const expirationTimes = root.expirationTimes; diff --git a/packages/react-reconciler/src/ReactFiberNewContext.new.js b/packages/react-reconciler/src/ReactFiberNewContext.new.js index 8e387ee012ec3..2a084390dcc75 100644 --- a/packages/react-reconciler/src/ReactFiberNewContext.new.js +++ b/packages/react-reconciler/src/ReactFiberNewContext.new.js @@ -72,9 +72,11 @@ export function exitDisallowedContextReadInDEV(): void { } } -export function pushProvider(providerFiber: Fiber, nextValue: T): void { - const context: ReactContext = providerFiber.type._context; - +export function pushProvider( + providerFiber: Fiber, + context: ReactContext, + nextValue: T, +): void { if (isPrimaryRenderer) { push(valueCursor, context._currentValue, providerFiber); @@ -112,12 +114,12 @@ export function pushProvider(providerFiber: Fiber, nextValue: T): void { } } -export function popProvider(providerFiber: Fiber): void { +export function popProvider( + context: ReactContext, + providerFiber: Fiber, +): void { const currentValue = valueCursor.current; - pop(valueCursor, providerFiber); - - const context: ReactContext = providerFiber.type._context; if (isPrimaryRenderer) { context._currentValue = currentValue; } else { @@ -179,9 +181,9 @@ export function scheduleWorkOnParentPath( } } -export function propagateContextChange( +export function propagateContextChange( workInProgress: Fiber, - context: ReactContext, + context: ReactContext, changedBits: number, renderLanes: Lanes, ): void { diff --git a/packages/react-reconciler/src/ReactFiberNewContext.old.js b/packages/react-reconciler/src/ReactFiberNewContext.old.js index c7f03442d5797..934bc6b3fcd5c 100644 --- a/packages/react-reconciler/src/ReactFiberNewContext.old.js +++ b/packages/react-reconciler/src/ReactFiberNewContext.old.js @@ -72,9 +72,11 @@ export function exitDisallowedContextReadInDEV(): void { } } -export function pushProvider(providerFiber: Fiber, nextValue: T): void { - const context: ReactContext = providerFiber.type._context; - +export function pushProvider( + providerFiber: Fiber, + context: ReactContext, + nextValue: T, +): void { if (isPrimaryRenderer) { push(valueCursor, context._currentValue, providerFiber); @@ -112,12 +114,12 @@ export function pushProvider(providerFiber: Fiber, nextValue: T): void { } } -export function popProvider(providerFiber: Fiber): void { +export function popProvider( + context: 
ReactContext, + providerFiber: Fiber, +): void { const currentValue = valueCursor.current; - pop(valueCursor, providerFiber); - - const context: ReactContext = providerFiber.type._context; if (isPrimaryRenderer) { context._currentValue = currentValue; } else { @@ -179,9 +181,9 @@ export function scheduleWorkOnParentPath( } } -export function propagateContextChange( +export function propagateContextChange( workInProgress: Fiber, - context: ReactContext, + context: ReactContext, changedBits: number, renderLanes: Lanes, ): void { diff --git a/packages/react-reconciler/src/ReactFiberOffscreenComponent.js b/packages/react-reconciler/src/ReactFiberOffscreenComponent.js index 4a013a9d7eaf9..acae8d206194b 100644 --- a/packages/react-reconciler/src/ReactFiberOffscreenComponent.js +++ b/packages/react-reconciler/src/ReactFiberOffscreenComponent.js @@ -9,6 +9,7 @@ import type {ReactNodeList} from 'shared/ReactTypes'; import type {Lanes} from './ReactFiberLane.old'; +import type {SpawnedCachePool} from './ReactFiberCacheComponent.new'; export type OffscreenProps = {| // TODO: Pick an API before exposing the Offscreen type. I've chosen an enum @@ -28,4 +29,5 @@ export type OffscreenState = {| // will represent the pending work that must be included in the render in // order to unhide the component. baseLanes: Lanes, + cachePool: SpawnedCachePool | null, |}; diff --git a/packages/react-reconciler/src/ReactFiberRoot.new.js b/packages/react-reconciler/src/ReactFiberRoot.new.js index 1b504a18fc326..9057137ec61a8 100644 --- a/packages/react-reconciler/src/ReactFiberRoot.new.js +++ b/packages/react-reconciler/src/ReactFiberRoot.new.js @@ -21,6 +21,7 @@ import { import { enableSchedulerTracing, enableSuspenseCallback, + enableCache, } from 'shared/ReactFeatureFlags'; import {unstable_getThreadID} from 'scheduler/tracing'; import {initializeUpdateQueue} from './ReactUpdateQueue.new'; @@ -52,6 +53,11 @@ function FiberRootNode(containerInfo, tag, hydrate) { this.entangledLanes = NoLanes; this.entanglements = createLaneMap(NoLanes); + if (enableCache) { + this.pooledCache = null; + this.pooledCacheLanes = NoLanes; + } + if (supportsHydration) { this.mutableSourceEagerHydrationData = null; } @@ -97,6 +103,21 @@ export function createFiberRoot( root.current = uninitializedFiber; uninitializedFiber.stateNode = root; + if (enableCache) { + const initialCache = new Map(); + root.pooledCache = initialCache; + const initialState = { + element: null, + cache: initialCache, + }; + uninitializedFiber.memoizedState = initialState; + } else { + const initialState = { + element: null, + }; + uninitializedFiber.memoizedState = initialState; + } + initializeUpdateQueue(uninitializedFiber); return root; diff --git a/packages/react-reconciler/src/ReactFiberRoot.old.js b/packages/react-reconciler/src/ReactFiberRoot.old.js index 84509babdb51a..92ec811dd5589 100644 --- a/packages/react-reconciler/src/ReactFiberRoot.old.js +++ b/packages/react-reconciler/src/ReactFiberRoot.old.js @@ -21,6 +21,7 @@ import { import { enableSchedulerTracing, enableSuspenseCallback, + enableCache, } from 'shared/ReactFeatureFlags'; import {unstable_getThreadID} from 'scheduler/tracing'; import {initializeUpdateQueue} from './ReactUpdateQueue.old'; @@ -52,6 +53,11 @@ function FiberRootNode(containerInfo, tag, hydrate) { this.entangledLanes = NoLanes; this.entanglements = createLaneMap(NoLanes); + if (enableCache) { + this.pooledCache = null; + this.pooledCacheLanes = NoLanes; + } + if (supportsHydration) { this.mutableSourceEagerHydrationData = null; } @@ 
-97,6 +103,21 @@ export function createFiberRoot( root.current = uninitializedFiber; uninitializedFiber.stateNode = root; + if (enableCache) { + const initialCache = new Map(); + root.pooledCache = initialCache; + const initialState = { + element: null, + cache: initialCache, + }; + uninitializedFiber.memoizedState = initialState; + } else { + const initialState = { + element: null, + }; + uninitializedFiber.memoizedState = initialState; + } + initializeUpdateQueue(uninitializedFiber); return root; diff --git a/packages/react-reconciler/src/ReactFiberThrow.new.js b/packages/react-reconciler/src/ReactFiberThrow.new.js index 058b46be0040e..ea670b69f070a 100644 --- a/packages/react-reconciler/src/ReactFiberThrow.new.js +++ b/packages/react-reconciler/src/ReactFiberThrow.new.js @@ -21,6 +21,9 @@ import { HostRoot, SuspenseComponent, IncompleteClassComponent, + FunctionComponent, + ForwardRef, + SimpleMemoComponent, } from './ReactWorkTags'; import { DidCapture, @@ -209,9 +212,15 @@ function throwException( markComponentSuspended(sourceFiber, wakeable); } - if ((sourceFiber.mode & BlockingMode) === NoMode) { - // Reset the memoizedState to what it was before we attempted - // to render it. + // Reset the memoizedState to what it was before we attempted to render it. + // A legacy mode Suspense quirk, only relevant to hook components. + const tag = sourceFiber.tag; + if ( + (sourceFiber.mode & BlockingMode) === NoMode && + (tag === FunctionComponent || + tag === ForwardRef || + tag === SimpleMemoComponent) + ) { const currentSource = sourceFiber.alternate; if (currentSource) { sourceFiber.updateQueue = currentSource.updateQueue; diff --git a/packages/react-reconciler/src/ReactFiberThrow.old.js b/packages/react-reconciler/src/ReactFiberThrow.old.js index fbb9daa452625..781d523b48815 100644 --- a/packages/react-reconciler/src/ReactFiberThrow.old.js +++ b/packages/react-reconciler/src/ReactFiberThrow.old.js @@ -21,6 +21,9 @@ import { HostRoot, SuspenseComponent, IncompleteClassComponent, + FunctionComponent, + ForwardRef, + SimpleMemoComponent, } from './ReactWorkTags'; import { DidCapture, @@ -209,9 +212,15 @@ function throwException( markComponentSuspended(sourceFiber, wakeable); } - if ((sourceFiber.mode & BlockingMode) === NoMode) { - // Reset the memoizedState to what it was before we attempted - // to render it. + // Reset the memoizedState to what it was before we attempted to render it. + // A legacy mode Suspense quirk, only relevant to hook components. 
+ const tag = sourceFiber.tag; + if ( + (sourceFiber.mode & BlockingMode) === NoMode && + (tag === FunctionComponent || + tag === ForwardRef || + tag === SimpleMemoComponent) + ) { const currentSource = sourceFiber.alternate; if (currentSource) { sourceFiber.updateQueue = currentSource.updateQueue; diff --git a/packages/react-reconciler/src/ReactFiberUnwindWork.new.js b/packages/react-reconciler/src/ReactFiberUnwindWork.new.js index 8d445cedd44f9..152837286f5d2 100644 --- a/packages/react-reconciler/src/ReactFiberUnwindWork.new.js +++ b/packages/react-reconciler/src/ReactFiberUnwindWork.new.js @@ -7,9 +7,11 @@ * @flow */ -import type {Fiber} from './ReactInternalTypes'; +import type {ReactContext} from 'shared/ReactTypes'; +import type {Fiber, FiberRoot} from './ReactInternalTypes'; import type {Lanes} from './ReactFiberLane.new'; import type {SuspenseState} from './ReactFiberSuspenseComponent.new'; +import type {Cache, SpawnedCachePool} from './ReactFiberCacheComponent.new'; import {resetWorkInProgressVersions as resetMutableSourceWorkInProgressVersions} from './ReactMutableSource.new'; import { @@ -22,12 +24,14 @@ import { SuspenseListComponent, OffscreenComponent, LegacyHiddenComponent, + CacheComponent, } from './ReactWorkTags'; import {DidCapture, NoFlags, ShouldCapture} from './ReactFiberFlags'; import {NoMode, ProfileMode} from './ReactTypeOfMode'; import { enableSuspenseServerRenderer, enableProfilerTimer, + enableCache, } from 'shared/ReactFeatureFlags'; import {popHostContainer, popHostContext} from './ReactFiberHostContext.new'; @@ -40,6 +44,11 @@ import { } from './ReactFiberContext.new'; import {popProvider} from './ReactFiberNewContext.new'; import {popRenderLanes} from './ReactFiberWorkLoop.new'; +import { + popCacheProvider, + popRootCachePool, + popCachePool, +} from './ReactFiberCacheComponent.new'; import {transferActualDuration} from './ReactProfilerTimer.new'; import invariant from 'shared/invariant'; @@ -65,6 +74,13 @@ function unwindWork(workInProgress: Fiber, renderLanes: Lanes) { return null; } case HostRoot: { + if (enableCache) { + const root: FiberRoot = workInProgress.stateNode; + popRootCachePool(root, renderLanes); + + const cache: Cache = workInProgress.memoizedState.cache; + popCacheProvider(workInProgress, cache); + } popHostContainer(workInProgress); popTopLevelLegacyContextObject(workInProgress); resetMutableSourceWorkInProgressVersions(); @@ -120,18 +136,31 @@ function unwindWork(workInProgress: Fiber, renderLanes: Lanes) { popHostContainer(workInProgress); return null; case ContextProvider: - popProvider(workInProgress); + const context: ReactContext = workInProgress.type._context; + popProvider(context, workInProgress); return null; case OffscreenComponent: case LegacyHiddenComponent: popRenderLanes(workInProgress); + if (enableCache) { + const spawnedCachePool: SpawnedCachePool | null = (workInProgress.updateQueue: any); + if (spawnedCachePool !== null) { + popCachePool(workInProgress); + } + } + return null; + case CacheComponent: + if (enableCache) { + const cache: Cache = workInProgress.memoizedState.cache; + popCacheProvider(workInProgress, cache); + } return null; default: return null; } } -function unwindInterruptedWork(interruptedWork: Fiber) { +function unwindInterruptedWork(interruptedWork: Fiber, renderLanes: Lanes) { switch (interruptedWork.tag) { case ClassComponent: { const childContextTypes = interruptedWork.type.childContextTypes; @@ -141,6 +170,13 @@ function unwindInterruptedWork(interruptedWork: Fiber) { break; } case HostRoot: { + 
if (enableCache) { + const root: FiberRoot = interruptedWork.stateNode; + popRootCachePool(root, renderLanes); + + const cache: Cache = interruptedWork.memoizedState.cache; + popCacheProvider(interruptedWork, cache); + } popHostContainer(interruptedWork); popTopLevelLegacyContextObject(interruptedWork); resetMutableSourceWorkInProgressVersions(); @@ -160,11 +196,25 @@ function unwindInterruptedWork(interruptedWork: Fiber) { popSuspenseContext(interruptedWork); break; case ContextProvider: - popProvider(interruptedWork); + const context: ReactContext = interruptedWork.type._context; + popProvider(context, interruptedWork); break; case OffscreenComponent: case LegacyHiddenComponent: popRenderLanes(interruptedWork); + if (enableCache) { + const spawnedCachePool: SpawnedCachePool | null = (interruptedWork.updateQueue: any); + if (spawnedCachePool !== null) { + popCachePool(interruptedWork); + } + } + + break; + case CacheComponent: + if (enableCache) { + const cache: Cache = interruptedWork.memoizedState.cache; + popCacheProvider(interruptedWork, cache); + } break; default: break; diff --git a/packages/react-reconciler/src/ReactFiberUnwindWork.old.js b/packages/react-reconciler/src/ReactFiberUnwindWork.old.js index 3590f3ea4f1b1..88861db778be3 100644 --- a/packages/react-reconciler/src/ReactFiberUnwindWork.old.js +++ b/packages/react-reconciler/src/ReactFiberUnwindWork.old.js @@ -7,9 +7,11 @@ * @flow */ -import type {Fiber} from './ReactInternalTypes'; +import type {ReactContext} from 'shared/ReactTypes'; +import type {Fiber, FiberRoot} from './ReactInternalTypes'; import type {Lanes} from './ReactFiberLane.old'; import type {SuspenseState} from './ReactFiberSuspenseComponent.old'; +import type {Cache, SpawnedCachePool} from './ReactFiberCacheComponent.old'; import {resetWorkInProgressVersions as resetMutableSourceWorkInProgressVersions} from './ReactMutableSource.old'; import { @@ -22,12 +24,14 @@ import { SuspenseListComponent, OffscreenComponent, LegacyHiddenComponent, + CacheComponent, } from './ReactWorkTags'; import {DidCapture, NoFlags, ShouldCapture} from './ReactFiberFlags'; import {NoMode, ProfileMode} from './ReactTypeOfMode'; import { enableSuspenseServerRenderer, enableProfilerTimer, + enableCache, } from 'shared/ReactFeatureFlags'; import {popHostContainer, popHostContext} from './ReactFiberHostContext.old'; @@ -40,6 +44,11 @@ import { } from './ReactFiberContext.old'; import {popProvider} from './ReactFiberNewContext.old'; import {popRenderLanes} from './ReactFiberWorkLoop.old'; +import { + popCacheProvider, + popRootCachePool, + popCachePool, +} from './ReactFiberCacheComponent.old'; import {transferActualDuration} from './ReactProfilerTimer.old'; import invariant from 'shared/invariant'; @@ -65,6 +74,13 @@ function unwindWork(workInProgress: Fiber, renderLanes: Lanes) { return null; } case HostRoot: { + if (enableCache) { + const root: FiberRoot = workInProgress.stateNode; + popRootCachePool(root, renderLanes); + + const cache: Cache = workInProgress.memoizedState.cache; + popCacheProvider(workInProgress, cache); + } popHostContainer(workInProgress); popTopLevelLegacyContextObject(workInProgress); resetMutableSourceWorkInProgressVersions(); @@ -120,18 +136,31 @@ function unwindWork(workInProgress: Fiber, renderLanes: Lanes) { popHostContainer(workInProgress); return null; case ContextProvider: - popProvider(workInProgress); + const context: ReactContext = workInProgress.type._context; + popProvider(context, workInProgress); return null; case OffscreenComponent: case 
LegacyHiddenComponent: popRenderLanes(workInProgress); + if (enableCache) { + const spawnedCachePool: SpawnedCachePool | null = (workInProgress.updateQueue: any); + if (spawnedCachePool !== null) { + popCachePool(workInProgress); + } + } + return null; + case CacheComponent: + if (enableCache) { + const cache: Cache = workInProgress.memoizedState.cache; + popCacheProvider(workInProgress, cache); + } return null; default: return null; } } -function unwindInterruptedWork(interruptedWork: Fiber) { +function unwindInterruptedWork(interruptedWork: Fiber, renderLanes: Lanes) { switch (interruptedWork.tag) { case ClassComponent: { const childContextTypes = interruptedWork.type.childContextTypes; @@ -141,6 +170,13 @@ function unwindInterruptedWork(interruptedWork: Fiber) { break; } case HostRoot: { + if (enableCache) { + const root: FiberRoot = interruptedWork.stateNode; + popRootCachePool(root, renderLanes); + + const cache: Cache = interruptedWork.memoizedState.cache; + popCacheProvider(interruptedWork, cache); + } popHostContainer(interruptedWork); popTopLevelLegacyContextObject(interruptedWork); resetMutableSourceWorkInProgressVersions(); @@ -160,11 +196,25 @@ function unwindInterruptedWork(interruptedWork: Fiber) { popSuspenseContext(interruptedWork); break; case ContextProvider: - popProvider(interruptedWork); + const context: ReactContext = interruptedWork.type._context; + popProvider(context, interruptedWork); break; case OffscreenComponent: case LegacyHiddenComponent: popRenderLanes(interruptedWork); + if (enableCache) { + const spawnedCachePool: SpawnedCachePool | null = (interruptedWork.updateQueue: any); + if (spawnedCachePool !== null) { + popCachePool(interruptedWork); + } + } + + break; + case CacheComponent: + if (enableCache) { + const cache: Cache = interruptedWork.memoizedState.cache; + popCacheProvider(interruptedWork, cache); + } break; default: break; diff --git a/packages/react-reconciler/src/ReactFiberWorkLoop.new.js b/packages/react-reconciler/src/ReactFiberWorkLoop.new.js index 390feac653140..03831988e2b2b 100644 --- a/packages/react-reconciler/src/ReactFiberWorkLoop.new.js +++ b/packages/react-reconciler/src/ReactFiberWorkLoop.new.js @@ -531,7 +531,7 @@ export function scheduleUpdateOnFiber( fiber: Fiber, lane: Lane, eventTime: number, -) { +): FiberRoot | null { checkForNestedUpdates(); warnAboutRenderPhaseUpdatesInDEV(fiber); @@ -653,6 +653,8 @@ export function scheduleUpdateOnFiber( // the same root, then it's not a huge deal, we just might batch more stuff // together more than necessary. mostRecentlyUpdatedRoot = root; + + return root; } // This is split into a separate function so we can mark a fiber with pending @@ -1371,7 +1373,7 @@ function prepareFreshStack(root: FiberRoot, lanes: Lanes) { if (workInProgress !== null) { let interruptedWork = workInProgress.return; while (interruptedWork !== null) { - unwindInterruptedWork(interruptedWork); + unwindInterruptedWork(interruptedWork, workInProgressRootRenderLanes); interruptedWork = interruptedWork.return; } } @@ -2303,7 +2305,10 @@ function commitBeforeMutationEffects() { } } -function commitMutationEffects(root: FiberRoot, renderPriorityLevel) { +function commitMutationEffects( + root: FiberRoot, + renderPriorityLevel: ReactPriorityLevel, +) { // TODO: Should probably move the bulk of this function to commitWork. while (nextEffect !== null) { setCurrentDebugFiberInDEV(nextEffect); @@ -3047,7 +3052,7 @@ if (__DEV__ && replayFailedUnitOfWorkWithInvokeGuardedCallback) { // same fiber again. 
// Unwind the failed stack frame - unwindInterruptedWork(unitOfWork); + unwindInterruptedWork(unitOfWork, workInProgressRootRenderLanes); // Restore the original properties of the fiber. assignFiberPropertiesInDEV(unitOfWork, originalWorkInProgressCopy); diff --git a/packages/react-reconciler/src/ReactFiberWorkLoop.old.js b/packages/react-reconciler/src/ReactFiberWorkLoop.old.js index 0aa21dbfa85b4..6966472e0d16f 100644 --- a/packages/react-reconciler/src/ReactFiberWorkLoop.old.js +++ b/packages/react-reconciler/src/ReactFiberWorkLoop.old.js @@ -531,7 +531,7 @@ export function scheduleUpdateOnFiber( fiber: Fiber, lane: Lane, eventTime: number, -) { +): FiberRoot | null { checkForNestedUpdates(); warnAboutRenderPhaseUpdatesInDEV(fiber); @@ -653,6 +653,8 @@ export function scheduleUpdateOnFiber( // the same root, then it's not a huge deal, we just might batch more stuff // together more than necessary. mostRecentlyUpdatedRoot = root; + + return root; } // This is split into a separate function so we can mark a fiber with pending @@ -1371,7 +1373,7 @@ function prepareFreshStack(root: FiberRoot, lanes: Lanes) { if (workInProgress !== null) { let interruptedWork = workInProgress.return; while (interruptedWork !== null) { - unwindInterruptedWork(interruptedWork); + unwindInterruptedWork(interruptedWork, workInProgressRootRenderLanes); interruptedWork = interruptedWork.return; } } @@ -2303,7 +2305,10 @@ function commitBeforeMutationEffects() { } } -function commitMutationEffects(root: FiberRoot, renderPriorityLevel) { +function commitMutationEffects( + root: FiberRoot, + renderPriorityLevel: ReactPriorityLevel, +) { // TODO: Should probably move the bulk of this function to commitWork. while (nextEffect !== null) { setCurrentDebugFiberInDEV(nextEffect); @@ -3047,7 +3052,7 @@ if (__DEV__ && replayFailedUnitOfWorkWithInvokeGuardedCallback) { // same fiber again. // Unwind the failed stack frame - unwindInterruptedWork(unitOfWork); + unwindInterruptedWork(unitOfWork, workInProgressRootRenderLanes); // Restore the original properties of the fiber. assignFiberPropertiesInDEV(unitOfWork, originalWorkInProgressCopy); diff --git a/packages/react-reconciler/src/ReactInternalTypes.js b/packages/react-reconciler/src/ReactInternalTypes.js index c48d53fbb919b..cb34ac74948e0 100644 --- a/packages/react-reconciler/src/ReactInternalTypes.js +++ b/packages/react-reconciler/src/ReactInternalTypes.js @@ -25,6 +25,7 @@ import type {RootTag} from './ReactRootTags'; import type {TimeoutHandle, NoTimeout} from './ReactFiberHostConfig'; import type {Wakeable} from 'shared/ReactTypes'; import type {Interaction} from 'scheduler/src/Tracing'; +import type {Cache} from './ReactFiberCacheComponent.old'; // Unwind Circular: moved from ReactFiberHooks.old export type HookType = @@ -41,7 +42,8 @@ export type HookType = | 'useDeferredValue' | 'useTransition' | 'useMutableSource' - | 'useOpaqueIdentifier'; + | 'useOpaqueIdentifier' + | 'useCacheRefresh'; export type ReactPriorityLevel = 99 | 98 | 97 | 96 | 95 | 90; @@ -235,6 +237,9 @@ type BaseFiberRootProperties = {| entangledLanes: Lanes, entanglements: LaneMap, + + pooledCache: Cache | null, + pooledCacheLanes: Lanes, |}; // The following attributes are only used by interaction tracing builds. 
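(Aside, not part of the patch: the Dispatcher addition in the next hunk is easier to read with a consumer-level sketch in mind. The following is illustrative only; fetchText is an assumed network helper, and the record shape mirrors the ReactCache tests added later in this diff. It shows how a userland data layer would combine the per-type cache with the refresh hook that the Dispatcher now exposes.)

// Illustrative userland sketch (not part of this diff). Assumes the
// experimental exports added elsewhere in this patch plus a hypothetical
// fetchText() helper.
import * as React from 'react';

const {
  unstable_getCacheForType: getCacheForType,
  unstable_useCacheRefresh: useCacheRefresh,
  unstable_startTransition: startTransition,
} = React;

// Hypothetical network helper; stands in for whatever the app uses.
const fetchText = url => fetch(url).then(response => response.text());

// The factory doubles as the cache key: every call site that passes
// createTextCache to getCacheForType reads from the same Map.
function createTextCache() {
  return new Map();
}

function readText(url) {
  const textCache = getCacheForType(createTextCache);
  let record = textCache.get(url);
  if (record === undefined) {
    // First read for this url in the current cache: start the request and
    // store a pending record so later reads can suspend on the same promise.
    record = {status: 'pending', value: null};
    record.value = fetchText(url).then(text => {
      if (record.status === 'pending') {
        record.status = 'resolved';
        record.value = text;
      }
    });
    textCache.set(url, record);
  }
  if (record.status === 'pending') {
    throw record.value; // Suspend until the request resolves.
  }
  return record.value;
}

function RefreshButton() {
  // Returns a function that schedules a refresh on the nearest Cache
  // boundary, or on the host root if there is no boundary above this
  // component.
  const refresh = useCacheRefresh();
  return (
    <button onClick={() => startTransition(() => refresh())}>Refresh</button>
  );
}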
@@ -314,6 +319,7 @@ export type Dispatcher = {| subscribe: MutableSourceSubscribeFn, ): Snapshot, useOpaqueIdentifier(): any, + useCacheRefresh?: () => (?() => T, ?T) => void, unstable_isNewReconciler?: boolean, |}; diff --git a/packages/react-reconciler/src/ReactWorkTags.js b/packages/react-reconciler/src/ReactWorkTags.js index 84cd9ebd4ba55..65dba28b367a3 100644 --- a/packages/react-reconciler/src/ReactWorkTags.js +++ b/packages/react-reconciler/src/ReactWorkTags.js @@ -31,7 +31,8 @@ export type WorkTag = | 20 | 21 | 22 - | 23; + | 23 + | 24; export const FunctionComponent = 0; export const ClassComponent = 1; @@ -57,3 +58,4 @@ export const FundamentalComponent = 20; export const ScopeComponent = 21; export const OffscreenComponent = 22; export const LegacyHiddenComponent = 23; +export const CacheComponent = 24; diff --git a/packages/react-reconciler/src/__tests__/ReactCache-test.js b/packages/react-reconciler/src/__tests__/ReactCache-test.js new file mode 100644 index 0000000000000..1574fc222898f --- /dev/null +++ b/packages/react-reconciler/src/__tests__/ReactCache-test.js @@ -0,0 +1,916 @@ +let React; +let ReactNoop; +let Cache; +let getCacheForType; +let Scheduler; +let Suspense; +let useCacheRefresh; +let startTransition; +let useState; + +let textService; +let textServiceVersion; + +describe('ReactCache', () => { + beforeEach(() => { + jest.resetModules(); + + React = require('react'); + ReactNoop = require('react-noop-renderer'); + Cache = React.unstable_Cache; + Scheduler = require('scheduler'); + Suspense = React.Suspense; + getCacheForType = React.unstable_getCacheForType; + useCacheRefresh = React.unstable_useCacheRefresh; + startTransition = React.unstable_startTransition; + useState = React.useState; + + // Represents some data service that returns text. It likely has additional + // caching layers, like a CDN or the local browser cache. It can be mutated + // or emptied independently of the React cache. + textService = new Map(); + textServiceVersion = 1; + }); + + function createTextCache() { + return new Map(); + } + + function readText(text) { + const textCache = getCacheForType(createTextCache); + const record = textCache.get(text); + if (record !== undefined) { + switch (record.status) { + case 'pending': + throw record.value; + case 'rejected': + throw record.value; + case 'resolved': + return record.value; + } + } else { + Scheduler.unstable_yieldValue(`Cache miss! [${text}]`); + + let request = textService.get(text); + if (request === undefined) { + let resolve; + let reject; + request = new Promise((res, rej) => { + resolve = res; + reject = rej; + }); + request.resolve = resolve; + request.reject = reject; + + // Add the request to a backing cache. This may outlive the lifetime + // of the component that is currently reading the data. 
+ textService.set(text, request); + } + + const thenable = request.then( + value => { + if (newRecord.status === 'pending') { + newRecord.status = 'resolved'; + newRecord.value = value; + } + }, + error => { + if (newRecord.status === 'pending') { + newRecord.status = 'rejected'; + newRecord.value = error; + } + }, + ); + + const newRecord = { + ping: null, + status: 'pending', + value: thenable, + }; + textCache.set(text, newRecord); + + throw thenable; + } + } + + function mutateRemoteTextService() { + textService = new Map(); + textServiceVersion++; + } + + function resolveText(text) { + const request = textService.get(text); + if (request !== undefined) { + request.resolve(textServiceVersion); + return request; + } else { + const newRequest = Promise.resolve(textServiceVersion); + newRequest.resolve = newRequest.reject = () => {}; + textService.set(text, newRequest); + return newRequest; + } + } + + function Text({text}) { + Scheduler.unstable_yieldValue(text); + return text; + } + + function AsyncText({text, showVersion}) { + const version = readText(text); + const fullText = showVersion ? `${text} [v${version}]` : text; + Scheduler.unstable_yieldValue(fullText); + return fullText; + } + + // @gate experimental + test('render Cache component', async () => { + const root = ReactNoop.createRoot(); + await ReactNoop.act(async () => { + root.render(Hi); + }); + expect(root).toMatchRenderedOutput('Hi'); + }); + + // @gate experimental + test('mount new data', async () => { + const root = ReactNoop.createRoot(); + await ReactNoop.act(async () => { + root.render( + + }> + + + , + ); + }); + expect(Scheduler).toHaveYielded(['Cache miss! [A]', 'Loading...']); + expect(root).toMatchRenderedOutput('Loading...'); + + await ReactNoop.act(async () => { + await resolveText('A'); + }); + expect(Scheduler).toHaveYielded(['A']); + expect(root).toMatchRenderedOutput('A'); + }); + + // @gate experimental + test('root acts as implicit cache boundary', async () => { + const root = ReactNoop.createRoot(); + await ReactNoop.act(async () => { + root.render( + }> + + , + ); + }); + expect(Scheduler).toHaveYielded(['Cache miss! [A]', 'Loading...']); + expect(root).toMatchRenderedOutput('Loading...'); + + await ReactNoop.act(async () => { + await resolveText('A'); + }); + expect(Scheduler).toHaveYielded(['A']); + expect(root).toMatchRenderedOutput('A'); + }); + + // @gate experimental + test('multiple new Cache boundaries in the same update share the same, fresh cache', async () => { + function App({text}) { + return ( + <> + + }> + + + + + }> + + + + + ); + } + + const root = ReactNoop.createRoot(); + await ReactNoop.act(async () => { + root.render(); + }); + // Even though there are two new trees, they should share the same + // data cache. So there should be only a single cache miss for A. + expect(Scheduler).toHaveYielded([ + 'Cache miss! [A]', + 'Loading...', + 'Loading...', + ]); + expect(root).toMatchRenderedOutput('Loading...Loading...'); + + await ReactNoop.act(async () => { + await resolveText('A'); + }); + expect(Scheduler).toHaveYielded(['A', 'A']); + expect(root).toMatchRenderedOutput('AA'); + }); + + // @gate experimental + test( + 'nested cache boundaries share the same cache as the root during ' + + 'the initial render', + async () => { + function App() { + return ( + }> + + + + + + ); + } + + const root = ReactNoop.createRoot(); + await ReactNoop.act(async () => { + root.render(); + }); + // Even though there are two new trees, they should share the same + // data cache. 
So there should be only a single cache miss for A. + expect(Scheduler).toHaveYielded(['Cache miss! [A]', 'Loading...']); + expect(root).toMatchRenderedOutput('Loading...'); + + await ReactNoop.act(async () => { + await resolveText('A'); + }); + expect(Scheduler).toHaveYielded(['A', 'A']); + expect(root).toMatchRenderedOutput('AA'); + }, + ); + + // @gate experimental + test('new content inside an existing Cache boundary should re-use already cached data', async () => { + function App({showMore}) { + return ( + + }> + + + {showMore ? ( + }> + + + ) : null} + + ); + } + + const root = ReactNoop.createRoot(); + await ReactNoop.act(async () => { + await resolveText('A'); + root.render(); + }); + expect(Scheduler).toHaveYielded([ + 'Cache miss! [A]', + 'Loading...', + 'A [v1]', + ]); + expect(root).toMatchRenderedOutput('A [v1]'); + + // Simulate a server mutation. + mutateRemoteTextService(); + + // Add a new cache boundary + await ReactNoop.act(async () => { + await resolveText('A'); + root.render(); + }); + expect(Scheduler).toHaveYielded([ + 'A [v1]', + // New tree should use already cached data + 'A [v1]', + ]); + expect(root).toMatchRenderedOutput('A [v1]A [v1]'); + }); + + // @gate experimental + test('a new Cache boundary uses fresh cache', async () => { + // The only difference from the previous test is that the "Show More" + // content is wrapped in a nested boundary + function App({showMore}) { + return ( + + }> + + + {showMore ? ( + + }> + + + + ) : null} + + ); + } + + const root = ReactNoop.createRoot(); + await ReactNoop.act(async () => { + await resolveText('A'); + root.render(); + }); + expect(Scheduler).toHaveYielded([ + 'Cache miss! [A]', + 'Loading...', + 'A [v1]', + ]); + expect(root).toMatchRenderedOutput('A [v1]'); + + // Simulate a server mutation. + mutateRemoteTextService(); + + // Add a new cache boundary + await ReactNoop.act(async () => { + await resolveText('A'); + root.render(); + }); + expect(Scheduler).toHaveYielded([ + 'A [v1]', + // New tree should load fresh data. + 'Cache miss! [A]', + 'Loading...', + 'A [v2]', + ]); + expect(root).toMatchRenderedOutput('A [v1]A [v2]'); + }); + + // @gate experimental + test('inner content uses same cache as shell if spawned by the same transition', async () => { + const root = ReactNoop.createRoot(); + + function App() { + return ( + + }> + {/* The shell reads A */} + + {/* The inner content reads both A and B */} + }> + + + + + + + + ); + } + + function Shell({children}) { + readText('A'); + return ( + <> +
<div> + <Text text="Shell" /> + </div> + <div>{children}</div>
+ </> + ); + } + + function Content() { + readText('A'); + readText('B'); + return <Text text="Content" />; + } + + await ReactNoop.act(async () => { + root.render(<App />); + }); + expect(Scheduler).toHaveYielded(['Cache miss! [A]', 'Loading shell...']); + expect(root).toMatchRenderedOutput('Loading shell...'); + + await ReactNoop.act(async () => { + await resolveText('A'); + }); + expect(Scheduler).toHaveYielded([ + 'Shell', + // There's a cache miss for B, because it hasn't been read yet. But not + // A, because it was cached when we rendered the shell. + 'Cache miss! [B]', + 'Loading content...', + ]); + expect(root).toMatchRenderedOutput( + <> + <div>Shell</div> + <div>Loading content...</div> + </>,
+ ); + + await ReactNoop.act(async () => { + await resolveText('B'); + }); + expect(Scheduler).toHaveYielded(['Content']); + expect(root).toMatchRenderedOutput( + <> + <div>Shell</div> + <div>Content</div> + </>
+ , + ); + }); + + // @gate experimental + test('refresh a cache', async () => { + let refresh; + function App() { + refresh = useCacheRefresh(); + return ; + } + + // Mount initial data + const root = ReactNoop.createRoot(); + await ReactNoop.act(async () => { + root.render( + + }> + + + , + ); + }); + expect(Scheduler).toHaveYielded(['Cache miss! [A]', 'Loading...']); + expect(root).toMatchRenderedOutput('Loading...'); + + await ReactNoop.act(async () => { + await resolveText('A'); + }); + expect(Scheduler).toHaveYielded(['A [v1]']); + expect(root).toMatchRenderedOutput('A [v1]'); + + // Mutate the text service, then refresh for new data. + mutateRemoteTextService(); + await ReactNoop.act(async () => { + startTransition(() => refresh()); + }); + expect(Scheduler).toHaveYielded(['Cache miss! [A]', 'Loading...']); + expect(root).toMatchRenderedOutput('A [v1]'); + + await ReactNoop.act(async () => { + await resolveText('A'); + }); + // Note that the version has updated + expect(Scheduler).toHaveYielded(['A [v2]']); + expect(root).toMatchRenderedOutput('A [v2]'); + }); + + // @gate experimental + test('refresh the root cache', async () => { + let refresh; + function App() { + refresh = useCacheRefresh(); + return ; + } + + // Mount initial data + const root = ReactNoop.createRoot(); + await ReactNoop.act(async () => { + root.render( + }> + + , + ); + }); + expect(Scheduler).toHaveYielded(['Cache miss! [A]', 'Loading...']); + expect(root).toMatchRenderedOutput('Loading...'); + + await ReactNoop.act(async () => { + await resolveText('A'); + }); + expect(Scheduler).toHaveYielded(['A [v1]']); + expect(root).toMatchRenderedOutput('A [v1]'); + + // Mutate the text service, then refresh for new data. + mutateRemoteTextService(); + await ReactNoop.act(async () => { + startTransition(() => refresh()); + }); + expect(Scheduler).toHaveYielded(['Cache miss! [A]', 'Loading...']); + expect(root).toMatchRenderedOutput('A [v1]'); + + await ReactNoop.act(async () => { + await resolveText('A'); + }); + // Note that the version has updated + expect(Scheduler).toHaveYielded(['A [v2]']); + expect(root).toMatchRenderedOutput('A [v2]'); + }); + + // @gate experimental + test('refresh a cache with seed data', async () => { + let refresh; + function App() { + refresh = useCacheRefresh(); + return ; + } + + // Mount initial data + const root = ReactNoop.createRoot(); + await ReactNoop.act(async () => { + root.render( + + }> + + + , + ); + }); + expect(Scheduler).toHaveYielded(['Cache miss! [A]', 'Loading...']); + expect(root).toMatchRenderedOutput('Loading...'); + + await ReactNoop.act(async () => { + await resolveText('A'); + }); + expect(Scheduler).toHaveYielded(['A [v1]']); + expect(root).toMatchRenderedOutput('A [v1]'); + + // Mutate the text service, then refresh for new data. + mutateRemoteTextService(); + await ReactNoop.act(async () => { + // Refresh the cache with seeded data, like you would receive from a + // server mutation. + // TODO: Seeding multiple typed caches. Should work by calling `refresh` + // multiple times with different key/value pairs + const seededCache = new Map(); + seededCache.set('A', { + ping: null, + status: 'resolved', + value: textServiceVersion, + }); + startTransition(() => refresh(createTextCache, seededCache)); + }); + // The root should re-render without a cache miss. 
+ expect(Scheduler).toHaveYielded(['A [v2]']); + expect(root).toMatchRenderedOutput('A [v2]'); + }); + + // @gate experimental + test('refreshing a parent cache also refreshes its children', async () => { + let refreshShell; + function RefreshShell() { + refreshShell = useCacheRefresh(); + return null; + } + + function App({showMore}) { + return ( + + + }> + + + {showMore ? ( + + }> + + + + ) : null} + + ); + } + + const root = ReactNoop.createRoot(); + await ReactNoop.act(async () => { + await resolveText('A'); + root.render(); + }); + expect(Scheduler).toHaveYielded([ + 'Cache miss! [A]', + 'Loading...', + 'A [v1]', + ]); + expect(root).toMatchRenderedOutput('A [v1]'); + + // Simulate a server mutation. + mutateRemoteTextService(); + + // Add a new cache boundary + await ReactNoop.act(async () => { + await resolveText('A'); + root.render(); + }); + expect(Scheduler).toHaveYielded([ + 'A [v1]', + // New tree should load fresh data. + 'Cache miss! [A]', + 'Loading...', + 'A [v2]', + ]); + expect(root).toMatchRenderedOutput('A [v1]A [v2]'); + + // Now refresh the shell. This should also cause the "Show More" contents to + // refresh, since its cache is nested inside the outer one. + mutateRemoteTextService(); + await ReactNoop.act(async () => { + startTransition(() => refreshShell()); + }); + expect(Scheduler).toHaveYielded([ + 'Cache miss! [A]', + 'Loading...', + 'Loading...', + ]); + expect(root).toMatchRenderedOutput('A [v1]A [v2]'); + + await ReactNoop.act(async () => { + await resolveText('A'); + }); + expect(Scheduler).toHaveYielded(['A [v3]', 'A [v3]']); + expect(root).toMatchRenderedOutput('A [v3]A [v3]'); + }); + + // @gate experimental + test( + 'refreshing a cache boundary does not refresh the other boundaries ' + + 'that mounted at the same time (i.e. the ones that share the same cache)', + async () => { + let refreshFirstBoundary; + function RefreshFirstBoundary() { + refreshFirstBoundary = useCacheRefresh(); + return null; + } + + function App({showMore}) { + return showMore ? ( + <> + + }> + + + + + + }> + + + + + ) : null; + } + + // First mount the initial shell without the nested boundaries. This is + // necessary for this test because we want the two inner boundaries to be + // treated like sibling providers that happen to share an underlying + // cache, as opposed to consumers of the root-level cache. + const root = ReactNoop.createRoot(); + await ReactNoop.act(async () => { + root.render(); + }); + + // Now reveal the boundaries. In a real app this would be a navigation. + await ReactNoop.act(async () => { + root.render(); + }); + + // Even though there are two new trees, they should share the same + // data cache. So there should be only a single cache miss for A. + expect(Scheduler).toHaveYielded([ + 'Cache miss! [A]', + 'Loading...', + 'Loading...', + ]); + expect(root).toMatchRenderedOutput('Loading...Loading...'); + + await ReactNoop.act(async () => { + await resolveText('A'); + }); + expect(Scheduler).toHaveYielded(['A [v1]', 'A [v1]']); + expect(root).toMatchRenderedOutput('A [v1]A [v1]'); + + // Refresh the first boundary. It should not refresh the second boundary, + // even though they previously shared the same underlying cache. + mutateRemoteTextService(); + await ReactNoop.act(async () => { + await refreshFirstBoundary(); + }); + expect(Scheduler).toHaveYielded(['Cache miss! 
[A]', 'Loading...']); + + await ReactNoop.act(async () => { + await resolveText('A'); + }); + expect(Scheduler).toHaveYielded(['A [v2]']); + expect(root).toMatchRenderedOutput('A [v2]A [v1]'); + }, + ); + + // @gate experimental + test( + 'mount a new Cache boundary in a sibling while simultaneously ' + + 'resolving a Suspense boundary', + async () => { + function App({showMore}) { + return ( + <> + {showMore ? ( + }> + + + + + ) : null} + }> + + {' '} + {' '} + + + + + ); + } + + const root = ReactNoop.createRoot(); + await ReactNoop.act(async () => { + root.render(); + }); + expect(Scheduler).toHaveYielded([ + 'Cache miss! [A]', + 'Cache miss! [B]', + 'Loading...', + ]); + + await ReactNoop.act(async () => { + // This will resolve the content in the first cache + resolveText('A'); + resolveText('B'); + // Now let's simulate a mutation + mutateRemoteTextService(); + // And mount the second tree, which includes new content + root.render(); + }); + expect(Scheduler).toHaveYielded([ + // The new tree should use a fresh cache + 'Cache miss! [A]', + 'Loading...', + // The other tree uses the cached responses. This demonstrates that the + // requests are not dropped. + 'A [v1]', + 'B [v1]', + ]); + + // Now resolve the second tree + await ReactNoop.act(async () => { + resolveText('A'); + }); + expect(Scheduler).toHaveYielded(['A [v2]']); + expect(root).toMatchRenderedOutput('A [v2] A [v1] B [v1]'); + }, + ); + + // @gate experimental + test('cache pool is cleared once transitions that depend on it commit their shell', async () => { + function Child({text}) { + return ( + + + + ); + } + + const root = ReactNoop.createRoot(); + await ReactNoop.act(async () => { + root.render( + }>(empty), + ); + }); + expect(Scheduler).toHaveYielded([]); + expect(root).toMatchRenderedOutput('(empty)'); + + await ReactNoop.act(async () => { + startTransition(() => { + root.render( + }> + + , + ); + }); + }); + expect(Scheduler).toHaveYielded(['Cache miss! [A]', 'Loading...']); + expect(root).toMatchRenderedOutput('(empty)'); + + await ReactNoop.act(async () => { + startTransition(() => { + root.render( + }> + + + , + ); + }); + }); + expect(Scheduler).toHaveYielded([ + // No cache miss, because it uses the pooled cache + 'Loading...', + ]); + expect(root).toMatchRenderedOutput('(empty)'); + + // Resolve the request + await ReactNoop.act(async () => { + await resolveText('A'); + }); + expect(Scheduler).toHaveYielded(['A', 'A']); + expect(root).toMatchRenderedOutput('AA'); + + // Now do another transition + await ReactNoop.act(async () => { + startTransition(() => { + root.render( + }> + + + + , + ); + }); + }); + expect(Scheduler).toHaveYielded([ + // First two children use the old cache because they already finished + 'A', + 'A', + // The new child uses a fresh cache + 'Cache miss! [A]', + 'Loading...', + 'A', + 'A', + 'A', + ]); + expect(root).toMatchRenderedOutput('AAA'); + }); + + // @gate experimental + test('cache pool is not cleared by arbitrary commits', async () => { + function App() { + return ( + <> + + + + ); + } + + let showMore; + function ShowMore() { + const [shouldShow, _showMore] = useState(false); + showMore = () => _showMore(true); + return ( + <> + }> + {shouldShow ? 
( + + + + ) : null} + + + ); + } + + let updateUnrelated; + function Unrelated() { + const [count, _updateUnrelated] = useState(0); + updateUnrelated = _updateUnrelated; + return ; + } + + const root = ReactNoop.createRoot(); + await ReactNoop.act(async () => { + root.render(); + }); + expect(Scheduler).toHaveYielded(['0']); + expect(root).toMatchRenderedOutput('0'); + + await ReactNoop.act(async () => { + startTransition(() => { + showMore(); + }); + }); + expect(Scheduler).toHaveYielded(['Cache miss! [A]', 'Loading...']); + expect(root).toMatchRenderedOutput('0'); + + await ReactNoop.act(async () => { + updateUnrelated(1); + }); + expect(Scheduler).toHaveYielded([ + '1', + + // Happens to re-render the fallback. Doesn't need to, but not relevant + // to this test. + 'Loading...', + ]); + expect(root).toMatchRenderedOutput('1'); + + await ReactNoop.act(async () => { + resolveText('A'); + mutateRemoteTextService(); + }); + expect(Scheduler).toHaveYielded(['A']); + expect(root).toMatchRenderedOutput('A1'); + }); +}); diff --git a/packages/react-server/src/ReactFlightServer.js b/packages/react-server/src/ReactFlightServer.js index d6717a0830760..ffbc13edaf60f 100644 --- a/packages/react-server/src/ReactFlightServer.js +++ b/packages/react-server/src/ReactFlightServer.js @@ -758,6 +758,13 @@ function unsupportedHook(): void { invariant(false, 'This Hook is not supported in Server Components.'); } +function unsupportedRefresh(): void { + invariant( + currentCache, + 'Refreshing the cache is not supported in Server Components.', + ); +} + let currentCache: Map | null = null; const Dispatcher: DispatcherType = { @@ -797,4 +804,7 @@ const Dispatcher: DispatcherType = { useEffect: (unsupportedHook: any), useOpaqueIdentifier: (unsupportedHook: any), useMutableSource: (unsupportedHook: any), + useCacheRefresh(): (?() => T, ?T) => void { + return unsupportedRefresh; + }, }; diff --git a/packages/react-suspense-test-utils/src/ReactSuspenseTestUtils.js b/packages/react-suspense-test-utils/src/ReactSuspenseTestUtils.js index a58c6cae824c0..43c6c5184d2b9 100644 --- a/packages/react-suspense-test-utils/src/ReactSuspenseTestUtils.js +++ b/packages/react-suspense-test-utils/src/ReactSuspenseTestUtils.js @@ -44,6 +44,7 @@ export function waitForSuspense(fn: () => T): Promise { useTransition: unsupported, useOpaqueIdentifier: unsupported, useMutableSource: unsupported, + useCacheRefresh: unsupported, }; // Not using async/await because we don't compile it. 
return new Promise((resolve, reject) => { diff --git a/packages/react/index.classic.fb.js b/packages/react/index.classic.fb.js index 04723075defa4..366e86626fd15 100644 --- a/packages/react/index.classic.fb.js +++ b/packages/react/index.classic.fb.js @@ -51,6 +51,8 @@ export { SuspenseList, SuspenseList as unstable_SuspenseList, unstable_getCacheForType, + unstable_Cache, + unstable_useCacheRefresh, // enableScopeAPI unstable_Scope, unstable_useOpaqueIdentifier, diff --git a/packages/react/index.experimental.js b/packages/react/index.experimental.js index 5de53908ae883..ba0d205f81297 100644 --- a/packages/react/index.experimental.js +++ b/packages/react/index.experimental.js @@ -46,6 +46,8 @@ export { SuspenseList as unstable_SuspenseList, unstable_useOpaqueIdentifier, unstable_getCacheForType, + unstable_Cache, + unstable_useCacheRefresh, // enableDebugTracing unstable_DebugTracingMode, } from './src/React'; diff --git a/packages/react/index.js b/packages/react/index.js index 1553bdd9e9a89..80e6591171b5c 100644 --- a/packages/react/index.js +++ b/packages/react/index.js @@ -83,4 +83,6 @@ export { unstable_Scope, unstable_useOpaqueIdentifier, unstable_getCacheForType, + unstable_Cache, + unstable_useCacheRefresh, } from './src/React'; diff --git a/packages/react/index.modern.fb.js b/packages/react/index.modern.fb.js index 9a3bb4384ca2e..cf459c0bfb442 100644 --- a/packages/react/index.modern.fb.js +++ b/packages/react/index.modern.fb.js @@ -50,6 +50,8 @@ export { SuspenseList, SuspenseList as unstable_SuspenseList, unstable_getCacheForType, + unstable_Cache, + unstable_useCacheRefresh, // enableScopeAPI unstable_Scope, unstable_useOpaqueIdentifier, diff --git a/packages/react/src/React.js b/packages/react/src/React.js index aae52b2750db6..84490ef902c97 100644 --- a/packages/react/src/React.js +++ b/packages/react/src/React.js @@ -17,6 +17,7 @@ import { REACT_SUSPENSE_LIST_TYPE, REACT_LEGACY_HIDDEN_TYPE, REACT_SCOPE_TYPE, + REACT_CACHE_TYPE, } from 'shared/ReactSymbols'; import {Component, PureComponent} from './ReactBaseClasses'; @@ -48,6 +49,7 @@ import { useTransition, useDeferredValue, useOpaqueIdentifier, + useCacheRefresh, } from './ReactHooks'; import { createElementWithValidation, @@ -112,6 +114,8 @@ export { REACT_SUSPENSE_LIST_TYPE as SuspenseList, REACT_LEGACY_HIDDEN_TYPE as unstable_LegacyHidden, getCacheForType as unstable_getCacheForType, + useCacheRefresh as unstable_useCacheRefresh, + REACT_CACHE_TYPE as unstable_Cache, // enableFundamentalAPI createFundamental as unstable_createFundamental, // enableScopeAPI diff --git a/packages/react/src/ReactHooks.js b/packages/react/src/ReactHooks.js index 1020efa74cb96..c1602a9bb53ed 100644 --- a/packages/react/src/ReactHooks.js +++ b/packages/react/src/ReactHooks.js @@ -180,3 +180,9 @@ export function useMutableSource( const dispatcher = resolveDispatcher(); return dispatcher.useMutableSource(source, getSnapshot, subscribe); } + +export function useCacheRefresh(): (?() => T, ?T) => void { + const dispatcher = resolveDispatcher(); + // $FlowFixMe This is unstable, thus optional + return dispatcher.useCacheRefresh(); +} diff --git a/packages/shared/ReactSymbols.js b/packages/shared/ReactSymbols.js index 1870c8c009482..d490ab417c211 100644 --- a/packages/shared/ReactSymbols.js +++ b/packages/shared/ReactSymbols.js @@ -31,6 +31,7 @@ export let REACT_OPAQUE_ID_TYPE = 0xeae0; export let REACT_DEBUG_TRACING_MODE_TYPE = 0xeae1; export let REACT_OFFSCREEN_TYPE = 0xeae2; export let REACT_LEGACY_HIDDEN_TYPE = 0xeae3; +export let REACT_CACHE_TYPE 
= 0xeae4; if (typeof Symbol === 'function' && Symbol.for) { const symbolFor = Symbol.for; @@ -52,6 +53,7 @@ if (typeof Symbol === 'function' && Symbol.for) { REACT_DEBUG_TRACING_MODE_TYPE = symbolFor('react.debug_trace_mode'); REACT_OFFSCREEN_TYPE = symbolFor('react.offscreen'); REACT_LEGACY_HIDDEN_TYPE = symbolFor('react.legacy_hidden'); + REACT_CACHE_TYPE = symbolFor('react.cache'); } const MAYBE_ITERATOR_SYMBOL = typeof Symbol === 'function' && Symbol.iterator; diff --git a/packages/shared/getComponentName.js b/packages/shared/getComponentName.js index ae74676ed3616..a5594f6f64d70 100644 --- a/packages/shared/getComponentName.js +++ b/packages/shared/getComponentName.js @@ -21,6 +21,7 @@ import { REACT_SUSPENSE_TYPE, REACT_SUSPENSE_LIST_TYPE, REACT_LAZY_TYPE, + REACT_CACHE_TYPE, } from 'shared/ReactSymbols'; import type {ReactContext, ReactProviderType} from 'shared/ReactTypes'; @@ -72,6 +73,8 @@ function getComponentName(type: mixed): string | null { return 'Suspense'; case REACT_SUSPENSE_LIST_TYPE: return 'SuspenseList'; + case REACT_CACHE_TYPE: + return 'Cache'; } if (typeof type === 'object') { switch (type.$$typeof) { diff --git a/packages/shared/isValidElementType.js b/packages/shared/isValidElementType.js index 35eeae2a308ce..0a361c4a22dec 100644 --- a/packages/shared/isValidElementType.js +++ b/packages/shared/isValidElementType.js @@ -22,8 +22,9 @@ import { REACT_FUNDAMENTAL_TYPE, REACT_SCOPE_TYPE, REACT_LEGACY_HIDDEN_TYPE, + REACT_CACHE_TYPE, } from 'shared/ReactSymbols'; -import {enableScopeAPI} from './ReactFeatureFlags'; +import {enableScopeAPI, enableCache} from './ReactFeatureFlags'; let REACT_MODULE_REFERENCE: number | Symbol = 0; if (typeof Symbol === 'function') { @@ -44,7 +45,8 @@ export default function isValidElementType(type: mixed) { type === REACT_SUSPENSE_TYPE || type === REACT_SUSPENSE_LIST_TYPE || type === REACT_LEGACY_HIDDEN_TYPE || - (enableScopeAPI && type === REACT_SCOPE_TYPE) + (enableScopeAPI && type === REACT_SCOPE_TYPE) || + (enableCache && type === REACT_CACHE_TYPE) ) { return true; } diff --git a/scripts/error-codes/codes.json b/scripts/error-codes/codes.json index 641901fcaba05..12a8db733b0ce 100644 --- a/scripts/error-codes/codes.json +++ b/scripts/error-codes/codes.json @@ -371,5 +371,6 @@ "380": "Reading the cache is only supported while rendering.", "381": "This feature is not supported by ReactSuspenseTestUtils.", "382": "This query has received more parameters than the last time the same query was used. Always pass the exact number of parameters that the query needs.", - "383": "This query has received fewer parameters than the last time the same query was used. Always pass the exact number of parameters that the query needs." + "383": "This query has received fewer parameters than the last time the same query was used. Always pass the exact number of parameters that the query needs.", + "384": "Refreshing the cache is not supported in Server Components." }
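(Closing note, not part of the patch: the public surface this diff adds is unstable_Cache, unstable_getCacheForType, and unstable_useCacheRefresh. Below is a minimal sketch of the seeded-refresh pattern exercised by the 'refresh a cache with seed data' test; createItemCache, saveItem, and the record shape are assumptions for illustration, not APIs introduced here.)

import * as React from 'react';

const {
  unstable_Cache: Cache,
  unstable_useCacheRefresh: useCacheRefresh,
  unstable_startTransition: startTransition,
  Suspense,
} = React;

// Hypothetical app-level cache factory; readers would pass this same function
// to unstable_getCacheForType to look items up by id.
function createItemCache() {
  return new Map();
}

// Hypothetical server mutation; resolves with the updated record.
const saveItem = (id, draft) =>
  fetch(`/items/${id}`, {
    method: 'POST',
    body: JSON.stringify(draft),
  }).then(response => response.json());

function SaveButton({id, draft}) {
  const refresh = useCacheRefresh();
  return (
    <button
      onClick={async () => {
        const updated = await saveItem(id, draft);
        // Seed the next cache with the server response so the refreshed tree
        // does not refetch data the mutation already returned, then refresh
        // inside a transition so the current UI stays visible meanwhile.
        const seeded = new Map([[id, {status: 'resolved', value: updated}]]);
        startTransition(() => refresh(createItemCache, seeded));
      }}>
      Save
    </button>
  );
}

function ItemScreen({children}) {
  // Everything under this boundary shares one cache; a refresh scheduled from
  // inside (e.g. by SaveButton rendered in children) swaps that cache for the
  // whole subtree, while trees outside the boundary keep their own data.
  return (
    <Cache>
      <Suspense fallback="Loading...">{children}</Suspense>
    </Cache>
  );
}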