@@ -19,6 +19,7 @@ import {isPrimaryRenderer} from './ReactFiberHostConfig';
 import {createCursor, push, pop} from './ReactFiberStack.new';
 import {pushProvider, popProvider} from './ReactFiberNewContext.new';
 import * as Scheduler from 'scheduler';
+import {getWorkInProgressRoot} from './ReactFiberWorkLoop.new';
 
 export type Cache = {|
   controller: AbortController,
@@ -61,13 +62,9 @@ if (__DEV__ && enableCache) {
   CacheContext._currentRenderer2 = null;
 }
 
-// The cache that newly mounted Cache boundaries should use. It's either
-// retrieved from the cache pool, or the result of a refresh.
-let pooledCache: Cache | null = null;
-
-// When retrying a Suspense/Offscreen boundary, we override pooledCache with the
-// cache from the render that suspended.
-const prevFreshCacheOnStack: StackCursor<Cache | null> = createCursor(null);
+// When retrying a Suspense/Offscreen boundary, we restore the cache that was
+// used during the previous render by placing it here, on the stack.
+const resumedCache: StackCursor<Cache | null> = createCursor(null);
 
 // Creates a new empty Cache instance with a ref-count of 0. The caller is responsible
 // for retaining the cache once it is in use (retainCache), and releasing the cache
@@ -135,56 +132,70 @@ export function popCacheProvider(workInProgress: Fiber, cache: Cache) {
   popProvider(CacheContext, workInProgress);
 }
 
-export function requestCacheFromPool(renderLanes: Lanes): Cache {
+function peekCacheFromPool(): Cache | null {
   if (!enableCache) {
     return (null: any);
   }
-  if (pooledCache !== null) {
-    return pooledCache;
+
+  // Check if the cache pool already has a cache we can use.
+
+  // If we're rendering inside a Suspense boundary that is currently hidden,
+  // we should use the same cache that we used during the previous render, if
+  // one exists.
+  const cacheResumedFromPreviousRender = resumedCache.current;
+  if (cacheResumedFromPreviousRender !== null) {
+    return cacheResumedFromPreviousRender;
   }
-  // Create a fresh cache. The pooled cache must be owned - it is freed
-  // in releaseRootPooledCache() - but the cache instance handed out
-  // is retained/released in the commit phase of the component that
-  // references is (ie the host root, cache boundary, suspense component)
-  // Ie, pooledCache is conceptually an Option<Arc<Cache>> (owned),
-  // whereas the return value of this function is a &Arc<Cache> (borrowed).
-  pooledCache = createCache();
-  retainCache(pooledCache);
-  return pooledCache;
+
+  // Otherwise, check the root's cache pool.
+  const root = (getWorkInProgressRoot(): any);
+  const cacheFromRootCachePool = root.pooledCache;
+
+  return cacheFromRootCachePool;
+}
+
+export function requestCacheFromPool(renderLanes: Lanes): Cache {
+  // Similar to previous function, except if there's not already a cache in the
+  // pool, we allocate a new one.
+  const cacheFromPool = peekCacheFromPool();
+  if (cacheFromPool !== null) {
+    return cacheFromPool;
+  }
+
+  // Create a fresh cache and add it to the root cache pool. A cache can have
+  // multiple owners:
+  // - A cache pool that lives on the FiberRoot. This is where all fresh caches
+  //   are originally created (TODO: except during refreshes, until we implement
+  //   this correctly). The root takes ownership immediately when the cache is
+  //   created. Conceptually, root.pooledCache is an Option<Arc<Cache>> (owned),
+  //   and the return value of this function is a &Arc<Cache> (borrowed).
+  // - One of several fiber types: host root, cache boundary, suspense
+  //   component. These retain and release in the commit phase.
+
+  const root = (getWorkInProgressRoot(): any);
+  const freshCache = createCache();
+  root.pooledCache = freshCache;
+  retainCache(freshCache);
+  if (freshCache !== null) {
+    root.pooledCacheLanes |= renderLanes;
+  }
+  return freshCache;
 }
 
 export function pushRootCachePool(root: FiberRoot) {
   if (!enableCache) {
     return;
   }
-  // When we start rendering a tree, read the pooled cache for this render
-  // from `root.pooledCache`. If it's currently `null`, we will lazily
-  // initialize it the first type it's requested. However, we only mutate
-  // the root itself during the complete/unwind phase of the HostRoot.
-  const rootCache = root.pooledCache;
-  if (rootCache != null) {
-    pooledCache = rootCache;
-    root.pooledCache = null;
-  } else {
-    pooledCache = null;
-  }
+  // Note: This function currently does nothing but I'll leave it here for
+  // code organization purposes in case that changes.
 }
 
 export function popRootCachePool(root: FiberRoot, renderLanes: Lanes) {
   if (!enableCache) {
     return;
   }
-  // The `pooledCache` variable points to the cache that was used for new
-  // cache boundaries during this render, if any. Move ownership of the
-  // cache to the root so that parallel transitions may share the same
-  // cache. We will clear this field once all the transitions that depend
-  // on it (which we track with `pooledCacheLanes`) have committed.
-  root.pooledCache = pooledCache;
-  if (pooledCache !== null) {
-    root.pooledCacheLanes |= renderLanes;
-  }
-  // set to null, conceptually we are moving ownership to the root
-  pooledCache = null;
+  // Note: This function currently does nothing but I'll leave it here for
+  // code organization purposes in case that changes.
 }
 
 export function restoreSpawnedCachePool(
@@ -202,51 +213,35 @@ export function restoreSpawnedCachePool(
     // will override it.
     return null;
   } else {
-    // No refresh. Resume with the previous cache. This will override the cache
-    // pool so that any new Cache boundaries in the subtree use this one instead
-    // of requesting a fresh one.
-    push(prevFreshCacheOnStack, pooledCache, offscreenWorkInProgress);
-    pooledCache = prevCachePool.pool;
+    // No refresh. Resume with the previous cache. New Cache boundaries in the
+    // subtree use this one instead of requesting a fresh one (see
+    // peekCacheFromPool).
+    push(resumedCache, prevCachePool.pool, offscreenWorkInProgress);
 
     // Return the cache pool to signal that we did in fact push it. We will
     // assign this to the field on the fiber so we know to pop the context.
     return prevCachePool;
   }
 }
 
-// Note: Ideally, `popCachePool` would return this value, and then we would pass
-// it to `getSuspendedCachePool`. But factoring reasons, those two functions are
-// in different phases/files. They are always called in sequence, though, so we
-// can stash the value here temporarily.
-let _suspendedPooledCache: Cache | null = null;
-
 export function popCachePool(workInProgress: Fiber) {
   if (!enableCache) {
     return;
   }
-  _suspendedPooledCache = pooledCache;
-  pooledCache = prevFreshCacheOnStack.current;
-  pop(prevFreshCacheOnStack, workInProgress);
+  pop(resumedCache, workInProgress);
 }
 
 export function getSuspendedCachePool(): SpawnedCachePool | null {
   if (!enableCache) {
     return null;
   }
-  // We check the cache on the stack first, since that's the one any new Caches
-  // would have accessed.
-  let pool = pooledCache;
-  if (pool === null) {
-    // There's no pooled cache above us in the stack. However, a child in the
-    // suspended tree may have requested a fresh cache pool. If so, we would
-    // have unwound it with `popCachePool`.
-    if (_suspendedPooledCache !== null) {
-      pool = _suspendedPooledCache;
-      _suspendedPooledCache = null;
-    } else {
-      // There's no suspended cache pool.
-      return null;
-    }
+  // This function is called when a Suspense boundary suspends. It returns the
+  // cache that would have been used to render fresh data during this render,
+  // if there was any, so that we can resume rendering with the same cache when
+  // we receive more data.
+  const cacheFromPool = peekCacheFromPool();
+  if (cacheFromPool === null) {
+    return null;
   }
 
   return {
@@ -255,7 +250,7 @@ export function getSuspendedCachePool(): SpawnedCachePool | null {
     parent: isPrimaryRenderer
       ? CacheContext._currentValue
       : CacheContext._currentValue2,
-    pool,
+    pool: cacheFromPool,
   };
 }
 
@@ -264,8 +259,8 @@ export function getOffscreenDeferredCachePool(): SpawnedCachePool | null {
     return null;
   }
 
-  if (pooledCache === null) {
-    // There's no deferred cache pool.
+  const cacheFromPool = peekCacheFromPool();
+  if (cacheFromPool === null) {
     return null;
   }
 
@@ -275,6 +270,6 @@ export function getOffscreenDeferredCachePool(): SpawnedCachePool | null {
     parent: isPrimaryRenderer
       ? CacheContext._currentValue
       : CacheContext._currentValue2,
-    pool: pooledCache,
+    pool: cacheFromPool,
   };
 }
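
For reviewers, a minimal sketch of the pool flow after this change (not part of the diff): Flow types, the work-in-progress root lookup, and ref-count plumbing are stripped out, and `root` below is a plain object standing in for the FiberRoot.

// Simplified model of the new cache pool behavior.
const resumedCache = {current: null}; // pushed by restoreSpawnedCachePool

function createCache() {
  // Stand-in for the real Cache instance.
  return {controller: new AbortController(), data: new Map(), refCount: 0};
}

function peekCacheFromPool(root) {
  // Prefer the cache resumed from a previous (suspended) render, if any...
  if (resumedCache.current !== null) {
    return resumedCache.current;
  }
  // ...otherwise fall back to whatever is pooled on the root (may be null).
  return root.pooledCache;
}

function requestCacheFromPool(root, renderLanes) {
  const cacheFromPool = peekCacheFromPool(root);
  if (cacheFromPool !== null) {
    return cacheFromPool;
  }
  // No pooled cache yet: create one; the root takes ownership immediately so
  // parallel transitions in the same lanes can share it.
  const freshCache = createCache();
  root.pooledCache = freshCache;
  freshCache.refCount++; // retainCache
  root.pooledCacheLanes |= renderLanes;
  return freshCache;
}

// Two requests during the same render now share the root's pooled cache:
const root = {pooledCache: null, pooledCacheLanes: 0};
console.log(requestCacheFromPool(root, 1) === requestCacheFromPool(root, 1)); // true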