From 44257b636fe0cc4d4294673f9cb76f26b72f91b5 Mon Sep 17 00:00:00 2001 From: Lennart Date: Thu, 24 Jun 2021 16:10:58 +0200 Subject: [PATCH] feat(gatsby): PQR worker can run page queries (#32017) * run queries in child/schema * split schema into schema & queries, add new test * wip * test static queries * revert changes related to page queries * initial * smaller ts improvements removing unused types, consolidate them, export IQueryJob * convert query/index.js to TS + change pageQueryIds type to Array * wip * use .cache/worker folder for slices location * optional chaining, test context variables * make cloud tests pass? * add runQueriesInWorkers function * remove runQueriesInWorkersQueue from build command for now * add test for runQueriesInWorkersQueue * adapt activity * first part of review comments * wip test * finalize test * proper test with jest-extended Co-authored-by: pieh@users.noreply.github.com * typescript magic * revert createProgress changes --- jest.config.js | 1 + package.json | 1 + .../gatsby/src/query/{index.js => index.ts} | 140 ++++++++----- packages/gatsby/src/query/query-runner.ts | 4 +- packages/gatsby/src/query/types.ts | 2 +- packages/gatsby/src/redux/persist.ts | 51 +++-- .../src/services/calculate-dirty-queries.ts | 4 +- .../gatsby/src/services/run-page-queries.ts | 9 +- packages/gatsby/src/services/types.ts | 6 +- .../src/state-machines/data-layer/types.ts | 8 - .../src/state-machines/query-running/types.ts | 2 +- .../plugins/gatsby-plugin-test/gatsby-node.js | 13 ++ .../src/utils/worker/__tests__/queries.ts | 194 ++++++++++++++++-- .../src/utils/worker/__tests__/schema.ts | 2 +- .../gatsby/src/utils/worker/child/queries.ts | 12 +- packages/gatsby/src/utils/worker/pool.ts | 44 ++++ yarn.lock | 13 +- 17 files changed, 401 insertions(+), 105 deletions(-) rename packages/gatsby/src/query/{index.js => index.ts} (58%) diff --git a/jest.config.js b/jest.config.js index 636ea5e603c4a..a93fe120fc298 100644 --- a/jest.config.js +++ b/jest.config.js @@ -54,4 +54,5 @@ module.exports = { testEnvironment: `jest-environment-jsdom-fourteen`, moduleFileExtensions: [`js`, `jsx`, `ts`, `tsx`, `json`], setupFiles: [`/.jestSetup.js`], + setupFilesAfterEnv: [`jest-extended`], } diff --git a/package.json b/package.json index 2c6e4f08f8b64..b590cfd61c178 100644 --- a/package.json +++ b/package.json @@ -51,6 +51,7 @@ "ignore": "^5.1.8", "jest": "^24.9.0", "jest-cli": "^24.9.0", + "jest-extended": "^0.11.5", "jest-environment-jsdom-fourteen": "^0.1.0", "jest-junit": "^10.0.0", "jest-serializer-path": "^0.1.15", diff --git a/packages/gatsby/src/query/index.js b/packages/gatsby/src/query/index.ts similarity index 58% rename from packages/gatsby/src/query/index.js rename to packages/gatsby/src/query/index.ts index ba66598bfef2b..e7792fad6c3d8 100644 --- a/packages/gatsby/src/query/index.js +++ b/packages/gatsby/src/query/index.ts @@ -1,10 +1,16 @@ -const _ = require(`lodash`) -const fastq = require(`fastq`) -const { store } = require(`../redux`) -const { hasFlag, FLAG_ERROR_EXTRACTION } = require(`../redux/reducers/queries`) -const { queryRunner } = require(`./query-runner`) -const { websocketManager } = require(`../utils/websocket-manager`) -const { GraphQLRunner } = require(`./graphql-runner`) +import _ from "lodash" +import fastq from "fastq" +import { IProgressReporter } from "gatsby-cli/lib/reporter/reporter-progress" +import { store } from "../redux" +import { IGatsbyPage, IGatsbyState } from "../redux/types" +import { hasFlag, FLAG_ERROR_EXTRACTION } from "../redux/reducers/queries" 
+import { IQueryJob, queryRunner } from "./query-runner" +import { + IStaticQueryResult, + websocketManager, +} from "../utils/websocket-manager" +import { GraphQLRunner } from "./graphql-runner" +import { IGroupedQueryIds } from "../services" if (process.env.GATSBY_EXPERIMENTAL_QUERY_CONCURRENCY) { console.info( @@ -21,7 +27,7 @@ const concurrency = * Dirty state is tracked in `queries` reducer, here we simply filter * them from all tracked queries. */ -function calcDirtyQueryIds(state) { +export function calcDirtyQueryIds(state: IGatsbyState): Array { const { trackedQueries, trackedComponents, deletedQueries } = state.queries const queriesWithBabelErrors = new Set() @@ -33,7 +39,7 @@ function calcDirtyQueryIds(state) { } } // Note: trackedQueries contains both - page and static query ids - const dirtyQueryIds = [] + const dirtyQueryIds: Array = [] for (const [queryId, query] of trackedQueries) { if (deletedQueries.has(queryId)) { continue @@ -45,32 +51,49 @@ function calcDirtyQueryIds(state) { return dirtyQueryIds } +export { calcDirtyQueryIds as calcInitialDirtyQueryIds } + /** - * groups queryIds by whether they are static or page queries. + * Groups queryIds by whether they are static or page queries. */ -function groupQueryIds(queryIds) { +export function groupQueryIds(queryIds: Array): IGroupedQueryIds { const grouped = _.groupBy(queryIds, p => p.slice(0, 4) === `sq--` ? `static` : `page` ) + + const { pages } = store.getState() + return { - staticQueryIds: grouped.static || [], - pageQueryIds: grouped.page || [], + staticQueryIds: grouped?.static || [], + pageQueryIds: + grouped?.page + ?.map(path => pages.get(path) as IGatsbyPage) + ?.filter(Boolean) || [], } } -function createQueue({ +function createQueue({ createJobFn, state, activity, graphqlRunner, graphqlTracing, -}) { +}: { + createJobFn: ( + state: IGatsbyState, + queryId: QueryIDType + ) => IQueryJob | undefined + state: IGatsbyState + activity: IProgressReporter + graphqlRunner: GraphQLRunner + graphqlTracing: boolean +}): fastq.queue { if (!graphqlRunner) { graphqlRunner = new GraphQLRunner(store, { graphqlTracing }) } state = state || store.getState() - function worker(queryId, cb) { + function worker(queryId: QueryIDType, cb): void { const job = createJobFn(state, queryId) if (!job) { cb(null, undefined) @@ -91,7 +114,7 @@ function createQueue({ return fastq(worker, concurrency) } -async function processQueries({ +async function processQueries({ queryIds, createJobFn, onQueryDone, @@ -99,7 +122,20 @@ async function processQueries({ activity, graphqlRunner, graphqlTracing, -}) { +}: { + queryIds: Array + createJobFn: ( + state: IGatsbyState, + queryId: QueryIDType + ) => IQueryJob | undefined + onQueryDone: + | (({ job, result }: { job: IQueryJob; result: unknown }) => void) + | undefined + state: IGatsbyState + activity: IProgressReporter + graphqlRunner: GraphQLRunner + graphqlTracing: boolean +}): Promise { return new Promise((resolve, reject) => { const fastQueue = createQueue({ createJobFn, @@ -109,7 +145,7 @@ async function processQueries({ graphqlTracing, }) - queryIds.forEach(queryId => { + queryIds.forEach((queryId: QueryIDType) => { fastQueue.push(queryId, (err, res) => { if (err) { fastQueue.kill() @@ -123,40 +159,57 @@ async function processQueries({ }) if (!fastQueue.idle()) { - fastQueue.drain = () => resolve() + fastQueue.drain = (): any => resolve() } else { resolve() } }) } -function createStaticQueryJob(state, queryId) { +function createStaticQueryJob( + state: IGatsbyState, + queryId: string +): 
IQueryJob | undefined { const component = state.staticQueryComponents.get(queryId) + if (!component) { return undefined } + const { hash, id, query, componentPath } = component + return { id: queryId, - hash, query, + isPage: false, + hash, componentPath, context: { path: id }, } } -function onDevelopStaticQueryDone({ job, result }) { +function onDevelopStaticQueryDone({ + job, + result, +}: { + job: IQueryJob + result: IStaticQueryResult["result"] +}): void { + if (!job.hash) { + return + } + websocketManager.emitStaticQueryData({ result, id: job.hash, }) } -async function processStaticQueries( - queryIds, +export async function processStaticQueries( + queryIds: IGroupedQueryIds["staticQueryIds"], { state, activity, graphqlRunner, graphqlTracing } -) { - return processQueries({ +): Promise { + return processQueries({ queryIds, createJobFn: createStaticQueryJob, onQueryDone: @@ -170,13 +223,14 @@ async function processStaticQueries( }) } -async function processPageQueries( - queryIds, +export async function processPageQueries( + queryIds: IGroupedQueryIds["pageQueryIds"], { state, activity, graphqlRunner, graphqlTracing } -) { - return processQueries({ +): Promise { + return processQueries({ queryIds, createJobFn: createPageQueryJob, + onQueryDone: undefined, state, activity, graphqlRunner, @@ -184,19 +238,19 @@ async function processPageQueries( }) } -function createPageQueryJob(state, queryId) { - const page = state.pages.get(queryId) +function createPageQueryJob( + state: IGatsbyState, + page: IGatsbyPage +): IQueryJob | undefined { + const component = state.components.get(page.componentPath) - // Make sure we filter out pages that don't exist. An example is - // /dev-404-page/, whose SitePage node is created via - // `internal-data-bridge`, but the actual page object is only - // created during `gatsby develop`. 
- if (!page) { + if (!component) { return undefined } - const component = state.components.get(page.componentPath) + const { path, componentPath, context } = page const { query } = component + return { id: path, query, @@ -208,11 +262,3 @@ function createPageQueryJob(state, queryId) { }, } } - -module.exports = { - calcInitialDirtyQueryIds: calcDirtyQueryIds, - calcDirtyQueryIds, - processPageQueries, - processStaticQueries, - groupQueryIds, -} diff --git a/packages/gatsby/src/query/query-runner.ts b/packages/gatsby/src/query/query-runner.ts index 71e2f9df42a92..b613524214da4 100644 --- a/packages/gatsby/src/query/query-runner.ts +++ b/packages/gatsby/src/query/query-runner.ts @@ -17,14 +17,14 @@ import { pageDataExists } from "../utils/page-data" const resultHashes = new Map() -interface IQueryJob { +export interface IQueryJob { id: string hash?: string query: string componentPath: string context: PageContext isPage: boolean - pluginCreatorId: string + pluginCreatorId?: string } function reportLongRunningQueryJob(queryJob): void { diff --git a/packages/gatsby/src/query/types.ts b/packages/gatsby/src/query/types.ts index 3aefc1a04a4f2..85cd877f8a203 100644 --- a/packages/gatsby/src/query/types.ts +++ b/packages/gatsby/src/query/types.ts @@ -28,7 +28,7 @@ export interface IGraphQLRunnerStatResults { uniqueSorts: number } -export type PageContext = any +export type PageContext = Record export interface IExecutionResult extends ExecutionResult { pageContext?: PageContext diff --git a/packages/gatsby/src/redux/persist.ts b/packages/gatsby/src/redux/persist.ts index e6a0a96f58118..b483a2c94caba 100644 --- a/packages/gatsby/src/redux/persist.ts +++ b/packages/gatsby/src/redux/persist.ts @@ -8,6 +8,7 @@ import { readFileSync, removeSync, writeFileSync, + outputFileSync, } from "fs-extra" import { ICachedReduxState, @@ -24,6 +25,10 @@ const getReduxCacheFolder = (): string => // This is a function for the case that somebody does a process.chdir (#19800) path.join(process.cwd(), `.cache/redux`) +const getWorkerSlicesFolder = (): string => + // This is a function for the case that somebody does a process.chdir (#19800) + path.join(process.cwd(), `.cache/worker`) + function reduxSharedFile(dir: string): string { return path.join(dir, `redux.rest.state`) } @@ -46,23 +51,25 @@ export function readFromCache( // for sites with a _lot_ of content. Since all nodes / pages go into a Map, the order // of reading them is not relevant. - const reduxCacheFolder = getReduxCacheFolder() + let cacheFolder = getReduxCacheFolder() if (slices) { + cacheFolder = getWorkerSlicesFolder() + return v8.deserialize( readFileSync( - reduxWorkerSlicesPrefix(reduxCacheFolder) + createContentDigest(slices) + reduxWorkerSlicesPrefix(cacheFolder) + createContentDigest(slices) ) ) } const obj: ICachedReduxState = v8.deserialize( - readFileSync(reduxSharedFile(reduxCacheFolder)) + readFileSync(reduxSharedFile(cacheFolder)) ) // Note: at 1M pages, this will be 1M/chunkSize chunks (ie. 1m/10k=100) const nodesChunks = globSync( - reduxChunkedNodesFilePrefix(reduxCacheFolder) + `*` + reduxChunkedNodesFilePrefix(cacheFolder) + `*` ).map(file => v8.deserialize(readFileSync(file))) const nodes: Array<[string, IGatsbyNode]> = [].concat(...nodesChunks) @@ -78,7 +85,7 @@ export function readFromCache( // Note: at 1M pages, this will be 1M/chunkSize chunks (ie. 
1m/10k=100) const pagesChunks = globSync( - reduxChunkedPagesFilePrefix(reduxCacheFolder) + `*` + reduxChunkedPagesFilePrefix(cacheFolder) + `*` ).map(file => v8.deserialize(readFileSync(file))) const pages: Array<[string, IGatsbyPage]> = [].concat(...pagesChunks) @@ -122,17 +129,8 @@ export function guessSafeChunkSize( function prepareCacheFolder( targetDir: string, - contents: DeepPartial, - slices?: Array + contents: DeepPartial ): void { - if (slices) { - writeFileSync( - reduxWorkerSlicesPrefix(targetDir) + createContentDigest(slices), - v8.serialize(contents) - ) - return - } - // Temporarily save the nodes and pages and remove them from the main redux store // This prevents an OOM when the page nodes collectively contain to much data const nodesMap = contents.nodes @@ -197,18 +195,18 @@ function prepareCacheFolder( } } -function safelyRenameToBak(reduxCacheFolder: string): string { +function safelyRenameToBak(cacheFolder: string): string { // Basically try to work around the potential of previous renamed caches // not being removed for whatever reason. _That_ should not be a blocker. const tmpSuffix = `.bak` let suffixCounter = 0 - let bakName = reduxCacheFolder + tmpSuffix // Start without number + let bakName = cacheFolder + tmpSuffix // Start without number while (existsSync(bakName)) { ++suffixCounter - bakName = reduxCacheFolder + tmpSuffix + suffixCounter + bakName = cacheFolder + tmpSuffix + suffixCounter } - moveSync(reduxCacheFolder, bakName) + moveSync(cacheFolder, bakName) return bakName } @@ -217,12 +215,25 @@ export function writeToCache( contents: DeepPartial, slices?: Array ): void { + // Writing the "slices" also to the "redux" folder introduces subtle bugs when + // e.g. the whole folder gets replaced some "slices" are lost + // Thus they get written to dedicated "worker" folder + if (slices) { + const cacheFolder = getWorkerSlicesFolder() + + outputFileSync( + reduxWorkerSlicesPrefix(cacheFolder) + createContentDigest(slices), + v8.serialize(contents) + ) + return + } + // Note: this should be a transactional operation. So work in a tmp dir and // make sure the cache cannot be left in a corruptable state due to errors. const tmpDir = mkdtempSync(path.join(os.tmpdir(), `reduxcache`)) // linux / windows - prepareCacheFolder(tmpDir, contents, slices) + prepareCacheFolder(tmpDir, contents) // Replace old cache folder with new. If the first rename fails, the cache // is just stale. If the second rename fails, the cache is empty. 
In either diff --git a/packages/gatsby/src/services/calculate-dirty-queries.ts b/packages/gatsby/src/services/calculate-dirty-queries.ts index 3244a75656021..bf3456eb92ba2 100644 --- a/packages/gatsby/src/services/calculate-dirty-queries.ts +++ b/packages/gatsby/src/services/calculate-dirty-queries.ts @@ -12,9 +12,9 @@ export async function calculateDirtyQueries({ }> { assertStore(store) const state = store.getState() - const queryIds = calcDirtyQueryIds(state) + const queryIds: Array = calcDirtyQueryIds(state) - let queriesToRun: Array = queryIds + let queriesToRun = queryIds if ( process.env.gatsby_executing_command === `develop` && diff --git a/packages/gatsby/src/services/run-page-queries.ts b/packages/gatsby/src/services/run-page-queries.ts index 49b5d4bf92f3a..fbc06049e5593 100644 --- a/packages/gatsby/src/services/run-page-queries.ts +++ b/packages/gatsby/src/services/run-page-queries.ts @@ -19,22 +19,21 @@ export async function runPageQueries({ graphqlRunner, }: Partial): Promise { assertStore(store) + const state = store.getState() if (!queryIds) { return } + const { pageQueryIds } = queryIds - const state = store.getState() - const pageQueryIdsCount = pageQueryIds.filter(id => state.pages.has(id)) - .length - if (!pageQueryIdsCount) { + if (pageQueryIds.length === 0) { return } const activity = reporter.createProgress( `run page queries`, - pageQueryIdsCount, + pageQueryIds.length, 0, { id: `page-query-running`, diff --git a/packages/gatsby/src/services/types.ts b/packages/gatsby/src/services/types.ts index 0964a551f7cd8..beb3f766f5548 100644 --- a/packages/gatsby/src/services/types.ts +++ b/packages/gatsby/src/services/types.ts @@ -3,15 +3,16 @@ import { IProgram } from "../commands/types" import { Runner } from "../bootstrap/create-graphql-runner" import { GraphQLRunner } from "../query/graphql-runner" import { Store, AnyAction } from "redux" -import { IGatsbyState } from "../redux/types" +import { IGatsbyPage, IGatsbyState } from "../redux/types" import { Express } from "express" import type { GatsbyWorkerPool } from "../utils/worker/pool" import { Actor, AnyEventObject } from "xstate" import { Compiler } from "webpack" import { WebsocketManager } from "../utils/websocket-manager" import { IWebpackWatchingPauseResume } from "../utils/start-server" + export interface IGroupedQueryIds { - pageQueryIds: Array + pageQueryIds: Array staticQueryIds: Array } @@ -20,6 +21,7 @@ export interface IMutationAction { payload: Array resolve?: (result: unknown) => void } + export interface IBuildContext { program?: IProgram store?: Store diff --git a/packages/gatsby/src/state-machines/data-layer/types.ts b/packages/gatsby/src/state-machines/data-layer/types.ts index f665c000babeb..415938604c0d5 100644 --- a/packages/gatsby/src/state-machines/data-layer/types.ts +++ b/packages/gatsby/src/state-machines/data-layer/types.ts @@ -5,15 +5,7 @@ import { GraphQLRunner } from "../../query/graphql-runner" import { Store, AnyAction } from "redux" import { IGatsbyState } from "../../redux/types" import type { GatsbyWorkerPool } from "../../utils/worker/pool" -export interface IGroupedQueryIds { - pageQueryIds: Array - staticQueryIds: Array -} -export interface IMutationAction { - type: string - payload: Array -} export interface IDataLayerContext { deferNodeMutation?: boolean nodesMutatedDuringQueryRun?: boolean diff --git a/packages/gatsby/src/state-machines/query-running/types.ts b/packages/gatsby/src/state-machines/query-running/types.ts index 136425b9d8c33..afe80530634f4 100644 --- 
a/packages/gatsby/src/state-machines/query-running/types.ts +++ b/packages/gatsby/src/state-machines/query-running/types.ts @@ -4,7 +4,7 @@ import { Runner } from "../../bootstrap/create-graphql-runner" import { GraphQLRunner } from "../../query/graphql-runner" import { Store, AnyAction } from "redux" import { IGatsbyState } from "../../redux/types" -import { IGroupedQueryIds } from "../data-layer/types" +import { IGroupedQueryIds } from "../../services/types" import { WebsocketManager } from "../../utils/websocket-manager" export interface IQueryRunningContext { diff --git a/packages/gatsby/src/utils/worker/__tests__/fixtures/sample-site/plugins/gatsby-plugin-test/gatsby-node.js b/packages/gatsby/src/utils/worker/__tests__/fixtures/sample-site/plugins/gatsby-plugin-test/gatsby-node.js index 772179fc0b980..4ff7f4342380d 100644 --- a/packages/gatsby/src/utils/worker/__tests__/fixtures/sample-site/plugins/gatsby-plugin-test/gatsby-node.js +++ b/packages/gatsby/src/utils/worker/__tests__/fixtures/sample-site/plugins/gatsby-plugin-test/gatsby-node.js @@ -24,6 +24,19 @@ exports.createResolvers = ({ createResolvers }) => { resolve() { return `Custom String` } + }, + fieldWithArg: { + type: "String", + args: { + isCool: "Boolean" + }, + resolve(source, args) { + if (args.isCool) { + return `You are cool` + } else { + return `You are not cool` + } + } } } } diff --git a/packages/gatsby/src/utils/worker/__tests__/queries.ts b/packages/gatsby/src/utils/worker/__tests__/queries.ts index 10eaf6a0efc75..2ee929e797b71 100644 --- a/packages/gatsby/src/utils/worker/__tests__/queries.ts +++ b/packages/gatsby/src/utils/worker/__tests__/queries.ts @@ -1,3 +1,4 @@ +import "jest-extended" import * as path from "path" import fs from "fs-extra" import type { watch as ChokidarWatchType } from "chokidar" @@ -11,6 +12,9 @@ import { GatsbyTestWorkerPool, } from "./test-helpers" import { getDataStore } from "../../../datastore" +import { IGroupedQueryIds } from "../../../services" +import { IGatsbyPage } from "../../../redux/types" +import { runQueriesInWorkersQueue } from "../pool" let worker: GatsbyTestWorkerPool | undefined @@ -34,6 +38,49 @@ jest.mock(`chokidar`, () => { return chokidar }) +const dummyKeys = `a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z`.split( + `,` +) + +function pagePlaceholders(key): any { + return { + path: `/${key}`, + componentPath: `/${key}.js`, + component: `/${key}.js`, + internalComponentName: `Component/${key}/`, + matchPath: undefined, + componentChunkName: `component--${key}`, + isCreatedByStatefulCreatePages: true, + updatedAt: 1, + // eslint-disable-next-line @typescript-eslint/naming-convention + pluginCreator___NODE: key, + pluginCreatorId: key, + ownerNodeId: key, + } +} + +const dummyPages: Array = dummyKeys.map(name => { + return { + ...pagePlaceholders(name), + query: `{ nodeTypeOne { number } }`, + context: {}, + } +}) + +const dummyPageFoo = { + ...pagePlaceholders(`foo`), + query: `{ nodeTypeOne { number } }`, + context: {}, +} + +const dummyPageBar = { + ...pagePlaceholders(`bar`), + query: `query($var: Boolean) { nodeTypeOne { default: fieldWithArg, fieldWithArg(isCool: true), withVar: fieldWithArg(isCool: $var) } }`, + context: { + var: true, + }, +} + const dummyStaticQuery = { id: `sq--q1`, name: `q1-name`, @@ -42,15 +89,22 @@ const dummyStaticQuery = { hash: `q1-hash`, } -const queryIds = { - pageQueryIds: [], +const pageQueryIds = [dummyPageFoo, dummyPageBar, ...dummyPages] + +const queryIdsSmall: IGroupedQueryIds = { + pageQueryIds: [dummyPageFoo, 
dummyPageBar], + staticQueryIds: [dummyStaticQuery.id], +} + +const queryIdsBig: IGroupedQueryIds = { + pageQueryIds, staticQueryIds: [dummyStaticQuery.id], } describeWhenLMDB(`worker (queries)`, () => { beforeAll(async () => { store.dispatch({ type: `DELETE_CACHE` }) - const fileDir = path.join(process.cwd(), `.cache/redux`) + const fileDir = path.join(process.cwd(), `.cache/worker`) await fs.emptyDir(fileDir) worker = createTestWorker() @@ -63,8 +117,39 @@ describeWhenLMDB(`worker (queries)`, () => { await build({ parentSpan: {} }) + pageQueryIds.forEach(page => { + store.dispatch({ + type: `CREATE_PAGE`, + plugin: { + id: `gatsby-plugin-test`, + name: `gatsby-plugin-test`, + version: `1.0.0`, + }, + payload: { + path: page.path, + componentPath: page.componentPath, + component: page.component, + }, + }) + }) + saveStateForWorkers([`inferenceMetadata`]) + pageQueryIds.forEach(page => { + store.dispatch({ + type: `QUERY_EXTRACTED`, + plugin: { + id: `gatsby-plugin-test`, + name: `gatsby-plugin-test`, + version: `1.0.0`, + }, + payload: { + componentPath: page.componentPath, + query: page.query, + }, + }) + }) + store.dispatch({ type: `REPLACE_STATIC_QUERY`, plugin: { @@ -78,6 +163,7 @@ describeWhenLMDB(`worker (queries)`, () => { saveStateForWorkers([`components`, `staticQueryComponents`]) await worker.buildSchema() + await worker.runQueries(queryIdsSmall) }) afterAll(() => { @@ -91,21 +177,103 @@ describeWhenLMDB(`worker (queries)`, () => { }) it(`should execute static queries`, async () => { - await worker?.runQueries(queryIds) - const stateFromWorker = await worker!.getState() + if (!worker) fail(`worker not defined`) + const stateFromWorker = await worker.getState() const staticQueryResult = await fs.readJson( `${stateFromWorker.program.directory}/public/page-data/sq/d/${dummyStaticQuery.hash}.json` ) - expect(staticQueryResult).toMatchInlineSnapshot(` - Object { - "data": Object { - "nodeTypeOne": Object { - "resolverField": "Custom String", - }, + expect(staticQueryResult).toStrictEqual({ + data: { + nodeTypeOne: { + resolverField: `Custom String`, }, - } - `) + }, + }) + }) + + it(`should execute page queries`, async () => { + if (!worker) fail(`worker not defined`) + const stateFromWorker = await worker.getState() + + const pageQueryResult = await fs.readJson( + `${stateFromWorker.program.directory}/.cache/json/_foo.json` + ) + + expect(pageQueryResult.data).toStrictEqual({ + nodeTypeOne: { + number: 123, + }, + }) + }) + + it(`should execute page queries with context variables`, async () => { + if (!worker) fail(`worker not defined`) + const stateFromWorker = await worker.getState() + + const pageQueryResult = await fs.readJson( + `${stateFromWorker.program.directory}/.cache/json/_bar.json` + ) + + expect(pageQueryResult.data).toStrictEqual({ + nodeTypeOne: { + default: `You are not cool`, + fieldWithArg: `You are cool`, + withVar: `You are cool`, + }, + }) + }) + + it(`should chunk work in runQueriesInWorkersQueue`, async () => { + if (!worker) fail(`worker not defined`) + const spy = jest.spyOn(worker, `runQueries`) + + // @ts-ignore - worker is defined + await runQueriesInWorkersQueue(worker, queryIdsBig, 10) + const stateFromWorker = await worker.getState() + + // Called the complete ABC so we can test _a + const pageQueryResultA = await fs.readJson( + `${stateFromWorker.program.directory}/.cache/json/_a.json` + ) + + expect(pageQueryResultA.data).toStrictEqual({ + nodeTypeOne: { + number: 123, + }, + }) + + const pageQueryResultZ = await fs.readJson( + 
`${stateFromWorker.program.directory}/.cache/json/_z.json` + ) + + expect(pageQueryResultZ.data).toStrictEqual({ + nodeTypeOne: { + number: 123, + }, + }) + + expect(spy).toHaveBeenNthCalledWith(1, { + pageQueryIds: [], + staticQueryIds: expect.toBeArrayOfSize(1), + }) + + expect(spy).toHaveBeenNthCalledWith(2, { + pageQueryIds: expect.toBeArrayOfSize(10), + staticQueryIds: [], + }) + + expect(spy).toHaveBeenNthCalledWith(3, { + pageQueryIds: expect.toBeArrayOfSize(10), + staticQueryIds: [], + }) + + expect(spy).toHaveBeenNthCalledWith(4, { + pageQueryIds: expect.toBeArrayOfSize(8), + staticQueryIds: [], + }) + + spy.mockRestore() }) }) diff --git a/packages/gatsby/src/utils/worker/__tests__/schema.ts b/packages/gatsby/src/utils/worker/__tests__/schema.ts index ffa377d4c4140..8d3c6c796c7dd 100644 --- a/packages/gatsby/src/utils/worker/__tests__/schema.ts +++ b/packages/gatsby/src/utils/worker/__tests__/schema.ts @@ -42,7 +42,7 @@ describeWhenLMDB(`worker (schema)`, () => { beforeAll(async () => { store.dispatch({ type: `DELETE_CACHE` }) - const fileDir = path.join(process.cwd(), `.cache/redux`) + const fileDir = path.join(process.cwd(), `.cache/worker`) await fs.emptyDir(fileDir) worker = createTestWorker() diff --git a/packages/gatsby/src/utils/worker/child/queries.ts b/packages/gatsby/src/utils/worker/child/queries.ts index 6ad3429ff77c0..93a5499e7867f 100644 --- a/packages/gatsby/src/utils/worker/child/queries.ts +++ b/packages/gatsby/src/utils/worker/child/queries.ts @@ -1,4 +1,8 @@ -import { IGroupedQueryIds, runStaticQueries } from "../../../services" +import { + IGroupedQueryIds, + runPageQueries, + runStaticQueries, +} from "../../../services" import { store } from "../../../redux" import { GraphQLRunner } from "../../../query/graphql-runner" import { getDataStore } from "../../../datastore" @@ -30,5 +34,11 @@ export async function runQueries(queryIds: IGroupedQueryIds): Promise { graphqlRunner, }) + await runPageQueries({ + queryIds, + store, + graphqlRunner, + }) + await getDataStore().ready() } diff --git a/packages/gatsby/src/utils/worker/pool.ts b/packages/gatsby/src/utils/worker/pool.ts index 28fccc5f8a831..82823b279d092 100644 --- a/packages/gatsby/src/utils/worker/pool.ts +++ b/packages/gatsby/src/utils/worker/pool.ts @@ -1,7 +1,10 @@ import Worker from "jest-worker" +import { chunk } from "lodash" +import reporter from "gatsby-cli/lib/reporter" import { cpuCoreCount } from "gatsby-core-utils" import type { CreateWorkerPoolType } from "./types" +import { IGroupedQueryIds } from "../../services" export type GatsbyWorkerPool = CreateWorkerPoolType @@ -16,3 +19,44 @@ export const create = (): GatsbyWorkerPool => { delete process.env.GATSBY_WORKER_POOL_WORKER return worker } + +export async function runQueriesInWorkersQueue( + pool: GatsbyWorkerPool, + queryIds: IGroupedQueryIds, + chunkSize = 50 +): Promise { + const staticQuerySegments = chunk(queryIds.staticQueryIds, chunkSize) + const pageQuerySegments = chunk(queryIds.pageQueryIds, chunkSize) + + const activity = reporter.createProgress( + `run queries in workers`, + queryIds.staticQueryIds.length + queryIds.pageQueryIds.length + ) + activity.start() + + const promises: Array> = [] + + for (const segment of staticQuerySegments) { + promises.push( + pool + .runQueries({ pageQueryIds: [], staticQueryIds: segment }) + .then(() => { + activity.tick(segment.length) + }) + ) + } + + for (const segment of pageQuerySegments) { + promises.push( + pool + .runQueries({ pageQueryIds: segment, staticQueryIds: [] }) + .then(() => { + 
activity.tick(segment.length) + }) + ) + } + + await Promise.all(promises) + + activity.end() +} diff --git a/yarn.lock b/yarn.lock index b2944de42c525..a0f82e5a1ebb6 100644 --- a/yarn.lock +++ b/yarn.lock @@ -11843,7 +11843,7 @@ expect@^22.4.0: jest-message-util "^22.4.3" jest-regex-util "^22.4.3" -expect@^24.9.0: +expect@^24.1.0, expect@^24.9.0: version "24.9.0" resolved "https://registry.yarnpkg.com/expect/-/expect-24.9.0.tgz#b75165b4817074fa4a157794f46fe9f1ba15b6ca" integrity sha512-wvVAx8XIol3Z5m9zvZXiyZOQ+sRJqNTIm6sGjdWlaZIeupQGO3WbYI+15D/AmEwZywL6wtJkbAbJtzkOfBuR0Q== @@ -16061,6 +16061,15 @@ jest-environment-node@^26.6.2: jest-mock "^26.6.2" jest-util "^26.6.2" +jest-extended@^0.11.5: + version "0.11.5" + resolved "https://registry.yarnpkg.com/jest-extended/-/jest-extended-0.11.5.tgz#f063b3f1eaadad8d7c13a01f0dfe0f538d498ccf" + integrity sha512-3RsdFpLWKScpsLD6hJuyr/tV5iFOrw7v6YjA3tPdda9sJwoHwcMROws5gwiIZfcwhHlJRwFJB2OUvGmF3evV/Q== + dependencies: + expect "^24.1.0" + jest-get-type "^22.4.3" + jest-matcher-utils "^22.0.0" + jest-get-type@^22.1.0, jest-get-type@^22.4.3: version "22.4.3" resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-22.4.3.tgz#e3a8504d8479342dd4420236b322869f18900ce4" @@ -16220,7 +16229,7 @@ jest-leak-detector@^26.6.2: jest-get-type "^26.3.0" pretty-format "^26.6.2" -jest-matcher-utils@^22.4.0, jest-matcher-utils@^22.4.3: +jest-matcher-utils@^22.0.0, jest-matcher-utils@^22.4.0, jest-matcher-utils@^22.4.3: version "22.4.3" resolved "https://registry.yarnpkg.com/jest-matcher-utils/-/jest-matcher-utils-22.4.3.tgz#4632fe428ebc73ebc194d3c7b65d37b161f710ff" integrity sha512-lsEHVaTnKzdAPR5t4B6OcxXo9Vy4K+kRRbG5gtddY8lBEC+Mlpvm1CJcsMESRjzUhzkz568exMV1hTB76nAKbA==
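
Usage sketch (illustrative only, not part of the patch above): how a caller could wire up the new runQueriesInWorkersQueue helper from packages/gatsby/src/utils/worker/pool.ts. The helper itself, create(), groupQueryIds() and calcDirtyQueryIds() come from the diff; the wrapper function, the import paths (assumed to sit next to pool.ts), and the precondition that worker state/schema has already been prepared are hypothetical — the PR deliberately does not hook this into the build command yet ("remove runQueriesInWorkersQueue from build command for now").

import { store } from "../../redux"
import { calcDirtyQueryIds, groupQueryIds } from "../../query"
import { create as createWorkerPool, runQueriesInWorkersQueue } from "./pool"

// Hypothetical wrapper: assumes saveStateForWorkers() has persisted the needed
// state slices and the workers have built their schema (as the test's
// beforeAll does) before any queries are pushed to the pool.
export async function runDirtyQueriesInWorkers(): Promise<void> {
  const pool = createWorkerPool()

  // groupQueryIds now resolves page paths to full IGatsbyPage objects, so
  // workers receive page metadata (componentPath, context) rather than bare
  // path strings.
  const queryIds = groupQueryIds(calcDirtyQueryIds(store.getState()))

  // With the default chunkSize of 50, e.g. 120 dirty page queries become
  // three pool.runQueries() calls (50 + 50 + 20); each finished chunk ticks
  // the "run queries in workers" progress activity.
  await runQueriesInWorkersQueue(pool, queryIds)
}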