Skip to content

Commit

Permalink
KV storage to deduplicate SDLs from schema_checks
Browse files Browse the repository at this point in the history
Closes #2603
  • Loading branch information
kamilkisiela committed Oct 26, 2023
1 parent 9fe206d commit ae25aa8
Show file tree
Hide file tree
Showing 14 changed files with 583 additions and 141 deletions.
2 changes: 1 addition & 1 deletion packages/libraries/client/src/version.ts
Original file line number Diff line number Diff line change
@@ -1 +1 @@
export const version = '0.24.1';
export const version = '0.24.2';
2 changes: 1 addition & 1 deletion packages/migrations/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
"db:init": "pnpm db:create && pnpm migration:run",
"db:migrator": "node --experimental-specifier-resolution=node --loader ts-node/esm src/index.ts",
"migration:run": "pnpm db:migrator up",
"test": "node --experimental-specifier-resolution=node --loader ts-node/esm ./test/*.test.ts"
"test": "TS_NODE_TRANSPILE_ONLY=true node --experimental-specifier-resolution=node --loader ts-node/esm ./test/index.ts"
},
"devDependencies": {
"@graphql-hive/core": "workspace:*",
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
import { type MigrationExecutor } from '../pg-migrator';

/**
 * Migration: introduce a KV-style "sdl_store" table so identical SDL documents
 * referenced by many schema_checks rows can be stored once and referenced by id
 * instead of being duplicated per check (closes #2603).
 *
 * Steps, in order:
 *  1. Create "sdl_store" (id text PK -> sdl text).
 *  2. Add a unique index on "sdl_store"."id".
 *     NOTE(review): this looks redundant — the PRIMARY KEY already creates a
 *     unique index on "id"; confirm whether the extra index is intentional.
 *  3. Add three nullable FK columns on "schema_checks", one per SDL kind,
 *     each referencing "sdl_store"."id".
 *  4. Relax NOT NULL on the legacy inline SDL columns so new rows may carry
 *     only the store ids (old rows keep their inline SDLs untouched).
 *  5. Index the new FK columns so checks can be looked up by store id.
 */
export default {
  name: '2023.10.25T14.41.41.schema-checks-dedup.ts',
  run: ({ sql }) => sql`
    CREATE TABLE "public"."sdl_store" (
      "id" text PRIMARY KEY NOT NULL,
      "sdl" text NOT NULL
    );
    CREATE UNIQUE INDEX sdl_store_unique_id ON "public"."sdl_store" ("id");
    ALTER TABLE "public"."schema_checks"
      ADD COLUMN "schema_sdl_store_id" text REFERENCES "public"."sdl_store" ("id"),
      ADD COLUMN "supergraph_sdl_store_id" text REFERENCES "public"."sdl_store" ("id"),
      ADD COLUMN "composite_schema_sdl_store_id" text REFERENCES "public"."sdl_store" ("id");
    ALTER TABLE "public"."schema_checks"
      ALTER COLUMN "schema_sdl" DROP NOT NULL,
      ALTER COLUMN "supergraph_sdl" DROP NOT NULL,
      ALTER COLUMN "composite_schema_sdl" DROP NOT NULL;
    CREATE INDEX "schema_check_by_schema_sdl_store_id" ON "public"."schema_checks" ("schema_sdl_store_id" ASC);
    CREATE INDEX "schema_check_by_supergraph_sdl_store_id" ON "public"."schema_checks" ("supergraph_sdl_store_id" ASC);
    CREATE INDEX "schema_check_by_composite_schema_sdl_store_id" ON "public"."schema_checks" ("composite_schema_sdl_store_id" ASC);
  `,
} satisfies MigrationExecutor;
2 changes: 2 additions & 0 deletions packages/migrations/src/run-pg-migrations.ts
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,7 @@ import migration_2023_09_01T09_54_00_zendesk_support from './actions/2023.09.01T
import migration_2023_09_25T15_23_00_github_check_with_project_name from './actions/2023.09.25T15.23.00.github-check-with-project-name';
import migration_2023_09_28T14_14_14_native_fed_v2 from './actions/2023.09.28T14.14.14.native-fed-v2';
import migration_2023_10_05T11_44_36_schema_checks_github_repository from './actions/2023.10.05T11.44.36.schema-checks-github-repository';
import migration_2023_10_25T14_41_41_schema_checks_dedup from './actions/2023.10.25T14.41.41.schema-checks-dedup';
import { runMigrations } from './pg-migrator';

export const runPGMigrations = (args: { slonik: DatabasePool; runTo?: string }) =>
Expand Down Expand Up @@ -116,5 +117,6 @@ export const runPGMigrations = (args: { slonik: DatabasePool; runTo?: string })
migration_2023_09_25T15_23_00_github_check_with_project_name,
migration_2023_09_28T14_14_14_native_fed_v2,
migration_2023_10_05T11_44_36_schema_checks_github_repository,
migration_2023_10_25T14_41_41_schema_checks_dedup,
],
});
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ import { describe, test } from 'node:test';
import { sql } from 'slonik';
import { initMigrationTestingEnvironment } from './utils/testkit';

await describe('migration: drop-personal-org', async () => {
await describe('drop-personal-org', async () => {
await test('should remove all existing personal orgs that does not have projects', async () => {
const { db, runTo, complete, done, seed } = await initMigrationTestingEnvironment();

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ import { describe, test } from 'node:test';
import { sql } from 'slonik';
import { initMigrationTestingEnvironment } from './utils/testkit';

await describe('migration: github-check-with-project-name', async () => {
await describe('github-check-with-project-name', async () => {
await test('should use FALSE for existing projects and TRUE for new', async () => {
const { db, runTo, complete, done, seed } = await initMigrationTestingEnvironment();

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,237 @@
import assert from 'node:assert';
import { describe, test } from 'node:test';
import { DatabasePool, sql } from 'slonik';
import { createStorage } from '../../services/storage/src/index';
import { initMigrationTestingEnvironment } from './utils/testkit';

/**
 * Seeds one row into "public"."schema_checks" using the pre-migration layout,
 * i.e. with the SDL documents written inline into the legacy columns.
 *
 * The row is always marked successful and not manually approved; callers only
 * vary the target/version ids and the three SDL payloads.
 *
 * @returns the freshly inserted row's `id`.
 */
async function insertSchemaCheck(
  pool: DatabasePool,
  args: {
    targetId: string;
    schemaVersionId: string;
    schemaSDL: string;
    compositeSchemaSDL: string;
    supergraphSDL: string;
  },
) {
  const { targetId, schemaVersionId, schemaSDL, compositeSchemaSDL, supergraphSDL } = args;

  const query = sql`
    INSERT INTO "public"."schema_checks" (
        "schema_sdl"
      , "target_id"
      , "schema_version_id"
      , "is_success"
      , "composite_schema_sdl"
      , "supergraph_sdl"
      , "is_manually_approved"
    )
    VALUES (
        ${schemaSDL}
      , ${targetId}
      , ${schemaVersionId}
      , ${true}
      , ${compositeSchemaSDL}
      , ${supergraphSDL}
      , ${false}
    )
    RETURNING id
  `;

  return pool.one<{ id: string }>(query);
}

// Integration test for the 2023.10.25 "schema-checks-dedup" migration.
//
// Verifies the incremental-adoption contract: rows inserted BEFORE the
// migration keep serving their inline SDL columns, while rows created AFTER
// it (through Storage) write their SDLs into the deduplicating sdl_store
// table and leave the legacy columns NULL.
await describe('migration: schema-checks-dedup', async () => {
  await test('should incrementally adopt sdl_store without corrupting existing and new data', async () => {
    const { db, runTo, complete, done, seed, connectionString } =
      await initMigrationTestingEnvironment();
    // NOTE(review): second argument is presumably the pool size — confirm
    // against createStorage's signature.
    const storage = await createStorage(connectionString, 1);
    try {
      // Run migrations all the way to the point before the one we are testing
      // NOTE(review): the timestamp here (2023.08.03) does not match the
      // migration imported in run-pg-migrations.ts
      // (2023.10.05T11.44.36.schema-checks-github-repository) — confirm this
      // string equals that migration's internal `name` field, otherwise
      // runTo will not stop where intended.
      await runTo('2023.08.03T11.44.36.schema-checks-github-repository.ts');

      // Seed the database with some data (schema_sdl, supergraph_sdl, composite_schema_sdl)
      const user = await seed.user();
      const organization = await db.one<{
        id: string;
      }>(
        sql`INSERT INTO public.organizations (clean_id, name, user_id) VALUES ('org-1', 'org-1', ${user.id}) RETURNING id;`,
      );
      const project = await db.one<{
        id: string;
      }>(
        sql`INSERT INTO public.projects (clean_id, name, type, org_id) VALUES ('proj-1', 'proj-1', 'SINGLE', ${organization.id}) RETURNING id;`,
      );
      const target = await db.one<{
        id: string;
      }>(
        sql`INSERT INTO public.targets (clean_id, name, project_id) VALUES ('proj-1', 'proj-1', ${project.id}) RETURNING id;`,
      );

      // Two distinct SDL triples: the first is reused by checks 1 and 3 (and by
      // the post-migration check) to exercise deduplication; the second keeps
      // check 2 unique.
      const compositeSchemaSDL = `composite schema 1`;
      const schemaSDL = `schema 1`;
      const supergraphSDL = `supergraph 1`;

      const secondCompositeSchemaSDL = `composite schema 2`;
      const secondSchemaSDL = `schema 2`;
      const secondSupergraphSDL = `supergraph 2`;

      // A schema_log "PUSH" entry is required so a schema_versions row can
      // reference it as its action.
      const action = await db.one<{
        id: string;
      }>(
        sql`
          INSERT INTO public.schema_log
            (
              author,
              commit,
              sdl,
              project_id,
              target_id,
              action
            )
          VALUES
            (
              ${'Author 1'},
              ${'commit 1'}::text,
              ${compositeSchemaSDL}::text,
              ${project.id},
              ${target.id},
              'PUSH'
            )
          RETURNING id
        `,
      );

      const version = await db.one<{
        id: string;
      }>(
        sql`
          INSERT INTO public.schema_versions
            (
              is_composable,
              target_id,
              action_id,
              has_persisted_schema_changes,
              composite_schema_sdl,
              supergraph_sdl
            )
          VALUES
            (
              ${true},
              ${target.id},
              ${action.id},
              ${false},
              ${compositeSchemaSDL},
              ${supergraphSDL}
            )
          RETURNING id
        `,
      );

      // Three pre-migration checks, inserted with inline SDL columns.
      const firstSchemaCheck = await insertSchemaCheck(db, {
        targetId: target.id,
        schemaVersionId: version.id,
        schemaSDL,
        compositeSchemaSDL,
        supergraphSDL,
      });

      const secondSchemaCheck = await insertSchemaCheck(db, {
        targetId: target.id,
        schemaVersionId: version.id,
        schemaSDL: secondSchemaSDL,
        compositeSchemaSDL: secondCompositeSchemaSDL,
        supergraphSDL: secondSupergraphSDL,
      });

      // Deliberately identical to the first check — pre-migration rows are
      // expected to keep their duplicated inline SDLs untouched.
      const thirdSchemaCheck = await insertSchemaCheck(db, {
        targetId: target.id,
        schemaVersionId: version.id,
        schemaSDL,
        compositeSchemaSDL,
        supergraphSDL,
      });

      // Run the additional remaining migrations
      await complete();

      // A post-migration check created through Storage: its SDLs should land
      // in sdl_store under the provided hashes, not in the inline columns.
      const newSchemaCheck: {
        id: string;
      } = await storage.createSchemaCheck({
        targetId: target.id,
        schemaVersionId: version.id,
        isSuccess: true,
        isManuallyApproved: false,
        schemaSDL,
        schemaSDLHash: 'schemaSDLHash', // serve exact same schema from different hash to make sure it's allowed
        compositeSchemaSDL,
        compositeSchemaSDLHash: 'compositeSchemaSDLHash',
        supergraphSDL,
        supergraphSDLHash: 'supergraphSDLHash',
        serviceName: null,
        manualApprovalUserId: null,
        githubCheckRunId: null,
        githubRepository: null,
        githubSha: null,
        meta: null,
        schemaCompositionErrors: null,
        breakingSchemaChanges: null,
        safeSchemaChanges: null,
        schemaPolicyWarnings: null,
        schemaPolicyErrors: null,
        expiresAt: null,
      });

      // make sure SQL statements from Storage are capable of serving SDLs directly from schema_checks
      const firstCheckFromStorage = await storage.findSchemaCheck({
        schemaCheckId: firstSchemaCheck.id,
        targetId: target.id,
      });
      assert.strictEqual(firstCheckFromStorage?.schemaSDL, schemaSDL);
      assert.strictEqual(firstCheckFromStorage?.compositeSchemaSDL, compositeSchemaSDL);
      assert.strictEqual(firstCheckFromStorage?.supergraphSDL, supergraphSDL);
      const secondCheckFromStorage = await storage.findSchemaCheck({
        schemaCheckId: secondSchemaCheck.id,
        targetId: target.id,
      });
      assert.strictEqual(secondCheckFromStorage?.schemaSDL, secondSchemaSDL);
      assert.strictEqual(secondCheckFromStorage?.compositeSchemaSDL, secondCompositeSchemaSDL);
      assert.strictEqual(secondCheckFromStorage?.supergraphSDL, secondSupergraphSDL);
      const thirdCheckFromStorage = await storage.findSchemaCheck({
        schemaCheckId: thirdSchemaCheck.id,
        targetId: target.id,
      });
      assert.strictEqual(thirdCheckFromStorage?.schemaSDL, schemaSDL);
      assert.strictEqual(thirdCheckFromStorage?.compositeSchemaSDL, compositeSchemaSDL);
      assert.strictEqual(thirdCheckFromStorage?.supergraphSDL, supergraphSDL);

      // make sure SQL statements from Storage are capable of serving SDLs from sdl_store
      const newCheckFromStorage = await storage.findSchemaCheck({
        schemaCheckId: newSchemaCheck.id,
        targetId: target.id,
      });
      assert.strictEqual(newCheckFromStorage?.schemaSDL, schemaSDL);
      assert.strictEqual(newCheckFromStorage?.compositeSchemaSDL, compositeSchemaSDL);
      assert.strictEqual(newCheckFromStorage?.supergraphSDL, supergraphSDL);

      // make sure SDLs in schema_checks are null for new schema checks
      const schemaChecksWithAllNulls = await db.oneFirst<number>(sql`
        SELECT count(*) as total FROM schema_checks
        WHERE
          schema_sdl IS NULL
          AND composite_schema_sdl IS NULL
          AND supergraph_sdl IS NULL
      `);
      assert.strictEqual(
        schemaChecksWithAllNulls,
        1,
        'only the new schema check should have nulls instead of SDLs',
      );

      // Only the new check wrote to sdl_store: one row per distinct
      // (hash, sdl) pair — three in total. Pre-migration rows were not
      // backfilled into the store.
      const countSdlStore = await db.oneFirst<number>(sql`SELECT count(*) as total FROM sdl_store`);
      assert.strictEqual(
        countSdlStore,
        3 /* 3 unique SDLs, only those from the newly created schema check */,
      );
    } finally {
      // Tear down the per-test database and close the storage pool even when
      // assertions fail.
      await done();
      await storage.destroy();
    }
  });
});
3 changes: 3 additions & 0 deletions packages/migrations/test/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
import './2023.02.22T09.27.02.delete-personal-org.test';
import './2023.09.25T15.23.00.github-check-with-project-name.test';
import './2023.10.25T14.41.41.schema-checks-dedup.test';
29 changes: 14 additions & 15 deletions packages/migrations/test/utils/testkit.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,6 @@ import type * as DbTypes from '../../../services/storage/src/db/types';
import { createConnectionString } from '../../src/connection-string';
import { runPGMigrations } from '../../src/run-pg-migrations'

export { DbTypes };

const __dirname = dirname(fileURLToPath(import.meta.url));

config({
Expand All @@ -21,21 +19,21 @@ import { env } from '../../src/environment';

export async function initMigrationTestingEnvironment() {
const pgp = pgpFactory();
const db = pgp(
createConnectionString({
...env.postgres,
db: 'postgres',
}),
);

const dbName = 'migration_test_' + Date.now();
const db = pgp(createConnectionString({
...env.postgres,
db: 'postgres',
}));

const dbName = 'migration_test_' + Date.now() + Math.random().toString(16).substring(2);
console.log('db name:', dbName)
await db.query(`CREATE DATABASE ${dbName};`);


const connectionString = createConnectionString({
...env.postgres,
db: dbName,
});
const slonik = await createPool(
createConnectionString({
...env.postgres,
db: dbName,
}),
connectionString
);

const actionsDirectory = resolve(__dirname + '/../../src/actions/');
Expand All @@ -44,6 +42,7 @@ export async function initMigrationTestingEnvironment() {


return {
connectionString,
db: slonik,
async runTo(name: string) {
await runPGMigrations({ slonik, runTo: name });
Expand Down
Loading

0 comments on commit ae25aa8

Please sign in to comment.