diff --git a/packages/libraries/client/src/version.ts b/packages/libraries/client/src/version.ts
index b4dbd172c94..aba5391dfa8 100644
--- a/packages/libraries/client/src/version.ts
+++ b/packages/libraries/client/src/version.ts
@@ -1 +1 @@
-export const version = '0.24.1';
+export const version = '0.24.2';
diff --git a/packages/migrations/package.json b/packages/migrations/package.json
index 7e9a0119713..37483f037d4 100644
--- a/packages/migrations/package.json
+++ b/packages/migrations/package.json
@@ -14,7 +14,7 @@
     "db:init": "pnpm db:create && pnpm migration:run",
     "db:migrator": "node --experimental-specifier-resolution=node --loader ts-node/esm src/index.ts",
     "migration:run": "pnpm db:migrator up",
-    "test": "node --experimental-specifier-resolution=node --loader ts-node/esm ./test/*.test.ts"
+    "test": "TS_NODE_TRANSPILE_ONLY=true node --experimental-specifier-resolution=node --loader ts-node/esm ./test/index.ts"
   },
   "devDependencies": {
     "@graphql-hive/core": "workspace:*",
diff --git a/packages/migrations/src/actions/2023.10.25T14.41.41.schema-checks-dedup.ts b/packages/migrations/src/actions/2023.10.25T14.41.41.schema-checks-dedup.ts
new file mode 100644
index 00000000000..7832e8c94cd
--- /dev/null
+++ b/packages/migrations/src/actions/2023.10.25T14.41.41.schema-checks-dedup.ts
@@ -0,0 +1,27 @@
+import { type MigrationExecutor } from '../pg-migrator';
+
+export default {
+  name: '2023.10.25T14.41.41.schema-checks-dedup.ts',
+  run: ({ sql }) => sql`
+    CREATE TABLE "public"."sdl_store" (
+      "id" text PRIMARY KEY NOT NULL,
+      "sdl" text NOT NULL
+    );
+
+    CREATE UNIQUE INDEX sdl_store_unique_id ON "public"."sdl_store" ("id");
+
+    ALTER TABLE "public"."schema_checks"
+      ADD COLUMN "schema_sdl_store_id" text REFERENCES "public"."sdl_store" ("id"),
+      ADD COLUMN "supergraph_sdl_store_id" text REFERENCES "public"."sdl_store" ("id"),
+      ADD COLUMN "composite_schema_sdl_store_id" text REFERENCES "public"."sdl_store" ("id");
+
+    ALTER TABLE "public"."schema_checks"
+      ALTER COLUMN "schema_sdl" DROP NOT NULL,
+      ALTER COLUMN "supergraph_sdl" DROP NOT NULL,
+      ALTER COLUMN "composite_schema_sdl" DROP NOT NULL;
+
+    CREATE INDEX "schema_check_by_schema_sdl_store_id" ON "public"."schema_checks" ("schema_sdl_store_id" ASC);
+    CREATE INDEX "schema_check_by_supergraph_sdl_store_id" ON "public"."schema_checks" ("supergraph_sdl_store_id" ASC);
+    CREATE INDEX "schema_check_by_composite_schema_sdl_store_id" ON "public"."schema_checks" ("composite_schema_sdl_store_id" ASC);
+  `,
+} satisfies MigrationExecutor;
diff --git a/packages/migrations/src/run-pg-migrations.ts b/packages/migrations/src/run-pg-migrations.ts
index 0388b908e8c..16448847626 100644
--- a/packages/migrations/src/run-pg-migrations.ts
+++ b/packages/migrations/src/run-pg-migrations.ts
@@ -54,6 +54,7 @@ import migration_2023_09_01T09_54_00_zendesk_support from './actions/2023.09.01T
 import migration_2023_09_25T15_23_00_github_check_with_project_name from './actions/2023.09.25T15.23.00.github-check-with-project-name';
 import migration_2023_09_28T14_14_14_native_fed_v2 from './actions/2023.09.28T14.14.14.native-fed-v2';
 import migration_2023_10_05T11_44_36_schema_checks_github_repository from './actions/2023.10.05T11.44.36.schema-checks-github-repository';
+import migration_2023_10_25T14_41_41_schema_checks_dedup from './actions/2023.10.25T14.41.41.schema-checks-dedup';
 import { runMigrations } from './pg-migrator';
 
 export const runPGMigrations = (args: { slonik: DatabasePool; runTo?: string }) =>
@@ -116,5 +117,6 @@ export const runPGMigrations = (args: { slonik: DatabasePool; runTo?: string })
       migration_2023_09_25T15_23_00_github_check_with_project_name,
       migration_2023_09_28T14_14_14_native_fed_v2,
       migration_2023_10_05T11_44_36_schema_checks_github_repository,
+      migration_2023_10_25T14_41_41_schema_checks_dedup,
     ],
   });
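The migration above is the heart of the change: it introduces a content-addressed `sdl_store` table whose `id` is a hash of the SDL text, points `schema_checks` at it through three nullable `*_sdl_store_id` columns, and relaxes the old inline SDL columns to nullable so existing rows stay valid. A minimal sketch of the write pattern this enables, assuming a slonik pool and a precomputed hash (illustrative only; the real write path lives in `createSchemaCheck` further down in this diff):

```ts
import { DatabasePool, sql } from 'slonik';

// Store one SDL document under its content hash; re-inserting the same hash is a no-op,
// so identical SDL text across many schema checks ends up as a single row.
async function storeSdl(pool: DatabasePool, sdlHash: string, sdl: string): Promise<string> {
  await pool.query(sql`
    INSERT INTO "public"."sdl_store" (id, sdl)
    VALUES (${sdlHash}, ${sdl})
    ON CONFLICT (id) DO NOTHING;
  `);
  // The caller then writes sdlHash into schema_checks."schema_sdl_store_id"
  // instead of repeating the SDL in schema_checks."schema_sdl".
  return sdlHash;
}
```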
diff --git a/packages/migrations/test/2023.02.22T09.27.02.delete-personal-org.test.ts.ts b/packages/migrations/test/2023.02.22T09.27.02.delete-personal-org.test.ts
similarity index 97%
rename from packages/migrations/test/2023.02.22T09.27.02.delete-personal-org.test.ts.ts
rename to packages/migrations/test/2023.02.22T09.27.02.delete-personal-org.test.ts
index 50fce45660e..8cf69a03e09 100644
--- a/packages/migrations/test/2023.02.22T09.27.02.delete-personal-org.test.ts.ts
+++ b/packages/migrations/test/2023.02.22T09.27.02.delete-personal-org.test.ts
@@ -3,7 +3,7 @@ import { describe, test } from 'node:test';
 import { sql } from 'slonik';
 import { initMigrationTestingEnvironment } from './utils/testkit';
 
-await describe('migration: drop-personal-org', async () => {
+await describe('drop-personal-org', async () => {
   await test('should remove all existing personal orgs that does not have projects', async () => {
     const { db, runTo, complete, done, seed } = await initMigrationTestingEnvironment();
 
diff --git a/packages/migrations/test/2023.09.25T15.23.00.github-check-with-project-name.test.ts b/packages/migrations/test/2023.09.25T15.23.00.github-check-with-project-name.test.ts
index a83bbe8cfd6..7418008a5b7 100644
--- a/packages/migrations/test/2023.09.25T15.23.00.github-check-with-project-name.test.ts
+++ b/packages/migrations/test/2023.09.25T15.23.00.github-check-with-project-name.test.ts
@@ -3,7 +3,7 @@ import { describe, test } from 'node:test';
 import { sql } from 'slonik';
 import { initMigrationTestingEnvironment } from './utils/testkit';
 
-await describe('migration: github-check-with-project-name', async () => {
+await describe('github-check-with-project-name', async () => {
   await test('should use FALSE for existing projects and TRUE for new', async () => {
     const { db, runTo, complete, done, seed } = await initMigrationTestingEnvironment();
 
diff --git a/packages/migrations/test/2023.10.25T14.41.41.schema-checks-dedup.test.ts b/packages/migrations/test/2023.10.25T14.41.41.schema-checks-dedup.test.ts
new file mode 100644
index 00000000000..8cc0bae1878
--- /dev/null
+++ b/packages/migrations/test/2023.10.25T14.41.41.schema-checks-dedup.test.ts
@@ -0,0 +1,237 @@
+import assert from 'node:assert';
+import { describe, test } from 'node:test';
+import { DatabasePool, sql } from 'slonik';
+import { createStorage } from '../../services/storage/src/index';
+import { initMigrationTestingEnvironment } from './utils/testkit';
+
+async function insertSchemaCheck(
+  pool: DatabasePool,
+  args: {
+    targetId: string;
+    schemaVersionId: string;
+    schemaSDL: string;
+    compositeSchemaSDL: string;
+    supergraphSDL: string;
+  },
+) {
+  return pool.one<{ id: string }>(sql`
+    INSERT INTO "public"."schema_checks" (
+        "schema_sdl"
+      , "target_id"
+      , "schema_version_id"
+      , "is_success"
+      , "composite_schema_sdl"
+      , "supergraph_sdl"
+      , "is_manually_approved"
+    )
+    VALUES (
+        ${args.schemaSDL}
+      , ${args.targetId}
+      , ${args.schemaVersionId}
+      , ${true}
+      , ${args.compositeSchemaSDL}
+      , ${args.supergraphSDL}
+      , ${false}
+    )
+    RETURNING id
+  `);
+}
+
+await describe('migration: schema-checks-dedup', async () => {
+  await test('should incrementally adopt sdl_store without corrupting existing and new data', async () => {
+    const { db, runTo, complete, done, seed, connectionString } =
+      await initMigrationTestingEnvironment();
+    const storage = await createStorage(connectionString, 1);
+    try {
+      // Run migrations all the way to the point before the one we are testing
+      await runTo('2023.08.03T11.44.36.schema-checks-github-repository.ts');
+
+      // Seed the database with some data (schema_sdl, supergraph_sdl, composite_schema_sdl)
+      const user = await seed.user();
+      const organization = await db.one<{
+        id: string;
+      }>(
+        sql`INSERT INTO public.organizations (clean_id, name, user_id) VALUES ('org-1', 'org-1', ${user.id}) RETURNING id;`,
+      );
+      const project = await db.one<{
+        id: string;
+      }>(
+        sql`INSERT INTO public.projects (clean_id, name, type, org_id) VALUES ('proj-1', 'proj-1', 'SINGLE', ${organization.id}) RETURNING id;`,
+      );
+      const target = await db.one<{
+        id: string;
+      }>(
+        sql`INSERT INTO public.targets (clean_id, name, project_id) VALUES ('proj-1', 'proj-1', ${project.id}) RETURNING id;`,
+      );
+
+      const compositeSchemaSDL = `composite schema 1`;
+      const schemaSDL = `schema 1`;
+      const supergraphSDL = `supergraph 1`;
+
+      const secondCompositeSchemaSDL = `composite schema 2`;
+      const secondSchemaSDL = `schema 2`;
+      const secondSupergraphSDL = `supergraph 2`;
+
+      const action = await db.one<{
+        id: string;
+      }>(
+        sql`
+          INSERT INTO public.schema_log
+            (
+              author,
+              commit,
+              sdl,
+              project_id,
+              target_id,
+              action
+            )
+          VALUES
+            (
+              ${'Author 1'},
+              ${'commit 1'}::text,
+              ${compositeSchemaSDL}::text,
+              ${project.id},
+              ${target.id},
+              'PUSH'
+            )
+          RETURNING id
+        `,
+      );
+
+      const version = await db.one<{
+        id: string;
+      }>(
+        sql`
+          INSERT INTO public.schema_versions
+            (
+              is_composable,
+              target_id,
+              action_id,
+              has_persisted_schema_changes,
+              composite_schema_sdl,
+              supergraph_sdl
+            )
+          VALUES
+            (
+              ${true},
+              ${target.id},
+              ${action.id},
+              ${false},
+              ${compositeSchemaSDL},
+              ${supergraphSDL}
+            )
+          RETURNING id
+        `,
+      );
+
+      const firstSchemaCheck = await insertSchemaCheck(db, {
+        targetId: target.id,
+        schemaVersionId: version.id,
+        schemaSDL,
+        compositeSchemaSDL,
+        supergraphSDL,
+      });
+
+      const secondSchemaCheck = await insertSchemaCheck(db, {
+        targetId: target.id,
+        schemaVersionId: version.id,
+        schemaSDL: secondSchemaSDL,
+        compositeSchemaSDL: secondCompositeSchemaSDL,
+        supergraphSDL: secondSupergraphSDL,
+      });
+
+      const thirdSchemaCheck = await insertSchemaCheck(db, {
+        targetId: target.id,
+        schemaVersionId: version.id,
+        schemaSDL,
+        compositeSchemaSDL,
+        supergraphSDL,
+      });
+
+      // Run the additional remaining migrations
+      await complete();
+
+      const newSchemaCheck: {
+        id: string;
+      } = await storage.createSchemaCheck({
+        targetId: target.id,
+        schemaVersionId: version.id,
+        isSuccess: true,
+        isManuallyApproved: false,
+        schemaSDL,
+        schemaSDLHash: 'schemaSDLHash', // serve exact same schema from different hash to make sure it's allowed
+        compositeSchemaSDL,
+        compositeSchemaSDLHash: 'compositeSchemaSDLHash',
+        supergraphSDL,
+        supergraphSDLHash: 'supergraphSDLHash',
+        serviceName: null,
+        manualApprovalUserId: null,
+        githubCheckRunId: null,
+        githubRepository: null,
+        githubSha: null,
+        meta: null,
+        schemaCompositionErrors: null,
+        breakingSchemaChanges: null,
+        safeSchemaChanges: null,
+        schemaPolicyWarnings: null,
+        schemaPolicyErrors: null,
+        expiresAt: null,
+      });
+
+      // make sure SQL statements from Storage are capable of serving SDLs directly from schema_checks
+      const firstCheckFromStorage = await storage.findSchemaCheck({
+        schemaCheckId: firstSchemaCheck.id,
+        targetId: target.id,
+      });
+      assert.strictEqual(firstCheckFromStorage?.schemaSDL, schemaSDL);
+      assert.strictEqual(firstCheckFromStorage?.compositeSchemaSDL, compositeSchemaSDL);
+      assert.strictEqual(firstCheckFromStorage?.supergraphSDL, supergraphSDL);
+      const secondCheckFromStorage = await storage.findSchemaCheck({
+        schemaCheckId: secondSchemaCheck.id,
+        targetId: target.id,
+      });
+      assert.strictEqual(secondCheckFromStorage?.schemaSDL, secondSchemaSDL);
+      assert.strictEqual(secondCheckFromStorage?.compositeSchemaSDL, secondCompositeSchemaSDL);
+      assert.strictEqual(secondCheckFromStorage?.supergraphSDL, secondSupergraphSDL);
+      const thirdCheckFromStorage = await storage.findSchemaCheck({
+        schemaCheckId: thirdSchemaCheck.id,
+        targetId: target.id,
+      });
+      assert.strictEqual(thirdCheckFromStorage?.schemaSDL, schemaSDL);
+      assert.strictEqual(thirdCheckFromStorage?.compositeSchemaSDL, compositeSchemaSDL);
+      assert.strictEqual(thirdCheckFromStorage?.supergraphSDL, supergraphSDL);
+
+      // make sure SQL statements from Storage are capable of serving SDLs from sdl_store
+      const newCheckFromStorage = await storage.findSchemaCheck({
+        schemaCheckId: newSchemaCheck.id,
+        targetId: target.id,
+      });
+      assert.strictEqual(newCheckFromStorage?.schemaSDL, schemaSDL);
+      assert.strictEqual(newCheckFromStorage?.compositeSchemaSDL, compositeSchemaSDL);
+      assert.strictEqual(newCheckFromStorage?.supergraphSDL, supergraphSDL);
+
+      // make sure SDLs in schema_checks are null for new schema checks
+      const schemaChecksWithAllNulls = await db.oneFirst(sql`
+        SELECT count(*) as total FROM schema_checks
+        WHERE
+          schema_sdl IS NULL
+          AND composite_schema_sdl IS NULL
+          AND supergraph_sdl IS NULL
+      `);
+      assert.strictEqual(
+        schemaChecksWithAllNulls,
+        1,
+        'only the new schema check should have nulls instead of SDLs',
+      );
+
+      const countSdlStore = await db.oneFirst(sql`SELECT count(*) as total FROM sdl_store`);
+      assert.strictEqual(
+        countSdlStore,
+        3 /* 3 unique SDLs, only those from the newly created schema check */,
+      );
+    } finally {
+      await done();
+      await storage.destroy();
+    }
+  });
+});
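Note that the hashes passed to `storage.createSchemaCheck` in this test are arbitrary literals: the storage layer treats the hash as an opaque, caller-supplied key, which is why storing the exact same SDL under a different hash is explicitly allowed (and yields separate `sdl_store` rows). Production callers derive the key from the document itself, roughly like this (sketch; the import path is illustrative):

```ts
import { parse } from 'graphql';
// hashSDL is added in packages/services/api/src/shared/entities.ts later in this diff.
import { hashSDL } from '../../services/api/src/shared/entities';

const schemaSDL = /* GraphQL */ `type Query { ping: Boolean }`;
// md5 of the printed, sorted document — a stable key for sdl_store.
const schemaSDLHash = hashSDL(parse(schemaSDL, { noLocation: true }));
```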
diff --git a/packages/migrations/test/index.ts b/packages/migrations/test/index.ts
new file mode 100644
index 00000000000..de2ab241e8e
--- /dev/null
+++ b/packages/migrations/test/index.ts
@@ -0,0 +1,3 @@
+import './2023.02.22T09.27.02.delete-personal-org.test';
+import './2023.09.25T15.23.00.github-check-with-project-name.test';
+import './2023.10.25T14.41.41.schema-checks-dedup.test';
diff --git a/packages/migrations/test/utils/testkit.ts b/packages/migrations/test/utils/testkit.ts
index a5785e9f798..38ac30a0c30 100644
--- a/packages/migrations/test/utils/testkit.ts
+++ b/packages/migrations/test/utils/testkit.ts
@@ -9,8 +9,6 @@ import type * as DbTypes from '../../../services/storage/src/db/types';
 import { createConnectionString } from '../../src/connection-string';
 import { runPGMigrations } from '../../src/run-pg-migrations'
 
-export { DbTypes };
-
 const __dirname = dirname(fileURLToPath(import.meta.url));
 
 config({
@@ -21,21 +19,21 @@ import { env } from '../../src/environment';
 
 export async function initMigrationTestingEnvironment() {
   const pgp = pgpFactory();
-  const db = pgp(
-    createConnectionString({
-      ...env.postgres,
-      db: 'postgres',
-    }),
-  );
-
-  const dbName = 'migration_test_' + Date.now();
+  const db = pgp(createConnectionString({
+    ...env.postgres,
+    db: 'postgres',
+  }));
+
+  const dbName = 'migration_test_' + Date.now() + Math.random().toString(16).substring(2);
+  console.log('db name:', dbName)
   await db.query(`CREATE DATABASE ${dbName};`);
-
+
+  const connectionString = createConnectionString({
+    ...env.postgres,
+    db: dbName,
+  });
   const slonik = await createPool(
-    createConnectionString({
-      ...env.postgres,
-      db: dbName,
-    }),
+    connectionString
   );
 
   const actionsDirectory = resolve(__dirname + '/../../src/actions/');
@@ -44,6 +42,7 @@
 
   return {
+    connectionString,
     db: slonik,
     async runTo(name: string) {
       await runPGMigrations({ slonik, runTo: name });
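The testkit now creates a randomly named throwaway database and also returns its `connectionString`, so a migration test can hand it to `createStorage` and exercise the real storage layer against a half-migrated schema. Condensed into a sketch, the flow the dedup test follows with these helpers looks like this (illustrative, not code from the patch):

```ts
import { test } from 'node:test';
import { createStorage } from '../../services/storage/src/index';
import { initMigrationTestingEnvironment } from './utils/testkit';

await test('example: testing a single migration', async () => {
  const { db, runTo, complete, done, seed, connectionString } =
    await initMigrationTestingEnvironment();
  const storage = await createStorage(connectionString, 1);
  try {
    // 1. migrate up to the migration that precedes the one under test
    await runTo('2023.08.03T11.44.36.schema-checks-github-repository.ts');
    // 2. seed legacy-shaped rows via `seed` / `db`
    // 3. apply the remaining migrations, including the one under test
    await complete();
    // 4. assert on `db` and `storage`
  } finally {
    await done();
    await storage.destroy();
  }
});
```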
diff --git a/packages/services/api/src/modules/schema/providers/schema-publisher.ts b/packages/services/api/src/modules/schema/providers/schema-publisher.ts
index 75aa4745bf4..2a284aa9fee 100644
--- a/packages/services/api/src/modules/schema/providers/schema-publisher.ts
+++ b/packages/services/api/src/modules/schema/providers/schema-publisher.ts
@@ -7,7 +7,14 @@ import { SchemaCheck } from '@hive/storage';
 import * as Sentry from '@sentry/node';
 import type { Span } from '@sentry/types';
 import * as Types from '../../../__generated__/types';
-import { Organization, Project, ProjectType, Schema, Target } from '../../../shared/entities';
+import {
+  hashSDL,
+  Organization,
+  Project,
+  ProjectType,
+  Schema,
+  Target,
+} from '../../../shared/entities';
 import { HiveError } from '../../../shared/errors';
 import { isGitHubRepositoryString } from '../../../shared/is-github-repository-string';
 import { bolderize } from '../../../shared/markdown';
@@ -396,6 +403,11 @@
       if (checkResult.conclusion === SchemaCheckConclusion.Failure) {
         schemaCheck = await this.storage.createSchemaCheck({
           schemaSDL: sdl,
+          schemaSDLHash: hashSDL(
+            parse(sdl, {
+              noLocation: true,
+            }),
+          ),
           serviceName: input.service ?? null,
           meta: input.meta ?? null,
           targetId: target.id,
@@ -409,12 +421,26 @@
             ? {
                 schemaCompositionErrors: checkResult.state.composition.errors,
                 compositeSchemaSDL: null,
+                compositeSchemaSDLHash: null,
                 supergraphSDL: null,
+                supergraphSDLHash: null,
               }
             : {
                 schemaCompositionErrors: null,
                 compositeSchemaSDL: checkResult.state.composition.compositeSchemaSDL,
+                compositeSchemaSDLHash: hashSDL(
+                  parse(checkResult.state.composition.compositeSchemaSDL, {
+                    noLocation: true,
+                  }),
+                ),
                 supergraphSDL: checkResult.state.composition.supergraphSDL,
+                supergraphSDLHash: checkResult.state.composition.supergraphSDL
+                  ? hashSDL(
+                      parse(checkResult.state.composition.supergraphSDL, {
+                        noLocation: true,
+                      }),
+                    )
+                  : null,
               }),
           isManuallyApproved: false,
           manualApprovalUserId: null,
@@ -472,6 +498,11 @@
 
       schemaCheck = await this.storage.createSchemaCheck({
         schemaSDL: sdl,
+        schemaSDLHash: hashSDL(
+          parse(sdl, {
+            noLocation: true,
+          }),
+        ),
        serviceName: input.service ?? null,
        meta: input.meta ?? null,
        targetId: target.id,
@@ -483,7 +514,19 @@
        schemaPolicyErrors: null,
        schemaCompositionErrors: null,
        compositeSchemaSDL: composition.compositeSchemaSDL,
+        compositeSchemaSDLHash: hashSDL(
+          parse(composition.compositeSchemaSDL, {
+            noLocation: true,
+          }),
+        ),
        supergraphSDL: composition.supergraphSDL,
+        supergraphSDLHash: composition.supergraphSDL
+          ? hashSDL(
+              parse(composition.supergraphSDL, {
+                noLocation: true,
+              }),
+            )
+          : null,
        isManuallyApproved: false,
        manualApprovalUserId: null,
        githubCheckRunId: githubCheckRun?.id ?? null,
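Both check code paths now compute the store keys right where the SDL is known, before handing everything to `createSchemaCheck`; the supergraph hash is only computed when a supergraph was actually produced. Stripped of the surrounding call, the pattern amounts to a small helper of this shape (a sketch, not code from the patch):

```ts
import { parse } from 'graphql';
import { hashSDL } from '../../../shared/entities';

// Compute the sdl_store keys for a check; the supergraph hash is optional
// because not every check produces a supergraph.
function computeSdlHashes(sdl: string, compositeSchemaSDL: string, supergraphSDL: string | null) {
  return {
    schemaSDLHash: hashSDL(parse(sdl, { noLocation: true })),
    compositeSchemaSDLHash: hashSDL(parse(compositeSchemaSDL, { noLocation: true })),
    supergraphSDLHash: supergraphSDL ? hashSDL(parse(supergraphSDL, { noLocation: true })) : null,
  };
}
```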
diff --git a/packages/services/api/src/shared/entities.ts b/packages/services/api/src/shared/entities.ts
index dcfab65ec33..43fea752163 100644
--- a/packages/services/api/src/shared/entities.ts
+++ b/packages/services/api/src/shared/entities.ts
@@ -1,4 +1,5 @@
-import { DocumentNode, GraphQLError, parse, SourceLocation } from 'graphql';
+import { createHash } from 'node:crypto';
+import { DocumentNode, GraphQLError, parse, print, SourceLocation } from 'graphql';
 import { z } from 'zod';
 import type { AvailableRulesResponse, PolicyConfigurationObject } from '@hive/policy';
 import type { CompositionFailureError } from '@hive/schema';
@@ -11,6 +12,7 @@ import type {
   ProjectAccessScope,
   TargetAccessScope,
 } from '../__generated__/types';
+import { sortDocumentNode } from './schema';
 
 export const SingleSchemaModel = z
   .object({
@@ -119,6 +121,12 @@ export class GraphQLDocumentStringInvalidError extends Error {
   }
 }
 
+export function hashSDL(sdl: DocumentNode): string {
+  const hasher = createHash('md5');
+  hasher.update(print(sortDocumentNode(sdl)));
+  return hasher.digest('hex');
+}
+
 export function createSchemaObject(
   schema:
     | Pick
diff --git a/packages/services/storage/src/db/types.ts b/packages/services/storage/src/db/types.ts
index 018e9c1d99b..ed5d363611e 100644
--- a/packages/services/storage/src/db/types.ts
+++ b/packages/services/storage/src/db/types.ts
@@ -171,6 +171,7 @@ export interface projects {
 export interface schema_checks {
   breaking_schema_changes: any | null;
   composite_schema_sdl: string | null;
+  composite_schema_sdl_store_id: string | null;
   created_at: Date;
   expires_at: Date | null;
   github_check_run_id: string | null;
@@ -185,10 +186,12 @@ export interface schema_checks {
   schema_composition_errors: any | null;
   schema_policy_errors: any | null;
   schema_policy_warnings: any | null;
-  schema_sdl: string;
+  schema_sdl: string | null;
+  schema_sdl_store_id: string | null;
   schema_version_id: string | null;
   service_name: string | null;
   supergraph_sdl: string | null;
+  supergraph_sdl_store_id: string | null;
   target_id: string;
   updated_at: Date;
 }
@@ -246,6 +249,11 @@ export interface schema_versions {
   target_id: string;
 }
 
+export interface sdl_store {
+  id: string;
+  sdl: string;
+}
+
 export interface target_validation {
   destination_target_id: string;
   target_id: string;
@@ -328,6 +336,7 @@ export interface DBTables {
   schema_version_changes: schema_version_changes;
   schema_version_to_log: schema_version_to_log;
   schema_versions: schema_versions;
+  sdl_store: sdl_store;
   target_validation: target_validation;
   targets: targets;
   tokens: tokens;
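`hashSDL` hashes the printed form of the sorted document rather than the raw string, so SDL that differs only in whitespace or (thanks to `sortDocumentNode`) in definition/field order should land on the same `sdl_store` id. A quick illustration of that intent (sketch; assumes the sorting behaviour of `sortDocumentNode` in `./schema`):

```ts
import { parse } from 'graphql';
import { hashSDL } from './entities';

const a = hashSDL(parse('type Query { b: String a: Int }', { noLocation: true }));
const b = hashSDL(
  parse(
    `type Query {
      a: Int
      b: String
    }`,
    { noLocation: true },
  ),
);

console.log(a === b); // expected: true — both normalize to the same printed document
```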
diff --git a/packages/services/storage/src/index.ts b/packages/services/storage/src/index.ts
index 9300059fb14..a6611be1ef1 100644
--- a/packages/services/storage/src/index.ts
+++ b/packages/services/storage/src/index.ts
@@ -1,4 +1,4 @@
-import { SerializableChange } from 'packages/services/api/src/modules/schema/schema-change-from-meta';
+import type { SerializableChange } from 'packages/services/api/src/modules/schema/schema-change-from-meta';
 import {
   DatabasePool,
   DatabaseTransactionConnection,
@@ -29,8 +29,8 @@ import type {
   User,
 } from '@hive/api';
 import { batch } from '@theguild/buddy';
-import { ProjectType } from '../../api/src';
 import {
+  ProjectType,
   type CDNAccessToken,
   type OIDCIntegration,
   type SchemaLog,
@@ -3436,9 +3436,44 @@
       return DocumentCollectionDocumentModel.parse(result);
     },
     async createSchemaCheck(args) {
-      const result = await pool.maybeOne(sql`
+      const result = await pool.transaction(async trx => {
+        const sdlStoreInserts: Array<Promise<unknown>> = [
+          trx.query(sql`
+            INSERT INTO "public"."sdl_store" (id, sdl)
+            VALUES (${args.schemaSDLHash}, ${args.schemaSDL})
+            ON CONFLICT (id) DO NOTHING;
+          `),
+        ];
+
+        if (args.compositeSchemaSDLHash) {
+          sdlStoreInserts.push(
+            trx.query(sql`
+              INSERT INTO "public"."sdl_store" (id, sdl)
+              VALUES (${args.compositeSchemaSDLHash}, ${args.compositeSchemaSDL})
+              ON CONFLICT (id) DO NOTHING;
+            `),
+          );
+        }
+
+        if (args.supergraphSDLHash) {
+          if (!args.supergraphSDL) {
+            throw new Error('supergraphSDLHash provided without supergraphSDL');
+          }
+
+          sdlStoreInserts.push(
+            trx.query(sql`
+              INSERT INTO "public"."sdl_store" (id, sdl)
+              VALUES (${args.supergraphSDLHash}, ${args.supergraphSDL})
+              ON CONFLICT (id) DO NOTHING;
+            `),
+          );
+        }
+
+        await Promise.all(sdlStoreInserts);
+
+        return trx.one<{ id: string }>(sql`
        INSERT INTO "public"."schema_checks" (
-            "schema_sdl"
+            "schema_sdl_store_id"
          , "service_name"
          , "meta"
          , "target_id"
@@ -3449,8 +3484,8 @@ export async function createStorage(connection: string, maximumPoolSize: number)
          , "safe_schema_changes"
          , "schema_policy_warnings"
          , "schema_policy_errors"
-          , "composite_schema_sdl"
-          , "supergraph_sdl"
+          , "composite_schema_sdl_store_id"
+          , "supergraph_sdl_store_id"
          , "is_manually_approved"
          , "manual_approval_user_id"
          , "github_check_run_id"
@@ -3459,7 +3494,7 @@ export async function createStorage(connection: string, maximumPoolSize: number)
          , "expires_at"
        )
        VALUES (
-            ${args.schemaSDL}
+            ${args.schemaSDLHash}
          , ${args.serviceName}
          , ${jsonify(args.meta)}
          , ${args.targetId}
@@ -3470,8 +3505,8 @@ export async function createStorage(connection: string, maximumPoolSize: number)
          , ${jsonify(args.safeSchemaChanges?.map(toSerializableSchemaChange))}
          , ${jsonify(args.schemaPolicyWarnings?.map(w => SchemaPolicyWarningModel.parse(w)))}
          , ${jsonify(args.schemaPolicyErrors?.map(w => SchemaPolicyWarningModel.parse(w)))}
-          , ${args.compositeSchemaSDL}
-          , ${args.supergraphSDL}
+          , ${args.compositeSchemaSDLHash}
+          , ${args.supergraphSDLHash}
          , ${args.isManuallyApproved}
          , ${args.manualApprovalUserId}
          , ${args.githubCheckRunId}
@@ -3479,21 +3514,33 @@ export async function createStorage(connection: string, maximumPoolSize: number)
          , ${args.githubSha}
          , ${args.expiresAt?.toISOString() ?? null}
        )
-        RETURNING
-          ${schemaCheckSQLFields}
+        RETURNING id
      `);
+      });
 
-      return SchemaCheckModel.parse(result);
+      const check = await this.findSchemaCheck({
+        schemaCheckId: result.id,
+        targetId: args.targetId,
+      });
+
+      if (!check) {
+        throw new Error('Failed to fetch newly created schema check');
+      }
+
+      return check;
     },
     async findSchemaCheck(args) {
      const result = await pool.maybeOne(sql`
        SELECT
          ${schemaCheckSQLFields}
        FROM
-          "public"."schema_checks"
+          "public"."schema_checks" as c
+          LEFT JOIN "public"."sdl_store" as s_schema ON s_schema."id" = c."schema_sdl_store_id"
+          LEFT JOIN "public"."sdl_store" as s_composite_schema ON s_composite_schema."id" = c."composite_schema_sdl_store_id"
+          LEFT JOIN "public"."sdl_store" as s_supergraph ON s_supergraph."id" = c."supergraph_sdl_store_id"
        WHERE
-          "id" = ${args.schemaCheckId}
-          AND "target_id" = ${args.targetId}
+          c."id" = ${args.schemaCheckId}
+          AND c."target_id" = ${args.targetId}
      `);
 
      if (result == null) {
@@ -3503,25 +3550,40 @@
      return SchemaCheckModel.parse(result);
     },
     async setSchemaCheckGithubCheckRunId(args) {
-      const result = await pool.maybeOne(sql`
+      const updateResult = await pool.maybeOne<{
+        id: string;
+      }>(sql`
        UPDATE
          "public"."schema_checks"
        SET
          "github_check_run_id" = ${args.githubCheckRunId}
        WHERE
          "id" = ${args.schemaCheckId}
-        RETURNING
-          ${schemaCheckSQLFields}
+        RETURNING id
      `);
 
-      if (result == null) {
+      if (updateResult == null) {
        return null;
      }
 
+      const result = await pool.maybeOne(sql`
+        SELECT
+          ${schemaCheckSQLFields}
+        FROM
+          "public"."schema_checks" as c
+          LEFT JOIN "public"."sdl_store" as s_schema ON s_schema."id" = c."schema_sdl_store_id"
+          LEFT JOIN "public"."sdl_store" as s_composite_schema ON s_composite_schema."id" = c."composite_schema_sdl_store_id"
+          LEFT JOIN "public"."sdl_store" as s_supergraph ON s_supergraph."id" = c."supergraph_sdl_store_id"
+        WHERE
+          c."id" = ${updateResult.id}
+      `);
+
      return SchemaCheckModel.parse(result);
     },
     async approveFailedSchemaCheck(args) {
-      const result = await pool.maybeOne(sql`
+      const updateResult = await pool.maybeOne<{
+        id: string;
+      }>(sql`
        UPDATE
          "public"."schema_checks"
        SET
@@ -3532,14 +3594,25 @@
          "is_manually_approved" = true,
          "manual_approval_user_id" = ${args.userId}
        WHERE
          "id" = ${args.schemaCheckId}
          AND "is_success" = false
          AND "schema_composition_errors" IS NULL
-        RETURNING
-          ${schemaCheckSQLFields}
+        RETURNING id
      `);
 
-      if (result == null) {
+      if (updateResult == null) {
        return null;
      }
 
+      const result = await pool.maybeOne(sql`
+        SELECT
+          ${schemaCheckSQLFields}
+        FROM
+          "public"."schema_checks" as c
+          LEFT JOIN "public"."sdl_store" as s_schema ON s_schema."id" = c."schema_sdl_store_id"
+          LEFT JOIN "public"."sdl_store" as s_composite_schema ON s_composite_schema."id" = c."composite_schema_sdl_store_id"
+          LEFT JOIN "public"."sdl_store" as s_supergraph ON s_supergraph."id" = c."supergraph_sdl_store_id"
+        WHERE
+          c."id" = ${updateResult.id}
+      `);
+
      return SchemaCheckModel.parse(result);
     },
     async getPaginatedSchemaChecksForTarget(args) {
@@ -3558,26 +3631,29 @@
        SELECT
          ${schemaCheckSQLFields}
        FROM
-          "public"."schema_checks"
+          "public"."schema_checks" as c
+          LEFT JOIN "public"."sdl_store" as s_schema ON s_schema."id" = c."schema_sdl_store_id"
+          LEFT JOIN "public"."sdl_store" as s_composite_schema ON s_composite_schema."id" = c."composite_schema_sdl_store_id"
+          LEFT JOIN "public"."sdl_store" as s_supergraph ON s_supergraph."id" = c."supergraph_sdl_store_id"
        WHERE
-          "target_id" = ${args.targetId}
+          c."target_id" = ${args.targetId}
          ${
            cursor
              ? sql`
                AND (
                  (
-                    "created_at" = ${cursor.createdAt}
-                    AND "id" < ${cursor.id}
+                    c."created_at" = ${cursor.createdAt}
+                    AND c."id" < ${cursor.id}
                  )
-                  OR "created_at" < ${cursor.createdAt}
+                  OR c."created_at" < ${cursor.createdAt}
                )
              `
              : sql``
          }
        ORDER BY
-          "target_id" ASC
-          , "created_at" DESC
-          , "id" DESC
+          c."target_id" ASC
+          , c."created_at" DESC
+          , c."id" DESC
        LIMIT ${limit + 1}
      `);
@@ -4062,27 +4138,27 @@ function toSerializableSchemaChange(change: {
 }
 
 const schemaCheckSQLFields = sql`
-  "id"
-  , to_json("created_at") as "createdAt"
-  , to_json("updated_at") as "updatedAt"
-  , "schema_sdl" as "schemaSDL"
-  , "service_name" as "serviceName"
-  , "meta"
-  , "target_id" as "targetId"
-  , "schema_version_id" as "schemaVersionId"
-  , "is_success" as "isSuccess"
-  , "schema_composition_errors" as "schemaCompositionErrors"
-  , "breaking_schema_changes" as "breakingSchemaChanges"
-  , "safe_schema_changes" as "safeSchemaChanges"
-  , "schema_policy_warnings" as "schemaPolicyWarnings"
-  , "schema_policy_errors" as "schemaPolicyErrors"
-  , "composite_schema_sdl" as "compositeSchemaSDL"
-  , "supergraph_sdl" as "supergraphSDL"
-  , "github_check_run_id" as "githubCheckRunId"
-  , "github_repository" as "githubRepository"
-  , "github_sha" as "githubSha"
-  , coalesce("is_manually_approved", false) as "isManuallyApproved"
-  , "manual_approval_user_id" as "manualApprovalUserId"
+  c."id"
+  , to_json(c."created_at") as "createdAt"
+  , to_json(c."updated_at") as "updatedAt"
+  , coalesce(c."schema_sdl", s_schema."sdl") as "schemaSDL"
+  , c."service_name" as "serviceName"
+  , c."meta"
+  , c."target_id" as "targetId"
+  , c."schema_version_id" as "schemaVersionId"
+  , c."is_success" as "isSuccess"
+  , c."schema_composition_errors" as "schemaCompositionErrors"
+  , c."breaking_schema_changes" as "breakingSchemaChanges"
+  , c."safe_schema_changes" as "safeSchemaChanges"
+  , c."schema_policy_warnings" as "schemaPolicyWarnings"
+  , c."schema_policy_errors" as "schemaPolicyErrors"
+  , coalesce(c."composite_schema_sdl", s_composite_schema."sdl") as "compositeSchemaSDL"
+  , coalesce(c."supergraph_sdl", s_supergraph."sdl") as "supergraphSDL"
+  , c."github_check_run_id" as "githubCheckRunId"
+  , c."github_repository" as "githubRepository"
+  , c."github_sha" as "githubSha"
+  , coalesce(c."is_manually_approved", false) as "isManuallyApproved"
+  , c."manual_approval_user_id" as "manualApprovalUserId"
 `;
 
 const schemaVersionSQLFields = (t = sql``) => sql`
diff --git a/packages/services/storage/src/schema-change-model.ts b/packages/services/storage/src/schema-change-model.ts
index 2cdff9bdb1a..df2f4321f58 100644
--- a/packages/services/storage/src/schema-change-model.ts
+++ b/packages/services/storage/src/schema-change-model.ts
@@ -1,5 +1,5 @@
 /** These mirror DB models from */
-import { RegistryServiceUrlChangeSerializableChange } from 'packages/services/api/src/modules/schema/schema-change-from-meta';
+import type { RegistryServiceUrlChangeSerializableChange } from 'packages/services/api/src/modules/schema/schema-change-from-meta';
 import { z } from 'zod';
 import type {
   ChangeType,
@@ -801,61 +801,53 @@ const SchemaChangeModelWithIsSafeBreakingChange = z.intersection(
   z.object({ isSafeBasedOnUsage: z.boolean().optional() }),
 );
 
-const FailedSchemaCheckPartialModel = z.intersection(
-  z.object({
-    isSuccess: z.literal(false),
+// Schema Checks
 
-    breakingSchemaChanges: z.array(SchemaChangeModelWithIsSafeBreakingChange).nullable(),
-    safeSchemaChanges: z.array(SchemaChangeModelWithIsSafeBreakingChange).nullable(),
+const FailedSchemaCompositionOutputFields = {
+  schemaCompositionErrors: z.array(SchemaCompositionErrorModel),
+  compositeSchemaSDL: z.null(),
+  supergraphSDL: z.null(),
+};
 
-    schemaPolicyWarnings: z.array(SchemaPolicyWarningModel).nullable(),
-    schemaPolicyErrors: z.array(SchemaPolicyWarningModel).nullable(),
+const FailedSchemaCompositionInputFields = {
+  ...FailedSchemaCompositionOutputFields,
+  compositeSchemaSDLHash: z.null(),
+  supergraphSDLHash: z.null(),
+};
 
-    isManuallyApproved: z.literal(false),
-    manualApprovalUserId: z.null(),
-  }),
-  z.union([
-    z.object({
-      schemaCompositionErrors: z.array(SchemaCompositionErrorModel),
+const SuccessfulSchemaCompositionOutputFields = {
+  schemaCompositionErrors: z.null(),
+  compositeSchemaSDL: z.string(),
+  supergraphSDL: z.string().nullable(),
+};
 
-      compositeSchemaSDL: z.null(),
-      supergraphSDL: z.null(),
-    }),
-    z.object({
-      schemaCompositionErrors: z.null(),
+const SuccessfulSchemaCompositionInputFields = {
+  ...SuccessfulSchemaCompositionOutputFields,
+  compositeSchemaSDLHash: z.string(),
+  supergraphSDLHash: z.string().nullable(),
+};
 
-      compositeSchemaSDL: z.string(),
-      supergraphSDL: z.string().nullable(),
-    }),
-  ]),
-);
+const SchemaCheckSharedPolicyFields = {
+  schemaPolicyWarnings: z.array(SchemaPolicyWarningModel).nullable(),
+  schemaPolicyErrors: z.array(SchemaPolicyWarningModel).nullable(),
+};
 
-const SuccessfulSchemaCheckPartialModel = z.intersection(
-  z.object({
-    isSuccess: z.literal(true),
-    schemaCompositionErrors: z.null(),
+const SchemaCheckSharedChangesFields = {
+  safeSchemaChanges: z.array(SchemaChangeModelWithIsSafeBreakingChange).nullable(),
+  breakingSchemaChanges: z.array(SchemaChangeModelWithIsSafeBreakingChange).nullable(),
+};
 
-    safeSchemaChanges: z.array(SchemaChangeModelWithIsSafeBreakingChange).nullable(),
-    breakingSchemaChanges: z.array(SchemaChangeModelWithIsSafeBreakingChange).nullable(),
-    schemaPolicyWarnings: z.array(SchemaPolicyWarningModel).nullable(),
-    schemaPolicyErrors: z.array(SchemaPolicyWarningModel).nullable(),
+const ManuallyApprovedSchemaCheckFields = {
+  isManuallyApproved: z.literal(true),
+  manualApprovalUserId: z.string().nullable(),
+};
 
-    compositeSchemaSDL: z.string(),
-    supergraphSDL: z.string().nullable(),
-  }),
-  z.union([
-    z.object({
-      isManuallyApproved: z.literal(true),
-      manualApprovalUserId: z.string().nullable(),
-    }),
-    z.object({
-      isManuallyApproved: z.literal(false),
-      manualApprovalUserId: z.null(),
-    }),
-  ]),
-);
+const NotManuallyApprovedSchemaCheckFields = {
+  isManuallyApproved: z.literal(false),
+  manualApprovalUserId: z.null(),
+};
 
-const SchemaCheckSharedFieldsModel = z.object({
+const SchemaCheckSharedOutputFields = {
   schemaSDL: z.string(),
   serviceName: z.string().nullable(),
   targetId: z.string(),
@@ -872,35 +864,81 @@
   // we need to improve the model code to reflect that
   githubRepository: z.string().nullable(),
   githubSha: z.string().nullable(),
-});
+};
 
-const SchemaCheckInputModel = z.intersection(
-  SchemaCheckSharedFieldsModel,
-  z.union([FailedSchemaCheckPartialModel, SuccessfulSchemaCheckPartialModel]),
-);
+const SchemaCheckSharedInputFields = {
+  ...SchemaCheckSharedOutputFields,
+  schemaSDLHash: z.string(),
+};
+
+const SchemaCheckInputModel = z.union([
+  z.intersection(
+    z.object({
+      isSuccess: z.literal(false),
+      ...SchemaCheckSharedPolicyFields,
+      ...SchemaCheckSharedChangesFields,
+      ...NotManuallyApprovedSchemaCheckFields,
+      ...SchemaCheckSharedInputFields,
+    }),
+    z.union([
+      z.object(FailedSchemaCompositionInputFields),
+      z.object(SuccessfulSchemaCompositionInputFields),
+    ]),
+  ),
+  z.intersection(
+    z.object({
+      isSuccess: z.literal(true),
+      ...SchemaCheckSharedPolicyFields,
+      ...SchemaCheckSharedChangesFields,
+      ...SuccessfulSchemaCompositionInputFields,
+      ...SchemaCheckSharedInputFields,
+    }),
+    z.union([
+      z.object({ ...ManuallyApprovedSchemaCheckFields }),
+      z.object({ ...NotManuallyApprovedSchemaCheckFields }),
+    ]),
+  ),
+]);
 
-const PersistedSchemaCheckPartialModel = z.object({
+const PersistedSchemaCheckFields = {
   id: z.string(),
   createdAt: z.string(),
   updatedAt: z.string(),
-});
-
-export const FailedSchemaCheckModel = z.intersection(
-  SchemaCheckSharedFieldsModel,
-  z.intersection(PersistedSchemaCheckPartialModel, FailedSchemaCheckPartialModel),
-);
-
-export const SuccessfulSchemaCheckModel = z.intersection(
-  SchemaCheckSharedFieldsModel,
-  z.intersection(PersistedSchemaCheckPartialModel, SuccessfulSchemaCheckPartialModel),
-);
+};
 
-export const SchemaCheckModel = z.union([FailedSchemaCheckModel, SuccessfulSchemaCheckModel]);
+export const SchemaCheckModel = z.union([
+  z.intersection(
+    z.object({
+      isSuccess: z.literal(false),
+      ...SchemaCheckSharedPolicyFields,
+      ...SchemaCheckSharedChangesFields,
+      ...PersistedSchemaCheckFields,
+      ...NotManuallyApprovedSchemaCheckFields,
+      ...SchemaCheckSharedOutputFields,
+    }),
+    z.union([
+      z.object(FailedSchemaCompositionOutputFields),
+      z.object(SuccessfulSchemaCompositionOutputFields),
+    ]),
+  ),
+  z.intersection(
+    z.object({
+      isSuccess: z.literal(true),
+      ...SchemaCheckSharedPolicyFields,
+      ...SchemaCheckSharedChangesFields,
+      ...SuccessfulSchemaCompositionOutputFields,
+      ...PersistedSchemaCheckFields,
+      ...SchemaCheckSharedOutputFields,
+    }),
+    z.union([
+      z.object({ ...ManuallyApprovedSchemaCheckFields }),
+      z.object({ ...NotManuallyApprovedSchemaCheckFields }),
+    ]),
+  ),
+]);
 
 export type SchemaCheckInput = z.TypeOf<typeof SchemaCheckInputModel>;
 export type SchemaCheck = z.TypeOf<typeof SchemaCheckModel>;
-export type FailedSchemaCheck = z.TypeOf<typeof FailedSchemaCheckModel>;
-export type SuccessfulSchemaCheck = z.TypeOf<typeof SuccessfulSchemaCheckModel>;
 
 export const TargetBreadcrumbModel = z.object({
   organization: z.string(),
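The rewritten zod models replace the nested partial models with plain field groups that are spread into one `z.object(...)` per variant: `SchemaCheckInputModel` additionally requires the `*SDLHash` keys (via the `*InputFields` groups), while `SchemaCheckModel` describes what the storage layer returns and therefore carries no hash fields. A small usage sketch of parsing and narrowing the output union (illustrative only):

```ts
import { SchemaCheckModel, type SchemaCheck } from './schema-change-model';

// Parse an arbitrary row into the SchemaCheck union and narrow on the isSuccess literal.
function describeCheck(row: unknown): string {
  const check: SchemaCheck = SchemaCheckModel.parse(row);
  if (check.isSuccess) {
    // successful checks always carry a composite schema SDL and no composition errors
    return `success (composite SDL: ${check.compositeSchemaSDL.length} chars)`;
  }
  return check.schemaCompositionErrors
    ? `failed with ${check.schemaCompositionErrors.length} composition error(s)`
    : 'failed with policy errors or breaking changes';
}
```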