1 change: 1 addition & 0 deletions .claude/skills/pg_dump/SKILL.md
@@ -240,5 +240,6 @@ After consulting pg_dump and implementing in pgschema:
- [ ] Internal/system objects are filtered out
- [ ] Dependencies are tracked correctly
- [ ] Integration test added in `testdata/diff/`
- [ ] Test passes with `go test -v ./internal/diff -run TestDiffFromFiles`
- [ ] Test passes with `go test -v ./cmd -run TestPlanAndApply`
- [ ] Tested against multiple PostgreSQL versions (14-17)
49 changes: 46 additions & 3 deletions cmd/migrate_integration_test.go
@@ -125,6 +125,7 @@ func TestPlanAndApply(t *testing.T) {
// Check if this directory contains the required test files
oldFile := filepath.Join(path, "old.sql")
newFile := filepath.Join(path, "new.sql")
setupFile := filepath.Join(path, "setup.sql")
planSQLFile := filepath.Join(path, "plan.sql")
planJSONFile := filepath.Join(path, "plan.json")
planTXTFile := filepath.Join(path, "plan.txt")
@@ -162,6 +163,7 @@ func TestPlanAndApply(t *testing.T) {
name: testName,
oldFile: oldFile,
newFile: newFile,
setupFile: setupFile,
planSQLFile: planSQLFile,
planJSONFile: planJSONFile,
planTXTFile: planTXTFile,
@@ -191,6 +193,7 @@ type testCase struct {
name string
oldFile string
newFile string
setupFile string
planSQLFile string
planJSONFile string
planTXTFile string
@@ -229,6 +232,19 @@ func runPlanAndApplyTest(t *testing.T, ctx context.Context, container *struct {
t.Fatalf("Failed to create test database %s: %v", dbName, err)
}

// STEP 0: Execute optional setup.sql (for cross-schema setup, extension types, etc.)
if _, err := os.Stat(tc.setupFile); err == nil {
setupContent, err := os.ReadFile(tc.setupFile)
if err != nil {
t.Fatalf("Failed to read setup.sql: %v", err)
}
if len(strings.TrimSpace(string(setupContent))) > 0 {
if err := executeSQL(ctx, containerHost, portMapped, dbName, string(setupContent)); err != nil {
t.Fatalf("Failed to execute setup.sql: %v", err)
}
}
}

// STEP 1: Apply old.sql to initialize database state
oldContent, err := os.ReadFile(tc.oldFile)
if err != nil {
@@ -243,17 +259,44 @@ }
}

// STEP 2: Test plan command with new.sql as target
testPlanOutputs(t, container, dbName, tc.newFile, tc.planSQLFile, tc.planJSONFile, tc.planTXTFile)
// If setup.sql exists, create a temporary combined file (setup + new)
schemaFileForPlan := tc.newFile
if _, err := os.Stat(tc.setupFile); err == nil {
setupContent, err := os.ReadFile(tc.setupFile)
if err != nil {
t.Fatalf("Failed to read setup.sql for plan: %v", err)
}
newContent, err := os.ReadFile(tc.newFile)
if err != nil {
t.Fatalf("Failed to read new.sql for plan: %v", err)
}

// Create temporary combined file
combinedContent := string(setupContent) + "\n\n" + string(newContent)
tmpFile, err := os.CreateTemp("", "pgschema_test_*.sql")
if err != nil {
t.Fatalf("Failed to create temporary file: %v", err)
}
defer tmpFile.Close()
defer os.Remove(tmpFile.Name())

if _, err := tmpFile.WriteString(combinedContent); err != nil {
t.Fatalf("Failed to write to temporary file: %v", err)
}
schemaFileForPlan = tmpFile.Name()
}

testPlanOutputs(t, container, dbName, schemaFileForPlan, tc.planSQLFile, tc.planJSONFile, tc.planTXTFile)

if !*generate {
// STEP 3: Apply the migration using apply command
err = applySchemaChanges(containerHost, portMapped, dbName, container.User, container.Password, "public", tc.newFile)
err = applySchemaChanges(containerHost, portMapped, dbName, container.User, container.Password, "public", schemaFileForPlan)
if err != nil {
t.Fatalf("Failed to apply schema changes using pgschema apply: %v", err)
}

// STEP 4: Test idempotency - plan should produce no changes
secondPlanOutput, err := generatePlanSQLFormatted(containerHost, portMapped, dbName, container.User, container.Password, "public", tc.newFile)
secondPlanOutput, err := generatePlanSQLFormatted(containerHost, portMapped, dbName, container.User, container.Password, "public", schemaFileForPlan)
if err != nil {
t.Fatalf("Failed to generate plan SQL for idempotency check: %v", err)
}
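Note on the new STEP 0 hook: a test case directory may now include an optional setup.sql alongside old.sql and new.sql. It is executed before old.sql and prepended to new.sql for planning and applying, so any objects it creates are present in both the live database and the desired state. A minimal sketch of a cross-schema setup.sql under that convention (the schema name shared and the type priority are invented for illustration; the fixture added in this PR defines its types in public instead):

CREATE SCHEMA IF NOT EXISTS shared;

CREATE TYPE shared.priority AS ENUM ('low', 'normal', 'high');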
29 changes: 22 additions & 7 deletions internal/diff/diff_test.go
@@ -53,11 +53,15 @@ func buildSQLFromSteps(diffs []Diff) string {
return sqlOutput.String()
}

// parseSQL is a helper function to convert SQL string to IR for tests
// Uses embedded PostgreSQL to ensure tests use the same code path as production
func parseSQL(t *testing.T, sql string) *ir.IR {
// parseSQLWithSetup applies optional setup SQL before main SQL, then parses to IR
// This allows tests to have shared setup (e.g., types in different schemas) before the main schema
func parseSQLWithSetup(t *testing.T, setup, sql string) *ir.IR {
t.Helper()
return testutil.ParseSQLToIR(t, sharedTestPostgres, sql, "public")

// Prepend setup SQL to the main SQL; an empty setup contributes only leading blank lines
combinedSQL := setup + "\n\n" + sql

return testutil.ParseSQLToIR(t, sharedTestPostgres, combinedSQL, "public")
}

// TestDiffFromFiles runs file-based diff tests from testdata directory.
@@ -159,6 +163,17 @@ func runFileBasedDiffTest(t *testing.T, oldFile, newFile, diffFile, testName str
// If skipped, ShouldSkipTest will call t.Skipf() and stop execution
testutil.ShouldSkipTest(t, testName, majorVersion)

// Read optional setup.sql (for cross-schema setup, extension types, etc.)
setupFile := filepath.Join(filepath.Dir(oldFile), "setup.sql")
var setupSQL string
if _, err := os.Stat(setupFile); err == nil {
setupContent, err := os.ReadFile(setupFile)
if err != nil {
t.Fatalf("Failed to read setup.sql: %v", err)
}
setupSQL = string(setupContent)
}

// Read old DDL
oldDDL, err := os.ReadFile(oldFile)
if err != nil {
@@ -177,9 +192,9 @@
t.Fatalf("Failed to read plan.sql: %v", err)
}

// Parse DDL to IR
oldIR := parseSQL(t, string(oldDDL))
newIR := parseSQL(t, string(newDDL))
// Parse DDL to IR (with optional setup SQL)
oldIR := parseSQLWithSetup(t, setupSQL, string(oldDDL))
newIR := parseSQLWithSetup(t, setupSQL, string(newDDL))

// Run diff
diffs := GenerateMigration(oldIR, newIR, "public")
14 changes: 14 additions & 0 deletions ir/queries/queries.sql
@@ -81,6 +81,13 @@ SELECT
CASE WHEN dn.nspname = c.table_schema THEN dt.typname
ELSE dn.nspname || '.' || dt.typname
END
WHEN dt.typtype = 'b' THEN
-- Base types: qualify if not in pg_catalog or table's schema
CASE
WHEN dn.nspname = 'pg_catalog' THEN c.udt_name
WHEN dn.nspname = c.table_schema THEN dt.typname
ELSE dn.nspname || '.' || dt.typname
END
ELSE c.udt_name
END AS resolved_type,
c.is_identity,
@@ -136,6 +143,13 @@ SELECT
CASE WHEN dn.nspname = c.table_schema THEN dt.typname
ELSE dn.nspname || '.' || dt.typname
END
WHEN dt.typtype = 'b' THEN
-- Base types: qualify if not in pg_catalog or table's schema
CASE
WHEN dn.nspname = 'pg_catalog' THEN c.udt_name
WHEN dn.nspname = c.table_schema THEN dt.typname
ELSE dn.nspname || '.' || dt.typname
END
ELSE c.udt_name
END AS resolved_type,
c.is_identity,
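Background on the new typtype = 'b' branch: in pg_type, 'b' marks base types, which covers both built-ins such as text and extension-installed types such as citext, while 'd' and 'e' mark domains and enums. Reading the added CASE (with dt/dn as the column's type and its namespace): a citext column on a table in public resolves to plain citext, the same column on a table in another schema resolves to public.citext, and built-in pg_catalog types keep udt_name. A quick way to inspect the catalog, assuming citext is installed and the fixture's user_status enum exists (missing types simply return no rows):

SELECT t.typname, t.typtype, n.nspname
FROM pg_type t
JOIN pg_namespace n ON n.oid = t.typnamespace
WHERE t.typname IN ('text', 'citext', 'user_status');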
3 changes: 3 additions & 0 deletions testdata/diff/create_table/add_column_custom_type/diff.sql
@@ -0,0 +1,3 @@
ALTER TABLE users ADD COLUMN email email_address NOT NULL;

ALTER TABLE users ADD COLUMN status user_status DEFAULT 'active' NOT NULL;
13 changes: 13 additions & 0 deletions testdata/diff/create_table/add_column_custom_type/new.sql
@@ -0,0 +1,13 @@
-- New state: Add columns using custom types from setup.sql
-- Types (email_address, user_status) are created in setup.sql
-- Use unqualified names - types will be resolved via setup.sql context

-- Modified table with new columns using custom types
CREATE TABLE public.users (
id bigint PRIMARY KEY,
username text NOT NULL,
created_at timestamp DEFAULT CURRENT_TIMESTAMP,
-- New columns using types from setup.sql
email email_address NOT NULL,
status user_status NOT NULL DEFAULT 'active'
);
6 changes: 6 additions & 0 deletions testdata/diff/create_table/add_column_custom_type/old.sql
@@ -0,0 +1,6 @@
-- Initial state: Basic table without custom types
CREATE TABLE public.users (
id bigint PRIMARY KEY,
username text NOT NULL,
created_at timestamp DEFAULT CURRENT_TIMESTAMP
);
26 changes: 26 additions & 0 deletions testdata/diff/create_table/add_column_custom_type/plan.json
@@ -0,0 +1,26 @@
{
"version": "1.0.0",
"pgschema_version": "1.4.1",
"created_at": "1970-01-01T00:00:00Z",
"source_fingerprint": {
"hash": "4c7808528324af927b7ef815b8b70beac95edd62ed24c6da247a89cdf74ebb0f"
},
"groups": [
{
"steps": [
{
"sql": "ALTER TABLE users ADD COLUMN email email_address NOT NULL;",
"type": "table.column",
"operation": "create",
"path": "public.users.email"
},
{
"sql": "ALTER TABLE users ADD COLUMN status user_status DEFAULT 'active' NOT NULL;",
"type": "table.column",
"operation": "create",
"path": "public.users.status"
}
]
}
]
}
3 changes: 3 additions & 0 deletions testdata/diff/create_table/add_column_custom_type/plan.sql
@@ -0,0 +1,3 @@
ALTER TABLE users ADD COLUMN email email_address NOT NULL;

ALTER TABLE users ADD COLUMN status user_status DEFAULT 'active' NOT NULL;
16 changes: 16 additions & 0 deletions testdata/diff/create_table/add_column_custom_type/plan.txt
@@ -0,0 +1,16 @@
Plan: 1 to modify.

Summary by type:
tables: 1 to modify

Tables:
~ users
+ email (column)
+ status (column)

DDL to be executed:
--------------------------------------------------

ALTER TABLE users ADD COLUMN email email_address NOT NULL;

ALTER TABLE users ADD COLUMN status user_status DEFAULT 'active' NOT NULL;
14 changes: 14 additions & 0 deletions testdata/diff/create_table/add_column_custom_type/setup.sql
@@ -0,0 +1,14 @@
-- Setup: Create types in public schema
-- This simulates extension types like citext installed in public schema

CREATE TYPE public.email_address AS (
local_part text,
domain text
);

CREATE TYPE public.user_status AS ENUM (
'active',
'inactive',
'suspended',
'pending'
);
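The fixture's types live in the same schema as the table (public), so the new columns resolve to unqualified type names. A hypothetical variant that would exercise the schema-qualified branch of the resolver instead (the schema name app_types is invented for illustration):

CREATE SCHEMA app_types;

CREATE TYPE app_types.user_status AS ENUM ('active', 'inactive');

CREATE TABLE public.users (
    id bigint PRIMARY KEY,
    status app_types.user_status NOT NULL
);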