Skip to content

Commit cc004c7

Browse files
committed
feat: query builder integration for typed data transforms
Adds queryOperations to SqlControlStaticContributions for extension function support, and createBuilders<Contract>() for typed dataTransform callbacks. SQL is lowered at verify time via the postgres adapter. Journey test fixes: migration-new now reads storage.storageHash correctly, and the resume-after-failure tests use a unique constraint violation scenario.
1 parent f158985 commit cc004c7

6 files changed

Lines changed: 200 additions & 201 deletions

File tree

test/integration/test/cli-journeys/migration-apply-edge-cases.e2e.test.ts

Lines changed: 17 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -72,14 +72,14 @@ withTempDir(({ createTempDir }) => {
7272
const db = useDevDatabase();
7373

7474
it(
75-
'resumes from last successful migration after empty-table precheck failure',
75+
'resumes from last successful migration after unique constraint violation',
7676
async () => {
7777
const ctx: JourneyContext = setupJourney({
7878
connectionString: db.connectionString,
7979
createTempDir,
8080
});
8181

82-
// Plan and apply initial migration (creates user table)
82+
// Plan and apply initial migration (creates user table with id + email)
8383
const emit0 = await runContractEmit(ctx);
8484
expect(emit0.exitCode, 'emit base').toBe(0);
8585
const plan0 = await runMigrationPlan(ctx, ['--name', 'initial']);
@@ -92,24 +92,22 @@ withTempDir(({ createTempDir }) => {
9292
);
9393
expect(firstResult.migrationsApplied, 'applied 1').toBe(1);
9494

95-
// Insert data so a NOT NULL column addition will fail
95+
// Insert rows with duplicate emails
9696
await sql(
9797
db.connectionString,
98-
`INSERT INTO "user" (id, email) VALUES (1, 'user@example.com')`,
98+
`INSERT INTO "user" (id, email) VALUES (1, 'dup@example.com'), (2, 'dup@example.com')`,
9999
);
100100

101-
// Plan a migration that adds a non-nullable column (will fail on existing rows)
102-
swapContract(ctx, 'contract-additive-required');
101+
// Plan migration that adds a unique constraint on email
102+
swapContract(ctx, 'contract-unique-email');
103103
const emit1 = await runContractEmit(ctx);
104-
expect(emit1.exitCode, 'emit additive-required').toBe(0);
105-
const plan1 = await runMigrationPlan(ctx, ['--name', 'add-required-name']);
106-
expect(plan1.exitCode, 'plan add-required-name').toBe(0);
104+
expect(emit1.exitCode, 'emit unique-email').toBe(0);
105+
const plan1 = await runMigrationPlan(ctx, ['--name', 'add-unique-email']);
106+
expect(plan1.exitCode, 'plan add-unique-email').toBe(0);
107107

108-
// Apply fails because the planner's empty-table precheck rejects adding
109-
// a NOT NULL + UNIQUE column to a non-empty table (temporary default
110-
// strategy is disabled when the column has a UNIQUE constraint).
108+
// Apply fails because duplicate emails violate the unique constraint
111109
const applyFail = await runMigrationApply(ctx, ['--json']);
112-
expect(applyFail.exitCode, 'apply fails on non-empty table precheck').toBe(1);
110+
expect(applyFail.exitCode, 'apply fails on duplicate key').toBe(1);
113111

114112
// Marker stays at the first migration's target hash
115113
const marker = await sql(
@@ -121,9 +119,13 @@ withTempDir(({ createTempDir }) => {
121119
firstResult.markerHash,
122120
);
123121

124-
// Fix: remove conflicting data, then resume
125-
await sql(db.connectionString, 'DELETE FROM "user"');
122+
// Fix: deduplicate emails
123+
await sql(
124+
db.connectionString,
125+
`UPDATE "user" SET email = 'unique@example.com' WHERE id = 2`,
126+
);
126127

128+
// Resume: apply succeeds now that duplicates are resolved
127129
const applyResume = await runMigrationApply(ctx, ['--json']);
128130
expect(applyResume.exitCode, 'resume succeeds').toBe(0);
129131

test/integration/test/cli.migration-apply.e2e.test.ts

Lines changed: 10 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -314,32 +314,30 @@ withTempDir(({ createTempDir }) => {
314314
const firstApply = JSON.parse(consoleOutput.join('\n').trim()) as MigrationApplyResult;
315315
expect(firstApply.migrationsApplied).toBe(1);
316316

317-
// Insert data so a later NOT NULL column addition will fail.
317+
// Insert rows with duplicate emails so a unique constraint will fail.
318318
await withClient(connectionString, async (client) => {
319-
await client.query(`INSERT INTO "user" (id, email) VALUES (1, 'user@example.com')`);
319+
await client.query(
320+
`INSERT INTO "user" (id, email) VALUES (1, 'dup@example.com'), (2, 'dup@example.com')`,
321+
);
320322
});
321323

322-
// Plan second migration that adds a NOT NULL + UNIQUE column.
323-
// The UNIQUE constraint prevents the planner's temporary-default
324-
// strategy (a uniform default would violate uniqueness), so the
325-
// planner falls back to an empty-table precheck that fails here.
324+
// Plan second migration that adds a unique constraint on email.
326325
replaceInFileOrThrow(
327326
contractPath!,
328327
' email: field.column(textColumn),\n',
329-
` email: field.column(textColumn),\n required_name: field.column(textColumn).unique({ name: 'user_required_name_key' }),\n`,
328+
` email: field.column(textColumn).unique({ name: 'user_email_key' }),\n`,
330329
);
331330

332331
await emitContract(testDir, configPath);
333332
await runMigrationPlan(testDir, [
334333
'--config',
335334
configPath,
336335
'--name',
337-
'add_required_name',
336+
'add_unique_email',
338337
'--no-color',
339338
]);
340339

341-
// Apply fails: the empty-table precheck rejects adding a NOT NULL + UNIQUE
342-
// column to a non-empty table.
340+
// Apply fails: duplicate emails violate the unique constraint.
343341
consoleOutput.length = 0;
344342
let failed = false;
345343
try {
@@ -349,9 +347,6 @@ withTempDir(({ createTempDir }) => {
349347
}
350348
expect(failed).toBe(true);
351349
expect(getExitCode()).toBe(1);
352-
const errorOutput = stripAnsi(consoleOutput.join('\n'));
353-
expect(errorOutput).toContain('failed during precheck');
354-
expect(errorOutput).toContain('is empty before adding NOT NULL column');
355350

356351
// Marker must remain at the first migration hash (resume point).
357352
const migrationsDir = join(testDir, 'migrations');
@@ -371,9 +366,9 @@ withTempDir(({ createTempDir }) => {
371366
expect(marker.rows[0]?.core_hash).toBe(firstMigration!.manifest.to);
372367
});
373368

374-
// Make second migration runnable, then re-run apply; it should resume from marker.
369+
// Fix: deduplicate emails, then re-run apply; it should resume from marker.
375370
await withClient(connectionString, async (client) => {
376-
await client.query('DELETE FROM "user"');
371+
await client.query(`UPDATE "user" SET email = 'unique@example.com' WHERE id = 2`);
377372
});
378373

379374
consoleOutput.length = 0;
Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,17 @@
1+
import { int4Column, textColumn } from '@prisma-next/adapter-postgres/column-types';
2+
import sqlFamily from '@prisma-next/family-sql/pack';
3+
import { defineContract, field, model } from '@prisma-next/sql-contract-ts/contract-builder';
4+
import postgresPack from '@prisma-next/target-postgres/pack';
5+
6+
export const contract = defineContract({
7+
family: sqlFamily,
8+
target: postgresPack,
9+
models: {
10+
User: model('User', {
11+
fields: {
12+
id: field.column(int4Column).id(),
13+
email: field.column(textColumn).unique({ name: 'user_email_key' }),
14+
},
15+
}).sql({ table: 'user' }),
16+
},
17+
});
Lines changed: 68 additions & 134 deletions
Original file line numberDiff line numberDiff line change
@@ -1,18 +1,69 @@
1-
import type { CodecTypes as PgCodecTypes } from '@prisma-next/adapter-postgres/codec-types';
21
import { int4Column, textColumn } from '@prisma-next/adapter-postgres/column-types';
3-
import type { CodecTypes as PgVectorCodecTypes } from '@prisma-next/extension-pgvector/codec-types';
42
import { vectorColumn } from '@prisma-next/extension-pgvector/column-types';
53
import pgvector from '@prisma-next/extension-pgvector/pack';
4+
import sqlFamily from '@prisma-next/family-sql/pack';
65
import { uuidv4 } from '@prisma-next/ids';
7-
import { defineContract } from '@prisma-next/sql-contract-ts/contract-builder';
6+
import { defineContract, field, model, rel } from '@prisma-next/sql-contract-ts/contract-builder';
87
import postgresPack from '@prisma-next/target-postgres/pack';
98

10-
type CodecTypes = PgCodecTypes & PgVectorCodecTypes;
9+
const UserBase = model('User', {
10+
fields: {
11+
id: field.column(int4Column).id(),
12+
name: field.column(textColumn),
13+
email: field.column(textColumn),
14+
invitedById: field.column(int4Column).optional().column('invited_by_id'),
15+
},
16+
});
1117

12-
export const contract = defineContract<CodecTypes>()
13-
.target(postgresPack)
14-
.extensionPacks({ pgvector })
15-
.capabilities({
18+
const Post = model('Post', {
19+
fields: {
20+
id: field.column(int4Column).id(),
21+
title: field.column(textColumn),
22+
userId: field.column(int4Column).column('user_id'),
23+
views: field.column(int4Column),
24+
embedding: field.column(vectorColumn).optional(),
25+
},
26+
relations: {
27+
comments: rel.hasMany(() => Comment, { by: 'postId' }),
28+
author: rel.belongsTo(UserBase, { from: 'userId', to: 'id' }),
29+
},
30+
}).sql({ table: 'posts' });
31+
32+
const Comment = model('Comment', {
33+
fields: {
34+
id: field.column(int4Column).id(),
35+
body: field.column(textColumn),
36+
postId: field.column(int4Column).column('post_id'),
37+
},
38+
}).sql({ table: 'comments' });
39+
40+
const Profile = model('Profile', {
41+
fields: {
42+
id: field.column(int4Column).id(),
43+
userId: field.column(int4Column).column('user_id'),
44+
bio: field.column(textColumn),
45+
},
46+
}).sql({ table: 'profiles' });
47+
48+
const Article = model('Article', {
49+
fields: {
50+
id: field.generated(uuidv4()).id(),
51+
title: field.column(textColumn),
52+
},
53+
}).sql({ table: 'articles' });
54+
55+
const User = UserBase.relations({
56+
invitedUsers: rel.hasMany(() => UserBase, { by: 'invitedById' }),
57+
invitedBy: rel.belongsTo(UserBase, { from: 'invitedById', to: 'id' }),
58+
posts: rel.hasMany(() => Post, { by: 'userId' }),
59+
profile: rel.hasOne(Profile, { by: 'userId' }),
60+
}).sql({ table: 'users' });
61+
62+
export const contract = defineContract({
63+
family: sqlFamily,
64+
target: postgresPack,
65+
extensionPacks: { pgvector },
66+
capabilities: {
1667
sql: {
1768
lateral: true,
1869
returning: true,
@@ -33,129 +84,12 @@ export const contract = defineContract<CodecTypes>()
3384
hnsw: true,
3485
vector: true,
3586
},
36-
})
37-
.table('users', (table) =>
38-
table
39-
.column('id', { type: int4Column, nullable: false })
40-
.column('name', { type: textColumn, nullable: false })
41-
.column('email', { type: textColumn, nullable: false })
42-
.column('invited_by_id', { type: int4Column, nullable: true })
43-
.primaryKey(['id']),
44-
)
45-
.table('posts', (table) =>
46-
table
47-
.column('id', { type: int4Column, nullable: false })
48-
.column('title', { type: textColumn, nullable: false })
49-
.column('user_id', { type: int4Column, nullable: false })
50-
.column('views', { type: int4Column, nullable: false })
51-
.column('embedding', { type: vectorColumn, nullable: true })
52-
.primaryKey(['id']),
53-
)
54-
.table('comments', (table) =>
55-
table
56-
.column('id', { type: int4Column, nullable: false })
57-
.column('body', { type: textColumn, nullable: false })
58-
.column('post_id', { type: int4Column, nullable: false })
59-
.primaryKey(['id']),
60-
)
61-
.table('profiles', (table) =>
62-
table
63-
.column('id', { type: int4Column, nullable: false })
64-
.column('user_id', { type: int4Column, nullable: false })
65-
.column('bio', { type: textColumn, nullable: false })
66-
.primaryKey(['id']),
67-
)
68-
.table('articles', (table) =>
69-
table
70-
.generated('id', uuidv4())
71-
.column('title', { type: textColumn, nullable: false })
72-
.primaryKey(['id']),
73-
)
74-
.model('User', 'users', (model) =>
75-
model
76-
.field('id', 'id')
77-
.field('name', 'name')
78-
.field('email', 'email')
79-
.field('invitedById', 'invited_by_id')
80-
.relation('invitedUsers', {
81-
toModel: 'User',
82-
toTable: 'users',
83-
cardinality: '1:N',
84-
on: {
85-
parentTable: 'users',
86-
parentColumns: ['id'],
87-
childTable: 'users',
88-
childColumns: ['invited_by_id'],
89-
},
90-
})
91-
.relation('invitedBy', {
92-
toModel: 'User',
93-
toTable: 'users',
94-
cardinality: 'N:1',
95-
on: {
96-
parentTable: 'users',
97-
parentColumns: ['invited_by_id'],
98-
childTable: 'users',
99-
childColumns: ['id'],
100-
},
101-
})
102-
.relation('posts', {
103-
toModel: 'Post',
104-
toTable: 'posts',
105-
cardinality: '1:N',
106-
on: {
107-
parentTable: 'users',
108-
parentColumns: ['id'],
109-
childTable: 'posts',
110-
childColumns: ['user_id'],
111-
},
112-
})
113-
.relation('profile', {
114-
toModel: 'Profile',
115-
toTable: 'profiles',
116-
cardinality: '1:1',
117-
on: {
118-
parentTable: 'users',
119-
parentColumns: ['id'],
120-
childTable: 'profiles',
121-
childColumns: ['user_id'],
122-
},
123-
}),
124-
)
125-
.model('Post', 'posts', (model) =>
126-
model
127-
.field('id', 'id')
128-
.field('title', 'title')
129-
.field('userId', 'user_id')
130-
.field('views', 'views')
131-
.relation('comments', {
132-
toModel: 'Comment',
133-
toTable: 'comments',
134-
cardinality: '1:N',
135-
on: {
136-
parentTable: 'posts',
137-
parentColumns: ['id'],
138-
childTable: 'comments',
139-
childColumns: ['post_id'],
140-
},
141-
})
142-
.relation('author', {
143-
toModel: 'User',
144-
toTable: 'users',
145-
cardinality: 'N:1',
146-
on: {
147-
parentTable: 'posts',
148-
parentColumns: ['user_id'],
149-
childTable: 'users',
150-
childColumns: ['id'],
151-
},
152-
}),
153-
)
154-
.model('Comment', 'comments', (model) =>
155-
model.field('id', 'id').field('body', 'body').field('postId', 'post_id'),
156-
)
157-
.model('Profile', 'profiles', (model) =>
158-
model.field('id', 'id').field('userId', 'user_id').field('bio', 'bio'),
159-
)
160-
.model('Article', 'articles', (model) => model.field('id', 'id').field('title', 'title'))
161-
.build();
87+
},
88+
models: {
89+
User,
90+
Post,
91+
Comment,
92+
Profile,
93+
Article,
94+
},
95+
});

0 commit comments

Comments (0)