Skip to content

Commit 570ac9d

Browse files
authored
exclude file path from hashing, add debug logs (#24)
1 parent 7d5e628 commit 570ac9d

8 files changed

Lines changed: 133 additions & 14 deletions

File tree

PgCodeGen.scala

Lines changed: 18 additions & 14 deletions
Original file line number | Diff line number | Diff line change
@@ -248,22 +248,13 @@ class PgCodeGen private (
248248
_ <- Future {
249249
if debug then println("Running migrations...")
250250

251-
val sortedFiles = sourceFiles
252-
.map(p =>
253-
MigrationVersion.fromFileName(p.getFileName().toString()) match
254-
case Right(v) => p -> v
255-
case Left(err) => throw Throwable(s"Invalid migration file name: $err")
256-
)
257-
.sortBy((_, version) => version)
258-
.map((path, _) => path)
259-
260251
Zone:
261252
Using(
262253
Database(connectionString).either match
263254
case Left(err) => throw err
264255
case Right(db) => db
265256
)(db =>
266-
sortedFiles.foreach: path =>
257+
sourceFiles.foreach: path =>
267258
if debug then println(s"Running migration for $path")
268259
db.execute(Files.readString(path)).either match
269260
case Left(err) => throw err
@@ -759,12 +750,19 @@ object PgCodeGen {
759750
def listMigrationFiles: Future[(List[Path], String)] = Future:
760751
val digest = MessageDigest.getInstance("SHA-1")
761752
val files = listFilesRec(sourceDir.toPath)
762-
.map(path =>
763-
digest.update(path.toString.getBytes("UTF-8"))
764-
if !Files.isDirectory(path) then digest.update(Files.readAllBytes(path))
753+
.filter(!Files.isDirectory(_))
754+
.map(p =>
755+
MigrationVersion.fromFileName(p.getFileName().toString()) match
756+
case Right(v) => Some(p -> v)
757+
case Left(_) if !p.endsWith(".sql") => None // ignore non .sql files
758+
case Left(err) => throw Throwable(s"Invalid migration file name: $err")
759+
)
760+
.collect { case Some(v) => v }
761+
.sortBy((_, version) => version)
762+
.map((path, _) =>
763+
digest.update(Files.readAllBytes(path))
765764
path
766765
)
767-
.filter(!Files.isDirectory(_))
768766

769767
(files, digest.digest().map("%02x".format(_)).mkString)
770768

@@ -775,7 +773,13 @@ object PgCodeGen {
775773
else
776774
listMigrationFiles.flatMap:
777775
case (sourceFiles, sha1) =>
776+
if debug then println(s"Found ${sourceFiles.length} migration files (SHA1: $sha1)")
777+
778778
val isDivergent = !Files.exists(outDir(sha1))
779+
780+
if debug && isDivergent then
781+
println(s"No generated files found in: ${outDir(sha1).toAbsolutePath().toString()}")
782+
779783
if forceRegeneration || isDivergent then
780784
for
781785
_ <-

test.sh

Lines changed: 18 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -59,6 +59,24 @@ else
5959
exit 1
6060
fi
6161

62+
echo "⏳ running generator again with additional non-sql file and -force=false should not run code generation"
63+
./$CODEGEN_BIN \
64+
-use-docker-image="postgres:17-alpine" \
65+
-output-dir=test-generated \
66+
-pkg-name=generated \
67+
-exclude-tables=unsupported_yet \
68+
-source-dir=test/migrations_copy \
69+
-force=false
70+
71+
TIMESTAMP_D=$(stat test-generated | grep Modify)
72+
73+
if [ "$TIMESTAMP_B" == "$TIMESTAMP_D" ]; then
74+
echo "✅ Code generation with additional non-sql file and -force=false as expected (timestamps are the same)"
75+
else
76+
echo "❌ Error: Code generation with additional non-sql file and -force=false not as expected (timestamps differ)"
77+
exit 1
78+
fi
79+
6280
echo "⏳ running code generator with provided connection"
6381
docker run --rm --name codegentest -e POSTGRES_PASSWORD=postgres -p 5555:5432 -d postgres:17-alpine
6482

Lines changed: 1 addition & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1 @@
1+
CREATE VIEW r_test_view_a AS SELECT 1;
Lines changed: 1 addition & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1 @@
1+
CREATE VIEW r_test_view_b AS SELECT * FROM r_test_view_a;

test/migrations_copy/V1__test.sql

Lines changed: 57 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1,57 @@
1+
CREATE TYPE test_enum_type AS ENUM ('T1_ONE', 't2_two', 't3_Three', 'T4_FOUR', 'T5_FIVE', 'T6Six', 'MULTIPLE_WORD_ENUM');
2+
3+
-- some comment
4+
CREATE TABLE test (
5+
-- ignore this...
6+
id SERIAL PRIMARY KEY,
7+
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
8+
name text,
9+
name_2 varchar NOT NULL,
10+
number int,
11+
template test_enum_type,
12+
type varchar,
13+
tla char(3) NOT NULL,
14+
tla_var varchar(3) NOT NULL,
15+
numeric_default numeric NOT NULL,
16+
numeric_24p numeric(24) NOT NULL,
17+
numeric_16p_2s numeric(16, 2) NOT NULL,
18+
gen INT NOT NULL GENERATED ALWAYS AS (1 + 1) STORED,
19+
gen_opt INT GENERATED ALWAYS AS (1 + 1) STORED
20+
);
21+
22+
CREATE TABLE test_ref_only (
23+
test_id INT NOT NULL REFERENCES test(id) ON DELETE CASCADE
24+
);
25+
26+
CREATE TABLE test_ref (
27+
test_id INT NOT NULL REFERENCES test(id) ON DELETE CASCADE,
28+
ref_name VARCHAR NOT NULL
29+
);
30+
31+
CREATE TABLE test_ref_auto_pk (
32+
id SERIAL PRIMARY KEY,
33+
test_id INT NOT NULL REFERENCES test(id) ON DELETE CASCADE,
34+
ref_name VARCHAR NOT NULL
35+
);
36+
37+
CREATE TABLE test_ref_pk (
38+
id VARCHAR PRIMARY KEY,
39+
test_id INT NOT NULL REFERENCES test(id) ON DELETE CASCADE,
40+
ref_name VARCHAR NOT NULL
41+
);
42+
43+
CREATE MATERIALIZED VIEW test_materialized_view AS SELECT id,
44+
created_at,
45+
name,
46+
name_2,
47+
number
48+
FROM test WITH DATA;
49+
50+
51+
CREATE VIEW test_view AS SELECT id,
52+
created_at,
53+
name,
54+
name_2,
55+
number,
56+
numeric_default
57+
FROM test;
Lines changed: 33 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1,33 @@
1+
CREATE TABLE test_b (
2+
key_a VARCHAR NOT NULL,
3+
key_b VARCHAR NOT NULL,
4+
val_1 VARCHAR NOT NULL,
5+
val_2 VARCHAR NOT NULL,
6+
val_3 VARCHAR NOT NULL,
7+
val_4 VARCHAR NOT NULL,
8+
val_5 VARCHAR NOT NULL,
9+
val_6 VARCHAR NOT NULL,
10+
val_7 VARCHAR NOT NULL,
11+
val_8 VARCHAR NOT NULL,
12+
val_9 VARCHAR NOT NULL,
13+
val_10 VARCHAR NOT NULL,
14+
val_11 VARCHAR NOT NULL,
15+
val_12 VARCHAR NOT NULL,
16+
val_13 VARCHAR NOT NULL,
17+
val_14 VARCHAR NOT NULL,
18+
val_15 VARCHAR NOT NULL,
19+
val_16 VARCHAR NOT NULL,
20+
val_17 VARCHAR NOT NULL,
21+
val_18 VARCHAR NOT NULL,
22+
val_19 VARCHAR NOT NULL,
23+
val_20 VARCHAR NOT NULL,
24+
val_21 VARCHAR NOT NULL,
25+
val_22 VARCHAR NOT NULL,
26+
val_23 VARCHAR NOT NULL,
27+
val_24 VARCHAR NOT NULL,
28+
val_25 VARCHAR NOT NULL,
29+
val_26 VARCHAR[] NOT NULL,
30+
val_27 INT[],
31+
date DATE,
32+
PRIMARY KEY (key_a, key_b)
33+
);
Lines changed: 5 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1,5 @@
1+
CREATE TABLE unsupported_yet (
2+
field_a JSON,
3+
field_b JSONB,
4+
field_c INT[]
5+
);

test/migrations_copy/ignore_me

Whitespace-only changes.

0 commit comments

Comments (0)