From db2d39c4cd6fa70914058376eb525f5467cd78e7 Mon Sep 17 00:00:00 2001 From: kanthi subramanian Date: Wed, 11 Feb 2026 22:26:42 -0600 Subject: [PATCH 1/4] Add Docker integration tests. --- .bin/pre-release-docker | 5 + .github/workflows/pre-release.yaml | 25 ++- ice-rest-catalog/pom.xml | 1 - .../rest/catalog/DockerScenarioBasedIT.java | 183 ++++++++++++++++++ .../ice/rest/catalog/RESTCatalogTestBase.java | 97 ++++++++++ .../ice/rest/catalog/ScenarioBasedIT.java | 111 +---------- .../ice/rest/catalog/ScenarioConfig.java | 13 +- .../ice/rest/catalog/ScenarioTestRunner.java | 4 +- .../src/test/resources/scenarios/README.md | 20 -- .../scenarios/basic-operations/input.parquet | Bin 0 -> 2446 bytes .../scenarios/basic-operations/run.sh.tmpl | 8 - .../scenarios/basic-operations/scenario.yaml | 19 -- .../insert-partitioned/scenario.yaml | 19 -- .../scenarios/insert-scan/scenario.yaml | 17 -- 14 files changed, 315 insertions(+), 207 deletions(-) create mode 100644 ice-rest-catalog/src/test/java/com/altinity/ice/rest/catalog/DockerScenarioBasedIT.java create mode 100644 ice-rest-catalog/src/test/resources/scenarios/basic-operations/input.parquet diff --git a/.bin/pre-release-docker b/.bin/pre-release-docker index 2671de32..990a5597 100755 --- a/.bin/pre-release-docker +++ b/.bin/pre-release-docker @@ -8,6 +8,11 @@ export SKIP_VERIFY=1 export PATH="$(pwd)/.bin:$PATH" +echo >&2 'Building ice Docker image' +docker-build-ice +echo >&2 'Building ice-rest-catalog Docker image' +docker-build-ice-rest-catalog + echo >&2 'Pushing ice Docker image' docker-build-ice --push echo >&2 'Pushing ice-rest-catalog Docker image' diff --git a/.github/workflows/pre-release.yaml b/.github/workflows/pre-release.yaml index b1ec2086..fb893650 100644 --- a/.github/workflows/pre-release.yaml +++ b/.github/workflows/pre-release.yaml @@ -49,4 +49,27 @@ jobs: - uses: actions/checkout@v4 - - run: .bin/pre-release-docker + - uses: actions/setup-java@v4 + with: + java-version: '21' + distribution: 'graalvm' + cache: maven + + - name: Build Docker images + run: | + export VERSION=0.0.0-latest-master+$(git rev-parse --short HEAD) + export IMAGE_TAG="latest-master" + export SKIP_VERIFY=1 + export PATH="$(pwd)/.bin:$PATH" + docker-build-ice + docker-build-ice-rest-catalog + + - name: Run Docker integration tests + run: > + ./mvnw -pl ice-rest-catalog install -Dmaven.test.skip=true -Pno-check && + ./mvnw -pl ice-rest-catalog failsafe:integration-test failsafe:verify + -Dit.test=DockerScenarioBasedIT + -Ddocker.image=altinity/ice-rest-catalog:debug-with-ice-latest-master-amd64 + + - name: Push Docker images + run: .bin/pre-release-docker diff --git a/ice-rest-catalog/pom.xml b/ice-rest-catalog/pom.xml index 43a8121e..fda0550b 100644 --- a/ice-rest-catalog/pom.xml +++ b/ice-rest-catalog/pom.xml @@ -393,7 +393,6 @@ com.fasterxml.jackson.dataformat jackson-dataformat-yaml ${jackson.version} - test io.etcd diff --git a/ice-rest-catalog/src/test/java/com/altinity/ice/rest/catalog/DockerScenarioBasedIT.java b/ice-rest-catalog/src/test/java/com/altinity/ice/rest/catalog/DockerScenarioBasedIT.java new file mode 100644 index 00000000..2916589a --- /dev/null +++ b/ice-rest-catalog/src/test/java/com/altinity/ice/rest/catalog/DockerScenarioBasedIT.java @@ -0,0 +1,183 @@ +/* + * Copyright (c) 2025 Altinity Inc and/or its affiliates. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package com.altinity.ice.rest.catalog; + +import java.io.File; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.HashMap; +import java.util.Map; +import org.testcontainers.containers.GenericContainer; +import org.testcontainers.containers.Network; +import org.testcontainers.containers.wait.strategy.Wait; +import org.testcontainers.utility.MountableFile; +import org.testng.annotations.AfterClass; +import org.testng.annotations.BeforeClass; + +/** + * Docker-based integration tests for ICE REST Catalog. + * + *

Runs the ice-rest-catalog Docker image (specified via system property {@code docker.image}) + * alongside a MinIO container, then executes scenario-based tests against it. + */ +public class DockerScenarioBasedIT extends RESTCatalogTestBase { + + private Network network; + + private GenericContainer minio; + + private GenericContainer catalog; + + @Override + @BeforeClass + public void setUp() throws Exception { + String dockerImage = + System.getProperty("docker.image", "altinity/ice-rest-catalog:debug-with-ice-0.12.0"); + logger.info("Using Docker image: {}", dockerImage); + + network = Network.newNetwork(); + + // Start MinIO + minio = + new GenericContainer<>("minio/minio:latest") + .withNetwork(network) + .withNetworkAliases("minio") + .withExposedPorts(9000) + .withEnv("MINIO_ACCESS_KEY", "minioadmin") + .withEnv("MINIO_SECRET_KEY", "minioadmin") + .withCommand("server", "/data") + .waitingFor(Wait.forHttp("/minio/health/live").forPort(9000)); + minio.start(); + + // Create test bucket via MinIO's host-mapped port + String minioHostEndpoint = "http://" + minio.getHost() + ":" + minio.getMappedPort(9000); + try (var s3Client = + software.amazon.awssdk.services.s3.S3Client.builder() + .endpointOverride(java.net.URI.create(minioHostEndpoint)) + .region(software.amazon.awssdk.regions.Region.US_EAST_1) + .credentialsProvider( + software.amazon.awssdk.auth.credentials.StaticCredentialsProvider.create( + software.amazon.awssdk.auth.credentials.AwsBasicCredentials.create( + "minioadmin", "minioadmin"))) + .forcePathStyle(true) + .build()) { + s3Client.createBucket( + software.amazon.awssdk.services.s3.model.CreateBucketRequest.builder() + .bucket("test-bucket") + .build()); + logger.info("Created test-bucket in MinIO"); + } + + // Build YAML config for the catalog container (using the Docker network alias for MinIO) + String catalogConfig = + String.join( + "\n", + "uri: \"jdbc:sqlite::memory:\"", + "warehouse: \"s3://test-bucket/warehouse\"", + "s3:", + " endpoint: \"http://minio:9000\"", + " pathStyleAccess: true", + " accessKeyID: \"minioadmin\"", + " secretAccessKey: \"minioadmin\"", + " region: \"us-east-1\"", + "anonymousAccess:", + " enabled: true", + " accessConfig:", + " readOnly: false", + ""); + + Path scenariosDir = getScenariosDirectory().toAbsolutePath(); + if (!Files.exists(scenariosDir) || !Files.isDirectory(scenariosDir)) { + throw new IllegalStateException( + "Scenarios directory must exist at " + + scenariosDir + + ". Run 'mvn test-compile' or run the test from Maven (e.g. mvn failsafe:integration-test)."); + } + Path insertScanInput = scenariosDir.resolve("insert-scan").resolve("input.parquet"); + if (!Files.exists(insertScanInput)) { + throw new IllegalStateException( + "Scenario input not found at " + + insertScanInput + + ". 
Ensure test resources are on the classpath and scenarios/insert-scan/input.parquet exists."); + } + + // Start the ice-rest-catalog container (debug-with-ice has ice CLI at /usr/local/bin/ice) + GenericContainer catalogContainer = + new GenericContainer<>(dockerImage) + .withNetwork(network) + .withExposedPorts(5000) + .withEnv("ICE_REST_CATALOG_CONFIG", "") + .withEnv("ICE_REST_CATALOG_CONFIG_YAML", catalogConfig) + .withFileSystemBind(scenariosDir.toString(), "/scenarios") + .waitingFor(Wait.forHttp("/v1/config").forPort(5000).forStatusCode(200)); + + catalog = catalogContainer; + try { + catalog.start(); + } catch (Exception e) { + if (catalog != null) { + logger.error("Catalog container logs (stdout): {}", catalog.getLogs()); + } + throw e; + } + + // Copy CLI config into container so ice CLI can talk to co-located REST server + File cliConfigHost = File.createTempFile("ice-docker-cli-", ".yaml"); + try { + Files.write(cliConfigHost.toPath(), "uri: http://localhost:5000\n".getBytes()); + catalog.copyFileToContainer( + MountableFile.forHostPath(cliConfigHost.toPath()), "/tmp/ice-cli.yaml"); + } finally { + cliConfigHost.delete(); + } + + logger.info( + "Catalog container started at {}:{}", catalog.getHost(), catalog.getMappedPort(5000)); + } + + @Override + @AfterClass + public void tearDown() { + if (catalog != null && catalog.isRunning()) { + catalog.stop(); + } + if (minio != null && minio.isRunning()) { + minio.stop(); + } + if (network != null) { + network.close(); + } + } + + @Override + protected ScenarioTestRunner createScenarioRunner(String scenarioName) throws Exception { + Path scenariosDir = getScenariosDirectory(); + + String containerId = catalog.getContainerId(); + + // Wrapper script on host: docker exec ice "$@" (CLI runs inside container) + File wrapperScript = File.createTempFile("ice-docker-exec-", ".sh"); + wrapperScript.deleteOnExit(); + String wrapperContent = "#!/bin/sh\n" + "exec docker exec " + containerId + " ice \"$@\"\n"; + Files.write(wrapperScript.toPath(), wrapperContent.getBytes()); + if (!wrapperScript.setExecutable(true)) { + throw new IllegalStateException("Could not set wrapper script executable: " + wrapperScript); + } + + Map templateVars = new HashMap<>(); + templateVars.put("ICE_CLI", wrapperScript.getAbsolutePath()); + templateVars.put("CLI_CONFIG", "/tmp/ice-cli.yaml"); + templateVars.put("SCENARIO_DIR", "/scenarios/" + scenarioName); + templateVars.put("MINIO_ENDPOINT", ""); + templateVars.put("CATALOG_URI", "http://localhost:5000"); + + return new ScenarioTestRunner(scenariosDir, templateVars); + } +} diff --git a/ice-rest-catalog/src/test/java/com/altinity/ice/rest/catalog/RESTCatalogTestBase.java b/ice-rest-catalog/src/test/java/com/altinity/ice/rest/catalog/RESTCatalogTestBase.java index d7ffcf85..54c94d40 100644 --- a/ice-rest-catalog/src/test/java/com/altinity/ice/rest/catalog/RESTCatalogTestBase.java +++ b/ice-rest-catalog/src/test/java/com/altinity/ice/rest/catalog/RESTCatalogTestBase.java @@ -14,7 +14,12 @@ import com.altinity.ice.rest.catalog.internal.config.Config; import java.io.File; import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.List; import java.util.Map; import org.apache.iceberg.catalog.Catalog; import org.eclipse.jetty.server.Server; @@ -23,6 +28,8 @@ import org.testcontainers.containers.GenericContainer; import org.testng.annotations.AfterClass; import org.testng.annotations.BeforeClass; 
+import org.testng.annotations.DataProvider; +import org.testng.annotations.Test; import software.amazon.awssdk.auth.credentials.AwsBasicCredentials; import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider; import software.amazon.awssdk.regions.Region; @@ -152,4 +159,94 @@ protected String getMinioEndpoint() { protected String getCatalogUri() { return "http://localhost:8080"; } + + /** + * Get the path to the scenarios directory. + * + * @return Path to scenarios directory + * @throws URISyntaxException If the resource URL cannot be converted to a path + */ + protected Path getScenariosDirectory() throws URISyntaxException { + URL scenariosUrl = getClass().getClassLoader().getResource("scenarios"); + if (scenariosUrl == null) { + return Paths.get("src/test/resources/scenarios"); + } + return Paths.get(scenariosUrl.toURI()); + } + + /** + * Create a ScenarioTestRunner for the given scenario. Subclasses provide host or container-based + * CLI and config. + * + * @param scenarioName Name of the scenario (e.g. for container path resolution) + * @return Configured ScenarioTestRunner + * @throws Exception If there's an error creating the runner + */ + protected abstract ScenarioTestRunner createScenarioRunner(String scenarioName) throws Exception; + + /** Data provider that discovers all test scenarios. */ + @DataProvider(name = "scenarios") + public Object[][] scenarioProvider() throws Exception { + Path scenariosDir = getScenariosDirectory(); + ScenarioTestRunner runner = new ScenarioTestRunner(scenariosDir, Map.of()); + List scenarios = runner.discoverScenarios(); + + if (scenarios.isEmpty()) { + logger.warn("No test scenarios found in: {}", scenariosDir); + return new Object[0][0]; + } + + logger.info("Discovered {} test scenario(s): {}", scenarios.size(), scenarios); + + Object[][] data = new Object[scenarios.size()][1]; + for (int i = 0; i < scenarios.size(); i++) { + data[i][0] = scenarios.get(i); + } + return data; + } + + /** Parameterized test that executes a single scenario. */ + @Test(dataProvider = "scenarios") + public void testScenario(String scenarioName) throws Exception { + logger.info("====== Starting scenario test: {} ======", scenarioName); + + ScenarioTestRunner runner = createScenarioRunner(scenarioName); + ScenarioTestRunner.ScenarioResult result = runner.executeScenario(scenarioName); + + if (result.runScriptResult() != null) { + logger.info("Run script exit code: {}", result.runScriptResult().exitCode()); + } + if (result.verifyScriptResult() != null) { + logger.info("Verify script exit code: {}", result.verifyScriptResult().exitCode()); + } + + assertScenarioSuccess(scenarioName, result); + logger.info("====== Scenario test passed: {} ======", scenarioName); + } + + /** Assert that the scenario result indicates success; otherwise throw AssertionError. 
*/ + protected void assertScenarioSuccess( + String scenarioName, ScenarioTestRunner.ScenarioResult result) { + if (result.isSuccess()) { + return; + } + StringBuilder errorMessage = new StringBuilder(); + errorMessage.append("Scenario '").append(scenarioName).append("' failed:\n"); + + if (result.runScriptResult() != null && result.runScriptResult().exitCode() != 0) { + errorMessage.append("\nRun script failed with exit code: "); + errorMessage.append(result.runScriptResult().exitCode()); + errorMessage.append("\nStdout:\n").append(result.runScriptResult().stdout()); + errorMessage.append("\nStderr:\n").append(result.runScriptResult().stderr()); + } + + if (result.verifyScriptResult() != null && result.verifyScriptResult().exitCode() != 0) { + errorMessage.append("\nVerify script failed with exit code: "); + errorMessage.append(result.verifyScriptResult().exitCode()); + errorMessage.append("\nStdout:\n").append(result.verifyScriptResult().stdout()); + errorMessage.append("\nStderr:\n").append(result.verifyScriptResult().stderr()); + } + + throw new AssertionError(errorMessage.toString()); + } } diff --git a/ice-rest-catalog/src/test/java/com/altinity/ice/rest/catalog/ScenarioBasedIT.java b/ice-rest-catalog/src/test/java/com/altinity/ice/rest/catalog/ScenarioBasedIT.java index b4351344..24b098e7 100644 --- a/ice-rest-catalog/src/test/java/com/altinity/ice/rest/catalog/ScenarioBasedIT.java +++ b/ice-rest-catalog/src/test/java/com/altinity/ice/rest/catalog/ScenarioBasedIT.java @@ -10,15 +10,10 @@ package com.altinity.ice.rest.catalog; import java.io.File; -import java.net.URISyntaxException; -import java.net.URL; import java.nio.file.Path; import java.nio.file.Paths; import java.util.HashMap; -import java.util.List; import java.util.Map; -import org.testng.annotations.DataProvider; -import org.testng.annotations.Test; /** * Scenario-based integration tests for ICE REST Catalog. @@ -28,92 +23,8 @@ */ public class ScenarioBasedIT extends RESTCatalogTestBase { - /** - * Data provider that discovers all test scenarios. - * - * @return Array of scenario names to be used as test parameters - * @throws Exception If there's an error discovering scenarios - */ - @DataProvider(name = "scenarios") - public Object[][] scenarioProvider() throws Exception { - Path scenariosDir = getScenariosDirectory(); - ScenarioTestRunner runner = createScenarioRunner(); - - List scenarios = runner.discoverScenarios(); - - if (scenarios.isEmpty()) { - logger.warn("No test scenarios found in: {}", scenariosDir); - return new Object[0][0]; - } - - logger.info("Discovered {} test scenario(s): {}", scenarios.size(), scenarios); - - // Convert to Object[][] for TestNG data provider - Object[][] data = new Object[scenarios.size()][1]; - for (int i = 0; i < scenarios.size(); i++) { - data[i][0] = scenarios.get(i); - } - return data; - } - - /** - * Parameterized test that executes a single scenario. 
- * - * @param scenarioName Name of the scenario to execute - * @throws Exception If the scenario execution fails - */ - @Test(dataProvider = "scenarios") - public void testScenario(String scenarioName) throws Exception { - logger.info("====== Starting scenario test: {} ======", scenarioName); - - ScenarioTestRunner runner = createScenarioRunner(); - ScenarioTestRunner.ScenarioResult result = runner.executeScenario(scenarioName); - - // Log results - if (result.runScriptResult() != null) { - logger.info("Run script exit code: {}", result.runScriptResult().exitCode()); - } - - if (result.verifyScriptResult() != null) { - logger.info("Verify script exit code: {}", result.verifyScriptResult().exitCode()); - } - - // Assert success - if (!result.isSuccess()) { - StringBuilder errorMessage = new StringBuilder(); - errorMessage.append("Scenario '").append(scenarioName).append("' failed:\n"); - - if (result.runScriptResult() != null && result.runScriptResult().exitCode() != 0) { - errorMessage.append("\nRun script failed with exit code: "); - errorMessage.append(result.runScriptResult().exitCode()); - errorMessage.append("\nStdout:\n"); - errorMessage.append(result.runScriptResult().stdout()); - errorMessage.append("\nStderr:\n"); - errorMessage.append(result.runScriptResult().stderr()); - } - - if (result.verifyScriptResult() != null && result.verifyScriptResult().exitCode() != 0) { - errorMessage.append("\nVerify script failed with exit code: "); - errorMessage.append(result.verifyScriptResult().exitCode()); - errorMessage.append("\nStdout:\n"); - errorMessage.append(result.verifyScriptResult().stdout()); - errorMessage.append("\nStderr:\n"); - errorMessage.append(result.verifyScriptResult().stderr()); - } - - throw new AssertionError(errorMessage.toString()); - } - - logger.info("====== Scenario test passed: {} ======", scenarioName); - } - - /** - * Create a ScenarioTestRunner with the appropriate template variables. - * - * @return Configured ScenarioTestRunner - * @throws Exception If there's an error creating the runner - */ - private ScenarioTestRunner createScenarioRunner() throws Exception { + @Override + protected ScenarioTestRunner createScenarioRunner(String scenarioName) throws Exception { Path scenariosDir = getScenariosDirectory(); // Create CLI config file @@ -143,22 +54,4 @@ private ScenarioTestRunner createScenarioRunner() throws Exception { return new ScenarioTestRunner(scenariosDir, templateVars); } - - /** - * Get the path to the scenarios directory. 
- * - * @return Path to scenarios directory - * @throws URISyntaxException If the resource URL cannot be converted to a path - */ - private Path getScenariosDirectory() throws URISyntaxException { - // Get the scenarios directory from test resources - URL scenariosUrl = getClass().getClassLoader().getResource("scenarios"); - - if (scenariosUrl == null) { - // If not found in resources, try relative to project - return Paths.get("src/test/resources/scenarios"); - } - - return Paths.get(scenariosUrl.toURI()); - } } diff --git a/ice-rest-catalog/src/test/java/com/altinity/ice/rest/catalog/ScenarioConfig.java b/ice-rest-catalog/src/test/java/com/altinity/ice/rest/catalog/ScenarioConfig.java index 17d5b169..3d6ae098 100644 --- a/ice-rest-catalog/src/test/java/com/altinity/ice/rest/catalog/ScenarioConfig.java +++ b/ice-rest-catalog/src/test/java/com/altinity/ice/rest/catalog/ScenarioConfig.java @@ -9,7 +9,6 @@ */ package com.altinity.ice.rest.catalog; -import java.util.List; import java.util.Map; /** @@ -21,17 +20,7 @@ public record ScenarioConfig( String name, String description, CatalogConfig catalogConfig, - Map env, - CloudResources cloudResources, - List phases) { + Map env) { public record CatalogConfig(String warehouse, String name, String uri) {} - - public record CloudResources(S3Resources s3, SqsResources sqs) {} - - public record S3Resources(List buckets) {} - - public record SqsResources(List queues) {} - - public record Phase(String name, String description) {} } diff --git a/ice-rest-catalog/src/test/java/com/altinity/ice/rest/catalog/ScenarioTestRunner.java b/ice-rest-catalog/src/test/java/com/altinity/ice/rest/catalog/ScenarioTestRunner.java index 83a6b19d..c74d22b0 100644 --- a/ice-rest-catalog/src/test/java/com/altinity/ice/rest/catalog/ScenarioTestRunner.java +++ b/ice-rest-catalog/src/test/java/com/altinity/ice/rest/catalog/ScenarioTestRunner.java @@ -105,7 +105,9 @@ public ScenarioResult executeScenario(String scenarioName) throws Exception { // Build template variables map Map templateVars = new HashMap<>(globalTemplateVars); - templateVars.put("SCENARIO_DIR", scenarioDir.toAbsolutePath().toString()); + if (!templateVars.containsKey("SCENARIO_DIR")) { + templateVars.put("SCENARIO_DIR", scenarioDir.toAbsolutePath().toString()); + } // Add environment variables from scenario config if (config.env() != null) { diff --git a/ice-rest-catalog/src/test/resources/scenarios/README.md b/ice-rest-catalog/src/test/resources/scenarios/README.md index 0b0ba237..feb6de3e 100644 --- a/ice-rest-catalog/src/test/resources/scenarios/README.md +++ b/ice-rest-catalog/src/test/resources/scenarios/README.md @@ -32,26 +32,6 @@ env: NAMESPACE_NAME: "test_ns" TABLE_NAME: "test_ns.table1" INPUT_FILE: "input.parquet" - -# Optional: Cloud resources needed (for future provisioning) -cloudResources: - s3: - buckets: - - "test-bucket" - sqs: - queues: - - "test-queue" - -# Optional: Test execution phases -phases: - - name: "setup" - description: "Initialize resources" - - name: "run" - description: "Execute main test logic" - - name: "verify" - description: "Verify results" - - name: "cleanup" - description: "Clean up resources" ``` ## Script Templates diff --git a/ice-rest-catalog/src/test/resources/scenarios/basic-operations/input.parquet b/ice-rest-catalog/src/test/resources/scenarios/basic-operations/input.parquet new file mode 100644 index 0000000000000000000000000000000000000000..028c64cf9f7d9ea46bfeeaf940482aaee90a1b54 GIT binary patch literal 2446 zcmZ`*L1-Ii7JgDj^8efVlXj 
z+h~)tReRFWhaP+iY7a$+Qgkq^*-PEa;-d=<2zpU(g6+Yl#l^)qB!vBb$#tBx9gt_{ z{qO(gz4v|Z{Vu=u=TMXW->5#yzNoj-c+To9(buwYj)=C>skKZhHy(ZdSt^wqi|&7y zN?D^(=3;JbG@?w3J<{(uM64qj3mr{oQZ#l|z@TipsH&9LfvnNEcG%`J+fs0g##Ncd zbUX7exWy$nkLrLEd4Po?Zml3GL2Z)^amx`evS2yK8Lb3r>3ybuE+Fm-Q=hc3= zNd<)4#fLqv(M9da4o6rocpN%8X>h^1rgkZHk(+Rj{l-b2p`%XBV1fC_E;2_-tzKG(RB zxnWH|)LsdNuF!!t4fhz?K0wudcULwRJSzEwmaEg@e2aLSeYTC_Yuo}$M_1AGM>@{zU?#KLo&SQ`ZEz>>IwEsSoA>Df=};m6B7Ba zYPh`*Hwha|&mO5BWFjo18v1$OCRwHj$n`+`nLnXJ50r-ichGNo)XJZ@VbL)d_zA^& zgo(2W(^`t(p^AoQy!rplOZJpnJOxz=gF0Hp^ypT|-Ly+QQoK(kQ1y)Z&I9Jjbf985Lr!D=ae)}z)e`vso zO&o&sm({zs?ko@E>94D|@2p?Hb@SG3Y2Mav-&lWt{rWOyZ9#kegVP=yoag^|!e@Gg zR#Gczopx`&49To~aQ*Kq?_Ildy}J63Wj#!Kcp~lO?yT09Z@zSM_5B-ns{_->!1U4j z$}g5&ZT0T6P6N}4Qw~T&A1>csU%mVB1W|q~?H1g>P2%Vob~O&ur@o@oSkK=Z%YQL6 zDPDEN;MBMrIbynhag-}9|3F6rxPXRw2n*3_$6p?|5J?1)tc-D%QCx}lac?$W} z(B!|*pF&9H>%5SA6e=JPiD-;}gstG4MkWHTQ+21e&5i48iNkM-0b*SR?d74NQmGPh0{{e!n B Date: Tue, 17 Feb 2026 16:38:00 -0600 Subject: [PATCH 2/4] Added logic to skip DockerScenarioBasedIT --- .github/workflows/verify.yaml | 40 ++- ice-rest-catalog/pom.xml | 9 + .../rest/catalog/DockerScenarioBasedIT.java | 61 +++-- .../ice/rest/catalog/ScenarioConfig.java | 5 +- .../test/resources/docker-catalog-config.yaml | 12 + .../main/java/com/altinity/ice/cli/Main.java | 63 +++++ .../ice/cli/internal/cmd/DescribeParquet.java | 229 ++++++++++++++++++ .../cli/internal/cmd/DescribeParquetTest.java | 88 +++++++ 8 files changed, 467 insertions(+), 40 deletions(-) create mode 100644 ice-rest-catalog/src/test/resources/docker-catalog-config.yaml create mode 100644 ice/src/main/java/com/altinity/ice/cli/internal/cmd/DescribeParquet.java create mode 100644 ice/src/test/java/com/altinity/ice/cli/internal/cmd/DescribeParquetTest.java diff --git a/.github/workflows/verify.yaml b/.github/workflows/verify.yaml index 6eff3114..c75844db 100644 --- a/.github/workflows/verify.yaml +++ b/.github/workflows/verify.yaml @@ -15,9 +15,45 @@ jobs: distribution: 'graalvm' cache: maven - run: ./mvnw clean verify - - name: Install + - name: Install run: ./mvnw install # TODO: check native-image can build ice - name: Run Scenario-Based Integration Tests run: ../mvnw test -Dtest=ScenarioBasedIT - working-directory: ice-rest-catalog \ No newline at end of file + working-directory: ice-rest-catalog + docker-integration: + runs-on: ubuntu-24.04 + steps: + - uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + # https://github.com/docker/setup-buildx-action + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + - name: Install regctl + uses: regclient/actions/regctl-installer@ce5fd131e371ffcdd7508b478cb223b3511a9183 + - name: regctl login + uses: regclient/actions/regctl-login@ce5fd131e371ffcdd7508b478cb223b3511a9183 + - uses: actions/checkout@v4 + - uses: actions/setup-java@v4 + with: + java-version: '21' + distribution: 'graalvm' + cache: maven + - name: Build Docker images + run: | + export VERSION=0.0.0-latest-master+$(git rev-parse --short HEAD) + export IMAGE_TAG="latest-master" + export SKIP_VERIFY=1 + export PATH="$(pwd)/.bin:$PATH" + docker-build-ice + docker-build-ice-rest-catalog + - name: Run Docker integration tests + run: > + ./mvnw -pl ice-rest-catalog install -DskipTests=true -Pno-check && + ./mvnw -pl ice-rest-catalog failsafe:integration-test failsafe:verify + -Dit.test=DockerScenarioBasedIT + 
-Ddocker.image=altinity/ice-rest-catalog:debug-with-ice-latest-master-amd64 diff --git a/ice-rest-catalog/pom.xml b/ice-rest-catalog/pom.xml index fda0550b..38053fdb 100644 --- a/ice-rest-catalog/pom.xml +++ b/ice-rest-catalog/pom.xml @@ -558,6 +558,15 @@ + + org.apache.maven.plugins + maven-failsafe-plugin + + + **/DockerScenarioBasedIT.java + + + org.apache.maven.plugins maven-shade-plugin diff --git a/ice-rest-catalog/src/test/java/com/altinity/ice/rest/catalog/DockerScenarioBasedIT.java b/ice-rest-catalog/src/test/java/com/altinity/ice/rest/catalog/DockerScenarioBasedIT.java index 2916589a..f3fc0889 100644 --- a/ice-rest-catalog/src/test/java/com/altinity/ice/rest/catalog/DockerScenarioBasedIT.java +++ b/ice-rest-catalog/src/test/java/com/altinity/ice/rest/catalog/DockerScenarioBasedIT.java @@ -10,8 +10,11 @@ package com.altinity.ice.rest.catalog; import java.io.File; +import java.net.URI; +import java.net.URL; import java.nio.file.Files; import java.nio.file.Path; +import java.nio.file.Paths; import java.util.HashMap; import java.util.Map; import org.testcontainers.containers.GenericContainer; @@ -20,6 +23,11 @@ import org.testcontainers.utility.MountableFile; import org.testng.annotations.AfterClass; import org.testng.annotations.BeforeClass; +import software.amazon.awssdk.auth.credentials.AwsBasicCredentials; +import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider; +import software.amazon.awssdk.regions.Region; +import software.amazon.awssdk.services.s3.S3Client; +import software.amazon.awssdk.services.s3.model.CreateBucketRequest; /** * Docker-based integration tests for ICE REST Catalog. @@ -37,6 +45,7 @@ public class DockerScenarioBasedIT extends RESTCatalogTestBase { @Override @BeforeClass + @SuppressWarnings("resource") public void setUp() throws Exception { String dockerImage = System.getProperty("docker.image", "altinity/ice-rest-catalog:debug-with-ice-0.12.0"); @@ -59,39 +68,24 @@ public void setUp() throws Exception { // Create test bucket via MinIO's host-mapped port String minioHostEndpoint = "http://" + minio.getHost() + ":" + minio.getMappedPort(9000); try (var s3Client = - software.amazon.awssdk.services.s3.S3Client.builder() - .endpointOverride(java.net.URI.create(minioHostEndpoint)) - .region(software.amazon.awssdk.regions.Region.US_EAST_1) + S3Client.builder() + .endpointOverride(URI.create(minioHostEndpoint)) + .region(Region.US_EAST_1) .credentialsProvider( - software.amazon.awssdk.auth.credentials.StaticCredentialsProvider.create( - software.amazon.awssdk.auth.credentials.AwsBasicCredentials.create( - "minioadmin", "minioadmin"))) + StaticCredentialsProvider.create( + AwsBasicCredentials.create("minioadmin", "minioadmin"))) .forcePathStyle(true) .build()) { - s3Client.createBucket( - software.amazon.awssdk.services.s3.model.CreateBucketRequest.builder() - .bucket("test-bucket") - .build()); + s3Client.createBucket(CreateBucketRequest.builder().bucket("test-bucket").build()); logger.info("Created test-bucket in MinIO"); } - // Build YAML config for the catalog container (using the Docker network alias for MinIO) - String catalogConfig = - String.join( - "\n", - "uri: \"jdbc:sqlite::memory:\"", - "warehouse: \"s3://test-bucket/warehouse\"", - "s3:", - " endpoint: \"http://minio:9000\"", - " pathStyleAccess: true", - " accessKeyID: \"minioadmin\"", - " secretAccessKey: \"minioadmin\"", - " region: \"us-east-1\"", - "anonymousAccess:", - " enabled: true", - " accessConfig:", - " readOnly: false", - ""); + // Load YAML config for the catalog 
container (MinIO via Docker network alias "minio") + URL configResource = getClass().getClassLoader().getResource("docker-catalog-config.yaml"); + if (configResource == null) { + throw new IllegalStateException("docker-catalog-config.yaml not found on classpath"); + } + String catalogConfig = Files.readString(Paths.get(configResource.toURI())); Path scenariosDir = getScenariosDirectory().toAbsolutePath(); if (!Files.exists(scenariosDir) || !Files.isDirectory(scenariosDir)) { @@ -109,16 +103,15 @@ public void setUp() throws Exception { } // Start the ice-rest-catalog container (debug-with-ice has ice CLI at /usr/local/bin/ice) - GenericContainer catalogContainer = + catalog = new GenericContainer<>(dockerImage) .withNetwork(network) .withExposedPorts(5000) .withEnv("ICE_REST_CATALOG_CONFIG", "") .withEnv("ICE_REST_CATALOG_CONFIG_YAML", catalogConfig) - .withFileSystemBind(scenariosDir.toString(), "/scenarios") + .withCopyFileToContainer(MountableFile.forHostPath(scenariosDir), "/scenarios") .waitingFor(Wait.forHttp("/v1/config").forPort(5000).forStatusCode(200)); - catalog = catalogContainer; try { catalog.start(); } catch (Exception e) { @@ -145,11 +138,11 @@ public void setUp() throws Exception { @Override @AfterClass public void tearDown() { - if (catalog != null && catalog.isRunning()) { - catalog.stop(); + if (catalog != null) { + catalog.close(); } - if (minio != null && minio.isRunning()) { - minio.stop(); + if (minio != null) { + minio.close(); } if (network != null) { network.close(); diff --git a/ice-rest-catalog/src/test/java/com/altinity/ice/rest/catalog/ScenarioConfig.java b/ice-rest-catalog/src/test/java/com/altinity/ice/rest/catalog/ScenarioConfig.java index 3d6ae098..e7163456 100644 --- a/ice-rest-catalog/src/test/java/com/altinity/ice/rest/catalog/ScenarioConfig.java +++ b/ice-rest-catalog/src/test/java/com/altinity/ice/rest/catalog/ScenarioConfig.java @@ -17,10 +17,7 @@ *

This class uses Jackson/SnakeYAML annotations for YAML deserialization. */ public record ScenarioConfig( - String name, - String description, - CatalogConfig catalogConfig, - Map env) { + String name, String description, CatalogConfig catalogConfig, Map env) { public record CatalogConfig(String warehouse, String name, String uri) {} } diff --git a/ice-rest-catalog/src/test/resources/docker-catalog-config.yaml b/ice-rest-catalog/src/test/resources/docker-catalog-config.yaml new file mode 100644 index 00000000..d7c07ecd --- /dev/null +++ b/ice-rest-catalog/src/test/resources/docker-catalog-config.yaml @@ -0,0 +1,12 @@ +uri: "jdbc:sqlite::memory:" +warehouse: "s3://test-bucket/warehouse" +s3: + endpoint: "http://minio:9000" + pathStyleAccess: true + accessKeyID: "minioadmin" + secretAccessKey: "minioadmin" + region: "us-east-1" +anonymousAccess: + enabled: true + accessConfig: + readOnly: false diff --git a/ice/src/main/java/com/altinity/ice/cli/Main.java b/ice/src/main/java/com/altinity/ice/cli/Main.java index be7ada3a..014d2ec5 100644 --- a/ice/src/main/java/com/altinity/ice/cli/Main.java +++ b/ice/src/main/java/com/altinity/ice/cli/Main.java @@ -18,6 +18,7 @@ import com.altinity.ice.cli.internal.cmd.DeleteNamespace; import com.altinity.ice.cli.internal.cmd.DeleteTable; import com.altinity.ice.cli.internal.cmd.Describe; +import com.altinity.ice.cli.internal.cmd.DescribeParquet; import com.altinity.ice.cli.internal.cmd.Insert; import com.altinity.ice.cli.internal.cmd.InsertWatch; import com.altinity.ice.cli.internal.cmd.Scan; @@ -142,6 +143,68 @@ void describe( } } + @CommandLine.Command(name = "describe-parquet", description = "Describe parquet file metadata.") + void describeParquet( + @CommandLine.Parameters( + arity = "1", + paramLabel = "", + description = "Path to parquet file") + String target, + @CommandLine.Option( + names = {"-a", "--all"}, + description = "Show everything") + boolean showAll, + @CommandLine.Option( + names = {"-s", "--summary"}, + description = "Show size, rows, number of row groups, size, compress_size, etc.") + boolean showSummary, + @CommandLine.Option( + names = {"--columns"}, + description = "Show columns") + boolean showColumns, + @CommandLine.Option( + names = {"-r", "--row-groups"}, + description = "Show row groups") + boolean showRowGroups, + @CommandLine.Option( + names = {"-d", "--row-group-details"}, + description = "Show column stats within row group") + boolean showRowGroupDetails, + @CommandLine.Option( + names = {"--json"}, + description = "Output JSON instead of YAML") + boolean json, + @CommandLine.Option(names = {"--s3-region"}) String s3Region, + @CommandLine.Option( + names = {"--s3-no-sign-request"}, + description = "Access S3 files without authentication") + boolean s3NoSignRequest) + throws IOException { + setAWSRegion(s3Region); + try (RESTCatalog catalog = loadCatalog()) { + var options = new ArrayList(); + if (showAll || showSummary) { + options.add(DescribeParquet.Option.SUMMARY); + } + if (showAll || showColumns) { + options.add(DescribeParquet.Option.COLUMNS); + } + if (showAll || showRowGroups) { + options.add(DescribeParquet.Option.ROW_GROUPS); + } + if (showAll || showRowGroupDetails) { + options.add(DescribeParquet.Option.ROW_GROUP_DETAILS); + } + + if (options.isEmpty()) { + options.add(DescribeParquet.Option.SUMMARY); + } + + DescribeParquet.run( + catalog, target, json, s3NoSignRequest, options.toArray(new DescribeParquet.Option[0])); + } + } + public record IceSortOrder( @JsonProperty("column") String column, 
@JsonProperty("desc") boolean desc, diff --git a/ice/src/main/java/com/altinity/ice/cli/internal/cmd/DescribeParquet.java b/ice/src/main/java/com/altinity/ice/cli/internal/cmd/DescribeParquet.java new file mode 100644 index 00000000..dd9dd1e8 --- /dev/null +++ b/ice/src/main/java/com/altinity/ice/cli/internal/cmd/DescribeParquet.java @@ -0,0 +1,229 @@ +/* + * Copyright (c) 2025 Altinity Inc and/or its affiliates. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package com.altinity.ice.cli.internal.cmd; + +import com.altinity.ice.cli.internal.iceberg.io.Input; +import com.altinity.ice.cli.internal.iceberg.parquet.Metadata; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Set; +import org.apache.iceberg.io.FileIO; +import org.apache.iceberg.io.InputFile; +import org.apache.iceberg.rest.RESTCatalog; +import org.apache.parquet.column.statistics.Statistics; +import org.apache.parquet.hadoop.metadata.BlockMetaData; +import org.apache.parquet.hadoop.metadata.ColumnChunkMetaData; +import org.apache.parquet.hadoop.metadata.FileMetaData; +import org.apache.parquet.hadoop.metadata.ParquetMetadata; +import org.apache.parquet.schema.MessageType; +import org.apache.parquet.schema.Type; +import software.amazon.awssdk.services.s3.S3Client; +import software.amazon.awssdk.services.s3.internal.crossregion.S3CrossRegionSyncClient; +import software.amazon.awssdk.utils.Lazy; + +public final class DescribeParquet { + + private DescribeParquet() {} + + public enum Option { + ALL, + SUMMARY, + COLUMNS, + ROW_GROUPS, + ROW_GROUP_DETAILS + } + + public static void run( + RESTCatalog catalog, + String filePath, + boolean json, + boolean s3NoSignRequest, + Option... options) + throws IOException { + + Lazy s3ClientLazy = + new Lazy<>( + () -> + new S3CrossRegionSyncClient( + com.altinity.ice.cli.internal.s3.S3.newClient(s3NoSignRequest))); + FileIO io = Input.newIO(filePath, null, s3ClientLazy); + InputFile inputFile = Input.newFile(filePath, catalog, io); + run(inputFile, json, options); + } + + public static void run(InputFile inputFile, boolean json, Option... options) throws IOException { + ParquetMetadata metadata = Metadata.read(inputFile); + + ParquetInfo info = extractParquetInfo(metadata, options); + + ObjectMapper mapper = json ? new ObjectMapper() : new ObjectMapper(new YAMLFactory()); + mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); + String output = mapper.writeValueAsString(info); + System.out.println(output); + } + + private static ParquetInfo extractParquetInfo(ParquetMetadata metadata, Option... 
options) { + var optionsSet = Set.of(options); + boolean includeAll = optionsSet.contains(Option.ALL); + + FileMetaData fileMetadata = metadata.getFileMetaData(); + + // Summary info + Summary summary = null; + if (includeAll || optionsSet.contains(Option.SUMMARY)) { + long totalRows = metadata.getBlocks().stream().mapToLong(BlockMetaData::getRowCount).sum(); + + long compressedSize = + metadata.getBlocks().stream().mapToLong(BlockMetaData::getCompressedSize).sum(); + + long uncompressedSize = + metadata.getBlocks().stream().mapToLong(BlockMetaData::getTotalByteSize).sum(); + + summary = + new Summary( + totalRows, + metadata.getBlocks().size(), + compressedSize, + uncompressedSize, + fileMetadata.getCreatedBy(), + fileMetadata.getSchema().getFieldCount()); + } + + // Column info + List columns = null; + if (includeAll || optionsSet.contains(Option.COLUMNS)) { + columns = extractColumns(fileMetadata.getSchema()); + } + + // Row group info + List rowGroups = null; + if (includeAll + || optionsSet.contains(Option.ROW_GROUPS) + || optionsSet.contains(Option.ROW_GROUP_DETAILS)) { + boolean includeDetails = includeAll || optionsSet.contains(Option.ROW_GROUP_DETAILS); + rowGroups = extractRowGroups(metadata.getBlocks(), includeDetails); + } + + return new ParquetInfo(summary, columns, rowGroups); + } + + private static List extractColumns(MessageType schema) { + List columns = new ArrayList<>(); + for (Type field : schema.getFields()) { + String logicalType = null; + if (field.isPrimitive()) { + var annotation = field.asPrimitiveType().getLogicalTypeAnnotation(); + logicalType = annotation != null ? annotation.toString() : null; + } + columns.add( + new Column( + field.getName(), + field.isPrimitive() ? field.asPrimitiveType().getPrimitiveTypeName().name() : "GROUP", + field.getRepetition().name(), + logicalType)); + } + return columns; + } + + private static List extractRowGroups( + List blocks, boolean includeDetails) { + List rowGroups = new ArrayList<>(); + + for (int i = 0; i < blocks.size(); i++) { + BlockMetaData block = blocks.get(i); + + List columnChunks = null; + if (includeDetails) { + columnChunks = new ArrayList<>(); + for (ColumnChunkMetaData column : block.getColumns()) { + Statistics stats = column.getStatistics(); + + ColumnStats columnStats = null; + if (stats != null && !stats.isEmpty()) { + long nulls = stats.isNumNullsSet() ? stats.getNumNulls() : 0; + String min = null; + String max = null; + if (stats.hasNonNullValue()) { + Object minVal = stats.genericGetMin(); + Object maxVal = stats.genericGetMax(); + min = minVal != null ? minVal.toString() : null; + max = maxVal != null ? 
maxVal.toString() : null; + } + columnStats = new ColumnStats(nulls, min, max); + } + + columnChunks.add( + new ColumnChunk( + column.getPath().toDotString(), + column.getPrimitiveType().getName(), + column.getEncodings().toString(), + column.getCodec().name(), + column.getTotalSize(), + column.getTotalUncompressedSize(), + column.getValueCount(), + columnStats)); + } + } + + rowGroups.add( + new RowGroup( + i, + block.getRowCount(), + block.getTotalByteSize(), + block.getCompressedSize(), + block.getStartingPos(), + columnChunks)); + } + + return rowGroups; + } + + @JsonInclude(JsonInclude.Include.NON_NULL) + public record ParquetInfo(Summary summary, List columns, List rowGroups) {} + + @JsonInclude(JsonInclude.Include.NON_NULL) + public record Summary( + long rows, + int rowGroups, + long compressedSize, + long uncompressedSize, + String createdBy, + int columnCount) {} + + @JsonInclude(JsonInclude.Include.NON_NULL) + public record Column(String name, String type, String repetition, String logicalType) {} + + @JsonInclude(JsonInclude.Include.NON_NULL) + public record RowGroup( + int index, + long rowCount, + long totalSize, + long compressedSize, + long startingPos, + List columns) {} + + @JsonInclude(JsonInclude.Include.NON_NULL) + public record ColumnChunk( + String path, + String type, + String encodings, + String codec, + long totalSize, + long uncompressedSize, + long valueCount, + ColumnStats stats) {} + + @JsonInclude(JsonInclude.Include.NON_NULL) + public record ColumnStats(long nulls, String min, String max) {} +} diff --git a/ice/src/test/java/com/altinity/ice/cli/internal/cmd/DescribeParquetTest.java b/ice/src/test/java/com/altinity/ice/cli/internal/cmd/DescribeParquetTest.java new file mode 100644 index 00000000..64c372d3 --- /dev/null +++ b/ice/src/test/java/com/altinity/ice/cli/internal/cmd/DescribeParquetTest.java @@ -0,0 +1,88 @@ +/* + * Copyright (c) 2025 Altinity Inc and/or its affiliates. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package com.altinity.ice.cli.internal.cmd; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.altinity.ice.test.Resource; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.PrintStream; +import org.testng.annotations.Test; + +public class DescribeParquetTest { + + @Test + public void testDescribeParquetSummary() throws IOException { + ByteArrayOutputStream outContent = new ByteArrayOutputStream(); + PrintStream originalOut = System.out; + System.setOut(new PrintStream(outContent)); + + try { + var sampleFile = + Resource.asInputFile("com/altinity/ice/cli/internal/iceberg/parquet/sample-001.parquet"); + + DescribeParquet.run(sampleFile, false, DescribeParquet.Option.SUMMARY); + + String output = outContent.toString(); + + assertThat(output).contains("rows:"); + assertThat(output).contains("rowGroups:"); + assertThat(output).contains("compressedSize:"); + assertThat(output).contains("uncompressedSize:"); + } finally { + System.setOut(originalOut); + } + } + + @Test + public void testDescribeParquetColumns() throws IOException { + ByteArrayOutputStream outContent = new ByteArrayOutputStream(); + PrintStream originalOut = System.out; + System.setOut(new PrintStream(outContent)); + + try { + var sampleFile = + Resource.asInputFile("com/altinity/ice/cli/internal/iceberg/parquet/sample-001.parquet"); + + DescribeParquet.run(sampleFile, false, DescribeParquet.Option.COLUMNS); + + String output = outContent.toString(); + + assertThat(output).contains("columns:"); + assertThat(output).contains("name:"); + assertThat(output).contains("type:"); + } finally { + System.setOut(originalOut); + } + } + + @Test + public void testDescribeParquetJson() throws IOException { + ByteArrayOutputStream outContent = new ByteArrayOutputStream(); + PrintStream originalOut = System.out; + System.setOut(new PrintStream(outContent)); + + try { + var sampleFile = + Resource.asInputFile("com/altinity/ice/cli/internal/iceberg/parquet/sample-001.parquet"); + + DescribeParquet.run(sampleFile, true, DescribeParquet.Option.SUMMARY); + + String output = outContent.toString(); + + assertThat(output).contains("{"); + assertThat(output).contains("}"); + assertThat(output).contains("\"summary\""); + } finally { + System.setOut(originalOut); + } + } +} From a0e3fe7a6b986e7843adddb27b22d1693a8e0706 Mon Sep 17 00:00:00 2001 From: kanthi subramanian Date: Tue, 17 Feb 2026 16:41:29 -0600 Subject: [PATCH 3/4] Removed docker push in pre-release-docker --- .bin/pre-release-docker | 5 ----- 1 file changed, 5 deletions(-) diff --git a/.bin/pre-release-docker b/.bin/pre-release-docker index 990a5597..2671de32 100755 --- a/.bin/pre-release-docker +++ b/.bin/pre-release-docker @@ -8,11 +8,6 @@ export SKIP_VERIFY=1 export PATH="$(pwd)/.bin:$PATH" -echo >&2 'Building ice Docker image' -docker-build-ice -echo >&2 'Building ice-rest-catalog Docker image' -docker-build-ice-rest-catalog - echo >&2 'Pushing ice Docker image' docker-build-ice --push echo >&2 'Pushing ice-rest-catalog Docker image' From 145160fb00012de2a74fbb0cdcf557314a1027be Mon Sep 17 00:00:00 2001 From: kanthi subramanian Date: Tue, 17 Feb 2026 16:51:19 -0600 Subject: [PATCH 4/4] pass -am to make Maven build ice first. 
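
Without -am (--also-make), ./mvnw -pl ice-rest-catalog install cannot resolve the
locally built ice module unless it has already been installed; -am tells Maven to
also build the modules ice-rest-catalog depends on, so ice is built first.

For reference, the docker-integration job can be reproduced locally roughly as
follows (a sketch assembled from the workflow steps above; it assumes Docker, the
.bin docker-build-* scripts, and a GraalVM JDK 21 are available on the host):

  # Build the images exercised by DockerScenarioBasedIT
  export VERSION=0.0.0-latest-master+$(git rev-parse --short HEAD)
  export IMAGE_TAG="latest-master"
  export SKIP_VERIFY=1
  export PATH="$(pwd)/.bin:$PATH"
  docker-build-ice
  docker-build-ice-rest-catalog

  # Install ice and ice-rest-catalog without unit tests, then run the Docker IT
  ./mvnw -pl ice-rest-catalog -am install -DskipTests=true -Pno-check
  ./mvnw -pl ice-rest-catalog failsafe:integration-test failsafe:verify \
    -Dit.test=DockerScenarioBasedIT \
    -Ddocker.image=altinity/ice-rest-catalog:debug-with-ice-latest-master-amd64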
--- .github/workflows/verify.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/verify.yaml b/.github/workflows/verify.yaml index c75844db..053511ae 100644 --- a/.github/workflows/verify.yaml +++ b/.github/workflows/verify.yaml @@ -53,7 +53,7 @@ jobs: docker-build-ice-rest-catalog - name: Run Docker integration tests run: > - ./mvnw -pl ice-rest-catalog install -DskipTests=true -Pno-check && + ./mvnw -pl ice-rest-catalog -am install -DskipTests=true -Pno-check && ./mvnw -pl ice-rest-catalog failsafe:integration-test failsafe:verify -Dit.test=DockerScenarioBasedIT -Ddocker.image=altinity/ice-rest-catalog:debug-with-ice-latest-master-amd64