From e92da9b0a6de577d2467dec5cc0c76faac3bcc8a Mon Sep 17 00:00:00 2001
From: Michael Collado
Date: Sat, 27 Apr 2024 13:55:32 -0700
Subject: [PATCH] Mcollado ci (#7)

* Add initial integration tests
* Add gradle workflow in github
* Fixed Dockerfile to create default-realm dir for sqlite
* Add docker-compose and Dockerfile for regtest
* Docker build in ci
* Fix context in docker-compose file
* Update regtest to work locally and in docker
* Fix dockerfile to run gradle build
* Add .keep file for output directory
---
 .github/workflows/gradle.yml                  |  49 ++++++
 .github/workflows/regtest.yml                 |  18 +++
 .gitignore                                    |   8 +
 docker-compose.yml                            |  22 +++
 Dockerfile => iceberg-rest-server/Dockerfile  |   6 +-
 .../iceberg/rest/IcebergRestApplication.java  |   4 +-
 .../iceberg/rest/IcebergRestServerMain.java   |  30 ++--
 .../config/IcebergRestApplicationConfig.java  |  12 ++
 ...IcebergRestApplicationIntegrationTest.java | 146 ++++++++++++++++++
 .../iceberg-rest-server-integrationtest.yml   |  91 +++++++++++
 regtests/Dockerfile                           |  15 ++
 regtests/output/.keep                         |   0
 regtests/run.sh                               |   1 +
 regtests/setup.sh                             |   3 +-
 14 files changed, 389 insertions(+), 16 deletions(-)
 create mode 100644 .github/workflows/gradle.yml
 create mode 100644 .github/workflows/regtest.yml
 create mode 100644 docker-compose.yml
 rename Dockerfile => iceberg-rest-server/Dockerfile (84%)
 create mode 100644 iceberg-rest-server/src/test/java/org/apache/iceberg/rest/IcebergRestApplicationIntegrationTest.java
 create mode 100644 iceberg-rest-server/src/test/resources/iceberg-rest-server-integrationtest.yml
 create mode 100644 regtests/Dockerfile
 create mode 100644 regtests/output/.keep

diff --git a/.github/workflows/gradle.yml b/.github/workflows/gradle.yml
new file mode 100644
index 000000000..336c8747d
--- /dev/null
+++ b/.github/workflows/gradle.yml
@@ -0,0 +1,49 @@
+# This workflow uses actions that are not certified by GitHub.
+# They are provided by a third-party and are governed by
+# separate terms of service, privacy policy, and support
+# documentation.
+# This workflow will build a Java project with Gradle and cache/restore any dependencies to improve the workflow execution time
+# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-java-with-gradle
+
+name: Java CI with Gradle
+
+on:
+  push:
+    branches: [ "main" ]
+  pull_request:
+    branches: [ "main" ]
+
+jobs:
+  build:
+
+    runs-on: ubuntu-latest
+    permissions:
+      contents: read
+
+    steps:
+    - uses: actions/checkout@v4
+    - name: Set up JDK 21
+      uses: actions/setup-java@v4
+      with:
+        java-version: '21'
+        distribution: 'temurin'
+
+    # Configure Gradle for optimal use in GitHub Actions, including caching of downloaded dependencies.
+    # See: https://github.com/gradle/actions/blob/main/setup-gradle/README.md
+    - name: Setup Gradle
+      uses: gradle/actions/setup-gradle@417ae3ccd767c252f5661f1ace9f835f9654f2b5 # v3.1.0
+
+    - name: Build with Gradle Wrapper
+      working-directory: iceberg-rest-server
+      run: ./gradlew test
+
+    # NOTE: The Gradle Wrapper is the default and recommended way to run Gradle (https://docs.gradle.org/current/userguide/gradle_wrapper.html).
+    # If your project does not have the Gradle Wrapper configured, you can use the following configuration to run Gradle with a specified version.
+    #
+    # - name: Setup Gradle
+    #   uses: gradle/actions/setup-gradle@417ae3ccd767c252f5661f1ace9f835f9654f2b5 # v3.1.0
+    #   with:
+    #     gradle-version: '8.6'
+    #
+    # - name: Build with Gradle 8.6
+    #   run: gradle build
diff --git a/.github/workflows/regtest.yml b/.github/workflows/regtest.yml
new file mode 100644
index 000000000..6f1b1493f
--- /dev/null
+++ b/.github/workflows/regtest.yml
@@ -0,0 +1,18 @@
+name: Regression Tests
+on:
+  push:
+    branches: [ "main" ]
+  pull_request:
+    branches: [ "main" ]
+
+jobs:
+  build:
+
+    runs-on: ubuntu-latest
+    permissions:
+      contents: read
+
+    steps:
+    - uses: actions/checkout@v4
+    - name: Regression Test
+      run: docker compose up --exit-code-from regtest
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index ef2829e2c..eae1e1e19 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,3 +2,11 @@
 iceberg-rest-server/.gradle/*
 iceberg-rest-server/.idea/*
 iceberg-rest-server/build/*
+iceberg-rest-server/.java-version
+iceberg-rest-server/iceberg-rest-server.iml
+iceberg-rest-server/iceberg-rest-server.ipr
+iceberg-rest-server/iceberg-rest-server.iws
+iceberg-rest-server/logs/
+regtests/derby.log
+regtests/metastore_db
+regtests/output/
diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 000000000..3a43ade8d
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,22 @@
+services:
+  pinnacle:
+    build:
+      context: ./iceberg-rest-server
+    ports:
+      - "8181:8181"
+  regtest:
+    build:
+      context: ./regtests
+      args:
+        PINNACLE_HOST: pinnacle
+    depends_on:
+      - pinnacle
+    volumes:
+      - local_output:/tmp/pinnacle-regtests/
+volumes:
+  local_output:
+    driver: local
+    driver_opts:
+      o: bind
+      type: none
+      device: ./regtests/output
\ No newline at end of file
diff --git a/Dockerfile b/iceberg-rest-server/Dockerfile
similarity index 84%
rename from Dockerfile
rename to iceberg-rest-server/Dockerfile
index c18c9f3d3..fe04664b7 100644
--- a/Dockerfile
+++ b/iceberg-rest-server/Dockerfile
@@ -2,20 +2,22 @@ FROM openjdk:21 as build
 
 # Copy the REST catalog into the container
-COPY iceberg-rest-server /app
+COPY ./ /app
 
 # Set the working directory in the container, nuke any existing builds
 WORKDIR app
 CMD ["rm", "-rf", "build"]
 
 # Build the rest catalog
-CMD ["./gradlew", "build"]
+RUN ./gradlew build
 
 FROM openjdk:21
 WORKDIR /app
 COPY --from=build /app/build/libs/iceberg-rest-server-all.jar /app
 COPY --from=build /app/iceberg-rest-server.yml /app
 
+RUN mkdir -p /tmp/iceberg_rest_server_sqlitestate_basedir/default-realm
+
 # Expose the port that we run on (hardcoded as of now in `IcebergRestServerMain.java`)
 EXPOSE 8181
diff --git a/iceberg-rest-server/src/main/java/org/apache/iceberg/rest/IcebergRestApplication.java b/iceberg-rest-server/src/main/java/org/apache/iceberg/rest/IcebergRestApplication.java
index b49e9dc2f..8bbe8541d 100644
--- a/iceberg-rest-server/src/main/java/org/apache/iceberg/rest/IcebergRestApplication.java
+++ b/iceberg-rest-server/src/main/java/org/apache/iceberg/rest/IcebergRestApplication.java
@@ -68,7 +68,9 @@ public RealmContext resolveRealmContext(RESTCatalogAdapter.HTTPMethod method, St
             return realmProperties::get;
           }
         })).addMappingForUrlPatterns(EnumSet.of(DispatcherType.REQUEST), true, "/*");
-    environment.jersey().register(new IcebergRestCatalogApi(new IcebergCatalogAdapter(new IcebergRestServerMain.CachingRealmCatalogFactory())));
+    IcebergRestServerMain.CachingRealmCatalogFactory catalogFactory =
+        new IcebergRestServerMain.CachingRealmCatalogFactory(configuration.getSqlLiteCatalogDirs());
+    environment.jersey().register(new
+        IcebergRestCatalogApi(new IcebergCatalogAdapter(catalogFactory)));
     environment.jersey().register(new IcebergRestConfigurationApi(new IcebergRestConfigurationApiService() {
       @Override
       public Response getConfig(String warehouse, SecurityContext securityContext) {
diff --git a/iceberg-rest-server/src/main/java/org/apache/iceberg/rest/IcebergRestServerMain.java b/iceberg-rest-server/src/main/java/org/apache/iceberg/rest/IcebergRestServerMain.java
index 063f26f37..4cdb0fa57 100644
--- a/iceberg-rest-server/src/main/java/org/apache/iceberg/rest/IcebergRestServerMain.java
+++ b/iceberg-rest-server/src/main/java/org/apache/iceberg/rest/IcebergRestServerMain.java
@@ -14,7 +14,6 @@
 import org.eclipse.jetty.server.Server;
 import org.eclipse.jetty.server.handler.gzip.GzipHandler;
 import org.eclipse.jetty.servlet.ServletContextHandler;
-import org.eclipse.jetty.servlet.ServletHolder;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -22,17 +21,23 @@ public class IcebergRestServerMain {
   private static final Logger LOG = LoggerFactory.getLogger(IcebergRestServerMain.class);
 
   private static final int SERVER_PORT = 8181;
-  private static final String METASTORE_STATE_BASEDIR =
+  private static final String DEFAULT_METASTORE_STATE_BASEDIR =
       "/tmp/iceberg_rest_server_sqlitestate_basedir/";
   private static final String WAREHOUSE_LOCATION_BASEDIR =
       "/tmp/iceberg_rest_server_warehouse_data/";
 
   static class CachingRealmCatalogFactory implements RealmCatalogFactory {
     private Map<String, Catalog> cachedCatalogs = new HashMap<>();
+    private final Map<String, String> catalogBaseDirs;
 
-    @Override
+    CachingRealmCatalogFactory(Map<String, String> catalogBaseDirs) {
+      this.catalogBaseDirs = catalogBaseDirs;
+    }
+
+    @Override
     public Catalog getOrCreateCatalog(RealmContext context, String catalogName) {
-      String realmName = context.getProperty("realm");
-      Catalog catalogInstance = cachedCatalogs.get(realmName);
+      String realm = context.getProperty("realm");
+      String catalogKey = realm + "/" + catalogName;
+      Catalog catalogInstance = cachedCatalogs.get(catalogKey);
       if (catalogInstance == null) {
         Map<String, String> catalogProperties = new HashMap<>();
         catalogProperties.put(
@@ -41,16 +46,17 @@ public Catalog getOrCreateCatalog(RealmContext context, String catalogName) {
 
       // TODO: Do sanitization in case this ever runs in an exposed prod environment to avoid
      // injection attacks.
+      String baseDir = catalogBaseDirs.getOrDefault(realm, DEFAULT_METASTORE_STATE_BASEDIR);
       catalogProperties.put(
-          CatalogProperties.URI, "jdbc:sqlite:file:" + METASTORE_STATE_BASEDIR + realmName);
+          CatalogProperties.URI, "jdbc:sqlite:file:" + baseDir + "/" + catalogKey);
 
       // TODO: Derive warehouse location from realm configs.
       catalogProperties.put(
-          CatalogProperties.WAREHOUSE_LOCATION, WAREHOUSE_LOCATION_BASEDIR + realmName);
+          CatalogProperties.WAREHOUSE_LOCATION, WAREHOUSE_LOCATION_BASEDIR + catalogKey);
 
       catalogInstance =
           CatalogUtil.buildIcebergCatalog(
-              "catalog_" + realmName, catalogProperties, new Configuration());
-      cachedCatalogs.put(realmName, catalogInstance);
+              "catalog_" + catalogKey, catalogProperties, new Configuration());
+      cachedCatalogs.put(catalogKey, catalogInstance);
     }
     return catalogInstance;
   }
@@ -58,8 +64,8 @@ public Catalog getOrCreateCatalog(RealmContext context, String catalogName) {
 
   public static void main(String[] args) throws Exception {
     // Ensure parent directories of metastore-state base directory exist.
-    LOG.info("Creating metastore state directory: " + METASTORE_STATE_BASEDIR);
-    Path result = Files.createDirectories(FileSystems.getDefault().getPath(METASTORE_STATE_BASEDIR));
+    LOG.info("Creating metastore state directory: " + DEFAULT_METASTORE_STATE_BASEDIR);
+    Path result = Files.createDirectories(FileSystems.getDefault().getPath(DEFAULT_METASTORE_STATE_BASEDIR));
 
     RESTCatalogAdapter adapter = new RealmContextDelegatingRESTCatalogAdapter(
         new RealmContextResolver() {
@@ -103,7 +109,7 @@ public String getProperty(String key) {
               };
             }
           },
-        new CachingRealmCatalogFactory());
+        new CachingRealmCatalogFactory(Map.of()));
 
     RESTCatalogServlet servlet = new RESTCatalogServlet(adapter);
     ServletContextHandler context = new ServletContextHandler(ServletContextHandler.NO_SESSIONS);
diff --git a/iceberg-rest-server/src/main/java/org/apache/iceberg/rest/config/IcebergRestApplicationConfig.java b/iceberg-rest-server/src/main/java/org/apache/iceberg/rest/config/IcebergRestApplicationConfig.java
index a07e8fab9..93af30afa 100644
--- a/iceberg-rest-server/src/main/java/org/apache/iceberg/rest/config/IcebergRestApplicationConfig.java
+++ b/iceberg-rest-server/src/main/java/org/apache/iceberg/rest/config/IcebergRestApplicationConfig.java
@@ -2,5 +2,17 @@
 
 import io.dropwizard.core.Configuration;
 
+import java.util.HashMap;
+import java.util.Map;
+
 public class IcebergRestApplicationConfig extends Configuration {
+  private Map<String, String> sqlLiteCatalogDirs = new HashMap<>();
+
+  public Map<String, String> getSqlLiteCatalogDirs() {
+    return sqlLiteCatalogDirs;
+  }
+
+  public void setSqlLiteCatalogDirs(Map<String, String> sqlLiteCatalogDirs) {
+    this.sqlLiteCatalogDirs = sqlLiteCatalogDirs;
+  }
 }
diff --git a/iceberg-rest-server/src/test/java/org/apache/iceberg/rest/IcebergRestApplicationIntegrationTest.java b/iceberg-rest-server/src/test/java/org/apache/iceberg/rest/IcebergRestApplicationIntegrationTest.java
new file mode 100644
index 000000000..b99245e41
--- /dev/null
+++ b/iceberg-rest-server/src/test/java/org/apache/iceberg/rest/IcebergRestApplicationIntegrationTest.java
@@ -0,0 +1,146 @@
+package org.apache.iceberg.rest;
+
+import io.dropwizard.testing.ResourceHelpers;
+import io.dropwizard.testing.junit5.DropwizardAppExtension;
+import io.dropwizard.testing.junit5.DropwizardExtensionsSupport;
+import org.apache.iceberg.catalog.Namespace;
+import org.apache.iceberg.catalog.SessionCatalog;
+import org.apache.iceberg.exceptions.NoSuchNamespaceException;
+import org.apache.iceberg.rest.config.IcebergRestApplicationConfig;
+import org.assertj.core.api.Assertions;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.Comparator;
+import java.util.List;
+import java.util.Map;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.fail;
+
+@ExtendWith(DropwizardExtensionsSupport.class)
+public class IcebergRestApplicationIntegrationTest {
+  private static DropwizardAppExtension<IcebergRestApplicationConfig> EXT =
+      new DropwizardAppExtension<>(
+          IcebergRestApplication.class,
+          ResourceHelpers.resourceFilePath("iceberg-rest-server-integrationtest.yml")
+      );
+
+  @BeforeAll
+  public static void setup() throws IOException {
+    Path testDir = Path.of("build/test_data/iceberg/default-realm");
+    if (Files.exists(testDir)) {
+      if (Files.isDirectory(testDir)) {
+        Files.walk(testDir)
+            .sorted(Comparator.reverseOrder())
+            .forEach(path -> {
+              try {
+                Files.delete(path);
+              } catch (IOException e) {
+                throw new RuntimeException(e);
+              }
+            });
+
+      } else {
+        Files.delete(testDir);
+      }
+    }
+    Files.createDirectories(testDir);
+  }
+
+  private static RESTSessionCatalog newSessionCatalog(String catalog) {
+    RESTSessionCatalog sessionCatalog = new RESTSessionCatalog();
+    sessionCatalog.initialize("snowflake", Map.of(
+        "uri", "http://localhost:" + EXT.getLocalPort() + "/api/catalog",
+        "prefix", catalog
+    ));
+    return sessionCatalog;
+  }
+
+  @Test
+  public void testIcebergListNamespaces() throws IOException {
+    try (RESTSessionCatalog sessionCatalog = newSessionCatalog("testIcebergListNamespaces")) {
+      SessionCatalog.SessionContext sessionContext = SessionCatalog.SessionContext.createEmpty();
+      List<Namespace> namespaces = sessionCatalog.listNamespaces(sessionContext);
+      assertThat(namespaces)
+          .isNotNull()
+          .isEmpty();
+    }
+  }
+
+  @Test
+  public void testIcebergListNamespacesNotFound() throws IOException {
+    try (RESTSessionCatalog sessionCatalog = newSessionCatalog("testIcebergListNamespacesNotFound")) {
+      SessionCatalog.SessionContext sessionContext = SessionCatalog.SessionContext.createEmpty();
+      try {
+        sessionCatalog.listNamespaces(sessionContext, Namespace.of("whoops"));
+        fail("Expected exception to be thrown");
+      } catch (NoSuchNamespaceException e) {
+        // we expect this!
+        Assertions.assertThat(e).isNotNull();
+      } catch (Exception e) {
+        fail("Unexpected exception", e);
+      }
+    }
+  }
+
+  @Test
+  public void testIcebergListNamespacesNestedNotFound() throws IOException {
+    try (RESTSessionCatalog sessionCatalog = newSessionCatalog("testIcebergListNamespacesNestedNotFound")) {
+      SessionCatalog.SessionContext sessionContext = SessionCatalog.SessionContext.createEmpty();
+      Namespace topLevelNamespace = Namespace.of("top_level");
+      sessionCatalog.createNamespace(sessionContext, topLevelNamespace);
+      sessionCatalog.loadNamespaceMetadata(sessionContext, Namespace.of("top_level"));
+      try {
+        sessionCatalog.listNamespaces(sessionContext, Namespace.of("top_level", "whoops"));
+        fail("Expected exception to be thrown");
+      } catch (NoSuchNamespaceException e) {
+        // we expect this!
+        Assertions.assertThat(e).isNotNull();
+      } catch (Exception e) {
+        fail("Unexpected exception", e);
+      }
+    }
+  }
+
+  @Test
+  public void testIcebergListTablesNamespaceNotFound() throws IOException {
+    try (RESTSessionCatalog sessionCatalog = newSessionCatalog("testIcebergListTablesNamespaceNotFound")) {
+      SessionCatalog.SessionContext sessionContext = SessionCatalog.SessionContext.createEmpty();
+      try {
+        sessionCatalog.listTables(sessionContext, Namespace.of("whoops"));
+        fail("Expected exception to be thrown");
+      } catch (NoSuchNamespaceException e) {
+        // we expect this!
+        Assertions.assertThat(e).isNotNull();
+      } catch (Exception e) {
+        fail("Unexpected exception", e);
+      }
+    }
+  }
+
+  @Test
+  public void testIcebergCreateNamespace() throws IOException {
+    try (RESTSessionCatalog sessionCatalog = newSessionCatalog("testIcebergCreateNamespace")) {
+      SessionCatalog.SessionContext sessionContext = SessionCatalog.SessionContext.createEmpty();
+      Namespace topLevelNamespace = Namespace.of("top_level");
+      sessionCatalog.createNamespace(sessionContext, topLevelNamespace);
+      List<Namespace> namespaces = sessionCatalog.listNamespaces(sessionContext);
+      assertThat(namespaces)
+          .isNotNull()
+          .hasSize(1)
+          .containsExactly(topLevelNamespace);
+      Namespace nestedNamespace = Namespace.of("top_level", "second_level");
+      sessionCatalog.createNamespace(sessionContext, nestedNamespace);
+      namespaces = sessionCatalog.listNamespaces(sessionContext, topLevelNamespace);
+      assertThat(namespaces)
+          .isNotNull()
+          .hasSize(1)
+          .containsExactly(nestedNamespace);
+    }
+  }
+}
diff --git a/iceberg-rest-server/src/test/resources/iceberg-rest-server-integrationtest.yml b/iceberg-rest-server/src/test/resources/iceberg-rest-server-integrationtest.yml
new file mode 100644
index 000000000..9551cd12b
--- /dev/null
+++ b/iceberg-rest-server/src/test/resources/iceberg-rest-server-integrationtest.yml
@@ -0,0 +1,91 @@
+server:
+  # Maximum number of threads.
+  maxThreads: 200
+
+  # Minimum number of threads to keep alive.
+  minThreads: 10
+  applicationConnectors:
+    # HTTP-specific options.
+    - type: http
+
+      # The port on which the HTTP server listens for service requests.
+      port: 8181
+
+  adminConnectors:
+    - type: http
+      port: 8081
+
+  # The hostname of the interface to which the HTTP server socket will be bound. If omitted, the
+  # socket will listen on all interfaces.
+  #bindHost: localhost
+
+  # ssl:
+  #   keyStore: ./example.keystore
+  #   keyStorePassword: example
+  #
+  #   keyStoreType: JKS # (optional, JKS is default)
+
+  # HTTP request log settings
+  requestLog:
+    appenders:
+      # Settings for logging to stdout.
+      - type: console
+
+      # Settings for logging to a file.
+      - type: file
+
+        # The file to which statements will be logged.
+        currentLogFilename: ./logs/request.log
+
+        # When the log file rolls over, the file will be archived to requests-2012-03-15.log.gz,
+        # requests.log will be truncated, and new statements written to it.
+        archivedLogFilenamePattern: ./logs/requests-%d.log.gz
+
+        # The maximum number of log files to archive.
+        archivedFileCount: 14
+
+        # Enable archiving if the request log entries go to their own file
+        archive: true
+sqlLiteCatalogDirs:
+  default-realm: ./build/test_data/iceberg
+
+# Logging settings.
+logging:
+
+  # The default level of all loggers. Can be OFF, ERROR, WARN, INFO, DEBUG, TRACE, or ALL.
+  level: INFO
+
+  # Logger-specific levels.
+  loggers:
+    org.apache.iceberg.rest: DEBUG
+
+  appenders:
+
+    - type: console
+      # If true, write log statements to stdout.
+#      enabled: true
+      # Do not display log statements below this threshold to stdout.
+      threshold: ALL
+      # Custom Logback PatternLayout with threadname.
+      logFormat: "%-5p [%d{ISO8601} - %-6r] [%t] [%X{aid}%X{sid}%X{tid}%X{wid}%X{oid}%X{srv}%X{job}%X{rid}] %c{30}: %m%n%ex"
+
+    # Settings for logging to a file.
+    - type: file
+      # If true, write log statements to a file.
+#      enabled: true
+      # Do not write log statements below this threshold to the file.
+      threshold: ALL
+      # Custom Logback PatternLayout with threadname.
+      logFormat: "%-5p [%d{ISO8601} - %-6r] [%t] [%X{aid}%X{sid}%X{tid}%X{wid}%X{oid}%X{srv}%X{job}%X{rid}] %c: %m%n%ex"
+
+      # when using json logging, you must use a format like this, else the
+      # mdc section of the json log will be incorrect
+      # logFormat: "%-5p [%d{ISO8601} - %-6r] [%t] [%X] %c: %m%n%ex"
+
+      # The file to which statements will be logged.
+      currentLogFilename: ./logs/iceberg-rest.log
+      # When the log file rolls over, the file will be archived to iceberg-rest-2012-03-15.log.gz,
+      # iceberg-rest.log will be truncated, and new statements written to it.
+      archivedLogFilenamePattern: ./logs/iceberg-rest-%d.log.gz
+      # The maximum number of log files to archive.
+      archivedFileCount: 14
diff --git a/regtests/Dockerfile b/regtests/Dockerfile
new file mode 100644
index 000000000..22d90d0d2
--- /dev/null
+++ b/regtests/Dockerfile
@@ -0,0 +1,15 @@
+FROM openjdk:21-slim
+ARG PINNACLE_HOST=pinnacle
+ENV PINNACLE_HOST=$PINNACLE_HOST
+
+RUN apt update
+RUN apt-get install -y diffutils wget
+
+# Copy and run setup.sh separately so that test sources can change, but the setup script run is still cached
+WORKDIR /app/regtests
+COPY ./setup.sh /app/regtests/setup.sh
+RUN ./setup.sh
+
+COPY . /app/regtests
+
+CMD ["./run.sh"]
\ No newline at end of file
diff --git a/regtests/output/.keep b/regtests/output/.keep
new file mode 100644
index 000000000..e69de29bb
diff --git a/regtests/run.sh b/regtests/run.sh
index e6b35513e..a92acb898 100755
--- a/regtests/run.sh
+++ b/regtests/run.sh
@@ -57,5 +57,6 @@ for TEST_FILE in ${TEST_LIST}; do
     chmod 750 ${TEST_TMPDIR}/${TEST_SHORTNAME}.fixdiffs.sh
     logred "To compare and fix diffs: ${TEST_TMPDIR}/${TEST_SHORTNAME}.fixdiffs.sh"
    logred "See stderr from test run for additional diagnostics: ${TEST_STDERR}"
+    exit 1
   fi
 done
diff --git a/regtests/setup.sh b/regtests/setup.sh
index 0cc62e5b9..f4645d6f1 100755
--- a/regtests/setup.sh
+++ b/regtests/setup.sh
@@ -35,12 +35,13 @@ cat << EOF >> ${SPARK_CONF}
 spark.jars.packages org.apache.iceberg:iceberg-spark-runtime-3.5_2.13:1.5.0,org.apache.hadoop:hadoop-aws:3.3.1,software.amazon.awssdk:bundle:2.15.40,software.amazon.awssdk:url-connection-client:2.15.40
 spark.hadoop.fs.s3.impl org.apache.hadoop.fs.s3a.S3AFileSystem
 spark.hadoop.fs.AbstractFileSystem.s3.impl org.apache.hadoop.fs.s3a.S3A
+spark.sql.variable.substitute true
 
 spark.driver.extraJavaOptions -Dderby.system.home=/tmp/derby
 
 spark.sql.catalog.pinnacle=org.apache.iceberg.spark.SparkCatalog
 spark.sql.catalog.pinnacle.catalog-impl=org.apache.iceberg.rest.RESTCatalog
-spark.sql.catalog.pinnacle.uri=http://localhost:8181/api/catalog
+spark.sql.catalog.pinnacle.uri=http://${PINNACLE_HOST:-localhost}:8181/api/catalog
 spark.sql.catalog.pinnacle.prefix=snowflake
 EOF
 echo 'Success!'
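
A minimal sketch of how to exercise this change, assuming the compose file and
scripts land as-is. The service names (pinnacle, regtest), the PINNACLE_HOST
fallback, and the script entry points are all taken from the diff above; the
--build flag is added here only to force an image rebuild and is not part of
the workflow itself.

    # CI path (what .github/workflows/regtest.yml runs): --exit-code-from
    # propagates the regtest container's exit status to `docker compose`,
    # which is what lets a failing suite fail the workflow now that run.sh
    # exits non-zero on a diff.
    docker compose up --build --exit-code-from regtest

    # Local path: the regtest image runs setup.sh at build time and run.sh as
    # its command; the same two scripts work directly against a server on
    # localhost, since setup.sh falls back to ${PINNACLE_HOST:-localhost}.
    cd regtests && ./setup.sh && ./run.sh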