From e930b0cad9e3aa8faf6e2df1335dbd0222759131 Mon Sep 17 00:00:00 2001 From: Shaofeng Shi Date: Wed, 22 May 2024 19:11:04 +0800 Subject: [PATCH] [#3483] refactor(API): separate API for client using: SupportsSchemas (#3419) ### What changes were proposed in this pull request? Today Gravitino java client and server sides share the same interfaces, like SupportsMetalakes, SupportsCatalogs, SupportsSchemas, etc. These interfaces are good for server side, but not good for client side. After some discussion with Jerry and others, if we want to make it easier to use, and still keep server side code stable, the only way is to separate the APIs: create individual APIs for the java client. This PR will introduce the simplified API for client: SupportsSchemas. The interface for the server side has been moved into core module with package name "com.datastrato.gravitino.schema"; The current one in api module with package name "com.datastrato.gravitino.rel" is for client side, whose method signatures have been changed, so the client code will be clear. Besides, the "Catalog.asSchema()", "Catalog.asTableCatalog()" should only be implemented on the client side, not server side (as server side we can use the subclasses of "CatalogOperations"). I updated the occurrences of such usages, mainly in some integration tests. The integration tests which use the Java client to manipulate metadata have also been updated. ### Why are the changes needed? To make the client API simple and easy to use. Fix: #3483 ### Does this PR introduce _any_ user-facing change? Will change the Java client API, mainly on the method signatures; for example, "listSchemas()" won't need an input parameter, "schemaExists()" will use a String value as the input parameter instead of a NameIdentifier object, etc. ### How was this patch tested? All existing integration tests will cover the API change. 
--- .../com/datastrato/gravitino/Catalog.java | 6 +- .../gravitino/{rel => }/Schema.java | 5 +- .../gravitino/{rel => }/SchemaChange.java | 2 +- .../gravitino/{rel => }/SupportsCatalogs.java | 10 +- .../{rel => }/SupportsMetalakes.java | 4 +- .../datastrato/gravitino/SupportsSchemas.java | 113 ++++++++++++ .../gravitino/TestSchemaChange.java | 5 +- .../catalog/hadoop/HadoopCatalog.java | 12 -- .../hadoop/HadoopCatalogOperations.java | 6 +- .../hadoop/TestHadoopCatalogOperations.java | 4 +- .../integration/test/HadoopCatalogIT.java | 27 +-- .../gravitino/catalog/hive/HiveCatalog.java | 22 --- .../catalog/hive/HiveCatalogOperations.java | 4 +- .../catalog/hive/TestHiveSchema.java | 54 +++--- .../gravitino/catalog/hive/TestHiveTable.java | 115 ++++++------ .../catalog/hive/TestHiveTableOperations.java | 7 +- .../hive/integration/test/CatalogHiveIT.java | 29 ++- .../integration/test/ProxyCatalogHiveIT.java | 8 +- .../gravitino/catalog/jdbc/JdbcCatalog.java | 14 -- .../catalog/jdbc/JdbcCatalogOperations.java | 4 +- .../integration/test/CatalogDorisIT.java | 50 ++--- .../integration/test/AuditCatalogMysqlIT.java | 9 +- .../integration/test/CatalogMysqlIT.java | 82 ++++----- .../integration/test/CatalogPostgreSqlIT.java | 62 +++---- .../test/TestMultipleJDBCLoad.java | 23 +-- .../gravitino/catalog/kafka/KafkaCatalog.java | 12 -- .../catalog/kafka/KafkaCatalogOperations.java | 6 +- .../kafka/TestKafkaCatalogOperations.java | 4 +- .../integration/test/CatalogKafkaIT.java | 33 +--- .../lakehouse/iceberg/IcebergCatalog.java | 14 -- .../iceberg/IcebergCatalogOperations.java | 4 +- .../lakehouse/iceberg/TestIcebergSchema.java | 61 +++--- .../lakehouse/iceberg/TestIcebergTable.java | 173 ++++++++---------- .../test/CatalogIcebergBaseIT.java | 57 +++--- .../test/TestMultipleJDBCLoad.java | 23 +-- .../gravitino/client/BaseSchemaCatalog.java | 60 +++--- .../gravitino/client/DTOConverters.java | 9 +- .../gravitino/client/FilesetCatalog.java | 17 +- 
.../client/GravitinoAdminClient.java | 2 +- .../gravitino/client/GravitinoClient.java | 2 +- .../gravitino/client/GravitinoMetalake.java | 10 +- .../gravitino/client/MessagingCatalog.java | 18 +- .../gravitino/client/RelationalCatalog.java | 17 +- .../client/TestRelationalCatalog.java | 36 ++-- .../gravitino/client/TestRelationalTable.java | 4 +- .../datastrato/gravitino/dto/CatalogDTO.java | 2 +- .../gravitino/dto/{rel => }/SchemaDTO.java | 5 +- .../dto/requests/SchemaUpdateRequest.java | 2 +- .../dto/responses/SchemaResponse.java | 2 +- .../gravitino/dto/util/DTOConverters.java | 4 +- .../dto/responses/TestResponses.java | 2 +- .../gravitino/catalog/CatalogManager.java | 2 +- .../catalog/EntityCombinedSchema.java | 2 +- .../catalog/OperationDispatcher.java | 2 +- .../gravitino/catalog/SchemaDispatcher.java | 2 +- .../catalog/SchemaNormalizeDispatcher.java | 4 +- .../catalog/SchemaOperationDispatcher.java | 4 +- .../gravitino/connector/BaseSchema.java | 2 +- .../connector/CatalogOperations.java | 2 +- .../gravitino/connector}/SupportsSchemas.java | 4 +- .../listener/SchemaEventDispatcher.java | 4 +- .../listener/api/event/AlterSchemaEvent.java | 2 +- .../api/event/AlterSchemaFailureEvent.java | 2 +- .../listener/api/info/SchemaInfo.java | 2 +- .../com/datastrato/gravitino/TestCatalog.java | 3 +- .../gravitino/catalog/TestBaseCatalog.java | 2 +- .../TestSchemaNormalizeDispatcher.java | 4 +- .../TestSchemaOperationDispatcher.java | 4 +- .../TestCatalogOperations.java | 22 ++- .../listener/api/event/TestSchemaEvent.java | 4 +- .../hadoop/GravitinoVirtualFileSystemIT.java | 5 +- .../test/trino/TrinoConnectorIT.java | 26 +-- .../test/trino/TrinoQueryITBase.java | 7 +- .../test/web/ui/CatalogsPageDorisTest.java | 5 +- .../test/web/ui/CatalogsPageTest.java | 19 +- .../server/web/rest/SchemaOperations.java | 4 +- .../server/web/rest/TestSchemaOperations.java | 4 +- .../spark/connector/catalog/BaseCatalog.java | 25 +-- .../catalog/CatalogConnectorMetadata.java | 22 +-- 
.../connector/metadata/GravitinoSchema.java | 2 +- .../trino/connector/GravitinoMockServer.java | 31 ++-- .../metadata/TestGravitinoSchema.java | 2 +- 82 files changed, 697 insertions(+), 784 deletions(-) rename api/src/main/java/com/datastrato/gravitino/{rel => }/Schema.java (88%) rename api/src/main/java/com/datastrato/gravitino/{rel => }/SchemaChange.java (99%) rename api/src/main/java/com/datastrato/gravitino/{rel => }/SupportsCatalogs.java (92%) rename api/src/main/java/com/datastrato/gravitino/{rel => }/SupportsMetalakes.java (94%) create mode 100644 api/src/main/java/com/datastrato/gravitino/SupportsSchemas.java rename common/src/main/java/com/datastrato/gravitino/dto/{rel => }/SchemaDTO.java (96%) rename {api/src/main/java/com/datastrato/gravitino/rel => core/src/main/java/com/datastrato/gravitino/connector}/SupportsSchemas.java (97%) rename core/src/test/java/com/datastrato/gravitino/{ => connector}/TestCatalogOperations.java (97%) diff --git a/api/src/main/java/com/datastrato/gravitino/Catalog.java b/api/src/main/java/com/datastrato/gravitino/Catalog.java index 9b27c866c12..b62859a90c8 100644 --- a/api/src/main/java/com/datastrato/gravitino/Catalog.java +++ b/api/src/main/java/com/datastrato/gravitino/Catalog.java @@ -7,13 +7,13 @@ import com.datastrato.gravitino.annotation.Evolving; import com.datastrato.gravitino.file.FilesetCatalog; import com.datastrato.gravitino.messaging.TopicCatalog; -import com.datastrato.gravitino.rel.SupportsSchemas; import com.datastrato.gravitino.rel.TableCatalog; import java.util.Map; /** * The interface of a catalog. The catalog is the second level entity in the gravitino system, - * containing a set of tables. + * containing a set of tables. The server side should use the other one with the same name in the + * core module. */ @Evolving public interface Catalog extends Auditable { @@ -72,8 +72,8 @@ enum Type { /** * Return the {@link SupportsSchemas} if the catalog supports schema operations. 
* - * @throws UnsupportedOperationException if the catalog does not support schema operations. * @return The {@link SupportsSchemas} if the catalog supports schema operations. + * @throws UnsupportedOperationException if the catalog does not support schema operations. */ default SupportsSchemas asSchemas() throws UnsupportedOperationException { throw new UnsupportedOperationException("Catalog does not support schema operations"); diff --git a/api/src/main/java/com/datastrato/gravitino/rel/Schema.java b/api/src/main/java/com/datastrato/gravitino/Schema.java similarity index 88% rename from api/src/main/java/com/datastrato/gravitino/rel/Schema.java rename to api/src/main/java/com/datastrato/gravitino/Schema.java index 418b4310ce5..b910ebdef3a 100644 --- a/api/src/main/java/com/datastrato/gravitino/rel/Schema.java +++ b/api/src/main/java/com/datastrato/gravitino/Schema.java @@ -2,9 +2,8 @@ * Copyright 2023 Datastrato Pvt Ltd. * This software is licensed under the Apache License version 2. */ -package com.datastrato.gravitino.rel; +package com.datastrato.gravitino; -import com.datastrato.gravitino.Auditable; import com.datastrato.gravitino.annotation.Evolving; import java.util.Collections; import java.util.Map; @@ -16,7 +15,7 @@ * which means it can be schema1.schema2.table. * *

This defines the basic properties of a schema. A catalog implementation with {@link - * SupportsSchemas} should implement this interface. + * com.datastrato.gravitino.SupportsSchemas} should implement this interface. */ @Evolving public interface Schema extends Auditable { diff --git a/api/src/main/java/com/datastrato/gravitino/rel/SchemaChange.java b/api/src/main/java/com/datastrato/gravitino/SchemaChange.java similarity index 99% rename from api/src/main/java/com/datastrato/gravitino/rel/SchemaChange.java rename to api/src/main/java/com/datastrato/gravitino/SchemaChange.java index 49d8d9691e5..87923c271f3 100644 --- a/api/src/main/java/com/datastrato/gravitino/rel/SchemaChange.java +++ b/api/src/main/java/com/datastrato/gravitino/SchemaChange.java @@ -18,7 +18,7 @@ // Referred from Apache Spark's connector/catalog implementation // sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/NamespaceChange.java -package com.datastrato.gravitino.rel; +package com.datastrato.gravitino; import com.datastrato.gravitino.annotation.Evolving; import java.util.Objects; diff --git a/api/src/main/java/com/datastrato/gravitino/rel/SupportsCatalogs.java b/api/src/main/java/com/datastrato/gravitino/SupportsCatalogs.java similarity index 92% rename from api/src/main/java/com/datastrato/gravitino/rel/SupportsCatalogs.java rename to api/src/main/java/com/datastrato/gravitino/SupportsCatalogs.java index 901fc3badfa..ea91a2c0cca 100644 --- a/api/src/main/java/com/datastrato/gravitino/rel/SupportsCatalogs.java +++ b/api/src/main/java/com/datastrato/gravitino/SupportsCatalogs.java @@ -2,12 +2,8 @@ * Copyright 2024 Datastrato Pvt Ltd. * This software is licensed under the Apache License version 2. 
*/ -package com.datastrato.gravitino.rel; +package com.datastrato.gravitino; -import com.datastrato.gravitino.Catalog; -import com.datastrato.gravitino.CatalogChange; -import com.datastrato.gravitino.CatalogProvider; -import com.datastrato.gravitino.NameIdentifier; import com.datastrato.gravitino.annotation.Evolving; import com.datastrato.gravitino.exceptions.CatalogAlreadyExistsException; import com.datastrato.gravitino.exceptions.NoSuchCatalogException; @@ -65,8 +61,8 @@ default boolean catalogExists(String catalogName) { * Create a catalog with specified identifier. * *

The parameter "provider" is a short name of the catalog, used to tell Gravitino which - * catalog should be created. The short name should be the same as the {@link CatalogProvider} - * interface provided. + * catalog should be created. The short name should be the same as the {@link + * com.datastrato.gravitino.CatalogProvider} interface provided. * * @param catalogName the name of the catalog. * @param type the type of the catalog. diff --git a/api/src/main/java/com/datastrato/gravitino/rel/SupportsMetalakes.java b/api/src/main/java/com/datastrato/gravitino/SupportsMetalakes.java similarity index 94% rename from api/src/main/java/com/datastrato/gravitino/rel/SupportsMetalakes.java rename to api/src/main/java/com/datastrato/gravitino/SupportsMetalakes.java index ca2e89c2171..9c08e7fe9de 100644 --- a/api/src/main/java/com/datastrato/gravitino/rel/SupportsMetalakes.java +++ b/api/src/main/java/com/datastrato/gravitino/SupportsMetalakes.java @@ -2,10 +2,8 @@ * Copyright 2024 Datastrato Pvt Ltd. * This software is licensed under the Apache License version 2. */ -package com.datastrato.gravitino.rel; +package com.datastrato.gravitino; -import com.datastrato.gravitino.Metalake; -import com.datastrato.gravitino.MetalakeChange; import com.datastrato.gravitino.annotation.Evolving; import com.datastrato.gravitino.exceptions.MetalakeAlreadyExistsException; import com.datastrato.gravitino.exceptions.NoSuchMetalakeException; diff --git a/api/src/main/java/com/datastrato/gravitino/SupportsSchemas.java b/api/src/main/java/com/datastrato/gravitino/SupportsSchemas.java new file mode 100644 index 00000000000..5dcc57edb29 --- /dev/null +++ b/api/src/main/java/com/datastrato/gravitino/SupportsSchemas.java @@ -0,0 +1,113 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// Referred from Apache Spark's connector/catalog implementation +// sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/SupportNamespaces.java + +package com.datastrato.gravitino; + +import com.datastrato.gravitino.annotation.Evolving; +import com.datastrato.gravitino.exceptions.NoSuchCatalogException; +import com.datastrato.gravitino.exceptions.NoSuchSchemaException; +import com.datastrato.gravitino.exceptions.NonEmptySchemaException; +import com.datastrato.gravitino.exceptions.SchemaAlreadyExistsException; +import java.util.Map; + +/** + * The client interface to support schema operations. The server side should use the other one with + * the same name in the core module. + */ +@Evolving +public interface SupportsSchemas { + + /** + * List schemas under the entity. + * + *

If an entity such as a table, view exists, its parent schemas must also exist and must be + * returned by this discovery method. For example, if table a.b.t exists, this method invoked as + * listSchemas(a) must return [a.b] in the result array + * + * @return An array of schema identifier under the namespace. + * @throws NoSuchCatalogException If the catalog does not exist. + */ + NameIdentifier[] listSchemas() throws NoSuchCatalogException; + + /** + * Check if a schema exists. + * + *

If an entity such as a table, view exists, its parent namespaces must also exist. For + * example, if table a.b.t exists, this method invoked as schemaExists(a.b) must return true. + * + * @param schemaName The name of the schema. + * @return True if the schema exists, false otherwise. + */ + default boolean schemaExists(String schemaName) { + try { + loadSchema(schemaName); + return true; + } catch (NoSuchSchemaException e) { + return false; + } + } + + /** + * Create a schema in the catalog. + * + * @param schemaName The name of the schema. + * @param comment The comment of the schema. + * @param properties The properties of the schema. + * @return The created schema. + * @throws NoSuchCatalogException If the catalog does not exist. + * @throws SchemaAlreadyExistsException If the schema already exists. + */ + Schema createSchema(String schemaName, String comment, Map properties) + throws NoSuchCatalogException, SchemaAlreadyExistsException; + + /** + * Load metadata properties for a schema. + * + * @param schemaName The name of the schema. + * @return A schema. + * @throws NoSuchSchemaException If the schema does not exist (optional). + */ + Schema loadSchema(String schemaName) throws NoSuchSchemaException; + + /** + * Apply the metadata change to a schema in the catalog. + * + * @param schemaName The name of the schema. + * @param changes The metadata changes to apply. + * @return The altered schema. + * @throws NoSuchSchemaException If the schema does not exist. + */ + Schema alterSchema(String schemaName, SchemaChange... changes) throws NoSuchSchemaException; + + /** + * Drop a schema from the catalog. If cascade option is true, recursively drop all objects within + * the schema. + * + *

If the catalog implementation does not support this operation, it may throw {@link + * UnsupportedOperationException}. + * + * @param schemaName The name of the schema. + * @param cascade If true, recursively drop all objects within the schema. + * @return True if the schema exists and is dropped successfully, false if the schema doesn't + * exist. + * @throws NonEmptySchemaException If the schema is not empty and cascade is false. + */ + boolean dropSchema(String schemaName, boolean cascade) throws NonEmptySchemaException; +} diff --git a/api/src/test/java/com/datastrato/gravitino/TestSchemaChange.java b/api/src/test/java/com/datastrato/gravitino/TestSchemaChange.java index 85cdb8d6440..5dd7de7f462 100644 --- a/api/src/test/java/com/datastrato/gravitino/TestSchemaChange.java +++ b/api/src/test/java/com/datastrato/gravitino/TestSchemaChange.java @@ -9,9 +9,8 @@ import static org.junit.jupiter.api.Assertions.assertNotEquals; import static org.junit.jupiter.api.Assertions.assertTrue; -import com.datastrato.gravitino.rel.SchemaChange; -import com.datastrato.gravitino.rel.SchemaChange.RemoveProperty; -import com.datastrato.gravitino.rel.SchemaChange.SetProperty; +import com.datastrato.gravitino.SchemaChange.RemoveProperty; +import com.datastrato.gravitino.SchemaChange.SetProperty; import org.junit.jupiter.api.Test; public class TestSchemaChange { diff --git a/catalogs/catalog-hadoop/src/main/java/com/datastrato/gravitino/catalog/hadoop/HadoopCatalog.java b/catalogs/catalog-hadoop/src/main/java/com/datastrato/gravitino/catalog/hadoop/HadoopCatalog.java index 6864ee874e4..f9ecb37578f 100644 --- a/catalogs/catalog-hadoop/src/main/java/com/datastrato/gravitino/catalog/hadoop/HadoopCatalog.java +++ b/catalogs/catalog-hadoop/src/main/java/com/datastrato/gravitino/catalog/hadoop/HadoopCatalog.java @@ -7,8 +7,6 @@ import com.datastrato.gravitino.connector.BaseCatalog; import com.datastrato.gravitino.connector.CatalogOperations; import 
com.datastrato.gravitino.connector.capability.Capability; -import com.datastrato.gravitino.file.FilesetCatalog; -import com.datastrato.gravitino.rel.SupportsSchemas; import java.util.Map; /** @@ -33,14 +31,4 @@ protected CatalogOperations newOps(Map config) { protected Capability newCapability() { return new HadoopCatalogCapability(); } - - @Override - public SupportsSchemas asSchemas() { - return (HadoopCatalogOperations) ops(); - } - - @Override - public FilesetCatalog asFilesetCatalog() { - return (HadoopCatalogOperations) ops(); - } } diff --git a/catalogs/catalog-hadoop/src/main/java/com/datastrato/gravitino/catalog/hadoop/HadoopCatalogOperations.java b/catalogs/catalog-hadoop/src/main/java/com/datastrato/gravitino/catalog/hadoop/HadoopCatalogOperations.java index 7de08bedf6e..3f077b99560 100644 --- a/catalogs/catalog-hadoop/src/main/java/com/datastrato/gravitino/catalog/hadoop/HadoopCatalogOperations.java +++ b/catalogs/catalog-hadoop/src/main/java/com/datastrato/gravitino/catalog/hadoop/HadoopCatalogOperations.java @@ -11,10 +11,13 @@ import com.datastrato.gravitino.GravitinoEnv; import com.datastrato.gravitino.NameIdentifier; import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.Schema; +import com.datastrato.gravitino.SchemaChange; import com.datastrato.gravitino.StringIdentifier; import com.datastrato.gravitino.connector.CatalogInfo; import com.datastrato.gravitino.connector.CatalogOperations; import com.datastrato.gravitino.connector.PropertiesMetadata; +import com.datastrato.gravitino.connector.SupportsSchemas; import com.datastrato.gravitino.exceptions.AlreadyExistsException; import com.datastrato.gravitino.exceptions.FilesetAlreadyExistsException; import com.datastrato.gravitino.exceptions.NoSuchCatalogException; @@ -29,9 +32,6 @@ import com.datastrato.gravitino.meta.AuditInfo; import com.datastrato.gravitino.meta.FilesetEntity; import com.datastrato.gravitino.meta.SchemaEntity; -import com.datastrato.gravitino.rel.Schema; 
-import com.datastrato.gravitino.rel.SchemaChange; -import com.datastrato.gravitino.rel.SupportsSchemas; import com.datastrato.gravitino.utils.PrincipalUtils; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; diff --git a/catalogs/catalog-hadoop/src/test/java/com/datastrato/gravitino/catalog/hadoop/TestHadoopCatalogOperations.java b/catalogs/catalog-hadoop/src/test/java/com/datastrato/gravitino/catalog/hadoop/TestHadoopCatalogOperations.java index bb4b69b752e..a46d1094d5a 100644 --- a/catalogs/catalog-hadoop/src/test/java/com/datastrato/gravitino/catalog/hadoop/TestHadoopCatalogOperations.java +++ b/catalogs/catalog-hadoop/src/test/java/com/datastrato/gravitino/catalog/hadoop/TestHadoopCatalogOperations.java @@ -19,6 +19,8 @@ import com.datastrato.gravitino.EntityStoreFactory; import com.datastrato.gravitino.NameIdentifier; import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.Schema; +import com.datastrato.gravitino.SchemaChange; import com.datastrato.gravitino.StringIdentifier; import com.datastrato.gravitino.exceptions.NoSuchFilesetException; import com.datastrato.gravitino.exceptions.NoSuchSchemaException; @@ -26,8 +28,6 @@ import com.datastrato.gravitino.exceptions.SchemaAlreadyExistsException; import com.datastrato.gravitino.file.Fileset; import com.datastrato.gravitino.file.FilesetChange; -import com.datastrato.gravitino.rel.Schema; -import com.datastrato.gravitino.rel.SchemaChange; import com.datastrato.gravitino.storage.IdGenerator; import com.datastrato.gravitino.storage.RandomIdGenerator; import com.google.common.collect.ImmutableMap; diff --git a/catalogs/catalog-hadoop/src/test/java/com/datastrato/gravitino/catalog/hadoop/integration/test/HadoopCatalogIT.java b/catalogs/catalog-hadoop/src/test/java/com/datastrato/gravitino/catalog/hadoop/integration/test/HadoopCatalogIT.java index 1a40d840f3b..277652b8ed7 100644 --- 
a/catalogs/catalog-hadoop/src/test/java/com/datastrato/gravitino/catalog/hadoop/integration/test/HadoopCatalogIT.java +++ b/catalogs/catalog-hadoop/src/test/java/com/datastrato/gravitino/catalog/hadoop/integration/test/HadoopCatalogIT.java @@ -7,6 +7,7 @@ import com.datastrato.gravitino.Catalog; import com.datastrato.gravitino.NameIdentifier; import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.Schema; import com.datastrato.gravitino.client.GravitinoMetalake; import com.datastrato.gravitino.exceptions.FilesetAlreadyExistsException; import com.datastrato.gravitino.exceptions.NoSuchFilesetException; @@ -16,7 +17,6 @@ import com.datastrato.gravitino.integration.test.container.HiveContainer; import com.datastrato.gravitino.integration.test.util.AbstractIT; import com.datastrato.gravitino.integration.test.util.GravitinoITUtils; -import com.datastrato.gravitino.rel.Schema; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; import java.io.IOException; @@ -100,15 +100,14 @@ private static void createCatalog() { } private static void createSchema() { - NameIdentifier ident = NameIdentifier.of(metalakeName, catalogName, schemaName); Map properties = Maps.newHashMap(); properties.put("key1", "val1"); properties.put("key2", "val2"); properties.put("location", defaultBaseLocation()); String comment = "comment"; - catalog.asSchemas().createSchema(ident, comment, properties); - Schema loadSchema = catalog.asSchemas().loadSchema(ident); + catalog.asSchemas().createSchema(schemaName, comment, properties); + Schema loadSchema = catalog.asSchemas().loadSchema(schemaName); Assertions.assertEquals(schemaName, loadSchema.name()); Assertions.assertEquals(comment, loadSchema.comment()); Assertions.assertEquals("val1", loadSchema.properties().get("key1")); @@ -117,9 +116,8 @@ private static void createSchema() { } private static void dropSchema() { - NameIdentifier ident = NameIdentifier.of(metalakeName, catalogName, schemaName); - 
catalog.asSchemas().dropSchema(ident, true); - Assertions.assertFalse(catalog.asSchemas().schemaExists(ident)); + catalog.asSchemas().dropSchema(schemaName, true); + Assertions.assertFalse(catalog.asSchemas().schemaExists(schemaName)); } @Test @@ -548,22 +546,13 @@ public void testDropCatalogWithEmptySchema() { // Create a schema without specifying location. String schemaName = GravitinoITUtils.genRandomName("test_drop_catalog_with_empty_schema_schema"); - filesetCatalog - .asSchemas() - .createSchema( - NameIdentifier.of(metalakeName, catalogName, schemaName), "comment", ImmutableMap.of()); + filesetCatalog.asSchemas().createSchema(schemaName, "comment", ImmutableMap.of()); // Drop the empty schema. - boolean dropped = - filesetCatalog - .asSchemas() - .dropSchema(NameIdentifier.of(metalakeName, catalogName, schemaName), true); + boolean dropped = filesetCatalog.asSchemas().dropSchema(schemaName, true); Assertions.assertTrue(dropped, "schema should be dropped"); Assertions.assertFalse( - filesetCatalog - .asSchemas() - .schemaExists(NameIdentifier.of(metalakeName, catalogName, schemaName)), - "schema should not be exists"); + filesetCatalog.asSchemas().schemaExists(schemaName), "schema should not be exists"); // Drop the catalog. 
dropped = metalake.dropCatalog(catalogName); diff --git a/catalogs/catalog-hive/src/main/java/com/datastrato/gravitino/catalog/hive/HiveCatalog.java b/catalogs/catalog-hive/src/main/java/com/datastrato/gravitino/catalog/hive/HiveCatalog.java index a84f582d2b0..ba6bceb530a 100644 --- a/catalogs/catalog-hive/src/main/java/com/datastrato/gravitino/catalog/hive/HiveCatalog.java +++ b/catalogs/catalog-hive/src/main/java/com/datastrato/gravitino/catalog/hive/HiveCatalog.java @@ -8,8 +8,6 @@ import com.datastrato.gravitino.connector.CatalogOperations; import com.datastrato.gravitino.connector.ProxyPlugin; import com.datastrato.gravitino.connector.capability.Capability; -import com.datastrato.gravitino.rel.SupportsSchemas; -import com.datastrato.gravitino.rel.TableCatalog; import java.util.Map; import java.util.Optional; @@ -43,26 +41,6 @@ public Capability newCapability() { return new HiveCatalogCapability(); } - /** - * Returns the Hive catalog operations as a {@link SupportsSchemas}. - * - * @return The Hive catalog operations as {@link HiveCatalogOperations}. - */ - @Override - public SupportsSchemas asSchemas() { - return (SupportsSchemas) ops(); - } - - /** - * Returns the Hive catalog operations as a {@link TableCatalog}. - * - * @return The Hive catalog operations as {@link HiveCatalogOperations}. 
- */ - @Override - public TableCatalog asTableCatalog() { - return (TableCatalog) ops(); - } - @Override protected Optional newProxyPlugin(Map config) { boolean impersonationEnabled = diff --git a/catalogs/catalog-hive/src/main/java/com/datastrato/gravitino/catalog/hive/HiveCatalogOperations.java b/catalogs/catalog-hive/src/main/java/com/datastrato/gravitino/catalog/hive/HiveCatalogOperations.java index 3e419c788e0..344c8519eb0 100644 --- a/catalogs/catalog-hive/src/main/java/com/datastrato/gravitino/catalog/hive/HiveCatalogOperations.java +++ b/catalogs/catalog-hive/src/main/java/com/datastrato/gravitino/catalog/hive/HiveCatalogOperations.java @@ -16,12 +16,14 @@ import com.datastrato.gravitino.NameIdentifier; import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.SchemaChange; import com.datastrato.gravitino.catalog.hive.HiveTablePropertiesMetadata.TableType; import com.datastrato.gravitino.catalog.hive.converter.ToHiveType; import com.datastrato.gravitino.connector.CatalogInfo; import com.datastrato.gravitino.connector.CatalogOperations; import com.datastrato.gravitino.connector.PropertiesMetadata; import com.datastrato.gravitino.connector.ProxyPlugin; +import com.datastrato.gravitino.connector.SupportsSchemas; import com.datastrato.gravitino.exceptions.NoSuchCatalogException; import com.datastrato.gravitino.exceptions.NoSuchSchemaException; import com.datastrato.gravitino.exceptions.NoSuchTableException; @@ -30,8 +32,6 @@ import com.datastrato.gravitino.exceptions.TableAlreadyExistsException; import com.datastrato.gravitino.meta.AuditInfo; import com.datastrato.gravitino.rel.Column; -import com.datastrato.gravitino.rel.SchemaChange; -import com.datastrato.gravitino.rel.SupportsSchemas; import com.datastrato.gravitino.rel.Table; import com.datastrato.gravitino.rel.TableCatalog; import com.datastrato.gravitino.rel.TableChange; diff --git a/catalogs/catalog-hive/src/test/java/com/datastrato/gravitino/catalog/hive/TestHiveSchema.java 
b/catalogs/catalog-hive/src/test/java/com/datastrato/gravitino/catalog/hive/TestHiveSchema.java index d7b044a1e06..5dfc1679ba5 100644 --- a/catalogs/catalog-hive/src/test/java/com/datastrato/gravitino/catalog/hive/TestHiveSchema.java +++ b/catalogs/catalog-hive/src/test/java/com/datastrato/gravitino/catalog/hive/TestHiveSchema.java @@ -9,13 +9,12 @@ import com.datastrato.gravitino.NameIdentifier; import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.Schema; +import com.datastrato.gravitino.SchemaChange; import com.datastrato.gravitino.catalog.hive.miniHMS.MiniHiveMetastoreService; import com.datastrato.gravitino.exceptions.SchemaAlreadyExistsException; import com.datastrato.gravitino.meta.AuditInfo; import com.datastrato.gravitino.meta.CatalogEntity; -import com.datastrato.gravitino.rel.Schema; -import com.datastrato.gravitino.rel.SchemaChange; -import com.datastrato.gravitino.rel.SupportsSchemas; import com.google.common.collect.Maps; import java.time.Instant; import java.util.Arrays; @@ -63,24 +62,25 @@ private HiveCatalog initHiveCatalog() { public void testCreateHiveSchema() { HiveCatalog hiveCatalog = initHiveCatalog(); + HiveCatalogOperations catalogOperations = (HiveCatalogOperations) hiveCatalog.ops(); + NameIdentifier ident = NameIdentifier.of("metalake", hiveCatalog.name(), genRandomName()); Map properties = Maps.newHashMap(); properties.put("key1", "val1"); properties.put("key2", "val2"); String comment = "comment"; - SupportsSchemas schemas = hiveCatalog.asSchemas(); - Schema schema = schemas.createSchema(ident, comment, properties); + Schema schema = catalogOperations.createSchema(ident, comment, properties); Assertions.assertEquals(ident.name(), schema.name()); Assertions.assertEquals(comment, schema.comment()); Assertions.assertEquals(properties, schema.properties()); - Assertions.assertTrue(schemas.schemaExists(ident)); + Assertions.assertTrue(catalogOperations.schemaExists(ident)); - NameIdentifier[] idents = 
schemas.listSchemas(ident.namespace()); + NameIdentifier[] idents = catalogOperations.listSchemas(ident.namespace()); Assertions.assertTrue(Arrays.asList(idents).contains(ident)); - Schema loadedSchema = schemas.loadSchema(ident); + Schema loadedSchema = catalogOperations.loadSchema(ident); Assertions.assertEquals(schema.auditInfo().creator(), loadedSchema.auditInfo().creator()); Assertions.assertNull(loadedSchema.auditInfo().createTime()); Assertions.assertEquals("val1", loadedSchema.properties().get("key1")); @@ -91,7 +91,7 @@ public void testCreateHiveSchema() { Assertions.assertThrows( SchemaAlreadyExistsException.class, () -> { - schemas.createSchema(ident, comment, properties); + catalogOperations.createSchema(ident, comment, properties); }); Assertions.assertTrue(exception.getMessage().contains("already exists in Hive Metastore")); } @@ -99,6 +99,7 @@ public void testCreateHiveSchema() { @Test public void testAlterSchema() { HiveCatalog hiveCatalog = initHiveCatalog(); + HiveCatalogOperations catalogOperations = (HiveCatalogOperations) hiveCatalog.ops(); NameIdentifier ident = NameIdentifier.of("metalake", hiveCatalog.name(), genRandomName()); Map properties = Maps.newHashMap(); @@ -106,20 +107,16 @@ public void testAlterSchema() { properties.put("key2", "val2"); String comment = "comment"; - Schema createdSchema = hiveCatalog.asSchemas().createSchema(ident, comment, properties); - Assertions.assertTrue(hiveCatalog.asSchemas().schemaExists(ident)); + Schema createdSchema = catalogOperations.createSchema(ident, comment, properties); + Assertions.assertTrue(catalogOperations.schemaExists(ident)); - Map properties1 = hiveCatalog.asSchemas().loadSchema(ident).properties(); + Map properties1 = catalogOperations.loadSchema(ident).properties(); Assertions.assertEquals("val1", properties1.get("key1")); Assertions.assertEquals("val2", properties1.get("key2")); - hiveCatalog - .asSchemas() - .alterSchema( - ident, - SchemaChange.removeProperty("key1"), - 
SchemaChange.setProperty("key2", "val2-alter")); - Schema alteredSchema = hiveCatalog.asSchemas().loadSchema(ident); + catalogOperations.alterSchema( + ident, SchemaChange.removeProperty("key1"), SchemaChange.setProperty("key2", "val2-alter")); + Schema alteredSchema = catalogOperations.loadSchema(ident); Map properties2 = alteredSchema.properties(); Assertions.assertFalse(properties2.containsKey("key1")); Assertions.assertEquals("val2-alter", properties2.get("key2")); @@ -130,13 +127,9 @@ public void testAlterSchema() { Assertions.assertNull(alteredSchema.auditInfo().lastModifier()); Assertions.assertNull(alteredSchema.auditInfo().lastModifiedTime()); - hiveCatalog - .asSchemas() - .alterSchema( - ident, - SchemaChange.setProperty("key3", "val3"), - SchemaChange.setProperty("key4", "val4")); - Schema alteredSchema1 = hiveCatalog.asSchemas().loadSchema(ident); + catalogOperations.alterSchema( + ident, SchemaChange.setProperty("key3", "val3"), SchemaChange.setProperty("key4", "val4")); + Schema alteredSchema1 = catalogOperations.loadSchema(ident); Map properties3 = alteredSchema1.properties(); Assertions.assertEquals("val3", properties3.get("key3")); Assertions.assertEquals("val4", properties3.get("key4")); @@ -151,6 +144,7 @@ public void testAlterSchema() { @Test public void testDropSchema() { HiveCatalog hiveCatalog = initHiveCatalog(); + HiveCatalogOperations catalogOperations = (HiveCatalogOperations) hiveCatalog.ops(); NameIdentifier ident = NameIdentifier.of("metalake", hiveCatalog.name(), genRandomName()); Map properties = Maps.newHashMap(); @@ -158,9 +152,9 @@ public void testDropSchema() { properties.put("key2", "val2"); String comment = "comment"; - hiveCatalog.asSchemas().createSchema(ident, comment, properties); - Assertions.assertTrue(hiveCatalog.asSchemas().schemaExists(ident)); - hiveCatalog.asSchemas().dropSchema(ident, true); - Assertions.assertFalse(hiveCatalog.asSchemas().schemaExists(ident)); + catalogOperations.createSchema(ident, comment, 
properties); + Assertions.assertTrue(catalogOperations.schemaExists(ident)); + catalogOperations.dropSchema(ident, true); + Assertions.assertFalse(catalogOperations.schemaExists(ident)); } } diff --git a/catalogs/catalog-hive/src/test/java/com/datastrato/gravitino/catalog/hive/TestHiveTable.java b/catalogs/catalog-hive/src/test/java/com/datastrato/gravitino/catalog/hive/TestHiveTable.java index d54889de6a6..edd5fb5f972 100644 --- a/catalogs/catalog-hive/src/test/java/com/datastrato/gravitino/catalog/hive/TestHiveTable.java +++ b/catalogs/catalog-hive/src/test/java/com/datastrato/gravitino/catalog/hive/TestHiveTable.java @@ -50,6 +50,7 @@ public class TestHiveTable extends MiniHiveMetastoreService { protected static final String HIVE_SCHEMA_NAME = "test_schema"; protected static final String HIVE_COMMENT = "test_comment"; private static HiveCatalog hiveCatalog; + private static HiveCatalogOperations hiveCatalogOperations; private static HiveSchema hiveSchema; private static final NameIdentifier schemaIdent = NameIdentifier.of(META_LAKE_NAME, HIVE_CATALOG_NAME, HIVE_SCHEMA_NAME); @@ -57,12 +58,13 @@ public class TestHiveTable extends MiniHiveMetastoreService { @BeforeAll private static void setup() { hiveCatalog = initHiveCatalog(); + hiveCatalogOperations = (HiveCatalogOperations) hiveCatalog.ops(); hiveSchema = initHiveSchema(hiveCatalog); } @AfterEach private void resetSchema() { - hiveCatalog.asSchemas().dropSchema(schemaIdent, true); + hiveCatalogOperations.dropSchema(schemaIdent, true); hiveSchema = initHiveSchema(hiveCatalog); } @@ -71,7 +73,7 @@ protected static HiveSchema initHiveSchema(HiveCatalog hiveCatalog) { properties.put("key1", "val1"); properties.put("key2", "val2"); - return (HiveSchema) hiveCatalog.asSchemas().createSchema(schemaIdent, HIVE_COMMENT, properties); + return hiveCatalogOperations.createSchema(schemaIdent, HIVE_COMMENT, properties); } protected static HiveCatalog initHiveCatalog() { @@ -144,23 +146,22 @@ public void 
testCreateHiveTable() { Distribution distribution = createDistribution(); SortOrder[] sortOrders = createSortOrder(); + HiveCatalogOperations hiveCatalogOperations = (HiveCatalogOperations) hiveCatalog.ops(); Table table = - hiveCatalog - .asTableCatalog() - .createTable( - tableIdentifier, - columns, - HIVE_COMMENT, - properties, - new Transform[0], - distribution, - sortOrders); + hiveCatalogOperations.createTable( + tableIdentifier, + columns, + HIVE_COMMENT, + properties, + new Transform[0], + distribution, + sortOrders); Assertions.assertEquals(tableIdentifier.name(), table.name()); Assertions.assertEquals(HIVE_COMMENT, table.comment()); Assertions.assertEquals("val1", table.properties().get("key1")); Assertions.assertEquals("val2", table.properties().get("key2")); - Table loadedTable = hiveCatalog.asTableCatalog().loadTable(tableIdentifier); + Table loadedTable = hiveCatalogOperations.loadTable(tableIdentifier); Assertions.assertEquals(table.auditInfo().creator(), loadedTable.auditInfo().creator()); Assertions.assertNull(loadedTable.auditInfo().lastModifier()); Assertions.assertNull(loadedTable.auditInfo().lastModifiedTime()); @@ -168,9 +169,8 @@ public void testCreateHiveTable() { Assertions.assertEquals("val1", loadedTable.properties().get("key1")); Assertions.assertEquals("val2", loadedTable.properties().get("key2")); - Assertions.assertTrue(hiveCatalog.asTableCatalog().tableExists(tableIdentifier)); - NameIdentifier[] tableIdents = - hiveCatalog.asTableCatalog().listTables(tableIdentifier.namespace()); + Assertions.assertTrue(hiveCatalogOperations.tableExists(tableIdentifier)); + NameIdentifier[] tableIdents = hiveCatalogOperations.listTables(tableIdentifier.namespace()); Assertions.assertTrue(Arrays.asList(tableIdents).contains(tableIdentifier)); // Compare sort and order @@ -185,7 +185,7 @@ public void testCreateHiveTable() { } // Test exception - TableCatalog tableCatalog = hiveCatalog.asTableCatalog(); + TableCatalog tableCatalog = 
hiveCatalogOperations; Throwable exception = Assertions.assertThrows( TableAlreadyExistsException.class, @@ -225,16 +225,16 @@ public void testCreatePartitionedHiveTable() { Transform[] partitions = new Transform[] {identity(col2.name())}; - TableCatalog tableCatalog = hiveCatalog.asTableCatalog(); Table table = - tableCatalog.createTable(tableIdentifier, columns, HIVE_COMMENT, properties, partitions); + hiveCatalogOperations.createTable( + tableIdentifier, columns, HIVE_COMMENT, properties, partitions); Assertions.assertEquals(tableIdentifier.name(), table.name()); Assertions.assertEquals(HIVE_COMMENT, table.comment()); Assertions.assertEquals("val1", table.properties().get("key1")); Assertions.assertEquals("val2", table.properties().get("key2")); Assertions.assertArrayEquals(partitions, table.partitioning()); - Table loadedTable = tableCatalog.loadTable(tableIdentifier); + Table loadedTable = hiveCatalogOperations.loadTable(tableIdentifier); Assertions.assertEquals(table.auditInfo().creator(), loadedTable.auditInfo().creator()); Assertions.assertNull(loadedTable.auditInfo().lastModifier()); @@ -244,8 +244,8 @@ public void testCreatePartitionedHiveTable() { Assertions.assertEquals("val2", loadedTable.properties().get("key2")); Assertions.assertArrayEquals(partitions, loadedTable.partitioning()); - Assertions.assertTrue(tableCatalog.tableExists(tableIdentifier)); - NameIdentifier[] tableIdents = tableCatalog.listTables(tableIdentifier.namespace()); + Assertions.assertTrue(hiveCatalogOperations.tableExists(tableIdentifier)); + NameIdentifier[] tableIdents = hiveCatalogOperations.listTables(tableIdentifier.namespace()); Assertions.assertTrue(Arrays.asList(tableIdents).contains(tableIdentifier)); // Test exception @@ -254,7 +254,7 @@ public void testCreatePartitionedHiveTable() { Assertions.assertThrows( IllegalArgumentException.class, () -> - tableCatalog.createTable( + hiveCatalogOperations.createTable( tableIdentifier, columns, HIVE_COMMENT, properties, 
partitions2)); Assertions.assertTrue( exception.getMessage().contains("Hive partition only supports identity transform")); @@ -268,7 +268,9 @@ public void testCreatePartitionedHiveTable() { exception = Assertions.assertThrows( RuntimeException.class, - () -> tableCatalog.createTable(randid, columns, HIVE_COMMENT, properties, partitions3)); + () -> + hiveCatalogOperations.createTable( + randid, columns, HIVE_COMMENT, properties, partitions3)); Assertions.assertTrue( exception.getMessage().contains("Hive partition does not support nested field")); @@ -277,7 +279,7 @@ public void testCreatePartitionedHiveTable() { Assertions.assertThrows( IllegalArgumentException.class, () -> - tableCatalog.createTable( + hiveCatalogOperations.createTable( tableIdentifier, columns, HIVE_COMMENT, properties, partitions4)); Assertions.assertEquals( "The partition field must be placed at the end of the columns in order", @@ -306,26 +308,24 @@ public void testDropHiveTable() { .build(); Column[] columns = new Column[] {col1, col2}; - hiveCatalog - .asTableCatalog() - .createTable( - tableIdentifier, - columns, - HIVE_COMMENT, - properties, - new Transform[0], - Distributions.NONE, - new SortOrder[0]); + hiveCatalogOperations.createTable( + tableIdentifier, + columns, + HIVE_COMMENT, + properties, + new Transform[0], + Distributions.NONE, + new SortOrder[0]); - Assertions.assertTrue(hiveCatalog.asTableCatalog().tableExists(tableIdentifier)); - hiveCatalog.asTableCatalog().dropTable(tableIdentifier); - Assertions.assertFalse(hiveCatalog.asTableCatalog().tableExists(tableIdentifier)); + Assertions.assertTrue(hiveCatalogOperations.tableExists(tableIdentifier)); + hiveCatalogOperations.dropTable(tableIdentifier); + Assertions.assertFalse(hiveCatalogOperations.tableExists(tableIdentifier)); } @Test public void testListTableException() { Namespace tableNs = Namespace.of("metalake", hiveCatalog.name(), "not_exist_db"); - TableCatalog tableCatalog = hiveCatalog.asTableCatalog(); + TableCatalog 
tableCatalog = hiveCatalogOperations; Throwable exception = Assertions.assertThrows( NoSuchSchemaException.class, () -> tableCatalog.listTables(tableNs)); @@ -358,7 +358,7 @@ public void testAlterHiveTable() { Distribution distribution = createDistribution(); SortOrder[] sortOrders = createSortOrder(); - TableCatalog tableCatalog = hiveCatalog.asTableCatalog(); + TableCatalog tableCatalog = hiveCatalogOperations; Table createdTable = tableCatalog.createTable( tableIdentifier, @@ -368,7 +368,7 @@ public void testAlterHiveTable() { new Transform[] {identity(col2.name())}, distribution, sortOrders); - Assertions.assertTrue(hiveCatalog.asTableCatalog().tableExists(tableIdentifier)); + Assertions.assertTrue(hiveCatalogOperations.tableExists(tableIdentifier)); TableChange tableChange1 = TableChange.updateColumnPosition( @@ -522,19 +522,17 @@ public void testPurgeHiveTable() { Distribution distribution = createDistribution(); SortOrder[] sortOrders = createSortOrder(); - hiveCatalog - .asTableCatalog() - .createTable( - tableIdentifier, - columns, - HIVE_COMMENT, - properties, - new Transform[0], - distribution, - sortOrders); - Assertions.assertTrue(hiveCatalog.asTableCatalog().tableExists(tableIdentifier)); - hiveCatalog.asTableCatalog().purgeTable(tableIdentifier); - Assertions.assertFalse(hiveCatalog.asTableCatalog().tableExists(tableIdentifier)); + hiveCatalogOperations.createTable( + tableIdentifier, + columns, + HIVE_COMMENT, + properties, + new Transform[0], + distribution, + sortOrders); + Assertions.assertTrue(hiveCatalogOperations.tableExists(tableIdentifier)); + hiveCatalogOperations.purgeTable(tableIdentifier); + Assertions.assertFalse(hiveCatalogOperations.tableExists(tableIdentifier)); } @Test @@ -561,9 +559,8 @@ public void testPurgeExternalHiveTable() { Distribution distribution = createDistribution(); SortOrder[] sortOrders = createSortOrder(); - TableCatalog tableCatalog = hiveCatalog.asTableCatalog(); - tableCatalog.createTable( + 
hiveCatalogOperations.createTable( tableIdentifier, columns, HIVE_COMMENT, @@ -571,13 +568,13 @@ public void testPurgeExternalHiveTable() { new Transform[0], distribution, sortOrders); - Assertions.assertTrue(tableCatalog.tableExists(tableIdentifier)); + Assertions.assertTrue(hiveCatalogOperations.tableExists(tableIdentifier)); Assertions.assertThrows( UnsupportedOperationException.class, () -> { - tableCatalog.purgeTable(tableIdentifier); + hiveCatalogOperations.purgeTable(tableIdentifier); }, "Can't purge a external hive table"); - Assertions.assertTrue(tableCatalog.tableExists(tableIdentifier)); + Assertions.assertTrue(hiveCatalogOperations.tableExists(tableIdentifier)); } } diff --git a/catalogs/catalog-hive/src/test/java/com/datastrato/gravitino/catalog/hive/TestHiveTableOperations.java b/catalogs/catalog-hive/src/test/java/com/datastrato/gravitino/catalog/hive/TestHiveTableOperations.java index e979fee6850..f2423c80405 100644 --- a/catalogs/catalog-hive/src/test/java/com/datastrato/gravitino/catalog/hive/TestHiveTableOperations.java +++ b/catalogs/catalog-hive/src/test/java/com/datastrato/gravitino/catalog/hive/TestHiveTableOperations.java @@ -36,6 +36,7 @@ public class TestHiveTableOperations extends MiniHiveMetastoreService { private static final NameIdentifier tableIdentifier = NameIdentifier.of(META_LAKE_NAME, HIVE_CATALOG_NAME, HIVE_SCHEMA_NAME, genRandomName()); private static HiveCatalog hiveCatalog; + private static HiveCatalogOperations hiveCatalogOperations; private static HiveTable hiveTable; private static Column[] columns; private static Partition existingPartition; @@ -43,6 +44,7 @@ public class TestHiveTableOperations extends MiniHiveMetastoreService { @BeforeAll public static void setup() { hiveCatalog = initHiveCatalog(); + hiveCatalogOperations = (HiveCatalogOperations) hiveCatalog.ops(); initHiveSchema(hiveCatalog); hiveTable = createPartitionedTable(); @@ -73,9 +75,8 @@ private static HiveTable createPartitionedTable() { Transform[] 
partitioning = new Transform[] {identity(col1.name()), identity(col2.name())}; return (HiveTable) - hiveCatalog - .asTableCatalog() - .createTable(tableIdentifier, columns, HIVE_COMMENT, properties, partitioning); + hiveCatalogOperations.createTable( + tableIdentifier, columns, HIVE_COMMENT, properties, partitioning); } @Test diff --git a/catalogs/catalog-hive/src/test/java/com/datastrato/gravitino/catalog/hive/integration/test/CatalogHiveIT.java b/catalogs/catalog-hive/src/test/java/com/datastrato/gravitino/catalog/hive/integration/test/CatalogHiveIT.java index 3fd03fd4548..69c182a4273 100644 --- a/catalogs/catalog-hive/src/test/java/com/datastrato/gravitino/catalog/hive/integration/test/CatalogHiveIT.java +++ b/catalogs/catalog-hive/src/test/java/com/datastrato/gravitino/catalog/hive/integration/test/CatalogHiveIT.java @@ -31,6 +31,9 @@ import com.datastrato.gravitino.CatalogChange; import com.datastrato.gravitino.MetalakeChange; import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.Schema; +import com.datastrato.gravitino.SchemaChange; +import com.datastrato.gravitino.SupportsSchemas; import com.datastrato.gravitino.auth.AuthConstants; import com.datastrato.gravitino.catalog.hive.HiveCatalogOperations; import com.datastrato.gravitino.catalog.hive.HiveClientPool; @@ -48,9 +51,6 @@ import com.datastrato.gravitino.integration.test.util.AbstractIT; import com.datastrato.gravitino.integration.test.util.GravitinoITUtils; import com.datastrato.gravitino.rel.Column; -import com.datastrato.gravitino.rel.Schema; -import com.datastrato.gravitino.rel.SchemaChange; -import com.datastrato.gravitino.rel.SupportsSchemas; import com.datastrato.gravitino.rel.Table; import com.datastrato.gravitino.rel.TableCatalog; import com.datastrato.gravitino.rel.TableChange; @@ -220,7 +220,7 @@ public static void stop() throws IOException { @AfterEach public void resetSchema() throws TException, InterruptedException { - 
catalog.asSchemas().dropSchema(NameIdentifier.of(metalakeName, catalogName, schemaName), true); + catalog.asSchemas().dropSchema(schemaName, true); assertThrows( NoSuchObjectException.class, () -> hiveClientPool.run(client -> client.getDatabase(schemaName))); @@ -262,8 +262,8 @@ private static void createSchema() throws TException, InterruptedException { schemaName.toLowerCase())); String comment = "comment"; - catalog.asSchemas().createSchema(ident, comment, properties); - Schema loadSchema = catalog.asSchemas().loadSchema(ident); + catalog.asSchemas().createSchema(ident.name(), comment, properties); + Schema loadSchema = catalog.asSchemas().loadSchema(ident.name()); Assertions.assertEquals(schemaName.toLowerCase(), loadSchema.name()); Assertions.assertEquals(comment, loadSchema.comment()); Assertions.assertEquals("val1", loadSchema.properties().get("key1")); @@ -616,7 +616,7 @@ public void testHiveSchemaProperties() throws TException, InterruptedException { HiveContainer.HDFS_DEFAULTFS_PORT); properties.put(HiveSchemaPropertiesMetadata.LOCATION, expectedSchemaLocation); - catalog.asSchemas().createSchema(schemaIdent, "comment", properties); + catalog.asSchemas().createSchema(schemaIdent.name(), "comment", properties); Database actualSchema = hiveClientPool.run(client -> client.getDatabase(schemaIdent.name())); String actualSchemaLocation = actualSchema.getLocationUri(); @@ -1254,21 +1254,21 @@ public void testAlterSchema() throws TException, InterruptedException { GravitinoMetalake metalake = client.loadMetalake(metalakeName); Catalog catalog = metalake.loadCatalog(catalogName); - Schema schema = catalog.asSchemas().loadSchema(ident); + Schema schema = catalog.asSchemas().loadSchema(ident.name()); Assertions.assertNull(schema.auditInfo().lastModifier()); Assertions.assertEquals(AuthConstants.ANONYMOUS_USER, schema.auditInfo().creator()); schema = catalog .asSchemas() .alterSchema( - ident, + ident.name(), SchemaChange.removeProperty("key1"), 
SchemaChange.setProperty("key2", "val2-alter")); Assertions.assertEquals(AuthConstants.ANONYMOUS_USER, schema.auditInfo().lastModifier()); Assertions.assertEquals(AuthConstants.ANONYMOUS_USER, schema.auditInfo().creator()); - Map properties2 = catalog.asSchemas().loadSchema(ident).properties(); + Map properties2 = catalog.asSchemas().loadSchema(ident.name()).properties(); Assertions.assertFalse(properties2.containsKey("key1")); Assertions.assertEquals("val2-alter", properties2.get("key2")); @@ -1311,11 +1311,11 @@ void testLoadEntityWithSamePrefix() { final int length = i; final NameIdentifier id = NameIdentifier.of(metalakeName, catalogName, schemaName.substring(0, length)); - Assertions.assertThrows(NoSuchSchemaException.class, () -> schemas.loadSchema(id)); + Assertions.assertThrows(NoSuchSchemaException.class, () -> schemas.loadSchema(id.name())); } NameIdentifier idC = NameIdentifier.of(metalakeName, catalogName, schemaName + "a"); - Assertions.assertThrows(NoSuchSchemaException.class, () -> schemas.loadSchema(idC)); + Assertions.assertThrows(NoSuchSchemaException.class, () -> schemas.loadSchema(idC.name())); TableCatalog tableCatalog = catalog.asTableCatalog(); @@ -1383,10 +1383,7 @@ void testAlterEntityName() { // Schema does not have the rename operation. 
final String schemaName = GravitinoITUtils.genRandomName("CatalogHiveIT_schema"); - catalog - .asSchemas() - .createSchema( - NameIdentifier.of(metalakeName, catalogName, schemaName), "", ImmutableMap.of()); + catalog.asSchemas().createSchema(schemaName, "", ImmutableMap.of()); final Catalog cata = catalog; // Now try to rename table diff --git a/catalogs/catalog-hive/src/test/java/com/datastrato/gravitino/catalog/hive/integration/test/ProxyCatalogHiveIT.java b/catalogs/catalog-hive/src/test/java/com/datastrato/gravitino/catalog/hive/integration/test/ProxyCatalogHiveIT.java index f8a339e871a..f6f2beb1ab2 100644 --- a/catalogs/catalog-hive/src/test/java/com/datastrato/gravitino/catalog/hive/integration/test/ProxyCatalogHiveIT.java +++ b/catalogs/catalog-hive/src/test/java/com/datastrato/gravitino/catalog/hive/integration/test/ProxyCatalogHiveIT.java @@ -11,6 +11,7 @@ import com.datastrato.gravitino.Catalog; import com.datastrato.gravitino.Configs; import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.SupportsSchemas; import com.datastrato.gravitino.auth.AuthenticatorType; import com.datastrato.gravitino.catalog.hive.HiveClientPool; import com.datastrato.gravitino.client.GravitinoAdminClient; @@ -21,7 +22,6 @@ import com.datastrato.gravitino.integration.test.util.AbstractIT; import com.datastrato.gravitino.integration.test.util.GravitinoITUtils; import com.datastrato.gravitino.rel.Column; -import com.datastrato.gravitino.rel.SupportsSchemas; import com.datastrato.gravitino.rel.Table; import com.datastrato.gravitino.rel.TableCatalog; import com.datastrato.gravitino.rel.expressions.literals.Literal; @@ -127,7 +127,6 @@ public void testOperateSchema() throws Exception { String anotherSchemaName = GravitinoITUtils.genRandomName(SCHEMA_PREFIX); NameIdentifier ident = NameIdentifier.of(METALAKE_NAME, CATALOG_NAME, schemaName); - NameIdentifier anotherIdent = NameIdentifier.of(METALAKE_NAME, CATALOG_NAME, anotherSchemaName); String comment = 
"comment"; createSchema(schemaName, ident, comment); @@ -149,7 +148,8 @@ public void testOperateSchema() throws Exception { SupportsSchemas schemas = anotherCatalog.asSchemas(); Exception e = Assertions.assertThrows( - RuntimeException.class, () -> schemas.createSchema(anotherIdent, comment, properties)); + RuntimeException.class, + () -> schemas.createSchema(anotherSchemaName, comment, properties)); Assertions.assertTrue(e.getMessage().contains("AccessControlException Permission denied")); } @@ -209,7 +209,7 @@ private static void createSchema(String schemaName, NameIdentifier ident, String containerSuite.getHiveContainer().getContainerIpAddress(), HiveContainer.HDFS_DEFAULTFS_PORT, schemaName.toLowerCase())); - catalog.asSchemas().createSchema(ident, comment, properties); + catalog.asSchemas().createSchema(ident.name(), comment, properties); } @Test diff --git a/catalogs/catalog-jdbc-common/src/main/java/com/datastrato/gravitino/catalog/jdbc/JdbcCatalog.java b/catalogs/catalog-jdbc-common/src/main/java/com/datastrato/gravitino/catalog/jdbc/JdbcCatalog.java index 7d1c30bfa32..b576591cf78 100644 --- a/catalogs/catalog-jdbc-common/src/main/java/com/datastrato/gravitino/catalog/jdbc/JdbcCatalog.java +++ b/catalogs/catalog-jdbc-common/src/main/java/com/datastrato/gravitino/catalog/jdbc/JdbcCatalog.java @@ -12,8 +12,6 @@ import com.datastrato.gravitino.connector.BaseCatalog; import com.datastrato.gravitino.connector.CatalogOperations; import com.datastrato.gravitino.connector.PropertyEntry; -import com.datastrato.gravitino.rel.SupportsSchemas; -import com.datastrato.gravitino.rel.TableCatalog; import java.util.Collections; import java.util.Map; @@ -40,18 +38,6 @@ protected CatalogOperations newOps(Map config) { return ops; } - /** @return The Jdbc catalog operations as {@link JdbcCatalogOperations}. 
*/ - @Override - public SupportsSchemas asSchemas() { - return (JdbcCatalogOperations) ops(); - } - - /** @return The Jdbc catalog operations as {@link JdbcCatalogOperations}. */ - @Override - public TableCatalog asTableCatalog() { - return (JdbcCatalogOperations) ops(); - } - /** @return The {@link JdbcExceptionConverter} to be used by the catalog. */ protected JdbcExceptionConverter createExceptionConverter() { return new JdbcExceptionConverter() {}; diff --git a/catalogs/catalog-jdbc-common/src/main/java/com/datastrato/gravitino/catalog/jdbc/JdbcCatalogOperations.java b/catalogs/catalog-jdbc-common/src/main/java/com/datastrato/gravitino/catalog/jdbc/JdbcCatalogOperations.java index 9463478ce1c..cd0b6197ba0 100644 --- a/catalogs/catalog-jdbc-common/src/main/java/com/datastrato/gravitino/catalog/jdbc/JdbcCatalogOperations.java +++ b/catalogs/catalog-jdbc-common/src/main/java/com/datastrato/gravitino/catalog/jdbc/JdbcCatalogOperations.java @@ -8,6 +8,7 @@ import com.datastrato.gravitino.NameIdentifier; import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.SchemaChange; import com.datastrato.gravitino.StringIdentifier; import com.datastrato.gravitino.catalog.jdbc.config.JdbcConfig; import com.datastrato.gravitino.catalog.jdbc.converter.JdbcColumnDefaultValueConverter; @@ -21,6 +22,7 @@ import com.datastrato.gravitino.connector.CatalogInfo; import com.datastrato.gravitino.connector.CatalogOperations; import com.datastrato.gravitino.connector.PropertiesMetadata; +import com.datastrato.gravitino.connector.SupportsSchemas; import com.datastrato.gravitino.exceptions.NoSuchCatalogException; import com.datastrato.gravitino.exceptions.NoSuchSchemaException; import com.datastrato.gravitino.exceptions.NoSuchTableException; @@ -29,8 +31,6 @@ import com.datastrato.gravitino.exceptions.TableAlreadyExistsException; import com.datastrato.gravitino.meta.AuditInfo; import com.datastrato.gravitino.rel.Column; -import com.datastrato.gravitino.rel.SchemaChange; 
-import com.datastrato.gravitino.rel.SupportsSchemas; import com.datastrato.gravitino.rel.Table; import com.datastrato.gravitino.rel.TableCatalog; import com.datastrato.gravitino.rel.TableChange; diff --git a/catalogs/catalog-jdbc-doris/src/test/java/com/datastrato/gravitino/catalog/doris/integration/test/CatalogDorisIT.java b/catalogs/catalog-jdbc-doris/src/test/java/com/datastrato/gravitino/catalog/doris/integration/test/CatalogDorisIT.java index 61a41784109..5751ae6d817 100644 --- a/catalogs/catalog-jdbc-doris/src/test/java/com/datastrato/gravitino/catalog/doris/integration/test/CatalogDorisIT.java +++ b/catalogs/catalog-jdbc-doris/src/test/java/com/datastrato/gravitino/catalog/doris/integration/test/CatalogDorisIT.java @@ -9,6 +9,8 @@ import com.datastrato.gravitino.Catalog; import com.datastrato.gravitino.NameIdentifier; import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.Schema; +import com.datastrato.gravitino.SupportsSchemas; import com.datastrato.gravitino.catalog.jdbc.config.JdbcConfig; import com.datastrato.gravitino.client.GravitinoMetalake; import com.datastrato.gravitino.exceptions.NoSuchSchemaException; @@ -20,8 +22,6 @@ import com.datastrato.gravitino.integration.test.util.ITUtils; import com.datastrato.gravitino.integration.test.util.JdbcDriverDownloader; import com.datastrato.gravitino.rel.Column; -import com.datastrato.gravitino.rel.Schema; -import com.datastrato.gravitino.rel.SupportsSchemas; import com.datastrato.gravitino.rel.Table; import com.datastrato.gravitino.rel.TableCatalog; import com.datastrato.gravitino.rel.TableChange; @@ -124,7 +124,7 @@ private void clearTableAndSchema() { for (NameIdentifier nameIdentifier : nameIdentifiers) { catalog.asTableCatalog().dropTable(nameIdentifier); } - catalog.asSchemas().dropSchema(NameIdentifier.of(metalakeName, catalogName, schemaName), true); + catalog.asSchemas().dropSchema(schemaName, true); } private void createMetalake() { @@ -174,8 +174,8 @@ private void 
createSchema() { Map prop = Maps.newHashMap(); prop.put(propKey, propValue); - Schema createdSchema = catalog.asSchemas().createSchema(ident, schema_comment, prop); - Schema loadSchema = catalog.asSchemas().loadSchema(ident); + Schema createdSchema = catalog.asSchemas().createSchema(ident.name(), schema_comment, prop); + Schema loadSchema = catalog.asSchemas().loadSchema(ident.name()); Assertions.assertEquals(createdSchema.name(), loadSchema.name()); Assertions.assertEquals(createdSchema.properties().get(propKey), propValue); @@ -202,10 +202,9 @@ private Distribution createDistribution() { @Test void testDorisSchemaBasicOperation() { SupportsSchemas schemas = catalog.asSchemas(); - Namespace namespace = Namespace.of(metalakeName, catalogName); // test list schemas - NameIdentifier[] nameIdentifiers = schemas.listSchemas(namespace); + NameIdentifier[] nameIdentifiers = schemas.listSchemas(); Set schemaNames = Arrays.stream(nameIdentifiers).map(NameIdentifier::name).collect(Collectors.toSet()); Assertions.assertTrue(schemaNames.contains(schemaName)); @@ -213,9 +212,9 @@ void testDorisSchemaBasicOperation() { // test create schema already exists String testSchemaName = GravitinoITUtils.genRandomName("create_schema_test"); NameIdentifier schemaIdent = NameIdentifier.of(metalakeName, catalogName, testSchemaName); - schemas.createSchema(schemaIdent, schema_comment, Collections.emptyMap()); + schemas.createSchema(schemaIdent.name(), schema_comment, Collections.emptyMap()); - nameIdentifiers = schemas.listSchemas(Namespace.of(metalakeName, catalogName)); + nameIdentifiers = schemas.listSchemas(); Map schemaMap = Arrays.stream(nameIdentifiers).collect(Collectors.toMap(NameIdentifier::name, v -> v)); Assertions.assertTrue(schemaMap.containsKey(testSchemaName)); @@ -223,37 +222,33 @@ void testDorisSchemaBasicOperation() { Assertions.assertThrows( SchemaAlreadyExistsException.class, () -> { - schemas.createSchema(schemaIdent, schema_comment, Collections.emptyMap()); + 
schemas.createSchema(schemaIdent.name(), schema_comment, Collections.emptyMap()); }); // test drop schema - Assertions.assertTrue(schemas.dropSchema(schemaIdent, false)); + Assertions.assertTrue(schemas.dropSchema(schemaIdent.name(), false)); // check schema is deleted // 1. check by load schema - Assertions.assertThrows(NoSuchSchemaException.class, () -> schemas.loadSchema(schemaIdent)); + Assertions.assertThrows( + NoSuchSchemaException.class, () -> schemas.loadSchema(schemaIdent.name())); // 2. check by list schema - nameIdentifiers = schemas.listSchemas(Namespace.of(metalakeName, catalogName)); + nameIdentifiers = schemas.listSchemas(); schemaMap = Arrays.stream(nameIdentifiers).collect(Collectors.toMap(NameIdentifier::name, v -> v)); Assertions.assertFalse(schemaMap.containsKey(testSchemaName)); // test drop schema not exists NameIdentifier notExistsSchemaIdent = NameIdentifier.of(metalakeName, catalogName, "no-exits"); - Assertions.assertFalse(schemas.dropSchema(notExistsSchemaIdent, false)); + Assertions.assertFalse(schemas.dropSchema(notExistsSchemaIdent.name(), false)); } @Test void testDropDorisSchema() { String schemaName = GravitinoITUtils.genRandomName("doris_it_schema_dropped").toLowerCase(); - catalog - .asSchemas() - .createSchema( - NameIdentifier.of(metalakeName, catalogName, schemaName), - "test_comment", - ImmutableMap.of("key", "value")); + catalog.asSchemas().createSchema(schemaName, "test_comment", ImmutableMap.of("key", "value")); catalog .asTableCatalog() @@ -267,27 +262,20 @@ void testDropDorisSchema() { null); // Try to drop a database, and cascade equals to false, it should not be allowed. 
- Assertions.assertFalse( - catalog - .asSchemas() - .dropSchema(NameIdentifier.of(metalakeName, catalogName, schemaName), false)); + Assertions.assertFalse(catalog.asSchemas().dropSchema(schemaName, false)); // Check the database still exists - catalog.asSchemas().loadSchema(NameIdentifier.of(metalakeName, catalogName, schemaName)); + catalog.asSchemas().loadSchema(schemaName); // Try to drop a database, and cascade equals to true, it should be allowed. - Assertions.assertTrue( - catalog - .asSchemas() - .dropSchema(NameIdentifier.of(metalakeName, catalogName, schemaName), true)); + Assertions.assertTrue(catalog.asSchemas().dropSchema(schemaName, true)); // Check database has been dropped SupportsSchemas schemas = catalog.asSchemas(); - NameIdentifier of = NameIdentifier.of(metalakeName, catalogName, schemaName); Assertions.assertThrows( NoSuchSchemaException.class, () -> { - schemas.loadSchema(of); + schemas.loadSchema(schemaName); }); } diff --git a/catalogs/catalog-jdbc-mysql/src/test/java/com/datastrato/gravitino/catalog/mysql/integration/test/AuditCatalogMysqlIT.java b/catalogs/catalog-jdbc-mysql/src/test/java/com/datastrato/gravitino/catalog/mysql/integration/test/AuditCatalogMysqlIT.java index 36d34796ff6..d3459975463 100644 --- a/catalogs/catalog-jdbc-mysql/src/test/java/com/datastrato/gravitino/catalog/mysql/integration/test/AuditCatalogMysqlIT.java +++ b/catalogs/catalog-jdbc-mysql/src/test/java/com/datastrato/gravitino/catalog/mysql/integration/test/AuditCatalogMysqlIT.java @@ -9,6 +9,7 @@ import com.datastrato.gravitino.CatalogChange; import com.datastrato.gravitino.Configs; import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.Schema; import com.datastrato.gravitino.auth.AuthenticatorType; import com.datastrato.gravitino.catalog.jdbc.config.JdbcConfig; import com.datastrato.gravitino.catalog.mysql.integration.test.service.MysqlService; @@ -21,7 +22,6 @@ import 
com.datastrato.gravitino.integration.test.util.JdbcDriverDownloader; import com.datastrato.gravitino.integration.test.util.TestDatabaseName; import com.datastrato.gravitino.rel.Column; -import com.datastrato.gravitino.rel.Schema; import com.datastrato.gravitino.rel.Table; import com.datastrato.gravitino.rel.TableChange; import com.datastrato.gravitino.rel.types.Types; @@ -99,9 +99,8 @@ public void testAuditSchema() throws Exception { String catalogName = GravitinoITUtils.genRandomName("audit_mysql_schema_catalog"); String schemaName = GravitinoITUtils.genRandomName("audit_mysql_schema"); Catalog catalog = createCatalog(catalogName); - NameIdentifier ident = NameIdentifier.of(metalakeName, catalogName, schemaName); Map prop = Maps.newHashMap(); - Schema schema = catalog.asSchemas().createSchema(ident, null, prop); + Schema schema = catalog.asSchemas().createSchema(schemaName, null, prop); Assertions.assertEquals(expectUser, schema.auditInfo().creator()); Assertions.assertNull(schema.auditInfo().lastModifier()); } @@ -116,9 +115,7 @@ public void testAuditTable() throws Exception { Column col1 = Column.of("col_1", Types.IntegerType.get(), "col_1_comment"); - catalog - .asSchemas() - .createSchema(NameIdentifier.of(metalakeName, catalogName, schemaName), null, properties); + catalog.asSchemas().createSchema(schemaName, null, properties); Table table = catalog .asTableCatalog() diff --git a/catalogs/catalog-jdbc-mysql/src/test/java/com/datastrato/gravitino/catalog/mysql/integration/test/CatalogMysqlIT.java b/catalogs/catalog-jdbc-mysql/src/test/java/com/datastrato/gravitino/catalog/mysql/integration/test/CatalogMysqlIT.java index eb6afb36880..425e33db939 100644 --- a/catalogs/catalog-jdbc-mysql/src/test/java/com/datastrato/gravitino/catalog/mysql/integration/test/CatalogMysqlIT.java +++ b/catalogs/catalog-jdbc-mysql/src/test/java/com/datastrato/gravitino/catalog/mysql/integration/test/CatalogMysqlIT.java @@ -11,6 +11,8 @@ import com.datastrato.gravitino.Catalog; import 
com.datastrato.gravitino.NameIdentifier; import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.Schema; +import com.datastrato.gravitino.SupportsSchemas; import com.datastrato.gravitino.auth.AuthConstants; import com.datastrato.gravitino.catalog.jdbc.config.JdbcConfig; import com.datastrato.gravitino.catalog.mysql.integration.test.service.MysqlService; @@ -27,8 +29,6 @@ import com.datastrato.gravitino.integration.test.util.TestDatabaseName; import com.datastrato.gravitino.rel.Column; import com.datastrato.gravitino.rel.Column.ColumnImpl; -import com.datastrato.gravitino.rel.Schema; -import com.datastrato.gravitino.rel.SupportsSchemas; import com.datastrato.gravitino.rel.Table; import com.datastrato.gravitino.rel.TableCatalog; import com.datastrato.gravitino.rel.TableChange; @@ -154,7 +154,7 @@ private void clearTableAndSchema() { for (NameIdentifier nameIdentifier : nameIdentifiers) { catalog.asTableCatalog().dropTable(nameIdentifier); } - catalog.asSchemas().dropSchema(NameIdentifier.of(metalakeName, catalogName, schemaName), false); + catalog.asSchemas().dropSchema(schemaName, false); } private void createMetalake() { @@ -196,8 +196,8 @@ private void createSchema() { NameIdentifier ident = NameIdentifier.of(metalakeName, catalogName, schemaName); Map prop = Maps.newHashMap(); - Schema createdSchema = catalog.asSchemas().createSchema(ident, schema_comment, prop); - Schema loadSchema = catalog.asSchemas().loadSchema(ident); + Schema createdSchema = catalog.asSchemas().createSchema(ident.name(), schema_comment, prop); + Schema loadSchema = catalog.asSchemas().loadSchema(ident.name()); Assertions.assertEquals(createdSchema.name(), loadSchema.name()); prop.forEach((key, value) -> Assertions.assertEquals(loadSchema.properties().get(key), value)); } @@ -256,7 +256,7 @@ void testOperationMysqlSchema() { SupportsSchemas schemas = catalog.asSchemas(); Namespace namespace = Namespace.of(metalakeName, catalogName); // list schema check. 
- NameIdentifier[] nameIdentifiers = schemas.listSchemas(namespace); + NameIdentifier[] nameIdentifiers = schemas.listSchemas(); Set schemaNames = Arrays.stream(nameIdentifiers).map(NameIdentifier::name).collect(Collectors.toSet()); Assertions.assertTrue(schemaNames.contains(schemaName)); @@ -269,8 +269,8 @@ void testOperationMysqlSchema() { // create schema check. String testSchemaName = GravitinoITUtils.genRandomName("test_schema_1"); NameIdentifier schemaIdent = NameIdentifier.of(metalakeName, catalogName, testSchemaName); - schemas.createSchema(schemaIdent, schema_comment, Collections.emptyMap()); - nameIdentifiers = schemas.listSchemas(Namespace.of(metalakeName, catalogName)); + schemas.createSchema(schemaIdent.name(), schema_comment, Collections.emptyMap()); + nameIdentifiers = schemas.listSchemas(); Map schemaMap = Arrays.stream(nameIdentifiers).collect(Collectors.toMap(NameIdentifier::name, v -> v)); Assertions.assertTrue(schemaMap.containsKey(testSchemaName)); @@ -284,21 +284,21 @@ void testOperationMysqlSchema() { Assertions.assertThrows( SchemaAlreadyExistsException.class, () -> { - schemas.createSchema(schemaIdent, schema_comment, emptyMap); + schemas.createSchema(schemaIdent.name(), schema_comment, emptyMap); }); // drop schema check. 
- schemas.dropSchema(schemaIdent, false); - Assertions.assertThrows(NoSuchSchemaException.class, () -> schemas.loadSchema(schemaIdent)); + schemas.dropSchema(schemaIdent.name(), false); + Assertions.assertThrows( + NoSuchSchemaException.class, () -> schemas.loadSchema(schemaIdent.name())); Assertions.assertThrows( NoSuchSchemaException.class, () -> mysqlService.loadSchema(schemaIdent)); - nameIdentifiers = schemas.listSchemas(Namespace.of(metalakeName, catalogName)); + nameIdentifiers = schemas.listSchemas(); schemaMap = Arrays.stream(nameIdentifiers).collect(Collectors.toMap(NameIdentifier::name, v -> v)); Assertions.assertFalse(schemaMap.containsKey(testSchemaName)); - Assertions.assertFalse( - schemas.dropSchema(NameIdentifier.of(metalakeName, catalogName, "no-exits"), false)); + Assertions.assertFalse(schemas.dropSchema("no-exits", false)); TableCatalog tableCatalog = catalog.asTableCatalog(); // create failed check. @@ -316,8 +316,8 @@ void testOperationMysqlSchema() { Distributions.NONE, null)); // drop schema failed check. - Assertions.assertFalse(schemas.dropSchema(schemaIdent, true)); - Assertions.assertFalse(schemas.dropSchema(schemaIdent, false)); + Assertions.assertFalse(schemas.dropSchema(schemaIdent.name(), true)); + Assertions.assertFalse(schemas.dropSchema(schemaIdent.name(), false)); Assertions.assertFalse(tableCatalog.dropTable(table)); mysqlNamespaces = mysqlService.listSchemas(Namespace.empty()); schemaNames = @@ -844,10 +844,7 @@ void testDropMySQLDatabase() { catalog .asSchemas() - .createSchema( - NameIdentifier.of(metalakeName, catalogName, schemaName), - null, - ImmutableMap.builder().build()); + .createSchema(schemaName, null, ImmutableMap.builder().build()); catalog .asTableCatalog() @@ -859,19 +856,18 @@ void testDropMySQLDatabase() { // Try to drop a database, and cascade equals to false, it should not be // allowed. 
- catalog.asSchemas().dropSchema(NameIdentifier.of(metalakeName, catalogName, schemaName), false); + catalog.asSchemas().dropSchema(schemaName, false); // Check the database still exists - catalog.asSchemas().loadSchema(NameIdentifier.of(metalakeName, catalogName, schemaName)); + catalog.asSchemas().loadSchema(schemaName); // Try to drop a database, and cascade equals to true, it should be allowed. - catalog.asSchemas().dropSchema(NameIdentifier.of(metalakeName, catalogName, schemaName), true); + catalog.asSchemas().dropSchema(schemaName, true); // Check database has been dropped SupportsSchemas schemas = catalog.asSchemas(); - NameIdentifier of = NameIdentifier.of(metalakeName, catalogName, schemaName); Assertions.assertThrows( NoSuchSchemaException.class, () -> { - schemas.loadSchema(of); + schemas.loadSchema(schemaName); }); } @@ -1117,23 +1113,21 @@ public void testAutoIncrement() { @Test public void testSchemaComment() { - String testSchemaName = "test"; - NameIdentifier identer = NameIdentifier.of(metalakeName, catalogName, testSchemaName); + final String testSchemaName = "test"; RuntimeException exception = Assertions.assertThrowsExactly( UnsupportedOperationException.class, - () -> catalog.asSchemas().createSchema(identer, "comment", null)); + () -> catalog.asSchemas().createSchema(testSchemaName, "comment", null)); Assertions.assertTrue( exception.getMessage().contains("MySQL doesn't support set schema comment: comment")); // test null comment - testSchemaName = "test2"; - NameIdentifier ident = NameIdentifier.of(metalakeName, catalogName, testSchemaName); - Schema schema = catalog.asSchemas().createSchema(ident, "", null); + String testSchemaName2 = "test2"; + Schema schema = catalog.asSchemas().createSchema(testSchemaName2, "", null); Assertions.assertTrue(StringUtils.isEmpty(schema.comment())); - schema = catalog.asSchemas().loadSchema(ident); + schema = catalog.asSchemas().loadSchema(testSchemaName2); 
Assertions.assertTrue(StringUtils.isEmpty(schema.comment())); - catalog.asSchemas().dropSchema(ident, true); + catalog.asSchemas().dropSchema(testSchemaName2, true); } @Test @@ -1335,17 +1329,15 @@ void testNameSpec() { String sql = String.format("CREATE DATABASE `%s`", testSchemaName); mysqlService.executeQuery(sql); - NameIdentifier schemaIdent = NameIdentifier.of(metalakeName, catalogName, testSchemaName); - Schema schema = catalog.asSchemas().loadSchema(schemaIdent); + Schema schema = catalog.asSchemas().loadSchema(testSchemaName); Assertions.assertEquals(testSchemaName, schema.name()); - NameIdentifier[] schemaIdents = - catalog.asSchemas().listSchemas(Namespace.of(metalakeName, catalogName)); + NameIdentifier[] schemaIdents = catalog.asSchemas().listSchemas(); Assertions.assertTrue( Arrays.stream(schemaIdents).anyMatch(s -> s.name().equals(testSchemaName))); - Assertions.assertTrue(catalog.asSchemas().dropSchema(schemaIdent, false)); - Assertions.assertFalse(catalog.asSchemas().schemaExists(schemaIdent)); + Assertions.assertTrue(catalog.asSchemas().dropSchema(testSchemaName, false)); + Assertions.assertFalse(catalog.asSchemas().schemaExists(testSchemaName)); // test operate illegal table name from MySQL mysqlService.executeQuery(sql); @@ -1367,7 +1359,7 @@ void testNameSpec() { Assertions.assertTrue(catalog.asTableCatalog().dropTable(tableIdent)); Assertions.assertFalse(catalog.asTableCatalog().tableExists(tableIdent)); Assertions.assertFalse(catalog.asTableCatalog().purgeTable(tableIdent)); - catalog.asSchemas().dropSchema(schemaIdent, true); + catalog.asSchemas().dropSchema(testSchemaName, true); } @Test @@ -1383,13 +1375,12 @@ void testMySQLSchemaNameCaseSensitive() { SupportsSchemas schemaSupport = catalog.asSchemas(); for (String schema : schemas) { - NameIdentifier schemaIdentifier = NameIdentifier.of(metalakeName, catalogName, schema); - schemaSupport.createSchema(schemaIdentifier, null, Collections.emptyMap()); - 
Assertions.assertNotNull(schemaSupport.loadSchema(schemaIdentifier)); + schemaSupport.createSchema(schema, null, Collections.emptyMap()); + Assertions.assertNotNull(schemaSupport.loadSchema(schema)); } Set schemaNames = - Arrays.stream(schemaSupport.listSchemas(Namespace.of(metalakeName, catalogName))) + Arrays.stream(schemaSupport.listSchemas()) .map(NameIdentifier::name) .collect(Collectors.toSet()); @@ -1434,8 +1425,7 @@ void testMySQLSchemaNameCaseSensitive() { } for (String schema : schemas) { - NameIdentifier schemaIdentifier = NameIdentifier.of(metalakeName, catalogName, schema); - schemaSupport.dropSchema(schemaIdentifier, true); + schemaSupport.dropSchema(schema, true); } } diff --git a/catalogs/catalog-jdbc-postgresql/src/test/java/com/datastrato/gravitino/catalog/postgresql/integration/test/CatalogPostgreSqlIT.java b/catalogs/catalog-jdbc-postgresql/src/test/java/com/datastrato/gravitino/catalog/postgresql/integration/test/CatalogPostgreSqlIT.java index b8cc28931c5..ca8d7714279 100644 --- a/catalogs/catalog-jdbc-postgresql/src/test/java/com/datastrato/gravitino/catalog/postgresql/integration/test/CatalogPostgreSqlIT.java +++ b/catalogs/catalog-jdbc-postgresql/src/test/java/com/datastrato/gravitino/catalog/postgresql/integration/test/CatalogPostgreSqlIT.java @@ -10,6 +10,8 @@ import com.datastrato.gravitino.Catalog; import com.datastrato.gravitino.NameIdentifier; import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.Schema; +import com.datastrato.gravitino.SupportsSchemas; import com.datastrato.gravitino.auth.AuthConstants; import com.datastrato.gravitino.catalog.jdbc.config.JdbcConfig; import com.datastrato.gravitino.catalog.postgresql.integration.test.service.PostgreSqlService; @@ -25,8 +27,6 @@ import com.datastrato.gravitino.integration.test.util.JdbcDriverDownloader; import com.datastrato.gravitino.integration.test.util.TestDatabaseName; import com.datastrato.gravitino.rel.Column; -import com.datastrato.gravitino.rel.Schema; 
-import com.datastrato.gravitino.rel.SupportsSchemas; import com.datastrato.gravitino.rel.Table; import com.datastrato.gravitino.rel.TableCatalog; import com.datastrato.gravitino.rel.TableChange; @@ -135,7 +135,7 @@ private void clearTableAndSchema() { for (NameIdentifier nameIdentifier : nameIdentifiers) { catalog.asTableCatalog().dropTable(nameIdentifier); } - catalog.asSchemas().dropSchema(NameIdentifier.of(metalakeName, catalogName, schemaName), false); + catalog.asSchemas().dropSchema(schemaName, false); } private void createMetalake() { @@ -174,8 +174,8 @@ private void createSchema() { NameIdentifier ident = NameIdentifier.of(metalakeName, catalogName, schemaName); Schema createdSchema = - catalog.asSchemas().createSchema(ident, schema_comment, Collections.EMPTY_MAP); - Schema loadSchema = catalog.asSchemas().loadSchema(ident); + catalog.asSchemas().createSchema(ident.name(), schema_comment, Collections.EMPTY_MAP); + Schema loadSchema = catalog.asSchemas().loadSchema(ident.name()); Assertions.assertEquals(createdSchema.name(), loadSchema.name()); Assertions.assertEquals(createdSchema.comment(), loadSchema.comment()); } @@ -295,7 +295,7 @@ void testOperationPostgreSqlSchema() { SupportsSchemas schemas = catalog.asSchemas(); Namespace namespace = Namespace.of(metalakeName, catalogName); // list schema check. - NameIdentifier[] nameIdentifiers = schemas.listSchemas(namespace); + NameIdentifier[] nameIdentifiers = schemas.listSchemas(); Set schemaNames = Arrays.stream(nameIdentifiers).map(NameIdentifier::name).collect(Collectors.toSet()); Assertions.assertTrue(schemaNames.contains(schemaName)); @@ -308,8 +308,8 @@ void testOperationPostgreSqlSchema() { // create schema check. 
String testSchemaName = GravitinoITUtils.genRandomName("test_schema_1"); NameIdentifier schemaIdent = NameIdentifier.of(metalakeName, catalogName, testSchemaName); - schemas.createSchema(schemaIdent, schema_comment, Collections.emptyMap()); - nameIdentifiers = schemas.listSchemas(Namespace.of(metalakeName, catalogName)); + schemas.createSchema(schemaIdent.name(), schema_comment, Collections.emptyMap()); + nameIdentifiers = schemas.listSchemas(); Map schemaMap = Arrays.stream(nameIdentifiers).collect(Collectors.toMap(NameIdentifier::name, v -> v)); Assertions.assertTrue(schemaMap.containsKey(testSchemaName)); @@ -323,21 +323,21 @@ void testOperationPostgreSqlSchema() { Assertions.assertThrows( SchemaAlreadyExistsException.class, () -> { - schemas.createSchema(schemaIdent, schema_comment, emptyMap); + schemas.createSchema(schemaIdent.name(), schema_comment, emptyMap); }); // drop schema check. - schemas.dropSchema(schemaIdent, false); - Assertions.assertThrows(NoSuchSchemaException.class, () -> schemas.loadSchema(schemaIdent)); + schemas.dropSchema(schemaIdent.name(), false); + Assertions.assertThrows( + NoSuchSchemaException.class, () -> schemas.loadSchema(schemaIdent.name())); Assertions.assertThrows( NoSuchSchemaException.class, () -> postgreSqlService.loadSchema(schemaIdent)); - nameIdentifiers = schemas.listSchemas(Namespace.of(metalakeName, catalogName)); + nameIdentifiers = schemas.listSchemas(); schemaMap = Arrays.stream(nameIdentifiers).collect(Collectors.toMap(NameIdentifier::name, v -> v)); Assertions.assertFalse(schemaMap.containsKey(testSchemaName)); - Assertions.assertFalse( - schemas.dropSchema(NameIdentifier.of(metalakeName, catalogName, "no-exits"), false)); + Assertions.assertFalse(schemas.dropSchema("no-exits", false)); TableCatalog tableCatalog = catalog.asTableCatalog(); // create failed check. @@ -355,8 +355,8 @@ void testOperationPostgreSqlSchema() { Distributions.NONE, null)); // drop schema failed check. 
- Assertions.assertFalse(schemas.dropSchema(schemaIdent, true)); - Assertions.assertFalse(schemas.dropSchema(schemaIdent, false)); + Assertions.assertFalse(schemas.dropSchema(schemaIdent.name(), true)); + Assertions.assertFalse(schemas.dropSchema(schemaIdent.name(), false)); Assertions.assertFalse(tableCatalog.dropTable(table)); postgreSqlNamespaces = postgreSqlService.listSchemas(Namespace.empty()); schemaNames = @@ -529,32 +529,29 @@ void testAlterAndDropPostgreSqlTable() { @Test void testCreateAndLoadSchema() { String testSchemaName = "test"; - NameIdentifier ident = NameIdentifier.of(metalakeName, catalogName, testSchemaName); - Schema schema = catalog.asSchemas().createSchema(ident, "comment", null); + Schema schema = catalog.asSchemas().createSchema(testSchemaName, "comment", null); Assertions.assertEquals("anonymous", schema.auditInfo().creator()); Assertions.assertEquals("comment", schema.comment()); - schema = catalog.asSchemas().loadSchema(ident); + schema = catalog.asSchemas().loadSchema(testSchemaName); Assertions.assertEquals("anonymous", schema.auditInfo().creator()); Assertions.assertEquals("comment", schema.comment()); // test null comment testSchemaName = "test2"; - ident = NameIdentifier.of(metalakeName, catalogName, testSchemaName); - schema = catalog.asSchemas().createSchema(ident, null, null); + schema = catalog.asSchemas().createSchema(testSchemaName, null, null); Assertions.assertEquals("anonymous", schema.auditInfo().creator()); // todo: Gravitino put id to comment, makes comment is empty string not null. 
Assertions.assertTrue(StringUtils.isEmpty(schema.comment())); - schema = catalog.asSchemas().loadSchema(ident); + schema = catalog.asSchemas().loadSchema(testSchemaName); Assertions.assertEquals("anonymous", schema.auditInfo().creator()); Assertions.assertTrue(StringUtils.isEmpty(schema.comment())); } @Test void testListSchema() { - NameIdentifier[] nameIdentifiers = - catalog.asSchemas().listSchemas(Namespace.of(metalakeName, catalogName)); + NameIdentifier[] nameIdentifiers = catalog.asSchemas().listSchemas(); Set schemaNames = Arrays.stream(nameIdentifiers).map(NameIdentifier::name).collect(Collectors.toSet()); Assertions.assertTrue(schemaNames.contains("public")); @@ -1102,10 +1099,7 @@ void testPGListTable() { String[] dbs = {schemaName1, schemaName2, schemaName3, schemaName4, schemaName5}; for (int i = 0; i < dbs.length; i++) { - catalog - .asSchemas() - .createSchema( - NameIdentifier.of(metalakeName, catalogName, dbs[i]), dbs[i], Maps.newHashMap()); + catalog.asSchemas().createSchema(dbs[i], dbs[i], Maps.newHashMap()); } String tableName1 = "table1"; @@ -1155,10 +1149,7 @@ void testCreateSameTableInDifferentSchema() { String[] dbs = {schemaName1, schemaName2, schemaName3}; for (int i = 0; i < dbs.length; i++) { - catalog - .asSchemas() - .createSchema( - NameIdentifier.of(metalakeName, catalogName, dbs[i]), dbs[i], Maps.newHashMap()); + catalog.asSchemas().createSchema(dbs[i], dbs[i], Maps.newHashMap()); } String tableName1 = "table1"; @@ -1227,13 +1218,12 @@ void testPostgreSQLSchemaNameCaseSensitive() { SupportsSchemas schemaSupport = catalog.asSchemas(); for (String schema : schemas) { - NameIdentifier schemaIdentifier = NameIdentifier.of(metalakeName, catalogName, schema); - schemaSupport.createSchema(schemaIdentifier, null, Collections.emptyMap()); - Assertions.assertNotNull(schemaSupport.loadSchema(schemaIdentifier)); + schemaSupport.createSchema(schema, null, Collections.emptyMap()); + Assertions.assertNotNull(schemaSupport.loadSchema(schema)); } Set 
schemaNames = - Arrays.stream(schemaSupport.listSchemas(Namespace.of(metalakeName, catalogName))) + Arrays.stream(schemaSupport.listSchemas()) .map(NameIdentifier::name) .collect(Collectors.toSet()); diff --git a/catalogs/catalog-jdbc-postgresql/src/test/java/com/datastrato/gravitino/catalog/postgresql/integration/test/TestMultipleJDBCLoad.java b/catalogs/catalog-jdbc-postgresql/src/test/java/com/datastrato/gravitino/catalog/postgresql/integration/test/TestMultipleJDBCLoad.java index 0a52b0b9d54..be54d63e345 100644 --- a/catalogs/catalog-jdbc-postgresql/src/test/java/com/datastrato/gravitino/catalog/postgresql/integration/test/TestMultipleJDBCLoad.java +++ b/catalogs/catalog-jdbc-postgresql/src/test/java/com/datastrato/gravitino/catalog/postgresql/integration/test/TestMultipleJDBCLoad.java @@ -7,7 +7,6 @@ import com.datastrato.gravitino.Catalog; import com.datastrato.gravitino.NameIdentifier; -import com.datastrato.gravitino.Namespace; import com.datastrato.gravitino.catalog.jdbc.config.JdbcConfig; import com.datastrato.gravitino.client.GravitinoMetalake; import com.datastrato.gravitino.integration.test.container.ContainerSuite; @@ -110,30 +109,16 @@ public void testCreateMultipleJdbc() throws URISyntaxException, SQLException { metalake.createCatalog( mysqlCatalogName, Catalog.Type.RELATIONAL, "jdbc-mysql", "comment", mysqlConf); - NameIdentifier[] nameIdentifiers = - mysqlCatalog.asSchemas().listSchemas(Namespace.of(metalakeName, mysqlCatalogName)); + NameIdentifier[] nameIdentifiers = mysqlCatalog.asSchemas().listSchemas(); Assertions.assertNotEquals(0, nameIdentifiers.length); - nameIdentifiers = - postgreSqlCatalog - .asSchemas() - .listSchemas(Namespace.of(metalakeName, postgreSqlCatalogName)); + nameIdentifiers = postgreSqlCatalog.asSchemas().listSchemas(); Assertions.assertEquals(1, nameIdentifiers.length); Assertions.assertEquals("public", nameIdentifiers[0].name()); String schemaName = RandomNameUtils.genRandomName("it_schema"); - mysqlCatalog - 
.asSchemas() - .createSchema( - NameIdentifier.of(metalakeName, mysqlCatalogName, schemaName), - null, - Collections.emptyMap()); + mysqlCatalog.asSchemas().createSchema(schemaName, null, Collections.emptyMap()); - postgreSqlCatalog - .asSchemas() - .createSchema( - NameIdentifier.of(metalakeName, postgreSqlCatalogName, schemaName), - null, - Collections.emptyMap()); + postgreSqlCatalog.asSchemas().createSchema(schemaName, null, Collections.emptyMap()); String tableName = RandomNameUtils.genRandomName("it_table"); diff --git a/catalogs/catalog-kafka/src/main/java/com/datastrato/gravitino/catalog/kafka/KafkaCatalog.java b/catalogs/catalog-kafka/src/main/java/com/datastrato/gravitino/catalog/kafka/KafkaCatalog.java index 46fbc1bf1c7..d0188666eeb 100644 --- a/catalogs/catalog-kafka/src/main/java/com/datastrato/gravitino/catalog/kafka/KafkaCatalog.java +++ b/catalogs/catalog-kafka/src/main/java/com/datastrato/gravitino/catalog/kafka/KafkaCatalog.java @@ -8,8 +8,6 @@ import com.datastrato.gravitino.connector.BaseCatalog; import com.datastrato.gravitino.connector.CatalogOperations; import com.datastrato.gravitino.connector.capability.Capability; -import com.datastrato.gravitino.messaging.TopicCatalog; -import com.datastrato.gravitino.rel.SupportsSchemas; import java.util.Map; /** Kafka catalog is a messaging catalog that can manage topics on the Kafka messaging system. 
*/ @@ -30,14 +28,4 @@ protected CatalogOperations newOps(Map config) { protected Capability newCapability() { return new KafkaCatalogCapability(); } - - @Override - public SupportsSchemas asSchemas() throws UnsupportedOperationException { - return (KafkaCatalogOperations) ops(); - } - - @Override - public TopicCatalog asTopicCatalog() throws UnsupportedOperationException { - return (KafkaCatalogOperations) ops(); - } } diff --git a/catalogs/catalog-kafka/src/main/java/com/datastrato/gravitino/catalog/kafka/KafkaCatalogOperations.java b/catalogs/catalog-kafka/src/main/java/com/datastrato/gravitino/catalog/kafka/KafkaCatalogOperations.java index bb5f60960e5..d986c378414 100644 --- a/catalogs/catalog-kafka/src/main/java/com/datastrato/gravitino/catalog/kafka/KafkaCatalogOperations.java +++ b/catalogs/catalog-kafka/src/main/java/com/datastrato/gravitino/catalog/kafka/KafkaCatalogOperations.java @@ -17,10 +17,13 @@ import com.datastrato.gravitino.GravitinoEnv; import com.datastrato.gravitino.NameIdentifier; import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.Schema; +import com.datastrato.gravitino.SchemaChange; import com.datastrato.gravitino.StringIdentifier; import com.datastrato.gravitino.connector.CatalogInfo; import com.datastrato.gravitino.connector.CatalogOperations; import com.datastrato.gravitino.connector.PropertiesMetadata; +import com.datastrato.gravitino.connector.SupportsSchemas; import com.datastrato.gravitino.exceptions.NoSuchCatalogException; import com.datastrato.gravitino.exceptions.NoSuchEntityException; import com.datastrato.gravitino.exceptions.NoSuchSchemaException; @@ -34,9 +37,6 @@ import com.datastrato.gravitino.messaging.TopicChange; import com.datastrato.gravitino.meta.AuditInfo; import com.datastrato.gravitino.meta.SchemaEntity; -import com.datastrato.gravitino.rel.Schema; -import com.datastrato.gravitino.rel.SchemaChange; -import com.datastrato.gravitino.rel.SupportsSchemas; import 
com.datastrato.gravitino.storage.IdGenerator; import com.datastrato.gravitino.utils.PrincipalUtils; import com.google.common.annotations.VisibleForTesting; diff --git a/catalogs/catalog-kafka/src/test/java/com/datastrato/gravitino/catalog/kafka/TestKafkaCatalogOperations.java b/catalogs/catalog-kafka/src/test/java/com/datastrato/gravitino/catalog/kafka/TestKafkaCatalogOperations.java index 975bd851628..1d919706ece 100644 --- a/catalogs/catalog-kafka/src/test/java/com/datastrato/gravitino/catalog/kafka/TestKafkaCatalogOperations.java +++ b/catalogs/catalog-kafka/src/test/java/com/datastrato/gravitino/catalog/kafka/TestKafkaCatalogOperations.java @@ -24,6 +24,8 @@ import com.datastrato.gravitino.EntityStoreFactory; import com.datastrato.gravitino.NameIdentifier; import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.Schema; +import com.datastrato.gravitino.SchemaChange; import com.datastrato.gravitino.catalog.kafka.embedded.KafkaClusterEmbedded; import com.datastrato.gravitino.exceptions.NoSuchSchemaException; import com.datastrato.gravitino.exceptions.NoSuchTopicException; @@ -32,8 +34,6 @@ import com.datastrato.gravitino.messaging.TopicChange; import com.datastrato.gravitino.meta.AuditInfo; import com.datastrato.gravitino.meta.CatalogEntity; -import com.datastrato.gravitino.rel.Schema; -import com.datastrato.gravitino.rel.SchemaChange; import com.datastrato.gravitino.storage.IdGenerator; import com.datastrato.gravitino.storage.RandomIdGenerator; import com.google.common.collect.ImmutableMap; diff --git a/catalogs/catalog-kafka/src/test/java/com/datastrato/gravitino/catalog/kafka/integration/test/CatalogKafkaIT.java b/catalogs/catalog-kafka/src/test/java/com/datastrato/gravitino/catalog/kafka/integration/test/CatalogKafkaIT.java index 3fc4fc84292..b5392bea408 100644 --- a/catalogs/catalog-kafka/src/test/java/com/datastrato/gravitino/catalog/kafka/integration/test/CatalogKafkaIT.java +++ 
b/catalogs/catalog-kafka/src/test/java/com/datastrato/gravitino/catalog/kafka/integration/test/CatalogKafkaIT.java @@ -14,6 +14,8 @@ import com.datastrato.gravitino.CatalogChange; import com.datastrato.gravitino.NameIdentifier; import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.Schema; +import com.datastrato.gravitino.SchemaChange; import com.datastrato.gravitino.client.GravitinoMetalake; import com.datastrato.gravitino.exceptions.NoSuchCatalogException; import com.datastrato.gravitino.integration.test.container.ContainerSuite; @@ -21,8 +23,6 @@ import com.datastrato.gravitino.integration.test.util.GravitinoITUtils; import com.datastrato.gravitino.messaging.Topic; import com.datastrato.gravitino.messaging.TopicChange; -import com.datastrato.gravitino.rel.Schema; -import com.datastrato.gravitino.rel.SchemaChange; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; @@ -207,15 +207,11 @@ public void testCatalogException() { @Test public void testDefaultSchema() { - NameIdentifier[] schemas = - catalog.asSchemas().listSchemas(Namespace.ofSchema(METALAKE_NAME, CATALOG_NAME)); + NameIdentifier[] schemas = catalog.asSchemas().listSchemas(); Assertions.assertEquals(1, schemas.length); Assertions.assertEquals(DEFAULT_SCHEMA_NAME, schemas[0].name()); - Schema loadSchema = - catalog - .asSchemas() - .loadSchema(NameIdentifier.ofSchema(METALAKE_NAME, CATALOG_NAME, DEFAULT_SCHEMA_NAME)); + Schema loadSchema = catalog.asSchemas().loadSchema(DEFAULT_SCHEMA_NAME); Assertions.assertEquals( "The default schema of Kafka catalog including all topics", loadSchema.comment()); @@ -226,17 +222,11 @@ public void testDefaultSchema() { () -> catalog .asSchemas() - .alterSchema( - NameIdentifier.ofSchema(METALAKE_NAME, CATALOG_NAME, DEFAULT_SCHEMA_NAME), - SchemaChange.removeProperty("key1"))); + .alterSchema(DEFAULT_SCHEMA_NAME, SchemaChange.removeProperty("key1"))); 
Assertions.assertTrue(exception.getMessage().contains("Cannot alter the default schema")); // test drop default schema - boolean dropped = - catalog - .asSchemas() - .dropSchema( - NameIdentifier.ofSchema(METALAKE_NAME, CATALOG_NAME, DEFAULT_SCHEMA_NAME), true); + boolean dropped = catalog.asSchemas().dropSchema(DEFAULT_SCHEMA_NAME, true); Assertions.assertFalse(dropped); } @@ -246,21 +236,14 @@ public void testCreateSchema() { Exception ex = Assertions.assertThrows( UnsupportedOperationException.class, - () -> - catalog - .asSchemas() - .createSchema( - NameIdentifier.of(METALAKE_NAME, CATALOG_NAME, schemaName), - "comment", - Collections.emptyMap())); + () -> catalog.asSchemas().createSchema(schemaName, "comment", Collections.emptyMap())); Assertions.assertTrue( ex.getMessage().contains("Kafka catalog does not support schema creation")); } @Test public void testListSchema() { - NameIdentifier[] schemas = - catalog.asSchemas().listSchemas(Namespace.ofSchema(METALAKE_NAME, CATALOG_NAME)); + NameIdentifier[] schemas = catalog.asSchemas().listSchemas(); Assertions.assertEquals(1, schemas.length); Assertions.assertEquals(DEFAULT_SCHEMA_NAME, schemas[0].name()); } diff --git a/catalogs/catalog-lakehouse-iceberg/src/main/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/IcebergCatalog.java b/catalogs/catalog-lakehouse-iceberg/src/main/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/IcebergCatalog.java index ffa8c2a0cdd..6094a39397e 100644 --- a/catalogs/catalog-lakehouse-iceberg/src/main/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/IcebergCatalog.java +++ b/catalogs/catalog-lakehouse-iceberg/src/main/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/IcebergCatalog.java @@ -7,8 +7,6 @@ import com.datastrato.gravitino.connector.BaseCatalog; import com.datastrato.gravitino.connector.CatalogOperations; import com.datastrato.gravitino.connector.capability.Capability; -import com.datastrato.gravitino.rel.SupportsSchemas; -import 
com.datastrato.gravitino.rel.TableCatalog; import java.util.Map; /** Implementation of an Iceberg catalog in Gravitino. */ @@ -36,16 +34,4 @@ protected CatalogOperations newOps(Map config) { public Capability newCapability() { return new IcebergCatalogCapability(); } - - /** @return The Iceberg catalog operations as {@link IcebergCatalogOperations}. */ - @Override - public SupportsSchemas asSchemas() { - return (IcebergCatalogOperations) ops(); - } - - /** @return The Iceberg catalog operations as {@link IcebergCatalogOperations}. */ - @Override - public TableCatalog asTableCatalog() { - return (IcebergCatalogOperations) ops(); - } } diff --git a/catalogs/catalog-lakehouse-iceberg/src/main/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/IcebergCatalogOperations.java b/catalogs/catalog-lakehouse-iceberg/src/main/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/IcebergCatalogOperations.java index 3c28fbd6527..1d52b578688 100644 --- a/catalogs/catalog-lakehouse-iceberg/src/main/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/IcebergCatalogOperations.java +++ b/catalogs/catalog-lakehouse-iceberg/src/main/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/IcebergCatalogOperations.java @@ -8,11 +8,13 @@ import com.datastrato.gravitino.NameIdentifier; import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.SchemaChange; import com.datastrato.gravitino.catalog.lakehouse.iceberg.ops.IcebergTableOps; import com.datastrato.gravitino.catalog.lakehouse.iceberg.ops.IcebergTableOpsHelper; import com.datastrato.gravitino.connector.CatalogInfo; import com.datastrato.gravitino.connector.CatalogOperations; import com.datastrato.gravitino.connector.PropertiesMetadata; +import com.datastrato.gravitino.connector.SupportsSchemas; import com.datastrato.gravitino.exceptions.NoSuchCatalogException; import com.datastrato.gravitino.exceptions.NoSuchSchemaException; import com.datastrato.gravitino.exceptions.NoSuchTableException; @@ -21,8 +23,6 
@@ import com.datastrato.gravitino.exceptions.TableAlreadyExistsException; import com.datastrato.gravitino.meta.AuditInfo; import com.datastrato.gravitino.rel.Column; -import com.datastrato.gravitino.rel.SchemaChange; -import com.datastrato.gravitino.rel.SupportsSchemas; import com.datastrato.gravitino.rel.Table; import com.datastrato.gravitino.rel.TableCatalog; import com.datastrato.gravitino.rel.TableChange; diff --git a/catalogs/catalog-lakehouse-iceberg/src/test/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/TestIcebergSchema.java b/catalogs/catalog-lakehouse-iceberg/src/test/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/TestIcebergSchema.java index a510c92598c..89370953b6f 100644 --- a/catalogs/catalog-lakehouse-iceberg/src/test/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/TestIcebergSchema.java +++ b/catalogs/catalog-lakehouse-iceberg/src/test/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/TestIcebergSchema.java @@ -6,14 +6,14 @@ import com.datastrato.gravitino.NameIdentifier; import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.Schema; +import com.datastrato.gravitino.SchemaChange; import com.datastrato.gravitino.catalog.PropertiesMetadataHelpers; import com.datastrato.gravitino.connector.PropertiesMetadata; +import com.datastrato.gravitino.connector.SupportsSchemas; import com.datastrato.gravitino.exceptions.SchemaAlreadyExistsException; import com.datastrato.gravitino.meta.AuditInfo; import com.datastrato.gravitino.meta.CatalogEntity; -import com.datastrato.gravitino.rel.Schema; -import com.datastrato.gravitino.rel.SchemaChange; -import com.datastrato.gravitino.rel.SupportsSchemas; import com.google.common.collect.Maps; import java.time.Instant; import java.util.Arrays; @@ -35,27 +35,28 @@ public class TestIcebergSchema { @Test public void testCreateIcebergSchema() { IcebergCatalog icebergCatalog = initIcebergCatalog("testCreateIcebergSchema"); + IcebergCatalogOperations catalogOperations = 
(IcebergCatalogOperations) icebergCatalog.ops(); NameIdentifier ident = NameIdentifier.of("metalake", icebergCatalog.name(), "test"); Map properties = Maps.newHashMap(); properties.put("key1", "val1"); properties.put("key2", "val2"); - Schema schema = icebergCatalog.asSchemas().createSchema(ident, COMMENT_VALUE, properties); + Schema schema = catalogOperations.createSchema(ident, COMMENT_VALUE, properties); Assertions.assertEquals(ident.name(), schema.name()); Assertions.assertEquals(COMMENT_VALUE, schema.comment()); Assertions.assertEquals(properties, schema.properties()); - Assertions.assertTrue(icebergCatalog.asSchemas().schemaExists(ident)); + Assertions.assertTrue(catalogOperations.schemaExists(ident)); Set names = - Arrays.stream(icebergCatalog.asSchemas().listSchemas(ident.namespace())) + Arrays.stream(catalogOperations.listSchemas(ident.namespace())) .map(NameIdentifier::name) .collect(Collectors.toSet()); Assertions.assertTrue(names.contains(ident.name())); // Test schema already exists - SupportsSchemas schemas = icebergCatalog.asSchemas(); + SupportsSchemas schemas = catalogOperations; Throwable exception = Assertions.assertThrows( SchemaAlreadyExistsException.class, @@ -68,10 +69,11 @@ public void testCreateIcebergSchema() { @Test public void testListSchema() { IcebergCatalog icebergCatalog = initIcebergCatalog("testListIcebergSchema"); + IcebergCatalogOperations catalogOperations = (IcebergCatalogOperations) icebergCatalog.ops(); NameIdentifier ident = NameIdentifier.of("metalake", icebergCatalog.name(), "test"); - icebergCatalog.asSchemas().createSchema(ident, COMMENT_VALUE, Maps.newHashMap()); + catalogOperations.createSchema(ident, COMMENT_VALUE, Maps.newHashMap()); - NameIdentifier[] schemas = icebergCatalog.asSchemas().listSchemas(ident.namespace()); + NameIdentifier[] schemas = catalogOperations.listSchemas(ident.namespace()); Assertions.assertEquals(1, schemas.length); Assertions.assertEquals(ident.name(), schemas[0].name()); 
Assertions.assertEquals(ident.namespace(), schemas[0].namespace()); @@ -80,37 +82,30 @@ public void testListSchema() { @Test public void testAlterSchema() { IcebergCatalog icebergCatalog = initIcebergCatalog("testAlterSchema"); + IcebergCatalogOperations catalogOperations = (IcebergCatalogOperations) icebergCatalog.ops(); NameIdentifier ident = NameIdentifier.of("metalake", icebergCatalog.name(), "test"); Map properties = Maps.newHashMap(); properties.put("key1", "val1"); properties.put("key2", "val2"); - icebergCatalog.asSchemas().createSchema(ident, COMMENT_VALUE, properties); - Assertions.assertTrue(icebergCatalog.asSchemas().schemaExists(ident)); + catalogOperations.createSchema(ident, COMMENT_VALUE, properties); + Assertions.assertTrue(catalogOperations.schemaExists(ident)); - Map properties1 = icebergCatalog.asSchemas().loadSchema(ident).properties(); + Map properties1 = catalogOperations.loadSchema(ident).properties(); Assertions.assertEquals("val1", properties1.get("key1")); Assertions.assertEquals("val2", properties1.get("key2")); - icebergCatalog - .asSchemas() - .alterSchema( - ident, - SchemaChange.removeProperty("key1"), - SchemaChange.setProperty("key2", "val2-alter")); - Schema alteredSchema = icebergCatalog.asSchemas().loadSchema(ident); + catalogOperations.alterSchema( + ident, SchemaChange.removeProperty("key1"), SchemaChange.setProperty("key2", "val2-alter")); + Schema alteredSchema = catalogOperations.loadSchema(ident); Map properties2 = alteredSchema.properties(); Assertions.assertFalse(properties2.containsKey("key1")); Assertions.assertEquals("val2-alter", properties2.get("key2")); - icebergCatalog - .asSchemas() - .alterSchema( - ident, - SchemaChange.setProperty("key3", "val3"), - SchemaChange.setProperty("key4", "val4")); - Schema alteredSchema1 = icebergCatalog.asSchemas().loadSchema(ident); + catalogOperations.alterSchema( + ident, SchemaChange.setProperty("key3", "val3"), SchemaChange.setProperty("key4", "val4")); + Schema alteredSchema1 
= catalogOperations.loadSchema(ident); Map properties3 = alteredSchema1.properties(); Assertions.assertEquals("val3", properties3.get("key3")); Assertions.assertEquals("val4", properties3.get("key4")); @@ -119,23 +114,23 @@ public void testAlterSchema() { @Test public void testDropSchema() { IcebergCatalog icebergCatalog = initIcebergCatalog("testDropSchema"); + IcebergCatalogOperations catalogOperations = (IcebergCatalogOperations) icebergCatalog.ops(); NameIdentifier ident = NameIdentifier.of("metalake", icebergCatalog.name(), "test"); Map properties = Maps.newHashMap(); properties.put("key1", "val1"); properties.put("key2", "val2"); - icebergCatalog.asSchemas().createSchema(ident, COMMENT_VALUE, properties); - Assertions.assertTrue(icebergCatalog.asSchemas().schemaExists(ident)); - icebergCatalog.asSchemas().dropSchema(ident, false); - Assertions.assertFalse(icebergCatalog.asSchemas().schemaExists(ident)); + catalogOperations.createSchema(ident, COMMENT_VALUE, properties); + Assertions.assertTrue(catalogOperations.schemaExists(ident)); + catalogOperations.dropSchema(ident, false); + Assertions.assertFalse(catalogOperations.schemaExists(ident)); - Assertions.assertFalse(icebergCatalog.asSchemas().dropSchema(ident, false)); + Assertions.assertFalse(catalogOperations.dropSchema(ident, false)); - SupportsSchemas schemas = icebergCatalog.asSchemas(); Throwable exception = Assertions.assertThrows( - IllegalArgumentException.class, () -> schemas.dropSchema(ident, true)); + IllegalArgumentException.class, () -> catalogOperations.dropSchema(ident, true)); Assertions.assertTrue( exception.getMessage().contains("Iceberg does not support cascading delete operations")); } diff --git a/catalogs/catalog-lakehouse-iceberg/src/test/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/TestIcebergTable.java b/catalogs/catalog-lakehouse-iceberg/src/test/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/TestIcebergTable.java index c5367f86a34..28f20b9a6fa 100644 --- 
a/catalogs/catalog-lakehouse-iceberg/src/test/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/TestIcebergTable.java +++ b/catalogs/catalog-lakehouse-iceberg/src/test/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/TestIcebergTable.java @@ -54,6 +54,7 @@ public class TestIcebergTable { private static final String ICEBERG_SCHEMA_NAME = "test_schema"; private static final String ICEBERG_COMMENT = "test_comment"; private static IcebergCatalog icebergCatalog; + private static IcebergCatalogOperations icebergCatalogOperations; private static IcebergSchema icebergSchema; private static final NameIdentifier schemaIdent = NameIdentifier.of(META_LAKE_NAME, ICEBERG_CATALOG_NAME, ICEBERG_SCHEMA_NAME); @@ -66,14 +67,13 @@ private static void setup() { @AfterEach private void resetSchema() { - TableCatalog tableCatalog = icebergCatalog.asTableCatalog(); NameIdentifier[] nameIdentifiers = - tableCatalog.listTables( + icebergCatalogOperations.listTables( Namespace.of(ArrayUtils.add(schemaIdent.namespace().levels(), schemaIdent.name()))); if (ArrayUtils.isNotEmpty(nameIdentifiers)) { - Arrays.stream(nameIdentifiers).forEach(tableCatalog::dropTable); + Arrays.stream(nameIdentifiers).forEach(icebergCatalogOperations::dropTable); } - icebergCatalog.asSchemas().dropSchema(schemaIdent, false); + icebergCatalogOperations.dropSchema(schemaIdent, false); initIcebergSchema(); } @@ -82,9 +82,7 @@ private static void initIcebergSchema() { properties.put("key1", "val1"); properties.put("key2", "val2"); - icebergSchema = - (IcebergSchema) - icebergCatalog.asSchemas().createSchema(schemaIdent, ICEBERG_COMMENT, properties); + icebergSchema = icebergCatalogOperations.createSchema(schemaIdent, ICEBERG_COMMENT, properties); } private static void initIcebergCatalog() { @@ -92,6 +90,7 @@ private static void initIcebergCatalog() { Map conf = Maps.newHashMap(); icebergCatalog = new IcebergCatalog().withCatalogConf(conf).withCatalogEntity(entity); + icebergCatalogOperations = 
(IcebergCatalogOperations) icebergCatalog.ops(); } private static CatalogEntity createDefaultCatalogEntity() { @@ -163,22 +162,20 @@ public void testCreateIcebergTable() { SortOrder[] sortOrders = createSortOrder(); Table table = - icebergCatalog - .asTableCatalog() - .createTable( - tableIdentifier, - columns, - ICEBERG_COMMENT, - properties, - new Transform[0], - Distributions.NONE, - sortOrders); + icebergCatalogOperations.createTable( + tableIdentifier, + columns, + ICEBERG_COMMENT, + properties, + new Transform[0], + Distributions.NONE, + sortOrders); Assertions.assertEquals(tableIdentifier.name(), table.name()); Assertions.assertEquals(ICEBERG_COMMENT, table.comment()); Assertions.assertEquals("val1", table.properties().get("key1")); Assertions.assertEquals("val2", table.properties().get("key2")); - Table loadedTable = icebergCatalog.asTableCatalog().loadTable(tableIdentifier); + Table loadedTable = icebergCatalogOperations.loadTable(tableIdentifier); Assertions.assertEquals("val1", loadedTable.properties().get("key1")); Assertions.assertEquals("val2", loadedTable.properties().get("key2")); @@ -186,9 +183,8 @@ public void testCreateIcebergTable() { Assertions.assertFalse(loadedTable.columns()[1].nullable()); Assertions.assertFalse(loadedTable.columns()[2].nullable()); - Assertions.assertTrue(icebergCatalog.asTableCatalog().tableExists(tableIdentifier)); - NameIdentifier[] tableIdents = - icebergCatalog.asTableCatalog().listTables(tableIdentifier.namespace()); + Assertions.assertTrue(icebergCatalogOperations.tableExists(tableIdentifier)); + NameIdentifier[] tableIdents = icebergCatalogOperations.listTables(tableIdentifier.namespace()); Assertions.assertTrue(Arrays.asList(tableIdents).contains(tableIdentifier)); Assertions.assertEquals(sortOrders.length, loadedTable.sortOrder().length); @@ -200,7 +196,7 @@ public void testCreateIcebergTable() { // Compare sort and order // Test exception - TableCatalog tableCatalog = icebergCatalog.asTableCatalog(); + 
TableCatalog tableCatalog = icebergCatalogOperations; Throwable exception = Assertions.assertThrows( TableAlreadyExistsException.class, @@ -245,28 +241,26 @@ public void testCreatePartitionedIcebergTable() { }; Table table = - icebergCatalog - .asTableCatalog() - .createTable(tableIdentifier, columns, ICEBERG_COMMENT, properties, partitions); + icebergCatalogOperations.createTable( + tableIdentifier, columns, ICEBERG_COMMENT, properties, partitions); Assertions.assertEquals(tableIdentifier.name(), table.name()); Assertions.assertEquals(ICEBERG_COMMENT, table.comment()); Assertions.assertEquals("val1", table.properties().get("key1")); Assertions.assertEquals("val2", table.properties().get("key2")); Assertions.assertArrayEquals(partitions, table.partitioning()); - Table loadedTable = icebergCatalog.asTableCatalog().loadTable(tableIdentifier); + Table loadedTable = icebergCatalogOperations.loadTable(tableIdentifier); Assertions.assertEquals("val1", loadedTable.properties().get("key1")); Assertions.assertEquals("val2", loadedTable.properties().get("key2")); Assertions.assertArrayEquals(partitions, loadedTable.partitioning()); - Assertions.assertTrue(icebergCatalog.asTableCatalog().tableExists(tableIdentifier)); - NameIdentifier[] tableIdents = - icebergCatalog.asTableCatalog().listTables(tableIdentifier.namespace()); + Assertions.assertTrue(icebergCatalogOperations.tableExists(tableIdentifier)); + NameIdentifier[] tableIdents = icebergCatalogOperations.listTables(tableIdentifier.namespace()); Assertions.assertTrue(Arrays.asList(tableIdents).contains(tableIdentifier)); // Test exception - TableCatalog tableCatalog = icebergCatalog.asTableCatalog(); + TableCatalog tableCatalog = icebergCatalogOperations; Transform[] partitions1 = new Transform[] {day(col2.name())}; Throwable exception = Assertions.assertThrows( @@ -319,26 +313,24 @@ public void testDropIcebergTable() { .build(); Column[] columns = new Column[] {col1, col2}; - icebergCatalog - .asTableCatalog() - 
.createTable( - tableIdentifier, - columns, - ICEBERG_COMMENT, - properties, - new Transform[0], - Distributions.NONE, - new SortOrder[0]); - - Assertions.assertTrue(icebergCatalog.asTableCatalog().tableExists(tableIdentifier)); - icebergCatalog.asTableCatalog().dropTable(tableIdentifier); - Assertions.assertFalse(icebergCatalog.asTableCatalog().tableExists(tableIdentifier)); + icebergCatalogOperations.createTable( + tableIdentifier, + columns, + ICEBERG_COMMENT, + properties, + new Transform[0], + Distributions.NONE, + new SortOrder[0]); + + Assertions.assertTrue(icebergCatalogOperations.tableExists(tableIdentifier)); + icebergCatalogOperations.dropTable(tableIdentifier); + Assertions.assertFalse(icebergCatalogOperations.tableExists(tableIdentifier)); } @Test public void testListTableException() { Namespace tableNs = Namespace.of("metalake", icebergCatalog.name(), "not_exist_db"); - TableCatalog tableCatalog = icebergCatalog.asTableCatalog(); + TableCatalog tableCatalog = icebergCatalogOperations; Throwable exception = Assertions.assertThrows( NoSuchSchemaException.class, () -> tableCatalog.listTables(tableNs)); @@ -373,60 +365,52 @@ public void testAlterIcebergTable() { SortOrder[] sortOrders = createSortOrder(); Table createdTable = - icebergCatalog - .asTableCatalog() - .createTable( - tableIdentifier, - columns, - ICEBERG_COMMENT, - properties, - new Transform[0], - distribution, - sortOrders); - Assertions.assertTrue(icebergCatalog.asTableCatalog().tableExists(tableIdentifier)); - - TableCatalog tableCatalog = icebergCatalog.asTableCatalog(); + icebergCatalogOperations.createTable( + tableIdentifier, + columns, + ICEBERG_COMMENT, + properties, + new Transform[0], + distribution, + sortOrders); + Assertions.assertTrue(icebergCatalogOperations.tableExists(tableIdentifier)); + TableChange update = TableChange.updateComment(ICEBERG_COMMENT + "_new"); TableChange rename = TableChange.rename("test_iceberg_table_new"); Throwable exception = Assertions.assertThrows( 
IllegalArgumentException.class, - () -> tableCatalog.alterTable(tableIdentifier, update, rename)); + () -> icebergCatalogOperations.alterTable(tableIdentifier, update, rename)); Assertions.assertTrue( exception.getMessage().contains("The operation to change the table name cannot")); // test alter - icebergCatalog - .asTableCatalog() - .alterTable( - tableIdentifier, - TableChange.updateComment(ICEBERG_COMMENT + "_new"), - TableChange.removeProperty("key1"), - TableChange.setProperty("key2", "val2_new"), - // columns current format: [col_1:I8:comment, col_2:DATE:comment] - TableChange.addColumn(new String[] {"col_3"}, Types.StringType.get()), - // columns current format: [col_1:I8:comment, col_2:DATE:comment, col_3:STRING:null] - TableChange.renameColumn(new String[] {"col_2"}, "col_2_new"), - // columns current format: [col_1:I8:comment, col_2_new:DATE:comment, col_3:STRING:null] - TableChange.updateColumnComment(new String[] {"col_1"}, ICEBERG_COMMENT + "_new"), - // columns current format: [col_1:I8:comment_new, col_2_new:DATE:comment, - // col_3:STRING:null] - TableChange.updateColumnType(new String[] {"col_1"}, Types.IntegerType.get()), - // columns current format: [col_1:I32:comment_new, col_2_new:DATE:comment, - // col_3:STRING:null] - TableChange.updateColumnPosition( - new String[] {"col_2"}, TableChange.ColumnPosition.first()) - // columns current: [col_2_new:DATE:comment, col_1:I32:comment_new, col_3:STRING:null] - ); - - icebergCatalog - .asTableCatalog() - .alterTable(tableIdentifier, TableChange.rename("test_iceberg_table_new")); + icebergCatalogOperations.alterTable( + tableIdentifier, + TableChange.updateComment(ICEBERG_COMMENT + "_new"), + TableChange.removeProperty("key1"), + TableChange.setProperty("key2", "val2_new"), + // columns current format: [col_1:I8:comment, col_2:DATE:comment] + TableChange.addColumn(new String[] {"col_3"}, Types.StringType.get()), + // columns current format: [col_1:I8:comment, col_2:DATE:comment, col_3:STRING:null] + 
TableChange.renameColumn(new String[] {"col_2"}, "col_2_new"), + // columns current format: [col_1:I8:comment, col_2_new:DATE:comment, col_3:STRING:null] + TableChange.updateColumnComment(new String[] {"col_1"}, ICEBERG_COMMENT + "_new"), + // columns current format: [col_1:I8:comment_new, col_2_new:DATE:comment, + // col_3:STRING:null] + TableChange.updateColumnType(new String[] {"col_1"}, Types.IntegerType.get()), + // columns current format: [col_1:I32:comment_new, col_2_new:DATE:comment, + // col_3:STRING:null] + TableChange.updateColumnPosition(new String[] {"col_2"}, TableChange.ColumnPosition.first()) + // columns current: [col_2_new:DATE:comment, col_1:I32:comment_new, col_3:STRING:null] + ); + + icebergCatalogOperations.alterTable( + tableIdentifier, TableChange.rename("test_iceberg_table_new")); Table alteredTable = - icebergCatalog - .asTableCatalog() - .loadTable(NameIdentifier.of(tableIdentifier.namespace(), "test_iceberg_table_new")); + icebergCatalogOperations.loadTable( + NameIdentifier.of(tableIdentifier.namespace(), "test_iceberg_table_new")); Assertions.assertEquals(ICEBERG_COMMENT + "_new", alteredTable.comment()); Assertions.assertFalse(alteredTable.properties().containsKey("key1")); @@ -455,15 +439,12 @@ public void testAlterIcebergTable() { Assertions.assertArrayEquals(expected, alteredTable.columns()); // test delete column change - icebergCatalog - .asTableCatalog() - .alterTable( - NameIdentifier.of(tableIdentifier.namespace(), "test_iceberg_table_new"), - TableChange.deleteColumn(new String[] {"col_1"}, false)); + icebergCatalogOperations.alterTable( + NameIdentifier.of(tableIdentifier.namespace(), "test_iceberg_table_new"), + TableChange.deleteColumn(new String[] {"col_1"}, false)); Table alteredTable1 = - icebergCatalog - .asTableCatalog() - .loadTable(NameIdentifier.of(tableIdentifier.namespace(), "test_iceberg_table_new")); + icebergCatalogOperations.loadTable( + NameIdentifier.of(tableIdentifier.namespace(), 
"test_iceberg_table_new")); expected = Arrays.stream(expected).filter(c -> !"col_1".equals(c.name())).toArray(Column[]::new); Assertions.assertArrayEquals(expected, alteredTable1.columns()); diff --git a/catalogs/catalog-lakehouse-iceberg/src/test/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/integration/test/CatalogIcebergBaseIT.java b/catalogs/catalog-lakehouse-iceberg/src/test/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/integration/test/CatalogIcebergBaseIT.java index 0afad8e473e..59ce2993b16 100644 --- a/catalogs/catalog-lakehouse-iceberg/src/test/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/integration/test/CatalogIcebergBaseIT.java +++ b/catalogs/catalog-lakehouse-iceberg/src/test/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/integration/test/CatalogIcebergBaseIT.java @@ -15,6 +15,9 @@ import com.datastrato.gravitino.Catalog; import com.datastrato.gravitino.NameIdentifier; import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.Schema; +import com.datastrato.gravitino.SchemaChange; +import com.datastrato.gravitino.SupportsSchemas; import com.datastrato.gravitino.auth.AuthConstants; import com.datastrato.gravitino.catalog.lakehouse.iceberg.IcebergConfig; import com.datastrato.gravitino.catalog.lakehouse.iceberg.IcebergSchemaPropertiesMetadata; @@ -30,9 +33,6 @@ import com.datastrato.gravitino.integration.test.util.AbstractIT; import com.datastrato.gravitino.integration.test.util.GravitinoITUtils; import com.datastrato.gravitino.rel.Column; -import com.datastrato.gravitino.rel.Schema; -import com.datastrato.gravitino.rel.SchemaChange; -import com.datastrato.gravitino.rel.SupportsSchemas; import com.datastrato.gravitino.rel.Table; import com.datastrato.gravitino.rel.TableCatalog; import com.datastrato.gravitino.rel.TableChange; @@ -163,7 +163,7 @@ private void clearTableAndSchema() { for (NameIdentifier nameIdentifier : nameIdentifiers) { catalog.asTableCatalog().dropTable(nameIdentifier); } - 
catalog.asSchemas().dropSchema(NameIdentifier.of(metalakeName, catalogName, schemaName), false); + catalog.asSchemas().dropSchema(schemaName, false); } private void createMetalake() { @@ -211,8 +211,8 @@ private void createSchema() { prop.put("key1", "val1"); prop.put("key2", "val2"); - Schema createdSchema = catalog.asSchemas().createSchema(ident, schema_comment, prop); - Schema loadSchema = catalog.asSchemas().loadSchema(ident); + Schema createdSchema = catalog.asSchemas().createSchema(ident.name(), schema_comment, prop); + Schema loadSchema = catalog.asSchemas().loadSchema(ident.name()); Assertions.assertEquals(createdSchema.name(), loadSchema.name()); prop.forEach((key, value) -> Assertions.assertEquals(loadSchema.properties().get(key), value)); } @@ -246,9 +246,8 @@ private Map createProperties() { @Test void testOperationIcebergSchema() { SupportsSchemas schemas = catalog.asSchemas(); - Namespace namespace = Namespace.of(metalakeName, catalogName); // list schema check. - NameIdentifier[] nameIdentifiers = schemas.listSchemas(namespace); + NameIdentifier[] nameIdentifiers = schemas.listSchemas(); Set schemaNames = Arrays.stream(nameIdentifiers).map(NameIdentifier::name).collect(Collectors.toSet()); Assertions.assertTrue(schemaNames.contains(schemaName)); @@ -262,8 +261,8 @@ void testOperationIcebergSchema() { // create schema check. 
String testSchemaName = GravitinoITUtils.genRandomName("test_schema_1"); NameIdentifier schemaIdent = NameIdentifier.of(metalakeName, catalogName, testSchemaName); - schemas.createSchema(schemaIdent, schema_comment, Collections.emptyMap()); - nameIdentifiers = schemas.listSchemas(Namespace.of(metalakeName, catalogName)); + schemas.createSchema(schemaIdent.name(), schema_comment, Collections.emptyMap()); + nameIdentifiers = schemas.listSchemas(); Map schemaMap = Arrays.stream(nameIdentifiers).collect(Collectors.toMap(NameIdentifier::name, v -> v)); Assertions.assertTrue(schemaMap.containsKey(testSchemaName)); @@ -275,8 +274,8 @@ void testOperationIcebergSchema() { Assertions.assertTrue(schemaNames.contains(testSchemaName)); // alert、load schema check. - schemas.alterSchema(schemaIdent, SchemaChange.setProperty("t1", "v1")); - Schema schema = schemas.loadSchema(schemaIdent); + schemas.alterSchema(schemaIdent.name(), SchemaChange.setProperty("t1", "v1")); + Schema schema = schemas.loadSchema(schemaIdent.name()); String val = schema.properties().get("t1"); Assertions.assertEquals("v1", val); @@ -288,11 +287,12 @@ void testOperationIcebergSchema() { Map emptyMap = Collections.emptyMap(); Assertions.assertThrows( SchemaAlreadyExistsException.class, - () -> schemas.createSchema(schemaIdent, schema_comment, emptyMap)); + () -> schemas.createSchema(schemaIdent.name(), schema_comment, emptyMap)); // drop schema check. 
- schemas.dropSchema(schemaIdent, false); - Assertions.assertThrows(NoSuchSchemaException.class, () -> schemas.loadSchema(schemaIdent)); + schemas.dropSchema(schemaIdent.name(), false); + Assertions.assertThrows( + NoSuchSchemaException.class, () -> schemas.loadSchema(schemaIdent.name())); org.apache.iceberg.catalog.Namespace icebergNamespace = IcebergTableOpsHelper.getIcebergNamespace(schemaIdent.name()); Assertions.assertThrows( @@ -301,12 +301,11 @@ void testOperationIcebergSchema() { icebergSupportsNamespaces.loadNamespaceMetadata(icebergNamespace); }); - nameIdentifiers = schemas.listSchemas(Namespace.of(metalakeName, catalogName)); + nameIdentifiers = schemas.listSchemas(); schemaMap = Arrays.stream(nameIdentifiers).collect(Collectors.toMap(NameIdentifier::name, v -> v)); Assertions.assertFalse(schemaMap.containsKey(testSchemaName)); - Assertions.assertFalse( - schemas.dropSchema(NameIdentifier.of(metalakeName, catalogName, "no-exits"), false)); + Assertions.assertFalse(schemas.dropSchema("no-exits", false)); TableCatalog tableCatalog = catalog.asTableCatalog(); // create failed check. @@ -324,8 +323,8 @@ void testOperationIcebergSchema() { Distributions.NONE, null)); // drop schema failed check. 
- Assertions.assertFalse(schemas.dropSchema(schemaIdent, true)); - Assertions.assertFalse(schemas.dropSchema(schemaIdent, false)); + Assertions.assertFalse(schemas.dropSchema(schemaIdent.name(), true)); + Assertions.assertFalse(schemas.dropSchema(schemaIdent.name(), false)); Assertions.assertFalse(tableCatalog.dropTable(table)); icebergNamespaces = icebergSupportsNamespaces.listNamespaces(IcebergTableOpsHelper.getIcebergNamespace()); @@ -894,12 +893,12 @@ public void testOperatorSchemeProperties() { IllegalArgumentException illegalArgumentException = Assertions.assertThrows( IllegalArgumentException.class, - () -> schemas.createSchema(ident, schema_comment, prop)); + () -> schemas.createSchema(ident.name(), schema_comment, prop)); Assertions.assertTrue( illegalArgumentException.getMessage().contains(IcebergSchemaPropertiesMetadata.COMMENT)); prop.remove(IcebergSchemaPropertiesMetadata.COMMENT); - catalog.asSchemas().createSchema(ident, schema_comment, prop); - Schema loadSchema = catalog.asSchemas().loadSchema(ident); + catalog.asSchemas().createSchema(ident.name(), schema_comment, prop); + Schema loadSchema = catalog.asSchemas().loadSchema(ident.name()); Assertions.assertEquals(AuthConstants.ANONYMOUS_USER, loadSchema.auditInfo().creator()); Assertions.assertNull(loadSchema.auditInfo().lastModifier()); Assertions.assertFalse( @@ -909,19 +908,21 @@ public void testOperatorSchemeProperties() { // alter SchemaChange change = SchemaChange.setProperty("comment", "v1"); Assertions.assertThrows( - IllegalArgumentException.class, () -> schemas.alterSchema(ident, change)); + IllegalArgumentException.class, () -> schemas.alterSchema(ident.name(), change)); Assertions.assertDoesNotThrow( () -> - catalog.asSchemas().alterSchema(ident, SchemaChange.setProperty("comment-test", "v1"))); - Schema schema = catalog.asSchemas().loadSchema(ident); + catalog + .asSchemas() + .alterSchema(ident.name(), SchemaChange.setProperty("comment-test", "v1"))); + Schema schema = 
catalog.asSchemas().loadSchema(ident.name()); Assertions.assertEquals(AuthConstants.ANONYMOUS_USER, schema.auditInfo().creator()); Assertions.assertEquals(AuthConstants.ANONYMOUS_USER, schema.auditInfo().lastModifier()); Assertions.assertEquals("v1", schema.properties().get("comment-test")); // drop - Assertions.assertTrue(catalog.asSchemas().dropSchema(ident, false)); - Assertions.assertThrows(NoSuchSchemaException.class, () -> schemas.loadSchema(ident)); + Assertions.assertTrue(catalog.asSchemas().dropSchema(ident.name(), false)); + Assertions.assertThrows(NoSuchSchemaException.class, () -> schemas.loadSchema(ident.name())); } @Test diff --git a/catalogs/catalog-lakehouse-iceberg/src/test/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/integration/test/TestMultipleJDBCLoad.java b/catalogs/catalog-lakehouse-iceberg/src/test/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/integration/test/TestMultipleJDBCLoad.java index 7924130bd92..4a0e4e3b53e 100644 --- a/catalogs/catalog-lakehouse-iceberg/src/test/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/integration/test/TestMultipleJDBCLoad.java +++ b/catalogs/catalog-lakehouse-iceberg/src/test/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/integration/test/TestMultipleJDBCLoad.java @@ -10,7 +10,6 @@ import com.datastrato.gravitino.Catalog; import com.datastrato.gravitino.NameIdentifier; -import com.datastrato.gravitino.Namespace; import com.datastrato.gravitino.catalog.lakehouse.iceberg.IcebergConfig; import com.datastrato.gravitino.client.GravitinoMetalake; import com.datastrato.gravitino.integration.test.container.MySQLContainer; @@ -121,29 +120,15 @@ public void testCreateMultipleJdbcInIceberg() throws URISyntaxException, SQLExce "comment", icebergMysqlConf); - NameIdentifier[] nameIdentifiers = - mysqlCatalog.asSchemas().listSchemas(Namespace.of(metalakeName, mysqlCatalogName)); + NameIdentifier[] nameIdentifiers = mysqlCatalog.asSchemas().listSchemas(); Assertions.assertEquals(0, 
nameIdentifiers.length); - nameIdentifiers = - postgreSqlCatalog - .asSchemas() - .listSchemas(Namespace.of(metalakeName, postgreSqlCatalogName)); + nameIdentifiers = postgreSqlCatalog.asSchemas().listSchemas(); Assertions.assertEquals(0, nameIdentifiers.length); String schemaName = RandomNameUtils.genRandomName("it_schema"); - mysqlCatalog - .asSchemas() - .createSchema( - NameIdentifier.of(metalakeName, mysqlCatalogName, schemaName), - null, - Collections.emptyMap()); + mysqlCatalog.asSchemas().createSchema(schemaName, null, Collections.emptyMap()); - postgreSqlCatalog - .asSchemas() - .createSchema( - NameIdentifier.of(metalakeName, postgreSqlCatalogName, schemaName), - null, - Collections.emptyMap()); + postgreSqlCatalog.asSchemas().createSchema(schemaName, null, Collections.emptyMap()); String tableName = RandomNameUtils.genRandomName("it_table"); diff --git a/clients/client-java/src/main/java/com/datastrato/gravitino/client/BaseSchemaCatalog.java b/clients/client-java/src/main/java/com/datastrato/gravitino/client/BaseSchemaCatalog.java index c03f77607ca..66ec720e795 100644 --- a/clients/client-java/src/main/java/com/datastrato/gravitino/client/BaseSchemaCatalog.java +++ b/clients/client-java/src/main/java/com/datastrato/gravitino/client/BaseSchemaCatalog.java @@ -4,8 +4,12 @@ */ package com.datastrato.gravitino.client; +import com.datastrato.gravitino.Catalog; import com.datastrato.gravitino.NameIdentifier; import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.Schema; +import com.datastrato.gravitino.SchemaChange; +import com.datastrato.gravitino.SupportsSchemas; import com.datastrato.gravitino.dto.AuditDTO; import com.datastrato.gravitino.dto.CatalogDTO; import com.datastrato.gravitino.dto.requests.SchemaCreateRequest; @@ -18,9 +22,6 @@ import com.datastrato.gravitino.exceptions.NoSuchSchemaException; import com.datastrato.gravitino.exceptions.NonEmptySchemaException; import 
com.datastrato.gravitino.exceptions.SchemaAlreadyExistsException; -import com.datastrato.gravitino.rel.Schema; -import com.datastrato.gravitino.rel.SchemaChange; -import com.datastrato.gravitino.rel.SupportsSchemas; import com.datastrato.gravitino.rest.RESTUtils; import java.util.Arrays; import java.util.Collections; @@ -35,15 +36,21 @@ * common methods for managing schemas in a catalog. With {@link BaseSchemaCatalog}, users can list, * create, load, alter and drop a schema with specified identifier. */ -abstract class BaseSchemaCatalog extends CatalogDTO implements SupportsSchemas { +abstract class BaseSchemaCatalog extends CatalogDTO implements Catalog, SupportsSchemas { private static final Logger LOG = LoggerFactory.getLogger(BaseSchemaCatalog.class); /** The REST client to send the requests. */ protected final RESTClient restClient; + /** The namespace of current catalog, which is the metalake name. */ + protected final Namespace namespace; + + /** The namespace of the schemas, which is the metalake name with catalog name. */ + protected final Namespace schemaNamespace; BaseSchemaCatalog( + Namespace namespace, String name, - Type type, + Catalog.Type type, String provider, String comment, Map properties, @@ -51,6 +58,9 @@ abstract class BaseSchemaCatalog extends CatalogDTO implements SupportsSchemas { RESTClient restClient) { super(name, type, provider, comment, properties, auditDTO); this.restClient = restClient; + Namespace.checkCatalog(namespace); + this.namespace = namespace; + this.schemaNamespace = Namespace.ofSchema(namespace.level(0), name); } @Override @@ -61,17 +71,15 @@ public SupportsSchemas asSchemas() throws UnsupportedOperationException { /** * List all the schemas under the given catalog namespace. * - * @param namespace The namespace of the catalog. * @return A list of {@link NameIdentifier} of the schemas under the given catalog namespace. * @throws NoSuchCatalogException if the catalog with specified namespace does not exist. 
*/ @Override - public NameIdentifier[] listSchemas(Namespace namespace) throws NoSuchCatalogException { - Namespace.checkSchema(namespace); + public NameIdentifier[] listSchemas() throws NoSuchCatalogException { EntityListResponse resp = restClient.get( - formatSchemaRequestPath(namespace), + formatSchemaRequestPath(schemaNamespace), EntityListResponse.class, Collections.emptyMap(), ErrorHandlers.schemaErrorHandler()); @@ -83,7 +91,7 @@ public NameIdentifier[] listSchemas(Namespace namespace) throws NoSuchCatalogExc /** * Create a new schema with specified identifier, comment and metadata. * - * @param ident The name identifier of the schema. + * @param schemaName The name identifier of the schema. * @param comment The comment of the schema. * @param properties The properties of the schema. * @return The created {@link Schema}. @@ -91,17 +99,16 @@ public NameIdentifier[] listSchemas(Namespace namespace) throws NoSuchCatalogExc * @throws SchemaAlreadyExistsException if the schema with specified identifier already exists. */ @Override - public Schema createSchema(NameIdentifier ident, String comment, Map properties) + public Schema createSchema(String schemaName, String comment, Map properties) throws NoSuchCatalogException, SchemaAlreadyExistsException { - NameIdentifier.checkSchema(ident); SchemaCreateRequest req = - new SchemaCreateRequest(RESTUtils.encodeString(ident.name()), comment, properties); + new SchemaCreateRequest(RESTUtils.encodeString(schemaName), comment, properties); req.validate(); SchemaResponse resp = restClient.post( - formatSchemaRequestPath(ident.namespace()), + formatSchemaRequestPath(schemaNamespace), req, SchemaResponse.class, Collections.emptyMap(), @@ -114,17 +121,16 @@ public Schema createSchema(NameIdentifier ident, String comment, Map reqs = Arrays.stream(changes) @@ -155,7 +160,7 @@ public Schema alterSchema(NameIdentifier ident, SchemaChange... 
changes) SchemaResponse resp = restClient.put( - formatSchemaRequestPath(ident.namespace()) + "/" + RESTUtils.encodeString(ident.name()), + formatSchemaRequestPath(schemaNamespace) + "/" + RESTUtils.encodeString(schemaName), updatesRequest, SchemaResponse.class, Collections.emptyMap(), @@ -168,21 +173,18 @@ public Schema alterSchema(NameIdentifier ident, SchemaChange... changes) /** * Drop the schema with specified identifier. * - * @param ident The name identifier of the schema. + * @param schemaName The name identifier of the schema. * @param cascade Whether to drop all the tables under the schema. * @return true if the schema is dropped successfully, false otherwise. * @throws NonEmptySchemaException if the schema is not empty and cascade is false. */ @Override - public boolean dropSchema(NameIdentifier ident, boolean cascade) throws NonEmptySchemaException { - NameIdentifier.checkSchema(ident); + public boolean dropSchema(String schemaName, boolean cascade) throws NonEmptySchemaException { try { DropResponse resp = restClient.delete( - formatSchemaRequestPath(ident.namespace()) - + "/" - + RESTUtils.encodeString(ident.name()), + formatSchemaRequestPath(schemaNamespace) + "/" + RESTUtils.encodeString(schemaName), Collections.singletonMap("cascade", String.valueOf(cascade)), DropResponse.class, Collections.emptyMap(), @@ -193,7 +195,7 @@ public boolean dropSchema(NameIdentifier ident, boolean cascade) throws NonEmpty } catch (NonEmptySchemaException e) { throw e; } catch (Exception e) { - LOG.warn("Failed to drop schema {}", ident, e); + LOG.warn("Failed to drop schema {}", schemaName, e); return false; } } diff --git a/clients/client-java/src/main/java/com/datastrato/gravitino/client/DTOConverters.java b/clients/client-java/src/main/java/com/datastrato/gravitino/client/DTOConverters.java index b12af8ba85f..e785f962139 100644 --- a/clients/client-java/src/main/java/com/datastrato/gravitino/client/DTOConverters.java +++ 
b/clients/client-java/src/main/java/com/datastrato/gravitino/client/DTOConverters.java @@ -9,6 +9,8 @@ import com.datastrato.gravitino.Catalog; import com.datastrato.gravitino.CatalogChange; import com.datastrato.gravitino.MetalakeChange; +import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.SchemaChange; import com.datastrato.gravitino.authorization.SecurableObject; import com.datastrato.gravitino.dto.AuditDTO; import com.datastrato.gravitino.dto.CatalogDTO; @@ -24,7 +26,6 @@ import com.datastrato.gravitino.file.FilesetChange; import com.datastrato.gravitino.messaging.TopicChange; import com.datastrato.gravitino.rel.Column; -import com.datastrato.gravitino.rel.SchemaChange; import com.datastrato.gravitino.rel.TableChange; import com.datastrato.gravitino.rel.expressions.Expression; @@ -66,10 +67,12 @@ static MetalakeUpdateRequest toMetalakeUpdateRequest(MetalakeChange change) { } @SuppressWarnings("unchecked") - static Catalog toCatalog(CatalogDTO catalog, RESTClient client) { + static Catalog toCatalog(String metalake, CatalogDTO catalog, RESTClient client) { + Namespace namespace = Namespace.ofCatalog(metalake); switch (catalog.type()) { case RELATIONAL: return RelationalCatalog.builder() + .withNamespace(namespace) .withName(catalog.name()) .withType(catalog.type()) .withProvider(catalog.provider()) @@ -81,6 +84,7 @@ static Catalog toCatalog(CatalogDTO catalog, RESTClient client) { case FILESET: return FilesetCatalog.builder() + .withNamespace(namespace) .withName(catalog.name()) .withType(catalog.type()) .withProvider(catalog.provider()) @@ -92,6 +96,7 @@ static Catalog toCatalog(CatalogDTO catalog, RESTClient client) { case MESSAGING: return MessagingCatalog.builder() + .withNamespace(namespace) .withName(catalog.name()) .withType(catalog.type()) .withProvider(catalog.provider()) diff --git a/clients/client-java/src/main/java/com/datastrato/gravitino/client/FilesetCatalog.java 
b/clients/client-java/src/main/java/com/datastrato/gravitino/client/FilesetCatalog.java index 027aac6a164..0a620d45bd8 100644 --- a/clients/client-java/src/main/java/com/datastrato/gravitino/client/FilesetCatalog.java +++ b/clients/client-java/src/main/java/com/datastrato/gravitino/client/FilesetCatalog.java @@ -4,6 +4,7 @@ */ package com.datastrato.gravitino.client; +import com.datastrato.gravitino.Catalog; import com.datastrato.gravitino.NameIdentifier; import com.datastrato.gravitino.Namespace; import com.datastrato.gravitino.dto.AuditDTO; @@ -38,14 +39,15 @@ public class FilesetCatalog extends BaseSchemaCatalog implements com.datastrato.gravitino.file.FilesetCatalog { FilesetCatalog( + Namespace namespace, String name, - Type type, + Catalog.Type type, String provider, String comment, Map properties, AuditDTO auditDTO, RESTClient restClient) { - super(name, type, provider, comment, properties, auditDTO, restClient); + super(namespace, name, type, provider, comment, properties, auditDTO, restClient); } @Override @@ -228,9 +230,16 @@ public static Builder builder() { static class Builder extends CatalogDTO.Builder { /** The REST client to send the requests. 
*/ private RESTClient restClient; + /** The namespace of the catalog */ + private Namespace namespace; private Builder() {} + Builder withNamespace(Namespace namespace) { + this.namespace = namespace; + return this; + } + Builder withRestClient(RESTClient restClient) { this.restClient = restClient; return this; @@ -238,13 +247,15 @@ Builder withRestClient(RESTClient restClient) { @Override public FilesetCatalog build() { + Namespace.checkCatalog(namespace); Preconditions.checkArgument(restClient != null, "restClient must be set"); Preconditions.checkArgument(StringUtils.isNotBlank(name), "name must not be blank"); Preconditions.checkArgument(type != null, "type must not be null"); Preconditions.checkArgument(StringUtils.isNotBlank(provider), "provider must not be blank"); Preconditions.checkArgument(audit != null, "audit must not be null"); - return new FilesetCatalog(name, type, provider, comment, properties, audit, restClient); + return new FilesetCatalog( + namespace, name, type, provider, comment, properties, audit, restClient); } } } diff --git a/clients/client-java/src/main/java/com/datastrato/gravitino/client/GravitinoAdminClient.java b/clients/client-java/src/main/java/com/datastrato/gravitino/client/GravitinoAdminClient.java index efa06c6008b..1ba99650744 100644 --- a/clients/client-java/src/main/java/com/datastrato/gravitino/client/GravitinoAdminClient.java +++ b/clients/client-java/src/main/java/com/datastrato/gravitino/client/GravitinoAdminClient.java @@ -6,6 +6,7 @@ package com.datastrato.gravitino.client; import com.datastrato.gravitino.MetalakeChange; +import com.datastrato.gravitino.SupportsMetalakes; import com.datastrato.gravitino.authorization.Group; import com.datastrato.gravitino.authorization.Role; import com.datastrato.gravitino.authorization.SecurableObject; @@ -35,7 +36,6 @@ import com.datastrato.gravitino.exceptions.NoSuchUserException; import com.datastrato.gravitino.exceptions.RoleAlreadyExistsException; import 
com.datastrato.gravitino.exceptions.UserAlreadyExistsException; -import com.datastrato.gravitino.rel.SupportsMetalakes; import com.google.common.base.Preconditions; import java.util.Arrays; import java.util.Collections; diff --git a/clients/client-java/src/main/java/com/datastrato/gravitino/client/GravitinoClient.java b/clients/client-java/src/main/java/com/datastrato/gravitino/client/GravitinoClient.java index acee395edb6..598156d1c6d 100644 --- a/clients/client-java/src/main/java/com/datastrato/gravitino/client/GravitinoClient.java +++ b/clients/client-java/src/main/java/com/datastrato/gravitino/client/GravitinoClient.java @@ -8,10 +8,10 @@ import com.datastrato.gravitino.Catalog; import com.datastrato.gravitino.CatalogChange; import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.SupportsCatalogs; import com.datastrato.gravitino.exceptions.CatalogAlreadyExistsException; import com.datastrato.gravitino.exceptions.NoSuchCatalogException; import com.datastrato.gravitino.exceptions.NoSuchMetalakeException; -import com.datastrato.gravitino.rel.SupportsCatalogs; import com.google.common.base.Preconditions; import java.util.Map; diff --git a/clients/client-java/src/main/java/com/datastrato/gravitino/client/GravitinoMetalake.java b/clients/client-java/src/main/java/com/datastrato/gravitino/client/GravitinoMetalake.java index 25f35dcc32e..f207d4d9919 100644 --- a/clients/client-java/src/main/java/com/datastrato/gravitino/client/GravitinoMetalake.java +++ b/clients/client-java/src/main/java/com/datastrato/gravitino/client/GravitinoMetalake.java @@ -7,6 +7,7 @@ import com.datastrato.gravitino.Catalog; import com.datastrato.gravitino.CatalogChange; import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.SupportsCatalogs; import com.datastrato.gravitino.dto.AuditDTO; import com.datastrato.gravitino.dto.MetalakeDTO; import com.datastrato.gravitino.dto.requests.CatalogCreateRequest; @@ -19,7 +20,6 @@ import 
com.datastrato.gravitino.exceptions.CatalogAlreadyExistsException; import com.datastrato.gravitino.exceptions.NoSuchCatalogException; import com.datastrato.gravitino.exceptions.NoSuchMetalakeException; -import com.datastrato.gravitino.rel.SupportsCatalogs; import com.google.common.base.Preconditions; import java.util.Arrays; import java.util.Collections; @@ -94,7 +94,7 @@ public Catalog[] listCatalogsInfo() throws NoSuchMetalakeException { ErrorHandlers.catalogErrorHandler()); return Arrays.stream(resp.getCatalogs()) - .map(c -> DTOConverters.toCatalog(c, restClient)) + .map(c -> DTOConverters.toCatalog(this.name(), c, restClient)) .toArray(Catalog[]::new); } @@ -116,7 +116,7 @@ public Catalog loadCatalog(String catalogName) throws NoSuchCatalogException { ErrorHandlers.catalogErrorHandler()); resp.validate(); - return DTOConverters.toCatalog(resp.getCatalog(), restClient); + return DTOConverters.toCatalog(this.name(), resp.getCatalog(), restClient); } /** @@ -153,7 +153,7 @@ public Catalog createCatalog( ErrorHandlers.catalogErrorHandler()); resp.validate(); - return DTOConverters.toCatalog(resp.getCatalog(), restClient); + return DTOConverters.toCatalog(this.name(), resp.getCatalog(), restClient); } /** @@ -185,7 +185,7 @@ public Catalog alterCatalog(String catalogName, CatalogChange... 
changes) ErrorHandlers.catalogErrorHandler()); resp.validate(); - return DTOConverters.toCatalog(resp.getCatalog(), restClient); + return DTOConverters.toCatalog(this.name(), resp.getCatalog(), restClient); } /** diff --git a/clients/client-java/src/main/java/com/datastrato/gravitino/client/MessagingCatalog.java b/clients/client-java/src/main/java/com/datastrato/gravitino/client/MessagingCatalog.java index cd8f83781a0..681d4319599 100644 --- a/clients/client-java/src/main/java/com/datastrato/gravitino/client/MessagingCatalog.java +++ b/clients/client-java/src/main/java/com/datastrato/gravitino/client/MessagingCatalog.java @@ -4,6 +4,7 @@ */ package com.datastrato.gravitino.client; +import com.datastrato.gravitino.Catalog; import com.datastrato.gravitino.NameIdentifier; import com.datastrato.gravitino.Namespace; import com.datastrato.gravitino.dto.AuditDTO; @@ -38,14 +39,15 @@ public class MessagingCatalog extends BaseSchemaCatalog implements TopicCatalog { MessagingCatalog( + Namespace namespace, String name, - Type type, + Catalog.Type type, String provider, String comment, Map properties, AuditDTO auditDTO, RESTClient restClient) { - super(name, type, provider, comment, properties, auditDTO, restClient); + super(namespace, name, type, provider, comment, properties, auditDTO, restClient); } /** @return A new builder for {@link MessagingCatalog}. */ @@ -205,8 +207,16 @@ static class Builder extends CatalogDTO.Builder { /** The REST client to send the requests. 
*/ private RESTClient restClient; + /** The namespace of the catalog */ + private Namespace namespace; + private Builder() {} + Builder withNamespace(Namespace namespace) { + this.namespace = namespace; + return this; + } + Builder withRestClient(RESTClient restClient) { this.restClient = restClient; return this; @@ -214,13 +224,15 @@ Builder withRestClient(RESTClient restClient) { @Override public MessagingCatalog build() { + Namespace.checkCatalog(namespace); Preconditions.checkArgument(StringUtils.isNotBlank(name), "name must not be blank"); Preconditions.checkArgument(type != null, "type must not be null"); Preconditions.checkArgument(StringUtils.isNotBlank(provider), "provider must not be blank"); Preconditions.checkArgument(audit != null, "audit must not be null"); Preconditions.checkArgument(restClient != null, "restClient must be set"); - return new MessagingCatalog(name, type, provider, comment, properties, audit, restClient); + return new MessagingCatalog( + namespace, name, type, provider, comment, properties, audit, restClient); } } } diff --git a/clients/client-java/src/main/java/com/datastrato/gravitino/client/RelationalCatalog.java b/clients/client-java/src/main/java/com/datastrato/gravitino/client/RelationalCatalog.java index 3f4c48d6aaf..2cd54fab6ad 100644 --- a/clients/client-java/src/main/java/com/datastrato/gravitino/client/RelationalCatalog.java +++ b/clients/client-java/src/main/java/com/datastrato/gravitino/client/RelationalCatalog.java @@ -7,6 +7,7 @@ import static com.datastrato.gravitino.dto.util.DTOConverters.toDTO; import static com.datastrato.gravitino.dto.util.DTOConverters.toDTOs; +import com.datastrato.gravitino.Catalog; import com.datastrato.gravitino.NameIdentifier; import com.datastrato.gravitino.Namespace; import com.datastrato.gravitino.dto.AuditDTO; @@ -51,14 +52,15 @@ public class RelationalCatalog extends BaseSchemaCatalog implements TableCatalog private static final Logger LOG = 
LoggerFactory.getLogger(RelationalCatalog.class); RelationalCatalog( + Namespace namespace, String name, - Type type, + Catalog.Type type, String provider, String comment, Map properties, AuditDTO auditDTO, RESTClient restClient) { - super(name, type, provider, comment, properties, auditDTO, restClient); + super(namespace, name, type, provider, comment, properties, auditDTO, restClient); } @Override @@ -277,9 +279,16 @@ public static Builder builder() { static class Builder extends CatalogDTO.Builder { /** The REST client to send the requests. */ private RESTClient restClient; + /** The namespace of the catalog */ + private Namespace namespace; protected Builder() {} + Builder withNamespace(Namespace namespace) { + this.namespace = namespace; + return this; + } + Builder withRestClient(RESTClient restClient) { this.restClient = restClient; return this; @@ -287,13 +296,15 @@ Builder withRestClient(RESTClient restClient) { @Override public RelationalCatalog build() { + Namespace.checkCatalog(namespace); Preconditions.checkArgument(restClient != null, "restClient must be set"); Preconditions.checkArgument(StringUtils.isNotBlank(name), "name must not be blank"); Preconditions.checkArgument(type != null, "type must not be null"); Preconditions.checkArgument(StringUtils.isNotBlank(provider), "provider must not be blank"); Preconditions.checkArgument(audit != null, "audit must not be null"); - return new RelationalCatalog(name, type, provider, comment, properties, audit, restClient); + return new RelationalCatalog( + namespace, name, type, provider, comment, properties, audit, restClient); } } } diff --git a/clients/client-java/src/test/java/com/datastrato/gravitino/client/TestRelationalCatalog.java b/clients/client-java/src/test/java/com/datastrato/gravitino/client/TestRelationalCatalog.java index e6e7f7f3746..83705ba5c75 100644 --- a/clients/client-java/src/test/java/com/datastrato/gravitino/client/TestRelationalCatalog.java +++ 
b/clients/client-java/src/test/java/com/datastrato/gravitino/client/TestRelationalCatalog.java @@ -18,11 +18,13 @@ import com.datastrato.gravitino.Catalog; import com.datastrato.gravitino.NameIdentifier; import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.Schema; +import com.datastrato.gravitino.SupportsSchemas; import com.datastrato.gravitino.dto.AuditDTO; import com.datastrato.gravitino.dto.CatalogDTO; +import com.datastrato.gravitino.dto.SchemaDTO; import com.datastrato.gravitino.dto.rel.ColumnDTO; import com.datastrato.gravitino.dto.rel.DistributionDTO; -import com.datastrato.gravitino.dto.rel.SchemaDTO; import com.datastrato.gravitino.dto.rel.SortOrderDTO; import com.datastrato.gravitino.dto.rel.TableDTO; import com.datastrato.gravitino.dto.rel.expressions.FieldReferenceDTO; @@ -55,8 +57,6 @@ import com.datastrato.gravitino.exceptions.SchemaAlreadyExistsException; import com.datastrato.gravitino.exceptions.TableAlreadyExistsException; import com.datastrato.gravitino.rel.Column; -import com.datastrato.gravitino.rel.Schema; -import com.datastrato.gravitino.rel.SupportsSchemas; import com.datastrato.gravitino.rel.Table; import com.datastrato.gravitino.rel.TableCatalog; import com.datastrato.gravitino.rel.TableChange; @@ -139,7 +139,7 @@ public void testListSchemas() throws JsonProcessingException { EntityListResponse resp = new EntityListResponse(new NameIdentifier[] {schema1, schema2}); buildMockResource(Method.GET, schemaPath, null, resp, SC_OK); - NameIdentifier[] schemas = catalog.asSchemas().listSchemas(schemaNs); + NameIdentifier[] schemas = catalog.asSchemas().listSchemas(); Assertions.assertEquals(2, schemas.length); Assertions.assertEquals(schema1, schemas[0]); @@ -148,7 +148,7 @@ public void testListSchemas() throws JsonProcessingException { // Test return empty schema list EntityListResponse emptyResp = new EntityListResponse(new NameIdentifier[] {}); buildMockResource(Method.GET, schemaPath, null, emptyResp, SC_OK); - 
NameIdentifier[] emptySchemas = catalog.asSchemas().listSchemas(schemaNs); + NameIdentifier[] emptySchemas = catalog.asSchemas().listSchemas(); Assertions.assertEquals(0, emptySchemas.length); // Test throw NoSuchCatalogException @@ -157,21 +157,20 @@ public void testListSchemas() throws JsonProcessingException { buildMockResource(Method.GET, schemaPath, null, errorResp, SC_NOT_FOUND); SupportsSchemas supportSchemas = catalog.asSchemas(); Throwable ex = - Assertions.assertThrows( - NoSuchCatalogException.class, () -> supportSchemas.listSchemas(schemaNs)); + Assertions.assertThrows(NoSuchCatalogException.class, () -> supportSchemas.listSchemas()); Assertions.assertTrue(ex.getMessage().contains("catalog not found")); // Test throw RuntimeException ErrorResponse errorResp1 = ErrorResponse.internalError("internal error"); buildMockResource(Method.GET, schemaPath, null, errorResp1, SC_INTERNAL_SERVER_ERROR); Throwable ex1 = - Assertions.assertThrows(RuntimeException.class, () -> supportSchemas.listSchemas(schemaNs)); + Assertions.assertThrows(RuntimeException.class, () -> supportSchemas.listSchemas()); Assertions.assertTrue(ex1.getMessage().contains("internal error")); // Test throw unparsed system error buildMockResource(Method.GET, schemaPath, null, "unparsed error", SC_BAD_REQUEST); Throwable ex2 = - Assertions.assertThrows(RESTException.class, () -> supportSchemas.listSchemas(schemaNs)); + Assertions.assertThrows(RESTException.class, () -> supportSchemas.listSchemas()); Assertions.assertTrue(ex2.getMessage().contains("unparsed error")); } @@ -186,7 +185,7 @@ public void testCreateSchema() throws JsonProcessingException { buildMockResource(Method.POST, schemaPath, req, resp, SC_OK); Schema createdSchema = - catalog.asSchemas().createSchema(schemaId, "comment", Collections.emptyMap()); + catalog.asSchemas().createSchema(schemaId.name(), "comment", Collections.emptyMap()); Assertions.assertEquals("schema1", createdSchema.name()); Assertions.assertEquals("comment", 
createdSchema.comment()); Assertions.assertEquals(Collections.emptyMap(), createdSchema.properties()); @@ -201,7 +200,7 @@ public void testCreateSchema() throws JsonProcessingException { Throwable ex = Assertions.assertThrows( NoSuchCatalogException.class, - () -> schemas.createSchema(schemaId, "comment", emptyMap)); + () -> schemas.createSchema(schemaId.name(), "comment", emptyMap)); Assertions.assertTrue(ex.getMessage().contains("catalog not found")); // Test throw SchemaAlreadyExistsException @@ -213,7 +212,7 @@ public void testCreateSchema() throws JsonProcessingException { Throwable ex1 = Assertions.assertThrows( SchemaAlreadyExistsException.class, - () -> schemas.createSchema(schemaId, "comment", emptyMap)); + () -> schemas.createSchema(schemaId.name(), "comment", emptyMap)); Assertions.assertTrue(ex1.getMessage().contains("schema already exists")); } @@ -230,7 +229,7 @@ public void testLoadSchema() throws JsonProcessingException { SchemaResponse resp = new SchemaResponse(schema); buildMockResource(Method.GET, schemaPath, null, resp, SC_OK); - Schema loadedSchema = catalog.asSchemas().loadSchema(schemaId); + Schema loadedSchema = catalog.asSchemas().loadSchema(schemaId.name()); Assertions.assertEquals("schema1", loadedSchema.name()); Assertions.assertEquals("comment", loadedSchema.comment()); Assertions.assertEquals(Collections.emptyMap(), loadedSchema.properties()); @@ -242,7 +241,8 @@ public void testLoadSchema() throws JsonProcessingException { SupportsSchemas schemas = catalog.asSchemas(); Throwable ex1 = - Assertions.assertThrows(NoSuchSchemaException.class, () -> schemas.loadSchema(schemaId)); + Assertions.assertThrows( + NoSuchSchemaException.class, () -> schemas.loadSchema(schemaId.name())); Assertions.assertTrue(ex1.getMessage().contains("schema not found")); } @@ -275,14 +275,14 @@ public void testDropSchema() throws JsonProcessingException { DropResponse resp = new DropResponse(true); buildMockResource(Method.DELETE, schemaPath, null, resp, SC_OK); 
- Assertions.assertTrue(catalog.asSchemas().dropSchema(ident, false)); + Assertions.assertTrue(catalog.asSchemas().dropSchema(ident.name(), false)); // Test with cascade to ture DropResponse resp1 = new DropResponse(true); buildMockResource( Method.DELETE, schemaPath, ImmutableMap.of("cascade", "true"), null, resp1, SC_OK); - Assertions.assertTrue(catalog.asSchemas().dropSchema(ident, true)); + Assertions.assertTrue(catalog.asSchemas().dropSchema(ident.name(), true)); // Test throw NonEmptySchemaException ErrorResponse errorResp = @@ -293,7 +293,7 @@ public void testDropSchema() throws JsonProcessingException { SupportsSchemas schemas = catalog.asSchemas(); Throwable ex = Assertions.assertThrows( - NonEmptySchemaException.class, () -> schemas.dropSchema(ident, true)); + NonEmptySchemaException.class, () -> schemas.dropSchema(ident.name(), true)); Assertions.assertTrue(ex.getMessage().contains("schema is not empty")); } @@ -1163,7 +1163,7 @@ private void testAlterSchema( SchemaResponse resp = new SchemaResponse(updatedSchema); buildMockResource(Method.PUT, schemaPath, updatesReq, resp, SC_OK); - Schema alteredSchema = catalog.asSchemas().alterSchema(ident, req.schemaChange()); + Schema alteredSchema = catalog.asSchemas().alterSchema(ident.name(), req.schemaChange()); Assertions.assertEquals(updatedSchema.name(), alteredSchema.name()); Assertions.assertEquals(updatedSchema.comment(), alteredSchema.comment()); Assertions.assertEquals(updatedSchema.properties(), alteredSchema.properties()); diff --git a/clients/client-java/src/test/java/com/datastrato/gravitino/client/TestRelationalTable.java b/clients/client-java/src/test/java/com/datastrato/gravitino/client/TestRelationalTable.java index d2c6f9b5320..55b0ed85b76 100644 --- a/clients/client-java/src/test/java/com/datastrato/gravitino/client/TestRelationalTable.java +++ b/clients/client-java/src/test/java/com/datastrato/gravitino/client/TestRelationalTable.java @@ -12,9 +12,9 @@ import static 
org.apache.http.HttpStatus.SC_NOT_IMPLEMENTED; import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.dto.SchemaDTO; import com.datastrato.gravitino.dto.rel.ColumnDTO; import com.datastrato.gravitino.dto.rel.DistributionDTO; -import com.datastrato.gravitino.dto.rel.SchemaDTO; import com.datastrato.gravitino.dto.rel.SortOrderDTO; import com.datastrato.gravitino.dto.rel.TableDTO; import com.datastrato.gravitino.dto.rel.expressions.LiteralDTO; @@ -72,7 +72,7 @@ public static void setUp() throws Exception { SchemaResponse resp = new SchemaResponse(mockedSchema); buildMockResource(Method.POST, schemaPath, req, resp, SC_OK); - catalog.asSchemas().createSchema(schemaId, "comment", Collections.emptyMap()); + catalog.asSchemas().createSchema(schemaId.name(), "comment", Collections.emptyMap()); // setup partitioned table NameIdentifier tableId = diff --git a/common/src/main/java/com/datastrato/gravitino/dto/CatalogDTO.java b/common/src/main/java/com/datastrato/gravitino/dto/CatalogDTO.java index 319ece39c78..5025a09f08b 100644 --- a/common/src/main/java/com/datastrato/gravitino/dto/CatalogDTO.java +++ b/common/src/main/java/com/datastrato/gravitino/dto/CatalogDTO.java @@ -54,7 +54,7 @@ protected CatalogDTO() {} */ protected CatalogDTO( String name, - Type type, + Catalog.Type type, String provider, String comment, Map properties, diff --git a/common/src/main/java/com/datastrato/gravitino/dto/rel/SchemaDTO.java b/common/src/main/java/com/datastrato/gravitino/dto/SchemaDTO.java similarity index 96% rename from common/src/main/java/com/datastrato/gravitino/dto/rel/SchemaDTO.java rename to common/src/main/java/com/datastrato/gravitino/dto/SchemaDTO.java index 45f91e02fe0..c07a9abdbf1 100644 --- a/common/src/main/java/com/datastrato/gravitino/dto/rel/SchemaDTO.java +++ b/common/src/main/java/com/datastrato/gravitino/dto/SchemaDTO.java @@ -2,10 +2,9 @@ * Copyright 2023 Datastrato Pvt Ltd. * This software is licensed under the Apache License version 2. 
*/ -package com.datastrato.gravitino.dto.rel; +package com.datastrato.gravitino.dto; -import com.datastrato.gravitino.dto.AuditDTO; -import com.datastrato.gravitino.rel.Schema; +import com.datastrato.gravitino.Schema; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; import java.util.Map; diff --git a/common/src/main/java/com/datastrato/gravitino/dto/requests/SchemaUpdateRequest.java b/common/src/main/java/com/datastrato/gravitino/dto/requests/SchemaUpdateRequest.java index e156350d071..e7da41182fe 100644 --- a/common/src/main/java/com/datastrato/gravitino/dto/requests/SchemaUpdateRequest.java +++ b/common/src/main/java/com/datastrato/gravitino/dto/requests/SchemaUpdateRequest.java @@ -4,7 +4,7 @@ */ package com.datastrato.gravitino.dto.requests; -import com.datastrato.gravitino.rel.SchemaChange; +import com.datastrato.gravitino.SchemaChange; import com.datastrato.gravitino.rest.RESTRequest; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; diff --git a/common/src/main/java/com/datastrato/gravitino/dto/responses/SchemaResponse.java b/common/src/main/java/com/datastrato/gravitino/dto/responses/SchemaResponse.java index e4819623aad..444f5966540 100644 --- a/common/src/main/java/com/datastrato/gravitino/dto/responses/SchemaResponse.java +++ b/common/src/main/java/com/datastrato/gravitino/dto/responses/SchemaResponse.java @@ -4,7 +4,7 @@ */ package com.datastrato.gravitino.dto.responses; -import com.datastrato.gravitino.dto.rel.SchemaDTO; +import com.datastrato.gravitino.dto.SchemaDTO; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; import lombok.EqualsAndHashCode; diff --git a/common/src/main/java/com/datastrato/gravitino/dto/util/DTOConverters.java b/common/src/main/java/com/datastrato/gravitino/dto/util/DTOConverters.java index 55da063d54a..c140112d59d 100644 --- 
a/common/src/main/java/com/datastrato/gravitino/dto/util/DTOConverters.java +++ b/common/src/main/java/com/datastrato/gravitino/dto/util/DTOConverters.java @@ -9,6 +9,7 @@ import com.datastrato.gravitino.Audit; import com.datastrato.gravitino.Catalog; import com.datastrato.gravitino.Metalake; +import com.datastrato.gravitino.Schema; import com.datastrato.gravitino.authorization.Group; import com.datastrato.gravitino.authorization.Privilege; import com.datastrato.gravitino.authorization.Role; @@ -17,6 +18,7 @@ import com.datastrato.gravitino.dto.AuditDTO; import com.datastrato.gravitino.dto.CatalogDTO; import com.datastrato.gravitino.dto.MetalakeDTO; +import com.datastrato.gravitino.dto.SchemaDTO; import com.datastrato.gravitino.dto.authorization.GroupDTO; import com.datastrato.gravitino.dto.authorization.PrivilegeDTO; import com.datastrato.gravitino.dto.authorization.RoleDTO; @@ -26,7 +28,6 @@ import com.datastrato.gravitino.dto.messaging.TopicDTO; import com.datastrato.gravitino.dto.rel.ColumnDTO; import com.datastrato.gravitino.dto.rel.DistributionDTO; -import com.datastrato.gravitino.dto.rel.SchemaDTO; import com.datastrato.gravitino.dto.rel.SortOrderDTO; import com.datastrato.gravitino.dto.rel.TableDTO; import com.datastrato.gravitino.dto.rel.expressions.FieldReferenceDTO; @@ -53,7 +54,6 @@ import com.datastrato.gravitino.file.Fileset; import com.datastrato.gravitino.messaging.Topic; import com.datastrato.gravitino.rel.Column; -import com.datastrato.gravitino.rel.Schema; import com.datastrato.gravitino.rel.Table; import com.datastrato.gravitino.rel.expressions.Expression; import com.datastrato.gravitino.rel.expressions.FunctionExpression; diff --git a/common/src/test/java/com/datastrato/gravitino/dto/responses/TestResponses.java b/common/src/test/java/com/datastrato/gravitino/dto/responses/TestResponses.java index 80c9a5addcc..16fb335a352 100644 --- a/common/src/test/java/com/datastrato/gravitino/dto/responses/TestResponses.java +++ 
b/common/src/test/java/com/datastrato/gravitino/dto/responses/TestResponses.java @@ -17,12 +17,12 @@ import com.datastrato.gravitino.dto.AuditDTO; import com.datastrato.gravitino.dto.CatalogDTO; import com.datastrato.gravitino.dto.MetalakeDTO; +import com.datastrato.gravitino.dto.SchemaDTO; import com.datastrato.gravitino.dto.authorization.GroupDTO; import com.datastrato.gravitino.dto.authorization.RoleDTO; import com.datastrato.gravitino.dto.authorization.SecurableObjectDTO; import com.datastrato.gravitino.dto.authorization.UserDTO; import com.datastrato.gravitino.dto.rel.ColumnDTO; -import com.datastrato.gravitino.dto.rel.SchemaDTO; import com.datastrato.gravitino.dto.rel.TableDTO; import com.datastrato.gravitino.dto.rel.partitioning.Partitioning; import com.datastrato.gravitino.dto.util.DTOConverters; diff --git a/core/src/main/java/com/datastrato/gravitino/catalog/CatalogManager.java b/core/src/main/java/com/datastrato/gravitino/catalog/CatalogManager.java index 71c33f3dd85..239d31ced8c 100644 --- a/core/src/main/java/com/datastrato/gravitino/catalog/CatalogManager.java +++ b/core/src/main/java/com/datastrato/gravitino/catalog/CatalogManager.java @@ -27,6 +27,7 @@ import com.datastrato.gravitino.connector.HasPropertyMetadata; import com.datastrato.gravitino.connector.PropertiesMetadata; import com.datastrato.gravitino.connector.PropertyEntry; +import com.datastrato.gravitino.connector.SupportsSchemas; import com.datastrato.gravitino.connector.capability.Capability; import com.datastrato.gravitino.exceptions.CatalogAlreadyExistsException; import com.datastrato.gravitino.exceptions.NoSuchCatalogException; @@ -38,7 +39,6 @@ import com.datastrato.gravitino.meta.CatalogEntity; import com.datastrato.gravitino.meta.SchemaEntity; import com.datastrato.gravitino.rel.SupportsPartitions; -import com.datastrato.gravitino.rel.SupportsSchemas; import com.datastrato.gravitino.rel.Table; import com.datastrato.gravitino.rel.TableCatalog; import 
com.datastrato.gravitino.storage.IdGenerator; diff --git a/core/src/main/java/com/datastrato/gravitino/catalog/EntityCombinedSchema.java b/core/src/main/java/com/datastrato/gravitino/catalog/EntityCombinedSchema.java index bbe19ac39ea..02e45e6eb81 100644 --- a/core/src/main/java/com/datastrato/gravitino/catalog/EntityCombinedSchema.java +++ b/core/src/main/java/com/datastrato/gravitino/catalog/EntityCombinedSchema.java @@ -5,9 +5,9 @@ package com.datastrato.gravitino.catalog; import com.datastrato.gravitino.Audit; +import com.datastrato.gravitino.Schema; import com.datastrato.gravitino.meta.AuditInfo; import com.datastrato.gravitino.meta.SchemaEntity; -import com.datastrato.gravitino.rel.Schema; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; diff --git a/core/src/main/java/com/datastrato/gravitino/catalog/OperationDispatcher.java b/core/src/main/java/com/datastrato/gravitino/catalog/OperationDispatcher.java index 90cd18225f8..a24a258d398 100644 --- a/core/src/main/java/com/datastrato/gravitino/catalog/OperationDispatcher.java +++ b/core/src/main/java/com/datastrato/gravitino/catalog/OperationDispatcher.java @@ -10,6 +10,7 @@ import com.datastrato.gravitino.HasIdentifier; import com.datastrato.gravitino.NameIdentifier; import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.SchemaChange; import com.datastrato.gravitino.StringIdentifier; import com.datastrato.gravitino.connector.HasPropertyMetadata; import com.datastrato.gravitino.connector.PropertiesMetadata; @@ -18,7 +19,6 @@ import com.datastrato.gravitino.exceptions.NoSuchEntityException; import com.datastrato.gravitino.file.FilesetChange; import com.datastrato.gravitino.messaging.TopicChange; -import com.datastrato.gravitino.rel.SchemaChange; import com.datastrato.gravitino.rel.SupportsPartitions; import com.datastrato.gravitino.rel.TableChange; import com.datastrato.gravitino.storage.IdGenerator; diff --git 
a/core/src/main/java/com/datastrato/gravitino/catalog/SchemaDispatcher.java b/core/src/main/java/com/datastrato/gravitino/catalog/SchemaDispatcher.java index 0667da3df91..e61838e9556 100644 --- a/core/src/main/java/com/datastrato/gravitino/catalog/SchemaDispatcher.java +++ b/core/src/main/java/com/datastrato/gravitino/catalog/SchemaDispatcher.java @@ -5,7 +5,7 @@ package com.datastrato.gravitino.catalog; -import com.datastrato.gravitino.rel.SupportsSchemas; +import com.datastrato.gravitino.connector.SupportsSchemas; /** * {@code SchemaDispatcher} interface acts as a specialization of the {@link SupportsSchemas} diff --git a/core/src/main/java/com/datastrato/gravitino/catalog/SchemaNormalizeDispatcher.java b/core/src/main/java/com/datastrato/gravitino/catalog/SchemaNormalizeDispatcher.java index e1f9e613ebc..9c004989e80 100644 --- a/core/src/main/java/com/datastrato/gravitino/catalog/SchemaNormalizeDispatcher.java +++ b/core/src/main/java/com/datastrato/gravitino/catalog/SchemaNormalizeDispatcher.java @@ -9,13 +9,13 @@ import com.datastrato.gravitino.NameIdentifier; import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.Schema; +import com.datastrato.gravitino.SchemaChange; import com.datastrato.gravitino.connector.capability.Capability; import com.datastrato.gravitino.exceptions.NoSuchCatalogException; import com.datastrato.gravitino.exceptions.NoSuchSchemaException; import com.datastrato.gravitino.exceptions.NonEmptySchemaException; import com.datastrato.gravitino.exceptions.SchemaAlreadyExistsException; -import com.datastrato.gravitino.rel.Schema; -import com.datastrato.gravitino.rel.SchemaChange; import java.util.Map; public class SchemaNormalizeDispatcher implements SchemaDispatcher { diff --git a/core/src/main/java/com/datastrato/gravitino/catalog/SchemaOperationDispatcher.java b/core/src/main/java/com/datastrato/gravitino/catalog/SchemaOperationDispatcher.java index 57c75681e0e..1575ded111d 100644 --- 
a/core/src/main/java/com/datastrato/gravitino/catalog/SchemaOperationDispatcher.java +++ b/core/src/main/java/com/datastrato/gravitino/catalog/SchemaOperationDispatcher.java @@ -10,6 +10,8 @@ import com.datastrato.gravitino.EntityStore; import com.datastrato.gravitino.NameIdentifier; import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.Schema; +import com.datastrato.gravitino.SchemaChange; import com.datastrato.gravitino.StringIdentifier; import com.datastrato.gravitino.connector.HasPropertyMetadata; import com.datastrato.gravitino.connector.capability.Capability; @@ -20,8 +22,6 @@ import com.datastrato.gravitino.exceptions.SchemaAlreadyExistsException; import com.datastrato.gravitino.meta.AuditInfo; import com.datastrato.gravitino.meta.SchemaEntity; -import com.datastrato.gravitino.rel.Schema; -import com.datastrato.gravitino.rel.SchemaChange; import com.datastrato.gravitino.storage.IdGenerator; import com.datastrato.gravitino.utils.PrincipalUtils; import java.time.Instant; diff --git a/core/src/main/java/com/datastrato/gravitino/connector/BaseSchema.java b/core/src/main/java/com/datastrato/gravitino/connector/BaseSchema.java index d340885a8ae..8b9e8acff8a 100644 --- a/core/src/main/java/com/datastrato/gravitino/connector/BaseSchema.java +++ b/core/src/main/java/com/datastrato/gravitino/connector/BaseSchema.java @@ -4,9 +4,9 @@ */ package com.datastrato.gravitino.connector; +import com.datastrato.gravitino.Schema; import com.datastrato.gravitino.annotation.Evolving; import com.datastrato.gravitino.meta.AuditInfo; -import com.datastrato.gravitino.rel.Schema; import java.util.Map; import javax.annotation.Nullable; import lombok.ToString; diff --git a/core/src/main/java/com/datastrato/gravitino/connector/CatalogOperations.java b/core/src/main/java/com/datastrato/gravitino/connector/CatalogOperations.java index ace036bd2ea..2e357cac436 100644 --- a/core/src/main/java/com/datastrato/gravitino/connector/CatalogOperations.java +++ 
b/core/src/main/java/com/datastrato/gravitino/connector/CatalogOperations.java @@ -4,8 +4,8 @@ */ package com.datastrato.gravitino.connector; +import com.datastrato.gravitino.SupportsSchemas; import com.datastrato.gravitino.annotation.Evolving; -import com.datastrato.gravitino.rel.SupportsSchemas; import com.datastrato.gravitino.rel.TableCatalog; import java.io.Closeable; import java.util.Map; diff --git a/api/src/main/java/com/datastrato/gravitino/rel/SupportsSchemas.java b/core/src/main/java/com/datastrato/gravitino/connector/SupportsSchemas.java similarity index 97% rename from api/src/main/java/com/datastrato/gravitino/rel/SupportsSchemas.java rename to core/src/main/java/com/datastrato/gravitino/connector/SupportsSchemas.java index 2c47c304114..5c4dc1e149e 100644 --- a/api/src/main/java/com/datastrato/gravitino/rel/SupportsSchemas.java +++ b/core/src/main/java/com/datastrato/gravitino/connector/SupportsSchemas.java @@ -18,10 +18,12 @@ // Referred from Apache Spark's connector/catalog implementation // sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/SupportNamespaces.java -package com.datastrato.gravitino.rel; +package com.datastrato.gravitino.connector; import com.datastrato.gravitino.NameIdentifier; import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.Schema; +import com.datastrato.gravitino.SchemaChange; import com.datastrato.gravitino.annotation.Evolving; import com.datastrato.gravitino.exceptions.NoSuchCatalogException; import com.datastrato.gravitino.exceptions.NoSuchSchemaException; diff --git a/core/src/main/java/com/datastrato/gravitino/listener/SchemaEventDispatcher.java b/core/src/main/java/com/datastrato/gravitino/listener/SchemaEventDispatcher.java index f02f6f516f4..50c6df4ea18 100644 --- a/core/src/main/java/com/datastrato/gravitino/listener/SchemaEventDispatcher.java +++ b/core/src/main/java/com/datastrato/gravitino/listener/SchemaEventDispatcher.java @@ -7,6 +7,8 @@ import 
com.datastrato.gravitino.NameIdentifier; import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.Schema; +import com.datastrato.gravitino.SchemaChange; import com.datastrato.gravitino.catalog.SchemaDispatcher; import com.datastrato.gravitino.catalog.SchemaOperationDispatcher; import com.datastrato.gravitino.exceptions.NoSuchCatalogException; @@ -24,8 +26,6 @@ import com.datastrato.gravitino.listener.api.event.LoadSchemaEvent; import com.datastrato.gravitino.listener.api.event.LoadSchemaFailureEvent; import com.datastrato.gravitino.listener.api.info.SchemaInfo; -import com.datastrato.gravitino.rel.Schema; -import com.datastrato.gravitino.rel.SchemaChange; import com.datastrato.gravitino.utils.PrincipalUtils; import java.util.Map; diff --git a/core/src/main/java/com/datastrato/gravitino/listener/api/event/AlterSchemaEvent.java b/core/src/main/java/com/datastrato/gravitino/listener/api/event/AlterSchemaEvent.java index de32ec82067..5e180ad3c98 100644 --- a/core/src/main/java/com/datastrato/gravitino/listener/api/event/AlterSchemaEvent.java +++ b/core/src/main/java/com/datastrato/gravitino/listener/api/event/AlterSchemaEvent.java @@ -6,9 +6,9 @@ package com.datastrato.gravitino.listener.api.event; import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.SchemaChange; import com.datastrato.gravitino.annotation.DeveloperApi; import com.datastrato.gravitino.listener.api.info.SchemaInfo; -import com.datastrato.gravitino.rel.SchemaChange; /** Represents an event fired when a schema is successfully altered. 
*/ @DeveloperApi diff --git a/core/src/main/java/com/datastrato/gravitino/listener/api/event/AlterSchemaFailureEvent.java b/core/src/main/java/com/datastrato/gravitino/listener/api/event/AlterSchemaFailureEvent.java index a5b0643e9ef..d55d5c89d88 100644 --- a/core/src/main/java/com/datastrato/gravitino/listener/api/event/AlterSchemaFailureEvent.java +++ b/core/src/main/java/com/datastrato/gravitino/listener/api/event/AlterSchemaFailureEvent.java @@ -6,8 +6,8 @@ package com.datastrato.gravitino.listener.api.event; import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.SchemaChange; import com.datastrato.gravitino.annotation.DeveloperApi; -import com.datastrato.gravitino.rel.SchemaChange; /** * Represents an event that is triggered when an attempt to alter a schema fails due to an diff --git a/core/src/main/java/com/datastrato/gravitino/listener/api/info/SchemaInfo.java b/core/src/main/java/com/datastrato/gravitino/listener/api/info/SchemaInfo.java index b16192f22ce..7d4cd8ccbdb 100644 --- a/core/src/main/java/com/datastrato/gravitino/listener/api/info/SchemaInfo.java +++ b/core/src/main/java/com/datastrato/gravitino/listener/api/info/SchemaInfo.java @@ -6,8 +6,8 @@ package com.datastrato.gravitino.listener.api.info; import com.datastrato.gravitino.Audit; +import com.datastrato.gravitino.Schema; import com.datastrato.gravitino.annotation.DeveloperApi; -import com.datastrato.gravitino.rel.Schema; import com.google.common.collect.ImmutableMap; import java.util.Map; import javax.annotation.Nullable; diff --git a/core/src/test/java/com/datastrato/gravitino/TestCatalog.java b/core/src/test/java/com/datastrato/gravitino/TestCatalog.java index bbe4b449a68..9fbb06cd2f8 100644 --- a/core/src/test/java/com/datastrato/gravitino/TestCatalog.java +++ b/core/src/test/java/com/datastrato/gravitino/TestCatalog.java @@ -4,10 +4,11 @@ */ package com.datastrato.gravitino; -import static com.datastrato.gravitino.TestCatalogOperations.FAIL_CREATE; +import static 
com.datastrato.gravitino.connector.TestCatalogOperations.FAIL_CREATE; import com.datastrato.gravitino.connector.BaseCatalog; import com.datastrato.gravitino.connector.CatalogOperations; +import com.datastrato.gravitino.connector.TestCatalogOperations; import com.datastrato.gravitino.connector.capability.Capability; import com.datastrato.gravitino.rel.TableCatalog; import java.util.Map; diff --git a/core/src/test/java/com/datastrato/gravitino/catalog/TestBaseCatalog.java b/core/src/test/java/com/datastrato/gravitino/catalog/TestBaseCatalog.java index bd79f928c23..2f6eef48e73 100644 --- a/core/src/test/java/com/datastrato/gravitino/catalog/TestBaseCatalog.java +++ b/core/src/test/java/com/datastrato/gravitino/catalog/TestBaseCatalog.java @@ -6,9 +6,9 @@ package com.datastrato.gravitino.catalog; import com.datastrato.gravitino.TestCatalog; -import com.datastrato.gravitino.TestCatalogOperations; import com.datastrato.gravitino.connector.BaseCatalog; import com.datastrato.gravitino.connector.CatalogOperations; +import com.datastrato.gravitino.connector.TestCatalogOperations; import com.datastrato.gravitino.meta.CatalogEntity; import com.google.common.collect.ImmutableMap; import org.junit.jupiter.api.Assertions; diff --git a/core/src/test/java/com/datastrato/gravitino/catalog/TestSchemaNormalizeDispatcher.java b/core/src/test/java/com/datastrato/gravitino/catalog/TestSchemaNormalizeDispatcher.java index 9afba6906dc..44a8c21f3ad 100644 --- a/core/src/test/java/com/datastrato/gravitino/catalog/TestSchemaNormalizeDispatcher.java +++ b/core/src/test/java/com/datastrato/gravitino/catalog/TestSchemaNormalizeDispatcher.java @@ -8,8 +8,8 @@ import com.datastrato.gravitino.NameIdentifier; import com.datastrato.gravitino.Namespace; -import com.datastrato.gravitino.rel.Schema; -import com.datastrato.gravitino.rel.SchemaChange; +import com.datastrato.gravitino.Schema; +import com.datastrato.gravitino.SchemaChange; import com.google.common.collect.ImmutableMap; import 
java.io.IOException; import java.util.Arrays; diff --git a/core/src/test/java/com/datastrato/gravitino/catalog/TestSchemaOperationDispatcher.java b/core/src/test/java/com/datastrato/gravitino/catalog/TestSchemaOperationDispatcher.java index 6b830e8a269..adb8fc6c3d2 100644 --- a/core/src/test/java/com/datastrato/gravitino/catalog/TestSchemaOperationDispatcher.java +++ b/core/src/test/java/com/datastrato/gravitino/catalog/TestSchemaOperationDispatcher.java @@ -15,12 +15,12 @@ import com.datastrato.gravitino.NameIdentifier; import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.Schema; +import com.datastrato.gravitino.SchemaChange; import com.datastrato.gravitino.auth.AuthConstants; import com.datastrato.gravitino.exceptions.NoSuchEntityException; import com.datastrato.gravitino.meta.AuditInfo; import com.datastrato.gravitino.meta.SchemaEntity; -import com.datastrato.gravitino.rel.Schema; -import com.datastrato.gravitino.rel.SchemaChange; import com.google.common.collect.ImmutableMap; import java.io.IOException; import java.time.Instant; diff --git a/core/src/test/java/com/datastrato/gravitino/TestCatalogOperations.java b/core/src/test/java/com/datastrato/gravitino/connector/TestCatalogOperations.java similarity index 97% rename from core/src/test/java/com/datastrato/gravitino/TestCatalogOperations.java rename to core/src/test/java/com/datastrato/gravitino/connector/TestCatalogOperations.java index 47b3dccc9fd..383e76e53ae 100644 --- a/core/src/test/java/com/datastrato/gravitino/TestCatalogOperations.java +++ b/core/src/test/java/com/datastrato/gravitino/connector/TestCatalogOperations.java @@ -2,13 +2,18 @@ * Copyright 2023 Datastrato Pvt Ltd. * This software is licensed under the Apache License version 2. 
*/ -package com.datastrato.gravitino; - -import com.datastrato.gravitino.connector.BasePropertiesMetadata; -import com.datastrato.gravitino.connector.CatalogInfo; -import com.datastrato.gravitino.connector.CatalogOperations; -import com.datastrato.gravitino.connector.PropertiesMetadata; -import com.datastrato.gravitino.connector.PropertyEntry; +package com.datastrato.gravitino.connector; + +import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.Schema; +import com.datastrato.gravitino.SchemaChange; +import com.datastrato.gravitino.TestBasePropertiesMetadata; +import com.datastrato.gravitino.TestFileset; +import com.datastrato.gravitino.TestFilesetPropertiesMetadata; +import com.datastrato.gravitino.TestSchema; +import com.datastrato.gravitino.TestTable; +import com.datastrato.gravitino.TestTopic; import com.datastrato.gravitino.exceptions.FilesetAlreadyExistsException; import com.datastrato.gravitino.exceptions.NoSuchCatalogException; import com.datastrato.gravitino.exceptions.NoSuchFilesetException; @@ -28,9 +33,6 @@ import com.datastrato.gravitino.messaging.TopicChange; import com.datastrato.gravitino.meta.AuditInfo; import com.datastrato.gravitino.rel.Column; -import com.datastrato.gravitino.rel.Schema; -import com.datastrato.gravitino.rel.SchemaChange; -import com.datastrato.gravitino.rel.SupportsSchemas; import com.datastrato.gravitino.rel.Table; import com.datastrato.gravitino.rel.TableCatalog; import com.datastrato.gravitino.rel.TableChange; diff --git a/core/src/test/java/com/datastrato/gravitino/listener/api/event/TestSchemaEvent.java b/core/src/test/java/com/datastrato/gravitino/listener/api/event/TestSchemaEvent.java index da99d5d75b3..c70f827cac5 100644 --- a/core/src/test/java/com/datastrato/gravitino/listener/api/event/TestSchemaEvent.java +++ b/core/src/test/java/com/datastrato/gravitino/listener/api/event/TestSchemaEvent.java @@ -12,14 +12,14 @@ import 
com.datastrato.gravitino.NameIdentifier; import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.Schema; +import com.datastrato.gravitino.SchemaChange; import com.datastrato.gravitino.catalog.SchemaDispatcher; import com.datastrato.gravitino.exceptions.GravitinoRuntimeException; import com.datastrato.gravitino.listener.DummyEventListener; import com.datastrato.gravitino.listener.EventBus; import com.datastrato.gravitino.listener.SchemaEventDispatcher; import com.datastrato.gravitino.listener.api.info.SchemaInfo; -import com.datastrato.gravitino.rel.Schema; -import com.datastrato.gravitino.rel.SchemaChange; import com.google.common.collect.ImmutableMap; import java.util.Arrays; import java.util.Map; diff --git a/integration-test/src/test/java/com/datastrato/gravitino/integration/test/client/filesystem/hadoop/GravitinoVirtualFileSystemIT.java b/integration-test/src/test/java/com/datastrato/gravitino/integration/test/client/filesystem/hadoop/GravitinoVirtualFileSystemIT.java index d3c5c246e32..5d6735c4fe6 100644 --- a/integration-test/src/test/java/com/datastrato/gravitino/integration/test/client/filesystem/hadoop/GravitinoVirtualFileSystemIT.java +++ b/integration-test/src/test/java/com/datastrato/gravitino/integration/test/client/filesystem/hadoop/GravitinoVirtualFileSystemIT.java @@ -62,9 +62,8 @@ public static void startUp() { catalogName, Catalog.Type.FILESET, "hadoop", "catalog comment", properties); Assertions.assertTrue(metalake.catalogExists(catalogName)); - NameIdentifier schemaIdent = NameIdentifier.of(metalakeName, catalogName, schemaName); - catalog.asSchemas().createSchema(schemaIdent, "schema comment", properties); - Assertions.assertTrue(catalog.asSchemas().schemaExists(schemaIdent)); + catalog.asSchemas().createSchema(schemaName, "schema comment", properties); + Assertions.assertTrue(catalog.asSchemas().schemaExists(schemaName)); conf.set( "fs.gvfs.impl", "com.datastrato.gravitino.filesystem.hadoop.GravitinoVirtualFileSystem"); 
diff --git a/integration-test/src/test/java/com/datastrato/gravitino/integration/test/trino/TrinoConnectorIT.java b/integration-test/src/test/java/com/datastrato/gravitino/integration/test/trino/TrinoConnectorIT.java index bc130b9b319..2574e6c7a54 100644 --- a/integration-test/src/test/java/com/datastrato/gravitino/integration/test/trino/TrinoConnectorIT.java +++ b/integration-test/src/test/java/com/datastrato/gravitino/integration/test/trino/TrinoConnectorIT.java @@ -8,6 +8,7 @@ import com.datastrato.gravitino.Catalog; import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.Schema; import com.datastrato.gravitino.client.GravitinoMetalake; import com.datastrato.gravitino.integration.test.container.ContainerSuite; import com.datastrato.gravitino.integration.test.container.HiveContainer; @@ -17,7 +18,6 @@ import com.datastrato.gravitino.integration.test.util.ITUtils; import com.datastrato.gravitino.integration.test.util.JdbcDriverDownloader; import com.datastrato.gravitino.rel.Column; -import com.datastrato.gravitino.rel.Schema; import com.datastrato.gravitino.rel.Table; import com.datastrato.gravitino.rel.expressions.NamedReference; import com.datastrato.gravitino.rel.expressions.distributions.Distribution; @@ -145,8 +145,7 @@ public static void createSchema() throws TException, InterruptedException { HiveContainer.HDFS_DEFAULTFS_PORT, databaseName); containerSuite.getTrinoContainer().executeUpdateSQL(sql1); - NameIdentifier idSchema = NameIdentifier.of(metalakeName, catalogName, databaseName); - Schema schema = catalog.asSchemas().loadSchema(idSchema); + Schema schema = catalog.asSchemas().loadSchema(databaseName); Assertions.assertEquals(schema.name(), databaseName); ArrayList> r = @@ -377,8 +376,7 @@ void testHiveSchemaCreatedByTrino() { catalogName, schemaName); containerSuite.getTrinoContainer().executeUpdateSQL(createSchemaSql); - Schema schema = - catalog.asSchemas().loadSchema(NameIdentifier.of(metalakeName, catalogName, 
schemaName)); + Schema schema = catalog.asSchemas().loadSchema(schemaName); Assertions.assertEquals( "hdfs://localhost:9000/user/hive/warehouse/hive_schema_1123123", schema.properties().get("location")); @@ -460,7 +458,7 @@ void testHiveSchemaCreatedByGravitino() { catalog .asSchemas() .createSchema( - NameIdentifier.of(metalakeName, catalogName, schemaName), + schemaName, "Created by gravitino client", ImmutableMap.builder() .put("location", "hdfs://localhost:9000/user/hive/warehouse/hive_schema_1223445.db") @@ -703,7 +701,7 @@ void testHiveTableCreatedByGravitino() { catalog .asSchemas() .createSchema( - NameIdentifier.of(metalakeName, catalogName, schemaName), + schemaName, "Created by gravitino client", ImmutableMap.builder().build()); @@ -925,7 +923,7 @@ void testIcebergTableAndSchemaCreatedByGravitino() { catalog .asSchemas() .createSchema( - NameIdentifier.of(metalakeName, catalogName, schemaName), + schemaName, "Created by gravitino client", ImmutableMap.builder().build()); @@ -1060,7 +1058,7 @@ void testIcebergCatalogCreatedByGravitino() { catalog .asSchemas() .createSchema( - NameIdentifier.of(metalakeName, catalogName, schemaName), + schemaName, "Created by gravitino client", ImmutableMap.builder().build()); @@ -1083,10 +1081,7 @@ void testIcebergCatalogCreatedByGravitino() { } // Do not support the cascade drop - success = - catalog - .asSchemas() - .dropSchema(NameIdentifier.of(metalakeName, catalogName, schemaName), true); + success = catalog.asSchemas().dropSchema(schemaName, true); Assertions.assertFalse(success); final String sql3 = String.format("show schemas in %s like '%s'", catalogName, schemaName); success = checkTrinoHasLoaded(sql3, 30); @@ -1181,10 +1176,7 @@ void testMySQLTableCreatedByGravitino() { Schema schema = catalog .asSchemas() - .createSchema( - NameIdentifier.of(metalakeName, catalogName, schemaName), - null, - ImmutableMap.builder().build()); + .createSchema(schemaName, null, ImmutableMap.builder().build()); 
Assertions.assertNotNull(schema); diff --git a/integration-test/src/test/java/com/datastrato/gravitino/integration/test/trino/TrinoQueryITBase.java b/integration-test/src/test/java/com/datastrato/gravitino/integration/test/trino/TrinoQueryITBase.java index 282b3d595c1..47bcd6c09b0 100644 --- a/integration-test/src/test/java/com/datastrato/gravitino/integration/test/trino/TrinoQueryITBase.java +++ b/integration-test/src/test/java/com/datastrato/gravitino/integration/test/trino/TrinoQueryITBase.java @@ -9,13 +9,13 @@ import com.datastrato.gravitino.Catalog; import com.datastrato.gravitino.NameIdentifier; import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.SupportsSchemas; import com.datastrato.gravitino.client.GravitinoAdminClient; import com.datastrato.gravitino.client.GravitinoMetalake; import com.datastrato.gravitino.exceptions.RESTException; import com.datastrato.gravitino.integration.test.container.ContainerSuite; import com.datastrato.gravitino.integration.test.container.TrinoITContainers; import com.datastrato.gravitino.integration.test.util.AbstractIT; -import com.datastrato.gravitino.rel.SupportsSchemas; import com.datastrato.gravitino.rel.TableCatalog; import java.io.File; import java.io.IOException; @@ -184,7 +184,7 @@ protected static void dropCatalog(String catalogName) { } Catalog catalog = metalake.loadCatalog(catalogName); SupportsSchemas schemas = catalog.asSchemas(); - Arrays.stream(schemas.listSchemas(Namespace.ofSchema(metalakeName, catalogName))) + Arrays.stream(schemas.listSchemas()) .filter(schema -> schema.name().startsWith("gt_")) .forEach( schema -> { @@ -210,8 +210,7 @@ protected static void dropCatalog(String catalogName) { } }); - schemas.dropSchema( - NameIdentifier.ofSchema(metalakeName, catalogName, schema.name()), false); + schemas.dropSchema(schema.name(), false); } catch (Exception e) { LOG.error("Failed to drop schema {}", schema); } diff --git 
a/integration-test/src/test/java/com/datastrato/gravitino/integration/test/web/ui/CatalogsPageDorisTest.java b/integration-test/src/test/java/com/datastrato/gravitino/integration/test/web/ui/CatalogsPageDorisTest.java index 2e47f35f3f4..edaa286df4d 100644 --- a/integration-test/src/test/java/com/datastrato/gravitino/integration/test/web/ui/CatalogsPageDorisTest.java +++ b/integration-test/src/test/java/com/datastrato/gravitino/integration/test/web/ui/CatalogsPageDorisTest.java @@ -103,10 +103,7 @@ void createSchema(String metalakeName, String catalogName, String schemaName) { Map properties = Maps.newHashMap(); properties.put(PROPERTIES_KEY1, PROPERTIES_VALUE1); Catalog catalog_doris = metalake.loadCatalog(catalogName); - catalog_doris - .asSchemas() - .createSchema( - NameIdentifier.of(metalakeName, catalogName, schemaName), "comment", properties); + catalog_doris.asSchemas().createSchema(schemaName, "comment", properties); } /** diff --git a/integration-test/src/test/java/com/datastrato/gravitino/integration/test/web/ui/CatalogsPageTest.java b/integration-test/src/test/java/com/datastrato/gravitino/integration/test/web/ui/CatalogsPageTest.java index 5380ebf69bb..7c377344768 100644 --- a/integration-test/src/test/java/com/datastrato/gravitino/integration/test/web/ui/CatalogsPageTest.java +++ b/integration-test/src/test/java/com/datastrato/gravitino/integration/test/web/ui/CatalogsPageTest.java @@ -50,9 +50,6 @@ public class CatalogsPageTest extends AbstractWebIT { protected static TrinoITContainers trinoITContainers; protected static GravitinoAdminClient gravitinoClient; - private static GravitinoMetalake metalake; - private static Catalog catalog; - protected static String gravitinoUri = "http://127.0.0.1:8090"; protected static String trinoUri = "http://127.0.0.1:8080"; protected static String hiveMetastoreUri = "thrift://127.0.0.1:9083"; @@ -125,10 +122,9 @@ public static void before() throws Exception { void createSchema(String metalakeName, String 
catalogName, String schemaName) { Map properties = Maps.newHashMap(); properties.put(PROPERTIES_KEY1, PROPERTIES_VALUE1); - catalog - .asSchemas() - .createSchema( - NameIdentifier.of(metalakeName, catalogName, schemaName), "comment", properties); + GravitinoMetalake metalake = gravitinoClient.loadMetalake(metalakeName); + Catalog catalog = metalake.loadCatalog(catalogName); + catalog.asSchemas().createSchema(schemaName, "comment", properties); } /** @@ -148,7 +144,8 @@ void createHiveTableAndColumn( Distribution distribution = createDistribution(); SortOrder[] sortOrders = createSortOrder(); Transform[] partitions = new Transform[] {identity(col2.name())}; - + GravitinoMetalake metalake = gravitinoClient.loadMetalake(metalakeName); + Catalog catalog = metalake.loadCatalog(catalogName); catalog .asTableCatalog() .createTable( @@ -212,6 +209,7 @@ void createFileset( Map properties = Maps.newHashMap(); properties.put(PROPERTIES_KEY1, PROPERTIES_VALUE1); String storageLocation = storageLocation(schemaName, filesetName); + GravitinoMetalake metalake = gravitinoClient.loadMetalake(metalakeName); Catalog catalog_fileset = metalake.loadCatalog(catalogName); catalog_fileset .asFilesetCatalog() @@ -244,7 +242,7 @@ public void testDeleteCatalog() throws InterruptedException { metalakePage.setMetalakeNameField(METALAKE_SELECT_NAME); clickAndWait(metalakePage.submitHandleMetalakeBtn); // load metalake - metalake = gravitinoClient.loadMetalake(METALAKE_NAME); + gravitinoClient.loadMetalake(METALAKE_NAME); metalakePage.clickMetalakeLink(METALAKE_NAME); // create catalog clickAndWait(catalogsPage.createCatalogBtn); @@ -271,7 +269,8 @@ public void testCreateHiveCatalog() throws InterruptedException { catalogsPage.setCatalogPropsAt(2, "key2", "value2"); clickAndWait(catalogsPage.handleSubmitCatalogBtn); // load catalog - catalog = metalake.loadCatalog(HIVE_CATALOG_NAME); + GravitinoMetalake metalake = gravitinoClient.loadMetalake(METALAKE_NAME); + 
metalake.loadCatalog(HIVE_CATALOG_NAME); Assertions.assertTrue(catalogsPage.verifyGetCatalog(HIVE_CATALOG_NAME)); } diff --git a/server/src/main/java/com/datastrato/gravitino/server/web/rest/SchemaOperations.java b/server/src/main/java/com/datastrato/gravitino/server/web/rest/SchemaOperations.java index 78e2fa6acfe..ed68d2c14c8 100644 --- a/server/src/main/java/com/datastrato/gravitino/server/web/rest/SchemaOperations.java +++ b/server/src/main/java/com/datastrato/gravitino/server/web/rest/SchemaOperations.java @@ -8,6 +8,8 @@ import com.codahale.metrics.annotation.Timed; import com.datastrato.gravitino.NameIdentifier; import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.Schema; +import com.datastrato.gravitino.SchemaChange; import com.datastrato.gravitino.catalog.SchemaDispatcher; import com.datastrato.gravitino.dto.requests.SchemaCreateRequest; import com.datastrato.gravitino.dto.requests.SchemaUpdateRequest; @@ -19,8 +21,6 @@ import com.datastrato.gravitino.lock.LockType; import com.datastrato.gravitino.lock.TreeLockUtils; import com.datastrato.gravitino.metrics.MetricNames; -import com.datastrato.gravitino.rel.Schema; -import com.datastrato.gravitino.rel.SchemaChange; import com.datastrato.gravitino.server.web.Utils; import javax.inject.Inject; import javax.servlet.http.HttpServletRequest; diff --git a/server/src/test/java/com/datastrato/gravitino/server/web/rest/TestSchemaOperations.java b/server/src/test/java/com/datastrato/gravitino/server/web/rest/TestSchemaOperations.java index 7b36cac1384..a711cc9351e 100644 --- a/server/src/test/java/com/datastrato/gravitino/server/web/rest/TestSchemaOperations.java +++ b/server/src/test/java/com/datastrato/gravitino/server/web/rest/TestSchemaOperations.java @@ -17,9 +17,10 @@ import com.datastrato.gravitino.Config; import com.datastrato.gravitino.GravitinoEnv; import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.Schema; import 
com.datastrato.gravitino.catalog.SchemaDispatcher; import com.datastrato.gravitino.catalog.SchemaOperationDispatcher; -import com.datastrato.gravitino.dto.rel.SchemaDTO; +import com.datastrato.gravitino.dto.SchemaDTO; import com.datastrato.gravitino.dto.requests.SchemaCreateRequest; import com.datastrato.gravitino.dto.requests.SchemaUpdateRequest; import com.datastrato.gravitino.dto.requests.SchemaUpdatesRequest; @@ -33,7 +34,6 @@ import com.datastrato.gravitino.exceptions.NonEmptySchemaException; import com.datastrato.gravitino.exceptions.SchemaAlreadyExistsException; import com.datastrato.gravitino.lock.LockManager; -import com.datastrato.gravitino.rel.Schema; import com.datastrato.gravitino.rest.RESTUtils; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; diff --git a/spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/catalog/BaseCatalog.java b/spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/catalog/BaseCatalog.java index 5f365a036e3..3f55fac06fa 100644 --- a/spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/catalog/BaseCatalog.java +++ b/spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/catalog/BaseCatalog.java @@ -8,11 +8,11 @@ import com.datastrato.gravitino.Catalog; import com.datastrato.gravitino.NameIdentifier; import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.Schema; +import com.datastrato.gravitino.SchemaChange; import com.datastrato.gravitino.exceptions.NoSuchSchemaException; import com.datastrato.gravitino.exceptions.NonEmptySchemaException; import com.datastrato.gravitino.exceptions.SchemaAlreadyExistsException; -import com.datastrato.gravitino.rel.Schema; -import com.datastrato.gravitino.rel.SchemaChange; import com.datastrato.gravitino.rel.expressions.literals.Literals; import 
com.datastrato.gravitino.spark.connector.ConnectorConstants; import com.datastrato.gravitino.spark.connector.PropertiesConverter; @@ -299,8 +299,7 @@ public void renameTable(Identifier oldIdent, Identifier newIdent) @Override public String[][] listNamespaces() throws NoSuchNamespaceException { - NameIdentifier[] schemas = - gravitinoCatalogClient.asSchemas().listSchemas(Namespace.of(metalakeName, catalogName)); + NameIdentifier[] schemas = gravitinoCatalogClient.asSchemas().listSchemas(); return Arrays.stream(schemas) .map(schema -> new String[] {schema.name()}) .toArray(String[][]::new); @@ -319,10 +318,7 @@ public Map loadNamespaceMetadata(String[] namespace) throws NoSuchNamespaceException { validateNamespace(namespace); try { - Schema schema = - gravitinoCatalogClient - .asSchemas() - .loadSchema(NameIdentifier.of(metalakeName, catalogName, namespace[0])); + Schema schema = gravitinoCatalogClient.asSchemas().loadSchema(namespace[0]); String comment = schema.comment(); Map properties = schema.properties(); if (comment != null) { @@ -344,10 +340,7 @@ public void createNamespace(String[] namespace, Map metadata) Map properties = new HashMap<>(metadata); String comment = properties.remove(SupportsNamespaces.PROP_COMMENT); try { - gravitinoCatalogClient - .asSchemas() - .createSchema( - NameIdentifier.of(metalakeName, catalogName, namespace[0]), comment, properties); + gravitinoCatalogClient.asSchemas().createSchema(namespace[0], comment, properties); } catch (SchemaAlreadyExistsException e) { throw new NamespaceAlreadyExistsException(namespace); } @@ -372,9 +365,7 @@ public void alterNamespace(String[] namespace, NamespaceChange... 
changes) }) .toArray(SchemaChange[]::new); try { - gravitinoCatalogClient - .asSchemas() - .alterSchema(NameIdentifier.of(metalakeName, catalogName, namespace[0]), schemaChanges); + gravitinoCatalogClient.asSchemas().alterSchema(namespace[0], schemaChanges); } catch (NoSuchSchemaException e) { throw new NoSuchNamespaceException(namespace); } @@ -385,9 +376,7 @@ public boolean dropNamespace(String[] namespace, boolean cascade) throws NoSuchNamespaceException, NonEmptyNamespaceException { validateNamespace(namespace); try { - return gravitinoCatalogClient - .asSchemas() - .dropSchema(NameIdentifier.of(metalakeName, catalogName, namespace[0]), cascade); + return gravitinoCatalogClient.asSchemas().dropSchema(namespace[0], cascade); } catch (NonEmptySchemaException e) { throw new NonEmptyNamespaceException(namespace); } diff --git a/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/catalog/CatalogConnectorMetadata.java b/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/catalog/CatalogConnectorMetadata.java index fd2b7cdf85a..70b95395dfe 100644 --- a/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/catalog/CatalogConnectorMetadata.java +++ b/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/catalog/CatalogConnectorMetadata.java @@ -17,14 +17,14 @@ import com.datastrato.gravitino.Catalog; import com.datastrato.gravitino.NameIdentifier; import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.Schema; +import com.datastrato.gravitino.SupportsSchemas; import com.datastrato.gravitino.client.GravitinoMetalake; import com.datastrato.gravitino.exceptions.NoSuchCatalogException; import com.datastrato.gravitino.exceptions.NoSuchSchemaException; import com.datastrato.gravitino.exceptions.NoSuchTableException; import com.datastrato.gravitino.exceptions.NonEmptySchemaException; import com.datastrato.gravitino.exceptions.TableAlreadyExistsException; -import 
com.datastrato.gravitino.rel.Schema; -import com.datastrato.gravitino.rel.SupportsSchemas; import com.datastrato.gravitino.rel.Table; import com.datastrato.gravitino.rel.TableCatalog; import com.datastrato.gravitino.rel.TableChange; @@ -71,10 +71,7 @@ public CatalogConnectorMetadata(GravitinoMetalake metalake, NameIdentifier catal public List listSchemaNames() { try { - return Arrays.stream( - schemaCatalog.listSchemas(Namespace.ofSchema(metalake.name(), catalogName))) - .map(NameIdentifier::name) - .toList(); + return Arrays.stream(schemaCatalog.listSchemas()).map(NameIdentifier::name).toList(); } catch (NoSuchCatalogException e) { throw new TrinoException(GRAVITINO_CATALOG_NOT_EXISTS, CATALOG_DOES_NOT_EXIST_MSG, e); } @@ -82,9 +79,7 @@ public List listSchemaNames() { public GravitinoSchema getSchema(String schemaName) { try { - Schema schema = - schemaCatalog.loadSchema( - NameIdentifier.ofSchema(metalake.name(), catalogName, schemaName)); + Schema schema = schemaCatalog.loadSchema(schemaName); return new GravitinoSchema(schema); } catch (NoSuchSchemaException e) { throw new TrinoException(GRAVITINO_SCHEMA_NOT_EXISTS, SCHEMA_DOES_NOT_EXIST_MSG, e); @@ -139,10 +134,7 @@ public void createTable(GravitinoTable table) { public void createSchema(GravitinoSchema schema) { try { - schemaCatalog.createSchema( - NameIdentifier.ofSchema(metalake.name(), catalogName, schema.getName()), - schema.getComment(), - schema.getProperties()); + schemaCatalog.createSchema(schema.getName(), schema.getComment(), schema.getProperties()); } catch (NoSuchSchemaException e) { throw new TrinoException(GRAVITINO_CATALOG_NOT_EXISTS, CATALOG_DOES_NOT_EXIST_MSG, e); } catch (TableAlreadyExistsException e) { @@ -152,9 +144,7 @@ public void createSchema(GravitinoSchema schema) { public void dropSchema(String schemaName, boolean cascade) { try { - boolean success = - schemaCatalog.dropSchema( - NameIdentifier.ofSchema(metalake.name(), catalogName, schemaName), cascade); + boolean success = 
schemaCatalog.dropSchema(schemaName, cascade); if (!success) { throw new TrinoException(GRAVITINO_SCHEMA_NOT_EXISTS, "Drop schema failed"); diff --git a/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/metadata/GravitinoSchema.java b/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/metadata/GravitinoSchema.java index 80843c6349f..65e8a99e832 100644 --- a/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/metadata/GravitinoSchema.java +++ b/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/metadata/GravitinoSchema.java @@ -4,7 +4,7 @@ */ package com.datastrato.gravitino.trino.connector.metadata; -import com.datastrato.gravitino.rel.Schema; +import com.datastrato.gravitino.Schema; import java.util.Map; /** Help Gravitino connector access SchemaMetadata from gravitino client. */ diff --git a/trino-connector/src/test/java/com/datastrato/gravitino/trino/connector/GravitinoMockServer.java b/trino-connector/src/test/java/com/datastrato/gravitino/trino/connector/GravitinoMockServer.java index 65821f15a3b..7f98b532b35 100644 --- a/trino-connector/src/test/java/com/datastrato/gravitino/trino/connector/GravitinoMockServer.java +++ b/trino-connector/src/test/java/com/datastrato/gravitino/trino/connector/GravitinoMockServer.java @@ -18,13 +18,13 @@ import com.datastrato.gravitino.Catalog; import com.datastrato.gravitino.NameIdentifier; import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.Schema; +import com.datastrato.gravitino.SupportsSchemas; import com.datastrato.gravitino.client.GravitinoAdminClient; import com.datastrato.gravitino.client.GravitinoMetalake; import com.datastrato.gravitino.exceptions.NoSuchCatalogException; import com.datastrato.gravitino.exceptions.NoSuchMetalakeException; import com.datastrato.gravitino.rel.Column; -import com.datastrato.gravitino.rel.Schema; -import com.datastrato.gravitino.rel.SupportsSchemas; import 
com.datastrato.gravitino.rel.Table; import com.datastrato.gravitino.rel.TableCatalog; import com.datastrato.gravitino.rel.TableChange; @@ -222,12 +222,12 @@ private Catalog createCatalog(String metalakeName, String catalogName) { private SupportsSchemas createSchemas(GravitinoCatalog catalog) { SupportsSchemas schemas = mock(SupportsSchemas.class); - when(schemas.createSchema(any(NameIdentifier.class), anyString(), anyMap())) + when(schemas.createSchema(any(String.class), anyString(), anyMap())) .thenAnswer( new Answer() { @Override public Schema answer(InvocationOnMock invocation) throws Throwable { - NameIdentifier schemaName = invocation.getArgument(0); + String schemaName = invocation.getArgument(0); Map properties = invocation.getArgument(2); // create schema @@ -242,8 +242,8 @@ public Schema answer(InvocationOnMock invocation) throws Throwable { catalogConnectorManager .getCatalogConnector(catalogConnectorManager.getTrinoCatalogName(catalog)) .getMetadataAdapter(); - GravitinoSchema schema = new GravitinoSchema(schemaName.name(), properties, ""); - metadata.createSchema(null, schemaName.name(), emptyMap(), null); + GravitinoSchema schema = new GravitinoSchema(schemaName, properties, ""); + metadata.createSchema(null, schemaName, emptyMap(), null); Schema mockSchema = mockSchema(schema.getName(), schema.getComment(), schema.getProperties()); @@ -251,12 +251,12 @@ public Schema answer(InvocationOnMock invocation) throws Throwable { } }); - when(schemas.dropSchema(any(NameIdentifier.class), anyBoolean())) + when(schemas.dropSchema(any(String.class), anyBoolean())) .thenAnswer( new Answer() { @Override public Boolean answer(InvocationOnMock invocation) throws Throwable { - NameIdentifier nameIdentifier = invocation.getArgument(0); + String schemaName = invocation.getArgument(0); boolean cascade = invocation.getArgument(1); // drop schema, @@ -267,17 +267,16 @@ public Boolean answer(InvocationOnMock invocation) throws Throwable { 
catalogConnectorManager.getTrinoCatalogName(catalog)) .getInternalConnector(); ConnectorMetadata metadata = memoryConnector.getMetadata(null, null); - metadata.dropSchema(null, nameIdentifier.name(), cascade); + metadata.dropSchema(null, schemaName, cascade); return true; } }); - when(schemas.listSchemas(any(Namespace.class))) + when(schemas.listSchemas()) .thenAnswer( new Answer() { @Override public NameIdentifier[] answer(InvocationOnMock invocation) throws Throwable { - Namespace namespace = invocation.getArgument(0); MemoryConnector memoryConnector = (MemoryConnector) catalogConnectorManager @@ -289,17 +288,17 @@ public NameIdentifier[] answer(InvocationOnMock invocation) throws Throwable { .map( schemaName -> NameIdentifier.ofSchema( - namespace.level(0), namespace.level(1), schemaName)) + catalog.getMetalake(), catalog.getName(), schemaName)) .toArray(NameIdentifier[]::new); } }); - when(schemas.loadSchema(any(NameIdentifier.class))) + when(schemas.loadSchema(any(String.class))) .thenAnswer( new Answer() { @Override public Schema answer(InvocationOnMock invocation) throws Throwable { - NameIdentifier schemaName = invocation.getArgument(0); + String schemaName = invocation.getArgument(0); MemoryConnector memoryConnector = (MemoryConnector) catalogConnectorManager @@ -309,7 +308,7 @@ public Schema answer(InvocationOnMock invocation) throws Throwable { memoryConnector.getMetadata(null, null); ConnectorMetadata metadata = memoryConnector.getMetadata(null, null); Map schemaProperties = - metadata.getSchemaProperties(null, schemaName.name()); + metadata.getSchemaProperties(null, schemaName); CatalogConnectorMetadataAdapter metadataAdapter = catalogConnectorManager @@ -318,7 +317,7 @@ public Schema answer(InvocationOnMock invocation) throws Throwable { GravitinoSchema gravitinoSchema = new GravitinoSchema( - schemaName.name(), + schemaName, metadataAdapter.toGravitinoSchemaProperties(schemaProperties), ""); diff --git 
a/trino-connector/src/test/java/com/datastrato/gravitino/trino/connector/metadata/TestGravitinoSchema.java b/trino-connector/src/test/java/com/datastrato/gravitino/trino/connector/metadata/TestGravitinoSchema.java index 95722db8023..49760279964 100644 --- a/trino-connector/src/test/java/com/datastrato/gravitino/trino/connector/metadata/TestGravitinoSchema.java +++ b/trino-connector/src/test/java/com/datastrato/gravitino/trino/connector/metadata/TestGravitinoSchema.java @@ -9,7 +9,7 @@ import static org.testng.Assert.assertEquals; import com.datastrato.gravitino.Audit; -import com.datastrato.gravitino.rel.Schema; +import com.datastrato.gravitino.Schema; import java.time.Instant; import java.util.HashMap; import java.util.Map;