From 54ff4b072f0beaea1d0b574fe4356a8b49f72f2d Mon Sep 17 00:00:00 2001
From: Jiao <35512473+mxdzs0612@users.noreply.github.com>
Date: Mon, 1 Aug 2022 16:44:23 +0800
Subject: [PATCH] [Feature] Iceberg connector supports custom catalog (#8931)

---
 .../com/starrocks/catalog/IcebergTable.java   | 16 ++++-
 .../connector/iceberg/IcebergConnector.java   | 14 +---
 .../connector/iceberg/IcebergMetadata.java    | 51 ++++++++++----
 .../external/iceberg/IcebergCatalog.java      | 10 +++
 .../external/iceberg/IcebergHiveCatalog.java  |  2 +
 .../external/iceberg/IcebergUtil.java         | 20 +++++-
 .../iceberg/IcebergMetadataTest.java          | 39 ++++++++---
 .../iceberg/IcebergCustomCatalogTest.java     | 66 ++++++++++++++++++-
 .../iceberg/IcebergHiveCatalogTest.java       |  2 +-
 .../starrocks/server/CatalogLevelTest.java    |  4 +-
 10 files changed, 180 insertions(+), 44 deletions(-)

diff --git a/fe/fe-core/src/main/java/com/starrocks/catalog/IcebergTable.java b/fe/fe-core/src/main/java/com/starrocks/catalog/IcebergTable.java
index f9102a533f69a..3a87005b016fa 100644
--- a/fe/fe-core/src/main/java/com/starrocks/catalog/IcebergTable.java
+++ b/fe/fe-core/src/main/java/com/starrocks/catalog/IcebergTable.java
@@ -48,7 +48,7 @@ public class IcebergTable extends Table {
     public static final String ICEBERG_CATALOG = "starrocks.catalog-type";
     public static final String ICEBERG_METASTORE_URIS = "iceberg.catalog.hive.metastore.uris";
-    private static final String ICEBERG_IMPL = "iceberg.catalog-impl";
+    public static final String ICEBERG_IMPL = "iceberg.catalog-impl";
     public static final String ICEBERG_DB = "database";
     public static final String ICEBERG_TABLE = "table";
     public static final String ICEBERG_RESOURCE = "resource";
@@ -75,6 +75,10 @@ public IcebergTable(long id, String name, List<Column> schema, Map<String, String> properties, String met
         icebergProperties.put(ICEBERG_CATALOG, "HIVE_CATALOG");
     }
 
+    private void setCustomCatalogProperties(Map<String, String> properties) {
+        db = properties.remove(ICEBERG_DB);
+        table = properties.remove(ICEBERG_TABLE);
+        icebergProperties.put(ICEBERG_CATALOG, "CUSTOM_CATALOG");
+        icebergProperties.put(ICEBERG_IMPL, properties.remove(ICEBERG_IMPL));
+        for (Map.Entry<String, String> entry : properties.entrySet()) {
+            icebergProperties.put(entry.getKey(), entry.getValue());
+        }
+    }
+
     private void validate(Map<String, String> properties) throws DdlException {
         if (properties == null) {
             throw new DdlException("Please set properties of iceberg table, they are: database, table.");
diff --git a/fe/fe-core/src/main/java/com/starrocks/connector/iceberg/IcebergConnector.java b/fe/fe-core/src/main/java/com/starrocks/connector/iceberg/IcebergConnector.java
index 3247da22ac5d7..228032188a8b5 100644
--- a/fe/fe-core/src/main/java/com/starrocks/connector/iceberg/IcebergConnector.java
+++ b/fe/fe-core/src/main/java/com/starrocks/connector/iceberg/IcebergConnector.java
@@ -2,9 +2,7 @@
 
 package com.starrocks.connector.iceberg;
 
-import com.google.common.base.Preconditions;
 import com.starrocks.common.DdlException;
-import com.starrocks.common.util.Util;
 import com.starrocks.connector.Connector;
 import com.starrocks.connector.ConnectorContext;
 import com.starrocks.connector.ConnectorMetadata;
@@ -13,33 +11,23 @@
 
 import java.util.Map;
 
-import static com.starrocks.catalog.IcebergTable.ICEBERG_METASTORE_URIS;
-
 public class IcebergConnector implements Connector {
     private static final Logger LOG = LogManager.getLogger(IcebergConnector.class);
 
     private final Map<String, String> properties;
     private final String catalogName;
-    private String metastoreURI;
     private ConnectorMetadata metadata;
 
     public IcebergConnector(ConnectorContext context) {
         this.catalogName = context.getCatalogName();
         this.properties = context.getProperties();
-        validate();
-    }
-
-    public void validate() {
-        this.metastoreURI = Preconditions.checkNotNull(properties.get(ICEBERG_METASTORE_URIS),
-                "%s must be set in properties when creating iceberg catalog", ICEBERG_METASTORE_URIS);
-        Util.validateMetastoreUris(metastoreURI);
     }
 
     @Override
     public ConnectorMetadata getMetadata() throws DdlException {
         if (metadata == null) {
             try {
-                metadata = new IcebergMetadata(metastoreURI);
+                metadata = new IcebergMetadata(properties);
             } catch (Exception e) {
                 LOG.error("Failed to create iceberg metadata on [catalog : {}]", catalogName, e);
                 throw e;
diff --git a/fe/fe-core/src/main/java/com/starrocks/connector/iceberg/IcebergMetadata.java b/fe/fe-core/src/main/java/com/starrocks/connector/iceberg/IcebergMetadata.java
index 3467365ca9369..fa05ba3f021ef 100644
--- a/fe/fe-core/src/main/java/com/starrocks/connector/iceberg/IcebergMetadata.java
+++ b/fe/fe-core/src/main/java/com/starrocks/connector/iceberg/IcebergMetadata.java
@@ -5,8 +5,10 @@
 import com.starrocks.catalog.Database;
 import com.starrocks.catalog.Table;
 import com.starrocks.common.DdlException;
+import com.starrocks.common.util.Util;
 import com.starrocks.connector.ConnectorMetadata;
-import com.starrocks.external.iceberg.IcebergHiveCatalog;
+import com.starrocks.external.iceberg.IcebergCatalog;
+import com.starrocks.external.iceberg.IcebergCatalogType;
 import com.starrocks.external.iceberg.IcebergUtil;
 import org.apache.iceberg.catalog.Namespace;
 import org.apache.iceberg.catalog.TableIdentifier;
@@ -15,35 +17,52 @@
 import org.apache.thrift.TException;
 
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.stream.Collectors;
 
+import static com.starrocks.catalog.IcebergTable.ICEBERG_CATALOG;
+import static com.starrocks.catalog.IcebergTable.ICEBERG_IMPL;
+import static com.starrocks.catalog.IcebergTable.ICEBERG_METASTORE_URIS;
+import static com.starrocks.external.iceberg.IcebergUtil.getIcebergCustomCatalog;
 import static com.starrocks.external.iceberg.IcebergUtil.getIcebergHiveCatalog;
 
 public class IcebergMetadata implements ConnectorMetadata {
     private static final Logger LOG = LogManager.getLogger(IcebergMetadata.class);
 
-    private final String metastoreURI;
-    private IcebergHiveCatalog hiveCatalog;
+    private String metastoreURI;
+    private String catalogType;
+    private String catalogImpl;
+    private IcebergCatalog icebergCatalog;
+    private Map<String, String> customProperties;
 
-    public IcebergMetadata(String metastoreURI) {
-        Map<String, String> properties = new HashMap<>();
-        // the first phase of IcebergConnector only supports hive catalog.
-        this.hiveCatalog = (IcebergHiveCatalog) getIcebergHiveCatalog(metastoreURI, properties);
-        this.metastoreURI = metastoreURI;
+    public IcebergMetadata(Map<String, String> properties) {
+        if (IcebergCatalogType.HIVE_CATALOG == IcebergCatalogType.fromString(properties.get(ICEBERG_CATALOG))) {
+            catalogType = properties.get(ICEBERG_CATALOG);
+            metastoreURI = properties.get(ICEBERG_METASTORE_URIS);
+            icebergCatalog = getIcebergHiveCatalog(metastoreURI, properties);
+            Util.validateMetastoreUris(metastoreURI);
+        } else if (IcebergCatalogType.CUSTOM_CATALOG == IcebergCatalogType.fromString(properties.get(ICEBERG_CATALOG))) {
+            catalogType = properties.get(ICEBERG_CATALOG);
+            catalogImpl = properties.get(ICEBERG_IMPL);
+            icebergCatalog = getIcebergCustomCatalog(catalogImpl, properties);
+            properties.remove(ICEBERG_CATALOG);
+            properties.remove(ICEBERG_IMPL);
+            customProperties = properties;
+        } else {
+            throw new RuntimeException(String.format("Property %s is missing or not supported now.", ICEBERG_CATALOG));
+        }
     }
 
     @Override
     public List<String> listDbNames() throws DdlException {
-        return hiveCatalog.listAllDatabases();
+        return icebergCatalog.listAllDatabases();
     }
 
     @Override
     public Database getDb(String dbName) {
         try {
-            return hiveCatalog.getDB(dbName);
+            return icebergCatalog.getDB(dbName);
         } catch (InterruptedException | TException e) {
             LOG.error("Failed to get iceberg database " + dbName, e);
             return null;
@@ -52,15 +71,19 @@ public Database getDb(String dbName) {
 
     @Override
     public List<String> listTableNames(String dbName) throws DdlException {
-        List<TableIdentifier> tableIdentifiers = hiveCatalog.listTables(Namespace.of(dbName));
+        List<TableIdentifier> tableIdentifiers = icebergCatalog.listTables(Namespace.of(dbName));
         return tableIdentifiers.stream().map(TableIdentifier::name).collect(Collectors.toCollection(ArrayList::new));
     }
 
     @Override
     public Table getTable(String dbName, String tblName) {
         try {
-            org.apache.iceberg.Table icebergTable = hiveCatalog.loadTable(IcebergUtil.getIcebergTableIdentifier(dbName, tblName));
-            return IcebergUtil.convertToSRTable(icebergTable, metastoreURI, dbName, tblName);
+            org.apache.iceberg.Table icebergTable
+                    = icebergCatalog.loadTable(IcebergUtil.getIcebergTableIdentifier(dbName, tblName));
+            if (IcebergCatalogType.fromString(catalogType).equals(IcebergCatalogType.CUSTOM_CATALOG)) {
+                return IcebergUtil.convertCustomCatalogToSRTable(icebergTable, catalogImpl, dbName, tblName, customProperties);
+            }
+            return IcebergUtil.convertHiveCatalogToSRTable(icebergTable, metastoreURI, dbName, tblName);
         } catch (DdlException e) {
             LOG.error("Failed to get iceberg table " + IcebergUtil.getIcebergTableIdentifier(dbName, tblName), e);
             return null;
diff --git a/fe/fe-core/src/main/java/com/starrocks/external/iceberg/IcebergCatalog.java b/fe/fe-core/src/main/java/com/starrocks/external/iceberg/IcebergCatalog.java
index c1867fc5466db..d315dc4386671 100644
--- a/fe/fe-core/src/main/java/com/starrocks/external/iceberg/IcebergCatalog.java
+++ b/fe/fe-core/src/main/java/com/starrocks/external/iceberg/IcebergCatalog.java
@@ -2,10 +2,14 @@
 
 package com.starrocks.external.iceberg;
 
+import com.starrocks.catalog.Database;
 import com.starrocks.catalog.IcebergTable;
 import org.apache.iceberg.Table;
+import org.apache.iceberg.catalog.Namespace;
 import org.apache.iceberg.catalog.TableIdentifier;
+import org.apache.thrift.TException;
 
+import java.util.List;
 import java.util.Map;
 
 /**
@@ -39,4 +43,10 @@ public interface IcebergCatalog {
     Table loadTable(TableIdentifier tableId,
                     String tableLocation,
                     Map<String, String> properties) throws StarRocksIcebergException;
+
+    List<String> listAllDatabases();
+
+    Database getDB(String dbName) throws InterruptedException, TException;
+
+    List<TableIdentifier> listTables(Namespace of);
 }
diff --git a/fe/fe-core/src/main/java/com/starrocks/external/iceberg/IcebergHiveCatalog.java b/fe/fe-core/src/main/java/com/starrocks/external/iceberg/IcebergHiveCatalog.java
index 21da9f9ff8f59..4cd51a7546d05 100644
--- a/fe/fe-core/src/main/java/com/starrocks/external/iceberg/IcebergHiveCatalog.java
+++ b/fe/fe-core/src/main/java/com/starrocks/external/iceberg/IcebergHiveCatalog.java
@@ -138,6 +138,7 @@ protected String defaultWarehouseLocation(TableIdentifier tableIdentifier) {
         throw new UnsupportedOperationException("Not implemented");
     }
 
+    @Override
     public List<String> listAllDatabases() {
         try {
             return new ArrayList<>(clients.run(IMetaStoreClient::getAllDatabases));
@@ -146,6 +147,7 @@ public List<String> listAllDatabases() {
         }
     }
 
+    @Override
     public Database getDB(String dbName) throws InterruptedException, TException {
         org.apache.hadoop.hive.metastore.api.Database db = clients.run(client -> client.getDatabase(dbName));
         if (db == null || db.getName() == null) {
diff --git a/fe/fe-core/src/main/java/com/starrocks/external/iceberg/IcebergUtil.java b/fe/fe-core/src/main/java/com/starrocks/external/iceberg/IcebergUtil.java
index 9d67c154410ce..16dd25ff1d346 100644
--- a/fe/fe-core/src/main/java/com/starrocks/external/iceberg/IcebergUtil.java
+++ b/fe/fe-core/src/main/java/com/starrocks/external/iceberg/IcebergUtil.java
@@ -177,13 +177,29 @@ public static Map getIdentityPartitions(PartitionSpec p
         return columns.build();
     }
 
-    public static IcebergTable convertToSRTable(org.apache.iceberg.Table icebergTable, String metastoreURI,
+    public static IcebergTable convertCustomCatalogToSRTable(org.apache.iceberg.Table icebergTable, String catalogImpl,
+                                                String dbName, String tblName,
+                                                Map<String, String> customProperties) throws DdlException {
+        Map<String, String> properties = new HashMap<>();
+        properties.put(IcebergTable.ICEBERG_DB, dbName);
+        properties.put(IcebergTable.ICEBERG_TABLE, tblName);
+        properties.put(IcebergTable.ICEBERG_CATALOG, "CUSTOM_CATALOG");
+        properties.put(IcebergTable.ICEBERG_IMPL, catalogImpl);
+        properties.putAll(customProperties);
+        return convertToSRTable(icebergTable, properties);
+    }
+
+    public static IcebergTable convertHiveCatalogToSRTable(org.apache.iceberg.Table icebergTable, String metastoreURI,
                                                 String dbName, String tblName) throws DdlException {
         Map<String, String> properties = new HashMap<>();
-        properties.put(IcebergTable.ICEBERG_CATALOG, "HIVE_CATALOG");
         properties.put(IcebergTable.ICEBERG_DB, dbName);
         properties.put(IcebergTable.ICEBERG_TABLE, tblName);
+        properties.put(IcebergTable.ICEBERG_CATALOG, "HIVE_CATALOG");
         properties.put(IcebergTable.ICEBERG_METASTORE_URIS, metastoreURI);
+        return convertToSRTable(icebergTable, properties);
+    }
+
+    private static IcebergTable convertToSRTable(org.apache.iceberg.Table icebergTable, Map<String, String> properties)
+            throws DdlException {
         Map<String, Types.NestedField> icebergColumns = icebergTable.schema().columns().stream()
                 .collect(Collectors.toMap(Types.NestedField::name, field -> field));
         List<Column> fullSchema = Lists.newArrayList();
diff --git a/fe/fe-core/src/test/java/com/starrocks/connector/iceberg/IcebergMetadataTest.java b/fe/fe-core/src/test/java/com/starrocks/connector/iceberg/IcebergMetadataTest.java
index 34aa2d53b81df..f1be86e7a00f4 100644
--- a/fe/fe-core/src/test/java/com/starrocks/connector/iceberg/IcebergMetadataTest.java
+++ b/fe/fe-core/src/test/java/com/starrocks/connector/iceberg/IcebergMetadataTest.java
@@ -5,7 +5,6 @@
 import com.google.common.collect.Lists;
 import com.starrocks.catalog.Database;
 import com.starrocks.catalog.Table;
-import com.starrocks.common.DdlException;
 import com.starrocks.external.hive.HiveMetaStoreThriftClient;
 import com.starrocks.external.iceberg.IcebergHiveCatalog;
 import com.starrocks.external.iceberg.IcebergUtil;
@@ -19,8 +18,12 @@ import org.junit.Test;
 
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 
+import static com.starrocks.catalog.IcebergTable.ICEBERG_CATALOG;
+import static com.starrocks.catalog.IcebergTable.ICEBERG_METASTORE_URIS;
 import static com.starrocks.catalog.Table.TableType.ICEBERG;
 
 public class IcebergMetadataTest {
@@ -34,8 +37,11 @@ public void testListDatabaseNames(@Mocked HiveMetaStoreThriftClient metaStoreThr
             }
         };
 
+        Map<String, String> properties = new HashMap<>();
         String metastoreUris = "thrift://127.0.0.1:9083";
-        IcebergMetadata metadata = new IcebergMetadata(metastoreUris);
+        properties.put(ICEBERG_METASTORE_URIS, metastoreUris);
+        properties.put(ICEBERG_CATALOG, "hive");
+        IcebergMetadata metadata = new IcebergMetadata(properties);
         List<String> expectResult = Lists.newArrayList("db1", "db2");
         Assert.assertEquals(expectResult, metadata.listDbNames());
     }
@@ -52,8 +58,11 @@ public void testGetDB(@Mocked IcebergHiveCatalog icebergHiveCatalog) throws Exce
             }
         };
 
+        Map<String, String> properties = new HashMap<>();
         String metastoreUris = "thrift://127.0.0.1:9083";
-        IcebergMetadata metadata = new IcebergMetadata(metastoreUris);
+        properties.put(ICEBERG_METASTORE_URIS, metastoreUris);
+        properties.put(ICEBERG_CATALOG, "hive");
+        IcebergMetadata metadata = new IcebergMetadata(properties);
         Database expectResult = new Database(0, db);
         Assert.assertEquals(expectResult, metadata.getDb(db));
     }
@@ -73,15 +82,18 @@ public void testListTableNames(@Mocked IcebergHiveCatalog icebergHiveCatalog) th
             }
         };
 
+        Map<String, String> properties = new HashMap<>();
         String metastoreUris = "thrift://127.0.0.1:9083";
-        IcebergMetadata metadata = new IcebergMetadata(metastoreUris);
+        properties.put(ICEBERG_METASTORE_URIS, metastoreUris);
+        properties.put(ICEBERG_CATALOG, "hive");
+        IcebergMetadata metadata = new IcebergMetadata(properties);
         List<String> expectResult = Lists.newArrayList("tbl1", "tbl2");
         Assert.assertEquals(expectResult, metadata.listTableNames(db1));
     }
 
     @Test
     public void testGetTable(@Mocked IcebergHiveCatalog icebergHiveCatalog,
-                             @Mocked HiveTableOperations hiveTableOperations) throws Exception {
+                             @Mocked HiveTableOperations hiveTableOperations) {
 
         new Expectations() {
             {
@@ -91,15 +103,18 @@ public void testGetTable(@Mocked IcebergHiveCatalog icebergHiveCatalog,
             }
         };
 
-        String resourceName = "thrift://127.0.0.1:9083";
-        IcebergMetadata metadata = new IcebergMetadata(resourceName);
+        Map<String, String> properties = new HashMap<>();
+        String metastoreUris = "thrift://127.0.0.1:9083";
+        properties.put(ICEBERG_METASTORE_URIS, metastoreUris);
+        properties.put(ICEBERG_CATALOG, "hive");
+        IcebergMetadata metadata = new IcebergMetadata(properties);
         Table expectResult = new Table(0, "tbl", ICEBERG, new ArrayList<>());
         Assert.assertEquals(expectResult, metadata.getTable("db", "tbl"));
     }
 
     @Test
     public void testNotExistTable(@Mocked IcebergHiveCatalog icebergHiveCatalog,
-                                  @Mocked HiveTableOperations hiveTableOperations) throws DdlException {
+                                  @Mocked HiveTableOperations hiveTableOperations) {
         new Expectations() {
             {
                 icebergHiveCatalog.loadTable(IcebergUtil.getIcebergTableIdentifier("db", "tbl"));
@@ -107,8 +122,12 @@ public void testNotExistTable(@Mocked IcebergHiveCatalog icebergHiveCatalog,
                 result = null;
                 minTimes = 0;
             }
         };
-        String resourceName = "thrift://127.0.0.1:9083";
-        IcebergMetadata metadata = new IcebergMetadata(resourceName);
+
+        Map<String, String> properties = new HashMap<>();
+        String metastoreUris = "thrift://127.0.0.1:9083";
+        properties.put(ICEBERG_METASTORE_URIS, metastoreUris);
+        properties.put(ICEBERG_CATALOG, "hive");
+        IcebergMetadata metadata = new IcebergMetadata(properties);
         Assert.assertNull(metadata.getTable("db", "tbl2").getName());
     }
 }
diff --git a/fe/fe-core/src/test/java/com/starrocks/external/iceberg/IcebergCustomCatalogTest.java b/fe/fe-core/src/test/java/com/starrocks/external/iceberg/IcebergCustomCatalogTest.java
index df5ddfe58ff47..c6cf2423f823d 100644
--- a/fe/fe-core/src/test/java/com/starrocks/external/iceberg/IcebergCustomCatalogTest.java
+++ b/fe/fe-core/src/test/java/com/starrocks/external/iceberg/IcebergCustomCatalogTest.java
@@ -4,9 +4,11 @@
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
+import com.starrocks.catalog.Database;
 import com.starrocks.catalog.IcebergTable;
 import com.starrocks.external.iceberg.hive.CachedClientPool;
 import com.starrocks.external.iceberg.hive.HiveTableOperations;
+import com.starrocks.external.iceberg.io.IcebergCachingFileIO;
 import mockit.Expectations;
 import mockit.Mock;
 import mockit.MockUp;
@@ -32,9 +34,15 @@
 import org.junit.Assert;
 import org.junit.Test;
 
+import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.stream.Collectors;
+
+import static com.starrocks.external.iceberg.IcebergUtil.convertToSRDatabase;
+import static com.starrocks.external.iceberg.IcebergUtil.getIcebergCustomCatalog;
 
 public class IcebergCustomCatalogTest {
 
@@ -82,6 +90,33 @@ public Catalog loadCatalog(String catalogImpl, String catalogName,
         Assert.assertEquals("test", table.name());
     }
 
+    @Test
+    public void testListAllDatabases(@Mocked IcebergCustomTestingCatalog customCatalog) {
+        new Expectations() {
+            {
+                customCatalog.listAllDatabases();
+                result = Arrays.asList("db1", "db2");
+                minTimes = 0;
+            }
+        };
+
+        new MockUp() {
+            @Mock
+            public Catalog loadCatalog(String catalogImpl, String catalogName,
+                                       Map<String, String> properties,
+                                       Configuration hadoopConf) {
+                return customCatalog;
+            }
+        };
+
+        String catalogImpl = IcebergCustomTestingCatalog.class.getName();
+        Map<String, String> icebergProperties = new HashMap<>();
+        IcebergCustomTestingCatalog icebergCustomCatalog = (IcebergCustomTestingCatalog) getIcebergCustomCatalog(
+                catalogImpl, icebergProperties);
+        List<String> dbs = icebergCustomCatalog.listAllDatabases();
+        Assert.assertEquals(Arrays.asList("db1", "db2"), dbs);
+    }
+
     public static class IcebergCustomTestingCatalog extends BaseMetastoreCatalog implements IcebergCatalog {
 
         private String name;
@@ -141,6 +176,11 @@ public void initialize(String inputName, Map<String, String> properties) {
             this.fileIO = fileIOImpl == null ? new HadoopFileIO(conf) : CatalogUtil.loadFileIO(fileIOImpl, properties, conf);
+            // warp cache fileIO
+            IcebergCachingFileIO cachingFileIO = new IcebergCachingFileIO(fileIO);
+            cachingFileIO.initialize(properties);
+            this.fileIO = cachingFileIO;
+
             this.clients = new CachedClientPool(conf, properties);
         }
 
@@ -166,9 +206,33 @@ protected String defaultWarehouseLocation(TableIdentifier tableIdentifier) {
             throw new UnsupportedOperationException("Not implemented");
         }
 
+        @Override
+        public List<String> listAllDatabases() {
+            try {
+                return new ArrayList<>(clients.run(IMetaStoreClient::getAllDatabases));
+            } catch (TException | InterruptedException e) {
+                throw new RuntimeException(e);
+            }
+        }
+
+        @Override
+        public Database getDB(String dbName) throws InterruptedException, TException {
+            org.apache.hadoop.hive.metastore.api.Database db = clients.run(client -> client.getDatabase(dbName));
+            if (db == null || db.getName() == null) {
+                throw new TException("Hive db " + dbName + " doesn't exist");
+            }
+            return convertToSRDatabase(dbName);
+        }
+
         @Override
         public List<TableIdentifier> listTables(Namespace namespace) {
-            throw new UnsupportedOperationException("Not implemented");
+            String database = namespace.level(0);
+            try {
+                List<String> tableNames = clients.run(client -> client.getAllTables(database));
+                return tableNames.stream().map(tblName -> TableIdentifier.of(namespace, tblName)).collect(Collectors.toList());
+            } catch (TException | InterruptedException e) {
+                throw new RuntimeException(e);
+            }
         }
 
         @Override
diff --git a/fe/fe-core/src/test/java/com/starrocks/external/iceberg/IcebergHiveCatalogTest.java b/fe/fe-core/src/test/java/com/starrocks/external/iceberg/IcebergHiveCatalogTest.java
index bea787eb37a36..26335c8e6f0c0 100644
--- a/fe/fe-core/src/test/java/com/starrocks/external/iceberg/IcebergHiveCatalogTest.java
+++ b/fe/fe-core/src/test/java/com/starrocks/external/iceberg/IcebergHiveCatalogTest.java
@@ -79,6 +79,6 @@ public void testListAllDatabases(@Mocked IcebergHiveCatalog hiveCatalog) {
         Map<String, String> icebergProperties = new HashMap<>();
         IcebergHiveCatalog icebergHiveCatalog = IcebergHiveCatalog.getInstance("thrift://test:9030", icebergProperties);
         List<String> dbs = icebergHiveCatalog.listAllDatabases();
-        Assert.assertEquals(dbs, Arrays.asList("db1", "db2"));
+        Assert.assertEquals(Arrays.asList("db1", "db2"), dbs);
     }
 }
diff --git a/fe/fe-core/src/test/java/com/starrocks/server/CatalogLevelTest.java b/fe/fe-core/src/test/java/com/starrocks/server/CatalogLevelTest.java
index f76fb1224dff8..12c729aaa4059 100644
--- a/fe/fe-core/src/test/java/com/starrocks/server/CatalogLevelTest.java
+++ b/fe/fe-core/src/test/java/com/starrocks/server/CatalogLevelTest.java
@@ -73,7 +73,7 @@ public void testQueryExternalCatalogInDefaultCatalog(@Mocked MetadataMgr metadat
     public void testQueryIcebergCatalog(@Mocked MetadataMgr metadataMgr,
                                         @Mocked HiveTableOperations hiveTableOperations) throws Exception {
         String createCatalog = "CREATE EXTERNAL CATALOG iceberg_catalog PROPERTIES(\"type\"=\"iceberg\"," +
-                " \"iceberg.catalog.hive.metastore.uris\"=\"thrift://127.0.0.1:9083\")";
+                " \"iceberg.catalog.hive.metastore.uris\"=\"thrift://127.0.0.1:9083\", \"starrocks.catalog-type\" = \"hive\")";
         StarRocksAssert starRocksAssert = new StarRocksAssert();
         starRocksAssert.withCatalog(createCatalog);
         Configuration conf = new Configuration();
@@ -87,7 +87,7 @@ public void testQueryIcebergCatalog(@Mocked MetadataMgr metadataMgr,
         };
 
         org.apache.iceberg.Table tbl = new org.apache.iceberg.BaseTable(hiveTableOperations, "iceberg_table");
-        com.starrocks.catalog.Table icebergTable = IcebergUtil.convertToSRTable(tbl, "thrift://127.0.0.1:9083",
+        com.starrocks.catalog.Table icebergTable = IcebergUtil.convertHiveCatalogToSRTable(tbl, "thrift://127.0.0.1:9083",
                 "iceberg_db", "iceberg_table");
         GlobalStateMgr.getCurrentState().setMetadataMgr(metadataMgr);
         new Expectations(metadataMgr) {
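
Usage sketch (not part of the committed diff): CatalogLevelTest above creates the hive-backed catalog with "starrocks.catalog-type" = "hive"; for the custom-catalog path the equivalent DDL would name an implementation class through "iceberg.catalog-impl". The catalog-type value "custom", the class name, and the extra property below are assumptions for illustration only; this patch itself only shows the "hive" string and the CUSTOM_CATALOG enum constant.

    CREATE EXTERNAL CATALOG iceberg_custom_catalog PROPERTIES(
        "type" = "iceberg",
        "starrocks.catalog-type" = "custom",
        "iceberg.catalog-impl" = "com.example.MyIcebergCatalog",
        "custom-key" = "custom-value"
    );

Per IcebergMetadata's new constructor, "starrocks.catalog-type" and "iceberg.catalog-impl" are stripped and the remaining pairs (here "custom-key") are handed to the implementation as customProperties; the class is loaded via IcebergUtil.getIcebergCustomCatalog and must implement the extended IcebergCatalog interface (listAllDatabases, getDB, listTables).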