[BugFix] fix resource name from stmt property instead of catalog recast (#34844)

Signed-off-by: yanz <dirtysalt1987@gmail.com>
dirtysalt authored Nov 14, 2023
1 parent 7c6884e commit d7be916
Showing 6 changed files with 143 additions and 33 deletions.
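In brief: `createTable` previously copied every field, including the resource name, from a catalog table object that may have been resolved through a resource-mapping catalog, so the stored resource name could reflect that recast rather than the `resource` property the user wrote in the CREATE EXTERNAL TABLE statement. The new `copyFromCatalogTable` helpers read the resource name from the statement's `properties` map instead. A minimal standalone sketch of the difference (illustrative only; the class, method, and value names below are assumptions, not StarRocks APIs):

    import java.util.HashMap;
    import java.util.Map;

    // Standalone sketch (not StarRocks code): contrasts the two ways of
    // picking a resource name when rebuilding an external table.
    public class ResourceNameSketch {
        // Mirrors the ExternalTableFactory.RESOURCE property key.
        static final String RESOURCE = "resource";

        // Old behavior: trust the resource name carried by the catalog table
        // object, which a resource-mapping catalog recast may have rewritten.
        static String fromCatalogTable(String catalogTableResourceName) {
            return catalogTableResourceName;
        }

        // New behavior (what copyFromCatalogTable does): read the resource
        // name the user actually supplied in the statement properties.
        static String fromStmtProperties(Map<String, String> stmtProperties) {
            return stmtProperties.get(RESOURCE);
        }

        public static void main(String[] args) {
            Map<String, String> stmtProperties = new HashMap<>();
            stmtProperties.put(RESOURCE, "hive0"); // what the user wrote in the DDL

            // Hypothetical recast name carried by the catalog table object.
            String recastName = "hive0_mapping_catalog_resource";

            System.out.println(fromCatalogTable(recastName));       // hive0_mapping_catalog_resource
            System.out.println(fromStmtProperties(stmtProperties)); // hive0
        }
    }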
28 changes: 17 additions & 11 deletions fe/fe-core/src/main/java/com/starrocks/server/HiveTableFactory.java
@@ -39,6 +39,21 @@ private HiveTableFactory() {

}

+    public static void copyFromCatalogTable(HiveTable.Builder tableBuilder, HiveTable catalogTable,
+                                            Map<String, String> properties) {
+        tableBuilder
+                .setCatalogName(catalogTable.getCatalogName())
+                .setResourceName(properties.get(RESOURCE))
+                .setHiveDbName(catalogTable.getDbName())
+                .setHiveTableName(catalogTable.getTableName())
+                .setPartitionColumnNames(catalogTable.getPartitionColumnNames())
+                .setDataColumnNames(catalogTable.getDataColumnNames())
+                .setTableLocation(catalogTable.getTableLocation())
+                .setStorageFormat(catalogTable.getStorageFormat())
+                .setCreateTime(catalogTable.getCreateTime())
+                .setProperties(catalogTable.getProperties());
+    }
+
@Override
@NotNull
public Table createTable(LocalMetastore metastore, Database database, CreateTableStmt stmt) throws DdlException {
@@ -60,17 +75,8 @@ public Table createTable(LocalMetastore metastore, Database database, CreateTableStmt stmt) throws DdlException {
HiveTable.Builder tableBuilder = HiveTable.builder()
.setId(tableId)
.setTableName(tableName)
-                .setCatalogName(oHiveTable.getCatalogName())
-                .setResourceName(oHiveTable.getResourceName())
-                .setHiveDbName(oHiveTable.getDbName())
-                .setHiveTableName(oHiveTable.getTableName())
-                .setPartitionColumnNames(oHiveTable.getPartitionColumnNames())
-                .setDataColumnNames(oHiveTable.getDataColumnNames())
-                .setFullSchema(columns)
-                .setTableLocation(oHiveTable.getTableLocation())
-                .setStorageFormat(oHiveTable.getStorageFormat())
-                .setCreateTime(oHiveTable.getCreateTime())
-                .setProperties(oHiveTable.getProperties());
+                .setFullSchema(columns);
+        copyFromCatalogTable(tableBuilder, oHiveTable, properties);

HiveTable hiveTable = tableBuilder.build();

23 changes: 13 additions & 10 deletions fe/fe-core/src/main/java/com/starrocks/server/HudiTableFactory.java
@@ -47,6 +47,17 @@ private HudiTableFactory() {

}

+    public static void copyFromCatalogTable(HudiTable.Builder builder, HudiTable catalogTable, Map<String, String> properties) {
+        builder.setCatalogName(catalogTable.getCatalogName())
+                .setResourceName(properties.get(RESOURCE))
+                .setHiveDbName(catalogTable.getDbName())
+                .setHiveTableName(catalogTable.getTableName())
+                .setPartitionColNames(catalogTable.getPartitionColumnNames())
+                .setDataColNames(catalogTable.getDataColumnNames())
+                .setHudiProperties(catalogTable.getProperties())
+                .setCreateTime(catalogTable.getCreateTime());
+    }
+
@Override
@NotNull
public Table createTable(LocalMetastore metastore, Database database, CreateTableStmt stmt) throws DdlException {
@@ -79,16 +90,8 @@ public Table createTable(LocalMetastore metastore, Database database, CreateTableStmt stmt) throws DdlException {
HudiTable.Builder tableBuilder = HudiTable.builder()
.setId(tableId)
.setTableName(tableName)
-                .setCatalogName(oHudiTable.getCatalogName())
-                .setResourceName(oHudiTable.getResourceName())
-                .setHiveDbName(oHudiTable.getDbName())
-                .setHiveTableName(oHudiTable.getTableName())
-                .setFullSchema(columns)
-                .setPartitionColNames(oHudiTable.getPartitionColumnNames())
-                .setDataColNames(oHudiTable.getDataColumnNames())
-                .setHudiProperties(oHudiTable.getProperties())
-                .setCreateTime(oHudiTable.getCreateTime());
-
+                .setFullSchema(columns);
+        copyFromCatalogTable(tableBuilder, oHudiTable, properties);
HudiTable hudiTable = tableBuilder.build();

// partition key, commented for show partition key
fe/fe-core/src/main/java/com/starrocks/server/IcebergTableFactory.java
@@ -40,6 +40,16 @@ private IcebergTableFactory() {

}

+    public static void copyFromCatalogTable(IcebergTable.Builder tableBuilder, IcebergTable catalogTable,
+                                            Map<String, String> properties) {
+        tableBuilder.setCatalogName(catalogTable.getCatalogName())
+                .setResourceName(properties.get(RESOURCE))
+                .setRemoteDbName(catalogTable.getRemoteDbName())
+                .setRemoteTableName(catalogTable.getRemoteTableName())
+                .setIcebergProperties(properties)
+                .setNativeTable(catalogTable.getNativeTable());
+    }
+
@Override
@NotNull
public Table createTable(LocalMetastore metastore, Database database, CreateTableStmt stmt) throws DdlException {
@@ -63,13 +73,9 @@ public Table createTable(LocalMetastore metastore, Database database, CreateTableStmt stmt) throws DdlException {
IcebergTable.Builder tableBuilder = IcebergTable.builder()
.setId(tableId)
.setSrTableName(tableName)
-                .setCatalogName(oIcebergTable.getCatalogName())
-                .setResourceName(oIcebergTable.getResourceName())
-                .setRemoteDbName(oIcebergTable.getRemoteDbName())
-                .setRemoteTableName(oIcebergTable.getRemoteTableName())
-                .setFullSchema(columns)
-                .setIcebergProperties(properties)
-                .setNativeTable(oIcebergTable.getNativeTable());
+                .setFullSchema(columns);
+
+        copyFromCatalogTable(tableBuilder, oIcebergTable, properties);

IcebergTable icebergTable = tableBuilder.build();

fe/fe-core/src/test/java/com/starrocks/catalog/HiveTableTest.java
@@ -44,6 +44,7 @@
import com.starrocks.connector.hive.HiveStorageFormat;
import com.starrocks.qe.ConnectContext;
import com.starrocks.server.GlobalStateMgr;
+import com.starrocks.server.HiveTableFactory;
import com.starrocks.server.MetadataMgr;
import com.starrocks.server.TableFactoryProvider;
import com.starrocks.sql.ast.CreateTableStmt;
@@ -62,10 +63,13 @@
import org.junit.Test;

import java.util.ArrayList;
+import java.util.HashMap;
import java.util.List;
+import java.util.Map;

import static com.starrocks.connector.hive.HiveClassNames.MAPRED_PARQUET_INPUT_FORMAT_CLASS;
import static com.starrocks.server.CatalogMgr.ResourceMappingCatalog.getResourceMappingCatalogName;
+import static com.starrocks.server.ExternalTableFactory.RESOURCE;

public class HiveTableTest {
private static ConnectContext connectContext;
@@ -211,7 +215,7 @@ public void testHasBoolPartitionColumn() {
HiveTable oTable = HiveMetastoreApiConverter.toHiveTable(msTable, getResourceMappingCatalogName("hive0", "hive"));
Assert.assertFalse(oTable.hasBooleanTypePartitionColumn());
}

// create a hive table with specific storage format
private HiveTable createExternalTableByFormat(String format) throws Exception {
String inputFormatClass = HiveStorageFormat.get(format).getInputFormat();
@@ -292,4 +296,31 @@ public void testCreateExternalTableWithStorageFormat(@Mocked MetadataMgr metadataMgr)
}
}

+    @Test
+    public void testCreateTableResourceName() throws DdlException {
+
+        String resourceName = "Hive_resource_29bb53dc_7e04_11ee_9b35_00163e0e489a";
+        Map<String, String> properties = new HashMap() {
+            {
+                put(RESOURCE, resourceName);
+            }
+        };
+        HiveTable.Builder tableBuilder = HiveTable.builder()
+                .setId(1000)
+                .setTableName("supplier")
+                .setCatalogName("hice_catalog")
+                .setHiveDbName("hive_oss_tpch_1g_parquet_gzip")
+                .setHiveTableName("supplier")
+                .setResourceName(resourceName)
+                .setTableLocation("")
+                .setFullSchema(new ArrayList<>())
+                .setDataColumnNames(new ArrayList<>())
+                .setPartitionColumnNames(Lists.newArrayList())
+                .setStorageFormat(null);
+        HiveTable oTable = tableBuilder.build();
+        HiveTable.Builder newBuilder = HiveTable.builder();
+        HiveTableFactory.copyFromCatalogTable(newBuilder, oTable, properties);
+        HiveTable table = newBuilder.build();
+        Assert.assertEquals(table.getResourceName(), resourceName);
+    }
}
38 changes: 34 additions & 4 deletions fe/fe-core/src/test/java/com/starrocks/catalog/HudiTableTest.java
@@ -12,7 +12,6 @@
// See the License for the specific language governing permissions and
// limitations under the License.

-
package com.starrocks.catalog;

import com.google.common.collect.ImmutableList;
@@ -28,6 +27,7 @@
import com.starrocks.credential.CloudConfiguration;
import com.starrocks.qe.ConnectContext;
import com.starrocks.server.GlobalStateMgr;
+import com.starrocks.server.HudiTableFactory;
import com.starrocks.server.MetadataMgr;
import com.starrocks.server.TableFactoryProvider;
import com.starrocks.sql.ast.CreateTableStmt;
@@ -44,9 +44,13 @@
import org.junit.Before;
import org.junit.Test;

+import java.util.ArrayList;
+import java.util.HashMap;
import java.util.List;
+import java.util.Map;

+import static com.starrocks.server.ExternalTableFactory.RESOURCE;

public class HudiTableTest {
private static ConnectContext connectContext;
private static StarRocksAssert starRocksAssert;
@@ -123,8 +127,6 @@ public void testCreateExternalTable(@Mocked MetadataMgr metadataMgr) throws Exception {
Assert.fail("No exception throws.");
}

-
-
@Test(expected = DdlException.class)
public void testNoDb() throws Exception {
String createTableSql = "create external table db.hudi_tbl (col1 int, col2 int) engine=hudi properties " +
@@ -188,7 +190,7 @@ public void testColumnTypeConvert() {
Assert.assertEquals(ColumnTypeConverter.fromHudiType(Schema.create(Schema.Type.STRING)),
ScalarType.createDefaultCatalogString());
Assert.assertEquals(ColumnTypeConverter.fromHudiType(
-                Schema.createArray(Schema.create(Schema.Type.INT))),
+                        Schema.createArray(Schema.create(Schema.Type.INT))),
new ArrayType(ScalarType.createType(PrimitiveType.INT)));
Assert.assertEquals(ColumnTypeConverter.fromHudiType(
Schema.createFixed("FIXED", "FIXED", "F", 1)),
@@ -249,4 +251,32 @@ public void testToThrift(
Assert.assertEquals("db0", tTableDescriptor.getDbName());
Assert.assertEquals("table0", tTableDescriptor.getTableName());
}
+
+    @Test
+    public void testCreateTableResourceName() throws DdlException {
+        String resourceName = "Hudi_resource_29bb53dc_7e04_11ee_9b35_00163e0e489a";
+        Map<String, String> properties = new HashMap() {
+            {
+                put(RESOURCE, resourceName);
+            }
+        };
+        HudiTable.Builder tableBuilder = HudiTable.builder()
+                .setId(1000)
+                .setTableName("supplier")
+                .setCatalogName("hudi_catalog")
+                .setHiveDbName("hudi_oss_tpch_1g_parquet_gzip")
+                .setHiveTableName("supplier")
+                .setResourceName(resourceName)
+                .setFullSchema(new ArrayList<>())
+                .setDataColNames(new ArrayList<>())
+                .setPartitionColNames(Lists.newArrayList())
+                .setCreateTime(10)
+                .setHudiProperties(new HashMap<>());
+        HudiTable oTable = tableBuilder.build();
+
+        HudiTable.Builder newBuilder = HudiTable.builder();
+        HudiTableFactory.copyFromCatalogTable(newBuilder, oTable, properties);
+        HudiTable table = newBuilder.build();
+        Assert.assertEquals(table.getResourceName(), resourceName);
+    }
}
fe/fe-core/src/test/java/com/starrocks/catalog/IcebergTableTest.java
@@ -19,11 +19,18 @@
import com.starrocks.common.DdlException;
import com.starrocks.connector.iceberg.TableTestBase;
import com.starrocks.server.IcebergTableFactory;
+import mockit.Mocked;
import org.apache.iceberg.Table;
import org.junit.Assert;
import org.junit.Test;

+import java.util.ArrayList;
+import java.util.HashMap;
import java.util.List;
+import java.util.Map;

import static com.starrocks.catalog.Type.INT;
+import static com.starrocks.server.ExternalTableFactory.RESOURCE;

public class IcebergTableTest extends TableTestBase {

@@ -35,4 +42,31 @@ public void testValidateIcebergColumnType() throws DdlException {
List<Column> inputColumns = Lists.newArrayList(new Column("k1", INT, true));
IcebergTableFactory.validateIcebergColumnType(inputColumns, oTable);
}
+
+    @Test
+    public void testCreateTableResourceName(@Mocked Table icebergNativeTable) throws DdlException {
+
+        String resourceName = "Iceberg_resource_29bb53dc_7e04_11ee_9b35_00163e0e489a";
+        Map<String, String> properties = new HashMap() {
+            {
+                put(RESOURCE, resourceName);
+            }
+        };
+
+        IcebergTable.Builder tableBuilder = IcebergTable.builder()
+                .setId(1000)
+                .setSrTableName("supplier")
+                .setCatalogName("iceberg_catalog")
+                .setRemoteDbName("iceberg_oss_tpch_1g_parquet_gzip")
+                .setRemoteTableName("supplier")
+                .setResourceName(resourceName)
+                .setFullSchema(new ArrayList<>())
+                .setNativeTable(icebergNativeTable)
+                .setIcebergProperties(new HashMap<>());
+        IcebergTable oTable = tableBuilder.build();
+        IcebergTable.Builder newBuilder = IcebergTable.builder();
+        IcebergTableFactory.copyFromCatalogTable(newBuilder, oTable, properties);
+        IcebergTable table = newBuilder.build();
+        Assert.assertEquals(table.getResourceName(), resourceName);
+    }
}
