From 5217f63283c7cacbb39107fe363cb3039d78be02 Mon Sep 17 00:00:00 2001
From: Martin Traverso
Date: Sun, 13 Oct 2019 15:35:11 -0700
Subject: [PATCH] Remove usage of TypeSignature in Hive connector

---
 presto-hive/pom.xml                            |  5 ++
 .../plugin/hive/GenericHiveRecordCursor.java   |  6 +-
 .../hive/GenericHiveRecordCursorProvider.java  |  3 +-
 .../plugin/hive/HiveColumnHandle.java          | 31 ++++-----
 .../prestosql/plugin/hive/HiveMetadata.java    | 31 +++++----
 .../io/prestosql/plugin/hive/HiveModule.java   | 29 ++++++++
 .../prestosql/plugin/hive/HivePageSink.java    | 10 +--
 .../plugin/hive/HivePageSinkProvider.java      |  1 -
 .../prestosql/plugin/hive/HivePageSource.java  |  6 +-
 .../plugin/hive/HivePageSourceProvider.java    | 11 ++-
 .../plugin/hive/HivePartitionManager.java      |  4 +-
 .../plugin/hive/HiveRecordCursor.java          |  5 +-
 .../plugin/hive/HiveWriterFactory.java         |  2 +-
 .../plugin/hive/orc/OrcPageSourceFactory.java  | 14 ++--
 .../parquet/ParquetPageSourceFactory.java      |  8 +--
 .../plugin/hive/rcfile/RcFilePageSource.java   | 13 ++--
 .../hive/rcfile/RcFilePageSourceFactory.java   |  2 +-
 .../hive/s3select/S3SelectRecordCursor.java    |  2 +-
 .../plugin/hive/util/HiveBucketing.java        |  5 +-
 .../prestosql/plugin/hive/util/HiveUtil.java   | 15 +++--
 .../plugin/hive/AbstractTestHive.java          | 10 +--
 .../hive/AbstractTestHiveFileFormats.java      |  2 +-
 .../prestosql/plugin/hive/HiveTestUtils.java   |  4 +-
 .../hive/TestBackgroundHiveSplitLoader.java    |  5 +-
 .../plugin/hive/TestHiveColumnHandle.java      | 15 +++--
 .../plugin/hive/TestHiveFileFormats.java       | 12 ++--
 .../hive/TestHiveIntegrationSmokeTest.java     | 21 ++++--
 .../plugin/hive/TestHiveMetadata.java          |  2 +-
 .../plugin/hive/TestHivePageSink.java          |  2 +-
 .../prestosql/plugin/hive/TestHiveSplit.java   | 12 +++-
 .../plugin/hive/TestIonSqlQueryBuilder.java    | 22 +++---
 .../hive/TestOrcPageSourceMemoryTracking.java  |  4 +-
 .../plugin/hive/benchmark/FileFormat.java      |  6 +-
 .../predicate/TestParquetPredicateUtils.java   | 22 +++---
 .../s3select/TestS3SelectRecordCursor.java     |  8 +--
 .../TestMetastoreHiveStatisticsProvider.java   |  8 +--
 .../plugin/iceberg/ExpressionConverter.java    | 67 ++++++++---------
 .../plugin/iceberg/IcebergMetadata.java        |  2 +-
 .../plugin/iceberg/IcebergPageSink.java        |  6 +-
 .../iceberg/IcebergPageSourceProvider.java     |  2 +-
 .../plugin/iceberg/IcebergSplitManager.java    |  5 +-
 .../prestosql/plugin/iceberg/IcebergUtil.java  |  4 +-
 42 files changed, 243 insertions(+), 201 deletions(-)

diff --git a/presto-hive/pom.xml b/presto-hive/pom.xml
index 276e72afc5ea..1f8af91ddd34 100644
--- a/presto-hive/pom.xml
+++ b/presto-hive/pom.xml
@@ -185,6 +185,11 @@
             <artifactId>javax.inject</artifactId>
         </dependency>
 
+        <dependency>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-databind</artifactId>
+        </dependency>
+
         <dependency>
             <groupId>io.airlift</groupId>
diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/GenericHiveRecordCursor.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/GenericHiveRecordCursor.java
index c954c94a51c8..bd8b0fb9930d 100644
--- a/presto-hive/src/main/java/io/prestosql/plugin/hive/GenericHiveRecordCursor.java
+++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/GenericHiveRecordCursor.java
@@ -22,7 +22,6 @@
 import io.prestosql.spi.type.DecimalType;
 import io.prestosql.spi.type.Decimals;
 import io.prestosql.spi.type.Type;
-import io.prestosql.spi.type.TypeManager;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
@@ -121,8 +120,7 @@ public GenericHiveRecordCursor(
             long totalBytes,
             Properties splitSchema,
             List<HiveColumnHandle> columns,
-            DateTimeZone hiveStorageTimeZone,
-            TypeManager typeManager)
+            DateTimeZone hiveStorageTimeZone)
     {
         requireNonNull(path, "path is null");
         requireNonNull(recordReader, "recordReader is null");
@@ -162,7 +160,7 @@ public GenericHiveRecordCursor(
             HiveColumnHandle column = columns.get(i);
             checkState(column.getColumnType() == REGULAR, "column type must be regular");
 
-            types[i] = typeManager.getType(column.getTypeSignature());
+            types[i] = column.getType();
             hiveTypes[i] = column.getHiveType();
 
             StructField field = rowInspector.getStructFieldRef(column.getName());
diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/GenericHiveRecordCursorProvider.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/GenericHiveRecordCursorProvider.java
index 13333bcdf823..52baea41cfb7 100644
--- a/presto-hive/src/main/java/io/prestosql/plugin/hive/GenericHiveRecordCursorProvider.java
+++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/GenericHiveRecordCursorProvider.java
@@ -93,8 +93,7 @@ public Optional createRecordCursor(
                     length,
                     schema,
                     columns,
-                    hiveStorageTimeZone,
-                    typeManager));
+                    hiveStorageTimeZone));
         });
     }
diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveColumnHandle.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveColumnHandle.java
index c01c5f078371..ecf9a27dc2ea 100644
--- a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveColumnHandle.java
+++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveColumnHandle.java
@@ -17,8 +17,7 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import io.prestosql.spi.connector.ColumnHandle;
 import io.prestosql.spi.connector.ColumnMetadata;
-import io.prestosql.spi.type.TypeManager;
-import io.prestosql.spi.type.TypeSignature;
+import io.prestosql.spi.type.Type;
 
 import java.util.Objects;
 import java.util.Optional;
@@ -30,6 +29,8 @@
 import static io.prestosql.plugin.hive.HiveType.HIVE_LONG;
 import static io.prestosql.plugin.hive.HiveType.HIVE_STRING;
 import static io.prestosql.spi.type.BigintType.BIGINT;
+import static io.prestosql.spi.type.IntegerType.INTEGER;
+import static io.prestosql.spi.type.VarcharType.VARCHAR;
 import static java.util.Objects.requireNonNull;
 
 public class HiveColumnHandle
@@ -38,22 +39,22 @@ public class HiveColumnHandle
     public static final int PATH_COLUMN_INDEX = -11;
     public static final String PATH_COLUMN_NAME = "$path";
     public static final HiveType PATH_HIVE_TYPE = HIVE_STRING;
-    public static final TypeSignature PATH_TYPE_SIGNATURE = PATH_HIVE_TYPE.getTypeSignature();
+    public static final Type PATH_TYPE = VARCHAR;
 
     public static final int BUCKET_COLUMN_INDEX = -12;
     public static final String BUCKET_COLUMN_NAME = "$bucket";
     public static final HiveType BUCKET_HIVE_TYPE = HIVE_INT;
-    public static final TypeSignature BUCKET_TYPE_SIGNATURE = BUCKET_HIVE_TYPE.getTypeSignature();
+    public static final Type BUCKET_TYPE_SIGNATURE = INTEGER;
 
     public static final int FILE_SIZE_COLUMN_INDEX = -13;
     public static final String FILE_SIZE_COLUMN_NAME = "$file_size";
     public static final HiveType FILE_SIZE_TYPE = HIVE_LONG;
-    public static final TypeSignature FILE_SIZE_TYPE_SIGNATURE = FILE_SIZE_TYPE.getTypeSignature();
+    public static final Type FILE_SIZE_TYPE_SIGNATURE = BIGINT;
 
     public static final int FILE_MODIFIED_TIME_COLUMN_INDEX = -14;
     public static final String FILE_MODIFIED_TIME_COLUMN_NAME = "$file_modified_time";
     public static final HiveType FILE_MODIFIED_TIME_TYPE = HIVE_LONG;
-    public static final TypeSignature FILE_MODIFIED_TIME_TYPE_SIGNATURE = FILE_SIZE_TYPE.getTypeSignature();
+    public static final Type FILE_MODIFIED_TIME_TYPE_SIGNATURE = BIGINT;
 
     private static final String UPDATE_ROW_ID_COLUMN_NAME = "$shard_row_id";
@@ -66,7 +67,7 @@ public enum ColumnType
 
     private final String name;
     private final HiveType hiveType;
-    private final TypeSignature typeName;
+    private final Type type;
     private final int hiveColumnIndex;
     private final ColumnType columnType;
     private final Optional<String> comment;
@@ -75,7 +76,7 @@ public enum ColumnType
     public HiveColumnHandle(
             @JsonProperty("name") String name,
             @JsonProperty("hiveType") HiveType hiveType,
-            @JsonProperty("typeSignature") TypeSignature typeSignature,
+            @JsonProperty("type") Type type,
             @JsonProperty("hiveColumnIndex") int hiveColumnIndex,
             @JsonProperty("columnType") ColumnType columnType,
             @JsonProperty("comment") Optional<String> comment)
@@ -84,7 +85,7 @@ public HiveColumnHandle(
         checkArgument(hiveColumnIndex >= 0 || columnType == PARTITION_KEY || columnType == SYNTHESIZED, "hiveColumnIndex is negative");
         this.hiveColumnIndex = hiveColumnIndex;
         this.hiveType = requireNonNull(hiveType, "hiveType is null");
-        this.typeName = requireNonNull(typeSignature, "type is null");
+        this.type = requireNonNull(type, "type is null");
         this.columnType = requireNonNull(columnType, "columnType is null");
         this.comment = requireNonNull(comment, "comment is null");
     }
@@ -117,9 +118,9 @@ public boolean isHidden()
         return columnType == SYNTHESIZED;
     }
 
-    public ColumnMetadata getColumnMetadata(TypeManager typeManager)
+    public ColumnMetadata getColumnMetadata()
     {
-        return new ColumnMetadata(name, typeManager.getType(typeName), null, isHidden());
+        return new ColumnMetadata(name, type, null, isHidden());
     }
 
     @JsonProperty
@@ -129,9 +130,9 @@ public Optional getComment()
     }
 
     @JsonProperty
-    public TypeSignature getTypeSignature()
+    public Type getType()
     {
-        return typeName;
+        return type;
     }
 
     @JsonProperty
@@ -177,12 +178,12 @@ public static HiveColumnHandle updateRowIdHandle()
         // plan-time support for row-by-row delete so that planning doesn't fail. This is why we need
         // rowid handle. Note that in Hive connector, rowid handle is not implemented beyond plan-time.
-        return new HiveColumnHandle(UPDATE_ROW_ID_COLUMN_NAME, HIVE_LONG, BIGINT.getTypeSignature(), -1, SYNTHESIZED, Optional.empty());
+        return new HiveColumnHandle(UPDATE_ROW_ID_COLUMN_NAME, HIVE_LONG, BIGINT, -1, SYNTHESIZED, Optional.empty());
     }
 
     public static HiveColumnHandle pathColumnHandle()
     {
-        return new HiveColumnHandle(PATH_COLUMN_NAME, PATH_HIVE_TYPE, PATH_TYPE_SIGNATURE, PATH_COLUMN_INDEX, SYNTHESIZED, Optional.empty());
+        return new HiveColumnHandle(PATH_COLUMN_NAME, PATH_HIVE_TYPE, PATH_TYPE, PATH_COLUMN_INDEX, SYNTHESIZED, Optional.empty());
     }
 
     /**
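
The HiveColumnHandle hunks above are the heart of the patch: the handle now carries a resolved Type instead of a TypeSignature. A minimal sketch of what that means for callers (ColumnHandleDemo and the "row_count" column name are illustrative only, not part of the patch; the constructor and accessors are the ones shown in the hunks above):

    import io.prestosql.plugin.hive.HiveColumnHandle;
    import io.prestosql.plugin.hive.HiveType;
    import io.prestosql.spi.connector.ColumnMetadata;
    import io.prestosql.spi.type.Type;

    import java.util.Optional;

    import static io.prestosql.spi.type.BigintType.BIGINT;

    public class ColumnHandleDemo
    {
        public static void main(String[] args)
        {
            // Previously: new HiveColumnHandle(name, HIVE_LONG, BIGINT.getTypeSignature(), ...)
            // followed by handle.getColumnMetadata(typeManager). The Type now travels
            // with the handle, so no TypeManager is needed at the point of use:
            HiveColumnHandle handle = new HiveColumnHandle(
                    "row_count", HiveType.HIVE_LONG, BIGINT, 0, HiveColumnHandle.ColumnType.REGULAR, Optional.empty());

            Type type = handle.getType();                          // no TypeManager lookup
            ColumnMetadata metadata = handle.getColumnMetadata();  // no TypeManager argument
        }
    }
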
diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveMetadata.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveMetadata.java
index e0e7856f61e0..f3f192227548 100644
--- a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveMetadata.java
+++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveMetadata.java
@@ -339,8 +339,8 @@ public HiveTableHandle getTableHandle(ConnectorSession session, SchemaTableName
                 tableName.getSchemaName(),
                 tableName.getTableName(),
                 table.get().getParameters(),
-                getPartitionKeyColumnHandles(table.get()),
-                getHiveBucketHandle(table.get()));
+                getPartitionKeyColumnHandles(table.get(), typeManager),
+                getHiveBucketHandle(table.get(), typeManager));
     }
 
     @Override
@@ -418,14 +418,13 @@ private Optional getPartitionsSystemTable(ConnectorSession session,
         }
 
         List<Type> partitionColumnTypes = partitionColumns.stream()
-                .map(HiveColumnHandle::getTypeSignature)
-                .map(typeManager::getType)
+                .map(HiveColumnHandle::getType)
                 .collect(toImmutableList());
 
         List<ColumnMetadata> partitionSystemTableColumns = partitionColumns.stream()
                 .map(column -> new ColumnMetadata(
                         column.getName(),
-                        typeManager.getType(column.getTypeSignature()),
+                        column.getType(),
                         column.getComment().orElse(null),
                         column.isHidden()))
                 .collect(toImmutableList());
@@ -482,9 +481,9 @@ private ConnectorTableMetadata doGetTableMetadata(ConnectorSession session, Sche
             throw new TableNotFoundException(tableName);
         }
 
-        Function<HiveColumnHandle, ColumnMetadata> metadataGetter = columnMetadataGetter(table.get(), typeManager);
+        Function<HiveColumnHandle, ColumnMetadata> metadataGetter = columnMetadataGetter(table.get());
         ImmutableList.Builder<ColumnMetadata> columns = ImmutableList.builder();
-        for (HiveColumnHandle columnHandle : hiveColumnHandles(table.get())) {
+        for (HiveColumnHandle columnHandle : hiveColumnHandles(table.get(), typeManager)) {
             columns.add(metadataGetter.apply(columnHandle));
         }
 
@@ -620,7 +619,7 @@ public Map getColumnHandles(ConnectorSession session, Conn
         SchemaTableName tableName = ((HiveTableHandle) tableHandle).getSchemaTableName();
         Table table = metastore.getTable(new HiveIdentity(session), tableName.getSchemaName(), tableName.getTableName())
                 .orElseThrow(() -> new TableNotFoundException(tableName));
-        return hiveColumnHandles(table).stream()
+        return hiveColumnHandles(table, typeManager).stream()
                 .collect(toImmutableMap(HiveColumnHandle::getName, identity()));
     }
 
@@ -684,7 +683,7 @@ private List listTables(ConnectorSession session, SchemaTablePr
     @Override
     public ColumnMetadata getColumnMetadata(ConnectorSession session, ConnectorTableHandle tableHandle, ColumnHandle columnHandle)
     {
-        return ((HiveColumnHandle) columnHandle).getColumnMetadata(typeManager);
+        return ((HiveColumnHandle) columnHandle).getColumnMetadata();
     }
 
     @Override
@@ -1087,7 +1086,7 @@ public void finishStatisticsCollection(ConnectorSession session, ConnectorTableH
         List<String> partitionColumnNames = partitionColumns.stream()
                 .map(Column::getName)
                 .collect(toImmutableList());
-        List<HiveColumnHandle> hiveColumnHandles = hiveColumnHandles(table);
+        List<HiveColumnHandle> hiveColumnHandles = hiveColumnHandles(table, typeManager);
         Map<String, Type> columnTypes = hiveColumnHandles.stream()
                 .filter(columnHandle -> !columnHandle.isHidden())
                 .collect(toImmutableMap(HiveColumnHandle::getName, column -> column.getHiveType().getType(typeManager)));
@@ -1115,7 +1114,7 @@ public void finishStatisticsCollection(ConnectorSession session, ConnectorTableH
         Map<String, Set<ColumnStatisticType>> columnStatisticTypes = hiveColumnHandles.stream()
                 .filter(columnHandle -> !partitionColumnNames.contains(columnHandle.getName()))
                 .filter(column -> !column.isHidden())
-                .collect(toImmutableMap(HiveColumnHandle::getName, column -> ImmutableSet.copyOf(metastore.getSupportedColumnStatistics(typeManager.getType(column.getTypeSignature())))));
+                .collect(toImmutableMap(HiveColumnHandle::getName, column -> ImmutableSet.copyOf(metastore.getSupportedColumnStatistics(column.getType()))));
         Supplier<PartitionStatistics> emptyPartitionStatistics = Suppliers.memoize(() -> createEmptyPartitionStatistics(columnTypes, columnStatisticTypes));
 
         int usedComputedStatistics = 0;
@@ -1389,7 +1388,7 @@ public HiveInsertTableHandle beginInsert(ConnectorSession session, ConnectorTabl
             }
         }
 
-        List<HiveColumnHandle> handles = hiveColumnHandles(table).stream()
+        List<HiveColumnHandle> handles = hiveColumnHandles(table, typeManager).stream()
                 .filter(columnHandle -> !columnHandle.isHidden())
                 .collect(toList());
 
@@ -1958,7 +1957,7 @@ public Optional getInsertLayout(ConnectorSession sessio
             }
         }
 
-        Optional<HiveBucketHandle> hiveBucketHandle = getHiveBucketHandle(table);
+        Optional<HiveBucketHandle> hiveBucketHandle = getHiveBucketHandle(table, typeManager);
         if (!hiveBucketHandle.isPresent()) {
             return Optional.empty();
         }
@@ -2206,7 +2205,7 @@ else if (column.isHidden()) {
             columnHandles.add(new HiveColumnHandle(
                     column.getName(),
                     toHiveType(typeTranslator, column.getType()),
-                    column.getType().getTypeSignature(),
+                    column.getType(),
                     ordinal,
                     columnType,
                     Optional.ofNullable(column.getComment())));
@@ -2236,7 +2235,7 @@ private static void validateCsvColumns(ConnectorTableMetadata tableMetadata)
         }
     }
 
-    private static Function<HiveColumnHandle, ColumnMetadata> columnMetadataGetter(Table table, TypeManager typeManager)
+    private static Function<HiveColumnHandle, ColumnMetadata> columnMetadataGetter(Table table)
     {
         ImmutableList.Builder<String> columnNames = ImmutableList.builder();
         table.getPartitionColumns().stream().map(Column::getName).forEach(columnNames::add);
@@ -2269,7 +2268,7 @@ private static Function columnMetadataGetter(T
 
         return handle -> new ColumnMetadata(
                 handle.getName(),
-                typeManager.getType(handle.getTypeSignature()),
+                handle.getType(),
                 columnComment.get(handle.getName()).orElse(null),
                 columnExtraInfo(handle.isPartitionKey()),
                 handle.isHidden());
diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveModule.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveModule.java
index 6c53afaff854..5c7b97f1bafb 100644
--- a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveModule.java
+++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveModule.java
@@ -13,6 +13,8 @@
  */
 package io.prestosql.plugin.hive;
 
+import com.fasterxml.jackson.databind.DeserializationContext;
+import com.fasterxml.jackson.databind.deser.std.FromStringDeserializer;
 import com.google.inject.Binder;
 import com.google.inject.Module;
 import com.google.inject.Provides;
@@ -35,7 +37,11 @@
 import io.prestosql.spi.connector.ConnectorPageSinkProvider;
 import io.prestosql.spi.connector.ConnectorPageSourceProvider;
 import io.prestosql.spi.connector.ConnectorSplitManager;
+import io.prestosql.spi.type.Type;
+import io.prestosql.spi.type.TypeId;
+import io.prestosql.spi.type.TypeManager;
 
+import javax.inject.Inject;
 import javax.inject.Singleton;
 
 import java.util.concurrent.ExecutorService;
@@ -45,7 +51,9 @@
 import static com.google.inject.multibindings.Multibinder.newSetBinder;
 import static io.airlift.concurrent.Threads.daemonThreadsNamed;
 import static io.airlift.configuration.ConfigBinder.configBinder;
+import static io.airlift.json.JsonBinder.jsonBinder;
 import static io.airlift.json.JsonCodecBinder.jsonCodecBinder;
+import static java.util.Objects.requireNonNull;
 import static java.util.concurrent.Executors.newCachedThreadPool;
 import static org.weakref.jmx.guice.ExportBinder.newExporter;
@@ -117,6 +125,8 @@ public void configure(Binder binder)
         configBinder(binder).bindConfig(ParquetReaderConfig.class);
         configBinder(binder).bindConfig(ParquetWriterConfig.class);
+
+        jsonBinder(binder).addDeserializerBinding(Type.class).to(TypeDeserializer.class);
     }
 
     @ForHive
@@ -133,4 +143,23 @@ public Function createMet
     {
         return transactionHandle -> ((HiveMetadata) transactionManager.get(transactionHandle)).getMetastore();
     }
+
+    public static final class TypeDeserializer
+            extends FromStringDeserializer<Type>
+    {
+        private final TypeManager typeManager;
+
+        @Inject
+        public TypeDeserializer(TypeManager typeManager)
+        {
+            super(Type.class);
+            this.typeManager = requireNonNull(typeManager, "metadata is null");
+        }
+
+        @Override
+        protected Type _deserialize(String value, DeserializationContext context)
+        {
+            return typeManager.getType(TypeId.of(value));
+        }
+    }
 }
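
The TypeDeserializer just added to HiveModule is what makes Type usable as a JSON field at all. A sketch of the same mechanism wired by hand with plain Jackson instead of the jsonBinder Guice binding (TypeJsonDemo is a hypothetical name; TestingTypeManager is the presto-spi test implementation this patch also uses in its own tests):

    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.module.SimpleModule;
    import io.prestosql.plugin.hive.HiveModule;
    import io.prestosql.spi.type.TestingTypeManager;
    import io.prestosql.spi.type.Type;

    public class TypeJsonDemo
    {
        public static void main(String[] args)
                throws Exception
        {
            // Equivalent of jsonBinder(binder).addDeserializerBinding(Type.class)
            // .to(TypeDeserializer.class), done without Guice: register the
            // deserializer on an ObjectMapper directly.
            SimpleModule module = new SimpleModule();
            module.addDeserializer(Type.class, new HiveModule.TypeDeserializer(new TestingTypeManager()));
            ObjectMapper mapper = new ObjectMapper().registerModule(module);

            // A Type is written as its type id string; reading resolves it back
            // through the TypeManager, e.g. "bigint" -> BigintType.BIGINT.
            Type type = mapper.readValue("\"bigint\"", Type.class);
        }
    }
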
diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HivePageSink.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HivePageSink.java
index 7cb533973a4a..cee60cd8c0c1 100644
--- a/presto-hive/src/main/java/io/prestosql/plugin/hive/HivePageSink.java
+++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HivePageSink.java
@@ -32,7 +32,6 @@
 import io.prestosql.spi.connector.ConnectorPageSink;
 import io.prestosql.spi.connector.ConnectorSession;
 import io.prestosql.spi.type.Type;
-import io.prestosql.spi.type.TypeManager;
 import it.unimi.dsi.fastutil.objects.Object2IntMap;
 import it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap;
 
@@ -93,7 +92,6 @@ public HivePageSink(
             List<HiveColumnHandle> inputColumns,
             Optional<HiveBucketProperty> bucketProperty,
             PageIndexerFactory pageIndexerFactory,
-            TypeManager typeManager,
             HdfsEnvironment hdfsEnvironment,
             int maxOpenWriters,
             ListeningExecutorService writeVerificationExecutor,
@@ -115,8 +113,7 @@ public HivePageSink(
         this.pagePartitioner = new HiveWriterPagePartitioner(
                 inputColumns,
                 bucketProperty.isPresent(),
-                pageIndexerFactory,
-                typeManager);
+                pageIndexerFactory);
 
         // determine the input index of the partition columns and data columns
         // and determine the input index and type of bucketing columns
@@ -401,15 +398,14 @@ private static class HiveWriterPagePartitioner
         public HiveWriterPagePartitioner(
                 List<HiveColumnHandle> inputColumns,
                 boolean bucketed,
-                PageIndexerFactory pageIndexerFactory,
-                TypeManager typeManager)
+                PageIndexerFactory pageIndexerFactory)
         {
             requireNonNull(inputColumns, "inputColumns is null");
             requireNonNull(pageIndexerFactory, "pageIndexerFactory is null");
 
             List<Type> partitionColumnTypes = inputColumns.stream()
                     .filter(HiveColumnHandle::isPartitionKey)
-                    .map(column -> typeManager.getType(column.getTypeSignature()))
+                    .map(HiveColumnHandle::getType)
                     .collect(toList());
 
             if (bucketed) {
diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HivePageSinkProvider.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HivePageSinkProvider.java
index 80edc868040d..1ef7d842ae08 100644
--- a/presto-hive/src/main/java/io/prestosql/plugin/hive/HivePageSinkProvider.java
+++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HivePageSinkProvider.java
@@ -167,7 +167,6 @@ private ConnectorPageSink createPageSink(HiveWritableTableHandle handle, boolean
                 handle.getInputColumns(),
                 handle.getBucketProperty(),
                 pageIndexerFactory,
-                typeManager,
                 hdfsEnvironment,
                 maxOpenPartitions,
                 writeVerificationExecutor,
diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HivePageSource.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HivePageSource.java
index ea86a8339695..1a481de01284 100644
--- a/presto-hive/src/main/java/io/prestosql/plugin/hive/HivePageSource.java
+++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HivePageSource.java
@@ -149,7 +149,7 @@ public HivePageSource(
             HiveColumnHandle column = columnMapping.getHiveColumnHandle();
             String name = column.getName();
-            Type type = typeManager.getType(column.getTypeSignature());
+            Type type = column.getType();
             types[columnIndex] = type;
 
             if (columnMapping.getCoercionFrom().isPresent()) {
@@ -340,8 +340,8 @@ public ConnectorPageSource getPageSource()
 
     private static Function<Block, Block> createCoercer(TypeManager typeManager, HiveType fromHiveType, HiveType toHiveType)
     {
-        Type fromType = typeManager.getType(fromHiveType.getTypeSignature());
-        Type toType = typeManager.getType(toHiveType.getTypeSignature());
+        Type fromType = fromHiveType.getType(typeManager);
+        Type toType = toHiveType.getType(typeManager);
 
         if (toType instanceof VarcharType && (fromHiveType.equals(HIVE_BYTE) || fromHiveType.equals(HIVE_SHORT) || fromHiveType.equals(HIVE_INT) || fromHiveType.equals(HIVE_LONG))) {
             return new IntegerNumberToVarcharCoercer<>(fromType, (VarcharType) toType);
         }
diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HivePageSourceProvider.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HivePageSourceProvider.java
index e2b62f61646d..c6058f382823 100644
--- a/presto-hive/src/main/java/io/prestosql/plugin/hive/HivePageSourceProvider.java
+++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HivePageSourceProvider.java
@@ -191,7 +191,7 @@ public static Optional createHivePageSource(
                         length,
                         fileSize,
                         schema,
-                        toColumnHandles(regularAndInterimColumnMappings, true),
+                        toColumnHandles(regularAndInterimColumnMappings, true, typeManager),
                         effectivePredicate,
                         hiveStorageTimeZone);
                 if (pageSource.isPresent()) {
@@ -217,7 +217,7 @@ public static Optional createHivePageSource(
                         length,
                         fileSize,
                         schema,
-                        toColumnHandles(regularAndInterimColumnMappings, doCoercion),
+                        toColumnHandles(regularAndInterimColumnMappings, doCoercion, typeManager),
                         effectivePredicate,
                         hiveStorageTimeZone,
                         typeManager,
@@ -246,10 +246,9 @@ public static Optional createHivePageSource(
                 HiveRecordCursor hiveRecordCursor = new HiveRecordCursor(
                         columnMappings,
                         hiveStorageTimeZone,
-                        typeManager,
                         delegate);
                 List<Type> columnTypes = hiveColumns.stream()
-                        .map(input -> typeManager.getType(input.getTypeSignature()))
+                        .map(HiveColumnHandle::getType)
                         .collect(toList());
 
                 return Optional.of(new RecordPageSource(columnTypes, hiveRecordCursor));
@@ -379,7 +378,7 @@ public static List extractRegularAndInterimColumnMappings(List
 
-    public static List<HiveColumnHandle> toColumnHandles(List<ColumnMapping> regularColumnMappings, boolean doCoercion)
+    public static List<HiveColumnHandle> toColumnHandles(List<ColumnMapping> regularColumnMappings, boolean doCoercion, TypeManager typeManager)
     {
         return regularColumnMappings.stream()
                 .map(columnMapping -> {
@@ -390,7 +389,7 @@ public static List toColumnHandles(List regular
                     return new HiveColumnHandle(
                             columnHandle.getName(),
                             columnMapping.getCoercionFrom().get(),
-                            columnMapping.getCoercionFrom().get().getTypeSignature(),
+                            columnMapping.getCoercionFrom().get().getType(typeManager),
                             columnHandle.getHiveColumnIndex(),
                             columnHandle.getColumnType(),
                             Optional.empty());
diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HivePartitionManager.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HivePartitionManager.java
index c1de5baf2c93..91349785e9cc 100644
--- a/presto-hive/src/main/java/io/prestosql/plugin/hive/HivePartitionManager.java
+++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HivePartitionManager.java
@@ -148,7 +148,7 @@ public HivePartitionResult getPartitions(SemiTransactionalHiveMetastore metastor
         }
 
         List<Type> partitionTypes = partitionColumns.stream()
-                .map(column -> typeManager.getType(column.getTypeSignature()))
+                .map(HiveColumnHandle::getType)
                 .collect(toList());
 
         Iterable<HivePartition> partitionsIterable;
@@ -186,7 +186,7 @@ public HivePartitionResult getPartitions(ConnectorTableHandle tableHandle, List<
                 .collect(toImmutableList());
 
         List<Type> partitionColumnTypes = partitionColumns.stream()
-                .map(column -> typeManager.getType(column.getTypeSignature()))
+                .map(HiveColumnHandle::getType)
                 .collect(toImmutableList());
 
         List<HivePartition> partitionList = partitionValuesList.stream()
diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveRecordCursor.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveRecordCursor.java
index 7e9525b47f89..58aa7bbd3b1a 100644
--- a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveRecordCursor.java
+++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveRecordCursor.java
@@ -21,7 +21,6 @@
 import io.prestosql.spi.connector.RecordCursor;
 import io.prestosql.spi.type.DecimalType;
 import io.prestosql.spi.type.Type;
-import io.prestosql.spi.type.TypeManager;
 import org.joda.time.DateTimeZone;
 
 import java.util.List;
@@ -78,11 +77,9 @@ public class HiveRecordCursor
     public HiveRecordCursor(
             List<ColumnMapping> columnMappings,
             DateTimeZone hiveStorageTimeZone,
-            TypeManager typeManager,
             RecordCursor delegate)
     {
         requireNonNull(columnMappings, "columns is null");
-        requireNonNull(typeManager, "typeManager is null");
         requireNonNull(hiveStorageTimeZone, "hiveStorageTimeZone is null");
 
         this.delegate = requireNonNull(delegate, "delegate is null");
@@ -107,7 +104,7 @@ public HiveRecordCursor(
             byte[] bytes = columnValue.getBytes(UTF_8);
 
             String name = columnMapping.getHiveColumnHandle().getName();
-            Type type = typeManager.getType(columnMapping.getHiveColumnHandle().getTypeSignature());
+            Type type = columnMapping.getHiveColumnHandle().getType();
             types[columnIndex] = type;
 
             if (HiveUtil.isHiveNull(bytes)) {
diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveWriterFactory.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveWriterFactory.java
index dcb2845e7838..6e252b77138a 100644
--- a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveWriterFactory.java
+++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveWriterFactory.java
@@ -206,7 +206,7 @@ public HiveWriterFactory(
             HiveType hiveType = column.getHiveType();
             if (column.isPartitionKey()) {
                 partitionColumnNames.add(column.getName());
-                partitionColumnTypes.add(typeManager.getType(column.getTypeSignature()));
+                partitionColumnTypes.add(column.getType());
             }
             else {
                 dataColumns.add(new DataColumn(column.getName(), hiveType));
diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/orc/OrcPageSourceFactory.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/orc/OrcPageSourceFactory.java
index dd0440317f1d..2c95ff6d55ee 100644
--- a/presto-hive/src/main/java/io/prestosql/plugin/hive/orc/OrcPageSourceFactory.java
+++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/orc/OrcPageSourceFactory.java
@@ -34,7 +34,6 @@
 import io.prestosql.spi.connector.FixedPageSource;
 import io.prestosql.spi.predicate.TupleDomain;
 import io.prestosql.spi.type.Type;
-import io.prestosql.spi.type.TypeManager;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -76,26 +75,23 @@ public class OrcPageSourceFactory
         implements HivePageSourceFactory
 {
     private static final Pattern DEFAULT_HIVE_COLUMN_NAME_PATTERN = Pattern.compile("_col\\d+");
-    private final TypeManager typeManager;
     private final boolean useOrcColumnNames;
     private final OrcReaderOptions orcReaderOptions;
     private final HdfsEnvironment hdfsEnvironment;
     private final FileFormatDataSourceStats stats;
 
     @Inject
-    public OrcPageSourceFactory(TypeManager typeManager, OrcReaderConfig config, HdfsEnvironment hdfsEnvironment, FileFormatDataSourceStats stats)
+    public OrcPageSourceFactory(OrcReaderConfig config, HdfsEnvironment hdfsEnvironment, FileFormatDataSourceStats stats)
     {
-        this(typeManager, requireNonNull(config, "config is null").isUseColumnNames(), config.toOrcReaderOptions(), hdfsEnvironment, stats);
+        this(requireNonNull(config, "config is null").isUseColumnNames(), config.toOrcReaderOptions(), hdfsEnvironment, stats);
     }
 
     public OrcPageSourceFactory(
-            TypeManager typeManager,
             boolean useOrcColumnNames,
             OrcReaderOptions orcReaderOptions,
             HdfsEnvironment hdfsEnvironment,
             FileFormatDataSourceStats stats)
     {
-        this.typeManager = requireNonNull(typeManager, "typeManager is null");
         this.useOrcColumnNames = useOrcColumnNames;
         this.orcReaderOptions = requireNonNull(orcReaderOptions, "orcReaderOptions is null");
         this.hdfsEnvironment = requireNonNull(hdfsEnvironment, "hdfsEnvironment is null");
@@ -136,7 +132,6 @@ public Optional createPageSource(
                 useOrcColumnNames,
                 effectivePredicate,
                 hiveStorageTimeZone,
-                typeManager,
                 orcReaderOptions
                         .withMaxMergeDistance(getOrcMaxMergeDistance(session))
                         .withMaxBufferSize(getOrcMaxBufferSize(session))
@@ -160,7 +155,6 @@ private static OrcPageSource createOrcPageSource(
             boolean useOrcColumnNames,
             TupleDomain<HiveColumnHandle> effectivePredicate,
             DateTimeZone hiveStorageTimeZone,
-            TypeManager typeManager,
             OrcReaderOptions options,
             FileFormatDataSourceStats stats)
     {
@@ -192,7 +186,7 @@ private static OrcPageSource createOrcPageSource(
             ImmutableList.Builder<ColumnReference<HiveColumnHandle>> columnReferences = ImmutableList.builder();
             for (HiveColumnHandle column : physicalColumns) {
                 if (column.getColumnType() == REGULAR) {
-                    Type type = typeManager.getType(column.getTypeSignature());
+                    Type type = column.getType();
                     includedColumnsBuilder.put(column.getHiveColumnIndex(), type);
                     columnReferences.add(new ColumnReference<>(column, column.getHiveColumnIndex(), type));
                 }
@@ -260,7 +254,7 @@ private static List getPhysicalHiveColumnHandles(List
diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/parquet/ParquetPageSourceFactory.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/parquet/ParquetPageSourceFactory.java
--- a/presto-hive/src/main/java/io/prestosql/plugin/hive/parquet/ParquetPageSourceFactory.java
+++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/parquet/ParquetPageSourceFactory.java
 createPageSource(
                 isUseParquetColumnNames(session),
                 isFailOnCorruptedParquetStatistics(session),
                 getParquetMaxReadBlockSize(session),
-                typeManager,
                 effectivePredicate,
                 stats));
     }
 
@@ -151,7 +150,6 @@ public static ParquetPageSource createParquetPageSource(
             boolean useParquetColumnNames,
             boolean failOnCorruptedParquetStatistics,
             DataSize maxReadBlockSize,
-            TypeManager typeManager,
             TupleDomain<HiveColumnHandle> effectivePredicate,
             FileFormatDataSourceStats stats)
     {
@@ -213,13 +211,11 @@ public static ParquetPageSource createParquetPageSource(
             HiveColumnHandle column = columns.get(columnIndex);
             Optional<Field> parquetField = parquetFields.get(columnIndex);
 
-            Type prestoType = typeManager.getType(column.getTypeSignature());
-
-            prestoTypes.add(prestoType);
+            prestoTypes.add(column.getType());
 
             internalFields.add(parquetField.map(field -> {
                 String columnName = useParquetColumnNames ? column.getName() : fileSchema.getFields().get(column.getHiveColumnIndex()).getName();
-                return constructField(prestoType, lookupColumnByName(messageColumnIO, columnName)).orElse(null);
+                return constructField(column.getType(), lookupColumnByName(messageColumnIO, columnName)).orElse(null);
             }));
         }
diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/rcfile/RcFilePageSource.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/rcfile/RcFilePageSource.java
index a5ca392880c2..ed213e1d4ea9 100644
--- a/presto-hive/src/main/java/io/prestosql/plugin/hive/rcfile/RcFilePageSource.java
+++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/rcfile/RcFilePageSource.java
@@ -28,7 +28,6 @@
 import io.prestosql.spi.block.RunLengthEncodedBlock;
 import io.prestosql.spi.connector.ConnectorPageSource;
 import io.prestosql.spi.type.Type;
-import io.prestosql.spi.type.TypeManager;
 
 import java.io.IOException;
 import java.util.List;
@@ -58,11 +57,10 @@ public class RcFilePageSource
 
     private boolean closed;
 
-    public RcFilePageSource(RcFileReader rcFileReader, List<HiveColumnHandle> columns, TypeManager typeManager)
+    public RcFilePageSource(RcFileReader rcFileReader, List<HiveColumnHandle> columns)
     {
         requireNonNull(rcFileReader, "rcReader is null");
         requireNonNull(columns, "columns is null");
-        requireNonNull(typeManager, "typeManager is null");
 
         this.rcFileReader = rcFileReader;
 
@@ -77,11 +75,8 @@ public RcFilePageSource(RcFileReader rcFileReader, List column
         for (int columnIndex = 0; columnIndex < columns.size(); columnIndex++) {
             HiveColumnHandle column = columns.get(columnIndex);
 
-            String name = column.getName();
-            Type type = typeManager.getType(column.getTypeSignature());
-
-            namesBuilder.add(name);
-            typesBuilder.add(type);
+            namesBuilder.add(column.getName());
+            typesBuilder.add(column.getType());
             hiveTypesBuilder.add(column.getHiveType());
 
             hiveColumnIndexes[columnIndex] = column.getHiveColumnIndex();
@@ -89,7 +84,7 @@ public RcFilePageSource(RcFileReader rcFileReader, List column
             if (hiveColumnIndexes[columnIndex] >= rcFileReader.getColumnCount()) {
                 // this file may contain fewer fields than what's declared in the schema
                 // this happens when additional columns are added to the hive table after files have been created
-                BlockBuilder blockBuilder = type.createBlockBuilder(null, 1, NULL_ENTRY_SIZE);
+                BlockBuilder blockBuilder = column.getType().createBlockBuilder(null, 1, NULL_ENTRY_SIZE);
                 blockBuilder.appendNull();
                 constantBlocks[columnIndex] = blockBuilder.build();
             }
diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/rcfile/RcFilePageSourceFactory.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/rcfile/RcFilePageSourceFactory.java
index 46a33c5543a6..f37d84536f43 100644
--- a/presto-hive/src/main/java/io/prestosql/plugin/hive/rcfile/RcFilePageSourceFactory.java
+++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/rcfile/RcFilePageSourceFactory.java
@@ -147,7 +147,7 @@ else if (deserializerClassName.equals(ColumnarSerDe.class.getName())) {
                     length,
                     new DataSize(8, Unit.MEGABYTE));
 
-            return Optional.of(new RcFilePageSource(rcFileReader, columns, typeManager));
+            return Optional.of(new RcFilePageSource(rcFileReader, columns));
         }
         catch (Throwable e) {
             try {
diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/s3select/S3SelectRecordCursor.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3select/S3SelectRecordCursor.java
index f405f6fac56c..69ba5757214b 100644
--- a/presto-hive/src/main/java/io/prestosql/plugin/hive/s3select/S3SelectRecordCursor.java
+++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3select/S3SelectRecordCursor.java
@@ -56,7 +56,7 @@ public S3SelectRecordCursor(
             DateTimeZone hiveStorageTimeZone,
             TypeManager typeManager)
     {
-        super(configuration, path, recordReader, totalBytes, updateSplitSchema(splitSchema, columns), columns, hiveStorageTimeZone, typeManager);
+        super(configuration, path, recordReader, totalBytes, updateSplitSchema(splitSchema, columns), columns, hiveStorageTimeZone);
     }
 
     // since s3select only returns the required column, not the whole columns
diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/util/HiveBucketing.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/util/HiveBucketing.java
index e0e0c1e77db3..dfd6b074ab3d 100644
--- a/presto-hive/src/main/java/io/prestosql/plugin/hive/util/HiveBucketing.java
+++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/util/HiveBucketing.java
@@ -32,6 +32,7 @@
 import io.prestosql.spi.predicate.NullableValue;
 import io.prestosql.spi.predicate.TupleDomain;
 import io.prestosql.spi.predicate.ValueSet;
+import io.prestosql.spi.type.TypeManager;
 import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
@@ -129,14 +130,14 @@ static int getBucketHashCode(BucketingVersion bucketingVersion, List t
         }
     }
 
-    public static Optional<HiveBucketHandle> getHiveBucketHandle(Table table)
+    public static Optional<HiveBucketHandle> getHiveBucketHandle(Table table, TypeManager typeManager)
     {
         Optional<HiveBucketProperty> hiveBucketProperty = table.getStorage().getBucketProperty();
         if (!hiveBucketProperty.isPresent()) {
             return Optional.empty();
         }
 
-        Map<String, HiveColumnHandle> map = getRegularColumnHandles(table).stream()
+        Map<String, HiveColumnHandle> map = getRegularColumnHandles(table, typeManager).stream()
                 .collect(Collectors.toMap(HiveColumnHandle::getName, identity()));
 
         ImmutableList.Builder<HiveColumnHandle> bucketColumns = ImmutableList.builder();
diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/util/HiveUtil.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/util/HiveUtil.java
index ca2035b3190a..518c151a9683 100644
--- a/presto-hive/src/main/java/io/prestosql/plugin/hive/util/HiveUtil.java
+++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/util/HiveUtil.java
@@ -45,6 +45,7 @@
 import io.prestosql.spi.type.MapType;
 import io.prestosql.spi.type.RowType;
 import io.prestosql.spi.type.Type;
+import io.prestosql.spi.type.TypeManager;
 import io.prestosql.spi.type.VarbinaryType;
 import io.prestosql.spi.type.VarcharType;
 import org.apache.hadoop.conf.Configuration;
@@ -848,15 +849,15 @@ public static Slice charPartitionKey(String value, String name, Type columnType)
         return partitionKey;
     }
 
-    public static List<HiveColumnHandle> hiveColumnHandles(Table table)
+    public static List<HiveColumnHandle> hiveColumnHandles(Table table, TypeManager typeManager)
    {
         ImmutableList.Builder<HiveColumnHandle> columns = ImmutableList.builder();
 
         // add the data fields first
-        columns.addAll(getRegularColumnHandles(table));
+        columns.addAll(getRegularColumnHandles(table, typeManager));
 
         // add the partition keys last (like Hive does)
-        columns.addAll(getPartitionKeyColumnHandles(table));
+        columns.addAll(getPartitionKeyColumnHandles(table, typeManager));
 
         // add hidden columns
         columns.add(pathColumnHandle());
@@ -872,7 +873,7 @@ public static List hiveColumnHandles(Table table)
         return columns.build();
     }
 
-    public static List<HiveColumnHandle> getRegularColumnHandles(Table table)
+    public static List<HiveColumnHandle> getRegularColumnHandles(Table table, TypeManager typeManager)
     {
         ImmutableList.Builder<HiveColumnHandle> columns = ImmutableList.builder();
 
@@ -881,7 +882,7 @@ public static List getRegularColumnHandles(Table table)
             // ignore unsupported types rather than failing
             HiveType hiveType = field.getType();
             if (hiveType.isSupportedType()) {
-                columns.add(new HiveColumnHandle(field.getName(), hiveType, hiveType.getTypeSignature(), hiveColumnIndex, REGULAR, field.getComment()));
+                columns.add(new HiveColumnHandle(field.getName(), hiveType, hiveType.getType(typeManager), hiveColumnIndex, REGULAR, field.getComment()));
             }
             hiveColumnIndex++;
         }
@@ -889,7 +890,7 @@ public static List getRegularColumnHandles(Table table)
         return columns.build();
     }
 
-    public static List<HiveColumnHandle> getPartitionKeyColumnHandles(Table table)
+    public static List<HiveColumnHandle> getPartitionKeyColumnHandles(Table table, TypeManager typeManager)
     {
         ImmutableList.Builder<HiveColumnHandle> columns = ImmutableList.builder();
 
@@ -899,7 +900,7 @@ public static List getPartitionKeyColumnHandles(Table table)
             if (!hiveType.isSupportedType()) {
                 throw new PrestoException(NOT_SUPPORTED, format("Unsupported Hive type %s found in partition keys of table %s.%s", hiveType, table.getDatabaseName(), table.getTableName()));
             }
-            columns.add(new HiveColumnHandle(field.getName(), hiveType, hiveType.getTypeSignature(), -1, PARTITION_KEY, field.getComment()));
+            columns.add(new HiveColumnHandle(field.getName(), hiveType, hiveType.getType(typeManager), -1, PARTITION_KEY, field.getComment()));
         }
 
         return columns.build();
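
With the HiveUtil signatures above, the HiveType-to-Type mapping happens exactly once, when a handle is built, via HiveType.getType(TypeManager). A minimal sketch of that resolution step in isolation (HiveTypeResolution is a hypothetical name; HiveType.valueOf and getType are used the same way in this patch's tests):

    import io.prestosql.plugin.hive.HiveType;
    import io.prestosql.spi.type.TestingTypeManager;
    import io.prestosql.spi.type.Type;

    public class HiveTypeResolution
    {
        public static void main(String[] args)
        {
            HiveType hiveType = HiveType.valueOf("bigint");
            // The step getRegularColumnHandles/getPartitionKeyColumnHandles now perform
            // per column, instead of deferring to typeManager.getType(getTypeSignature())
            // at every point of use:
            Type type = hiveType.getType(new TestingTypeManager());
        }
    }
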
diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/AbstractTestHive.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/AbstractTestHive.java
index b0c62e3e167f..766ea6c1f426 100644
--- a/presto-hive/src/test/java/io/prestosql/plugin/hive/AbstractTestHive.java
+++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/AbstractTestHive.java
@@ -625,11 +625,11 @@ protected void setupHive(String databaseName, String timeZoneId)
 
         invalidTableHandle = new HiveTableHandle(database, INVALID_TABLE, ImmutableMap.of(), ImmutableList.of(), Optional.empty());
 
-        dsColumn = new HiveColumnHandle("ds", HIVE_STRING, VARCHAR.getTypeSignature(), -1, PARTITION_KEY, Optional.empty());
-        fileFormatColumn = new HiveColumnHandle("file_format", HIVE_STRING, VARCHAR.getTypeSignature(), -1, PARTITION_KEY, Optional.empty());
-        dummyColumn = new HiveColumnHandle("dummy", HIVE_INT, INTEGER.getTypeSignature(), -1, PARTITION_KEY, Optional.empty());
-        intColumn = new HiveColumnHandle("t_int", HIVE_INT, INTEGER.getTypeSignature(), -1, PARTITION_KEY, Optional.empty());
-        invalidColumnHandle = new HiveColumnHandle(INVALID_COLUMN, HIVE_STRING, VARCHAR.getTypeSignature(), 0, REGULAR, Optional.empty());
+        dsColumn = new HiveColumnHandle("ds", HIVE_STRING, VARCHAR, -1, PARTITION_KEY, Optional.empty());
+        fileFormatColumn = new HiveColumnHandle("file_format", HIVE_STRING, VARCHAR, -1, PARTITION_KEY, Optional.empty());
+        dummyColumn = new HiveColumnHandle("dummy", HIVE_INT, INTEGER, -1, PARTITION_KEY, Optional.empty());
+        intColumn = new HiveColumnHandle("t_int", HIVE_INT, INTEGER, -1, PARTITION_KEY, Optional.empty());
+        invalidColumnHandle = new HiveColumnHandle(INVALID_COLUMN, HIVE_STRING, VARCHAR, 0, REGULAR, Optional.empty());
 
         List<HiveColumnHandle> partitionColumns = ImmutableList.of(dsColumn, fileFormatColumn, dummyColumn);
         tablePartitionFormatPartitions = ImmutableList.builder()
diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/AbstractTestHiveFileFormats.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/AbstractTestHiveFileFormats.java
index eff364405cee..6790b87d6704 100644
--- a/presto-hive/src/test/java/io/prestosql/plugin/hive/AbstractTestHiveFileFormats.java
+++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/AbstractTestHiveFileFormats.java
@@ -479,7 +479,7 @@ protected List getColumnHandles(List testColumns)
             int columnIndex = testColumn.isPartitionKey() ? -1 : nextHiveColumnIndex++;
 
             HiveType hiveType = HiveType.valueOf(testColumn.getObjectInspector().getTypeName());
-            columns.add(new HiveColumnHandle(testColumn.getName(), hiveType, hiveType.getTypeSignature(), columnIndex, testColumn.isPartitionKey() ? PARTITION_KEY : REGULAR, Optional.empty()));
+            columns.add(new HiveColumnHandle(testColumn.getName(), hiveType, hiveType.getType(TYPE_MANAGER), columnIndex, testColumn.isPartitionKey() ? PARTITION_KEY : REGULAR, Optional.empty()));
         }
         return columns;
     }
diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/HiveTestUtils.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/HiveTestUtils.java
index eb5688c343a9..a3beb554d2e0 100644
--- a/presto-hive/src/test/java/io/prestosql/plugin/hive/HiveTestUtils.java
+++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/HiveTestUtils.java
@@ -93,7 +93,7 @@ public static Set getDefaultHivePageSourceFactories(HiveC
         FileFormatDataSourceStats stats = new FileFormatDataSourceStats();
         return ImmutableSet.<HivePageSourceFactory>builder()
                 .add(new RcFilePageSourceFactory(TYPE_MANAGER, hdfsEnvironment, stats))
-                .add(new OrcPageSourceFactory(TYPE_MANAGER, new OrcReaderConfig(), hdfsEnvironment, stats))
+                .add(new OrcPageSourceFactory(new OrcReaderConfig(), hdfsEnvironment, stats))
                 .add(new ParquetPageSourceFactory(TYPE_MANAGER, hdfsEnvironment, stats))
                 .build();
     }
@@ -129,7 +129,7 @@ public static List getTypes(List columnHandles)
     {
         ImmutableList.Builder<Type> types = ImmutableList.builder();
         for (ColumnHandle columnHandle : columnHandles) {
-            types.add(METADATA.getType(((HiveColumnHandle) columnHandle).getTypeSignature()));
+            types.add(((HiveColumnHandle) columnHandle).getType());
         }
         return types.build();
     }
diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestBackgroundHiveSplitLoader.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestBackgroundHiveSplitLoader.java
index b082108c43ec..05316f1db9ba 100644
--- a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestBackgroundHiveSplitLoader.java
+++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestBackgroundHiveSplitLoader.java
@@ -68,6 +68,7 @@
 import static io.prestosql.plugin.hive.HiveColumnHandle.pathColumnHandle;
 import static io.prestosql.plugin.hive.HiveStorageFormat.CSV;
 import static io.prestosql.plugin.hive.HiveTestUtils.SESSION;
+import static io.prestosql.plugin.hive.HiveTestUtils.TYPE_MANAGER;
 import static io.prestosql.plugin.hive.HiveTestUtils.getHiveSession;
 import static io.prestosql.plugin.hive.HiveType.HIVE_INT;
 import static io.prestosql.plugin.hive.HiveType.HIVE_STRING;
@@ -105,7 +106,7 @@ public class TestBackgroundHiveSplitLoader
     private static final List<Column> PARTITION_COLUMNS = ImmutableList.of(
             new Column("partitionColumn", HIVE_INT, Optional.empty()));
     private static final List<HiveColumnHandle> BUCKET_COLUMN_HANDLES = ImmutableList.of(
-            new HiveColumnHandle("col1", HIVE_INT, INTEGER.getTypeSignature(), 0, ColumnType.REGULAR, Optional.empty()));
+            new HiveColumnHandle("col1", HIVE_INT, INTEGER, 0, ColumnType.REGULAR, Optional.empty()));
HiveColumnHandle("col1", HIVE_INT, INTEGER, 0, ColumnType.REGULAR, Optional.empty())); private static final Optional BUCKET_PROPERTY = Optional.of( new HiveBucketProperty(ImmutableList.of("col1"), BUCKETING_V1, BUCKET_COUNT, ImmutableList.of())); @@ -203,7 +204,7 @@ public void testPathFilterBucketedPartitionedTable() PARTITIONED_TABLE, Optional.of( new HiveBucketHandle( - getRegularColumnHandles(PARTITIONED_TABLE), + getRegularColumnHandles(PARTITIONED_TABLE, TYPE_MANAGER), BUCKETING_V1, BUCKET_COUNT, BUCKET_COUNT))); diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveColumnHandle.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveColumnHandle.java index 091b90db71f8..5ca36851644f 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveColumnHandle.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveColumnHandle.java @@ -13,7 +13,12 @@ */ package io.prestosql.plugin.hive; +import com.google.common.collect.ImmutableMap; import io.airlift.json.JsonCodec; +import io.airlift.json.JsonCodecFactory; +import io.airlift.json.ObjectMapperProvider; +import io.prestosql.spi.type.TestingTypeManager; +import io.prestosql.spi.type.Type; import org.testng.annotations.Test; import java.util.Optional; @@ -25,8 +30,6 @@ public class TestHiveColumnHandle { - private final JsonCodec codec = JsonCodec.jsonCodec(HiveColumnHandle.class); - @Test public void testHiddenColumn() { @@ -37,19 +40,23 @@ public void testHiddenColumn() @Test public void testRegularColumn() { - HiveColumnHandle expectedPartitionColumn = new HiveColumnHandle("name", HiveType.HIVE_FLOAT, DOUBLE.getTypeSignature(), 88, PARTITION_KEY, Optional.empty()); + HiveColumnHandle expectedPartitionColumn = new HiveColumnHandle("name", HiveType.HIVE_FLOAT, DOUBLE, 88, PARTITION_KEY, Optional.empty()); testRoundTrip(expectedPartitionColumn); } @Test public void testPartitionKeyColumn() { - HiveColumnHandle expectedRegularColumn = new HiveColumnHandle("name", HiveType.HIVE_FLOAT, DOUBLE.getTypeSignature(), 88, REGULAR, Optional.empty()); + HiveColumnHandle expectedRegularColumn = new HiveColumnHandle("name", HiveType.HIVE_FLOAT, DOUBLE, 88, REGULAR, Optional.empty()); testRoundTrip(expectedRegularColumn); } private void testRoundTrip(HiveColumnHandle expected) { + ObjectMapperProvider objectMapperProvider = new ObjectMapperProvider(); + objectMapperProvider.setJsonDeserializers(ImmutableMap.of(Type.class, new HiveModule.TypeDeserializer(new TestingTypeManager()))); + JsonCodec codec = new JsonCodecFactory(objectMapperProvider).jsonCodec(HiveColumnHandle.class); + String json = codec.toJson(expected); HiveColumnHandle actual = codec.fromJson(json); diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveFileFormats.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveFileFormats.java index 1b223bb9c420..c86584a3f96b 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveFileFormats.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveFileFormats.java @@ -304,7 +304,7 @@ public void testOrc(int rowCount) assertThatFileFormat(ORC) .withColumns(TEST_COLUMNS) .withRowsCount(rowCount) - .isReadableByPageSource(new OrcPageSourceFactory(TYPE_MANAGER, false, new OrcReaderOptions(), HDFS_ENVIRONMENT, STATS)); + .isReadableByPageSource(new OrcPageSourceFactory(false, new OrcReaderOptions(), HDFS_ENVIRONMENT, STATS)); } @Test(dataProvider = "rowCount") @@ -330,7 +330,7 @@ public void testOrcOptimizedWriter(int rowCount) 
diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveFileFormats.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveFileFormats.java
index 1b223bb9c420..c86584a3f96b 100644
--- a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveFileFormats.java
+++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveFileFormats.java
@@ -304,7 +304,7 @@ public void testOrc(int rowCount)
         assertThatFileFormat(ORC)
                 .withColumns(TEST_COLUMNS)
                 .withRowsCount(rowCount)
-                .isReadableByPageSource(new OrcPageSourceFactory(TYPE_MANAGER, false, new OrcReaderOptions(), HDFS_ENVIRONMENT, STATS));
+                .isReadableByPageSource(new OrcPageSourceFactory(false, new OrcReaderOptions(), HDFS_ENVIRONMENT, STATS));
     }
 
 @Test(dataProvider = "rowCount")
@@ -330,7 +330,7 @@ public void testOrcOptimizedWriter(int rowCount)
                 .withSession(session)
                 .withFileWriterFactory(new OrcFileWriterFactory(HDFS_ENVIRONMENT, TYPE_MANAGER, new NodeVersion("test"), HIVE_STORAGE_TIME_ZONE, false, STATS, new OrcWriterOptions()))
                 .isReadableByRecordCursor(createGenericHiveRecordCursorProvider(HDFS_ENVIRONMENT))
-                .isReadableByPageSource(new OrcPageSourceFactory(TYPE_MANAGER, false, new OrcReaderOptions(), HDFS_ENVIRONMENT, STATS));
+                .isReadableByPageSource(new OrcPageSourceFactory(false, new OrcReaderOptions(), HDFS_ENVIRONMENT, STATS));
     }
 
 @Test(dataProvider = "rowCount")
@@ -342,7 +342,7 @@ public void testOrcUseColumnNames(int rowCount)
                 .withRowsCount(rowCount)
                 .withReadColumns(Lists.reverse(TEST_COLUMNS))
                 .withSession(SESSION)
-                .isReadableByPageSource(new OrcPageSourceFactory(TYPE_MANAGER, true, new OrcReaderOptions(), HDFS_ENVIRONMENT, STATS));
+                .isReadableByPageSource(new OrcPageSourceFactory(true, new OrcReaderOptions(), HDFS_ENVIRONMENT, STATS));
     }
 
 @Test(dataProvider = "rowCount")
@@ -358,7 +358,7 @@ public void testOrcUseColumnNameLowerCaseConversion(int rowCount)
                 .withRowsCount(rowCount)
                 .withReadColumns(TEST_COLUMNS)
                 .withSession(SESSION)
-                .isReadableByPageSource(new OrcPageSourceFactory(TYPE_MANAGER, true, new OrcReaderOptions(), HDFS_ENVIRONMENT, STATS));
+                .isReadableByPageSource(new OrcPageSourceFactory(true, new OrcReaderOptions(), HDFS_ENVIRONMENT, STATS));
     }
 
 @Test(dataProvider = "rowCount")
@@ -461,7 +461,7 @@ public void testTruncateVarcharColumn()
         assertThatFileFormat(ORC)
                 .withWriteColumns(ImmutableList.of(writeColumn))
                 .withReadColumns(ImmutableList.of(readColumn))
-                .isReadableByPageSource(new OrcPageSourceFactory(TYPE_MANAGER, false, new OrcReaderOptions(), HDFS_ENVIRONMENT, STATS));
+                .isReadableByPageSource(new OrcPageSourceFactory(false, new OrcReaderOptions(), HDFS_ENVIRONMENT, STATS));
 
         assertThatFileFormat(PARQUET)
                 .withWriteColumns(ImmutableList.of(writeColumn))
@@ -509,7 +509,7 @@ public void testFailForLongVarcharPartitionColumn()
 
         assertThatFileFormat(ORC)
                 .withColumns(columns)
-                .isFailingForPageSource(new OrcPageSourceFactory(TYPE_MANAGER, false, new OrcReaderOptions(), HDFS_ENVIRONMENT, STATS), expectedErrorCode, expectedMessage);
+                .isFailingForPageSource(new OrcPageSourceFactory(false, new OrcReaderOptions(), HDFS_ENVIRONMENT, STATS), expectedErrorCode, expectedMessage);
 
         assertThatFileFormat(PARQUET)
                 .withColumns(columns)
diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java
index 968e9a98b4f8..0f2b678b1a4e 100644
--- a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java
+++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java
@@ -14,8 +14,12 @@
 package io.prestosql.plugin.hive;
 
 import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.io.Files;
+import io.airlift.json.JsonCodec;
+import io.airlift.json.JsonCodecFactory;
+import io.airlift.json.ObjectMapperProvider;
 import io.prestosql.Session;
 import io.prestosql.connector.CatalogName;
 import io.prestosql.cost.StatsAndCosts;
@@ -46,6 +50,7 @@
 import io.prestosql.testing.MaterializedRow;
 import io.prestosql.tests.AbstractTestIntegrationSmokeTest;
 import io.prestosql.tests.DistributedQueryRunner;
+import io.prestosql.type.TypeDeserializer;
 import org.apache.hadoop.fs.Path;
 import org.intellij.lang.annotations.Language;
 import org.testng.annotations.Test;
@@ -73,7 +78,6 @@
 import static com.google.common.io.Files.createTempDir;
 import static com.google.common.io.MoreFiles.deleteRecursively;
 import static com.google.common.io.RecursiveDeleteOption.ALLOW_INSECURE;
-import static io.airlift.json.JsonCodec.jsonCodec;
 import static io.airlift.tpch.TpchTable.CUSTOMER;
 import static io.airlift.tpch.TpchTable.ORDERS;
 import static io.prestosql.SystemSessionProperties.COLOCATED_JOIN;
@@ -182,7 +186,7 @@ public void testIOExplain()
         EstimatedStatsAndCost estimate = new EstimatedStatsAndCost(2.0, 40.0, 40.0, 0.0, 0.0);
         MaterializedResult result = computeActual("EXPLAIN (TYPE IO, FORMAT JSON) INSERT INTO test_orders SELECT custkey, orderkey, processing FROM test_orders WHERE custkey <= 10");
         assertEquals(
-                jsonCodec(IoPlan.class).fromJson((String) getOnlyElement(result.getOnlyColumnAsSet())),
+                getIoPlanCodec().fromJson((String) getOnlyElement(result.getOnlyColumnAsSet())),
                 new IoPlan(
                         ImmutableSet.of(
                                 new TableColumnInfo(
@@ -221,7 +225,7 @@ public void testIOExplain()
         estimate = new EstimatedStatsAndCost(55.0, 990.0, 990.0, 0.0, 0.0);
         result = computeActual("EXPLAIN (TYPE IO, FORMAT JSON) INSERT INTO test_orders SELECT custkey, orderkey + 10 FROM test_orders WHERE custkey <= 10");
         assertEquals(
-                jsonCodec(IoPlan.class).fromJson((String) getOnlyElement(result.getOnlyColumnAsSet())),
+                getIoPlanCodec().fromJson((String) getOnlyElement(result.getOnlyColumnAsSet())),
                 new IoPlan(
                         ImmutableSet.of(
                                 new TableColumnInfo(
@@ -273,7 +277,7 @@ public void testIoExplainWithPrimitiveTypes()
         assertUpdate(query, 1);
 
         assertEquals(
-                jsonCodec(IoPlan.class).fromJson((String) getOnlyElement(computeActual("EXPLAIN (TYPE IO, FORMAT JSON) SELECT * FROM test_types_table").getOnlyColumnAsSet())),
+                getIoPlanCodec().fromJson((String) getOnlyElement(computeActual("EXPLAIN (TYPE IO, FORMAT JSON) SELECT * FROM test_types_table").getOnlyColumnAsSet())),
                 new IoPlan(
                         ImmutableSet.of(new TableColumnInfo(
                                 new CatalogSchemaTableName(catalog, "tpch", "test_types_table"),
@@ -4567,7 +4571,7 @@ private void assertColumnType(TableMetadata tableMetadata, String columnName, Ty
     private void assertConstraints(@Language("SQL") String query, Set<ColumnConstraint> expected)
     {
         MaterializedResult result = computeActual("EXPLAIN (TYPE IO, FORMAT JSON) " + query);
-        Set<ColumnConstraint> constraints = jsonCodec(IoPlan.class).fromJson((String) getOnlyElement(result.getOnlyColumnAsSet()))
+        Set<ColumnConstraint> constraints = getIoPlanCodec().fromJson((String) getOnlyElement(result.getOnlyColumnAsSet()))
                 .getInputTableColumnInfos().stream()
                 .findFirst().get()
                 .getColumnConstraints();
@@ -4639,6 +4643,13 @@ private List getAllTestingHiveStorageFormat()
         return formats.build();
     }
 
+    private JsonCodec<IoPlan> getIoPlanCodec()
+    {
+        ObjectMapperProvider objectMapperProvider = new ObjectMapperProvider();
+        objectMapperProvider.setJsonDeserializers(ImmutableMap.of(Type.class, new TypeDeserializer(getQueryRunner().getMetadata())));
+        return new JsonCodecFactory(objectMapperProvider).jsonCodec(IoPlan.class);
+    }
+
     private static class TestingHiveStorageFormat
     {
         private final Session session;
diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveMetadata.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveMetadata.java
index 4a1e3e8028c2..028850e4ab7c 100644
--- a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveMetadata.java
+++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveMetadata.java
@@ -30,7 +30,7 @@ public class TestHiveMetadata
     private static final HiveColumnHandle TEST_COLUMN_HANDLE = new HiveColumnHandle(
             "test",
             HiveType.HIVE_STRING,
-            VARCHAR.getTypeSignature(),
+            VARCHAR,
             0,
             HiveColumnHandle.ColumnType.PARTITION_KEY,
             Optional.empty());
diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHivePageSink.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHivePageSink.java
index 1fba269ea6ce..896a1a2542d1 100644
--- a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHivePageSink.java
+++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHivePageSink.java
@@ -288,7 +288,7 @@ private static List getColumnHandles()
         for (int i = 0; i < columns.size(); i++) {
             LineItemColumn column = columns.get(i);
             HiveType hiveType = getHiveType(column.getType());
-            handles.add(new HiveColumnHandle(column.getColumnName(), hiveType, hiveType.getTypeSignature(), i, REGULAR, Optional.empty()));
+            handles.add(new HiveColumnHandle(column.getColumnName(), hiveType, hiveType.getType(TYPE_MANAGER), i, REGULAR, Optional.empty()));
         }
         return handles.build();
     }
diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveSplit.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveSplit.java
index e8137166eccd..d2240aadeece 100644
--- a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveSplit.java
+++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveSplit.java
@@ -16,8 +16,12 @@
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import io.airlift.json.JsonCodec;
+import io.airlift.json.JsonCodecFactory;
+import io.airlift.json.ObjectMapperProvider;
 import io.prestosql.plugin.hive.HiveColumnHandle.ColumnType;
 import io.prestosql.spi.HostAddress;
+import io.prestosql.spi.type.TestingTypeManager;
+import io.prestosql.spi.type.Type;
 import org.testng.annotations.Test;
 
 import java.time.Instant;
@@ -33,11 +37,13 @@
 public class TestHiveSplit
 {
-    private final JsonCodec<HiveSplit> codec = JsonCodec.jsonCodec(HiveSplit.class);
-
     @Test
     public void testJsonRoundTrip()
     {
+        ObjectMapperProvider objectMapperProvider = new ObjectMapperProvider();
+        objectMapperProvider.setJsonDeserializers(ImmutableMap.of(Type.class, new HiveModule.TypeDeserializer(new TestingTypeManager())));
+        JsonCodec<HiveSplit> codec = new JsonCodecFactory(objectMapperProvider).jsonCodec(HiveSplit.class);
+
         Properties schema = new Properties();
         schema.setProperty("foo", "bar");
         schema.setProperty("bar", "baz");
@@ -63,7 +69,7 @@ public void testJsonRoundTrip()
                         BUCKETING_V1,
                         32,
                         16,
-                        ImmutableList.of(new HiveColumnHandle("col", HIVE_LONG, BIGINT.getTypeSignature(), 5, ColumnType.REGULAR, Optional.of("comment"))))),
+                        ImmutableList.of(new HiveColumnHandle("col", HIVE_LONG, BIGINT, 5, ColumnType.REGULAR, Optional.of("comment"))))),
                 false);
 
         String json = codec.toJson(expected);
diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestIonSqlQueryBuilder.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestIonSqlQueryBuilder.java
index 1ae5ad18715a..0f713bfaf8e4 100644
--- a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestIonSqlQueryBuilder.java
+++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestIonSqlQueryBuilder.java
@@ -56,9 +56,9 @@ public void testBuildSQL()
     {
         IonSqlQueryBuilder queryBuilder = new IonSqlQueryBuilder(typeManager);
         List<HiveColumnHandle> columns = ImmutableList.of(
-                new HiveColumnHandle("n_nationkey", HIVE_INT, INTEGER.getTypeSignature(), 0, REGULAR, Optional.empty()),
-                new HiveColumnHandle("n_name", HIVE_STRING, VARCHAR.getTypeSignature(), 1, REGULAR, Optional.empty()),
-                new HiveColumnHandle("n_regionkey", HIVE_INT, INTEGER.getTypeSignature(), 2, REGULAR, Optional.empty()));
HiveColumnHandle("n_regionkey", HIVE_INT, INTEGER.getTypeSignature(), 2, REGULAR, Optional.empty())); + new HiveColumnHandle("n_nationkey", HIVE_INT, INTEGER, 0, REGULAR, Optional.empty()), + new HiveColumnHandle("n_name", HIVE_STRING, VARCHAR, 1, REGULAR, Optional.empty()), + new HiveColumnHandle("n_regionkey", HIVE_INT, INTEGER, 2, REGULAR, Optional.empty())); assertEquals("SELECT s._1, s._2, s._3 FROM S3Object s", queryBuilder.buildSql(columns, TupleDomain.all())); @@ -81,9 +81,9 @@ public void testDecimalColumns() TypeManager typeManager = this.typeManager; IonSqlQueryBuilder queryBuilder = new IonSqlQueryBuilder(typeManager); List columns = ImmutableList.of( - new HiveColumnHandle("quantity", HiveType.valueOf("decimal(20,0)"), DecimalType.createDecimalType().getTypeSignature(), 0, REGULAR, Optional.empty()), - new HiveColumnHandle("extendedprice", HiveType.valueOf("decimal(20,2)"), DecimalType.createDecimalType().getTypeSignature(), 1, REGULAR, Optional.empty()), - new HiveColumnHandle("discount", HiveType.valueOf("decimal(10,2)"), DecimalType.createDecimalType().getTypeSignature(), 2, REGULAR, Optional.empty())); + new HiveColumnHandle("quantity", HiveType.valueOf("decimal(20,0)"), DecimalType.createDecimalType(), 0, REGULAR, Optional.empty()), + new HiveColumnHandle("extendedprice", HiveType.valueOf("decimal(20,2)"), DecimalType.createDecimalType(), 1, REGULAR, Optional.empty()), + new HiveColumnHandle("discount", HiveType.valueOf("decimal(10,2)"), DecimalType.createDecimalType(), 2, REGULAR, Optional.empty())); DecimalType decimalType = DecimalType.createDecimalType(10, 2); TupleDomain tupleDomain = withColumnDomains( ImmutableMap.of( @@ -101,8 +101,8 @@ public void testDateColumn() { IonSqlQueryBuilder queryBuilder = new IonSqlQueryBuilder(typeManager); List columns = ImmutableList.of( - new HiveColumnHandle("t1", HIVE_TIMESTAMP, TIMESTAMP.getTypeSignature(), 0, REGULAR, Optional.empty()), - new HiveColumnHandle("t2", HIVE_DATE, DATE.getTypeSignature(), 1, REGULAR, Optional.empty())); + new HiveColumnHandle("t1", HIVE_TIMESTAMP, TIMESTAMP, 0, REGULAR, Optional.empty()), + new HiveColumnHandle("t2", HIVE_DATE, DATE, 1, REGULAR, Optional.empty())); TupleDomain tupleDomain = withColumnDomains(ImmutableMap.of( columns.get(1), Domain.create(SortedRangeSet.copyOf(DATE, ImmutableList.of(Range.equal(DATE, (long) DateTimeUtils.parseDate("2001-08-22")))), false))); @@ -114,9 +114,9 @@ public void testNotPushDoublePredicates() { IonSqlQueryBuilder queryBuilder = new IonSqlQueryBuilder(typeManager); List columns = ImmutableList.of( - new HiveColumnHandle("quantity", HIVE_INT, INTEGER.getTypeSignature(), 0, REGULAR, Optional.empty()), - new HiveColumnHandle("extendedprice", HIVE_DOUBLE, DOUBLE.getTypeSignature(), 1, REGULAR, Optional.empty()), - new HiveColumnHandle("discount", HIVE_DOUBLE, DOUBLE.getTypeSignature(), 2, REGULAR, Optional.empty())); + new HiveColumnHandle("quantity", HIVE_INT, INTEGER, 0, REGULAR, Optional.empty()), + new HiveColumnHandle("extendedprice", HIVE_DOUBLE, DOUBLE, 1, REGULAR, Optional.empty()), + new HiveColumnHandle("discount", HIVE_DOUBLE, DOUBLE, 2, REGULAR, Optional.empty())); TupleDomain tupleDomain = withColumnDomains( ImmutableMap.of( columns.get(0), Domain.create(ofRanges(Range.lessThan(BIGINT, 50L)), false), diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestOrcPageSourceMemoryTracking.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestOrcPageSourceMemoryTracking.java index 5a458ab1412a..5de8c8b9e08b 100644 --- 
diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestOrcPageSourceMemoryTracking.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestOrcPageSourceMemoryTracking.java
index 5a458ab1412a..5de8c8b9e08b 100644
--- a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestOrcPageSourceMemoryTracking.java
+++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestOrcPageSourceMemoryTracking.java
@@ -453,7 +453,7 @@ public TestPreparer(String tempFilePath, List<TestColumn> testColumns, int numRo
             HiveType hiveType = HiveType.valueOf(inspector.getTypeName());
             Type type = hiveType.getType(TYPE_MANAGER);

-            columnsBuilder.add(new HiveColumnHandle(testColumn.getName(), hiveType, type.getTypeSignature(), columnIndex, testColumn.isPartitionKey() ? PARTITION_KEY : REGULAR, Optional.empty()));
+            columnsBuilder.add(new HiveColumnHandle(testColumn.getName(), hiveType, type, columnIndex, testColumn.isPartitionKey() ? PARTITION_KEY : REGULAR, Optional.empty()));
             typesBuilder.add(type);
         }
         columns = columnsBuilder.build();
@@ -474,7 +474,7 @@ public ConnectorPageSource newPageSource(FileFormatDataSourceStats stats)

         public ConnectorPageSource newPageSource(FileFormatDataSourceStats stats, ConnectorSession session)
         {
-            OrcPageSourceFactory orcPageSourceFactory = new OrcPageSourceFactory(TYPE_MANAGER, false, new OrcReaderOptions(), HDFS_ENVIRONMENT, stats);
+            OrcPageSourceFactory orcPageSourceFactory = new OrcPageSourceFactory(false, new OrcReaderOptions(), HDFS_ENVIRONMENT, stats);
             return HivePageSourceProvider.createHivePageSource(
                     ImmutableSet.of(),
                     ImmutableSet.of(orcPageSourceFactory),
diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/benchmark/FileFormat.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/benchmark/FileFormat.java
index 57ca0547c263..468223f60835 100644
--- a/presto-hive/src/test/java/io/prestosql/plugin/hive/benchmark/FileFormat.java
+++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/benchmark/FileFormat.java
@@ -131,7 +131,7 @@ public FormatWriter createFileFormatWriter(
     @Override
     public ConnectorPageSource createFileFormatReader(ConnectorSession session, HdfsEnvironment hdfsEnvironment, File targetFile, List<String> columnNames, List<Type> columnTypes)
     {
-        HivePageSourceFactory pageSourceFactory = new OrcPageSourceFactory(TYPE_MANAGER, false, new OrcReaderOptions(), hdfsEnvironment, new FileFormatDataSourceStats());
+        HivePageSourceFactory pageSourceFactory = new OrcPageSourceFactory(false, new OrcReaderOptions(), hdfsEnvironment, new FileFormatDataSourceStats());
         return createPageSource(pageSourceFactory, session, targetFile, columnNames, columnTypes, HiveStorageFormat.ORC);
     }
@@ -294,7 +294,7 @@ private static ConnectorPageSource createPageSource(
         for (int i = 0; i < columnNames.size(); i++) {
             String columnName = columnNames.get(i);
             Type columnType = columnTypes.get(i);
-            columnHandles.add(new HiveColumnHandle(columnName, toHiveType(typeTranslator, columnType), columnType.getTypeSignature(), i, REGULAR, Optional.empty()));
+            columnHandles.add(new HiveColumnHandle(columnName, toHiveType(typeTranslator, columnType), columnType, i, REGULAR, Optional.empty()));
         }

         RecordCursor recordCursor = cursorProvider
@@ -328,7 +328,7 @@ private static ConnectorPageSource createPageSource(
         for (int i = 0; i < columnNames.size(); i++) {
             String columnName = columnNames.get(i);
             Type columnType = columnTypes.get(i);
-            columnHandles.add(new HiveColumnHandle(columnName, toHiveType(typeTranslator, columnType), columnType.getTypeSignature(), i, REGULAR, Optional.empty()));
+            columnHandles.add(new HiveColumnHandle(columnName, toHiveType(typeTranslator, columnType), columnType, i, REGULAR, Optional.empty()));
         }

         return pageSourceFactory
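Both loops above now pass the engine Type through unchanged. A sketch of the two type systems being bridged (typeTranslator and typeManager stand for the instances already in scope in these files):

    Type prestoType = VARCHAR;                                   // engine-side type
    HiveType hiveType = toHiveType(typeTranslator, prestoType);  // hive-side "string"
    Type roundTripped = hiveType.getType(typeManager);           // back to VARCHAR
    // The patch removes the old middle hop through TypeSignature on this path.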
diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/parquet/predicate/TestParquetPredicateUtils.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/parquet/predicate/TestParquetPredicateUtils.java
index 460048964947..ebc759048b00 100644
--- a/presto-hive/src/test/java/io/prestosql/plugin/hive/parquet/predicate/TestParquetPredicateUtils.java
+++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/parquet/predicate/TestParquetPredicateUtils.java
@@ -24,8 +24,6 @@
 import io.prestosql.spi.type.ArrayType;
 import io.prestosql.spi.type.MapType;
 import io.prestosql.spi.type.RowType;
-import io.prestosql.spi.type.StandardTypes;
-import io.prestosql.spi.type.TypeSignature;
 import org.apache.parquet.column.ColumnDescriptor;
 import org.apache.parquet.schema.GroupType;
 import org.apache.parquet.schema.MessageType;
@@ -43,6 +41,7 @@
 import static io.prestosql.spi.predicate.TupleDomain.withColumnDomains;
 import static io.prestosql.spi.type.BigintType.BIGINT;
 import static io.prestosql.spi.type.IntegerType.INTEGER;
+import static io.prestosql.spi.type.RowType.rowType;
 import static org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.INT32;
 import static org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.INT64;
 import static org.apache.parquet.schema.Type.Repetition.OPTIONAL;
@@ -56,7 +55,7 @@ public class TestParquetPredicateUtils
     @Test
     public void testParquetTupleDomainPrimitiveArray()
     {
-        HiveColumnHandle columnHandle = new HiveColumnHandle("my_array", HiveType.valueOf("array<int>"), new TypeSignature(StandardTypes.ARRAY), 0, REGULAR, Optional.empty());
+        HiveColumnHandle columnHandle = new HiveColumnHandle("my_array", HiveType.valueOf("array<int>"), new ArrayType(INTEGER), 0, REGULAR, Optional.empty());
         TupleDomain<HiveColumnHandle> domain = withColumnDomains(ImmutableMap.of(columnHandle, Domain.notNull(new ArrayType(INTEGER))));

         MessageType fileSchema = new MessageType("hive_schema",
@@ -71,9 +70,10 @@ public void testParquetTupleDomainPrimitiveArray()
     @Test
     public void testParquetTupleDomainStructArray()
     {
-        HiveColumnHandle columnHandle = new HiveColumnHandle("my_array_struct", HiveType.valueOf("array<struct<a:int>>"), new TypeSignature(StandardTypes.ARRAY), 0, REGULAR, Optional.empty());
         RowType.Field rowField = new RowType.Field(Optional.of("a"), INTEGER);
         RowType rowType = RowType.from(ImmutableList.of(rowField));
+
+        HiveColumnHandle columnHandle = new HiveColumnHandle("my_array_struct", HiveType.valueOf("array<struct<a:int>>"), rowType, 0, REGULAR, Optional.empty());
         TupleDomain<HiveColumnHandle> domain = withColumnDomains(ImmutableMap.of(columnHandle, Domain.notNull(new ArrayType(rowType))));

         MessageType fileSchema = new MessageType("hive_schema",
@@ -89,7 +89,7 @@ public void testParquetTupleDomainStructArray()
     @Test
     public void testParquetTupleDomainPrimitive()
     {
-        HiveColumnHandle columnHandle = new HiveColumnHandle("my_primitive", HiveType.valueOf("bigint"), BIGINT.getTypeSignature(), 0, REGULAR, Optional.empty());
+        HiveColumnHandle columnHandle = new HiveColumnHandle("my_primitive", HiveType.valueOf("bigint"), BIGINT, 0, REGULAR, Optional.empty());
         Domain singleValueDomain = Domain.singleValue(BIGINT, 123L);
         TupleDomain<HiveColumnHandle> domain = withColumnDomains(ImmutableMap.of(columnHandle, singleValueDomain));
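The handle now carries the fully constructed nested Type, so downstream code no longer has to re-resolve a bare "array"/"row"/"map" signature. A compact restatement of the pattern from the array test above (no new API; every call appears in this patch):

    ArrayType arrayType = new ArrayType(INTEGER);
    // One Type instance is shared by the handle and the predicate domain.
    HiveColumnHandle handle = new HiveColumnHandle("my_array", HiveType.valueOf("array<int>"), arrayType, 0, REGULAR, Optional.empty());
    TupleDomain<HiveColumnHandle> domain = withColumnDomains(ImmutableMap.of(handle, Domain.notNull(arrayType)));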
@@ -110,9 +110,11 @@ public void testParquetTupleDomainPrimitive()
     @Test
     public void testParquetTupleDomainStruct()
     {
-        HiveColumnHandle columnHandle = new HiveColumnHandle("my_struct", HiveType.valueOf("struct<a:int,b:int>"), new TypeSignature(StandardTypes.ROW), 0, REGULAR, Optional.empty());
-        RowType.Field rowField = new RowType.Field(Optional.of("my_struct"), INTEGER);
-        RowType rowType = RowType.from(ImmutableList.of(rowField));
+        RowType rowType = rowType(
+                RowType.field("a", INTEGER),
+                RowType.field("b", INTEGER));
+
+        HiveColumnHandle columnHandle = new HiveColumnHandle("my_struct", HiveType.valueOf("struct<a:int,b:int>"), rowType, 0, REGULAR, Optional.empty());
         TupleDomain<HiveColumnHandle> domain = withColumnDomains(ImmutableMap.of(columnHandle, Domain.notNull(rowType)));

         MessageType fileSchema = new MessageType("hive_schema",
@@ -127,8 +129,6 @@ public void testParquetTupleDomainStruct()
     @Test
     public void testParquetTupleDomainMap()
     {
-        HiveColumnHandle columnHandle = new HiveColumnHandle("my_map", HiveType.valueOf("map<int,int>"), new TypeSignature(StandardTypes.MAP), 0, REGULAR, Optional.empty());
-
         MapType mapType = new MapType(
                 INTEGER,
                 INTEGER,
@@ -137,6 +137,8 @@ public void testParquetTupleDomainMap()
                 methodHandle(TestParquetPredicateUtils.class, "throwUnsupportedOperationException"),
                 methodHandle(TestParquetPredicateUtils.class, "throwUnsupportedOperationException"));

+        HiveColumnHandle columnHandle = new HiveColumnHandle("my_map", HiveType.valueOf("map<int,int>"), mapType, 0, REGULAR, Optional.empty());
+
         TupleDomain<HiveColumnHandle> domain = withColumnDomains(ImmutableMap.of(columnHandle, Domain.notNull(mapType)));

         MessageType fileSchema = new MessageType("hive_schema",
diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/s3select/TestS3SelectRecordCursor.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/s3select/TestS3SelectRecordCursor.java
index 3f3886918c9d..8cdc931681c3 100644
--- a/presto-hive/src/test/java/io/prestosql/plugin/hive/s3select/TestS3SelectRecordCursor.java
+++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/s3select/TestS3SelectRecordCursor.java
@@ -44,10 +44,10 @@ public class TestS3SelectRecordCursor
 {
     private static final String LAZY_SERDE_CLASS_NAME = LazySimpleSerDe.class.getName();

-    private static final HiveColumnHandle ARTICLE_COLUMN = new HiveColumnHandle("article", HIVE_STRING, VARCHAR.getTypeSignature(), 1, REGULAR, Optional.empty());
-    private static final HiveColumnHandle AUTHOR_COLUMN = new HiveColumnHandle("author", HIVE_STRING, VARCHAR.getTypeSignature(), 1, REGULAR, Optional.empty());
-    private static final HiveColumnHandle DATE_ARTICLE_COLUMN = new HiveColumnHandle("date_pub", HIVE_INT, DATE.getTypeSignature(), 1, REGULAR, Optional.empty());
-    private static final HiveColumnHandle QUANTITY_COLUMN = new HiveColumnHandle("quantity", HIVE_INT, INTEGER.getTypeSignature(), 1, REGULAR, Optional.empty());
+    private static final HiveColumnHandle ARTICLE_COLUMN = new HiveColumnHandle("article", HIVE_STRING, VARCHAR, 1, REGULAR, Optional.empty());
+    private static final HiveColumnHandle AUTHOR_COLUMN = new HiveColumnHandle("author", HIVE_STRING, VARCHAR, 1, REGULAR, Optional.empty());
+    private static final HiveColumnHandle DATE_ARTICLE_COLUMN = new HiveColumnHandle("date_pub", HIVE_INT, DATE, 1, REGULAR, Optional.empty());
+    private static final HiveColumnHandle QUANTITY_COLUMN = new HiveColumnHandle("quantity", HIVE_INT, INTEGER, 1, REGULAR, Optional.empty());
     private static final HiveColumnHandle[] DEFAULT_TEST_COLUMNS = {ARTICLE_COLUMN, AUTHOR_COLUMN, DATE_ARTICLE_COLUMN, QUANTITY_COLUMN};

     @Test(expectedExceptions = IllegalArgumentException.class, expectedExceptionsMessageRegExp = "Invalid Thrift DDL struct article \\{ \\}")
diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/statistics/TestMetastoreHiveStatisticsProvider.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/statistics/TestMetastoreHiveStatisticsProvider.java
index 6f84b1433718..ab6604422e22 100644
--- a/presto-hive/src/test/java/io/prestosql/plugin/hive/statistics/TestMetastoreHiveStatisticsProvider.java
+++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/statistics/TestMetastoreHiveStatisticsProvider.java
@@ -93,8 +93,8 @@ public class TestMetastoreHiveStatisticsProvider
     private static final String COLUMN = "column";
     private static final DecimalType DECIMAL = createDecimalType(5, 3);

-    private static final HiveColumnHandle PARTITION_COLUMN_1 = new HiveColumnHandle("p1", HIVE_STRING, VARCHAR.getTypeSignature(), 0, PARTITION_KEY, Optional.empty());
-    private static final HiveColumnHandle PARTITION_COLUMN_2 = new HiveColumnHandle("p2", HIVE_LONG, BIGINT.getTypeSignature(), 1, PARTITION_KEY, Optional.empty());
+    private static final HiveColumnHandle PARTITION_COLUMN_1 = new HiveColumnHandle("p1", HIVE_STRING, VARCHAR, 0, PARTITION_KEY, Optional.empty());
+    private static final HiveColumnHandle PARTITION_COLUMN_2 = new HiveColumnHandle("p2", HIVE_LONG, BIGINT, 1, PARTITION_KEY, Optional.empty());

     @Test
     public void testGetPartitionsSample()
@@ -609,7 +609,7 @@ public void testGetTableStatistics()
                 .setColumnStatistics(ImmutableMap.of(COLUMN, createIntegerColumnStatistics(OptionalLong.of(-100), OptionalLong.of(100), OptionalLong.of(500), OptionalLong.of(300))))
                 .build();
         MetastoreHiveStatisticsProvider statisticsProvider = new MetastoreHiveStatisticsProvider((session, table, hivePartitions) -> ImmutableMap.of(partitionName, statistics));
-        HiveColumnHandle columnHandle = new HiveColumnHandle(COLUMN, HIVE_LONG, BIGINT.getTypeSignature(), 2, REGULAR, Optional.empty());
+        HiveColumnHandle columnHandle = new HiveColumnHandle(COLUMN, HIVE_LONG, BIGINT, 2, REGULAR, Optional.empty());
         TableStatistics expected = TableStatistics.builder()
                 .setRowCount(Estimate.of(1000))
                 .setColumnStatistics(
@@ -658,7 +658,7 @@ public void testGetTableStatisticsUnpartitioned()
                 .setColumnStatistics(ImmutableMap.of(COLUMN, createIntegerColumnStatistics(OptionalLong.of(-100), OptionalLong.of(100), OptionalLong.of(500), OptionalLong.of(300))))
                 .build();
         MetastoreHiveStatisticsProvider statisticsProvider = new MetastoreHiveStatisticsProvider((session, table, hivePartitions) -> ImmutableMap.of(UNPARTITIONED_ID, statistics));
-        HiveColumnHandle columnHandle = new HiveColumnHandle(COLUMN, HIVE_LONG, BIGINT.getTypeSignature(), 2, REGULAR, Optional.empty());
+        HiveColumnHandle columnHandle = new HiveColumnHandle(COLUMN, HIVE_LONG, BIGINT, 2, REGULAR, Optional.empty());
         TableStatistics expected = TableStatistics.builder()
                 .setRowCount(Estimate.of(1000))
                 .setColumnStatistics(
diff --git a/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/ExpressionConverter.java b/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/ExpressionConverter.java
index 16951e3d85b2..f40931294dea 100644
--- a/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/ExpressionConverter.java
+++ b/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/ExpressionConverter.java
@@ -24,6 +24,13 @@
 import io.prestosql.spi.predicate.SortedRangeSet;
 import io.prestosql.spi.predicate.TupleDomain;
 import io.prestosql.spi.predicate.ValueSet;
+import io.prestosql.spi.type.TimeType;
+import io.prestosql.spi.type.TimeWithTimeZoneType;
+import io.prestosql.spi.type.TimestampType;
+import io.prestosql.spi.type.TimestampWithTimeZoneType;
+import io.prestosql.spi.type.Type;
+import io.prestosql.spi.type.VarbinaryType;
+import io.prestosql.spi.type.VarcharType;
 import org.apache.iceberg.expressions.Expression;

 import java.util.List;
@@ -34,12 +41,6 @@
 import static io.prestosql.spi.predicate.Marker.Bound.BELOW;
 import static io.prestosql.spi.predicate.Marker.Bound.EXACTLY;
 import static io.prestosql.spi.type.DateTimeEncoding.unpackMillisUtc;
-import static io.prestosql.spi.type.StandardTypes.TIME;
-import static io.prestosql.spi.type.StandardTypes.TIMESTAMP;
-import static io.prestosql.spi.type.StandardTypes.TIMESTAMP_WITH_TIME_ZONE;
-import static io.prestosql.spi.type.StandardTypes.TIME_WITH_TIME_ZONE;
-import static io.prestosql.spi.type.StandardTypes.VARBINARY;
-import static io.prestosql.spi.type.StandardTypes.VARCHAR;
 import static java.util.concurrent.TimeUnit.MILLISECONDS;
 import static org.apache.iceberg.expressions.Expressions.alwaysFalse;
 import static org.apache.iceberg.expressions.Expressions.alwaysTrue;
@@ -72,16 +73,14 @@ public static Expression toIcebergExpression(TupleDomain tuple
             HiveColumnHandle columnHandle = entry.getKey();
             Domain domain = entry.getValue();
             if (!columnHandle.isHidden()) {
-                expression = and(expression, toIcebergExpression(columnHandle, domain, session));
+                expression = and(expression, toIcebergExpression(columnHandle.getName(), columnHandle.getType(), domain, session));
             }
         }
         return expression;
     }

-    private static Expression toIcebergExpression(HiveColumnHandle column, Domain domain, ConnectorSession session)
+    private static Expression toIcebergExpression(String columnName, Type type, Domain domain, ConnectorSession session)
     {
-        String columnName = column.getName();
-
         if (domain.isAll()) {
             return alwaysTrue();
         }
@@ -122,42 +121,42 @@ private static Expression toIcebergExpression(HiveColumnHandle column, Domain do
                 // case col <> 'val' is represented as (col < 'val' or col > 'val')
                 if (lowBound == EXACTLY && highBound == EXACTLY) {
                     // case ==
-                    if (getValue(column, low, session).equals(getValue(column, high, session))) {
-                        expression = or(expression, equal(columnName, getValue(column, low, session)));
+                    if (getValue(type, low, session).equals(getValue(type, high, session))) {
+                        expression = or(expression, equal(columnName, getValue(type, low, session)));
                     }
                     else {
                         // case between
                         Expression between = and(
-                                greaterThanOrEqual(columnName, getValue(column, low, session)),
-                                lessThanOrEqual(columnName, getValue(column, high, session)));
+                                greaterThanOrEqual(columnName, getValue(type, low, session)),
+                                lessThanOrEqual(columnName, getValue(type, high, session)));
                         expression = or(expression, between);
                     }
                 }
                 else {
                     if (lowBound == EXACTLY && low.getValueBlock().isPresent()) {
                         // case >=
-                        expression = or(expression, greaterThanOrEqual(columnName, getValue(column, low, session)));
+                        expression = or(expression, greaterThanOrEqual(columnName, getValue(type, low, session)));
                     }
                     else if (lowBound == ABOVE && low.getValueBlock().isPresent()) {
                         // case >
-                        expression = or(expression, greaterThan(columnName, getValue(column, low, session)));
+                        expression = or(expression, greaterThan(columnName, getValue(type, low, session)));
                     }
                     if (highBound == EXACTLY && high.getValueBlock().isPresent()) {
                         // case <=
                         if (low.getValueBlock().isPresent()) {
-                            expression = and(expression, lessThanOrEqual(columnName, getValue(column, high, session)));
+                            expression = and(expression, lessThanOrEqual(columnName, getValue(type, high, session)));
                         }
                         else {
-                            expression = or(expression, lessThanOrEqual(columnName, getValue(column, high, session)));
+                            expression = or(expression, lessThanOrEqual(columnName, getValue(type, high, session)));
                         }
                     }
                     else if (highBound == BELOW && high.getValueBlock().isPresent()) {
                         // case <
                         if (low.getValueBlock().isPresent()) {
-                            expression = and(expression, lessThan(columnName, getValue(column, high, session)));
+                            expression = and(expression, lessThan(columnName, getValue(type, high, session)));
                         }
                         else {
-                            expression = or(expression, lessThan(columnName, getValue(column, high, session)));
+                            expression = or(expression, lessThan(columnName, getValue(type, high, session)));
                         }
                     }
                 }
@@ -168,20 +167,24 @@ else if (highBound == BELOW && high.getValueBlock().isPresent()) {
         throw new VerifyException("Did not expect a domain value set other than SortedRangeSet and EquatableValueSet but got " + domainValues.getClass().getSimpleName());
     }

-    private static Object getValue(HiveColumnHandle columnHandle, Marker marker, ConnectorSession session)
+    private static Object getValue(Type type, Marker marker, ConnectorSession session)
     {
-        switch (columnHandle.getTypeSignature().getBase()) {
-            case TIMESTAMP_WITH_TIME_ZONE:
-            case TIME_WITH_TIME_ZONE:
-                return MILLISECONDS.toMicros(unpackMillisUtc((Long) marker.getValue()));
-            case TIME:
-            case TIMESTAMP:
-                return MILLISECONDS.toMicros((Long) marker.getValue());
-            case VARCHAR:
-                return ((Slice) marker.getValue()).toStringUtf8();
-            case VARBINARY:
-                return ((Slice) marker.getValue()).getBytes();
+        if (type instanceof TimestampWithTimeZoneType || type instanceof TimeWithTimeZoneType) {
+            return MILLISECONDS.toMicros(unpackMillisUtc((Long) marker.getValue()));
+        }
+
+        if (type instanceof TimestampType || type instanceof TimeType) {
+            return MILLISECONDS.toMicros((Long) marker.getValue());
+        }
+
+        if (type instanceof VarcharType) {
+            return ((Slice) marker.getValue()).toStringUtf8();
+        }
+
+        if (type instanceof VarbinaryType) {
+            return ((Slice) marker.getValue()).getBytes();
         }
+
         return marker.getValue();
     }
 }
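The getValue() rewrite replaces string matching on TypeSignature bases with instanceof checks on concrete Type classes. Illustrative dispatches (the marker values are hypothetical; the unit conversions mirror the code above):

    // TIMESTAMP/TIME WITH TIME ZONE marker: packed UTC millis -> micros for Iceberg
    // TIMESTAMP / TIME marker:              plain millis      -> micros
    // VARCHAR marker:                       Slice             -> Java String
    // VARBINARY marker:                     Slice             -> byte[]
    // any other type (e.g. BIGINT):         value passes through unchanged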
diff --git a/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergMetadata.java b/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergMetadata.java
index e45ff43ed40d..333940a70761 100644
--- a/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergMetadata.java
+++ b/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergMetadata.java
@@ -211,7 +211,7 @@ public Map<String, ColumnHandle> getColumnHandles(ConnectorSession session, Conn
     public ColumnMetadata getColumnMetadata(ConnectorSession session, ConnectorTableHandle tableHandle, ColumnHandle columnHandle)
     {
         HiveColumnHandle column = (HiveColumnHandle) columnHandle;
-        return new ColumnMetadata(column.getName(), typeManager.getType(column.getTypeSignature()));
+        return new ColumnMetadata(column.getName(), column.getType());
     }

     @Override
diff --git a/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergPageSink.java b/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergPageSink.java
index 568673b4bb4c..1c4b40602059 100644
--- a/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergPageSink.java
+++ b/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergPageSink.java
@@ -140,7 +140,7 @@ public IcebergPageSink(
         this.typeManager = requireNonNull(typeManager, "typeManager is null");
         this.fileFormat = requireNonNull(fileFormat, "fileFormat is null");
         this.inputColumns = ImmutableList.copyOf(inputColumns);
-        this.pagePartitioner = new PagePartitioner(pageIndexerFactory, toPartitionColumns(typeManager, inputColumns, partitionSpec));
+        this.pagePartitioner = new PagePartitioner(pageIndexerFactory, toPartitionColumns(inputColumns, partitionSpec));
     }

     @Override
@@ -432,7 +432,7 @@ public static Object getIcebergValue(Block block, int position, Type type)
         throw new UnsupportedOperationException("Type not supported as partition column: " + type.getDisplayName());
     }

-    private static List<PartitionColumn> toPartitionColumns(TypeManager typeManager, List<HiveColumnHandle> handles, PartitionSpec partitionSpec)
+    private static List<PartitionColumn> toPartitionColumns(List<HiveColumnHandle> handles, PartitionSpec partitionSpec)
     {
         Map<String, Integer> nameChannels = new HashMap<>();
         for (int i = 0; i < handles.size(); i++) {
@@ -444,7 +444,7 @@ private static List toPartitionColumns(TypeManager typeManager,
                     String name = partitionSpec.schema().findColumnName(field.sourceId());
                     Integer channel = nameChannels.get(name);
                     checkArgument(channel != null, "partition field not found: %s", field);
-                    Type inputType = typeManager.getType(handles.get(channel).getTypeSignature());
+                    Type inputType = handles.get(channel).getType();
                     ColumnTransform transform = getColumnTransform(field, inputType);
                     return new PartitionColumn(field, channel, inputType, transform.getType(), transform.getTransform());
                 })
diff --git a/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergPageSourceProvider.java b/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergPageSourceProvider.java
index d97f4c2deb3b..588064d740aa 100644
--- a/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergPageSourceProvider.java
+++ b/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergPageSourceProvider.java
@@ -233,7 +233,7 @@ private static ConnectorPageSource createParquetPageSource(
                 HiveColumnHandle column = regularColumns.get(columnIndex);
                 org.apache.parquet.schema.Type parquetField = parquetFields.get(columnIndex);

-                Type prestoType = typeManager.getType(column.getTypeSignature());
+                Type prestoType = column.getType();

                 prestoTypes.add(prestoType);
diff --git a/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergSplitManager.java b/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergSplitManager.java
index 537058908e84..9f6cdf1fac56 100644
--- a/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergSplitManager.java
+++ b/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergSplitManager.java
@@ -21,6 +21,7 @@
 import io.prestosql.spi.connector.ConnectorTableHandle;
 import io.prestosql.spi.connector.ConnectorTransactionHandle;
 import io.prestosql.spi.connector.classloader.ClassLoaderSafeConnectorSplitSource;
+import io.prestosql.spi.type.TypeManager;
 import org.apache.iceberg.Table;
 import org.apache.iceberg.TableScan;

@@ -35,12 +36,14 @@ public class IcebergSplitManager
 {
     private final IcebergTransactionManager transactionManager;
     private final HdfsEnvironment hdfsEnvironment;
+    private final TypeManager typeManager;

     @Inject
-    public IcebergSplitManager(IcebergTransactionManager transactionManager, HdfsEnvironment hdfsEnvironment)
+    public IcebergSplitManager(IcebergTransactionManager transactionManager, HdfsEnvironment hdfsEnvironment, TypeManager typeManager)
     {
         this.transactionManager = requireNonNull(transactionManager, "transactionManager is null");
         this.hdfsEnvironment = requireNonNull(hdfsEnvironment, "hdfsEnvironment is null");
+        this.typeManager = requireNonNull(typeManager, "typeManager is null");
     }

     @Override
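IcebergSplitManager now receives the TypeManager through constructor injection. The binding this presumes is not part of this hunk; a sketch of the conventional module wiring (an assumption, following how connector modules typically bind engine-provided services):

    // Assumed Guice wiring in the connector module, not in this patch:
    binder.bind(TypeManager.class).toInstance(context.getTypeManager());
    binder.bind(IcebergSplitManager.class).in(Scopes.SINGLETON);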
diff --git a/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergUtil.java b/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergUtil.java
index 28e7f35fd4e2..94af22bc9434 100644
--- a/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergUtil.java
+++ b/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergUtil.java
@@ -99,7 +99,7 @@ public static List<HiveColumnHandle> getColumns(Schema schema, PartitionSpec spe
             }
             io.prestosql.spi.type.Type prestoType = toPrestoType(type, typeManager);
             HiveType hiveType = toHiveType(TYPE_TRANSLATOR, coerceForHive(prestoType));
-            HiveColumnHandle columnHandle = new HiveColumnHandle(column.name(), hiveType, prestoType.getTypeSignature(), columnIndex, columnType, Optional.empty());
+            HiveColumnHandle columnHandle = new HiveColumnHandle(column.name(), hiveType, prestoType, columnIndex, columnType, Optional.empty());
             columnIndex++;
             builder.add(columnHandle);
         }
@@ -119,7 +119,7 @@ public static List<HiveColumnHandle> getPartitionColumns(Schema schema, Partitio
             Type type = partitionField.transform().getResultType(sourceType);
             io.prestosql.spi.type.Type prestoType = toPrestoType(type, typeManager);
             HiveType hiveType = toHiveType(TYPE_TRANSLATOR, coerceForHive(prestoType));
-            HiveColumnHandle columnHandle = new HiveColumnHandle(partitionField.name(), hiveType, prestoType.getTypeSignature(), columnIndex, PARTITION_KEY, Optional.empty());
+            HiveColumnHandle columnHandle = new HiveColumnHandle(partitionField.name(), hiveType, prestoType, columnIndex, PARTITION_KEY, Optional.empty());
             columnIndex++;
             builder.add(columnHandle);
         }