Skip to content

Commit

Permalink
[apache#2628] improvement(client): remove tableDTO in client (apache#…
Browse files Browse the repository at this point in the history
…2630)

### What changes were proposed in this pull request?

- Remove TableDTO from the client
- Convert TableDTO to Table in the client

### Why are the changes needed?

Fix: apache#2628 

### Does this PR introduce _any_ user-facing change?

no

### How was this patch tested?

existing tests
  • Loading branch information
mchades authored and xiaojiebao committed Mar 28, 2024
1 parent eb073f0 commit db01f8c
Show file tree
Hide file tree
Showing 15 changed files with 249 additions and 141 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@

import com.datastrato.gravitino.annotation.Evolving;
import java.util.Arrays;
import java.util.Objects;

/**
* The interface of a function expression. A function expression is an expression that takes a
Expand Down Expand Up @@ -74,5 +75,25 @@ public String toString() {
}
return functionName + "(" + String.join(", ", Arrays.toString(arguments)) + ")";
}

/**
 * Compares this function expression with another for equality.
 *
 * <p>Two instances are equal when they have the same runtime class, an equal
 * function name, and element-wise equal argument arrays.
 *
 * @param o the object to compare against, may be null
 * @return true if {@code o} is a {@code FuncExpressionImpl} with the same name and arguments
 */
@Override
public boolean equals(Object o) {
// Fast path: same reference is always equal.
if (this == o) {
return true;
}
// getClass comparison (rather than instanceof) keeps equals symmetric across subclasses.
if (o == null || getClass() != o.getClass()) {
return false;
}
FuncExpressionImpl that = (FuncExpressionImpl) o;
// Arrays.equals compares array contents element-wise; Objects.equals tolerates null names.
return Objects.equals(functionName, that.functionName)
&& Arrays.equals(arguments, that.arguments);
}

/**
 * Hash code consistent with {@link #equals(Object)}: combines the function name
 * with the element-wise hash of the argument array.
 *
 * @return the hash code for this function expression
 */
@Override
public int hashCode() {
// Arrays.hashCode (not Objects.hash on the array) hashes contents, not array identity.
int result = Objects.hash(functionName);
result = 31 * result + Arrays.hashCode(arguments);
return result;
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
import com.datastrato.gravitino.rel.expressions.Expression;
import com.datastrato.gravitino.rel.expressions.NamedReference;
import java.util.Arrays;
import java.util.Objects;

/** Helper methods to create distributions to pass into Gravitino. */
public class Distributions {
Expand Down Expand Up @@ -133,6 +134,39 @@ public Expression[] expressions() {
return expressions;
}

/**
 * Returns a human-readable representation of this distribution, including the
 * strategy, bucket number, and the expression array contents.
 *
 * @return a string of the form {@code DistributionImpl{strategy=..., number=..., expressions=[...]}}
 */
@Override
public String toString() {
return "DistributionImpl{"
+ "strategy="
+ strategy
+ ", number="
+ number
+ ", expressions="
// Arrays.toString renders the array contents rather than the array's identity hash.
+ Arrays.toString(expressions)
+ '}';
}

/**
 * Compares this distribution with another for equality.
 *
 * <p>Two instances are equal when they share the same runtime class, bucket number,
 * strategy, and element-wise equal expression arrays.
 *
 * @param o the object to compare against, may be null
 * @return true if {@code o} is an equal {@code DistributionImpl}
 */
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
DistributionImpl that = (DistributionImpl) o;
// NOTE(review): strategy is compared with == — presumably an enum or interned
// constant; confirm against the Strategy declaration.
return number == that.number
&& strategy == that.strategy
&& Arrays.equals(expressions, that.expressions);
}

/**
 * Hash code consistent with {@link #equals(Object)}: combines strategy and number,
 * then folds in the element-wise hash of the expression array.
 *
 * @return the hash code for this distribution
 */
@Override
public int hashCode() {
// Arrays.hashCode hashes the array contents, matching Arrays.equals in equals().
int result = Objects.hash(strategy, number);
result = 31 * result + Arrays.hashCode(expressions);
return result;
}

/** Builder to create a distribution. */
public static class Builder {
private Strategy strategy;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,8 +23,8 @@ public class Literals {
*
* @param value the literal value
* @param dataType the data type of the literal
* @return a new {@link com.datastrato.gravitino.rel.expressions.Literal} instance
* @param <T> the JVM type of value held by the literal
* @return a new {@link com.datastrato.gravitino.rel.expressions.Literal} instance
*/
public static <T> LiteralImpl<T> of(T value, Type dataType) {
return new LiteralImpl<>(value, dataType);
Expand Down Expand Up @@ -204,13 +204,23 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) {
return false;
}
LiteralImpl<?> literal = (LiteralImpl) o;
return Objects.equals(value, literal.value) && Objects.equals(dataType, literal.dataType);

LiteralImpl<?> literal = (LiteralImpl<?>) o;
if (!Objects.equals(dataType, literal.dataType)) {
return false;
}
// Check both values for null before comparing to avoid NullPointerException
if (value == null || literal.value == null) {
return Objects.equals(value, literal.value);
}
// Now, it's safe to compare using toString() since neither value is null
return Objects.equals(value, literal.value)
|| value.toString().equals(literal.value.toString());
}

@Override
public int hashCode() {
return Objects.hash(value, dataType);
return Objects.hash(dataType, value != null ? value.toString() : null);
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
package com.datastrato.gravitino.rel.expressions.sorts;

import com.datastrato.gravitino.rel.expressions.Expression;
import java.util.Objects;

/** Helper methods to create SortOrders to pass into Gravitino. */
public class SortOrders {
Expand Down Expand Up @@ -84,6 +85,37 @@ public SortDirection direction() {
public NullOrdering nullOrdering() {
return nullOrdering;
}

/**
 * Compares this sort order with another for equality.
 *
 * <p>Two instances are equal when they share the same runtime class, an equal
 * sort expression, and the same direction and null ordering.
 *
 * @param o the object to compare against, may be null
 * @return true if {@code o} is an equal {@code SortImpl}
 */
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
SortImpl sort = (SortImpl) o;
// NOTE(review): direction and nullOrdering are compared with == — presumably
// enums; confirm against the SortDirection/NullOrdering declarations.
return Objects.equals(expression, sort.expression)
&& direction == sort.direction
&& nullOrdering == sort.nullOrdering;
}

/**
 * Hash code consistent with {@link #equals(Object)}, derived from the expression,
 * direction, and null ordering.
 *
 * @return the hash code for this sort order
 */
@Override
public int hashCode() {
return Objects.hash(expression, direction, nullOrdering);
}

/**
 * Returns a human-readable representation of this sort order.
 *
 * @return a string of the form {@code SortImpl{expression=..., direction=..., nullOrdering=...}}
 */
@Override
public String toString() {
return "SortImpl{"
+ "expression="
+ expression
+ ", direction="
+ direction
+ ", nullOrdering="
+ nullOrdering
+ '}';
}
}

private SortOrders() {}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -39,9 +39,6 @@
import com.datastrato.gravitino.catalog.hive.HiveTablePropertiesMetadata.TableType;
import com.datastrato.gravitino.client.GravitinoMetalake;
import com.datastrato.gravitino.connector.BaseCatalog;
import com.datastrato.gravitino.dto.rel.expressions.FieldReferenceDTO;
import com.datastrato.gravitino.dto.rel.partitioning.IdentityPartitioningDTO;
import com.datastrato.gravitino.dto.rel.partitioning.Partitioning;
import com.datastrato.gravitino.exceptions.NoSuchCatalogException;
import com.datastrato.gravitino.exceptions.NoSuchMetalakeException;
import com.datastrato.gravitino.exceptions.NoSuchSchemaException;
Expand Down Expand Up @@ -624,8 +621,7 @@ public void testCreatePartitionedHiveTable() throws TException, InterruptedExcep
TABLE_COMMENT,
properties,
new Transform[] {
IdentityPartitioningDTO.of(columns[1].name()),
IdentityPartitioningDTO.of(columns[2].name())
Transforms.identity(columns[1].name()), Transforms.identity(columns[2].name())
});

// Directly get table from hive metastore to check if the table is created successfully.
Expand All @@ -642,8 +638,7 @@ public void testCreatePartitionedHiveTable() throws TException, InterruptedExcep
TableCatalog tableCatalog = catalog.asTableCatalog();
Transform[] transforms =
new Transform[] {
IdentityPartitioningDTO.of(columns[0].name()),
IdentityPartitioningDTO.of(columns[1].name())
Transforms.identity(columns[0].name()), Transforms.identity(columns[1].name())
};
RuntimeException exception =
assertThrows(
Expand Down Expand Up @@ -870,7 +865,7 @@ private void assertTableEquals(
distribution == null
? Collections.emptyList()
: Arrays.stream(distribution.expressions())
.map(t -> ((FieldReferenceDTO) t).fieldName()[0])
.map(t -> ((NamedReference.FieldReference) t).fieldName()[0])
.collect(Collectors.toList());
Assertions.assertEquals(resultDistributionCols, hiveTab.getSd().getBucketCols());

Expand All @@ -879,14 +874,14 @@ private void assertTableEquals(
sortOrders[i].direction() == SortDirection.ASCENDING ? 0 : 1,
hiveTab.getSd().getSortCols().get(i).getOrder());
Assertions.assertEquals(
((FieldReferenceDTO) sortOrders[i].expression()).fieldName()[0],
((NamedReference.FieldReference) sortOrders[i].expression()).fieldName()[0],
hiveTab.getSd().getSortCols().get(i).getCol());
}
Assertions.assertNotNull(createdTable.partitioning());
Assertions.assertEquals(createdTable.partitioning().length, hiveTab.getPartitionKeys().size());
List<String> partitionKeys =
Arrays.stream(createdTable.partitioning())
.map(p -> ((Partitioning.SingleFieldPartitioning) p).fieldName()[0])
.map(p -> ((Transform.SingleFieldTransform) p).fieldName()[0])
.collect(Collectors.toList());
List<String> hivePartitionKeys =
hiveTab.getPartitionKeys().stream().map(FieldSchema::getName).collect(Collectors.toList());
Expand Down Expand Up @@ -916,7 +911,7 @@ public void testAlterHiveTable() throws TException, InterruptedException {
columns,
TABLE_COMMENT,
createProperties(),
new Transform[] {IdentityPartitioningDTO.of(columns[2].name())});
new Transform[] {Transforms.identity(columns[2].name())});
Assertions.assertNull(createdTable.auditInfo().lastModifier());
Assertions.assertEquals(AuthConstants.ANONYMOUS_USER, createdTable.auditInfo().creator());
Table alteredTable =
Expand Down Expand Up @@ -1266,7 +1261,7 @@ public void testDropHiveManagedTable() throws TException, InterruptedException,
columns,
TABLE_COMMENT,
createProperties(),
new Transform[] {IdentityPartitioningDTO.of(columns[2].name())});
new Transform[] {Transforms.identity(columns[2].name())});
// Directly get table from hive metastore to check if the table is created successfully.
org.apache.hadoop.hive.metastore.api.Table hiveTab =
hiveClientPool.run(client -> client.getTable(schemaName, tableName));
Expand All @@ -1291,7 +1286,7 @@ public void testDropHiveExternalTable() throws TException, InterruptedException,
columns,
TABLE_COMMENT,
ImmutableMap.of(TABLE_TYPE, EXTERNAL_TABLE.name().toLowerCase(Locale.ROOT)),
new Transform[] {IdentityPartitioningDTO.of(columns[2].name())});
new Transform[] {Transforms.identity(columns[2].name())});
// Directly get table from hive metastore to check if the table is created successfully.
org.apache.hadoop.hive.metastore.api.Table hiveTab =
hiveClientPool.run(client -> client.getTable(schemaName, tableName));
Expand All @@ -1318,7 +1313,7 @@ public void testPurgeHiveManagedTable() throws TException, InterruptedException,
columns,
TABLE_COMMENT,
createProperties(),
new Transform[] {IdentityPartitioningDTO.of(columns[2].name())});
new Transform[] {Transforms.identity(columns[2].name())});
// Directly get table from hive metastore to check if the table is created successfully.
org.apache.hadoop.hive.metastore.api.Table hiveTab =
hiveClientPool.run(client -> client.getTable(schemaName, tableName));
Expand All @@ -1345,7 +1340,7 @@ public void testPurgeHiveExternalTable() throws TException, InterruptedException
columns,
TABLE_COMMENT,
ImmutableMap.of(TABLE_TYPE, EXTERNAL_TABLE.name().toLowerCase(Locale.ROOT)),
new Transform[] {IdentityPartitioningDTO.of(columns[2].name())});
new Transform[] {Transforms.identity(columns[2].name())});
// Directly get table from hive metastore to check if the table is created successfully.
org.apache.hadoop.hive.metastore.api.Table hiveTab =
hiveClientPool.run(client -> client.getTable(schemaName, tableName));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@
package com.datastrato.gravitino.catalog.mysql.integration.test;

import static com.datastrato.gravitino.catalog.mysql.MysqlTablePropertiesMetadata.GRAVITINO_ENGINE_KEY;
import static com.datastrato.gravitino.dto.util.DTOConverters.toFunctionArg;
import static com.datastrato.gravitino.rel.Column.DEFAULT_VALUE_OF_CURRENT_TIMESTAMP;
import static org.junit.jupiter.api.Assertions.assertThrows;

Expand All @@ -15,7 +14,6 @@
import com.datastrato.gravitino.catalog.jdbc.config.JdbcConfig;
import com.datastrato.gravitino.catalog.mysql.integration.test.service.MysqlService;
import com.datastrato.gravitino.client.GravitinoMetalake;
import com.datastrato.gravitino.dto.rel.expressions.LiteralDTO;
import com.datastrato.gravitino.exceptions.NoSuchSchemaException;
import com.datastrato.gravitino.exceptions.NotFoundException;
import com.datastrato.gravitino.exceptions.SchemaAlreadyExistsException;
Expand Down Expand Up @@ -425,17 +423,13 @@ void testColumnDefaultValue() {
ImmutableMap.of());

Assertions.assertEquals(
toFunctionArg(UnparsedExpression.of("rand()")), createdTable.columns()[0].defaultValue());
UnparsedExpression.of("rand()"), createdTable.columns()[0].defaultValue());
Assertions.assertEquals(
toFunctionArg(DEFAULT_VALUE_OF_CURRENT_TIMESTAMP),
createdTable.columns()[1].defaultValue());
Assertions.assertEquals(toFunctionArg(Literals.NULL), createdTable.columns()[2].defaultValue());
DEFAULT_VALUE_OF_CURRENT_TIMESTAMP, createdTable.columns()[1].defaultValue());
Assertions.assertEquals(Literals.NULL, createdTable.columns()[2].defaultValue());
Assertions.assertEquals(Column.DEFAULT_VALUE_NOT_SET, createdTable.columns()[3].defaultValue());
Assertions.assertEquals(
LiteralDTO.builder()
.withValue("current_timestamp")
.withDataType(Types.VarCharType.of(255))
.build(),
Literals.varcharLiteral(255, "current_timestamp"),
createdTable.columns()[4].defaultValue());
}

Expand Down Expand Up @@ -484,71 +478,57 @@ void testColumnDefaultValueConverter() {
for (Column column : loadedTable.columns()) {
switch (column.name()) {
case "int_col_1":
Assertions.assertEquals(
toFunctionArg(Literals.integerLiteral(431)), column.defaultValue());
Assertions.assertEquals(Literals.integerLiteral(431), column.defaultValue());
break;
case "int_col_2":
Assertions.assertEquals(
toFunctionArg(UnparsedExpression.of("rand()")), column.defaultValue());
Assertions.assertEquals(UnparsedExpression.of("rand()"), column.defaultValue());
break;
case "int_col_3":
Assertions.assertEquals(toFunctionArg(Literals.integerLiteral(3)), column.defaultValue());
Assertions.assertEquals(Literals.integerLiteral(3), column.defaultValue());
break;
case "double_col_1":
Assertions.assertEquals(
toFunctionArg(Literals.doubleLiteral(123.45)), column.defaultValue());
Assertions.assertEquals(Literals.doubleLiteral(123.45), column.defaultValue());
break;
case "varchar20_col_1":
Assertions.assertEquals(
toFunctionArg(UnparsedExpression.of("10")), column.defaultValue());
Assertions.assertEquals(UnparsedExpression.of("10"), column.defaultValue());
break;
case "varchar100_col_1":
Assertions.assertEquals(
toFunctionArg(Literals.varcharLiteral(100, "CURRENT_TIMESTAMP")),
column.defaultValue());
Literals.varcharLiteral(100, "CURRENT_TIMESTAMP"), column.defaultValue());
break;
case "varchar200_col_1":
Assertions.assertEquals(
toFunctionArg(Literals.varcharLiteral(200, "curdate()")), column.defaultValue());
Assertions.assertEquals(Literals.varcharLiteral(200, "curdate()"), column.defaultValue());
break;
case "varchar200_col_2":
Assertions.assertEquals(
toFunctionArg(UnparsedExpression.of("curdate()")), column.defaultValue());
Assertions.assertEquals(UnparsedExpression.of("curdate()"), column.defaultValue());
break;
case "varchar200_col_3":
Assertions.assertEquals(
toFunctionArg(UnparsedExpression.of("now()")), column.defaultValue());
Assertions.assertEquals(UnparsedExpression.of("now()"), column.defaultValue());
break;
case "date_col_1":
Assertions.assertEquals(
toFunctionArg(UnparsedExpression.of("curdate()")), column.defaultValue());
Assertions.assertEquals(UnparsedExpression.of("curdate()"), column.defaultValue());
break;
case "date_col_2":
Assertions.assertEquals(toFunctionArg(Literals.NULL), column.defaultValue());
Assertions.assertEquals(Literals.NULL, column.defaultValue());
break;
case "date_col_3":
Assertions.assertEquals(
toFunctionArg(UnparsedExpression.of("(curdate() + interval 1 year)")),
column.defaultValue());
UnparsedExpression.of("(curdate() + interval 1 year)"), column.defaultValue());
break;
case "date_col_4":
Assertions.assertEquals(
toFunctionArg(UnparsedExpression.of("curdate()")), column.defaultValue());
Assertions.assertEquals(UnparsedExpression.of("curdate()"), column.defaultValue());
break;
case "timestamp_col_1":
Assertions.assertEquals(
toFunctionArg(Literals.timestampLiteral("2012-12-31T11:30:45")),
column.defaultValue());
Literals.timestampLiteral("2012-12-31T11:30:45"), column.defaultValue());
break;
case "timestamp_col_2":
Assertions.assertEquals(
toFunctionArg(Literals.timestampLiteral("1983-09-05T00:00:00")),
column.defaultValue());
Literals.timestampLiteral("1983-09-05T00:00:00"), column.defaultValue());
break;
case "decimal_6_2_col_1":
Assertions.assertEquals(
toFunctionArg(Literals.decimalLiteral(Decimal.of("1.2", 6, 2))),
column.defaultValue());
Literals.decimalLiteral(Decimal.of("1.2", 6, 2)), column.defaultValue());
break;
default:
Assertions.fail("Unexpected column name: " + column.name());
Expand Down
Loading

0 comments on commit db01f8c

Please sign in to comment.