This is an automated cherry-pick of #2639
Signed-off-by: ti-chi-bot <ti-community-prow-bot@tidb.io>
shiyuhang0 authored and ti-chi-bot committed Mar 19, 2023
1 parent c2c70d7 commit 2172a04
Showing 4 changed files with 101 additions and 3 deletions.
@@ -59,9 +59,12 @@ trait LeafColumnarExecRDD extends LeafExecNode {
}
b.append("]")
b.toString
} else {
} else if (tiRDDs.lengthCompare(1) == 0) {
s"${dagRequest.getStoreType.name()} $nodeName{$dagRequest}" +
s"${TiUtil.getReqEstCountStr(dagRequest)}"
} else {
// return a placeholder string when there are no tiRDDs
"Empty TiRDD"
}

def dagRequest: TiDAGRequest = tiRDDs.head.dagRequest
12 changes: 12 additions & 0 deletions core/src/test/scala/org/apache/spark/sql/PartitionTableSuite.scala
@@ -33,6 +33,18 @@ class PartitionTableSuite extends BasePlanTest {
super.afterAll()
}

test("reading from range column partition") {
tidbStmt.execute("drop table if exists range_column_test")
tidbStmt.execute(
"create table range_column_test (id varchar(10)) partition by RANGE COLUMNS(`id`) (PARTITION `p1` VALUES LESS THAN ('''CN001'''),PARTITION `p2` VALUES LESS THAN ('CN002'))")
tidbStmt.execute("insert into `range_column_test` values('CN001')")
tidbStmt.execute("insert into `range_column_test` values('''CN001''')")

judge("select * from range_column_test where id = 'CN001'")
judge("select * from range_column_test where id = '\\'CN001\\''")
judge("select * from range_column_test where id = 'CN002'")
}

test("reading from hash partition") {
enablePartitionForTiDB()
tidbStmt.execute("drop table if exists t")
39 changes: 39 additions & 0 deletions docs/features/partition_table.md
@@ -0,0 +1,39 @@
# TiSpark partition table

## Read from partition table

TiSpark supports reading range, hash, and list partition tables from TiDB.

TiSpark doesn't support the MySQL/TiDB partition selection syntax `select col_name from table_name partition(partition_name)`, but you can still use a `where` condition to filter the partitions.
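
A minimal sketch of such a filtered read (an illustration, not part of this patch): it assumes `spark.tispark.pd.addresses` and the TiDB catalog are already configured so that TiDB tables are visible to Spark SQL, and reuses the `range_column_test` table created in the test suite above.

```scala
// The `where` condition on the partitioning column filters partitions,
// replacing the unsupported `partition(partition_name)` syntax.
val rows = spark.sql("select * from test.range_column_test where id = 'CN001'")
rows.show()
```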

## Partition pruning in reading

TiSpark decides whether to apply partition pruning according to the partition type and the partition expression associated with the table. If partition pruning is not applied, TiSpark's reading is equivalent to doing a table scan over all partitions.

TiSpark only supports partition pruning for **range** partitions, and only with the following partition expressions (see the sketch after this list):

+ a column expression
+ `YEAR(col)`, where the column is a datetime or a string/date literal that can be parsed as a datetime.
+ `TO_DAYS(col)`, where the column is a datetime or a string/date literal that can be parsed as a datetime.
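
A hedged sketch of when pruning can apply: the `orders` table below is hypothetical and is assumed to have been created in TiDB with a `YEAR(col)` range partition expression over a datetime column.

```scala
// Hypothetical TiDB table, e.g.:
//   CREATE TABLE orders (o_date DATETIME) PARTITION BY RANGE (YEAR(o_date)) (
//     PARTITION p2021 VALUES LESS THAN (2022),
//     PARTITION p2022 VALUES LESS THAN (2023));
// The filter on the partitioning column can let TiSpark prune to the matching
// partition; without such a filter, every partition is scanned.
spark.sql("select count(*) from test.orders where o_date >= '2022-01-01'").show()
```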

### Limitations

- TiSpark does not support partition pruning for hash and list partitions.
- TiSpark cannot apply partition pruning when the partition definition contains certain special characters. For example, a partition definition containing `""`, such as `partition p0 values less than ('"string"')`, cannot be pruned.

## Write into partition table

Currently, TiSpark only supports writing into range and hash partition tables under the following conditions:
+ the partition expression is a column expression
+ the partition expression is `YEAR($argument)`, where the argument is a column whose type is datetime or a string literal that can be parsed as datetime

There are two ways to write into a partition table (a sketch of the first follows the list):
1. Use the datasource API to write into the partition table, which supports replace and append semantics.
2. Use a delete statement with Spark SQL.
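
A minimal sketch of the first way, under assumptions: the `tidb` datasource format and the `tidb.addr`/`tidb.port`/`tidb.user`/`tidb.password`/`database`/`table` option names follow the TiSpark datasource documentation, and `staging_orders`, `test`, and `orders` are placeholders.

```scala
import org.apache.spark.sql.DataFrame

// Rows to write; the schema is assumed to match the target partition table.
val df: DataFrame = spark.sql("select * from staging_orders")

df.write
  .format("tidb")                    // TiSpark datasource
  .option("tidb.addr", "127.0.0.1")  // placeholder connection options
  .option("tidb.port", "4000")
  .option("tidb.user", "root")
  .option("tidb.password", "")
  .option("database", "test")        // placeholder target database
  .option("table", "orders")         // placeholder partition table
  .mode("append")                    // append semantics; see the datasource docs for replace
  .save()
```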

> [!NOTE]
> Because different character sets and collations have different sort orders, the character sets and
> collations in use may affect which partition of a table partitioned by RANGE COLUMNS a given row
> is stored in when using string columns as partitioning columns.
> For supported character sets and collations, see [Limitations](../README.md#limitations).
@@ -70,6 +70,13 @@ public static List<TiPartitionDef> prune(TiTableInfo tableInfo, List<Expression>
return tableInfo.getPartitionInfo().getDefs();
}

// Pruning cannot handle values containing \" yet, so fall back to returning all partitions.
for (int i = 0; i < tableInfo.getPartitionInfo().getDefs().size(); i++) {
TiPartitionDef pDef = tableInfo.getPartitionInfo().getDefs().get(i);
if (pDef.getLessThan().get(0).contains("\"")) {
return tableInfo.getPartitionInfo().getDefs();
}
}
RangeColumnPartitionPruner pruner = new RangeColumnPartitionPruner(tableInfo);
return pruner.prune(filters);
}
@@ -104,7 +111,7 @@ public static void generateRangeExprs(
// partExprColRefs.addAll(PredicateUtils.extractColumnRefFromExpression(partExpr));
for (int i = 0; i < partInfo.getDefs().size(); i++) {
TiPartitionDef pDef = partInfo.getDefs().get(i);
String current = pDef.getLessThan().get(lessThanIdx);
String current = wrapValue(pDef.getLessThan().get(lessThanIdx));
String leftHand;
if (current.equals("MAXVALUE")) {
leftHand = "true";
@@ -114,7 +121,7 @@
if (i == 0) {
partExprs.add(parser.parseExpression(leftHand));
} else {
String previous = partInfo.getDefs().get(i - 1).getLessThan().get(lessThanIdx);
String previous = wrapValue(partInfo.getDefs().get(i - 1).getLessThan().get(lessThanIdx));
String and =
String.format("%s >= %s and %s", wrapColumnName(partExprStr), previous, leftHand);
partExprs.add(parser.parseExpression(and));
@@ -132,4 +139,41 @@ private static String wrapColumnName(String columnName) {
return String.format("`%s`", columnName);
}
}

/**
* Spark SQL parses string literals without escaping, so we need to parse the partition
* definition without escaping too.
*
* <p>wrapValue replaces the outer single quotes with double quotes, so that ANTLR does not
* regard them as part of the string literal.
*
* <p>wrapValue also removes the escape characters inside the string literal.
*
* <p>e.g. 'string' -> "string", '''string''' -> "'string'", 'string''' -> "string'"
*
* <p>It can't handle '""'. e.g. '"string"' -> ""string"": parseExpression parses ""string"" as
* an empty string, but parses '"string"' as 'string'.
*
* @param value a partition boundary value from the partition definition
* @return the value wrapped in double quotes with escapes removed, or the original value if it
*     is not single-quoted
*/
private static String wrapValue(String value) {
if (value.startsWith("'") && value.endsWith("'")) {
String newValue = String.format("\"%s\"", value.substring(1, value.length() - 1));
StringBuilder valueWithoutEscape = new StringBuilder();
for (int i = 0; i < newValue.length(); i++) {
if (newValue.charAt(i) != '\'') {
valueWithoutEscape.append(newValue.charAt(i));
} else {
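// A single quote here begins an escape pair '': drop this quote, keep the next
// character, and advance an extra position so the loop moves past the pair.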
if (i + 1 < newValue.length()) {
valueWithoutEscape.append(newValue.charAt(i + 1));
}
i++;
}
}
return valueWithoutEscape.toString();
} else {
return value;
}
}
}
