Feature: decode json to string #417

Merged · 6 commits · Aug 20, 2018
Changes from all commits
2 changes: 1 addition & 1 deletion core/pom.xml
@@ -17,7 +17,7 @@
<properties>
<scalatest.version>3.0.4</scalatest.version>
<scalaj.version>2.3.0</scalaj.version>
- <mysql.connector.version>5.1.18</mysql.connector.version>
+ <mysql.connector.version>5.1.44</mysql.connector.version>
<play.version>2.6.8</play.version>
</properties>
<dependencies>
1 change: 1 addition & 0 deletions core/src/main/scala/com/pingcap/tispark/TiUtils.scala
@@ -143,6 +143,7 @@ object TiUtils {
case _: EnumType => sql.types.LongType
case _: SetType => sql.types.LongType
case _: YearType => sql.types.LongType
+ case _: JsonType => sql.types.StringType
}

def fromSparkType(tp: DataType): TiDataType =
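With JsonType mapped to sql.types.StringType, a TiDB JSON column now surfaces in Spark as an ordinary string column instead of being rejected. A minimal sketch of the effect, not part of this PR — it assumes a SparkSession named spark with TiSpark enabled and the table t created in the IssueTestSuite test further down:

// Hedged sketch: the JSON column is exposed to Spark as StringType.
val df = spark.sql("select json_doc from t")
df.printSchema()
// root
//  |-- json_doc: string (nullable = true)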
2 changes: 1 addition & 1 deletion core/src/main/scala/org/apache/spark/sql/TiStrategy.scala
@@ -61,7 +61,7 @@ class TiStrategy(context: SQLContext) extends Strategy with Logging {

def typeBlackList: TypeBlacklist = {
val blacklistString =
- sqlConf.getConfString(TiConfigConst.UNSUPPORTED_TYPES, "time,enum,set,year,json")
+ sqlConf.getConfString(TiConfigConst.UNSUPPORTED_TYPES, "time,enum,set,year")
new TypeBlacklist(blacklistString)
}

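Dropping json from the default blacklist is what lets JSON columns reach the new decoding path; the type mapping above only matters once TiStrategy stops rejecting the type. Anyone who wants the previous behavior back can presumably re-add json through the same configuration key — a hedged sketch, using the TiConfigConst.UNSUPPORTED_TYPES constant rather than guessing its literal key string (spark is an existing SparkSession):

// Hedged sketch: restore the pre-PR default by putting "json" back on the blacklist.
import com.pingcap.tispark.TiConfigConst  // package path assumed
spark.conf.set(TiConfigConst.UNSUPPORTED_TYPES, "time,enum,set,year,json")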
2 changes: 2 additions & 0 deletions core/src/test/resources/tidb_config.properties.template
@@ -4,6 +4,8 @@ tidb.addr=127.0.0.1
tidb.port=4000
# TiDB login user
tidb.user=root
+ # TiDB login password
+ tidb.password=
# TPCH database name, if you already have a tpch database in TiDB, specify the db name so that TPCH tests will run on this database
tpch.db=tpch_test
# Placement Driver address:port
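The new tidb.password entry feeds the test harness through the TiDB_PASSWORD constant added below in TestConstants and SharedSQLContext. A hypothetical filled-in tidb_config.properties for a local run (values are placeholders, not part of this PR):

# Hypothetical local configuration — adjust to your own TiDB instance
tidb.addr=127.0.0.1
tidb.port=4000
tidb.user=root
tidb.password=<your password>
tpch.db=tpch_test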
61 changes: 61 additions & 0 deletions core/src/test/scala/org/apache/spark/sql/IssueTestSuite.scala
@@ -210,6 +210,67 @@ class IssueTestSuite extends BaseTiSparkSuite {
judge("select count(c1 + c2) from t")
}

test("json support") {
tidbStmt.execute("drop table if exists t")
tidbStmt.execute("create table t(json_doc json)")
tidbStmt.execute(
"""insert into t values ('null'),
('true'),
('false'),
('0'),
('1'),
('-1'),
('2147483647'),
('-2147483648'),
('9223372036854775807'),
('-9223372036854775808'),
('0.5'),
('-0.5'),
('""'),
('"a"'),
('"\\t"'),
('"\\n"'),
('"\\""'),
('"\\u0001"'),
('[]'),
('"中文"'),
(JSON_ARRAY(null, false, true, 0, 0.5, "hello", JSON_ARRAY("nested_array"), JSON_OBJECT("nested", "object"))),
(JSON_OBJECT("a", null, "b", true, "c", false, "d", 0, "e", 0.5, "f", "hello", "nested_array", JSON_ARRAY(1, 2, 3), "nested_object", JSON_OBJECT("hello", 1)))"""
)
refreshConnections()

runTest(
"select json_doc from t",
skipJDBC = true,
rTiDB = List(
List("null"),
List(true),
List(false),
List(0),
List(1),
List(-1),
List(2147483647),
List(-2147483648),
List(9223372036854775807L),
List(-9223372036854775808L),
List(0.5),
List(-0.5),
List("\"\""),
List("\"a\""),
List("\"\\t\""),
List("\"\\n\""),
List("\"\\\"\""),
List("\"\\u0001\""),
List("[]"),
List("\"中文\""),
List("[null,false,true,0,0.5,\"hello\",[\"nested_array\"],{\"nested\":\"object\"}]"),
List(
"{\"a\":null,\"b\":true,\"c\":false,\"d\":0,\"e\":0.5,\"f\":\"hello\",\"nested_array\":[1,2,3],\"nested_object\":{\"hello\":1}}"
)
)
)
}

override def afterAll(): Unit =
try {
tidbStmt.execute("drop table if exists t")
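Since the decoded values are plain JSON strings, the usual Spark JSON helpers apply to them afterwards. A hedged usage sketch against the table populated by the test above (assumes a SparkSession named spark with TiSpark enabled; get_json_object is Spark's built-in JSON path extractor):

// Hedged sketch: extract a field from the decoded JSON string.
import org.apache.spark.sql.functions.{col, get_json_object}

spark.sql("select json_doc from t")
  .select(get_json_object(col("json_doc"), "$.nested_object.hello"))
  .show()
// yields 1 for the JSON_OBJECT row and null for the other rows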
@@ -158,16 +158,18 @@ object SharedSQLContext extends Logging {
if (_tidbConnection == null) {
val jdbcUsername = getOrElse(_tidbConf, TiDB_USER, "root")

+ val jdbcPassword = getOrElse(_tidbConf, TiDB_PASSWORD, "")

val jdbcHostname = getOrElse(_tidbConf, TiDB_ADDRESS, "127.0.0.1")

val jdbcPort = Integer.parseInt(getOrElse(_tidbConf, TiDB_PORT, "4000"))

val loadData = getOrElse(_tidbConf, SHOULD_LOAD_DATA, "true").toBoolean

jdbcUrl =
s"jdbc:mysql://$jdbcHostname:$jdbcPort/?user=$jdbcUsername&useUnicode=true&characterEncoding=UTF-8&zeroDateTimeBehavior=convertToNull"
s"jdbc:mysql://$jdbcHostname:$jdbcPort/?user=$jdbcUsername&password=$jdbcPassword&useUnicode=true&characterEncoding=UTF-8&zeroDateTimeBehavior=convertToNull&useSSL=false"

- _tidbConnection = DriverManager.getConnection(jdbcUrl, jdbcUsername, "")
+ _tidbConnection = DriverManager.getConnection(jdbcUrl, jdbcUsername, jdbcPassword)
_statement = _tidbConnection.createStatement()

if (loadData && !forceNotLoad) {
@@ -21,6 +21,7 @@ object TestConstants {
val TiDB_ADDRESS = "tidb.addr"
val TiDB_PORT = "tidb.port"
val TiDB_USER = "tidb.user"
+ val TiDB_PASSWORD = "tidb.password"
val TPCH_DB_NAME = "tpch.db"
val SHOULD_LOAD_DATA = "test.data.load"
val SHOULD_SKIP_TEST = "test.skip"
@@ -44,7 +44,7 @@ public class DataTypeFactory {
extractTypeMap(EnumType.subTypes, EnumType.class, builder, instBuilder);
extractTypeMap(SetType.subTypes, SetType.class, builder, instBuilder);
extractTypeMap(YearType.subTypes, YearType.class, builder, instBuilder);
- extractTypeMap(new MySQLType[]{MySQLType.TypeJSON}, StringType.class, builder, instBuilder);
+ extractTypeMap(JsonType.subTypes, JsonType.class, builder, instBuilder);
dataTypeCreatorMap = builder.build();
dataTypeInstanceMap = instBuilder.build();
}