
Commit eb1c95e (2 parents: a89c61d + 134a7d7)

Merge remote-tracking branch 'upstream/master' into SPARK-26346

# Conflicts:
#	pom.xml

File tree: 91 files changed, +1510 −465 lines


common/unsafe/src/main/java/org/apache/spark/unsafe/types/UTF8String.java

Lines changed: 21 additions & 7 deletions

@@ -683,6 +683,16 @@ public UTF8String trimRight() {
     return copyUTF8String(0, e);
   }
 
+  /**
+   * Trims at most `numSpaces` space characters (ASCII 32) from the end of this string.
+   */
+  public UTF8String trimTrailingSpaces(int numSpaces) {
+    int endIdx = numBytes - 1;
+    int trimTo = numBytes - numSpaces;
+    while (endIdx >= trimTo && getByte(endIdx) == 0x20) endIdx--;
+    return copyUTF8String(0, endIdx);
+  }
+
   /**
    * Trims instances of the given trim string from the end of this string.
    *
@@ -1065,16 +1075,20 @@ public UTF8String replace(UTF8String search, UTF8String replace) {
     return buf.build();
   }
 
-  // TODO: Need to use `Code Point` here instead of Char in case the character longer than 2 bytes
-  public UTF8String translate(Map<Character, Character> dict) {
+  public UTF8String translate(Map<String, String> dict) {
     String srcStr = this.toString();
 
     StringBuilder sb = new StringBuilder();
-    for(int k = 0; k< srcStr.length(); k++) {
-      if (null == dict.get(srcStr.charAt(k))) {
-        sb.append(srcStr.charAt(k));
-      } else if ('\0' != dict.get(srcStr.charAt(k))){
-        sb.append(dict.get(srcStr.charAt(k)));
+    int charCount = 0;
+    for (int k = 0; k < srcStr.length(); k += charCount) {
+      int codePoint = srcStr.codePointAt(k);
+      charCount = Character.charCount(codePoint);
+      String subStr = srcStr.substring(k, k + charCount);
+      String translated = dict.get(subStr);
+      if (null == translated) {
+        sb.append(subStr);
+      } else if (!"\0".equals(translated)) {
+        sb.append(translated);
       }
     }
     return fromString(sb.toString());
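
The rewritten `translate` walks the source string by Unicode code point instead of by Java `char`, so characters outside the Basic Multilingual Plane (which Java strings store as two-`char` surrogate pairs) are matched and replaced as single units. A minimal Scala sketch of that iteration pattern, using an illustrative supplementary character:

```scala
// "𐐀" (U+10400) occupies two Java chars; charAt-based lookup would split it
// in half, while codePointAt + Character.charCount keep it intact.
val s = "a𐐀b"
var k = 0
while (k < s.length) {
  val codePoint = s.codePointAt(k)
  val width = Character.charCount(codePoint) // 1 for BMP chars, 2 for surrogate pairs
  println(s.substring(k, k + width))         // prints "a", then "𐐀", then "b"
  k += width
}
```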

common/unsafe/src/test/java/org/apache/spark/unsafe/types/UTF8StringSuite.java

Lines changed: 10 additions & 10 deletions

@@ -465,27 +465,27 @@ public void translate() {
     assertEquals(
       fromString("1a2s3ae"),
       fromString("translate").translate(ImmutableMap.of(
-        'r', '1',
-        'n', '2',
-        'l', '3',
-        't', '\0'
+        "r", "1",
+        "n", "2",
+        "l", "3",
+        "t", "\0"
       )));
     assertEquals(
       fromString("translate"),
       fromString("translate").translate(new HashMap<>()));
     assertEquals(
       fromString("asae"),
       fromString("translate").translate(ImmutableMap.of(
-        'r', '\0',
-        'n', '\0',
-        'l', '\0',
-        't', '\0'
+        "r", "\0",
+        "n", "\0",
+        "l", "\0",
+        "t", "\0"
       )));
     assertEquals(
       fromString("aa世b"),
       fromString("花花世界").translate(ImmutableMap.of(
-        '花', 'a',
-        '界', 'b'
+        "花", "a",
+        "界", "b"
       )));
   }
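
Because the dictionary keys are now full `String`s rather than single `Character`s, supplementary characters can be translated too, which the old API could not express. A hypothetical call (not part of the suite), assuming only the new `Map<String, String>` signature from the diff above:

```scala
import com.google.common.collect.ImmutableMap
import org.apache.spark.unsafe.types.UTF8String

// "𐐀" is one code point but two Java chars, so it could never be a single
// Character key; as a String key it now translates as one unit.
// A "\0" value still means "delete this character".
val out = UTF8String.fromString("a𐐀b").translate(ImmutableMap.of("𐐀", "x"))
assert(out == UTF8String.fromString("axb"))
```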

dev/deps/spark-deps-hadoop-2.7-hive-2.3

Lines changed: 3 additions & 3 deletions

@@ -196,9 +196,9 @@ objenesis/2.6//objenesis-2.6.jar
 okhttp/3.12.12//okhttp-3.12.12.jar
 okio/1.14.0//okio-1.14.0.jar
 opencsv/2.3//opencsv-2.3.jar
-orc-core/1.6.6//orc-core-1.6.6.jar
-orc-mapreduce/1.6.6//orc-mapreduce-1.6.6.jar
-orc-shims/1.6.6//orc-shims-1.6.6.jar
+orc-core/1.6.7//orc-core-1.6.7.jar
+orc-mapreduce/1.6.7//orc-mapreduce-1.6.7.jar
+orc-shims/1.6.7//orc-shims-1.6.7.jar
 oro/2.0.8//oro-2.0.8.jar
 osgi-resource-locator/1.0.3//osgi-resource-locator-1.0.3.jar
 paranamer/2.8//paranamer-2.8.jar

dev/deps/spark-deps-hadoop-3.2-hive-2.3

Lines changed: 3 additions & 3 deletions

@@ -166,9 +166,9 @@ objenesis/2.6//objenesis-2.6.jar
 okhttp/3.12.12//okhttp-3.12.12.jar
 okio/1.14.0//okio-1.14.0.jar
 opencsv/2.3//opencsv-2.3.jar
-orc-core/1.6.6//orc-core-1.6.6.jar
-orc-mapreduce/1.6.6//orc-mapreduce-1.6.6.jar
-orc-shims/1.6.6//orc-shims-1.6.6.jar
+orc-core/1.6.7//orc-core-1.6.7.jar
+orc-mapreduce/1.6.7//orc-mapreduce-1.6.7.jar
+orc-shims/1.6.7//orc-shims-1.6.7.jar
 oro/2.0.8//oro-2.0.8.jar
 osgi-resource-locator/1.0.3//osgi-resource-locator-1.0.3.jar
 paranamer/2.8//paranamer-2.8.jar

docs/README.md

Lines changed: 6 additions & 0 deletions

@@ -39,6 +39,12 @@ installed. Also install the following libraries:
 $ sudo gem install jekyll jekyll-redirect-from rouge
 ```
 
+If your ruby version is 3.0 or higher, you should also install `webrick`.
+
+```sh
+$ sudo gem install jekyll jekyll-redirect-from webrick
+```
+
 Note: If you are on a system with both Ruby 1.9 and Ruby 2.0 you may need to replace gem with gem2.0.
 
 ### R Documentation

docs/sql-migration-guide.md

Lines changed: 8 additions & 0 deletions

@@ -41,6 +41,14 @@ license: |
 
 - In Spark 3.2, the auto-generated `Cast` (such as those added by type coercion rules) will be stripped when generating column alias names. E.g., `sql("SELECT floor(1)").columns` will be `FLOOR(1)` instead of `FLOOR(CAST(1 AS DOUBLE))`.
 
+- In Spark 3.2, table refreshing clears cached data of the table as well as of all its dependents such as views while keeping the dependents cached. The following commands perform table refreshing:
+  * `ALTER TABLE .. ADD PARTITION`
+  * `ALTER TABLE .. RENAME PARTITION`
+  * `ALTER TABLE .. DROP PARTITION`
+  * `REFRESH TABLE`
+  * and the method `spark.catalog.refreshTable`
+  In Spark 3.1 and earlier, table refreshing leaves dependents uncached.
+
 ## Upgrading from Spark SQL 3.0 to 3.1
 
 - In Spark 3.1, statistical aggregation function includes `std`, `stddev`, `stddev_samp`, `variance`, `var_samp`, `skewness`, `kurtosis`, `covar_samp`, `corr` will return `NULL` instead of `Double.NaN` when `DivideByZero` occurs during expression evaluation, for example, when `stddev_samp` applied on a single element set. In Spark version 3.0 and earlier, it will return `Double.NaN` in such case. To restore the behavior before Spark 3.1, you can set `spark.sql.legacy.statisticalAggregate` to `true`.
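
A short sketch of the refresh behavior described in the new migration note, with illustrative table and view names:

```scala
// Illustrative names; any of the commands listed in the note triggers this.
spark.sql("CREATE TABLE t (id INT) USING parquet")
spark.sql("CREATE VIEW v AS SELECT id FROM t")
spark.sql("CACHE TABLE v")      // v's data is materialized in the cache

spark.catalog.refreshTable("t")
// Spark 3.2: the cached data of t and of its dependent v is cleared, but v
// remains marked as cached and is re-materialized on next access.
// Spark 3.1 and earlier: v keeps serving its stale cached data.
```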

external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MsSqlServerIntegrationSuite.scala

Lines changed: 121 additions & 0 deletions

@@ -107,6 +107,37 @@ class MsSqlServerIntegrationSuite extends DockerJDBCIntegrationSuite {
         |'the', 'lazy',
         |'dog')
       """.stripMargin).executeUpdate()
+    conn.prepareStatement(
+      """
+        |CREATE TABLE spatials (
+        |point geometry,
+        |line geometry,
+        |circle geometry,
+        |curve geography,
+        |polygon geometry,
+        |curve_polygon geography,
+        |multi_point geometry,
+        |multi_line geometry,
+        |multi_polygon geometry,
+        |geometry_collection geometry)
+      """.stripMargin).executeUpdate()
+    conn.prepareStatement(
+      """
+        |INSERT INTO spatials VALUES (
+        |'POINT(3 4 7 2.5)',
+        |'LINESTRING(1 0, 0 1, -1 0)',
+        |'CIRCULARSTRING(
+        | -122.358 47.653, -122.348 47.649, -122.348 47.658, -122.358 47.658, -122.358 47.653)',
+        |'COMPOUNDCURVE(
+        | CIRCULARSTRING(-122.358 47.653, -122.348 47.649,
+        | -122.348 47.658, -122.358 47.658, -122.358 47.653))',
+        |'POLYGON((-20 -20, -20 20, 20 20, 20 -20, -20 -20), (10 0, 0 10, 0 -10, 10 0))',
+        |'CURVEPOLYGON((-122.3 47, 122.3 47, 125.7 49, 121 38, -122.3 47))',
+        |'MULTIPOINT((2 3), (7 8 9.5))',
+        |'MULTILINESTRING((0 2, 1 1), (1 0, 1 1))',
+        |'MULTIPOLYGON(((2 2, 2 -2, -2 -2, -2 2, 2 2)),((1 1, 3 1, 3 3, 1 3, 1 1)))',
+        |'GEOMETRYCOLLECTION(LINESTRING(1 1, 3 5),POLYGON((-1 -1, -1 -5, -5 -5, -5 -1, -1 -1)))')
+      """.stripMargin).executeUpdate()
   }
 
   test("Basic test") {
@@ -232,4 +263,94 @@ class MsSqlServerIntegrationSuite extends DockerJDBCIntegrationSuite {
     df2.write.jdbc(jdbcUrl, "datescopy", new Properties)
     df3.write.jdbc(jdbcUrl, "stringscopy", new Properties)
   }
+
+  test("SPARK-33813: MsSqlServerDialect should support spatial types") {
+    val df = spark.read.jdbc(jdbcUrl, "spatials", new Properties)
+    val rows = df.collect()
+    assert(rows.length == 1)
+    val row = rows(0)
+    val types = row.toSeq.map(x => x.getClass.toString)
+    assert(types.length == 10)
+    assert(types(0) == "class [B")
+    assert(row.getAs[Array[Byte]](0) ===
+      Array(0, 0, 0, 0, 1, 15, 0, 0, 0, 0, 0, 0, 8, 64, 0, 0, 0, 0, 0, 0,
+        16, 64, 0, 0, 0, 0, 0, 0, 28, 64, 0, 0, 0, 0, 0, 0, 4, 64))
+    assert(types(1) == "class [B")
+    assert(row.getAs[Array[Byte]](1) ===
+      Array[Byte](0, 0, 0, 0, 1, 4, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, -16, 63, 0, 0,
+        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+        -16, 63, 0, 0, 0, 0, 0, 0, -16, -65, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0,
+        0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, -1, -1, -1, -1, 0, 0, 0, 0, 2))
+    assert(types(2) == "class [B")
+    assert(row.getAs[Array[Byte]](2) ===
+      Array[Byte](0, 0, 0, 0, 2, 4, 5, 0, 0, 0, -12, -3, -44, 120, -23, -106,
+        94, -64, -35, 36, 6, -127, -107, -45, 71, 64, -125, -64, -54, -95, 69,
+        -106, 94, -64, 80, -115, -105, 110, 18, -45, 71, 64, -125, -64, -54,
+        -95, 69, -106, 94, -64, 78, 98, 16, 88, 57, -44, 71, 64, -12, -3, -44,
+        120, -23, -106, 94, -64, 78, 98, 16, 88, 57, -44, 71, 64, -12, -3, -44,
+        120, -23, -106, 94, -64, -35, 36, 6, -127, -107, -45, 71, 64, 1, 0, 0,
+        0, 2, 0, 0, 0, 0, 1, 0, 0, 0, -1, -1, -1, -1, 0, 0, 0, 0, 8))
+    assert(types(3) == "class [B")
+    assert(row.getAs[Array[Byte]](3) ===
+      Array[Byte](-26, 16, 0, 0, 2, 4, 5, 0, 0, 0, -35, 36, 6, -127, -107, -45,
+        71, 64, -12, -3, -44, 120, -23, -106, 94, -64, 80, -115, -105, 110, 18,
+        -45, 71, 64, -125, -64, -54, -95, 69, -106, 94, -64, 78, 98, 16, 88, 57,
+        -44, 71, 64, -125, -64, -54, -95, 69, -106, 94, -64, 78, 98, 16, 88, 57,
+        -44, 71, 64, -12, -3, -44, 120, -23, -106, 94, -64, -35, 36, 6, -127, -107,
+        -45, 71, 64, -12, -3, -44, 120, -23, -106, 94, -64, 1, 0, 0, 0, 3, 0, 0,
+        0, 0, 1, 0, 0, 0, -1, -1, -1, -1, 0, 0, 0, 0, 9, 2, 0, 0, 0, 3, 1))
+    assert(types(4) == "class [B")
+    assert(row.getAs[Array[Byte]](4) ===
+      Array[Byte](0, 0, 0, 0, 1, 4, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 52, -64, 0, 0,
+        0, 0, 0, 0, 52, -64, 0, 0, 0, 0, 0, 0, 52, -64, 0, 0, 0, 0, 0, 0, 52, 64,
+        0, 0, 0, 0, 0, 0, 52, 64, 0, 0, 0, 0, 0, 0, 52, 64, 0, 0, 0, 0, 0, 0, 52,
+        64, 0, 0, 0, 0, 0, 0, 52, -64, 0, 0, 0, 0, 0, 0, 52, -64, 0, 0, 0, 0, 0,
+        0, 52, -64, 0, 0, 0, 0, 0, 0, 36, 64, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 36, 64, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+        0, 0, 36, -64, 0, 0, 0, 0, 0, 0, 36, 64, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0,
+        0, 2, 0, 0, 0, 0, 0, 5, 0, 0, 0, 1, 0, 0, 0, -1, -1, -1, -1, 0, 0, 0, 0, 3))
+    assert(types(5) === "class [B")
+    assert(row.getAs[Array[Byte]](5) ===
+      Array[Byte](-26, 16, 0, 0, 2, 4, 5, 0, 0, 0, 0, 0, 0, 0, 0, -128, 71, 64, 51,
+        51, 51, 51, 51, -109, 94, -64, 0, 0, 0, 0, 0, -128, 71, 64, 51, 51, 51, 51,
+        51, -109, 94, 64, 0, 0, 0, 0, 0, -128, 72, 64, -51, -52, -52, -52, -52, 108,
+        95, 64, 0, 0, 0, 0, 0, 0, 67, 64, 0, 0, 0, 0, 0, 64, 94, 64, 0, 0, 0, 0, 0,
+        -128, 71, 64, 51, 51, 51, 51, 51, -109, 94, -64, 1, 0, 0, 0, 1, 0, 0, 0, 0,
+        1, 0, 0, 0, -1, -1, -1, -1, 0, 0, 0, 0, 10))
+    assert(types(6) === "class [B")
+    assert(row.getAs[Array[Byte]](6) ===
+      Array[Byte](0, 0, 0, 0, 1, 5, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 64, 0, 0, 0, 0,
+        0, 0, 8, 64, 0, 0, 0, 0, 0, 0, 28, 64, 0, 0, 0, 0, 0, 0, 32, 64, 0, 0, 0, 0,
+        0, 0, -8, -1, 0, 0, 0, 0, 0, 0, 35, 64, 2, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0,
+        0, 0, 3, 0, 0, 0, -1, -1, -1, -1, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 1,
+        0, 0, 0, 0, 1, 0, 0, 0, 1))
+    assert(types(7) === "class [B")
+    assert(row.getAs[Array[Byte]](7) ===
+      Array[Byte](0, 0, 0, 0, 1, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+        0, 0, 0, 64, 0, 0, 0, 0, 0, 0, -16, 63, 0, 0, 0, 0, 0, 0, -16, 63, 0, 0, 0,
+        0, 0, 0, -16, 63, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -16, 63, 0, 0,
+        0, 0, 0, 0, -16, 63, 2, 0, 0, 0, 1, 0, 0, 0, 0, 1, 2, 0, 0, 0, 3, 0, 0, 0,
+        -1, -1, -1, -1, 0, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 1, 0, 0, 0, 2))
+    assert(types(8) === "class [B")
+    assert(row.getAs[Array[Byte]](8) ===
+      Array[Byte](0, 0, 0, 0, 1, 0, 10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 64, 0, 0, 0,
+        0, 0, 0, 0, 64, 0, 0, 0, 0, 0, 0, 0, 64, 0, 0, 0, 0, 0, 0, 0, -64, 0, 0, 0,
+        0, 0, 0, 0, -64, 0, 0, 0, 0, 0, 0, 0, -64, 0, 0, 0, 0, 0, 0, 0, -64, 0, 0,
+        0, 0, 0, 0, 0, 64, 0, 0, 0, 0, 0, 0, 0, 64, 0, 0, 0, 0, 0, 0, 0, 64, 0, 0,
+        0, 0, 0, 0, -16, 63, 0, 0, 0, 0, 0, 0, -16, 63, 0, 0, 0, 0, 0, 0, 8, 64, 0,
+        0, 0, 0, 0, 0, -16, 63, 0, 0, 0, 0, 0, 0, 8, 64, 0, 0, 0, 0, 0, 0, 8, 64, 0,
+        0, 0, 0, 0, 0, -16, 63, 0, 0, 0, 0, 0, 0, 8, 64, 0, 0, 0, 0, 0, 0, -16, 63,
+        0, 0, 0, 0, 0, 0, -16, 63, 2, 0, 0, 0, 2, 0, 0, 0, 0, 2, 5, 0, 0, 0, 3, 0,
+        0, 0, -1, -1, -1, -1, 0, 0, 0, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 1, 0, 0, 0, 3))
+    assert(types(9) === "class [B")
+    assert(row.getAs[Array[Byte]](9) ===
+      Array[Byte](0, 0, 0, 0, 1, 4, 7, 0, 0, 0, 0, 0, 0, 0, 0, 0, -16, 63, 0, 0, 0,
+        0, 0, 0, -16, 63, 0, 0, 0, 0, 0, 0, 8, 64, 0, 0, 0, 0, 0, 0, 20, 64, 0, 0,
+        0, 0, 0, 0, -16, -65, 0, 0, 0, 0, 0, 0, -16, -65, 0, 0, 0, 0, 0, 0, -16, -65,
+        0, 0, 0, 0, 0, 0, 20, -64, 0, 0, 0, 0, 0, 0, 20, -64, 0, 0, 0, 0, 0, 0, 20,
+        -64, 0, 0, 0, 0, 0, 0, 20, -64, 0, 0, 0, 0, 0, 0, -16, -65, 0, 0, 0, 0, 0, 0,
+        -16, -65, 0, 0, 0, 0, 0, 0, -16, -65, 2, 0, 0, 0, 1, 0, 0, 0, 0, 2, 2, 0, 0,
+        0, 3, 0, 0, 0, -1, -1, -1, -1, 0, 0, 0, 0, 7, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0,
+        0, 0, 0, 1, 0, 0, 0, 3))
+  }
 }
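
As the new test shows, the MS SQL Server dialect maps `geometry` and `geography` columns to Spark's `BinaryType`, so each value arrives as the server's own serialized bytes (`Array[Byte]`, hence `class [B`). A minimal read sketch, assuming `jdbcUrl` points at the Docker instance this suite provisions:

```scala
import java.util.Properties

val spatials = spark.read.jdbc(jdbcUrl, "spatials", new Properties)
spatials.schema.foreach(f => println(s"${f.name}: ${f.dataType}")) // BinaryType for all ten columns
// The bytes are SQL Server's internal spatial serialization, not WKT/WKB;
// decoding them is left to the caller.
val pointBytes = spatials.head().getAs[Array[Byte]]("point")
```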

external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/DB2IntegrationSuite.scala

Lines changed: 4 additions & 4 deletions

@@ -65,11 +65,11 @@ class DB2IntegrationSuite extends DockerJDBCIntegrationSuite with V2JDBCTest {
   override def testUpdateColumnType(tbl: String): Unit = {
     sql(s"CREATE TABLE $tbl (ID INTEGER)")
     var t = spark.table(tbl)
-    var expectedSchema = new StructType().add("ID", IntegerType)
+    var expectedSchema = new StructType().add("ID", IntegerType, true, defaultMetadata)
     assert(t.schema === expectedSchema)
     sql(s"ALTER TABLE $tbl ALTER COLUMN id TYPE DOUBLE")
     t = spark.table(tbl)
-    expectedSchema = new StructType().add("ID", DoubleType)
+    expectedSchema = new StructType().add("ID", DoubleType, true, defaultMetadata)
     assert(t.schema === expectedSchema)
     // Update column type from DOUBLE to STRING
     val msg1 = intercept[AnalysisException] {
@@ -81,8 +81,8 @@ class DB2IntegrationSuite extends DockerJDBCIntegrationSuite with V2JDBCTest {
   override def testCreateTableWithProperty(tbl: String): Unit = {
     sql(s"CREATE TABLE $tbl (ID INT)" +
       s" TBLPROPERTIES('CCSID'='UNICODE')")
-    var t = spark.table(tbl)
-    var expectedSchema = new StructType().add("ID", IntegerType)
+    val t = spark.table(tbl)
+    val expectedSchema = new StructType().add("ID", IntegerType, true, defaultMetadata)
     assert(t.schema === expectedSchema)
   }
 }
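
All five V2 JDBC suites in this commit (DB2, MsSqlServer, MySQL, Oracle, Postgres) switch from the two-argument `StructType.add(name, dataType)` to the four-argument overload `add(name, dataType, nullable, metadata)`: `StructField` equality includes its metadata, so the expected schema must carry the same `defaultMetadata` that columns read back over JDBC now hold. `defaultMetadata` is defined by the shared `V2JDBCTest` trait; its exact contents are assumed in this sketch:

```scala
import org.apache.spark.sql.types._

// Hypothetical stand-in for V2JDBCTest.defaultMetadata; the real trait may
// attach different keys or values.
val defaultMetadata: Metadata = new MetadataBuilder().putLong("scale", 0).build()

// Without the metadata argument, `t.schema === expectedSchema` would fail,
// because StructField's equals() compares metadata as well.
val expectedSchema: StructType = new StructType()
  .add("ID", IntegerType, true, defaultMetadata)
```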

external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MsSqlServerIntegrationSuite.scala

Lines changed: 2 additions & 2 deletions

@@ -67,11 +67,11 @@ class MsSqlServerIntegrationSuite extends DockerJDBCIntegrationSuite with V2JDBC
   override def testUpdateColumnType(tbl: String): Unit = {
     sql(s"CREATE TABLE $tbl (ID INTEGER)")
     var t = spark.table(tbl)
-    var expectedSchema = new StructType().add("ID", IntegerType)
+    var expectedSchema = new StructType().add("ID", IntegerType, true, defaultMetadata)
     assert(t.schema === expectedSchema)
     sql(s"ALTER TABLE $tbl ALTER COLUMN id TYPE STRING")
     t = spark.table(tbl)
-    expectedSchema = new StructType().add("ID", StringType)
+    expectedSchema = new StructType().add("ID", StringType, true, defaultMetadata)
     assert(t.schema === expectedSchema)
     // Update column type from STRING to INTEGER
     val msg1 = intercept[AnalysisException] {

external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MySQLIntegrationSuite.scala

Lines changed: 4 additions & 4 deletions

@@ -69,11 +69,11 @@ class MySQLIntegrationSuite extends DockerJDBCIntegrationSuite with V2JDBCTest {
   override def testUpdateColumnType(tbl: String): Unit = {
     sql(s"CREATE TABLE $tbl (ID INTEGER)")
     var t = spark.table(tbl)
-    var expectedSchema = new StructType().add("ID", IntegerType)
+    var expectedSchema = new StructType().add("ID", IntegerType, true, defaultMetadata)
     assert(t.schema === expectedSchema)
     sql(s"ALTER TABLE $tbl ALTER COLUMN id TYPE STRING")
     t = spark.table(tbl)
-    expectedSchema = new StructType().add("ID", StringType)
+    expectedSchema = new StructType().add("ID", StringType, true, defaultMetadata)
     assert(t.schema === expectedSchema)
     // Update column type from STRING to INTEGER
     val msg1 = intercept[AnalysisException] {
@@ -110,8 +110,8 @@ class MySQLIntegrationSuite extends DockerJDBCIntegrationSuite with V2JDBCTest {
   override def testCreateTableWithProperty(tbl: String): Unit = {
     sql(s"CREATE TABLE $tbl (ID INT)" +
       s" TBLPROPERTIES('ENGINE'='InnoDB', 'DEFAULT CHARACTER SET'='utf8')")
-    var t = spark.table(tbl)
-    var expectedSchema = new StructType().add("ID", IntegerType)
+    val t = spark.table(tbl)
+    val expectedSchema = new StructType().add("ID", IntegerType, true, defaultMetadata)
     assert(t.schema === expectedSchema)
   }
 }

external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/OracleIntegrationSuite.scala

Lines changed: 2 additions & 2 deletions

@@ -75,11 +75,11 @@ class OracleIntegrationSuite extends DockerJDBCIntegrationSuite with V2JDBCTest
   override def testUpdateColumnType(tbl: String): Unit = {
     sql(s"CREATE TABLE $tbl (ID INTEGER)")
     var t = spark.table(tbl)
-    var expectedSchema = new StructType().add("ID", DecimalType(10, 0))
+    var expectedSchema = new StructType().add("ID", DecimalType(10, 0), true, defaultMetadata)
     assert(t.schema === expectedSchema)
     sql(s"ALTER TABLE $tbl ALTER COLUMN id TYPE STRING")
     t = spark.table(tbl)
-    expectedSchema = new StructType().add("ID", StringType)
+    expectedSchema = new StructType().add("ID", StringType, true, defaultMetadata)
     assert(t.schema === expectedSchema)
     // Update column type from STRING to INTEGER
     val msg1 = intercept[AnalysisException] {

external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/PostgresIntegrationSuite.scala

Lines changed: 4 additions & 4 deletions

@@ -54,11 +54,11 @@ class PostgresIntegrationSuite extends DockerJDBCIntegrationSuite with V2JDBCTes
   override def testUpdateColumnType(tbl: String): Unit = {
     sql(s"CREATE TABLE $tbl (ID INTEGER)")
     var t = spark.table(tbl)
-    var expectedSchema = new StructType().add("ID", IntegerType)
+    var expectedSchema = new StructType().add("ID", IntegerType, true, defaultMetadata)
     assert(t.schema === expectedSchema)
     sql(s"ALTER TABLE $tbl ALTER COLUMN id TYPE STRING")
     t = spark.table(tbl)
-    expectedSchema = new StructType().add("ID", StringType)
+    expectedSchema = new StructType().add("ID", StringType, true, defaultMetadata)
     assert(t.schema === expectedSchema)
     // Update column type from STRING to INTEGER
     val msg = intercept[AnalysisException] {
@@ -70,8 +70,8 @@ class PostgresIntegrationSuite extends DockerJDBCIntegrationSuite with V2JDBCTes
   override def testCreateTableWithProperty(tbl: String): Unit = {
     sql(s"CREATE TABLE $tbl (ID INT)" +
       s" TBLPROPERTIES('TABLESPACE'='pg_default')")
-    var t = spark.table(tbl)
-    var expectedSchema = new StructType().add("ID", IntegerType)
+    val t = spark.table(tbl)
+    val expectedSchema = new StructType().add("ID", IntegerType, true, defaultMetadata)
     assert(t.schema === expectedSchema)
   }
 }
