Commit caa662d

Update dependency scala to v2.13.15 (#966)
* Update dependency scala to v2.13.15
* Update build.sbt
* bump & sbt +{,macroSub/,structType/}dependencyLockWrite
* silence

---------

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: Dmitry Ivankov <dmitry.ivankov@cognite.com>
1 parent ebd4b36 commit caa662d

10 files changed: +149 -122 lines
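
For context, the lock-regeneration command quoted in the message uses shell brace expansion; it is equivalent to the single invocation below (assuming `macroSub` and `structType` are the sbt project ids behind the extra lock files, and `+` runs the task for every cross Scala version):

    sbt +dependencyLockWrite +macroSub/dependencyLockWrite +structType/dependencyLockWrite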

build.sbt

Lines changed: 3 additions & 2 deletions
@@ -4,7 +4,7 @@ import scala.xml.{Node => XmlNode, NodeSeq => XmlNodeSeq, _}
 import scala.xml.transform.{RewriteRule, RuleTransformer}
 
 val scala212 = "2.12.19"
-val scala213 = "2.13.14"
+val scala213 = "2.13.15"
 val supportedScalaVersions = List(scala212, scala213)
 val sparkVersion = "3.3.4"
 val circeVersion = "0.14.9"
@@ -57,7 +57,8 @@ lazy val commonSettings = Seq(
       // and to avoid a dependency on scala-collection-compat
       case Some((2, 13)) => Seq(
         "-Wconf:src=src/test/scala/cognite/spark/v1/SparkTest.scala&cat=deprecation:i",
-        "-Wconf:src=src/test/scala/.*&cat=other-pure-statement:i"
+        "-Wconf:src=src/test/scala/.*&cat=other-pure-statement:i",
+        "-Wconf:src=src/test/scala/.*&msg=unused value of type org.scalatest.Assertion:s"
       )
       case Some((2, 12)) => Seq(
         "-Wconf:src=src/test/scala/.*&cat=unused:i"

build.scala-2.13.sbt.lock

Lines changed: 5 additions & 5 deletions
@@ -1,6 +1,6 @@
 {
   "lockVersion" : 1,
-  "timestamp" : "2024-11-13T11:30:56.507065520Z",
+  "timestamp" : "2024-11-20T12:59:46.331288696Z",
   "configurations" : [
     "compile",
     "optional",
@@ -2569,11 +2569,11 @@
     {
       "org" : "org.scala-lang",
       "name" : "scala-library",
-      "version" : "2.13.14",
+      "version" : "2.13.15",
       "artifacts" : [
         {
           "name" : "scala-library.jar",
-          "hash" : "sha1:f8b4afe89abe48ca670f620c7da89b71f93e6546"
+          "hash" : "sha1:ed6f1d58968b16c5f9067d5cac032d952552de58"
         }
       ],
       "configurations" : [
@@ -2586,11 +2586,11 @@
     {
       "org" : "org.scala-lang",
       "name" : "scala-reflect",
-      "version" : "2.13.14",
+      "version" : "2.13.15",
       "artifacts" : [
         {
           "name" : "scala-reflect.jar",
-          "hash" : "sha1:8e275fefb2a01e178db2cdfebb2181062a790b82"
+          "hash" : "sha1:355927b10366563a8f1b56c1f34ff376f2f7c8c5"
         }
       ],
       "configurations" : [

macro/build.scala-2.13.sbt.lock

Lines changed: 19 additions & 6 deletions
@@ -1,6 +1,6 @@
 {
   "lockVersion" : 1,
-  "timestamp" : "2024-11-13T11:31:01.766754723Z",
+  "timestamp" : "2024-11-20T12:59:52.180435270Z",
   "configurations" : [
     "compile",
     "optional",
@@ -2083,28 +2083,41 @@
     {
       "org" : "org.scala-lang",
       "name" : "scala-library",
-      "version" : "2.13.14",
+      "version" : "2.13.15",
       "artifacts" : [
         {
           "name" : "scala-library.jar",
-          "hash" : "sha1:f8b4afe89abe48ca670f620c7da89b71f93e6546"
+          "hash" : "sha1:ed6f1d58968b16c5f9067d5cac032d952552de58"
         }
       ],
       "configurations" : [
         "compile",
-        "provided",
         "runtime",
         "test"
       ]
     },
+    {
+      "org" : "org.scala-lang",
+      "name" : "scala-library",
+      "version" : "2.13.8",
+      "artifacts" : [
+        {
+          "name" : "scala-library.jar",
+          "hash" : "sha1:5a865f03a794b27e6491740c4c419a19e4511a3d"
+        }
+      ],
+      "configurations" : [
+        "provided"
+      ]
+    },
     {
       "org" : "org.scala-lang",
       "name" : "scala-reflect",
-      "version" : "2.13.14",
+      "version" : "2.13.8",
       "artifacts" : [
         {
           "name" : "scala-reflect.jar",
-          "hash" : "sha1:8e275fefb2a01e178db2cdfebb2181062a790b82"
+          "hash" : "sha1:994b004d041b18724ec298a135c37e7817d369ec"
         }
       ],
       "configurations" : [

project/build.properties

Lines changed: 1 addition & 1 deletion
@@ -1 +1 @@
-sbt.version=1.9.9
+sbt.version=1.10.3

project/plugins.sbt

Lines changed: 1 addition & 1 deletion
@@ -6,6 +6,6 @@ addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.1.2-1")
 addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.12.0")
 addSbtPlugin("au.com.onegeek" %% "sbt-dotenv" % "2.1.233")
 addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "2.2.0")
-addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.12.1")
+addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.13.0")
 addSbtPlugin("io.github.davidgregory084" % "sbt-tpolecat" % "0.4.4")
 addSbtPlugin("software.purpledragon" % "sbt-dependency-lock" % "1.5.1")

src/test/scala/cognite/spark/v1/FileContentRelationTest.scala

Lines changed: 4 additions & 4 deletions
@@ -122,10 +122,10 @@ class FileContentRelationTest extends FlatSpec with Matchers with SparkTest wit
     val result = spark.sqlContext.sql(s"select * from filecontent").collect()
     result.map(_.toSeq.toList) should contain theSameElementsAs
       Array(
-        List(30, "Alice", null),
-        List(25, "Bob", null),
-        List(35, "Charlie", null),
-        List(35, "Charlie2", "test")
+        List[Any](30, "Alice", null),
+        List[Any](25, "Bob", null),
+        List[Any](35, "Charlie", null),
+        List[Any](35, "Charlie2", "test")
       )
   }

src/test/scala/cognite/spark/v1/RawTableRelationTest.scala

Lines changed: 8 additions & 8 deletions
@@ -313,7 +313,7 @@ class RawTableRelationTest
 
     collectToSet[java.sql.Timestamp](dfWithManylastUpdatedTime.select($"lastUpdatedTime"))
     collectToSet[JavaLong](dfWithManylastUpdatedTime.select($"_lastUpdatedTime")) should equal(
-      Set(null, 2))
+      Set[Any](null, 2))
     collectToSet[JavaLong](dfWithManylastUpdatedTime.select($"___lastUpdatedTime")) should equal(
       Set(11, 22))
     collectToSet[JavaLong](dfWithManylastUpdatedTime.select($"____lastUpdatedTime")) should equal(
@@ -343,7 +343,7 @@ class RawTableRelationTest
     val (columnNames2, unRenamed2) = prepareForInsert(dfWithManylastUpdatedTime)
     columnNames2.toSet should equal(
       Set("lastUpdatedTime", "__lastUpdatedTime", "___lastUpdatedTime", "value"))
-    collectToSet[JavaLong](unRenamed2.select($"lastUpdatedTime")) should equal(Set(null, 2))
+    collectToSet[JavaLong](unRenamed2.select($"lastUpdatedTime")) should equal(Set[Any](null, 2))
     collectToSet[JavaLong](unRenamed2.select($"__lastUpdatedTime")) should equal(Set(11, 22))
     collectToSet[JavaLong](unRenamed2.select($"___lastUpdatedTime")) should equal(Set(111, 222))
   }
@@ -836,7 +836,7 @@ class RawTableRelationTest
     dfWithEmptyStringInByteField
       .collect()
       .map(_.getAs[Any]("byte"))
-      .toSet shouldBe Set(null, 1.toByte)
+      .toSet shouldBe Set[Any](null, 1.toByte)
   }
 
   it should "handle empty string as null for Short type" in {
@@ -846,7 +846,7 @@ class RawTableRelationTest
     dfWithEmptyStringInShortField
       .collect()
       .map(_.getAs[Any]("short"))
-      .toSet shouldBe Set(null, 12.toShort)
+      .toSet shouldBe Set[Any](null, 12.toShort)
   }
 
   it should "handle empty string as null for Integer type" in {
@@ -856,7 +856,7 @@ class RawTableRelationTest
     dfWithEmptyStringInIntegerField
       .collect()
       .map(_.getAs[Any]("integer"))
-      .toSet shouldBe Set(null, 123)
+      .toSet shouldBe Set[Any](null, 123)
   }
 
   it should "handle empty string as null for Long type" in {
@@ -866,7 +866,7 @@ class RawTableRelationTest
     dfWithEmptyStringInLongField
       .collect()
       .map(_.getAs[Any]("long"))
-      .toSet shouldBe Set(null, 12345L)
+      .toSet shouldBe Set[Any](null, 12345L)
   }
 
   it should "handle empty string as null for Double type" in {
@@ -876,7 +876,7 @@ class RawTableRelationTest
     dfWithEmptyStringInDoubleField
      .collect()
       .map(_.getAs[Any]("num"))
-      .toSet shouldBe Set(null, 12.3)
+      .toSet shouldBe Set[Any](null, 12.3)
  }
 
   it should "handle empty string as null for Boolean type" in {
@@ -889,7 +889,7 @@ class RawTableRelationTest
     dfWithEmptyStringInBooleanField
       .collect()
       .map(_.getAs[Any]("bool"))
-      .toSet shouldBe Set(null, true, false)
+      .toSet shouldBe Set[Any](null, true, false)
   }
 
   it should "fail reasonably on invalid types" in {

src/test/scala/cognite/spark/v1/SparkSchemaHelperTest.scala

Lines changed: 11 additions & 11 deletions
@@ -60,19 +60,19 @@ class SparkSchemaHelperTest extends FlatSpec with ParallelTestExecution with Mat
   }
 
   it should "construct optional type from Row of null" in {
-    val r = new GenericRowWithSchema(Array(null, null, null, null, null, null, null, null), structType[TestTypeOption]())
+    val r = new GenericRowWithSchema(Array[Any](null, null, null, null, null, null, null, null), structType[TestTypeOption]())
     fromRow[TestTypeOption](r) should be(
       TestTypeOption(None, None, None, None, None, None, None, None))
   }
 
   it should "construct optional type from Row of map values that can be null" in {
-    val r = new GenericRowWithSchema(Array(null, null, null, null, Map("foo" -> "row", "bar" -> "a"), null, null, null), structType[TestTypeOption]())
+    val r = new GenericRowWithSchema(Array[Any](null, null, null, null, Map("foo" -> "row", "bar" -> "a"), null, null, null), structType[TestTypeOption]())
     fromRow[TestTypeOption](r) should be(
       TestTypeOption(None, None, None, None, Some(Map("foo" -> "row", "bar" -> "a")), None, None, None))
   }
 
   it should "construct optional type from Row of seq values that can be null" in {
-    val r = new GenericRowWithSchema(Array(null, null, null, null, null, null, Seq(20L, null), null), structType[TestTypeOption]())
+    val r = new GenericRowWithSchema(Array[Any](null, null, null, null, null, null, Seq[Any](20L, null), null), structType[TestTypeOption]())
     fromRow[TestTypeOption](r) should be(
       TestTypeOption(None, None, None, None, None, None, Some(Seq(Some(20), None)), None))
   }
@@ -96,13 +96,13 @@ class SparkSchemaHelperTest extends FlatSpec with ParallelTestExecution with Mat
   }
 
   it should "ignore null in map" in {
-    val x = new GenericRowWithSchema(Array(null, null, null, null, Map("foo" -> "row", "bar" -> null), null, null, null), structType[TestTypeOption]())
+    val x = new GenericRowWithSchema(Array[Any](null, null, null, null, Map("foo" -> "row", "bar" -> null), null, null, null), structType[TestTypeOption]())
     val row = fromRow[TestTypeOption](x)
     row.x.get shouldBe Map("foo" -> "row")
   }
 
   it should "ignore missing fields" in {
-    val x = new GenericRowWithSchema(Array(1, 2, null), structType[TestTypeOptionalField]())
+    val x = new GenericRowWithSchema(Array[Any](1, 2, null), structType[TestTypeOptionalField]())
     val row = fromRow[TestTypeOption](x)
     row.a shouldBe Some(1)
     row.b shouldBe Some(2)
@@ -112,7 +112,7 @@ class SparkSchemaHelperTest extends FlatSpec with ParallelTestExecution with Mat
 
   it should "correctly return OptionalField" in {
     val x = new GenericRowWithSchema(
-      Array(1, null),
+      Array[Any](1, null),
       StructType(Seq(
         StructField("a", DataTypes.IntegerType),
         StructField("b", DataTypes.IntegerType, nullable = true)
@@ -124,34 +124,34 @@ class SparkSchemaHelperTest extends FlatSpec with ParallelTestExecution with Mat
   }
 
   it should "fail nicely on different type in map" in {
-    val x = new GenericRowWithSchema(Array(null, null, null, null, Map("foo" -> "row", "bar" -> 1), null, null, null), structType[TestTypeOption]())
+    val x = new GenericRowWithSchema(Array[Any](null, null, null, null, Map[Any, Any]("foo" -> "row", "bar" -> 1), null, null, null), structType[TestTypeOption]())
     val ex = intercept[CdfSparkIllegalArgumentException] { fromRow[TestTypeOption](x) }
     ex.getMessage shouldBe "Map with string values was expected, but '1' of type Int was found (under key 'bar' on row [null,null,null,null,Map(foo -> row, bar -> 1),null,null,null])"
   }
 
   it should "fail nicely on type mismatch" in {
-    val x = new GenericRowWithSchema(Array("shouldBeInt", 2.toDouble, 3.toByte,
+    val x = new GenericRowWithSchema(Array[Any]("shouldBeInt", 2.toDouble, 3.toByte,
       4.toFloat, Map("foo" -> "bar"), 5.toLong, Seq[Long](10), "foo"), structType[TestTypeBasic]())
     val ex = intercept[CdfSparkIllegalArgumentException] { fromRow[TestTypeBasic](x) }
     ex.getMessage shouldBe "Column 'a' was expected to have type Int, but 'shouldBeInt' of type String was found (on row [shouldBeInt,2.0,3,4.0,Map(foo -> bar),5,List(10),foo])."
   }
 
   it should "fail nicely on unexpected NULL in int" in {
-    val x = new GenericRowWithSchema(Array(null, 2.toDouble, 3.toByte,
+    val x = new GenericRowWithSchema(Array[Any](null, 2.toDouble, 3.toByte,
       4.toFloat, Map("foo" -> "bar"), 5.toLong, Seq[Long](10), "foo"), structType[TestTypeBasic]())
     val ex = intercept[CdfSparkIllegalArgumentException] { fromRow[TestTypeBasic](x) }
     ex.getMessage shouldBe "Column 'a' was expected to have type Int, but NULL was found (on row [null,2.0,3,4.0,Map(foo -> bar),5,List(10),foo])."
   }
 
   it should "fail nicely on unexpected NULL in string" in {
-    val x = new GenericRowWithSchema(Array(1, 2.toDouble, 3.toByte,
+    val x = new GenericRowWithSchema(Array[Any](1, 2.toDouble, 3.toByte,
       4.toFloat, Map("foo" -> "bar"), 5.toLong, Seq[Long](10), null), structType[TestTypeBasic]())
     val ex = intercept[CdfSparkIllegalArgumentException] { fromRow[TestTypeBasic](x) }
     ex.getMessage shouldBe "Column 's' was expected to have type String, but NULL was found (on row [1,2.0,3,4.0,Map(foo -> bar),5,List(10),null])."
   }
 
   it should "fail nicely on unexpected NULL in map" in {
-    val x = new GenericRowWithSchema(Array(1, 2.toDouble, 3.toByte,
+    val x = new GenericRowWithSchema(Array[Any](1, 2.toDouble, 3.toByte,
       4.toFloat, null, 5.toLong, Seq[Long](10), "foo"), structType[TestTypeBasic]())
     val ex = intercept[CdfSparkIllegalArgumentException] { fromRow[TestTypeBasic](x) }
     ex.getMessage shouldBe "Column 'x' was expected to have type Map[String,String], but NULL was found (on row [1,2.0,3,4.0,null,5,List(10),foo])."
