Skip to content

Commit

Permalink
scalafmt
Browse files — browse the repository at this point in the history
  • Loading branch information
osopardo1 authored and osopardo1 committed Oct 26, 2023
1 parent 47de1fe commit 1db4b13
Showing 1 changed file with 30 additions and 31 deletions.
61 changes: 30 additions & 31 deletions src/test/scala/io/qbeast/spark/index/SparkRevisionFactoryTest.scala
Original file line number Diff line number Diff line change
Expand Up @@ -155,38 +155,37 @@ class SparkRevisionFactoryTest extends QbeastIntegrationTestSpec {

it should "createNewRevision with columnStats " +
  "even on APPEND mode" in withSparkAndTmpDir((spark, tmpDir) => {
    import spark.implicits._

    // User-supplied stats for column "a": on APPEND, the revision change is
    // driven by these options rather than derived from the written data.
    val columnStats = """{ "a_min": 0, "a_max": 20 }"""
    val source = (0 to 10).map(i => T3(i, i * 2.0, s"$i", i * 1.2f)).toDF()

    // TODO: very special case
    // TODO: a cleaner solution would be to change the API for IndexManager
    //  and allow sending a set of options with user-specific configurations
    //  to Index
    source.write
      .mode("append")
      .format("qbeast")
      .option("columnsToIndex", "a")
      .option("columnStats", columnStats)
      .save(tmpDir)

    // Read the latest revision back from the Delta log and check that the
    // user-provided min/max made it into the transformation.
    val snapshot =
      DeltaQbeastSnapshot(DeltaLog.forTable(spark, tmpDir).update())
    val headTransformation = snapshot.loadLatestRevision.transformations.head

    headTransformation should not be null
    headTransformation shouldBe a[LinearTransformation]
    val linear = headTransformation.asInstanceOf[LinearTransformation]
    linear.minNumber shouldBe 0
    linear.maxNumber shouldBe 20
  })
import spark.implicits._
// NOTE(review): this span is the post-scalafmt copy of the test body above;
// its `it should … in withSparkAndTmpDir` header sits outside this span.
val df = 0.to(10).map(i => T3(i, i * 2.0, s"$i", i * 1.2f)).toDF()
// User-supplied min/max for column "a", passed via the "columnStats" option.
val columnStats = """{ "a_min": 0, "a_max": 20 }"""

// On append mode, it already expects a RevisionChange,
// but in this case the change is defined by the user
// instead of triggered by the data

// TODO: very special case
// TODO: a cleaner solution would be to change the API for IndexManager
// and allow to send a set of options
// with user-specific configurations to Index

// Append to the qbeast table, indexing column "a" with the user-given stats.
df.write
.mode("append")
.format("qbeast")
.option("columnsToIndex", "a")
.option("columnStats", columnStats)
.save(tmpDir)

// Load the latest revision from the Delta log after the append.
val qbeastSnapshot =
DeltaQbeastSnapshot(DeltaLog.forTable(spark, tmpDir).update())
val latestRevision = qbeastSnapshot.loadLatestRevision
val transformation = latestRevision.transformations.head

// The transformation must be linear and carry the user-provided bounds.
transformation should not be null
transformation shouldBe a[LinearTransformation]
transformation.asInstanceOf[LinearTransformation].minNumber shouldBe 0
transformation.asInstanceOf[LinearTransformation].maxNumber shouldBe 20

})

it should "createNewRevision with min max timestamp" in withSpark(spark => {
import spark.implicits._
Expand Down

0 comments on commit 1db4b13

Please sign in to comment.