test: get value by key which looks like nested JSON path
Signed-off-by: Kacper Trochimiak <kacper.trochimiak@eliatra.com>
kt-eliatra committed Aug 26, 2024
1 parent 7fd9223 commit 96e10ca
Showing 3 changed files with 52 additions and 1 deletion.
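In short, the new test exercises lookups where a struct field's name itself contains dots, so it only looks like a nested JSON path. Below is a minimal standalone sketch of that behaviour (illustrative only, not part of the commit): it assumes a local SparkSession, and the object name, temp view name, and sample record are made up for the example.

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.col

object DottedKeyLookupSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[1]").appName("dotted-key-lookup").getOrCreate()
    import spark.implicits._

    // One record whose inner key literally contains dots; Spark infers `unmapped` as a
    // struct with a single field named "userIdentity.sessionContext.sessionIssuer.type".
    val df = spark.read.json(Seq(
      """{"unmapped":{"userIdentity.sessionContext.sessionIssuer.type":"Role"}}""").toDS())
    df.createOrReplaceTempView("t")

    // Bracket syntax (as in the SQL half of the new test) treats the whole dotted string
    // as one field name rather than as a nested path.
    spark.sql("SELECT unmapped['userIdentity.sessionContext.sessionIssuer.type'] FROM t").show(false)

    // Backtick quoting and Column.getField behave the same way; unquoted dots would
    // instead be parsed as nested struct access and fail to resolve here.
    spark.sql("SELECT unmapped.`userIdentity.sessionContext.sessionIssuer.type` FROM t").show(false)
    df.select(col("unmapped").getField("userIdentity.sessionContext.sessionIssuer.type")).show(false)

    spark.stop()
  }
}

Each of the three lookups above should return the single value "Role" from the sample record.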
FlintSparkSuite.scala
@@ -486,6 +486,32 @@ trait FlintSparkSuite extends QueryTest with FlintSuite with OpenSearchSuite wit
|""".stripMargin)
}

protected def createStructNestedTable2(testTable: String): Unit = {
sql(s"""
| CREATE TABLE $testTable
| (
| unmapped STRUCT<userIdentity: STRUCT<sessioncontext: STRUCT<sessionIssuer: STRUCT<type: STRING>>>>
| )
| USING JSON
|""".stripMargin)

sql(s"""
| INSERT INTO $testTable
| VALUES
| ( STRUCT(STRUCT(STRUCT(STRUCT("example_type1")))) )
|""".stripMargin)
}

protected def createStructNestedTable3(testTable: String): Unit = {
sql(s"""
| CREATE TABLE $testTable
| USING JSON
| OPTIONS (
| path "../integ-test/src/integration/scala/org/opensearch/flint/spark/unmapped.json"
| )
|""".stripMargin)
}

protected def createTableIssue112(testTable: String): Unit = {
sql(s"""
| CREATE TABLE $testTable (
FlintSparkPPLNestedFieldsITSuite.scala
@@ -25,7 +25,7 @@ class FlintSparkPPLNestedFieldsITSuite
override def beforeAll(): Unit = {
super.beforeAll()

- createStructNestedTable(testTable)
+ createStructNestedTable3(testTable)
}

protected override def afterEach(): Unit = {
@@ -37,6 +37,30 @@ class FlintSparkPPLNestedFieldsITSuite
}
}

test("aaa") {
val pplFrame = sql(s"""
| source = $testTable | fields unmapped.userIdentity.sessioncontext.sessionIssuer.type
| """.stripMargin)

// Retrieve the results
val pplResults: Array[Row] = pplFrame.collect()
assert(pplResults.length == 1)
val expectedResults: Array[Row] = Array(Row("Role"))
// Compare the results
implicit val rowOrdering: Ordering[Row] = Ordering.by[Row, String](_.getAs[String](0))
assert(pplResults.sorted.sameElements(expectedResults.sorted))

val sqlFrame = sql(s"""
| select unmapped['userIdentity.sessioncontext.sessionIssuer.type'] from $testTable
| """.stripMargin)

// Retrieve the results
val sqlResults: Array[Row] = sqlFrame.collect()
assert(sqlResults.length == 1)
assert(sqlResults.sorted.sameElements(expectedResults.sorted))
}


test("create ppl simple query test") {
val testTableQuoted = "`spark_catalog`.`default`.`flint_ppl_test`"
Seq(testTable, testTableQuoted).foreach { table =>
unmapped.json
@@ -0,0 +1 @@
{"unmapped":{"userIdentity.sessionContext.sessionIssuer.type":"Role","tlsDetails.clientProvidedHostHeader":"dynamodb.us-east-1.amazonaws.com","userIdentity.sessionContext.sessionIssuer.userName":"lambda_rii_finding_etl_REDACTED","userIdentity.sessionContext.sessionIssuer.principalId":"REDACTED","recipientAccountId":"REDACTED","readOnly":"true","tlsDetails.tlsVersion":"TLSv1.3","managementEvent":"true","tlsDetails.cipherSuite":"TLS_AES_256_GCM_SHA384","userIdentity.sessionContext.sessionIssuer.accountId":"REDACTED"}}
