Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
The table of contents is too big for display.
Diff view
Diff view
  •  
  •  
  •  
4 changes: 2 additions & 2 deletions .github/workflows/integ-tests-with-security.yml
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ jobs:
strategy:
fail-fast: false
matrix:
java: [21]
java: [21, 23]
runs-on: ubuntu-latest
container:
# using the same image which is used by opensearch-build team to build the OpenSearch Distribution
Expand Down Expand Up @@ -62,7 +62,7 @@ jobs:
fail-fast: false
matrix:
os: [ windows-latest, macos-13 ]
java: [21]
java: [21, 23]

runs-on: ${{ matrix.os }}

Expand Down
1 change: 1 addition & 0 deletions .github/workflows/sql-pitest.yml
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ jobs:
matrix:
java:
- 21
- 23
runs-on: ubuntu-latest
container:
# using the same image which is used by opensearch-build team to build the OpenSearch Distribution
Expand Down
6 changes: 4 additions & 2 deletions .github/workflows/sql-test-and-build-workflow.yml
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ jobs:
# Run all jobs
fail-fast: false
matrix:
java: [21]
java: [21, 23]
runs-on: ubuntu-latest
container:
# using the same image which is used by opensearch-build team to build the OpenSearch Distribution
Expand Down Expand Up @@ -101,7 +101,9 @@ jobs:
matrix:
entry:
- { os: windows-latest, java: 21, os_build_args: -x doctest -PbuildPlatform=windows }
- { os: windows-latest, java: 23, os_build_args: -x doctest -PbuildPlatform=windows }
- { os: macos-13, java: 21 }
- { os: macos-13, java: 23 }
runs-on: ${{ matrix.entry.os }}

steps:
Expand Down Expand Up @@ -161,7 +163,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
java: [21]
java: [21, 23]
container:
image: ${{ needs.Get-CI-Image-Tag.outputs.ci-image-version-linux }}
options: ${{ needs.Get-CI-Image-Tag.outputs.ci-image-start-options }}
Expand Down
1 change: 1 addition & 0 deletions .github/workflows/sql-test-workflow.yml
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ jobs:
matrix:
java:
- 21
- 23
runs-on: ubuntu-latest
container:
# using the same image which is used by opensearch-build team to build the OpenSearch Distribution
Expand Down
3 changes: 0 additions & 3 deletions DEVELOPER_GUIDE.rst
Original file line number Diff line number Diff line change
Expand Up @@ -146,7 +146,6 @@ The plugin codebase is in standard layout of Gradle project::
├── plugin
├── protocol
├── ppl
├── spark
├── sql
├── sql-cli
├── sql-jdbc
Expand All @@ -161,7 +160,6 @@ Here are sub-folders (Gradle modules) for plugin source code:
- ``core``: core query engine.
- ``opensearch``: OpenSearch storage engine.
- ``prometheus``: Prometheus storage engine.
- ``spark`` : Spark storage engine
- ``protocol``: request/response protocol formatter.
- ``common``: common util code.
- ``integ-test``: integration and comparison test.
Expand Down Expand Up @@ -296,7 +294,6 @@ For test cases, you can use the cases in the following checklist in case you mis

- *Other Statements*

- DELETE
- SHOW
- DESCRIBE

Expand Down
7 changes: 3 additions & 4 deletions async-query-core/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ plugins {
id 'jacoco'
id 'antlr'
id 'com.diffplug.spotless' version '6.22.0'
id 'com.github.johnrengelman.shadow'
id 'com.gradleup.shadow'
}

repositories {
Expand Down Expand Up @@ -46,10 +46,9 @@ dependencies {
antlr "org.antlr:antlr4:4.7.1"

implementation project(':core')
implementation project(':spark') // TODO: dependency to spark should be eliminated
implementation project(':datasources') // TODO: dependency to datasources should be eliminated
implementation 'org.json:json:20231013'
implementation 'com.google.code.gson:gson:2.8.9'
api group: 'com.amazonaws', name: 'aws-java-sdk-emrserverless', version: "${aws_java_sdk_version}"

testImplementation(platform("org.junit:junit-bom:5.9.3"))

Expand Down Expand Up @@ -148,4 +147,4 @@ shadowJar {

from sourceSets.main.output
configurations = [project.configurations.runtimeClasspath]
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@ public enum FunctionType {
CSV("CSV"),
MISC("Misc"),
GENERATOR("Generator"),
OTHER("Other"),
UDF("User Defined Function");

private final String name;
Expand Down Expand Up @@ -422,6 +423,51 @@ public enum FunctionType {
"posexplode",
"posexplode_outer",
"stack"))
.put(
OTHER,
Set.of(
"aggregate",
"array_size",
"array_sort",
"cardinality",
"crc32",
"exists",
"filter",
"forall",
"hash",
"ilike",
"in",
"like",
"map_filter",
"map_zip_with",
"md5",
"mod",
"named_struct",
"parse_url",
"raise_error",
"reduce",
"reverse",
"sha",
"sha1",
"sha2",
"size",
"struct",
"transform",
"transform_keys",
"transform_values",
"url_decode",
"url_encode",
"xpath",
"xpath_boolean",
"xpath_double",
"xpath_float",
"xpath_int",
"xpath_long",
"xpath_number",
"xpath_short",
"xpath_string",
"xxhash64",
"zip_with"))
.build();

private static final Map<String, FunctionType> FUNCTION_NAME_TO_FUNCTION_TYPE_MAP =
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -78,6 +78,7 @@ public enum SQLGrammarElement implements GrammarElement {
CSV_FUNCTIONS("CSV functions"),
GENERATOR_FUNCTIONS("Generator functions"),
MISC_FUNCTIONS("Misc functions"),
OTHER_FUNCTIONS("Other functions"),

// UDF
UDF("User Defined functions");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -560,26 +560,30 @@ public Void visitFunctionName(FunctionNameContext ctx) {
return super.visitFunctionName(ctx);
}

private void validateFunctionAllowed(String function) {
FunctionType type = FunctionType.fromFunctionName(function.toLowerCase());
private void validateFunctionAllowed(String functionName) {
String lowerCaseFunctionName = functionName.toLowerCase();
FunctionType type = FunctionType.fromFunctionName(lowerCaseFunctionName);
switch (type) {
case MAP:
validateAllowed(SQLGrammarElement.MAP_FUNCTIONS);
validateAllowed(SQLGrammarElement.MAP_FUNCTIONS, lowerCaseFunctionName);
break;
case BITWISE:
validateAllowed(SQLGrammarElement.BITWISE_FUNCTIONS);
validateAllowed(SQLGrammarElement.BITWISE_FUNCTIONS, lowerCaseFunctionName);
break;
case CSV:
validateAllowed(SQLGrammarElement.CSV_FUNCTIONS);
validateAllowed(SQLGrammarElement.CSV_FUNCTIONS, lowerCaseFunctionName);
break;
case MISC:
validateAllowed(SQLGrammarElement.MISC_FUNCTIONS);
validateAllowed(SQLGrammarElement.MISC_FUNCTIONS, lowerCaseFunctionName);
break;
case GENERATOR:
validateAllowed(SQLGrammarElement.GENERATOR_FUNCTIONS);
validateAllowed(SQLGrammarElement.GENERATOR_FUNCTIONS, lowerCaseFunctionName);
break;
case OTHER:
validateAllowed(SQLGrammarElement.OTHER_FUNCTIONS, lowerCaseFunctionName);
break;
case UDF:
validateAllowed(SQLGrammarElement.UDF);
validateAllowed(SQLGrammarElement.UDF, lowerCaseFunctionName);
break;
}
}
Expand All @@ -590,6 +594,12 @@ private void validateAllowed(SQLGrammarElement element) {
}
}

/**
 * Checks that the given grammar element is permitted by the configured validator.
 *
 * @param element the grammar element category being validated
 * @param detail extra context (presumably the lower-cased function name, per the callers
 *     visible in this diff) appended to the error message
 * @throws IllegalArgumentException if the element is not allowed by {@code
 *     grammarElementValidator}
 */
private void validateAllowed(SQLGrammarElement element, String detail) {
  if (!grammarElementValidator.isValid(element)) {
    throw new IllegalArgumentException(String.format("%s (%s) is not allowed.", element, detail));
  }
}

@Override
public Void visitErrorCapturingIdentifier(ErrorCapturingIdentifierContext ctx) {
ErrorCapturingIdentifierExtraContext extra = ctx.errorCapturingIdentifierExtra();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,6 @@
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.opensearch.sql.datasources.glue.GlueDataSourceFactory.GLUE_INDEX_STORE_OPENSEARCH_AUTH;
import static org.opensearch.sql.datasources.glue.GlueDataSourceFactory.GLUE_INDEX_STORE_OPENSEARCH_URI;
import static org.opensearch.sql.spark.dispatcher.IndexDMLHandler.DML_QUERY_JOB_ID;
import static org.opensearch.sql.spark.dispatcher.IndexDMLHandler.DROP_INDEX_JOB_ID;

Expand All @@ -41,7 +39,6 @@
import org.opensearch.sql.datasource.DataSourceService;
import org.opensearch.sql.datasource.model.DataSourceMetadata;
import org.opensearch.sql.datasource.model.DataSourceType;
import org.opensearch.sql.datasources.auth.AuthenticationType;
import org.opensearch.sql.spark.asyncquery.model.AsyncQueryExecutionResponse;
import org.opensearch.sql.spark.asyncquery.model.AsyncQueryJobMetadata;
import org.opensearch.sql.spark.asyncquery.model.AsyncQueryJobMetadata.AsyncQueryJobMetadataBuilder;
Expand Down Expand Up @@ -675,10 +672,7 @@ private void givenValidDataSourceMetadataExist() {
.setName(DATASOURCE_NAME)
.setConnector(DataSourceType.S3GLUE)
.setProperties(
ImmutableMap.<String, String>builder()
.put(GLUE_INDEX_STORE_OPENSEARCH_URI, "https://open.search.cluster:9200/")
.put(GLUE_INDEX_STORE_OPENSEARCH_AUTH, AuthenticationType.NOAUTH.getName())
.build())
ImmutableMap.<String, String>builder().put("PARAM_NAME", "PARAM_VALUE").build())
.build());
}

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
/*
* Copyright OpenSearch Contributors
* SPDX-License-Identifier: Apache-2.0
*/

package org.opensearch.sql.spark.data.type;

import static org.junit.jupiter.api.Assertions.*;

import org.junit.jupiter.api.Test;

class SparkDataTypeTest {

  @Test
  void testTypeName() {
    // The name supplied at construction should be returned verbatim by typeName().
    final String name = "spark_string";
    final SparkDataType type = new SparkDataType(name);

    assertEquals(name, type.typeName(), "Type name should match the expected value");
  }

  @Test
  void testEqualsAndHashCode() {
    // Instances built from the same name must be equal and hash identically;
    // a different name must break both equality and hash equality.
    final SparkDataType first = new SparkDataType("spark_integer");
    final SparkDataType second = new SparkDataType("spark_integer");
    final SparkDataType different = new SparkDataType("spark_double");

    assertEquals(first, second);
    assertEquals(first.hashCode(), second.hashCode());
    assertNotEquals(first, different);
    assertNotEquals(first.hashCode(), different.hashCode());
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
/*
* Copyright OpenSearch Contributors
* SPDX-License-Identifier: Apache-2.0
*/

package org.opensearch.sql.spark.data.value;

import static org.junit.jupiter.api.Assertions.*;

import org.junit.jupiter.api.Test;
import org.opensearch.sql.spark.data.type.SparkDataType;

class SparkExprValueTest {
  // Shared Spark type used by every test case in this class.
  private final SparkDataType charType = new SparkDataType("char");

  @Test
  public void getters() {
    // type() and value() should echo back the constructor arguments.
    final SparkExprValue value = new SparkExprValue(charType, "str");

    assertEquals(charType, value.type());
    assertEquals("str", value.value());
  }

  @Test
  public void unsupportedCompare() {
    // compare() is expected to reject any invocation with
    // UnsupportedOperationException, even against itself.
    final SparkExprValue value = new SparkExprValue(charType, "str");

    assertThrows(UnsupportedOperationException.class, () -> value.compare(value));
  }

  @Test
  public void testEquals() {
    // equal() is value-based: same type and payload compare as equal,
    // a different payload does not.
    final SparkExprValue first = new SparkExprValue(charType, "str");
    final SparkExprValue second = new SparkExprValue(charType, "str");
    final SparkExprValue different = new SparkExprValue(charType, "other");

    assertTrue(first.equal(second));
    assertFalse(first.equal(different));
  }
}
Loading
Loading