Merged
2 changes: 0 additions & 2 deletions DEVELOPER_GUIDE.rst
@@ -146,7 +146,6 @@ The plugin codebase is in standard layout of Gradle project::
├── plugin
├── protocol
├── ppl
├── spark
├── sql
├── sql-cli
├── sql-jdbc
@@ -161,7 +160,6 @@ Here are sub-folders (Gradle modules) for plugin source code:
- ``core``: core query engine.
- ``opensearch``: OpenSearch storage engine.
- ``prometheus``: Prometheus storage engine.
- ``spark`` : Spark storage engine
- ``protocol``: request/response protocol formatter.
- ``common``: common util code.
- ``integ-test``: integration and comparison test.
2 changes: 1 addition & 1 deletion async-query-core/build.gradle
@@ -46,10 +46,10 @@ dependencies {
antlr "org.antlr:antlr4:4.7.1"

implementation project(':core')
implementation project(':spark') // TODO: dependency to spark should be eliminated
implementation project(':datasources') // TODO: dependency to datasources should be eliminated
implementation 'org.json:json:20231013'
implementation 'com.google.code.gson:gson:2.8.9'
api group: 'com.amazonaws', name: 'aws-java-sdk-emrserverless', version: "${aws_java_sdk_version}"

testImplementation(platform("org.junit:junit-bom:5.9.3"))

34 changes: 34 additions & 0 deletions SparkDataTypeTest.java
@@ -0,0 +1,34 @@
/*
* Copyright OpenSearch Contributors
* SPDX-License-Identifier: Apache-2.0
*/

package org.opensearch.sql.spark.data.type;

import static org.junit.jupiter.api.Assertions.*;

import org.junit.jupiter.api.Test;

class SparkDataTypeTest {

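// typeName() should return exactly the name supplied to the constructor.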
@Test
void testTypeName() {
String expectedTypeName = "spark_string";
SparkDataType sparkDataType = new SparkDataType(expectedTypeName);

assertEquals(
expectedTypeName, sparkDataType.typeName(), "Type name should match the expected value");
}

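// Equality and hashCode are based on the type name: equal names mean equal instances and hash codes.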
@Test
void testEqualsAndHashCode() {
SparkDataType type1 = new SparkDataType("spark_integer");
SparkDataType type2 = new SparkDataType("spark_integer");
SparkDataType type3 = new SparkDataType("spark_double");

assertEquals(type1, type2);
assertNotEquals(type1, type3);
assertEquals(type1.hashCode(), type2.hashCode());
assertNotEquals(type1.hashCode(), type3.hashCode());
}
}
40 changes: 40 additions & 0 deletions SparkExprValueTest.java
@@ -0,0 +1,40 @@
/*
* Copyright OpenSearch Contributors
* SPDX-License-Identifier: Apache-2.0
*/

package org.opensearch.sql.spark.data.value;

import static org.junit.jupiter.api.Assertions.*;

import org.junit.jupiter.api.Test;
import org.opensearch.sql.spark.data.type.SparkDataType;

class SparkExprValueTest {
private final SparkDataType sparkDataType = new SparkDataType("char");

@Test
public void getters() {
SparkExprValue sparkExprValue = new SparkExprValue(sparkDataType, "str");

assertEquals(sparkDataType, sparkExprValue.type());
assertEquals("str", sparkExprValue.value());
}

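// SparkExprValue defines no ordering, so compare() is expected to throw.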
@Test
public void unsupportedCompare() {
SparkExprValue sparkExprValue = new SparkExprValue(sparkDataType, "str");

assertThrows(UnsupportedOperationException.class, () -> sparkExprValue.compare(sparkExprValue));
}

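// equal() matches on the wrapped value: identical values compare equal, different ones do not.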
@Test
public void testEquals() {
SparkExprValue sparkExprValue1 = new SparkExprValue(sparkDataType, "str");
SparkExprValue sparkExprValue2 = new SparkExprValue(sparkDataType, "str");
SparkExprValue sparkExprValue3 = new SparkExprValue(sparkDataType, "other");

assertTrue(sparkExprValue1.equal(sparkExprValue2));
assertFalse(sparkExprValue1.equal(sparkExprValue3));
}
}
63 changes: 63 additions & 0 deletions DefaultSparkSqlFunctionResponseHandleTest.java
@@ -0,0 +1,63 @@
/*
* Copyright OpenSearch Contributors
* SPDX-License-Identifier: Apache-2.0
*/

package org.opensearch.sql.spark.functions.response;

import static org.junit.jupiter.api.Assertions.*;

import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.List;
import java.util.Map;
import org.json.JSONObject;
import org.junit.jupiter.api.Test;
import org.opensearch.sql.data.model.ExprBooleanValue;
import org.opensearch.sql.data.model.ExprByteValue;
import org.opensearch.sql.data.model.ExprDateValue;
import org.opensearch.sql.data.model.ExprDoubleValue;
import org.opensearch.sql.data.model.ExprFloatValue;
import org.opensearch.sql.data.model.ExprIntegerValue;
import org.opensearch.sql.data.model.ExprLongValue;
import org.opensearch.sql.data.model.ExprShortValue;
import org.opensearch.sql.data.model.ExprStringValue;
import org.opensearch.sql.data.model.ExprTimestampValue;
import org.opensearch.sql.data.model.ExprValue;
import org.opensearch.sql.executor.ExecutionEngine;
import org.opensearch.sql.executor.ExecutionEngine.Schema.Column;

class DefaultSparkSqlFunctionResponseHandleTest {

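// End-to-end check: parse the spark_execution_result_test.json fixture (added in this PR)
// and verify each column is mapped to the corresponding ExprValue type.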
@Test
public void testConstruct() throws Exception {
DefaultSparkSqlFunctionResponseHandle handle =
new DefaultSparkSqlFunctionResponseHandle(readJson());

assertTrue(handle.hasNext());
ExprValue value = handle.next();
Map<String, ExprValue> row = value.tupleValue();
assertEquals(ExprBooleanValue.of(true), row.get("col1"));
assertEquals(new ExprLongValue(2), row.get("col2"));
assertEquals(new ExprIntegerValue(3), row.get("col3"));
assertEquals(new ExprShortValue(4), row.get("col4"));
assertEquals(new ExprByteValue(5), row.get("col5"));
assertEquals(new ExprDoubleValue(6.1), row.get("col6"));
assertEquals(new ExprFloatValue(7.1), row.get("col7"));
assertEquals(new ExprTimestampValue("2024-01-02 03:04:05.1234"), row.get("col8"));
assertEquals(new ExprDateValue("2024-01-03 04:05:06.1234"), row.get("col9"));
assertEquals(new ExprStringValue("some string"), row.get("col10"));

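// The schema should expose the fixture's columns, starting with col1.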
ExecutionEngine.Schema schema = handle.schema();
List<Column> columns = schema.getColumns();
assertEquals("col1", columns.get(0).getName());
}

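// Loads the fixture from the test classpath as a JSONObject.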
private JSONObject readJson() throws Exception {
final URL url =
DefaultSparkSqlFunctionResponseHandle.class.getResource(
"/spark_execution_result_test.json");
return new JSONObject(Files.readString(Paths.get(url.toURI())));
}
}
PPLQueryValidatorTest.java
@@ -97,7 +97,7 @@ private String[] addPrefix(String... queries) {
void testAllowAllByDefault() {
when(mockedProvider.getValidatorForDatasource(any()))
.thenReturn(new DefaultGrammarElementValidator());
VerifyValidator v = new VerifyValidator(pplQueryValidator, DataSourceType.SPARK);
VerifyValidator v = new VerifyValidator(pplQueryValidator, DataSourceType.S3GLUE);
Arrays.stream(PPLQueryValidatorTest.TestElement.values()).forEach(v::ok);
}

@@ -127,7 +127,7 @@ public TestPPLGrammarElementValidator() {
void testCwlValidator() {
when(mockedProvider.getValidatorForDatasource(any()))
.thenReturn(new TestPPLGrammarElementValidator());
VerifyValidator v = new VerifyValidator(pplQueryValidator, DataSourceType.SPARK);
VerifyValidator v = new VerifyValidator(pplQueryValidator, DataSourceType.S3GLUE);

v.ok(TestElement.FIELDS);
v.ok(TestElement.WHERE);
SQLQueryValidatorTest.java
@@ -208,14 +208,14 @@ private enum TestElement {
void testAllowAllByDefault() {
when(mockedProvider.getValidatorForDatasource(any()))
.thenReturn(new DefaultGrammarElementValidator());
VerifyValidator v = new VerifyValidator(sqlQueryValidator, DataSourceType.SPARK);
VerifyValidator v = new VerifyValidator(sqlQueryValidator, DataSourceType.S3GLUE);
Arrays.stream(TestElement.values()).forEach(v::ok);
}

@Test
void testDenyAllValidator() {
when(mockedProvider.getValidatorForDatasource(any())).thenReturn(element -> false);
VerifyValidator v = new VerifyValidator(sqlQueryValidator, DataSourceType.SPARK);
VerifyValidator v = new VerifyValidator(sqlQueryValidator, DataSourceType.S3GLUE);
// Elements that don't have validation will be accepted.
// That's why there are some OK cases.

@@ -574,7 +574,7 @@ void testValidateFlintExtensionQuery() {
@Test
void testInvalidIdentifier() {
when(mockedProvider.getValidatorForDatasource(any())).thenReturn(element -> true);
VerifyValidator v = new VerifyValidator(sqlQueryValidator, DataSourceType.SPARK);
VerifyValidator v = new VerifyValidator(sqlQueryValidator, DataSourceType.S3GLUE);
v.ng("SELECT a.b.c as a-b-c FROM abc");
v.ok("SELECT a.b.c as `a-b-c` FROM abc");
v.ok("SELECT a.b.c as a_b_c FROM abc");
@@ -588,7 +588,7 @@ void testInvalidIdentifier() {
@Test
void testUnsupportedType() {
when(mockedProvider.getValidatorForDatasource(any())).thenReturn(element -> true);
VerifyValidator v = new VerifyValidator(sqlQueryValidator, DataSourceType.SPARK);
VerifyValidator v = new VerifyValidator(sqlQueryValidator, DataSourceType.S3GLUE);

v.ng("SELECT cast ( a as DateTime ) FROM tbl");
v.ok("SELECT cast ( a as DATE ) FROM tbl");
@@ -599,7 +599,7 @@ void testUnsupportedType() {
@Test
void testUnsupportedTypedLiteral() {
when(mockedProvider.getValidatorForDatasource(any())).thenReturn(element -> true);
VerifyValidator v = new VerifyValidator(sqlQueryValidator, DataSourceType.SPARK);
VerifyValidator v = new VerifyValidator(sqlQueryValidator, DataSourceType.S3GLUE);

v.ng("SELECT DATETIME '2024-10-11'");
v.ok("SELECT DATE '2024-10-11'");
@@ -609,7 +609,7 @@ void testUnsupportedHiveNativeCommand() {
@Test
void testUnsupportedHiveNativeCommand() {
when(mockedProvider.getValidatorForDatasource(any())).thenReturn(element -> true);
VerifyValidator v = new VerifyValidator(sqlQueryValidator, DataSourceType.SPARK);
VerifyValidator v = new VerifyValidator(sqlQueryValidator, DataSourceType.S3GLUE);

v.ng("CREATE ROLE aaa");
v.ng("SHOW GRANT");
79 changes: 79 additions & 0 deletions spark_execution_result_test.json
@@ -0,0 +1,79 @@
{
"data" : {
"schema": [
{
"column_name": "col1",
"data_type": "boolean"
},
{
"column_name": "col2",
"data_type": "long"
},
{
"column_name": "col3",
"data_type": "integer"
},
{
"column_name": "col4",
"data_type": "short"
},
{
"column_name": "col5",
"data_type": "byte"
},
{
"column_name": "col6",
"data_type": "double"
},
{
"column_name": "col7",
"data_type": "float"
},
{
"column_name": "col8",
"data_type": "timestamp"
},
{
"column_name": "col9",
"data_type": "date"
},
{
"column_name": "col10",
"data_type": "string"
},
{
"column_name": "col11",
"data_type": "other"
},
{
"column_name": "col12",
"data_type": "other object"
},
{
"column_name": "col13",
"data_type": "other array"
},
{
"column_name": "col14",
"data_type": "other"
}
],
"result": [
{
"col1": true,
"col2": 2,
"col3": 3,
"col4": 4,
"col5": 5,
"col6": 6.1,
"col7": 7.1,
"col8": "2024-01-02 03:04:05.1234",
"col9": "2024-01-03 04:05:06.1234",
"col10": "some string",
"col11": "other value",
"col12": { "hello": "world" },
"col13": [1, 2, 3]
}
]
}
}
DataSourceType.java
@@ -15,15 +15,14 @@
public class DataSourceType {
public static final DataSourceType PROMETHEUS = new DataSourceType("PROMETHEUS");
public static final DataSourceType OPENSEARCH = new DataSourceType("OPENSEARCH");
public static final DataSourceType SPARK = new DataSourceType("SPARK");
public static final DataSourceType S3GLUE = new DataSourceType("S3GLUE");
public static final DataSourceType SECURITY_LAKE = new DataSourceType("SECURITY_LAKE");

// Map from uppercase DataSourceType name to DataSourceType object
private static final Map<String, DataSourceType> knownValues = new HashMap<>();

static {
register(PROMETHEUS, OPENSEARCH, SPARK, S3GLUE, SECURITY_LAKE);
register(PROMETHEUS, OPENSEARCH, S3GLUE, SECURITY_LAKE);
}

private final String name;
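
For context, a minimal sketch of how callers resolve the remaining registered types. It assumes DataSourceType sits in its usual package (org.opensearch.sql.datasource.model) and that fromString looks names up in knownValues case-insensitively, as the uppercase-key comment above and the lowercase cases in DataSourceTypeTest below suggest. Illustrative only, not part of this change.

// Illustrative sketch (not part of this PR): resolving registered data source types.
import org.opensearch.sql.datasource.model.DataSourceType;

class DataSourceTypeLookupExample {
  public static void main(String[] args) {
    DataSourceType glue = DataSourceType.fromString("S3GLUE");
    DataSourceType glueLower = DataSourceType.fromString("s3glue"); // looked up by uppercased name
    System.out.println(glue == glueLower); // true: both resolve to the registered S3GLUE instance
    // After this change "SPARK" is no longer registered; only PROMETHEUS, OPENSEARCH,
    // S3GLUE, and SECURITY_LAKE remain in knownValues.
  }
}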
DataSourceTypeTest.java
@@ -15,7 +15,6 @@ class DataSourceTypeTest {
public void fromString_succeed() {
testFromString("PROMETHEUS", DataSourceType.PROMETHEUS);
testFromString("OPENSEARCH", DataSourceType.OPENSEARCH);
testFromString("SPARK", DataSourceType.SPARK);
testFromString("S3GLUE", DataSourceType.S3GLUE);

testFromString("prometheus", DataSourceType.PROMETHEUS);