Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -116,7 +116,9 @@ public static ExprValue fromObjectValue(Object o) {
if (null == o) {
return LITERAL_NULL;
}
if (o instanceof Map) {
if (o instanceof ExprValue) {
return (ExprValue) o;
} else if (o instanceof Map) {
return tupleValue((Map) o);
} else if (o instanceof List) {
return collectionValue(((List) o));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -218,10 +218,8 @@ public void unSupportedObject() {
Exception exception =
assertThrows(
ExpressionEvaluationException.class,
() -> ExprValueUtils.fromObjectValue(integerValue(1)));
assertEquals(
"unsupported object " + "class org.opensearch.sql.data.model.ExprIntegerValue",
exception.getMessage());
() -> ExprValueUtils.fromObjectValue(new Object()));
assertEquals("unsupported object class java.lang.Object", exception.getMessage());
}

@Test
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -600,8 +600,8 @@ public void testNumericLiteral() throws IOException {
schema("floatLiteral", "float"));
verifyDataRows(
result,
rows("hello", 20, 0.05, 0.049999999999999996, 0.05),
rows("world", 30, 0.05, 0.049999999999999996, 0.05));
rows("hello", 20, 0.05, 0.049999999999999996, 0.049999999999999996),
rows("world", 30, 0.05, 0.049999999999999996, 0.049999999999999996));
}

@Test
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
# REST YAML test: verifies PPL (with Calcite enabled) can read documents whose
# fields hold either a single value or an array of values for the same mapping.
setup:
# Create the test index. Note the mappings use scalar field types only —
# OpenSearch accepts arrays for any field without a dedicated array type.
- do:
indices.create:
index: array-test
body:
settings:
number_of_shards: 1
mappings:
properties:
array_keyword:
type: keyword
array_text:
type: text
array_num:
type: long
array_boolean:
type: boolean
array_date:
type: date
num:
type: integer
parent:
type: object
properties:
child_array_num:
type: float
child_array_keyword:
type: keyword
child_num:
type: integer
# Index two documents: the first stores arrays in every array_* field (and in
# the nested parent object), the second stores single values in the same fields.
- do:
bulk:
index: array-test
refresh: true
body:
- '{"index": {}}'
- '{"array_text": ["Jane Smith","hello world"],"array_keyword": [ "c++", "java" ],"array_num": [1, 2],"array_boolean": [true, false],"array_date": ["2023-01-02T22:03:34.000Z","2023-01-02T22:03:35.000Z"],"num" : 10,"parent": {"child_array_num": [1.1, 2.2], "child_array_keyword":["a a a", "b b b"], "child_num": 100}}'
- '{"index": {}}'
- '{"array_text": "OpenSearch PPL","array_keyword": "python","array_num": 3,"array_boolean": true,"array_date": "2023-01-02T22:03:36.000Z","num" : 11,"parent": {"child_array_num": 3.3, "child_array_keyword":"c c c", "child_num": 101}}'
# Enable the Calcite engine — the behavior under test is Calcite-specific.
- do:
query.settings:
body:
transient:
plugins.calcite.enabled : true
---
teardown:
# Restore the default engine so later test suites are unaffected.
- do:
query.settings:
body:
transient:
plugins.calcite.enabled : false

---
"Handle multiple values (array) documents":
- skip:
features:
- headers
- allowed_warnings
- do:
allowed_warnings:
- 'Loading the fielddata on the _id field is deprecated and will be removed in future versions. If you require sorting or aggregating on this field you should also include the id in the body of your documents, and map this field as a keyword field that has [doc_values] enabled'
headers:
Content-Type: 'application/json'
ppl:
body:
query: 'source=array-test '
# Both documents must come back; array fields are returned as-is (arrays stay
# arrays, scalars stay scalars) rather than being collapsed to a single value.
- match: {"total": 2}
- match: {"schema": [{"name": "parent", "type": "struct"},{"name":"array_boolean","type":"boolean"},{"name":"array_num","type":"bigint"},{"name":"array_date","type":"timestamp"},{"name":"num","type":"int"},{"name":"array_text","type":"string"},{"name":"array_keyword","type":"string"}]}
- match: {"datarows": [[{"child_array_keyword":["a a a","b b b"],"child_num":100,"child_array_num":[1.1,2.2]},[true, false],[1,2],["2023-01-02 22:03:34","2023-01-02 22:03:35"],10,["Jane Smith","hello world"],["c++","java"]],[{"child_num": 101,"child_array_keyword":"c c c","child_array_num":3.3},true,3,"2023-01-02 22:03:36",11,"OpenSearch PPL","python"]]}
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
Expand All @@ -34,6 +35,7 @@
import org.apache.calcite.sql.validate.SqlUserDefinedFunction;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.locationtech.jts.geom.Point;
import org.opensearch.sql.ast.statement.Explain.ExplainFormat;
import org.opensearch.sql.calcite.CalcitePlanContext;
import org.opensearch.sql.calcite.utils.CalciteToolsHelper.OpenSearchRelRunners;
Expand All @@ -42,6 +44,7 @@
import org.opensearch.sql.common.response.ResponseListener;
import org.opensearch.sql.data.model.ExprTupleValue;
import org.opensearch.sql.data.model.ExprValue;
import org.opensearch.sql.data.model.ExprValueUtils;
import org.opensearch.sql.data.type.ExprCoreType;
import org.opensearch.sql.data.type.ExprType;
import org.opensearch.sql.executor.ExecutionContext;
Expand All @@ -52,10 +55,10 @@
import org.opensearch.sql.expression.function.BuiltinFunctionName;
import org.opensearch.sql.expression.function.PPLFuncImpTable;
import org.opensearch.sql.opensearch.client.OpenSearchClient;
import org.opensearch.sql.opensearch.data.value.OpenSearchExprGeoPointValue;
import org.opensearch.sql.opensearch.executor.protector.ExecutionProtector;
import org.opensearch.sql.opensearch.functions.DistinctCountApproxAggFunction;
import org.opensearch.sql.opensearch.functions.GeoIpFunction;
import org.opensearch.sql.opensearch.util.JdbcOpenSearchDataTypeConvertor;
import org.opensearch.sql.planner.physical.PhysicalPlan;
import org.opensearch.sql.storage.TableScanOperator;
import org.opensearch.transport.client.node.NodeClient;
Expand Down Expand Up @@ -212,6 +215,38 @@ public void execute(
});
}

/**
 * Recursively normalizes a raw JDBC result value before it is converted to an {@code
 * ExprValue}. JTS {@link Point} instances become {@code OpenSearchExprGeoPointValue} (JTS keeps
 * longitude in x and latitude in y); maps and lists are walked element by element so nested
 * structures are normalized too; every other value is returned unchanged.
 *
 * @param value raw value read from the result set; may be {@code null}
 * @return the normalized value, or {@code null} when the input is {@code null}
 */
private static Object processValue(Object value) {
  if (value == null) {
    return null;
  }
  if (value instanceof Point) {
    Point geoPoint = (Point) value;
    // Constructor takes (latitude, longitude) = (y, x).
    return new OpenSearchExprGeoPointValue(geoPoint.getY(), geoPoint.getX());
  }
  if (value instanceof Map) {
    @SuppressWarnings("unchecked")
    Map<String, Object> source = (Map<String, Object>) value;
    Map<String, Object> normalized = new HashMap<>();
    source.forEach((key, raw) -> normalized.put(key, processValue(raw)));
    return normalized;
  }
  if (value instanceof List) {
    List<?> source = (List<?>) value;
    List<Object> normalized = new ArrayList<>(source.size());
    source.forEach(element -> normalized.add(processValue(element)));
    return normalized;
  }
  // Scalars (numbers, strings, booleans, timestamps, ...) pass through untouched.
  return value;
}

private void buildResultSet(
ResultSet resultSet,
RelDataType rowTypes,
Expand All @@ -230,11 +265,9 @@ private void buildResultSet(
// Loop through each column
for (int i = 1; i <= columnCount; i++) {
String columnName = metaData.getColumnName(i);
int sqlType = metaData.getColumnType(i);
RelDataType fieldType = fieldTypes.get(i - 1);
ExprValue exprValue =
JdbcOpenSearchDataTypeConvertor.getExprValueFromSqlType(
resultSet, i, sqlType, fieldType, columnName);
Object value = resultSet.getObject(columnName);
Object converted = processValue(value);
ExprValue exprValue = ExprValueUtils.fromObjectValue(converted);
row.put(columnName, exprValue);
}
values.add(ExprTupleValue.fromExprValueMap(row));
Expand Down

This file was deleted.

Loading