@@ -22,7 +22,6 @@
 
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Row;
-import org.apache.spark.sql.types.DataTypes;
 import org.apache.spark.sql.types.StructType;
 import org.rumbledb.api.Item;
 import org.rumbledb.context.DynamicContext;
@@ -36,6 +35,7 @@
 import org.rumbledb.runtime.flwor.FlworDataFrame;
 import org.rumbledb.runtime.flwor.FlworDataFrameColumn;
 import org.rumbledb.runtime.flwor.FlworDataFrameUtils;
+import org.rumbledb.runtime.primary.VariableReferenceIterator;
 import org.rumbledb.runtime.flwor.NativeClauseContext;
 import org.rumbledb.runtime.flwor.udfs.LongSerializeUDF;
 
@@ -162,25 +162,13 @@ public static Dataset<Row> addSerializedCountColumn(
 
         Dataset<Row> dfWithIndex = FlworDataFrameUtils.zipWithIndex(df, 1L, variableName.toString());
 
-        df.sparkSession()
-            .udf()
-            .register(
-                "serializeCountIndex",
-                new LongSerializeUDF(),
-                DataTypes.BinaryType
-            );
-
-        String viewName = FlworDataFrameUtils.createTempView(dfWithIndex);
+        dfWithIndex.createOrReplaceTempView("input");
         dfWithIndex = dfWithIndex.sparkSession()
             .sql(
                 String.format(
-                    "select %s serializeCountIndex(`%s`) as `%s` from %s",
+                    "select %s `%s` from input",
                     selectSQL,
-                    variableName,
-                    variableName,
-                    viewName
+                    variableName
                 )
             );
         return dfWithIndex;
     }

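For context on what the hunk above changes: the old code registered a Spark UDF that serialized the long count column to binary and invoked it in the generated SQL, whereas the new code selects the column produced by zipWithIndex directly. Below is a minimal, self-contained sketch of the before/after pattern, not RumbleDB code: the local SparkSession, the `input` view built from `range`, and the trivial byte-serializing UDF1 standing in for LongSerializeUDF are all assumptions made for illustration.

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.api.java.UDF1;
import org.apache.spark.sql.types.DataTypes;

public class CountColumnSketch {
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
            .master("local[*]")
            .appName("count-column-sketch")
            .getOrCreate();

        // Stand-in for zipWithIndex: an item column plus a 1-based long count column.
        Dataset<Row> df = spark.sql("SELECT id AS item, id + 1 AS `count` FROM range(3)");
        df.createOrReplaceTempView("input");

        // Old pattern: push the long through a serialization UDF, yielding a binary column.
        spark.udf()
            .register(
                "serializeCountIndex",
                (UDF1<Long, byte[]>) l -> java.nio.ByteBuffer.allocate(8).putLong(l).array(),
                DataTypes.BinaryType
            );
        Dataset<Row> serialized = spark.sql("SELECT item, serializeCountIndex(`count`) AS `count` FROM input");

        // New pattern: keep the native long column and skip the UDF round-trip.
        Dataset<Row> plain = spark.sql("SELECT item, `count` FROM input");

        serialized.printSchema(); // count: binary
        plain.printSchema();      // count: bigint
        spark.stop();
    }
}

Dropping the binary round-trip leaves the count as a native bigint column that Spark can compare and sort without deserialization, which appears to be the common thread of the changes in this PR.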
@@ -50,6 +50,11 @@
 import org.rumbledb.runtime.flwor.closures.ItemsToBinaryColumn;
 import org.rumbledb.runtime.flwor.udfs.DataFrameContext;
 import org.rumbledb.runtime.flwor.udfs.ForClauseUDF;
+import org.rumbledb.runtime.flwor.udfs.WhereClauseUDF;
+import org.rumbledb.runtime.operational.AndOperationIterator;
+import org.rumbledb.runtime.operational.ComparisonOperationIterator;
+import org.rumbledb.runtime.postfix.PredicateIterator;
+import org.rumbledb.runtime.primary.ArrayRuntimeIterator;
 import org.rumbledb.runtime.flwor.udfs.GenericForClauseUDF;
 import org.rumbledb.runtime.flwor.udfs.IntegerSerializeUDF;
 import org.rumbledb.runtime.navigation.PredicateIterator;
@@ -455,7 +460,7 @@ private FlworDataFrame getDataFrameFromJoin(
 
         // If the join criterion uses the context count, then we need to add it to the expression side (it is a
         // constant).
-        if (predicateDependencies.containsKey(Name.CONTEXT_COUNT)) {
+        if (sequenceVariableName.equals(Name.CONTEXT_ITEM) && predicateDependencies.containsKey(Name.CONTEXT_COUNT)) {
             expressionDF.sparkSession()
                 .udf()
                 .register(
@@ -468,7 +473,7 @@
             expressionDF = expressionDF.sparkSession()
                 .sql(
                     String.format(
-                        "SELECT *, serializeIntegerIndex(%s) AS `%s` FROM %s",
+                        "SELECT *, %s AS `%s` FROM %s",
                         Long.toString(size),
                         Name.CONTEXT_COUNT.getLocalName(),
                         expressionDFTableName
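In this hunk the context count, a constant known on the driver, is spliced into the query as a plain literal instead of being wrapped in serializeIntegerIndex. A rough sketch of that technique under assumed names (the `expression_side` view, the `ctx_count` column, and the size value are illustrative, not RumbleDB's):

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public class ConstantCountColumnSketch {
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
            .master("local[*]")
            .appName("constant-count-column-sketch")
            .getOrCreate();

        Dataset<Row> expressionDF = spark.sql("SELECT id AS item FROM range(4)");
        expressionDF.createOrReplaceTempView("expression_side");

        long size = 4L; // the constant sequence size known on the driver

        // The literal is formatted into the query text, so every row carries the
        // same bigint value; no UDF registration or serialization is involved.
        Dataset<Row> withCount = spark.sql(
            String.format(
                "SELECT *, %s AS `%s` FROM %s",
                Long.toString(size),
                "ctx_count",
                "expression_side"
            )
        );
        withCount.show();
        spark.stop();
    }
}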
@@ -679,19 +684,11 @@ private FlworDataFrame getDataFrameInParallel(
                     );
             }
         } else {
-            df.sparkSession()
-                .udf()
-                .register(
-                    "serializePositionIndex",
-                    new IntegerSerializeUDF(),
-                    DataTypes.BinaryType
-                );
-
             if (this.allowingEmpty) {
                 df = df.sparkSession()
                     .sql(
                         String.format(
-                            "SELECT %s for_vars.`%s`, serializePositionIndex(IF(for_vars.`%s` IS NULL, 0, for_vars.`%s` + 1)) AS `%s` "
+                            "SELECT %s for_vars.`%s`, IF(for_vars.`%s` IS NULL, 0, for_vars.`%s` + 1) AS `%s` "
                                 + "FROM %s "
                                 + "LATERAL VIEW OUTER posexplode(forClauseUDF(%s)) for_vars AS `%s`, `%s` ",
                             projectionVariables,
@@ -709,7 +706,7 @@
                 df = df.sparkSession()
                     .sql(
                         String.format(
-                            "SELECT %s for_vars.`%s`, serializePositionIndex(for_vars.`%s` + 1) AS `%s` "
+                            "SELECT %s for_vars.`%s`, for_vars.`%s` + 1 AS `%s` "
                                 + "FROM %s "
                                 + "LATERAL VIEW posexplode(forClauseUDF(%s)) for_vars AS `%s`, `%s` ",
                             projectionVariables,
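In both branches above, the positional variable is now computed directly from posexplode's 0-based index (`pos + 1`, or 0 for the NULL produced by the OUTER variant on an empty sequence) instead of being routed through serializePositionIndex. The sketch below shows the LATERAL VIEW posexplode pattern on its own; the plain array column replacing the forClauseUDF call, and all table and column names, are assumptions for illustration.

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public class PosExplodeSketch {
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
            .master("local[*]")
            .appName("posexplode-sketch")
            .getOrCreate();

        // Each row carries an array standing in for the items a for clause would bind.
        Dataset<Row> df = spark.sql(
            "SELECT * FROM VALUES (1, array('a', 'b')), (2, cast(array() AS array<string>)) AS t(doc, items)"
        );
        df.createOrReplaceTempView("input");

        // posexplode yields a 0-based position and the item; +1 gives the 1-based
        // positional variable. OUTER keeps rows whose array is empty, producing NULLs
        // that the IF maps to position 0.
        Dataset<Row> exploded = spark.sql(
            "SELECT doc, for_vars.`item`, "
                + "IF(for_vars.`pos` IS NULL, 0, for_vars.`pos` + 1) AS `position` "
                + "FROM input "
                + "LATERAL VIEW OUTER posexplode(items) for_vars AS `pos`, `item`"
        );
        exploded.show();
        spark.stop();
    }
}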
@@ -857,6 +854,7 @@ public static FlworDataFrame getDataFrameStartingClause(
             }
             return result;
         }
+        dfWithIndex.createOrReplaceTempView("inputWithIndex");
         String inputWithIndex = FlworDataFrameUtils.createTempView(dfWithIndex);
         dfWithIndex.sparkSession()
             .udf()
@@ -869,8 +867,7 @@
         dfWithIndex = dfWithIndex.sparkSession()
             .sql(
                 String.format(
-                    "SELECT %s.`%s`, IF(%s.`%s` IS NULL, serializeCountIndex(0), %s.`%s`) AS `%s` FROM VALUES(1) FULL OUTER JOIN %s",
-                    inputWithIndex,
+                    "SELECT %s.`%s`, IF(%s.`%s` IS NULL, 0, %s.`%s`) AS `%s` FROM VALUES(1) FULL OUTER JOIN %s",
                     variableName,
                     inputWithIndex,
                     positionalVariableName,

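One more hedged sketch, for the last hunk: the FULL OUTER JOIN against a one-row VALUES table is what lets getDataFrameStartingClause still emit a row when the indexed input is empty, and the IF now substitutes a plain 0 for the missing position where it previously called serializeCountIndex(0). The table and column names below are made up; only the join-plus-IF shape mirrors the generated query.

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public class EmptyInputJoinSketch {
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
            .master("local[*]")
            .appName("empty-input-join-sketch")
            .getOrCreate();

        // An input that happens to be empty: the filter removes every row.
        Dataset<Row> dfWithIndex = spark.sql(
            "SELECT id AS item, id + 1 AS pos FROM range(3) WHERE id < 0"
        );
        dfWithIndex.createOrReplaceTempView("inputWithIndex");

        // A FULL OUTER JOIN against a single-row VALUES table yields exactly one row
        // when the input is empty; its columns are NULL, and the IF turns the missing
        // position into 0. With a non-empty input, every input row is preserved.
        Dataset<Row> result = spark.sql(
            "SELECT inputWithIndex.item, "
                + "IF(inputWithIndex.pos IS NULL, 0, inputWithIndex.pos) AS pos "
                + "FROM VALUES(1) FULL OUTER JOIN inputWithIndex"
        );
        result.show(); // one row: item is NULL, pos is 0
        spark.stop();
    }
}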
This file was deleted.

This file was deleted.
