From 603b7b8455ddff24d84128a3c45f0df4a7e5fd7e Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Tue, 31 Mar 2020 23:42:08 +0200 Subject: [PATCH 001/519] Prune cross joins more --- .../src/main/java/io/prestosql/sql/planner/PlanOptimizers.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java b/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java index 24c18cbd0f08..f1a283928129 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java @@ -597,7 +597,8 @@ public PlanOptimizers( ImmutableSet.of( new PushPartialAggregationThroughJoin(), new PushPartialAggregationThroughExchange(metadata), - new PruneJoinColumns()))); + new PruneJoinColumns(), + new PruneJoinChildrenColumns()))); builder.add(new IterativeOptimizer( ruleStats, statsCalculator, From 1be444dc6d9f4dd3497b6d18b6dcd8a8e352b2fd Mon Sep 17 00:00:00 2001 From: David Phillips Date: Tue, 31 Mar 2020 13:35:17 -0700 Subject: [PATCH 002/519] Update GitHub Actions checkout to v2 --- .github/workflows/checks.yml | 6 +++--- .github/workflows/module-tests.yml | 6 +++--- .github/workflows/product-tests.yml | 2 +- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml index 0747476280c0..9106d14bf9ff 100644 --- a/.github/workflows/checks.yml +++ b/.github/workflows/checks.yml @@ -20,7 +20,7 @@ jobs: 13 ] steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v2 - uses: actions/setup-java@v1 with: java-version: ${{ matrix.java-version }} @@ -38,7 +38,7 @@ jobs: error-prone-checks: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v2 - uses: actions/setup-java@v1 with: java-version: 11 @@ -55,6 +55,6 @@ jobs: web-ui-checks: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v1 + - uses: 
actions/checkout@v2 - name: Web UI Checks run: presto-main/bin/check_webui.sh diff --git a/.github/workflows/module-tests.yml b/.github/workflows/module-tests.yml index fbd30ccffd66..36cd42689c2c 100644 --- a/.github/workflows/module-tests.yml +++ b/.github/workflows/module-tests.yml @@ -19,7 +19,7 @@ jobs: # TODO config-cdh5, ] steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v2 - uses: actions/setup-java@v1 with: java-version: 8 @@ -59,7 +59,7 @@ jobs: test-other-modules: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v2 - uses: actions/setup-java@v1 with: java-version: 8 @@ -103,7 +103,7 @@ jobs: "presto-kudu", ] steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v2 - uses: actions/setup-java@v1 with: java-version: 8 diff --git a/.github/workflows/product-tests.yml b/.github/workflows/product-tests.yml index d5790efe3e4a..163bd74813dd 100644 --- a/.github/workflows/product-tests.yml +++ b/.github/workflows/product-tests.yml @@ -42,7 +42,7 @@ jobs: - config: config-cdh5 suite: suite-8-non-generic steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v2 - uses: actions/setup-java@v1 with: java-version: 8 From 27af6fb12864b43598281ce4e32ccbe14e458b2b Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Tue, 31 Mar 2020 08:47:42 +0200 Subject: [PATCH 003/519] Add Project-off rule for EnforceSingleRowNode --- .../prestosql/sql/planner/PlanOptimizers.java | 2 + .../rule/PruneEnforceSingleRowColumns.java | 39 ++++++++++++ .../TestPruneEnforceSingleRowColumns.java | 62 +++++++++++++++++++ 3 files changed, 103 insertions(+) create mode 100644 presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneEnforceSingleRowColumns.java create mode 100644 presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneEnforceSingleRowColumns.java diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java 
b/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java index f1a283928129..ed82e4555772 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java @@ -65,6 +65,7 @@ import io.prestosql.sql.planner.iterative.rule.PruneCorrelatedJoinColumns; import io.prestosql.sql.planner.iterative.rule.PruneCountAggregationOverScalar; import io.prestosql.sql.planner.iterative.rule.PruneDeleteSourceColumns; +import io.prestosql.sql.planner.iterative.rule.PruneEnforceSingleRowColumns; import io.prestosql.sql.planner.iterative.rule.PruneFilterColumns; import io.prestosql.sql.planner.iterative.rule.PruneIndexSourceColumns; import io.prestosql.sql.planner.iterative.rule.PruneJoinChildrenColumns; @@ -238,6 +239,7 @@ public PlanOptimizers( new PruneApplySourceColumns(), new PruneCorrelatedJoinColumns(), new PruneDeleteSourceColumns(), + new PruneEnforceSingleRowColumns(), new PruneFilterColumns(), new PruneIndexSourceColumns(), new PruneJoinChildrenColumns(), diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneEnforceSingleRowColumns.java b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneEnforceSingleRowColumns.java new file mode 100644 index 000000000000..bdb093ee1dc9 --- /dev/null +++ b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneEnforceSingleRowColumns.java @@ -0,0 +1,39 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.sql.planner.iterative.rule; + +import io.prestosql.sql.planner.Symbol; +import io.prestosql.sql.planner.plan.EnforceSingleRowNode; +import io.prestosql.sql.planner.plan.PlanNode; + +import java.util.Optional; +import java.util.Set; + +import static io.prestosql.sql.planner.iterative.rule.Util.restrictChildOutputs; +import static io.prestosql.sql.planner.plan.Patterns.enforceSingleRow; + +public class PruneEnforceSingleRowColumns + extends ProjectOffPushDownRule +{ + public PruneEnforceSingleRowColumns() + { + super(enforceSingleRow()); + } + + @Override + protected Optional pushDownProjectOff(Context context, EnforceSingleRowNode enforceSingleRowNode, Set referencedOutputs) + { + return restrictChildOutputs(context.getIdAllocator(), enforceSingleRowNode, referencedOutputs); + } +} diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneEnforceSingleRowColumns.java b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneEnforceSingleRowColumns.java new file mode 100644 index 000000000000..4adfda09f797 --- /dev/null +++ b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneEnforceSingleRowColumns.java @@ -0,0 +1,62 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.sql.planner.iterative.rule; + +import com.google.common.collect.ImmutableMap; +import io.prestosql.sql.planner.Symbol; +import io.prestosql.sql.planner.iterative.rule.test.BaseRuleTest; +import io.prestosql.sql.planner.plan.Assignments; +import org.testng.annotations.Test; + +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.enforceSingleRow; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.expression; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.strictProject; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.values; + +public class TestPruneEnforceSingleRowColumns + extends BaseRuleTest +{ + @Test + public void testPruneInputColumn() + { + tester().assertThat(new PruneEnforceSingleRowColumns()) + .on(p -> { + Symbol a = p.symbol("a"); + Symbol b = p.symbol("b"); + return p.project( + Assignments.identity(a), + p.enforceSingleRow(p.values(a, b))); + }) + .matches( + strictProject( + ImmutableMap.of("a", expression("a")), + enforceSingleRow( + strictProject( + ImmutableMap.of("a", expression("a")), + values("a", "b"))))); + } + + @Test + public void testAllOutputsReferenced() + { + tester().assertThat(new PruneEnforceSingleRowColumns()) + .on(p -> { + Symbol a = p.symbol("a"); + return p.project( + Assignments.identity(a), + p.enforceSingleRow(p.values(a))); + }) + .doesNotFire(); + } +} From 617f5854ffa0af86783fecf01d7362691633a328 Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Sat, 15 Feb 2020 16:14:59 +0100 Subject: [PATCH 004/519] Add Project-off rule for AssignUniqueId --- .../prestosql/sql/planner/PlanOptimizers.java | 2 + .../rule/PruneAssignUniqueIdColumns.java | 47 ++++++++++ .../rule/TestPruneAssignUniqueIdColumns.java | 89 +++++++++++++++++++ 3 files changed, 138 insertions(+) create mode 100644 presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneAssignUniqueIdColumns.java create mode 100644 
presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneAssignUniqueIdColumns.java diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java b/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java index ed82e4555772..dd363ca56cd8 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java @@ -62,6 +62,7 @@ import io.prestosql.sql.planner.iterative.rule.PruneAggregationSourceColumns; import io.prestosql.sql.planner.iterative.rule.PruneApplyColumns; import io.prestosql.sql.planner.iterative.rule.PruneApplySourceColumns; +import io.prestosql.sql.planner.iterative.rule.PruneAssignUniqueIdColumns; import io.prestosql.sql.planner.iterative.rule.PruneCorrelatedJoinColumns; import io.prestosql.sql.planner.iterative.rule.PruneCountAggregationOverScalar; import io.prestosql.sql.planner.iterative.rule.PruneDeleteSourceColumns; @@ -237,6 +238,7 @@ public PlanOptimizers( new PruneAggregationSourceColumns(), new PruneApplyColumns(), new PruneApplySourceColumns(), + new PruneAssignUniqueIdColumns(), new PruneCorrelatedJoinColumns(), new PruneDeleteSourceColumns(), new PruneEnforceSingleRowColumns(), diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneAssignUniqueIdColumns.java b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneAssignUniqueIdColumns.java new file mode 100644 index 000000000000..38a20c4737ee --- /dev/null +++ b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneAssignUniqueIdColumns.java @@ -0,0 +1,47 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.sql.planner.iterative.rule; + +import io.prestosql.sql.planner.Symbol; +import io.prestosql.sql.planner.plan.AssignUniqueId; +import io.prestosql.sql.planner.plan.PlanNode; + +import java.util.Optional; +import java.util.Set; + +import static io.prestosql.sql.planner.iterative.rule.Util.restrictChildOutputs; +import static io.prestosql.sql.planner.plan.Patterns.assignUniqueId; + +public class PruneAssignUniqueIdColumns + extends ProjectOffPushDownRule +{ + public PruneAssignUniqueIdColumns() + { + super(assignUniqueId()); + } + + @Override + protected Optional pushDownProjectOff( + Context context, + AssignUniqueId assignUniqueId, + Set referencedOutputs) + { + // remove unused AssignUniqueId node + if (!referencedOutputs.contains(assignUniqueId.getIdColumn())) { + return Optional.of(assignUniqueId.getSource()); + } + + return restrictChildOutputs(context.getIdAllocator(), assignUniqueId, referencedOutputs); + } +} diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneAssignUniqueIdColumns.java b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneAssignUniqueIdColumns.java new file mode 100644 index 000000000000..1c974571aca2 --- /dev/null +++ b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneAssignUniqueIdColumns.java @@ -0,0 +1,89 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.sql.planner.iterative.rule; + +import com.google.common.collect.ImmutableMap; +import io.prestosql.sql.planner.Symbol; +import io.prestosql.sql.planner.assertions.PlanMatchPattern; +import io.prestosql.sql.planner.iterative.rule.test.BaseRuleTest; +import io.prestosql.sql.planner.plan.Assignments; +import org.testng.annotations.Test; + +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.assignUniqueId; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.strictProject; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.values; + +public class TestPruneAssignUniqueIdColumns + extends BaseRuleTest +{ + @Test + public void testRemoveUnusedAssignUniqueId() + { + tester().assertThat(new PruneAssignUniqueIdColumns()) + .on(p -> { + Symbol uniqueId = p.symbol("unique_id"); + Symbol a = p.symbol("a"); + Symbol b = p.symbol("b"); + return p.project( + Assignments.identity(a), + p.assignUniqueId( + uniqueId, + p.values(a, b))); + }) + .matches( + strictProject( + ImmutableMap.of("a", PlanMatchPattern.expression("a")), + values("a", "b"))); + } + + @Test + public void testNotAllInputsReferenced() + { + tester().assertThat(new PruneAssignUniqueIdColumns()) + .on(p -> { + Symbol uniqueId = p.symbol("unique_id"); + Symbol a = p.symbol("a"); + Symbol b = p.symbol("b"); + return p.project( + Assignments.identity(a, uniqueId), + p.assignUniqueId( + uniqueId, + p.values(a, b))); + }) + .matches( + strictProject( + ImmutableMap.of("a", PlanMatchPattern.expression("a"), "unique_id", 
PlanMatchPattern.expression("unique_id")), + assignUniqueId( + "unique_id", + strictProject( + ImmutableMap.of("a", PlanMatchPattern.expression("a")), + values("a", "b"))))); + } + + @Test + public void testAllInputsReferenced() + { + tester().assertThat(new PruneAssignUniqueIdColumns()) + .on(p -> { + Symbol uniqueId = p.symbol("unique_id"); + Symbol a = p.symbol("a"); + return p.project( + Assignments.identity(a, uniqueId), + p.assignUniqueId( + uniqueId, + p.values(a))); + }) + .doesNotFire(); + } +} From 0c8668b2cd1e7521b8dfda23c2c07374bf433704 Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Tue, 31 Mar 2020 08:48:17 +0200 Subject: [PATCH 005/519] Fix symbol pruning in set operation nodes Before this change, ExceptNode and IntersectNode were incorrectly handled in PruneUnreferencedOutputs optimizer. The symbols on which the set operations should be performed, were incorrectly pruned in the case when they were not referenced in the parent node. This could result in changed semantics of the node. Example: - ProjectNode (x) - IntersectNode (x, y) - Source1 layout: (a, b) values: (1, 2) - Source2 layout: (c, d) values: (1, 3) The IntersectNode should produce empty result. However, if output symbol `y` was pruned, and source symbols `b` and `d` were pruned accordingly, we get: - ProjectNode (x) - IntersectNode (x) - Source1 layout: (a) values: (1) - Source2 layout: (c) values: (1) Now the result of intersection is not empty. Note: the incorrect code was never reached, because ExceptNode and IntersectNode were rewritten on earlier stage of optimization. 
--- .../PruneUnreferencedOutputs.java | 56 +++++++++---------- 1 file changed, 26 insertions(+), 30 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PruneUnreferencedOutputs.java b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PruneUnreferencedOutputs.java index e242f2a34125..0ced3c4bbd77 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PruneUnreferencedOutputs.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PruneUnreferencedOutputs.java @@ -713,51 +713,47 @@ public PlanNode visitDelete(DeleteNode node, RewriteContext> context @Override public PlanNode visitUnion(UnionNode node, RewriteContext> context) { - ListMultimap rewrittenSymbolMapping = rewriteSetOperationSymbolMapping(node, context); - ImmutableList rewrittenSubPlans = rewriteSetOperationSubPlans(node, context, rewrittenSymbolMapping); - return new UnionNode(node.getId(), rewrittenSubPlans, rewrittenSymbolMapping, ImmutableList.copyOf(rewrittenSymbolMapping.keySet())); + // Find out which output symbols we need to keep + ImmutableListMultimap.Builder prunedMappingBuilder = ImmutableListMultimap.builder(); + for (Symbol symbol : node.getOutputSymbols()) { + if (context.get().contains(symbol)) { + prunedMappingBuilder.putAll(symbol, node.getSymbolMapping().get(symbol)); + } + } + ListMultimap prunedSymbolMapping = prunedMappingBuilder.build(); + + // Find the corresponding input symbols to the remaining output symbols and prune the children + ImmutableList.Builder rewrittenSources = ImmutableList.builder(); + for (int i = 0; i < node.getSources().size(); i++) { + ImmutableSet.Builder expectedSourceSymbols = ImmutableSet.builder(); + for (Collection symbols : prunedSymbolMapping.asMap().values()) { + expectedSourceSymbols.add(Iterables.get(symbols, i)); + } + rewrittenSources.add(context.rewrite(node.getSources().get(i), expectedSourceSymbols.build())); + } + + return new 
UnionNode(node.getId(), rewrittenSources.build(), prunedSymbolMapping, ImmutableList.copyOf(prunedSymbolMapping.keySet())); } @Override public PlanNode visitIntersect(IntersectNode node, RewriteContext> context) { - ListMultimap rewrittenSymbolMapping = rewriteSetOperationSymbolMapping(node, context); - ImmutableList rewrittenSubPlans = rewriteSetOperationSubPlans(node, context, rewrittenSymbolMapping); - return new IntersectNode(node.getId(), rewrittenSubPlans, rewrittenSymbolMapping, ImmutableList.copyOf(rewrittenSymbolMapping.keySet())); + return rewriteSetOperationChildren(node, context); } @Override public PlanNode visitExcept(ExceptNode node, RewriteContext> context) { - ListMultimap rewrittenSymbolMapping = rewriteSetOperationSymbolMapping(node, context); - ImmutableList rewrittenSubPlans = rewriteSetOperationSubPlans(node, context, rewrittenSymbolMapping); - return new ExceptNode(node.getId(), rewrittenSubPlans, rewrittenSymbolMapping, ImmutableList.copyOf(rewrittenSymbolMapping.keySet())); + return rewriteSetOperationChildren(node, context); } - private ListMultimap rewriteSetOperationSymbolMapping(SetOperationNode node, RewriteContext> context) + private PlanNode rewriteSetOperationChildren(SetOperationNode node, RewriteContext> context) { - // Find out which output symbols we need to keep - ImmutableListMultimap.Builder rewrittenSymbolMappingBuilder = ImmutableListMultimap.builder(); - for (Symbol symbol : node.getOutputSymbols()) { - if (context.get().contains(symbol)) { - rewrittenSymbolMappingBuilder.putAll(symbol, node.getSymbolMapping().get(symbol)); - } - } - return rewrittenSymbolMappingBuilder.build(); - } - - private ImmutableList rewriteSetOperationSubPlans(SetOperationNode node, RewriteContext> context, ListMultimap rewrittenSymbolMapping) - { - // Find the corresponding input symbol to the remaining output symbols and prune the subplans - ImmutableList.Builder rewrittenSubPlans = ImmutableList.builder(); + ImmutableList.Builder 
rewrittenSources = ImmutableList.builder(); for (int i = 0; i < node.getSources().size(); i++) { - ImmutableSet.Builder expectedInputSymbols = ImmutableSet.builder(); - for (Collection symbols : rewrittenSymbolMapping.asMap().values()) { - expectedInputSymbols.add(Iterables.get(symbols, i)); - } - rewrittenSubPlans.add(context.rewrite(node.getSources().get(i), expectedInputSymbols.build())); + rewrittenSources.add(context.rewrite(node.getSources().get(i), ImmutableSet.copyOf(node.sourceOutputLayout(i)))); } - return rewrittenSubPlans.build(); + return node.replaceChildren(rewrittenSources.build()); } @Override From 8bfc7f5f7eeeb578dab5e7fa891a780f2dd40685 Mon Sep 17 00:00:00 2001 From: David Phillips Date: Mon, 30 Mar 2020 21:58:07 -0700 Subject: [PATCH 006/519] Remove unused method --- .../prestosql/testing/AbstractTestIntegrationSmokeTest.java | 5 ----- 1 file changed, 5 deletions(-) diff --git a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestIntegrationSmokeTest.java b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestIntegrationSmokeTest.java index b9be072cafc9..7636d4d5800f 100644 --- a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestIntegrationSmokeTest.java +++ b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestIntegrationSmokeTest.java @@ -24,11 +24,6 @@ public abstract class AbstractTestIntegrationSmokeTest extends AbstractTestQueryFramework { - protected boolean isParameterizedVarcharSupported() - { - return true; - } - /** * Ensure the tests are run with {@link DistributedQueryRunner}. E.g. {@link LocalQueryRunner} takes some * shortcuts, not exercising certain aspects. 
From 478d6f7357205f8eedee0a292b5f12e13eedb4f1 Mon Sep 17 00:00:00 2001 From: David Phillips Date: Mon, 30 Mar 2020 21:57:30 -0700 Subject: [PATCH 007/519] Test concurrent scans in AbstractTestIntegrationSmokeTest --- .../testing/AbstractTestIntegrationSmokeTest.java | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestIntegrationSmokeTest.java b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestIntegrationSmokeTest.java index 7636d4d5800f..c4b428af065f 100644 --- a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestIntegrationSmokeTest.java +++ b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestIntegrationSmokeTest.java @@ -19,6 +19,8 @@ import static io.prestosql.spi.type.VarcharType.VARCHAR; import static io.prestosql.testing.QueryAssertions.assertContains; import static io.prestosql.testing.assertions.Assert.assertEquals; +import static java.lang.String.join; +import static java.util.Collections.nCopies; import static org.assertj.core.api.Assertions.assertThat; public abstract class AbstractTestIntegrationSmokeTest @@ -99,6 +101,13 @@ public void testRangePredicate() "WHERE orderkey BETWEEN 10 AND 50"); } + @Test + public void testConcurrentScans() + { + String unionMultipleTimes = join(" UNION ALL ", nCopies(25, "SELECT * FROM orders")); + assertQuery("SELECT sum(if(rand() >= 0, orderkey)) FROM (" + unionMultipleTimes + ")", "VALUES 11246812500"); + } + @Test public void testSelectAll() { From 0c13d169dcf189d4a913187de6e33c212df92bfa Mon Sep 17 00:00:00 2001 From: David Phillips Date: Sun, 22 Mar 2020 13:39:07 -0700 Subject: [PATCH 008/519] Move engine-only tests --- .../TestAccumuloDistributedQueries.java | 12 - .../TestCassandraDistributedQueries.java | 12 - .../iceberg/TestIcebergDistributed.java | 12 - .../kudu/TestKuduDistributedQueries.java | 12 - .../mysql/TestMySqlDistributedQueries.java | 12 - .../testing/AbstractTestQueries.java | 5256 
+++-------------- presto-tests/pom.xml | 5 + .../tests/AbstractTestEngineOnlyQueries.java | 3497 +++++++++++ .../TestDistributedEngineOnlyQueries.java | 43 + .../tests/TestDistributedSpilledQueries.java | 7 - .../tests/TestTpchDistributedQueries.java | 61 - .../TestThriftDistributedQueries.java | 6 - 12 files changed, 4437 insertions(+), 4498 deletions(-) diff --git a/presto-accumulo/src/test/java/io/prestosql/plugin/accumulo/TestAccumuloDistributedQueries.java b/presto-accumulo/src/test/java/io/prestosql/plugin/accumulo/TestAccumuloDistributedQueries.java index fc0941ee2cdc..04db9ffc5c27 100644 --- a/presto-accumulo/src/test/java/io/prestosql/plugin/accumulo/TestAccumuloDistributedQueries.java +++ b/presto-accumulo/src/test/java/io/prestosql/plugin/accumulo/TestAccumuloDistributedQueries.java @@ -342,18 +342,6 @@ public void testCreateTableEmptyColumns() } } - @Override - public void testDescribeOutput() - { - // this connector uses a non-canonical type for varchar columns in tpch - } - - @Override - public void testDescribeOutputNamedAndUnnamed() - { - // this connector uses a non-canonical type for varchar columns in tpch - } - @Override public void testCommentTable() { diff --git a/presto-cassandra/src/test/java/io/prestosql/plugin/cassandra/TestCassandraDistributedQueries.java b/presto-cassandra/src/test/java/io/prestosql/plugin/cassandra/TestCassandraDistributedQueries.java index 83ec47aa0990..6c2737846af6 100644 --- a/presto-cassandra/src/test/java/io/prestosql/plugin/cassandra/TestCassandraDistributedQueries.java +++ b/presto-cassandra/src/test/java/io/prestosql/plugin/cassandra/TestCassandraDistributedQueries.java @@ -141,18 +141,6 @@ public void testShowColumns() assertEquals(actual, expectedParametrizedVarchar); } - @Override - public void testDescribeOutput() - { - // this connector uses a non-canonical type for varchar columns in tpch - } - - @Override - public void testDescribeOutputNamedAndUnnamed() - { - // this connector uses a non-canonical type 
for varchar columns in tpch - } - @Override public void testWrittenStats() { diff --git a/presto-iceberg/src/test/java/io/prestosql/plugin/iceberg/TestIcebergDistributed.java b/presto-iceberg/src/test/java/io/prestosql/plugin/iceberg/TestIcebergDistributed.java index 70704d54b636..b752ee9fc0d0 100644 --- a/presto-iceberg/src/test/java/io/prestosql/plugin/iceberg/TestIcebergDistributed.java +++ b/presto-iceberg/src/test/java/io/prestosql/plugin/iceberg/TestIcebergDistributed.java @@ -51,18 +51,6 @@ public void testDelete() // Neither row delete nor partition delete is supported yet } - @Override - public void testDescribeOutput() - { - // Iceberg does not support parameterized varchar - } - - @Override - public void testDescribeOutputNamedAndUnnamed() - { - // Iceberg does not support parameterized varchar - } - @Override public void testCommentTable() { diff --git a/presto-kudu/src/test/java/io/prestosql/plugin/kudu/TestKuduDistributedQueries.java b/presto-kudu/src/test/java/io/prestosql/plugin/kudu/TestKuduDistributedQueries.java index e49b19721cca..d3fc45273626 100644 --- a/presto-kudu/src/test/java/io/prestosql/plugin/kudu/TestKuduDistributedQueries.java +++ b/presto-kudu/src/test/java/io/prestosql/plugin/kudu/TestKuduDistributedQueries.java @@ -127,18 +127,6 @@ public void testShowColumns() assertEquals(actual, expectedParametrizedVarchar); } - @Override - public void testDescribeOutput() - { - // this connector uses a non-canonical type for varchar columns in tpch - } - - @Override - public void testDescribeOutputNamedAndUnnamed() - { - // this connector uses a non-canonical type for varchar columns in tpch - } - @Override public void testCommentTable() { diff --git a/presto-mysql/src/test/java/io/prestosql/plugin/mysql/TestMySqlDistributedQueries.java b/presto-mysql/src/test/java/io/prestosql/plugin/mysql/TestMySqlDistributedQueries.java index 99e1e5c442cf..96a6e38ef21e 100644 --- 
a/presto-mysql/src/test/java/io/prestosql/plugin/mysql/TestMySqlDistributedQueries.java +++ b/presto-mysql/src/test/java/io/prestosql/plugin/mysql/TestMySqlDistributedQueries.java @@ -106,18 +106,6 @@ public void testInsertWithCoercion() // this connector uses a non-canonical type for varchar columns in tpch } - @Override - public void testDescribeOutput() - { - // this connector uses a non-canonical type for varchar columns in tpch - } - - @Override - public void testDescribeOutputNamedAndUnnamed() - { - // this connector uses a non-canonical type for varchar columns in tpch - } - @Override public void testCommentTable() { diff --git a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestQueries.java b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestQueries.java index 9e73c5121c39..507d1cd76530 100644 --- a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestQueries.java +++ b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestQueries.java @@ -13,56 +13,28 @@ */ package io.prestosql.testing; -import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.ImmutableMultimap; import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Maps; -import com.google.common.collect.Multimap; -import com.google.common.collect.Multimaps; -import com.google.common.collect.Ordering; -import io.prestosql.Session; -import io.prestosql.SystemSessionProperties; import io.prestosql.metadata.FunctionListBuilder; import io.prestosql.metadata.SqlFunction; import io.prestosql.spi.session.PropertyMetadata; -import io.prestosql.spi.type.SqlTimestampWithTimeZone; import io.prestosql.tests.QueryTemplate; import io.prestosql.tpch.TpchTable; -import io.prestosql.type.SqlIntervalDayTime; -import io.prestosql.type.SqlIntervalYearMonth; import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics; -import 
org.intellij.lang.annotations.Language; -import org.testng.annotations.DataProvider; import org.testng.annotations.Test; -import java.math.BigDecimal; -import java.time.ZonedDateTime; -import java.time.format.DateTimeFormatter; -import java.util.HashSet; import java.util.List; -import java.util.Optional; import java.util.Set; -import java.util.stream.IntStream; import static com.google.common.collect.ImmutableSet.toImmutableSet; import static com.google.common.collect.Iterables.getOnlyElement; -import static io.prestosql.SystemSessionProperties.IGNORE_DOWNSTREAM_PREFERENCES; import static io.prestosql.connector.informationschema.InformationSchemaTable.INFORMATION_SCHEMA; import static io.prestosql.operator.scalar.ApplyFunction.APPLY_FUNCTION; import static io.prestosql.operator.scalar.InvokeFunction.INVOKE_FUNCTION; import static io.prestosql.spi.type.BigintType.BIGINT; -import static io.prestosql.spi.type.BooleanType.BOOLEAN; -import static io.prestosql.spi.type.DecimalType.createDecimalType; -import static io.prestosql.spi.type.DoubleType.DOUBLE; import static io.prestosql.spi.type.VarcharType.VARCHAR; -import static io.prestosql.sql.tree.ExplainType.Type.DISTRIBUTED; -import static io.prestosql.sql.tree.ExplainType.Type.IO; -import static io.prestosql.sql.tree.ExplainType.Type.LOGICAL; import static io.prestosql.testing.MaterializedResult.resultBuilder; import static io.prestosql.testing.QueryAssertions.assertContains; -import static io.prestosql.testing.QueryAssertions.assertEqualsIgnoreOrder; import static io.prestosql.testing.StatefulSleepingSum.STATEFUL_SLEEPING_SUM; import static io.prestosql.testing.TestingAccessControlManager.TestingPrivilegeType.CREATE_TABLE; import static io.prestosql.testing.TestingAccessControlManager.TestingPrivilegeType.DELETE_TABLE; @@ -72,18 +44,13 @@ import static io.prestosql.testing.TestingAccessControlManager.TestingPrivilegeType.SELECT_COLUMN; import static 
io.prestosql.testing.TestingAccessControlManager.TestingPrivilegeType.SHOW_CREATE_TABLE; import static io.prestosql.testing.TestingAccessControlManager.privilege; -import static io.prestosql.testing.TestingSession.TESTING_CATALOG; -import static io.prestosql.testing.TestngUtils.toDataProvider; import static io.prestosql.testing.assertions.Assert.assertEquals; import static io.prestosql.tests.QueryTemplate.parameter; import static io.prestosql.tests.QueryTemplate.queryTemplate; -import static io.prestosql.type.UnknownType.UNKNOWN; import static java.lang.String.format; import static java.util.stream.Collectors.joining; -import static java.util.stream.Collectors.toList; import static java.util.stream.IntStream.range; import static org.assertj.core.api.Assertions.assertThat; -import static org.testng.Assert.assertFalse; import static org.testng.Assert.assertNotEquals; import static org.testng.Assert.assertTrue; @@ -132,34 +99,8 @@ public abstract class AbstractTestQueries 99.0, false)); - private static final DateTimeFormatter ZONED_DATE_TIME_FORMAT = DateTimeFormatter.ofPattern(SqlTimestampWithTimeZone.JSON_FORMAT); - private static final String UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG = "line .*: Given correlated subquery is not supported"; - @Test - public void testParsingError() - { - assertQueryFails("SELECT foo FROM", "line 1:16: mismatched input ''. 
Expecting: .*"); - } - - @Test - public void selectLargeInterval() - { - MaterializedResult result = computeActual("SELECT INTERVAL '30' DAY"); - assertEquals(result.getRowCount(), 1); - assertEquals(result.getMaterializedRows().get(0).getField(0), new SqlIntervalDayTime(30, 0, 0, 0, 0)); - - result = computeActual("SELECT INTERVAL '" + Short.MAX_VALUE + "' YEAR"); - assertEquals(result.getRowCount(), 1); - assertEquals(result.getMaterializedRows().get(0).getField(0), new SqlIntervalYearMonth(Short.MAX_VALUE, 0)); - } - - @Test - public void selectNull() - { - assertQuery("SELECT NULL"); - } - @Test public void testAggregationOverUnknown() { @@ -193,571 +134,6 @@ public void testNonDeterministic() assertTrue(distinctCount >= 8, "rand() must produce different rows"); } - @Test - public void testLambdaCapture() - { - // Test for lambda expression without capture can be found in TestLambdaExpression - - assertQuery("SELECT apply(0, x -> x + c1) FROM (VALUES 1) t(c1)", "VALUES 1"); - assertQuery("SELECT apply(0, x -> x + t.c1) FROM (VALUES 1) t(c1)", "VALUES 1"); - assertQuery("SELECT apply(c1, x -> x + c2) FROM (VALUES (1, 2), (3, 4), (5, 6)) t(c1, c2)", "VALUES 3, 7, 11"); - assertQuery("SELECT apply(c1 + 10, x -> apply(x + 100, y -> c1)) FROM (VALUES 1) t(c1)", "VALUES 1"); - assertQuery("SELECT apply(c1 + 10, x -> apply(x + 100, y -> t.c1)) FROM (VALUES 1) t(c1)", "VALUES 1"); - assertQuery("SELECT apply(CAST(ROW(10) AS ROW(x INTEGER)), r -> r.x)", "VALUES 10"); - assertQuery("SELECT apply(CAST(ROW(10) AS ROW(x INTEGER)), r -> r.x) FROM (VALUES 1) u(x)", "VALUES 10"); - assertQuery("SELECT apply(CAST(ROW(10) AS ROW(x INTEGER)), r -> r.x) FROM (VALUES 1) r(x)", "VALUES 10"); - assertQuery("SELECT apply(CAST(ROW(10) AS ROW(x INTEGER)), r -> apply(3, y -> y + r.x)) FROM (VALUES 1) u(x)", "VALUES 13"); - assertQuery("SELECT apply(CAST(ROW(10) AS ROW(x INTEGER)), r -> apply(3, y -> y + r.x)) FROM (VALUES 1) r(x)", "VALUES 13"); - assertQuery("SELECT apply(CAST(ROW(10) 
AS ROW(x INTEGER)), r -> apply(3, y -> y + r.x)) FROM (VALUES 'a') r(x)", "VALUES 13"); - assertQuery("SELECT apply(CAST(ROW(10) AS ROW(x INTEGER)), z -> apply(3, y -> y + r.x)) FROM (VALUES 1) r(x)", "VALUES 4"); - - // reference lambda variable of the not-immediately-enclosing lambda - assertQuery("SELECT apply(1, x -> apply(10, y -> x)) FROM (VALUES 1000) t(x)", "VALUES 1"); - assertQuery("SELECT apply(1, x -> apply(10, y -> x)) FROM (VALUES 'abc') t(x)", "VALUES 1"); - assertQuery("SELECT apply(1, x -> apply(10, y -> apply(100, z -> x))) FROM (VALUES 1000) t(x)", "VALUES 1"); - assertQuery("SELECT apply(1, x -> apply(10, y -> apply(100, z -> x))) FROM (VALUES 'abc') t(x)", "VALUES 1"); - - // in join post-filter - assertQuery("SELECT * FROM (VALUES true) t(x) left JOIN (VALUES 1001) t2(y) ON (apply(false, z -> apply(false, y -> x)))", "SELECT true, 1001"); - } - - @Test - public void testLambdaInAggregationContext() - { - assertQuery("SELECT apply(sum(x), i -> i * i) FROM (VALUES 1, 2, 3, 4, 5) t(x)", "SELECT 225"); - assertQuery("SELECT apply(x, i -> i - 1), sum(y) FROM (VALUES (1, 10), (1, 20), (2, 50)) t(x,y) GROUP BY x", "VALUES (0, 30), (1, 50)"); - assertQuery("SELECT x, apply(sum(y), i -> i * 10) FROM (VALUES (1, 10), (1, 20), (2, 50)) t(x,y) GROUP BY x", "VALUES (1, 300), (2, 500)"); - assertQuery("SELECT apply(8, x -> x + 1) FROM (VALUES (1, 2)) t(x,y) GROUP BY y", "SELECT 9"); - - assertQuery("SELECT apply(CAST(ROW(1) AS ROW(someField BIGINT)), x -> x.someField) FROM (VALUES (1,2)) t(x,y) GROUP BY y", "SELECT 1"); - assertQuery("SELECT apply(sum(x), x -> x * x) FROM (VALUES 1, 2, 3, 4, 5) t(x)", "SELECT 225"); - // nested lambda expression uses the same variable name - assertQuery("SELECT apply(sum(x), x -> apply(x, x -> x * x)) FROM (VALUES 1, 2, 3, 4, 5) t(x)", "SELECT 225"); - } - - @Test - public void testLambdaInSubqueryContext() - { - assertQuery("SELECT apply(x, i -> i * i) FROM (SELECT 10 x)", "SELECT 100"); - assertQuery("SELECT apply((SELECT 
10), i -> i * i)", "SELECT 100"); - - // with capture - assertQuery("SELECT apply(x, i -> i * x) FROM (SELECT 10 x)", "SELECT 100"); - assertQuery("SELECT apply(x, y -> y * x) FROM (SELECT 10 x, 3 y)", "SELECT 100"); - assertQuery("SELECT apply(x, z -> y * x) FROM (SELECT 10 x, 3 y)", "SELECT 30"); - } - - @Test - public void testLambdaInValuesAndUnnest() - { - assertQuery("SELECT * FROM UNNEST(transform(sequence(1, 5), x -> x * x))", "SELECT * FROM (VALUES 1, 4, 9, 16, 25)"); - assertQuery("SELECT x[5] FROM (VALUES transform(sequence(1, 5), x -> x * x)) t(x)", "SELECT 25"); - } - - @Test - public void testTryLambdaRepeated() - { - assertQuery("SELECT x + x FROM (SELECT apply(a, i -> i * i) x FROM (VALUES 3) t(a))", "SELECT 18"); - assertQuery("SELECT apply(a, i -> i * i) + apply(a, i -> i * i) FROM (VALUES 3) t(a)", "SELECT 18"); - assertQuery("SELECT apply(a, i -> i * i), apply(a, i -> i * i) FROM (VALUES 3) t(a)", "SELECT 9, 9"); - assertQuery("SELECT try(10 / a) + try(10 / a) FROM (VALUES 5) t(a)", "SELECT 4"); - assertQuery("SELECT try(10 / a), try(10 / a) FROM (VALUES 5) t(a)", "SELECT 2, 2"); - } - - @Test - public void testNonDeterministicFilter() - { - MaterializedResult materializedResult = computeActual("SELECT u FROM ( SELECT if(rand() > 0.5, 0, 1) AS u ) WHERE u <> u"); - assertEquals(materializedResult.getRowCount(), 0); - - materializedResult = computeActual("SELECT u, v FROM ( SELECT if(rand() > 0.5, 0, 1) AS u, 4*4 AS v ) WHERE u <> u and v > 10"); - assertEquals(materializedResult.getRowCount(), 0); - - materializedResult = computeActual("SELECT u, v, w FROM ( SELECT if(rand() > 0.5, 0, 1) AS u, 4*4 AS v, 'abc' AS w ) WHERE v > 10"); - assertEquals(materializedResult.getRowCount(), 1); - } - - @Test - public void testNonDeterministicProjection() - { - MaterializedResult materializedResult = computeActual("SELECT r, r + 1 FROM (SELECT rand(100) r FROM orders) LIMIT 10"); - assertEquals(materializedResult.getRowCount(), 10); - for (MaterializedRow 
materializedRow : materializedResult) { - assertEquals(materializedRow.getFieldCount(), 2); - assertEquals(((Number) materializedRow.getField(0)).intValue() + 1, materializedRow.getField(1)); - } - } - - @Test - public void testMapSubscript() - { - assertQuery("SELECT map(array[1], array['aa'])[1]", "SELECT 'aa'"); - assertQuery("SELECT map(array['a'], array['aa'])['a']", "SELECT 'aa'"); - assertQuery("SELECT map(array[array[1,1]], array['a'])[array[1,1]]", "SELECT 'a'"); - assertQuery("SELECT map(array[(1,2)], array['a'])[(1,2)]", "SELECT 'a'"); - } - - @Test - public void testRowSubscript() - { - // Subscript on Row with unnamed fields - assertQuery("SELECT ROW (1, 'a', true)[2]", "SELECT 'a'"); - assertQuery("SELECT r[2] FROM (VALUES (ROW (ROW (1, 'a', true)))) AS v(r)", "SELECT 'a'"); - assertQuery("SELECT r[1], r[2] FROM (SELECT ROW (name, regionkey) FROM nation ORDER BY name LIMIT 1) t(r)", "VALUES ('ALGERIA', 0)"); - - // Subscript on Row with named fields - assertQuery("SELECT (CAST (ROW (1, 'a', 2 ) AS ROW (field1 bigint, field2 varchar(1), field3 bigint)))[2]", "SELECT 'a'"); - - // Subscript on nested Row - assertQuery("SELECT ROW (1, 'a', ROW (false, 2, 'b'))[3][3]", "SELECT 'b'"); - - // Row subscript in filter condition - assertQuery("SELECT orderstatus FROM orders WHERE ROW (orderkey, custkey)[1] = 100", "SELECT 'O'"); - - // Row subscript in join condition - assertQuery("SELECT n.name, r.name FROM nation n JOIN region r ON ROW (n.name, n.regionkey)[2] = ROW (r.name, r.regionkey)[2] ORDER BY n.name LIMIT 1", "VALUES ('ALGERIA', 'AFRICA')"); - } - - @Test - public void testVarbinary() - { - assertQuery("SELECT LENGTH(x) FROM (SELECT from_base64('gw==') AS x)", "SELECT 1"); - assertQuery("SELECT LENGTH(from_base64('gw=='))", "SELECT 1"); - } - - @Test - public void testRowFieldAccessor() - { - //Dereference only - assertQuery("SELECT a.col0 FROM (VALUES ROW (CAST(ROW(1, 2) AS ROW(col0 integer, col1 integer)))) AS t (a)", "SELECT 1"); - 
assertQuery("SELECT a.col0 FROM (VALUES ROW (CAST(ROW(1.0E0, 2.0E0) AS ROW(col0 integer, col1 integer)))) AS t (a)", "SELECT 1.0"); - assertQuery("SELECT a.col0 FROM (VALUES ROW (CAST(ROW(TRUE, FALSE) AS ROW(col0 boolean, col1 boolean)))) AS t (a)", "SELECT TRUE"); - assertQuery("SELECT a.col1 FROM (VALUES ROW (CAST(ROW(1.0, 'kittens') AS ROW(col0 varchar, col1 varchar)))) AS t (a)", "SELECT 'kittens'"); - assertQuery("SELECT a.col2.col1 FROM (VALUES ROW(CAST(ROW(1.0, ARRAY[2], row(3, 4.0)) AS ROW(col0 double, col1 array(int), col2 row(col0 integer, col1 double))))) t(a)", "SELECT 4.0"); - - // mixture of row field reference and table field reference - assertQuery("SELECT CAST(row(1, t.x) AS row(col0 bigint, col1 bigint)).col1 FROM (VALUES 1, 2, 3) t(x)", "SELECT * FROM (VALUES 1, 2, 3)"); - assertQuery("SELECT Y.col1 FROM (SELECT CAST(row(1, t.x) AS row(col0 bigint, col1 bigint)) AS Y FROM (VALUES 1, 2, 3) t(x)) test_t", "SELECT * FROM (VALUES 1, 2, 3)"); - - // Subscript + Dereference - assertQuery("SELECT a.col1[2] FROM (VALUES ROW(CAST(ROW(1.0, ARRAY[22, 33, 44, 55], row(3, 4.0E0)) AS ROW(col0 double, col1 array(integer), col2 row(col0 integer, col1 double))))) t(a)", "SELECT 33"); - assertQuery("SELECT a.col1[2].col0, a.col1[2].col1 FROM (VALUES ROW(cast(row(1.0, ARRAY[row(31, 4.1E0), row(32, 4.2E0)], row(3, 4.0E0)) AS ROW(col0 double, col1 array(row(col0 integer, col1 double)), col2 row(col0 integer, col1 double))))) t(a)", "SELECT 32, 4.2"); - - assertQuery("SELECT CAST(row(11, 12) AS row(col0 bigint, col1 bigint)).col0", "SELECT 11"); - } - - @Test - public void testRowFieldAccessorInAggregate() - { - assertQuery("SELECT a.col0, SUM(a.col1[2]), SUM(a.col2.col0), SUM(a.col2.col1) FROM " + - "(VALUES " + - "ROW(CAST(ROW(1.0, ARRAY[2, 13, 4], row(11, 4.1E0)) AS ROW(col0 double, col1 array(integer), col2 row(col0 integer, col1 double)))), " + - "ROW(CAST(ROW(2.0, ARRAY[2, 23, 4], row(12, 14.0E0)) AS ROW(col0 double, col1 array(integer), col2 row(col0 integer, 
col1 double)))), " + - "ROW(CAST(ROW(1.0, ARRAY[22, 33, 44], row(13, 5.0E0)) AS ROW(col0 double, col1 array(integer), col2 row(col0 integer, col1 double))))) t(a) " + - "GROUP BY a.col0", - "SELECT * FROM VALUES (1.0, 46, 24, 9.1), (2.0, 23, 12, 14.0)"); - - assertQuery("SELECT a.col2.col0, SUM(a.col0), SUM(a.col1[2]), SUM(a.col2.col1) FROM " + - "(VALUES " + - "ROW(CAST(ROW(1.0, ARRAY[2, 13, 4], row(11, 4.1E0)) AS ROW(col0 double, col1 array(integer), col2 row(col0 integer, col1 double)))), " + - "ROW(CAST(ROW(2.0, ARRAY[2, 23, 4], row(11, 14.0E0)) AS ROW(col0 double, col1 array(integer), col2 row(col0 integer, col1 double)))), " + - "ROW(CAST(ROW(7.0, ARRAY[22, 33, 44], row(13, 5.0E0)) AS ROW(col0 double, col1 array(integer), col2 row(col0 integer, col1 double))))) t(a) " + - "GROUP BY a.col2.col0", - "SELECT * FROM VALUES (11, 3.0, 36, 18.1), (13, 7.0, 33, 5.0)"); - - assertQuery("SELECT a.col1[1].col0, SUM(a.col0), SUM(a.col1[1].col1), SUM(a.col1[2].col0), SUM(a.col2.col1) FROM " + - "(VALUES " + - "ROW(CAST(ROW(1.0, ARRAY[row(31, 4.5E0), row(12, 4.2E0)], row(3, 4.0E0)) AS ROW(col0 double, col1 array(row(col0 integer, col1 double)), col2 row(col0 integer, col1 double)))), " + - "ROW(CAST(ROW(3.1, ARRAY[row(41, 3.1E0), row(32, 4.2E0)], row(6, 6.0E0)) AS ROW(col0 double, col1 array(row(col0 integer, col1 double)), col2 row(col0 integer, col1 double)))), " + - "ROW(CAST(ROW(2.2, ARRAY[row(31, 4.2E0), row(22, 4.2E0)], row(5, 4.0E0)) AS ROW(col0 double, col1 array(row(col0 integer, col1 double)), col2 row(col0 integer, col1 double))))) t(a) " + - "GROUP BY a.col1[1].col0", - "SELECT * FROM VALUES (31, 3.2, 8.7, 34, 8.0), (41, 3.1, 3.1, 32, 6.0)"); - - assertQuery("SELECT a.col1[1].col0, SUM(a.col0), SUM(a.col1[1].col1), SUM(a.col1[2].col0), SUM(a.col2.col1) FROM " + - "(VALUES " + - "ROW(CAST(ROW(2.2, ARRAY[row(31, 4.2E0), row(22, 4.2E0)], row(5, 4.0E0)) AS ROW(col0 double, col1 array(row(col0 integer, col1 double)), col2 row(col0 integer, col1 double)))), " + - 
"ROW(CAST(ROW(1.0, ARRAY[row(31, 4.5E0), row(12, 4.2E0)], row(3, 4.1E0)) AS ROW(col0 double, col1 array(row(col0 integer, col1 double)), col2 row(col0 integer, col1 double)))), " + - "ROW(CAST(ROW(3.1, ARRAY[row(41, 3.1E0), row(32, 4.2E0)], row(6, 6.0E0)) AS ROW(col0 double, col1 array(row(col0 integer, col1 double)), col2 row(col0 integer, col1 double)))), " + - "ROW(CAST(ROW(3.3, ARRAY[row(41, 3.1E0), row(32, 4.2E0)], row(6, 6.0E0)) AS ROW(col0 double, col1 array(row(col0 integer, col1 double)), col2 row(col0 integer, col1 double)))) " + - ") t(a) " + - "GROUP BY a.col1[1]", - "SELECT * FROM VALUES (31, 2.2, 4.2, 22, 4.0), (31, 1.0, 4.5, 12, 4.1), (41, 6.4, 6.2, 64, 12.0)"); - - assertQuery("SELECT a.col1[2], SUM(a.col0), SUM(a.col1[1]), SUM(a.col2.col1) FROM " + - "(VALUES " + - "ROW(CAST(ROW(1.0, ARRAY[2, 13, 4], row(11, 4.1E0)) AS ROW(col0 double, col1 array(integer), col2 row(col0 integer, col1 double)))), " + - "ROW(CAST(ROW(2.0, ARRAY[2, 13, 4], row(12, 14.0E0)) AS ROW(col0 double, col1 array(integer), col2 row(col0 integer, col1 double)))), " + - "ROW(CAST(ROW(7.0, ARRAY[22, 33, 44], row(13, 5.0E0)) AS ROW(col0 double, col1 array(integer), col2 row(col0 integer, col1 double))))) t(a) " + - "GROUP BY a.col1[2]", - "SELECT * FROM VALUES (13, 3.0, 4, 18.1), (33, 7.0, 22, 5.0)"); - - assertQuery("SELECT a.col2.col0, SUM(a.col2.col1) FROM " + - "(VALUES " + - "ROW(CAST(ROW(2.2, ARRAY[row(31, 4.2E0), row(22, 4.2E0)], row(5, 4.0E0)) AS ROW(col0 double, col1 array(row(col0 integer, col1 double)), col2 row(col0 integer, col1 double)))), " + - "ROW(CAST(ROW(1.0, ARRAY[row(31, 4.5E0), row(12, 4.2E0)], row(3, 4.1E0)) AS ROW(col0 double, col1 array(row(col0 integer, col1 double)), col2 row(col0 integer, col1 double)))), " + - "ROW(CAST(ROW(3.1, ARRAY[row(41, 3.1E0), row(32, 4.2E0)], row(6, 6.0E0)) AS ROW(col0 double, col1 array(row(col0 integer, col1 double)), col2 row(col0 integer, col1 double)))), " + - "ROW(CAST(ROW(3.3, ARRAY[row(41, 3.1E0), row(32, 4.2E0)], row(6, 
6.0E0)) AS ROW(col0 double, col1 array(row(col0 integer, col1 double)), col2 row(col0 integer, col1 double)))) " + - ") t(a) " + - "GROUP BY a.col2", - "SELECT * FROM VALUES (5, 4.0), (3, 4.1), (6, 12.0)"); - - assertQuery("SELECT a.col2.col0, a.col0, SUM(a.col2.col1) FROM " + - "(VALUES " + - "ROW(CAST(ROW(1.0, ARRAY[2, 13, 4], row(11, 4.1E0)) AS ROW(col0 double, col1 array(integer), col2 row(col0 integer, col1 double)))), " + - "ROW(CAST(ROW(2.0, ARRAY[2, 23, 4], row(11, 14.0E0)) AS ROW(col0 double, col1 array(integer), col2 row(col0 integer, col1 double)))), " + - "ROW(CAST(ROW(1.5, ARRAY[2, 13, 4], row(11, 4.1E0)) AS ROW(col0 double, col1 array(integer), col2 row(col0 integer, col1 double)))), " + - "ROW(CAST(ROW(1.5, ARRAY[2, 13, 4], row(11, 4.1E0)) AS ROW(col0 double, col1 array(integer), col2 row(col0 integer, col1 double)))), " + - "ROW(CAST(ROW(7.0, ARRAY[22, 33, 44], row(13, 5.0E0)) AS ROW(col0 double, col1 array(integer), col2 row(col0 integer, col1 double))))) t(a) " + - "WHERE a.col1[2] < 30 " + - "GROUP BY 1, 2 ORDER BY 1", - "SELECT * FROM VALUES (11, 1.0, 4.1), (11, 1.5, 8.2), (11, 2.0, 14.0)"); - - assertQuery("SELECT a[1].col0, COUNT(1) FROM " + - "(VALUES " + - "(ROW(CAST(ARRAY[row(31, 4.2E0), row(22, 4.2E0)] AS ARRAY(ROW(col0 integer, col1 double))))), " + - "(ROW(CAST(ARRAY[row(31, 4.5E0), row(12, 4.2E0)] AS ARRAY(ROW(col0 integer, col1 double))))), " + - "(ROW(CAST(ARRAY[row(41, 3.1E0), row(32, 4.2E0)] AS ARRAY(ROW(col0 integer, col1 double))))), " + - "(ROW(CAST(ARRAY[row(31, 3.1E0), row(32, 4.2E0)] AS ARRAY(ROW(col0 integer, col1 double))))) " + - ") t(a) " + - "GROUP BY 1 " + - "ORDER BY 2 DESC", - "SELECT * FROM VALUES (31, 3), (41, 1)"); - } - - @Test - public void testRowFieldAccessorInJoin() - { - assertQuery("" + - "SELECT t.a.col1, custkey, orderkey FROM " + - "(VALUES " + - "ROW(CAST(ROW(1, 11) AS ROW(col0 integer, col1 integer))), " + - "ROW(CAST(ROW(2, 22) AS ROW(col0 integer, col1 integer))), " + - "ROW(CAST(ROW(3, 33) AS ROW(col0 
integer, col1 integer)))) t(a) " + - "INNER JOIN orders " + - "ON t.a.col0 = orders.orderkey", - "SELECT * FROM VALUES (11, 370, 1), (22, 781, 2), (33, 1234, 3)"); - } - - @Test - public void testRowCast() - { - assertQuery("SELECT CAST(row(1, 2) AS row(aa bigint, bb boolean)).aa", "SELECT 1"); - assertQuery("SELECT CAST(row(1, 2) AS row(aa bigint, bb boolean)).bb", "SELECT true"); - assertQuery("SELECT CAST(row(1, 2) AS row(aa bigint, bb varchar)).bb", "SELECT '2'"); - assertQuery("SELECT CAST(row(true, array[0, 2]) AS row(aa boolean, bb array(boolean))).bb[1]", "SELECT false"); - assertQuery("SELECT CAST(row(0.1, array[0, 2], row(1, 0.5)) AS row(aa bigint, bb array(boolean), cc row(dd varchar, ee varchar))).cc.ee", "SELECT '0.5'"); - assertQuery("SELECT CAST(array[row(0.1, array[0, 2], row(1, 0.5))] AS array)[1].cc.ee", "SELECT '0.5'"); - } - - @Test - public void testDereferenceInSubquery() - { - assertQuery("" + - "SELECT x " + - "FROM (" + - " SELECT a.x" + - " FROM (VALUES 1, 2, 3) a(x)" + - ") " + - "GROUP BY x", - "SELECT * FROM VALUES 1, 2, 3"); - - assertQuery("" + - "SELECT t2.*, max(t1.b) AS max_b " + - "FROM (VALUES (1, 'a'), (2, 'b'), (1, 'c'), (3, 'd')) t1(a, b) " + - "INNER JOIN " + - "(VALUES 1, 2, 3, 4) t2(a) " + - "ON t1.a = t2.a " + - "GROUP BY t2.a", - "SELECT * FROM VALUES (1, 'c'), (2, 'b'), (3, 'd')"); - - assertQuery("" + - "SELECT t2.*, max(t1.b1) AS max_b1 " + - "FROM (VALUES (1, 'a'), (2, 'b'), (1, 'c'), (3, 'd')) t1(a1, b1) " + - "INNER JOIN " + - "(VALUES (1, 11, 111), (2, 22, 222), (3, 33, 333), (4, 44, 444)) t2(a2, b2, c2) " + - "ON t1.a1 = t2.a2 " + - "GROUP BY t2.a2, t2.b2, t2.c2", - "SELECT * FROM VALUES (1, 11, 111, 'c'), (2, 22, 222, 'b'), (3, 33, 333, 'd')"); - - assertQuery("" + - "SELECT custkey, orders2 " + - "FROM (" + - " SELECT x.custkey, SUM(x.orders) + 1 orders2 " + - " FROM ( " + - " SELECT x.custkey, COUNT(x.orderkey) orders " + - " FROM orders x " + - " WHERE x.custkey < 100 " + - " GROUP BY x.custkey " + - " ) x " + 
- " GROUP BY x.custkey" + - ") " + - "ORDER BY custkey"); - } - - @Test - public void testDereferenceInFunctionCall() - { - assertQuery("" + - "SELECT COUNT(DISTINCT custkey) " + - "FROM ( " + - " SELECT x.custkey " + - " FROM orders x " + - " WHERE custkey < 100 " + - ") t"); - } - - @Test - public void testDereferenceInComparison() - { - assertQuery("" + - "SELECT orders.custkey, orders.orderkey " + - "FROM orders " + - "WHERE orders.custkey > orders.orderkey AND orders.custkey < 200.3"); - } - - @Test - public void testMissingRowFieldInGroupBy() - { - assertQueryFails( - "SELECT a.col0, count(*) FROM (VALUES ROW(cast(ROW(1, 1) AS ROW(col0 integer, col1 integer)))) t(a)", - "line 1:8: 'a.col0' must be an aggregate expression or appear in GROUP BY clause"); - } - - @Test - public void testWhereWithRowField() - { - assertQuery("SELECT a.col0 FROM (VALUES ROW(CAST(ROW(1, 2) AS ROW(col0 integer, col1 integer)))) AS t (a) WHERE a.col0 > 0", "SELECT 1"); - assertQuery("SELECT SUM(a.col0) FROM (VALUES ROW(CAST(ROW(1, 2) AS ROW(col0 integer, col1 integer)))) AS t (a) WHERE a.col0 <= 0", "SELECT null"); - - assertQuery("SELECT a.col0 FROM (VALUES ROW(CAST(ROW(1, 2) AS ROW(col0 integer, col1 integer)))) AS t (a) WHERE a.col0 < a.col1", "SELECT 1"); - assertQuery("SELECT SUM(a.col0) FROM (VALUES ROW(CAST(ROW(1, 2) AS ROW(col0 integer, col1 integer)))) AS t (a) WHERE a.col0 < a.col1", "SELECT 1"); - assertQuery("SELECT SUM(a.col0) FROM (VALUES ROW(CAST(ROW(1, 2) AS ROW(col0 integer, col1 integer)))) AS t (a) WHERE a.col0 > a.col1", "SELECT null"); - } - - @Test - public void testUnnest() - { - assertQuery("SELECT 1 FROM (VALUES (ARRAY[1])) AS t (a) CROSS JOIN UNNEST(a)", "SELECT 1"); - assertQuery("SELECT x[1] FROM UNNEST(ARRAY[ARRAY[1, 2, 3]]) t(x)", "SELECT 1"); - assertQuery("SELECT x[1][2] FROM UNNEST(ARRAY[ARRAY[ARRAY[1, 2, 3]]]) t(x)", "SELECT 2"); - assertQuery("SELECT x[2] FROM UNNEST(ARRAY[MAP(ARRAY[1,2], ARRAY['hello', 'hi'])]) t(x)", "SELECT 'hi'"); - 
assertQuery("SELECT * FROM UNNEST(ARRAY[1, 2, 3])", "SELECT * FROM VALUES (1), (2), (3)"); - assertQuery("SELECT a FROM UNNEST(ARRAY[1, 2, 3]) t(a)", "SELECT * FROM VALUES (1), (2), (3)"); - assertQuery("SELECT a, b FROM UNNEST(ARRAY[1, 2], ARRAY[3, 4]) t(a, b)", "SELECT * FROM VALUES (1, 3), (2, 4)"); - assertQuery("SELECT a, b FROM UNNEST(ARRAY[1, 2, 3], ARRAY[4, 5]) t(a, b)", "SELECT * FROM VALUES (1, 4), (2, 5), (3, NULL)"); - assertQuery("SELECT a FROM UNNEST(ARRAY[1, 2, 3], ARRAY[4, 5]) t(a, b)", "SELECT * FROM VALUES 1, 2, 3"); - assertQuery("SELECT b FROM UNNEST(ARRAY[1, 2, 3], ARRAY[4, 5]) t(a, b)", "SELECT * FROM VALUES 4, 5, NULL"); - assertQuery("SELECT count(*) FROM UNNEST(ARRAY[1, 2, 3], ARRAY[4, 5])", "SELECT 3"); - assertQuery("SELECT a FROM UNNEST(ARRAY['kittens', 'puppies']) t(a)", "SELECT * FROM VALUES ('kittens'), ('puppies')"); - assertQuery("" + - "SELECT c " + - "FROM UNNEST(ARRAY[1, 2, 3], ARRAY[4, 5]) t(a, b) " + - "CROSS JOIN (values (8), (9)) t2(c)", - "SELECT * FROM VALUES 8, 8, 8, 9, 9, 9"); - assertQuery("" + - "SELECT a.custkey, t.e " + - "FROM (SELECT custkey, ARRAY[1, 2, 3] AS my_array FROM orders ORDER BY orderkey LIMIT 1) a " + - "CROSS JOIN UNNEST(my_array) t(e)", - "SELECT * FROM (SELECT custkey FROM orders ORDER BY orderkey LIMIT 1) CROSS JOIN (VALUES (1), (2), (3))"); - assertQuery("" + - "SELECT a.custkey, t.e " + - "FROM (SELECT custkey, ARRAY[1, 2, 3] AS my_array FROM orders ORDER BY orderkey LIMIT 1) a, " + - "UNNEST(my_array) t(e)", - "SELECT * FROM (SELECT custkey FROM orders ORDER BY orderkey LIMIT 1) CROSS JOIN (VALUES (1), (2), (3))"); - assertQuery("SELECT * FROM UNNEST(ARRAY[0, 1]) CROSS JOIN UNNEST(ARRAY[0, 1]) CROSS JOIN UNNEST(ARRAY[0, 1])", - "SELECT * FROM VALUES (0, 0, 0), (0, 0, 1), (0, 1, 0), (0, 1, 1), (1, 0, 0), (1, 0, 1), (1, 1, 0), (1, 1, 1)"); - assertQuery("SELECT * FROM UNNEST(ARRAY[0, 1]), UNNEST(ARRAY[0, 1]), UNNEST(ARRAY[0, 1])", - "SELECT * FROM VALUES (0, 0, 0), (0, 0, 1), (0, 1, 0), (0, 1, 1), 
(1, 0, 0), (1, 0, 1), (1, 1, 0), (1, 1, 1)"); - assertQuery("SELECT a, b FROM UNNEST(MAP(ARRAY[1,2], ARRAY['cat', 'dog'])) t(a, b)", "SELECT * FROM VALUES (1, 'cat'), (2, 'dog')"); - assertQuery("SELECT a, b FROM UNNEST(MAP(ARRAY[1,2], ARRAY['cat', NULL])) t(a, b)", "SELECT * FROM VALUES (1, 'cat'), (2, NULL)"); - - assertQuery("SELECT 1 FROM (VALUES (ARRAY[1])) AS t (a) CROSS JOIN UNNEST(a) WITH ORDINALITY", "SELECT 1"); - assertQuery("SELECT * FROM UNNEST(ARRAY[1, 2, 3]) WITH ORDINALITY", "SELECT * FROM VALUES (1, 1), (2, 2), (3, 3)"); - assertQuery("SELECT b FROM UNNEST(ARRAY[10, 20, 30]) WITH ORDINALITY t(a, b)", "SELECT * FROM VALUES (1), (2), (3)"); - assertQuery("SELECT a, b, c FROM UNNEST(ARRAY[10, 20, 30], ARRAY[4, 5]) WITH ORDINALITY t(a, b, c)", "SELECT * FROM VALUES (10, 4, 1), (20, 5, 2), (30, NULL, 3)"); - assertQuery("SELECT a, b FROM UNNEST(ARRAY['kittens', 'puppies']) WITH ORDINALITY t(a, b)", "SELECT * FROM VALUES ('kittens', 1), ('puppies', 2)"); - assertQuery("" + - "SELECT c " + - "FROM UNNEST(ARRAY[1, 2, 3], ARRAY[4, 5]) WITH ORDINALITY t(a, b, c) " + - "CROSS JOIN (values (8), (9)) t2(d)", - "SELECT * FROM VALUES 1, 1, 2, 2, 3, 3"); - assertQuery("" + - "SELECT a.custkey, t.e, t.f " + - "FROM (SELECT custkey, ARRAY[10, 20, 30] AS my_array FROM orders ORDER BY orderkey LIMIT 1) a " + - "CROSS JOIN UNNEST(my_array) WITH ORDINALITY t(e, f)", - "SELECT * FROM (SELECT custkey FROM orders ORDER BY orderkey LIMIT 1) CROSS JOIN (VALUES (10, 1), (20, 2), (30, 3))"); - assertQuery("" + - "SELECT a.custkey, t.e, t.f " + - "FROM (SELECT custkey, ARRAY[10, 20, 30] AS my_array FROM orders ORDER BY orderkey LIMIT 1) a, " + - "UNNEST(my_array) WITH ORDINALITY t(e, f)", - "SELECT * FROM (SELECT custkey FROM orders ORDER BY orderkey LIMIT 1) CROSS JOIN (VALUES (10, 1), (20, 2), (30, 3))"); - - assertQuery("SELECT * FROM orders, UNNEST(ARRAY[1])", "SELECT orders.*, 1 FROM orders"); - } - - @Test - public void testArrays() - { - assertQuery("SELECT a[1] FROM 
(SELECT ARRAY[orderkey] AS a FROM orders ORDER BY orderkey) t", "SELECT orderkey FROM orders"); - assertQuery("SELECT a[1 + CAST(round(rand()) AS BIGINT)] FROM (SELECT ARRAY[orderkey, orderkey] AS a FROM orders ORDER BY orderkey) t", "SELECT orderkey FROM orders"); - assertQuery("SELECT a[1] + 1 FROM (SELECT ARRAY[orderkey] AS a FROM orders ORDER BY orderkey) t", "SELECT orderkey + 1 FROM orders"); - assertQuery("SELECT a[1] FROM (SELECT ARRAY[orderkey + 1] AS a FROM orders ORDER BY orderkey) t", "SELECT orderkey + 1 FROM orders"); - assertQuery("SELECT a[1][1] FROM (SELECT ARRAY[ARRAY[orderkey + 1]] AS a FROM orders ORDER BY orderkey) t", "SELECT orderkey + 1 FROM orders"); - assertQuery("SELECT CARDINALITY(a) FROM (SELECT ARRAY[orderkey, orderkey + 1] AS a FROM orders ORDER BY orderkey) t", "SELECT 2 FROM orders"); - } - - @Test - public void testArrayAgg() - { - assertQuery("SELECT clerk, cardinality(array_agg(orderkey)) FROM orders GROUP BY clerk", "SELECT clerk, count(*) FROM orders GROUP BY clerk"); - } - - @Test - public void testReduceAgg() - { - assertQuery( - "SELECT x, reduce_agg(y, 1, (a, b) -> a * b, (a, b) -> a * b) " + - "FROM (VALUES (1, 5), (1, 6), (1, 7), (2, 8), (2, 9), (3, 10)) AS t(x, y) " + - "GROUP BY x", - "VALUES (1, 5 * 6 * 7), (2, 8 * 9), (3, 10)"); - assertQuery( - "SELECT x, reduce_agg(y, 0, (a, b) -> a + b, (a, b) -> a + b) " + - "FROM (VALUES (1, 5), (1, 6), (1, 7), (2, 8), (2, 9), (3, 10)) AS t(x, y) " + - "GROUP BY x", - "VALUES (1, 5 + 6 + 7), (2, 8 + 9), (3, 10)"); - - assertQuery( - "SELECT x, reduce_agg(y, 1, (a, b) -> a * b, (a, b) -> a * b) " + - "FROM (VALUES (1, CAST(5 AS DOUBLE)), (1, 6), (1, 7), (2, 8), (2, 9), (3, 10)) AS t(x, y) " + - "GROUP BY x", - "VALUES (1, CAST(5 AS DOUBLE) * 6 * 7), (2, 8 * 9), (3, 10)"); - assertQuery( - "SELECT x, reduce_agg(y, 0, (a, b) -> a + b, (a, b) -> a + b) " + - "FROM (VALUES (1, CAST(5 AS DOUBLE)), (1, 6), (1, 7), (2, 8), (2, 9), (3, 10)) AS t(x, y) " + - "GROUP BY x", - "VALUES (1, 
CAST(5 AS DOUBLE) + 6 + 7), (2, 8 + 9), (3, 10)"); - } - - @Test - public void testRows() - { - // Using JSON_FORMAT(CAST(_ AS JSON)) because H2 does not support ROW type - assertQuery("SELECT JSON_FORMAT(CAST(ROW(1 + 2, CONCAT('a', 'b')) AS JSON))", "SELECT '[3,\"ab\"]'"); - assertQuery("SELECT JSON_FORMAT(CAST(ROW(a + b) AS JSON)) FROM (VALUES (1, 2)) AS t(a, b)", "SELECT '[3]'"); - assertQuery("SELECT JSON_FORMAT(CAST(ROW(1, ROW(9, a, ARRAY[], NULL), ROW(1, 2)) AS JSON)) FROM (VALUES ('a')) t(a)", "SELECT '[1,[9,\"a\",[],null],[1,2]]'"); - assertQuery("SELECT JSON_FORMAT(CAST(ROW(ROW(ROW(ROW(ROW(a, b), c), d), e), f) AS JSON)) FROM (VALUES (ROW(0, 1), 2, '3', NULL, ARRAY[5], ARRAY[])) t(a, b, c, d, e, f)", - "SELECT '[[[[[[0,1],2],\"3\"],null],[5]],[]]'"); - assertQuery("SELECT JSON_FORMAT(CAST(ARRAY_AGG(ROW(a, b)) AS JSON)) FROM (VALUES (1, 2), (3, 4), (5, 6)) t(a, b)", "SELECT '[[1,2],[3,4],[5,6]]'"); - assertQuery("SELECT CONTAINS(ARRAY_AGG(ROW(a, b)), ROW(1, 2)) FROM (VALUES (1, 2), (3, 4), (5, 6)) t(a, b)", "SELECT TRUE"); - assertQuery("SELECT JSON_FORMAT(CAST(ARRAY_AGG(ROW(c, d)) AS JSON)) FROM (VALUES (ARRAY[1, 3, 5], ARRAY[2, 4, 6])) AS t(a, b) CROSS JOIN UNNEST(a, b) AS u(c, d)", - "SELECT '[[1,2],[3,4],[5,6]]'"); - assertQuery("SELECT JSON_FORMAT(CAST(ROW(x, y, z) AS JSON)) FROM (VALUES ROW(1, NULL, '3')) t(x,y,z)", "SELECT '[1,null,\"3\"]'"); - assertQuery("SELECT JSON_FORMAT(CAST(ROW(x, y, z) AS JSON)) FROM (VALUES ROW(1, CAST(NULL AS INTEGER), '3')) t(x,y,z)", "SELECT '[1,null,\"3\"]'"); - } - - @Test - public void testMaps() - { - assertQuery("SELECT m[max_key] FROM (SELECT map_agg(orderkey, orderkey) m, max(orderkey) max_key FROM orders)", "SELECT max(orderkey) FROM orders"); - // Make sure that even if the map constructor throws with the NULL key the block builders are left in a consistent state - // and the TRY() call eventually succeeds and return NULL values. 
- assertQuery("SELECT JSON_FORMAT(CAST(TRY(MAP(ARRAY[NULL], ARRAY[x])) AS JSON)) FROM (VALUES 1, 2) t(x)", "SELECT * FROM (VALUES NULL, NULL)"); - } - - @Test - public void testValues() - { - assertQuery("VALUES 1, 2, 3, 4"); - assertQuery("VALUES 1, 3, 2, 4 ORDER BY 1", "SELECT * FROM (VALUES 1, 3, 2, 4) ORDER BY 1"); - assertQuery("VALUES (1.1, 2, 'foo'), (sin(3.3), 2+2, 'bar')"); - assertQuery("VALUES (1.1, 2), (sin(3.3), 2+2) ORDER BY 1", "VALUES (sin(3.3), 2+2), (1.1, 2)"); - assertQuery("VALUES (1.1, 2), (sin(3.3), 2+2) LIMIT 1", "VALUES (1.1, 2)"); - assertQuery("SELECT * FROM (VALUES (1.1, 2), (sin(3.3), 2+2))"); - assertQuery("SELECT 1.1 in (VALUES (1.1), (2.2))", "VALUES (TRUE)"); - - assertQuery("" + - "WITH a AS (VALUES (1.1, 2), (sin(3.3), 2+2)) " + - "SELECT * FROM a", - "VALUES (1.1, 2), (sin(3.3), 2+2)"); - - // implicit coersions - assertQuery("VALUES 1, 2.2, 3, 4.4"); - assertQuery("VALUES (1, 2), (3.3, 4.4)"); - assertQuery("VALUES true, 1.0 in (1, 2, 3)"); - } - - @Test - public void testSpecialFloatingPointValues() - { - MaterializedResult actual = computeActual("SELECT nan(), infinity(), -infinity()"); - MaterializedRow row = getOnlyElement(actual.getMaterializedRows()); - assertEquals(row.getField(0), Double.NaN); - assertEquals(row.getField(1), Double.POSITIVE_INFINITY); - assertEquals(row.getField(2), Double.NEGATIVE_INFINITY); - } - - @Test - public void testMaxMinStringWithNulls() - { - assertQuery("SELECT custkey, MAX(NULLIF(orderstatus, 'O')), MIN(NULLIF(orderstatus, 'O')) FROM orders GROUP BY custkey"); - } - - @Test - public void testApproxPercentile() - { - MaterializedResult raw = computeActual("SELECT orderstatus, orderkey, totalprice FROM orders"); - - Multimap orderKeyByStatus = ArrayListMultimap.create(); - Multimap totalPriceByStatus = ArrayListMultimap.create(); - for (MaterializedRow row : raw.getMaterializedRows()) { - orderKeyByStatus.put((String) row.getField(0), ((Number) row.getField(1)).longValue()); - 
totalPriceByStatus.put((String) row.getField(0), (Double) row.getField(2)); - } - - MaterializedResult actual = computeActual("" + - "SELECT orderstatus, " + - " approx_percentile(orderkey, 0.5), " + - " approx_percentile(totalprice, 0.5)," + - " approx_percentile(orderkey, 2, 0.5)," + - " approx_percentile(totalprice, 2, 0.5)," + - " approx_percentile(orderkey, .2, 0.5)," + - " approx_percentile(totalprice, .2, 0.5)\n" + - "FROM orders\n" + - "GROUP BY orderstatus"); - - for (MaterializedRow row : actual.getMaterializedRows()) { - String status = (String) row.getField(0); - Long orderKey = ((Number) row.getField(1)).longValue(); - Double totalPrice = (Double) row.getField(2); - Long orderKeyWeighted = ((Number) row.getField(3)).longValue(); - Double totalPriceWeighted = (Double) row.getField(4); - Long orderKeyFractionalWeighted = ((Number) row.getField(5)).longValue(); - Double totalPriceFractionalWeighted = (Double) row.getField(6); - - List orderKeys = Ordering.natural().sortedCopy(orderKeyByStatus.get(status)); - List totalPrices = Ordering.natural().sortedCopy(totalPriceByStatus.get(status)); - - // verify real rank of returned value is within 1% of requested rank - assertTrue(orderKey >= orderKeys.get((int) (0.49 * orderKeys.size()))); - assertTrue(orderKey <= orderKeys.get((int) (0.51 * orderKeys.size()))); - - assertTrue(orderKeyWeighted >= orderKeys.get((int) (0.49 * orderKeys.size()))); - assertTrue(orderKeyWeighted <= orderKeys.get((int) (0.51 * orderKeys.size()))); - - assertTrue(orderKeyFractionalWeighted >= orderKeys.get((int) (0.49 * orderKeys.size()))); - assertTrue(orderKeyFractionalWeighted <= orderKeys.get((int) (0.51 * orderKeys.size()))); - - assertTrue(totalPrice >= totalPrices.get((int) (0.49 * totalPrices.size()))); - assertTrue(totalPrice <= totalPrices.get((int) (0.51 * totalPrices.size()))); - - assertTrue(totalPriceWeighted >= totalPrices.get((int) (0.49 * totalPrices.size()))); - assertTrue(totalPriceWeighted <= totalPrices.get((int) 
(0.51 * totalPrices.size()))); - - assertTrue(totalPriceFractionalWeighted >= totalPrices.get((int) (0.49 * totalPrices.size()))); - assertTrue(totalPriceFractionalWeighted <= totalPrices.get((int) (0.51 * totalPrices.size()))); - } - } - @Test public void testComplexQuery() { @@ -772,15 +148,6 @@ public void testComplexQuery() "VALUES (7, 5), (6, 4), (5, 3)"); } - @Test - public void testWhereNull() - { - // This query is has this strange shape to force the compiler to leave a true on the stack - // with the null flag set so if the filter method is not handling nulls correctly, this - // query will fail - assertQuery("SELECT custkey FROM orders WHERE custkey = custkey AND CAST(nullif(custkey, custkey) AS boolean) AND CAST(nullif(custkey, custkey) AS boolean)"); - } - @Test public void testDistinctMultipleFields() { @@ -831,100 +198,6 @@ public void testDistinctWithOrderBy() assertQueryOrdered("SELECT DISTINCT custkey FROM orders ORDER BY custkey LIMIT 10"); } - @Test - public void testDistinctWithOrderByNotInSelect() - { - assertQueryFails( - "SELECT DISTINCT custkey FROM orders ORDER BY orderkey LIMIT 10", - "line 1:1: For SELECT DISTINCT, ORDER BY expressions must appear in select list"); - } - - @Test - public void testGroupByOrderByLimit() - { - assertQueryOrdered("SELECT custkey, SUM(totalprice) FROM orders GROUP BY custkey ORDER BY SUM(totalprice) DESC LIMIT 10"); - } - - @Test - public void testLimitZero() - { - assertQuery("SELECT custkey, totalprice FROM orders LIMIT 0"); - } - - @Test - public void testLimitAll() - { - assertQuery("SELECT custkey, totalprice FROM orders LIMIT ALL", "SELECT custkey, totalprice FROM orders"); - } - - @Test - public void testOffset() - { - String values = "(VALUES ('A', 3), ('D', 2), ('C', 1), ('B', 4)) AS t(x, y)"; - - MaterializedResult actual = computeActual("SELECT x FROM " + values + " OFFSET 2 ROWS"); - MaterializedResult all = computeExpected("SELECT x FROM " + values, actual.getTypes()); - - 
assertEquals(actual.getMaterializedRows().size(), 2); - assertNotEquals(actual.getMaterializedRows().get(0), actual.getMaterializedRows().get(1)); - assertContains(all, actual); - } - - @Test - public void testOffsetWithFetch() - { - String values = "(VALUES ('A', 3), ('D', 2), ('C', 1), ('B', 4)) AS t(x, y)"; - - MaterializedResult actual = computeActual("SELECT x FROM " + values + " OFFSET 2 ROWS FETCH NEXT ROW ONLY"); - MaterializedResult all = computeExpected("SELECT x FROM " + values, actual.getTypes()); - - assertEquals(actual.getMaterializedRows().size(), 1); - assertContains(all, actual); - } - - @Test - public void testOffsetWithOrderBy() - { - String values = "(VALUES ('A', 3), ('D', 2), ('C', 1), ('B', 4)) AS t(x, y)"; - - assertQuery("SELECT x FROM " + values + " ORDER BY y OFFSET 2 ROWS", "VALUES 'A', 'B'"); - assertQuery("SELECT x FROM " + values + " ORDER BY y OFFSET 2 ROWS FETCH NEXT 1 ROW ONLY", "VALUES 'A'"); - } - - @Test - public void testOffsetEmptyResult() - { - assertQueryReturnsEmptyResult("SELECT name FROM nation OFFSET 100 ROWS"); - assertQueryReturnsEmptyResult("SELECT name FROM nation ORDER BY regionkey OFFSET 100 ROWS"); - assertQueryReturnsEmptyResult("SELECT name FROM nation OFFSET 100 ROWS LIMIT 20"); - assertQueryReturnsEmptyResult("SELECT name FROM nation ORDER BY regionkey OFFSET 100 ROWS LIMIT 20"); - } - - @Test - public void testFetchFirstWithTies() - { - String values = "(VALUES 1, 1, 1, 0, 0, 0, 2, 2, 2) AS t(x)"; - - assertQuery("SELECT x FROM " + values + " ORDER BY x FETCH FIRST 4 ROWS WITH TIES", "VALUES 0, 0, 0, 1, 1, 1"); - assertQuery("SELECT x FROM " + values + " ORDER BY x FETCH FIRST ROW WITH TIES", "VALUES 0, 0, 0"); - assertQuery("SELECT x FROM " + values + " ORDER BY x FETCH FIRST 20 ROWS WITH TIES", "VALUES 0, 0, 0, 1, 1, 1, 2, 2, 2"); - - assertQuery("SELECT x FROM " + values + " ORDER BY x OFFSET 2 ROWS FETCH NEXT 2 ROWS WITH TIES", "VALUES 0, 1, 1, 1"); - - assertQueryReturnsEmptyResult("SELECT x FROM " + 
values + " ORDER BY x OFFSET 20 ROWS FETCH NEXT 2 ROWS WITH TIES"); - - assertQueryFails("SELECT x FROM " + values + " FETCH FIRST 4 ROWS WITH TIES", "line 1:58: FETCH FIRST WITH TIES clause requires ORDER BY"); - assertQueryFails( - "SELECT x FROM (SELECT a FROM (VALUES 3, 2, 1, 1, 0) t(a) ORDER BY a) t1(x) FETCH FIRST 2 ROWS WITH TIES", - "line 1:76: FETCH FIRST WITH TIES clause requires ORDER BY"); - - String valuesMultiColumn = "(VALUES ('b', 0), ('b', 0), ('a', 1), ('a', 0), ('b', 1)) AS t(x, y)"; - - // if ORDER BY uses multiple symbols, then TIES are resolved basing on multiple symbols too - assertQuery("SELECT x, y FROM " + valuesMultiColumn + " ORDER BY x, y FETCH FIRST 3 ROWS WITH TIES", "VALUES ('a', 0), ('a', 1), ('b', 0), ('b', 0)"); - assertQuery("SELECT x, y FROM " + valuesMultiColumn + " ORDER BY x DESC, y FETCH FIRST ROW WITH TIES", "VALUES ('b', 0), ('b', 0)"); - } - @Test public void testRepeatedAggregations() { @@ -1117,70 +390,6 @@ public void testAverageAll() assertQuery("SELECT AVG(totalprice) FROM orders"); } - @Test - public void testVariance() - { - // int64 - assertQuery("SELECT VAR_SAMP(custkey) FROM orders"); - assertQuery("SELECT VAR_SAMP(custkey) FROM (SELECT custkey FROM orders ORDER BY custkey LIMIT 2) T"); - assertQuery("SELECT VAR_SAMP(custkey) FROM (SELECT custkey FROM orders ORDER BY custkey LIMIT 1) T"); - assertQuery("SELECT VAR_SAMP(custkey) FROM (SELECT custkey FROM orders LIMIT 0) T"); - - // double - assertQuery("SELECT VAR_SAMP(totalprice) FROM orders"); - assertQuery("SELECT VAR_SAMP(totalprice) FROM (SELECT totalprice FROM orders ORDER BY totalprice LIMIT 2) T"); - assertQuery("SELECT VAR_SAMP(totalprice) FROM (SELECT totalprice FROM orders ORDER BY totalprice LIMIT 1) T"); - assertQuery("SELECT VAR_SAMP(totalprice) FROM (SELECT totalprice FROM orders LIMIT 0) T"); - } - - @Test - public void testVariancePop() - { - // int64 - assertQuery("SELECT VAR_POP(custkey) FROM orders"); - assertQuery("SELECT VAR_POP(custkey) 
FROM (SELECT custkey FROM orders ORDER BY custkey LIMIT 2) T"); - assertQuery("SELECT VAR_POP(custkey) FROM (SELECT custkey FROM orders ORDER BY custkey LIMIT 1) T"); - assertQuery("SELECT VAR_POP(custkey) FROM (SELECT custkey FROM orders LIMIT 0) T"); - - // double - assertQuery("SELECT VAR_POP(totalprice) FROM orders"); - assertQuery("SELECT VAR_POP(totalprice) FROM (SELECT totalprice FROM orders ORDER BY totalprice LIMIT 2) T"); - assertQuery("SELECT VAR_POP(totalprice) FROM (SELECT totalprice FROM orders ORDER BY totalprice LIMIT 1) T"); - assertQuery("SELECT VAR_POP(totalprice) FROM (SELECT totalprice FROM orders LIMIT 0) T"); - } - - @Test - public void testStdDev() - { - // int64 - assertQuery("SELECT STDDEV_SAMP(custkey) FROM orders"); - assertQuery("SELECT STDDEV_SAMP(custkey) FROM (SELECT custkey FROM orders ORDER BY custkey LIMIT 2) T"); - assertQuery("SELECT STDDEV_SAMP(custkey) FROM (SELECT custkey FROM orders ORDER BY custkey LIMIT 1) T"); - assertQuery("SELECT STDDEV_SAMP(custkey) FROM (SELECT custkey FROM orders LIMIT 0) T"); - - // double - assertQuery("SELECT STDDEV_SAMP(totalprice) FROM orders"); - assertQuery("SELECT STDDEV_SAMP(totalprice) FROM (SELECT totalprice FROM orders ORDER BY totalprice LIMIT 2) T"); - assertQuery("SELECT STDDEV_SAMP(totalprice) FROM (SELECT totalprice FROM orders ORDER BY totalprice LIMIT 1) T"); - assertQuery("SELECT STDDEV_SAMP(totalprice) FROM (SELECT totalprice FROM orders LIMIT 0) T"); - } - - @Test - public void testStdDevPop() - { - // int64 - assertQuery("SELECT STDDEV_POP(custkey) FROM orders"); - assertQuery("SELECT STDDEV_POP(custkey) FROM (SELECT custkey FROM orders ORDER BY custkey LIMIT 2) T"); - assertQuery("SELECT STDDEV_POP(custkey) FROM (SELECT custkey FROM orders ORDER BY custkey LIMIT 1) T"); - assertQuery("SELECT STDDEV_POP(custkey) FROM (SELECT custkey FROM orders LIMIT 0) T"); - - // double - assertQuery("SELECT STDDEV_POP(totalprice) FROM orders"); - assertQuery("SELECT STDDEV_POP(totalprice) 
FROM (SELECT totalprice FROM orders ORDER BY totalprice LIMIT 2) T"); - assertQuery("SELECT STDDEV_POP(totalprice) FROM (SELECT totalprice FROM orders ORDER BY totalprice LIMIT 1) T"); - assertQuery("SELECT STDDEV_POP(totalprice) FROM (SELECT totalprice FROM orders LIMIT 0) T"); - } - @Test public void testRollupOverUnion() { @@ -1207,131 +416,15 @@ public void testRollupOverUnion() } @Test - public void testGrouping() + public void testIntersect() { assertQuery( - "SELECT a, b AS t, sum(c), grouping(a, b) + grouping(a) " + - "FROM (VALUES ('h', 'j', 11), ('k', 'l', 7)) AS t (a, b, c) " + - "GROUP BY GROUPING SETS ( (a), (b)) " + - "ORDER BY grouping(b) ASC", - "VALUES (NULL, 'j', 11, 3), (NULL, 'l', 7, 3), ('h', NULL, 11, 1), ('k', NULL, 7, 1)"); - + "SELECT regionkey FROM nation WHERE nationkey < 7 " + + "INTERSECT SELECT regionkey FROM nation WHERE nationkey > 21"); assertQuery( - "SELECT a, sum(b), grouping(a) FROM (VALUES ('h', 11, 0), ('k', 7, 0)) AS t (a, b, c) GROUP BY GROUPING SETS (a)", - "VALUES ('h', 11, 0), ('k', 7, 0)"); - - assertQuery( - "SELECT a, b, sum(c), grouping(a, b) FROM (VALUES ('h', 'j', 11), ('k', 'l', 7) ) AS t (a, b, c) GROUP BY GROUPING SETS ( (a), (b)) HAVING grouping(a, b) > 1 ", - "VALUES (NULL, 'j', 11, 2), (NULL, 'l', 7, 2)"); - - assertQuery("SELECT a, grouping(a) * 1.0 FROM (VALUES (1) ) AS t (a) GROUP BY a", - "VALUES (1, 0.0)"); - - assertQuery("SELECT a, grouping(a), grouping(a) FROM (VALUES (1) ) AS t (a) GROUP BY a", - "VALUES (1, 0, 0)"); - - assertQuery("SELECT grouping(a) FROM (VALUES ('h', 'j', 11), ('k', 'l', 7)) AS t (a, b, c) GROUP BY GROUPING SETS (a,c), c*2", - "VALUES (0), (1), (0), (1)"); - } - - @Test - public void testGroupingWithFortyArguments() - { - // This test ensures we correctly pick the bigint implementation version of the grouping - // function which supports up to 62 columns. Semantically it is exactly the same as - // TestGroupingOperationFunction#testMoreThanThirtyTwoArguments. 
That test is a little easier to - // understand and verify. - String fortyLetterSequence = "aa, ab, ac, ad, ae, af, ag, ah, ai, aj, ak, al, am, an, ao, ap, aq, ar, asa, at, au, av, aw, ax, ay, az, " + - "ba, bb, bc, bd, be, bf, bg, bh, bi, bj, bk, bl, bm, bn"; - String fortyIntegers = "1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, " + - "31, 32, 33, 34, 35, 36, 37, 38, 39, 40"; - // 20, 2, 13, 33, 40, 9 , 14 (corresponding indices from Left to right in the above fortyLetterSequence) - String groupingSet1 = "at, ab, am, bg, bn, ai, an"; - // 28, 4, 5, 29, 31, 10 (corresponding indices from left to right in the above fortyLetterSequence) - String groupingSet2 = "bb, ad, ae, bc, be, aj"; - String query = format( - "SELECT grouping(%s) FROM (VALUES (%s)) AS t(%s) GROUP BY GROUPING SETS ((%s), (%s), (%s))", - fortyLetterSequence, - fortyIntegers, - fortyLetterSequence, - fortyLetterSequence, - groupingSet1, - groupingSet2); - - assertQuery(query, "VALUES (0), (822283861886), (995358664191)"); - } - - @Test - public void testGroupingInTableSubquery() - { - // In addition to testing grouping() in subqueries, the following tests also - // ensure correct behavior in the case of alternating GROUPING SETS and GROUP BY - // clauses in the same plan. This is significant because grouping() with GROUP BY - // works only with a special re-write that should not happen in the presence of - // GROUPING SETS. 
- - // Inner query has a single GROUP BY and outer query has GROUPING SETS - assertQuery( - "SELECT orderkey, custkey, sum(agg_price) AS outer_sum, grouping(orderkey, custkey), g " + - "FROM " + - " (SELECT orderkey, custkey, sum(totalprice) AS agg_price, grouping(custkey, orderkey) AS g " + - " FROM orders " + - " GROUP BY orderkey, custkey " + - " ORDER BY agg_price ASC " + - " LIMIT 5) AS t " + - "GROUP BY GROUPING SETS ((orderkey, custkey), g) " + - "ORDER BY outer_sum", - "VALUES (35271, 334, 874.89, 0, NULL), " + - " (28647, 1351, 924.33, 0, NULL), " + - " (58145, 862, 929.03, 0, NULL), " + - " (8354, 634, 974.04, 0, NULL), " + - " (37415, 301, 986.63, 0, NULL), " + - " (NULL, NULL, 4688.92, 3, 0)"); - - // Inner query has GROUPING SETS and outer query has GROUP BY - assertQuery( - "SELECT orderkey, custkey, g, sum(agg_price) AS outer_sum, grouping(orderkey, custkey) " + - "FROM " + - " (SELECT orderkey, custkey, sum(totalprice) AS agg_price, grouping(custkey, orderkey) AS g " + - " FROM orders " + - " GROUP BY GROUPING SETS ((custkey), (orderkey)) " + - " ORDER BY agg_price ASC " + - " LIMIT 5) AS t " + - "GROUP BY orderkey, custkey, g", - "VALUES (28647, NULL, 2, 924.33, 0), " + - " (8354, NULL, 2, 974.04, 0), " + - " (37415, NULL, 2, 986.63, 0), " + - " (58145, NULL, 2, 929.03, 0), " + - " (35271, NULL, 2, 874.89, 0)"); - - // Inner query has GROUPING SETS but no grouping and outer query has a simple GROUP BY - assertQuery( - "SELECT orderkey, custkey, sum(agg_price) AS outer_sum, grouping(orderkey, custkey) " + - "FROM " + - " (SELECT orderkey, custkey, sum(totalprice) AS agg_price " + - " FROM orders " + - " GROUP BY GROUPING SETS ((custkey), (orderkey)) " + - " ORDER BY agg_price ASC NULLS FIRST) AS t " + - "GROUP BY orderkey, custkey " + - "ORDER BY outer_sum ASC NULLS FIRST " + - "LIMIT 5", - "VALUES (35271, NULL, 874.89, 0), " + - " (28647, NULL, 924.33, 0), " + - " (58145, NULL, 929.03, 0), " + - " (8354, NULL, 974.04, 0), " + - " (37415, NULL, 
986.63, 0)"); - } - - @Test - public void testIntersect() - { - assertQuery( - "SELECT regionkey FROM nation WHERE nationkey < 7 " + - "INTERSECT SELECT regionkey FROM nation WHERE nationkey > 21"); - assertQuery( - "SELECT regionkey FROM nation WHERE nationkey < 7 " + - "INTERSECT DISTINCT SELECT regionkey FROM nation WHERE nationkey > 21", - "VALUES 1, 3"); + "SELECT regionkey FROM nation WHERE nationkey < 7 " + + "INTERSECT DISTINCT SELECT regionkey FROM nation WHERE nationkey > 21", + "VALUES 1, 3"); assertQuery( "WITH wnation AS (SELECT nationkey, regionkey FROM nation) " + "SELECT regionkey FROM wnation WHERE nationkey < 7 " + @@ -1379,12 +472,6 @@ public void testIntersectWithAggregation() assertQuery("SELECT COUNT(*) FROM (SELECT nationkey FROM nation INTERSECT SELECT 2) n1 INTERSECT SELECT regionkey FROM nation"); } - @Test - public void testIntersectAllFails() - { - assertQueryFails("SELECT * FROM (VALUES 1, 2, 3, 4) INTERSECT ALL SELECT * FROM (VALUES 3, 4)", "line 1:35: INTERSECT ALL not yet implemented"); - } - @Test public void testExcept() { @@ -1442,12 +529,6 @@ public void testExceptWithAggregation() assertQuery("SELECT COUNT(*) FROM (SELECT nationkey FROM nation EXCEPT SELECT 10) n1 EXCEPT SELECT regionkey FROM nation"); } - @Test - public void testExceptAllFails() - { - assertQueryFails("SELECT * FROM (VALUES 1, 2, 3, 4) EXCEPT ALL SELECT * FROM (VALUES 3, 4)", "line 1:35: EXCEPT ALL not yet implemented"); - } - @Test public void testSelectWithComparison() { @@ -1472,26 +553,6 @@ public void testInlineViewWithProjections() assertQuery("SELECT x + 1, y FROM (SELECT orderkey * 10 x, custkey y FROM orders) u"); } - @Test - public void testInUncorrelatedSubquery() - { - assertQuery( - "SELECT CASE WHEN false THEN 1 IN (VALUES 2) END", - "SELECT NULL"); - assertQuery( - "SELECT x FROM (VALUES 2) t(x) WHERE MAP(ARRAY[8589934592], ARRAY[x]) IN (VALUES MAP(ARRAY[8589934592],ARRAY[2]))", - "SELECT 2"); - assertQuery( - "SELECT a IN (VALUES 2), a FROM 
(VALUES (2)) t(a)", - "SELECT TRUE, 2"); - } - - @Test - public void testChecksum() - { - assertQuery("SELECT to_hex(checksum(0))", "SELECT '0000000000000000'"); - } - @Test public void testMaxBy() { @@ -1557,3120 +618,1133 @@ public void testColumnAliases() } @Test - public void testRowNumberNoOptimization() - { - MaterializedResult actual = computeActual("" + - "SELECT orderkey, orderstatus FROM (\n" + - " SELECT row_number() OVER () rn, orderkey, orderstatus\n" + - " FROM orders\n" + - ") WHERE NOT rn <= 10"); - MaterializedResult all = computeExpected("SELECT orderkey, orderstatus FROM orders", actual.getTypes()); - assertEquals(actual.getMaterializedRows().size(), all.getMaterializedRows().size() - 10); - assertContains(all, actual); - - actual = computeActual("" + - "SELECT orderkey, orderstatus FROM (\n" + - " SELECT row_number() OVER () rn, orderkey, orderstatus\n" + - " FROM orders\n" + - ") WHERE rn - 5 <= 10"); - all = computeExpected("SELECT orderkey, orderstatus FROM orders", actual.getTypes()); - assertEquals(actual.getMaterializedRows().size(), 15); - assertContains(all, actual); - } - - @Test - public void testRowNumberLimit() + public void testCast() { - MaterializedResult actual = computeActual("" + - "SELECT row_number() OVER (PARTITION BY orderstatus) rn, orderstatus\n" + - "FROM orders\n" + - "LIMIT 10"); - assertEquals(actual.getMaterializedRows().size(), 10); + assertQuery("SELECT CAST('1' AS BIGINT)"); + assertQuery("SELECT CAST(totalprice AS BIGINT) FROM orders"); + assertQuery("SELECT CAST(orderkey AS DOUBLE) FROM orders"); + assertQuery("SELECT CAST(orderkey AS VARCHAR) FROM orders"); + assertQuery("SELECT CAST(orderkey AS BOOLEAN) FROM orders"); - actual = computeActual("" + - "SELECT row_number() OVER (PARTITION BY orderstatus ORDER BY orderkey) rn\n" + - "FROM orders\n" + - "LIMIT 10"); - assertEquals(actual.getMaterializedRows().size(), 10); + assertQuery("SELECT try_cast('1' AS BIGINT)", "SELECT CAST('1' AS BIGINT)"); + 
assertQuery("SELECT try_cast(totalprice AS BIGINT) FROM orders", "SELECT CAST(totalprice AS BIGINT) FROM orders"); + assertQuery("SELECT try_cast(orderkey AS DOUBLE) FROM orders", "SELECT CAST(orderkey AS DOUBLE) FROM orders"); + assertQuery("SELECT try_cast(orderkey AS VARCHAR) FROM orders", "SELECT CAST(orderkey AS VARCHAR) FROM orders"); + assertQuery("SELECT try_cast(orderkey AS BOOLEAN) FROM orders", "SELECT CAST(orderkey AS BOOLEAN) FROM orders"); - actual = computeActual("" + - "SELECT row_number() OVER () rn, orderstatus\n" + - "FROM orders\n" + - "LIMIT 10"); - assertEquals(actual.getMaterializedRows().size(), 10); + assertQuery("SELECT try_cast('foo' AS BIGINT)", "SELECT CAST(null AS BIGINT)"); + assertQuery("SELECT try_cast(clerk AS BIGINT) FROM orders", "SELECT CAST(null AS BIGINT) FROM orders"); + assertQuery("SELECT try_cast(orderkey * orderkey AS VARCHAR) FROM orders", "SELECT CAST(orderkey * orderkey AS VARCHAR) FROM orders"); + assertQuery("SELECT try_cast(try_cast(orderkey AS VARCHAR) AS BIGINT) FROM orders", "SELECT orderkey FROM orders"); + assertQuery("SELECT try_cast(clerk AS VARCHAR) || try_cast(clerk AS VARCHAR) FROM orders", "SELECT clerk || clerk FROM orders"); - actual = computeActual("" + - "SELECT row_number() OVER (ORDER BY orderkey) rn\n" + - "FROM orders\n" + - "LIMIT 10"); - assertEquals(actual.getMaterializedRows().size(), 10); - } + assertQuery("SELECT coalesce(try_cast('foo' AS BIGINT), 456)", "SELECT 456"); + assertQuery("SELECT coalesce(try_cast(clerk AS BIGINT), 456) FROM orders", "SELECT 456 FROM orders"); - @Test - public void testRowNumberMultipleFilters() - { - MaterializedResult actual = computeActual("" + - "SELECT * FROM (" + - " SELECT a, row_number() OVER (PARTITION BY a ORDER BY a) rn\n" + - " FROM (VALUES (1), (1), (1), (2), (2), (3)) t (a)) t " + - "WHERE rn < 3 AND rn % 2 = 0 AND a = 2 LIMIT 2"); - MaterializedResult expected = resultBuilder(getSession(), BIGINT, BIGINT) - .row(2, 2L) - .build(); - 
assertEqualsIgnoreOrder(actual.getMaterializedRows(), expected.getMaterializedRows()); + assertQuery("SELECT CAST(x AS BIGINT) FROM (VALUES 1, 2, 3, NULL) t (x)", "VALUES 1, 2, 3, NULL"); + assertQuery("SELECT try_cast(x AS BIGINT) FROM (VALUES 1, 2, 3, NULL) t (x)", "VALUES 1, 2, 3, NULL"); } @Test - public void testRowNumberSpecialFilters() + public void testQuotedIdentifiers() { - // Test "row_number() = negative number" filter with ORDER BY. This should create a Window Node with a Filter Node on top and return 0 rows. - assertQueryReturnsEmptyResult("" + - "SELECT * FROM (" + - " SELECT a, row_number() OVER (PARTITION BY a ORDER BY a) rn\n" + - " FROM (VALUES (1), (1), (1), (2), (2), (3)) t (a)) t " + - "WHERE rn = -1"); - - // Test "row_number() <= negative number" filter with ORDER BY. This should create a Window Node with a Filter Node on top and return 0 rows. - assertQueryReturnsEmptyResult("" + - "SELECT * FROM (" + - " SELECT a, row_number() OVER (PARTITION BY a ORDER BY a) rn\n" + - " FROM (VALUES (1), (1), (1), (2), (2), (3)) t (a)) t " + - "WHERE rn <= -1"); - - // Test "row_number() = 0" filter with ORDER BY. This should create a Window Node with a Filter Node on top and return 0 rows. - assertQueryReturnsEmptyResult("" + - "SELECT * FROM (" + - " SELECT a, row_number() OVER (PARTITION BY a ORDER BY a) rn\n" + - " FROM (VALUES (1), (1), (1), (2), (2), (3)) t (a)) t " + - "WHERE rn = 0"); - - // Test "row_number() = negative number" filter without ORDER BY. This should create a RowNumber Node with a Filter Node on top and return 0 rows. - assertQueryReturnsEmptyResult("" + - "SELECT * FROM (" + - " SELECT a, row_number() OVER (PARTITION BY a) rn\n" + - " FROM (VALUES (1), (1), (1), (2), (2), (3)) t (a)) t " + - "WHERE rn = -1"); - - // Test "row_number() <= negative number" filter without ORDER BY. This should create a RowNumber Node with a Filter Node on top and return 0 rows. 
- assertQueryReturnsEmptyResult("" + - "SELECT * FROM (" + - " SELECT a, row_number() OVER (PARTITION BY a) rn\n" + - " FROM (VALUES (1), (1), (1), (2), (2), (3)) t (a)) t " + - "WHERE rn <= -1"); - - // Test "row_number() = 0" filter without ORDER BY. This should create a RowNumber Node with a Filter Node on top and return 0 rows. - assertQueryReturnsEmptyResult("" + - "SELECT * FROM (" + - " SELECT a, row_number() OVER (PARTITION BY a) rn\n" + - " FROM (VALUES (1), (1), (1), (2), (2), (3)) t (a)) t " + - "WHERE rn = 0"); + assertQuery("SELECT \"TOTALPRICE\" \"my price\" FROM \"ORDERS\""); } @Test - public void testRowNumberFilterAndLimit() + public void testIn() { - MaterializedResult actual = computeActual("" + - "SELECT * FROM (" + - "SELECT a, row_number() OVER (PARTITION BY a ORDER BY a) rn\n" + - "FROM (VALUES (1), (2), (1), (2)) t (a)) t WHERE rn < 2 LIMIT 2"); - - MaterializedResult expected = resultBuilder(getSession(), BIGINT, BIGINT) - .row(1, 1L) - .row(2, 1L) - .build(); - assertEqualsIgnoreOrder(actual.getMaterializedRows(), expected.getMaterializedRows()); - - actual = computeActual("" + - "SELECT * FROM (" + - "SELECT a, row_number() OVER (PARTITION BY a) rn\n" + - "FROM (VALUES (1), (2), (1), (2), (1)) t (a)) t WHERE rn < 3 LIMIT 2"); - - expected = resultBuilder(getSession(), BIGINT, BIGINT) - .row(1, 1L) - .row(1, 2L) - .row(2, 1L) - .row(2, 2L) - .build(); - assertEquals(actual.getMaterializedRows().size(), 2); - assertContains(expected, actual); + assertQuery("SELECT orderkey FROM orders WHERE orderkey IN (1, 2, 3)"); + assertQuery("SELECT orderkey FROM orders WHERE orderkey IN (1.5, 2.3)", "SELECT orderkey FROM orders LIMIT 0"); // H2 incorrectly matches rows + assertQuery("SELECT orderkey FROM orders WHERE orderkey IN (1, 2E0, 3)"); + assertQuery("SELECT orderkey FROM orders WHERE totalprice IN (1, 2, 3)"); } @Test - public void testRowNumberUnpartitionedFilter() + public void testLargeIn() { - MaterializedResult actual = computeActual("" + 
- "SELECT orderkey, orderstatus FROM (\n" + - " SELECT row_number() OVER () rn, orderkey, orderstatus\n" + - " FROM orders\n" + - ") WHERE rn <= 5 AND orderstatus != 'Z'"); - MaterializedResult all = computeExpected("SELECT orderkey, orderstatus FROM orders", actual.getTypes()); - assertEquals(actual.getMaterializedRows().size(), 5); - assertContains(all, actual); - - actual = computeActual("" + - "SELECT orderkey, orderstatus FROM (\n" + - " SELECT row_number() OVER () rn, orderkey, orderstatus\n" + - " FROM orders\n" + - ") WHERE rn < 5"); - all = computeExpected("SELECT orderkey, orderstatus FROM orders", actual.getTypes()); - - assertEquals(actual.getMaterializedRows().size(), 4); - assertContains(all, actual); - - actual = computeActual("" + - "SELECT orderkey, orderstatus FROM (\n" + - " SELECT row_number() OVER () rn, orderkey, orderstatus\n" + - " FROM orders\n" + - ") LIMIT 5"); - all = computeExpected("SELECT orderkey, orderstatus FROM orders", actual.getTypes()); + String longValues = range(0, 5000) + .mapToObj(Integer::toString) + .collect(joining(", ")); + assertQuery("SELECT orderkey FROM orders WHERE orderkey IN (" + longValues + ")"); + assertQuery("SELECT orderkey FROM orders WHERE orderkey NOT IN (" + longValues + ")"); - assertEquals(actual.getMaterializedRows().size(), 5); - assertContains(all, actual); + assertQuery("SELECT orderkey FROM orders WHERE orderkey IN (mod(1000, orderkey), " + longValues + ")"); + assertQuery("SELECT orderkey FROM orders WHERE orderkey NOT IN (mod(1000, orderkey), " + longValues + ")"); } @Test - public void testRowNumberPartitionedFilter() + public void testShowSchemas() { - MaterializedResult actual = computeActual("" + - "SELECT orderkey, orderstatus FROM (\n" + - " SELECT row_number() OVER (PARTITION BY orderstatus) rn, orderkey, orderstatus\n" + - " FROM orders\n" + - ") WHERE rn <= 5"); - MaterializedResult all = computeExpected("SELECT orderkey, orderstatus FROM orders", actual.getTypes()); - - // there are 3 
DISTINCT orderstatus, so expect 15 rows. - assertEquals(actual.getMaterializedRows().size(), 15); - assertContains(all, actual); - - // Test for unreferenced outputs - actual = computeActual("" + - "SELECT orderkey FROM (\n" + - " SELECT row_number() OVER (PARTITION BY orderstatus) rn, orderkey\n" + - " FROM orders\n" + - ") WHERE rn <= 5"); - all = computeExpected("SELECT orderkey FROM orders", actual.getTypes()); - - // there are 3 distinct orderstatus, so expect 15 rows. - assertEquals(actual.getMaterializedRows().size(), 15); - assertContains(all, actual); + MaterializedResult result = computeActual("SHOW SCHEMAS"); + assertTrue(result.getOnlyColumnAsSet().containsAll(ImmutableSet.of(getSession().getSchema().get(), INFORMATION_SCHEMA))); } @Test - public void testRowNumberUnpartitionedFilterLimit() + public void testShowSchemasFrom() { - assertQuery("" + - "SELECT row_number() OVER ()\n" + - "FROM lineitem JOIN orders ON lineitem.orderkey = orders.orderkey\n" + - "WHERE orders.orderkey = 10000\n" + - "LIMIT 20"); + MaterializedResult result = computeActual(format("SHOW SCHEMAS FROM %s", getSession().getCatalog().get())); + assertTrue(result.getOnlyColumnAsSet().containsAll(ImmutableSet.of(getSession().getSchema().get(), INFORMATION_SCHEMA))); } @Test - public void testRowNumberPropertyDerivation() + public void testShowSchemasLike() { - assertQuery( - "SELECT orderkey, orderstatus, SUM(rn) OVER (PARTITION BY orderstatus) c " + - "FROM ( " + - " SELECT orderkey, orderstatus, row_number() OVER (PARTITION BY orderstatus) rn " + - " FROM ( " + - " SELECT * FROM orders ORDER BY orderkey LIMIT 10 " + - " ) " + - ")", - "VALUES " + - "(1, 'O', 21), " + - "(2, 'O', 21), " + - "(3, 'F', 10), " + - "(4, 'O', 21), " + - "(5, 'F', 10), " + - "(6, 'F', 10), " + - "(7, 'O', 21), " + - "(32, 'O', 21), " + - "(33, 'F', 10), " + - "(34, 'O', 21)"); + MaterializedResult result = computeActual(format("SHOW SCHEMAS LIKE '%s'", getSession().getSchema().get())); + 
assertEquals(result.getOnlyColumnAsSet(), ImmutableSet.of(getSession().getSchema().get())); } @Test - public void testTopNUnpartitionedWindow() + public void testShowSchemasLikeWithEscape() { - MaterializedResult actual = computeActual("" + - "SELECT * FROM (\n" + - " SELECT row_number() OVER (ORDER BY orderkey) rn, orderkey, orderstatus\n" + - " FROM orders\n" + - ") WHERE rn <= 5"); - String sql = "SELECT row_number() OVER (), orderkey, orderstatus FROM orders ORDER BY orderkey LIMIT 5"; - MaterializedResult expected = computeExpected(sql, actual.getTypes()); - assertEquals(actual, expected); - } + assertQueryFails("SHOW SCHEMAS IN foo LIKE '%$_%' ESCAPE", "line 1:39: mismatched input ''. Expecting: "); + assertQueryFails("SHOW SCHEMAS LIKE 't$_%' ESCAPE ''", "Escape string must be a single character"); + assertQueryFails("SHOW SCHEMAS LIKE 't$_%' ESCAPE '$$'", "Escape string must be a single character"); - @Test - public void testTopNUnpartitionedLargeWindow() - { - MaterializedResult actual = computeActual("" + - "SELECT * FROM (\n" + - " SELECT row_number() OVER (ORDER BY orderkey) rn, orderkey, orderstatus\n" + - " FROM orders\n" + - ") WHERE rn <= 10000"); - String sql = "SELECT row_number() OVER (), orderkey, orderstatus FROM orders ORDER BY orderkey LIMIT 10000"; - MaterializedResult expected = computeExpected(sql, actual.getTypes()); - assertEqualsIgnoreOrder(actual, expected); + Set allSchemas = computeActual("SHOW SCHEMAS").getOnlyColumnAsSet(); + assertEquals(allSchemas, computeActual("SHOW SCHEMAS LIKE '%_%'").getOnlyColumnAsSet()); + Set result = computeActual("SHOW SCHEMAS LIKE '%$_%' ESCAPE '$'").getOnlyColumnAsSet(); + assertNotEquals(allSchemas, result); + assertThat(result).contains("information_schema").allMatch(schemaName -> ((String) schemaName).contains("_")); } @Test - public void testTopNPartitionedWindow() + public void testShowTables() { - assertQuery( - "SELECT * FROM ( " + - " SELECT row_number() OVER (PARTITION BY orderstatus ORDER BY 
orderkey) rn, orderkey, orderstatus " + - " FROM orders " + - ") WHERE rn <= 2", - "VALUES " + - "(1, 1, 'O'), " + - "(2, 2, 'O'), " + - "(1, 3, 'F'), " + - "(2, 5, 'F'), " + - "(1, 65, 'P'), " + - "(2, 197, 'P')"); - - // Test for unreferenced outputs - assertQuery( - "SELECT * FROM ( " + - " SELECT row_number() OVER (PARTITION BY orderstatus ORDER BY orderkey) rn, orderkey " + - " FROM orders " + - ") WHERE rn <= 2", - "VALUES " + - "(1, 1), " + - "(2, 2), " + - "(1, 3), " + - "(2, 5), " + - "(1, 65), " + - "(2, 197)"); + Set expectedTables = TpchTable.getTables().stream() + .map(TpchTable::getTableName) + .collect(toImmutableSet()); - assertQuery( - "SELECT * FROM ( " + - " SELECT row_number() OVER (PARTITION BY orderstatus ORDER BY orderkey) rn, orderstatus " + - " FROM orders " + - ") WHERE rn <= 2", - "VALUES " + - "(1, 'O'), " + - "(2, 'O'), " + - "(1, 'F'), " + - "(2, 'F'), " + - "(1, 'P'), " + - "(2, 'P')"); + MaterializedResult result = computeActual("SHOW TABLES"); + assertTrue(result.getOnlyColumnAsSet().containsAll(expectedTables)); } @Test - public void testTopNUnpartitionedWindowWithEqualityFilter() + public void testShowTablesFrom() { - assertQuery( - "SELECT * FROM ( " + - " SELECT row_number() OVER (ORDER BY orderkey) rn, orderkey, orderstatus " + - " FROM orders " + - ") WHERE rn = 2", - "VALUES (2, 2, 'O')"); - } + Set expectedTables = TpchTable.getTables().stream() + .map(TpchTable::getTableName) + .collect(toImmutableSet()); - @Test - public void testTopNUnpartitionedWindowWithCompositeFilter() - { - assertQuery( - "SELECT * FROM ( " + - " SELECT row_number() OVER (ORDER BY orderkey) rn, orderkey, orderstatus " + - " FROM orders " + - ") WHERE rn = 1 OR rn IN (3, 4) OR rn BETWEEN 6 AND 7", - "VALUES " + - "(1, 1, 'O'), " + - "(3, 3, 'F'), " + - "(4, 4, 'O'), " + - "(6, 6, 'F'), " + - "(7, 7, 'O')"); - } + String catalog = getSession().getCatalog().get(); + String schema = getSession().getSchema().get(); - @Test - public void 
testTopNPartitionedWindowWithEqualityFilter() - { - assertQuery( - "SELECT * FROM ( " + - " SELECT row_number() OVER (PARTITION BY orderstatus ORDER BY orderkey) rn, orderkey, orderstatus " + - " FROM orders " + - ") WHERE rn = 2", - "VALUES " + - "(2, 2, 'O'), " + - "(2, 5, 'F'), " + - "(2, 197, 'P')"); + MaterializedResult result = computeActual("SHOW TABLES FROM " + schema); + assertTrue(result.getOnlyColumnAsSet().containsAll(expectedTables)); - // Test for unreferenced outputs - assertQuery( - "SELECT * FROM ( " + - " SELECT row_number() OVER (PARTITION BY orderstatus ORDER BY orderkey) rn, orderkey " + - " FROM orders " + - ") WHERE rn = 2", - "VALUES (2, 2), (2, 5), (2, 197)"); + result = computeActual("SHOW TABLES FROM " + catalog + "." + schema); + assertTrue(result.getOnlyColumnAsSet().containsAll(expectedTables)); - assertQuery( - "SELECT * FROM ( " + - " SELECT row_number() OVER (PARTITION BY orderstatus ORDER BY orderkey) rn, orderstatus " + - " FROM orders " + - ") WHERE rn = 2", - "VALUES (2, 'O'), (2, 'F'), (2, 'P')"); + assertQueryFails("SHOW TABLES FROM UNKNOWN", "line 1:1: Schema 'unknown' does not exist"); + assertQueryFails("SHOW TABLES FROM UNKNOWNCATALOG.UNKNOWNSCHEMA", "line 1:1: Catalog 'unknowncatalog' does not exist"); } @Test - public void testScalarFunction() + public void testShowTablesLike() { - assertQuery("SELECT SUBSTR('Quadratically', 5, 6)"); + assertThat(computeActual("SHOW TABLES LIKE 'or%'").getOnlyColumnAsSet()) + .contains("orders") + .allMatch(tableName -> ((String) tableName).startsWith("or")); } @Test - public void testCast() + public void testShowColumns() { - assertQuery("SELECT CAST('1' AS BIGINT)"); - assertQuery("SELECT CAST(totalprice AS BIGINT) FROM orders"); - assertQuery("SELECT CAST(orderkey AS DOUBLE) FROM orders"); - assertQuery("SELECT CAST(orderkey AS VARCHAR) FROM orders"); - assertQuery("SELECT CAST(orderkey AS BOOLEAN) FROM orders"); - - assertQuery("SELECT try_cast('1' AS BIGINT)", "SELECT CAST('1' AS 
BIGINT)"); - assertQuery("SELECT try_cast(totalprice AS BIGINT) FROM orders", "SELECT CAST(totalprice AS BIGINT) FROM orders"); - assertQuery("SELECT try_cast(orderkey AS DOUBLE) FROM orders", "SELECT CAST(orderkey AS DOUBLE) FROM orders"); - assertQuery("SELECT try_cast(orderkey AS VARCHAR) FROM orders", "SELECT CAST(orderkey AS VARCHAR) FROM orders"); - assertQuery("SELECT try_cast(orderkey AS BOOLEAN) FROM orders", "SELECT CAST(orderkey AS BOOLEAN) FROM orders"); + MaterializedResult actual = computeActual("SHOW COLUMNS FROM orders"); - assertQuery("SELECT try_cast('foo' AS BIGINT)", "SELECT CAST(null AS BIGINT)"); - assertQuery("SELECT try_cast(clerk AS BIGINT) FROM orders", "SELECT CAST(null AS BIGINT) FROM orders"); - assertQuery("SELECT try_cast(orderkey * orderkey AS VARCHAR) FROM orders", "SELECT CAST(orderkey * orderkey AS VARCHAR) FROM orders"); - assertQuery("SELECT try_cast(try_cast(orderkey AS VARCHAR) AS BIGINT) FROM orders", "SELECT orderkey FROM orders"); - assertQuery("SELECT try_cast(clerk AS VARCHAR) || try_cast(clerk AS VARCHAR) FROM orders", "SELECT clerk || clerk FROM orders"); + MaterializedResult expectedUnparametrizedVarchar = resultBuilder(getSession(), VARCHAR, VARCHAR, VARCHAR, VARCHAR) + .row("orderkey", "bigint", "", "") + .row("custkey", "bigint", "", "") + .row("orderstatus", "varchar", "", "") + .row("totalprice", "double", "", "") + .row("orderdate", "date", "", "") + .row("orderpriority", "varchar", "", "") + .row("clerk", "varchar", "", "") + .row("shippriority", "integer", "", "") + .row("comment", "varchar", "", "") + .build(); - assertQuery("SELECT coalesce(try_cast('foo' AS BIGINT), 456)", "SELECT 456"); - assertQuery("SELECT coalesce(try_cast(clerk AS BIGINT), 456) FROM orders", "SELECT 456 FROM orders"); + MaterializedResult expectedParametrizedVarchar = resultBuilder(getSession(), VARCHAR, VARCHAR, VARCHAR, VARCHAR) + .row("orderkey", "bigint", "", "") + .row("custkey", "bigint", "", "") + .row("orderstatus", 
"varchar(1)", "", "") + .row("totalprice", "double", "", "") + .row("orderdate", "date", "", "") + .row("orderpriority", "varchar(15)", "", "") + .row("clerk", "varchar(15)", "", "") + .row("shippriority", "integer", "", "") + .row("comment", "varchar(79)", "", "") + .build(); - assertQuery("SELECT CAST(x AS BIGINT) FROM (VALUES 1, 2, 3, NULL) t (x)", "VALUES 1, 2, 3, NULL"); - assertQuery("SELECT try_cast(x AS BIGINT) FROM (VALUES 1, 2, 3, NULL) t (x)", "VALUES 1, 2, 3, NULL"); + // Until we migrate all connectors to parametrized varchar we check two options + assertTrue(actual.equals(expectedParametrizedVarchar) || actual.equals(expectedUnparametrizedVarchar), + format("%s does not matche neither of %s and %s", actual, expectedParametrizedVarchar, expectedUnparametrizedVarchar)); } @Test - public void testInvalidCast() + public void testInformationSchemaFiltering() { - assertQueryFails( - "SELECT CAST(1 AS DATE)", - "line 1:8: Cannot cast integer to date"); + assertQuery( + "SELECT table_name FROM information_schema.tables WHERE table_name = 'orders' LIMIT 1", + "SELECT 'orders' table_name"); + assertQuery( + "SELECT table_name FROM information_schema.columns WHERE data_type = 'bigint' AND table_name = 'customer' and column_name = 'custkey' LIMIT 1", + "SELECT 'customer' table_name"); } @Test - public void testInvalidCastInMultilineQuery() + public void testInformationSchemaUppercaseName() { - assertQueryFails( - "SELECT CAST(totalprice AS BIGINT),\n" + - "CAST(2015 AS DATE),\n" + - "CAST(orderkey AS DOUBLE) FROM orders", - "line 2:1: Cannot cast integer to date"); + assertQuery( + "SELECT table_name FROM information_schema.tables WHERE table_catalog = 'LOCAL'", + "SELECT '' WHERE false"); + assertQuery( + "SELECT table_name FROM information_schema.tables WHERE table_schema = 'TINY'", + "SELECT '' WHERE false"); + assertQuery( + "SELECT table_name FROM information_schema.tables WHERE table_name = 'ORDERS'", + "SELECT '' WHERE false"); } @Test - public void 
testTryInvalidCast() + public void testSelectColumnOfNulls() { - assertQuery("SELECT TRY(CAST('a' AS BIGINT))", - "SELECT NULL"); + // Currently nulls can confuse the local planner, so select some + assertQueryOrdered("SELECT CAST(NULL AS VARCHAR), CAST(NULL AS BIGINT) FROM orders ORDER BY 1"); } @Test - public void testConcatOperator() + public void testSelectCaseInsensitive() { - assertQuery("SELECT '12' || '34'"); + assertQuery("SELECT ORDERKEY FROM ORDERS"); + assertQuery("SELECT OrDeRkEy FROM OrDeRs"); } @Test - public void testQuotedIdentifiers() + public void testTopN() { - assertQuery("SELECT \"TOTALPRICE\" \"my price\" FROM \"ORDERS\""); + assertQuery("SELECT n.name, r.name FROM nation n LEFT JOIN region r ON n.regionkey = r.regionkey ORDER BY n.name LIMIT 1"); } @Test - public void testInvalidColumn() + public void testTopNByMultipleFields() { - assertQueryFails( - "SELECT * FROM lineitem l JOIN (SELECT orderkey_1, custkey FROM orders) o on l.orderkey = o.orderkey_1", - "line 1:39: Column 'orderkey_1' cannot be resolved"); - } + assertQueryOrdered("SELECT orderkey, custkey, orderstatus FROM orders ORDER BY orderkey ASC, custkey ASC LIMIT 10"); + assertQueryOrdered("SELECT orderkey, custkey, orderstatus FROM orders ORDER BY orderkey ASC, custkey DESC LIMIT 10"); + assertQueryOrdered("SELECT orderkey, custkey, orderstatus FROM orders ORDER BY orderkey DESC, custkey ASC LIMIT 10"); + assertQueryOrdered("SELECT orderkey, custkey, orderstatus FROM orders ORDER BY orderkey DESC, custkey DESC LIMIT 10"); - @Test - public void testUnaliasedSubqueries() - { - assertQuery("SELECT orderkey FROM (SELECT orderkey FROM orders)"); - } - - @Test - public void testUnaliasedSubqueries1() - { - assertQuery("SELECT a FROM (SELECT orderkey a FROM orders)"); - } - - @Test - public void testWith() - { - assertQuery("" + - "WITH a AS (SELECT * FROM orders) " + - "SELECT * FROM a", - "SELECT * FROM orders"); - } - - @Test - public void testWithQualifiedPrefix() - { - 
assertQuery("WITH a AS (SELECT 123) SELECT a.* FROM a", "SELECT 123"); - } - - @Test - public void testWithAliased() - { - assertQuery("WITH a AS (SELECT * FROM orders) SELECT * FROM a x", "SELECT * FROM orders"); - } - - @Test - public void testReferenceToWithQueryInFromClause() - { - assertQuery( - "WITH a AS (SELECT * FROM orders)" + - "SELECT * FROM (" + - " SELECT * FROM a" + - ")", - "SELECT * FROM orders"); - } - - @Test - public void testWithChaining() - { - assertQuery("" + - "WITH a AS (SELECT orderkey n FROM orders)\n" + - ", b AS (SELECT n + 1 n FROM a)\n" + - ", c AS (SELECT n + 1 n FROM b)\n" + - "SELECT n + 1 FROM c", - "SELECT orderkey + 3 FROM orders"); - } - - @Test - public void testWithNestedSubqueries() - { - assertQuery("" + - "WITH a AS (\n" + - " WITH aa AS (SELECT 123 x FROM orders LIMIT 1)\n" + - " SELECT x y FROM aa\n" + - "), b AS (\n" + - " WITH bb AS (\n" + - " WITH bbb AS (SELECT y FROM a)\n" + - " SELECT bbb.* FROM bbb\n" + - " )\n" + - " SELECT y z FROM bb\n" + - ")\n" + - "SELECT *\n" + - "FROM (\n" + - " WITH q AS (SELECT z w FROM b)\n" + - " SELECT j.*, k.*\n" + - " FROM a j\n" + - " JOIN q k ON (j.y = k.w)\n" + - ") t", "" + - "SELECT 123, 123 FROM orders LIMIT 1"); - } - - @Test - public void testWithColumnAliasing() - { - assertQuery("WITH a (id) AS (SELECT 123) SELECT id FROM a", "SELECT 123"); - - assertQuery( - "WITH t (a, b, c) AS (SELECT 1, custkey x, orderkey FROM orders) SELECT c, b, a FROM t", - "SELECT orderkey, custkey, 1 FROM orders"); - } - - @Test - public void testWithHiding() - { - assertQuery("" + - "WITH a AS (SELECT 1), " + - " b AS (" + - " WITH a AS (SELECT 2)" + - " SELECT * FROM a" + - " )" + - "SELECT * FROM b", - "SELECT 2"); - assertQueryFails( - "WITH a AS (VALUES 1), " + - " a AS (VALUES 2)" + - "SELECT * FROM a", - "line 1:28: WITH query name 'a' specified more than once"); - } - - @Test - public void testWithRecursive() - { - assertQueryFails( - "WITH RECURSIVE a AS (SELECT 123) SELECT * FROM a", - 
"line 1:1: Recursive WITH queries are not supported"); - } - - @Test - public void testCaseNoElse() - { - assertQuery("SELECT orderkey, CASE orderstatus WHEN 'O' THEN 'a' END FROM orders"); - } - - @Test - public void testCaseNoElseInconsistentResultType() - { - assertQueryFails( - "SELECT orderkey, CASE orderstatus WHEN 'O' THEN 'a' WHEN '1' THEN 2 END FROM orders", - "\\Qline 1:67: All CASE results must be the same type: varchar(1)\\E"); - } - - @Test - public void testCaseWithSupertypeCast() - { - assertQuery(" SELECT CASE x WHEN 1 THEN CAST(1 AS decimal(4,1)) WHEN 2 THEN CAST(1 AS decimal(4,2)) ELSE CAST(1 AS decimal(4,3)) END FROM (values 1) t(x)", "SELECT 1.000"); - } - - @Test - public void testIfExpression() - { - assertQuery( - "SELECT sum(IF(orderstatus = 'F', totalprice, 0.0)) FROM orders", - "SELECT sum(CASE WHEN orderstatus = 'F' THEN totalprice ELSE 0.0 END) FROM orders"); - assertQuery( - "SELECT sum(IF(orderstatus = 'Z', totalprice)) FROM orders", - "SELECT sum(CASE WHEN orderstatus = 'Z' THEN totalprice END) FROM orders"); - assertQuery( - "SELECT sum(IF(orderstatus = 'F', NULL, totalprice)) FROM orders", - "SELECT sum(CASE WHEN orderstatus = 'F' THEN NULL ELSE totalprice END) FROM orders"); - assertQuery( - "SELECT IF(orderstatus = 'Z', orderkey / 0, orderkey) FROM orders", - "SELECT CASE WHEN orderstatus = 'Z' THEN orderkey / 0 ELSE orderkey END FROM orders"); - assertQuery( - "SELECT sum(IF(NULLIF(orderstatus, 'F') <> 'F', totalprice, 5.1)) FROM orders", - "SELECT sum(CASE WHEN NULLIF(orderstatus, 'F') <> 'F' THEN totalprice ELSE 5.1 END) FROM orders"); - - // coercions to supertype - assertQuery("SELECT if(true, CAST(1 AS decimal(2,1)), 1)", "SELECT 1.0"); - } - - @Test - public void testIn() - { - assertQuery("SELECT orderkey FROM orders WHERE orderkey IN (1, 2, 3)"); - assertQuery("SELECT orderkey FROM orders WHERE orderkey IN (1.5, 2.3)", "SELECT orderkey FROM orders LIMIT 0"); // H2 incorrectly matches rows - assertQuery("SELECT orderkey 
FROM orders WHERE orderkey IN (1, 2E0, 3)"); - assertQuery("SELECT orderkey FROM orders WHERE totalprice IN (1, 2, 3)"); - assertQuery("SELECT x FROM (values 3, 100) t(x) WHERE x IN (2147483649)", "SELECT * WHERE false"); - assertQuery("SELECT x FROM (values 3, 100, 2147483648, 2147483649, 2147483650) t(x) WHERE x IN (2147483648, 2147483650)", "values 2147483648, 2147483650"); - assertQuery("SELECT x FROM (values 3, 100, 2147483648, 2147483649, 2147483650) t(x) WHERE x IN (3, 4, 2147483648, 2147483650)", "values 3, 2147483648, 2147483650"); - assertQuery("SELECT x FROM (values 1, 2, 3) t(x) WHERE x IN (1 + CAST(rand() < 0 AS bigint), 2 + CAST(rand() < 0 AS bigint))", "values 1, 2"); - assertQuery("SELECT x FROM (values 1, 2, 3, 4) t(x) WHERE x IN (1 + CAST(rand() < 0 AS bigint), 2 + CAST(rand() < 0 AS bigint), 4)", "values 1, 2, 4"); - assertQuery("SELECT x FROM (values 1, 2, 3, 4) t(x) WHERE x IN (4, 2, 1)", "values 1, 2, 4"); - assertQuery("SELECT x FROM (values 1, 2, 3, 2147483648) t(x) WHERE x IN (1 + CAST(rand() < 0 AS bigint), 2 + CAST(rand() < 0 AS bigint), 2147483648)", "values 1, 2, 2147483648"); - assertQuery("SELECT x IN (0) FROM (values 4294967296) t(x)", "values false"); - assertQuery("SELECT x IN (0, 4294967297 + CAST(rand() < 0 AS bigint)) FROM (values 4294967296, 4294967297) t(x)", "values false, true"); - assertQuery("SELECT NULL in (1, 2, 3)", "values null"); - assertQuery("SELECT 1 in (1, NULL, 3)", "values true"); - assertQuery("SELECT 2 in (1, NULL, 3)", "values null"); - assertQuery("SELECT x FROM (values DATE '1970-01-01', DATE '1970-01-03') t(x) WHERE x IN (DATE '1970-01-01')", "values DATE '1970-01-01'"); - assertEquals( - computeActual("SELECT x FROM (values TIMESTAMP '1970-01-01 00:01:00+00:00', TIMESTAMP '1970-01-01 08:01:00+08:00', TIMESTAMP '1970-01-01 00:01:00+08:00') t(x) WHERE x IN (TIMESTAMP '1970-01-01 00:01:00+00:00')") - .getOnlyColumn().collect(toList()), - ImmutableList.of(zonedDateTime("1970-01-01 00:01:00.000 UTC"), 
zonedDateTime("1970-01-01 08:01:00.000 +08:00"))); - assertQuery("SELECT COUNT(*) FROM (values 1) t(x) WHERE x IN (null, 0)", "SELECT 0"); - assertQuery("SELECT d IN (DECIMAL '2.0', DECIMAL '30.0') FROM (VALUES (2.0E0)) t(d)", "SELECT true"); // coercion with type only coercion inside IN list - } - - @Test - public void testLargeIn() - { - String longValues = range(0, 5000) - .mapToObj(Integer::toString) - .collect(joining(", ")); - assertQuery("SELECT orderkey FROM orders WHERE orderkey IN (" + longValues + ")"); - assertQuery("SELECT orderkey FROM orders WHERE orderkey NOT IN (" + longValues + ")"); - - assertQuery("SELECT orderkey FROM orders WHERE orderkey IN (mod(1000, orderkey), " + longValues + ")"); - assertQuery("SELECT orderkey FROM orders WHERE orderkey NOT IN (mod(1000, orderkey), " + longValues + ")"); - - String arrayValues = range(0, 5000) - .mapToObj(i -> format("ARRAY[%s, %s, %s]", i, i + 1, i + 2)) - .collect(joining(", ")); - assertQuery("SELECT ARRAY[0, 0, 0] in (ARRAY[0, 0, 0], " + arrayValues + ")", "values true"); - assertQuery("SELECT ARRAY[0, 0, 0] in (" + arrayValues + ")", "values false"); - } - - @Test - public void testNullOnLhsOfInPredicateAllowed() - { - assertQuery("SELECT NULL IN (1, 2, 3)", "SELECT NULL"); - assertQuery("SELECT NULL IN (SELECT 1)", "SELECT NULL"); - assertQuery("SELECT NULL IN (SELECT 1 WHERE FALSE)", "SELECT FALSE"); - assertQuery("SELECT x FROM (VALUES NULL) t(x) WHERE x IN (SELECT 1)", "SELECT 33 WHERE FALSE"); - assertQuery("SELECT NULL IN (SELECT CAST(NULL AS BIGINT))", "SELECT NULL"); - assertQuery("SELECT NULL IN (SELECT NULL WHERE FALSE)", "SELECT FALSE"); - assertQuery("SELECT NULL IN ((SELECT 1) UNION ALL (SELECT NULL))", "SELECT NULL"); - assertQuery("SELECT x IN (SELECT TRUE) FROM (SELECT * FROM (VALUES CAST(NULL AS BOOLEAN)) t(x) WHERE (x OR NULL) IS NULL)", "SELECT NULL"); - assertQuery("SELECT x IN (SELECT 1) FROM (SELECT * FROM (VALUES CAST(NULL AS INTEGER)) t(x) WHERE (x + 10 IS NULL) OR X = 2)", 
"SELECT NULL"); - assertQuery("SELECT x IN (SELECT 1 WHERE FALSE) FROM (SELECT * FROM (VALUES CAST(NULL AS INTEGER)) t(x) WHERE (x + 10 IS NULL) OR X = 2)", "SELECT FALSE"); - } - - @Test - public void testInSubqueryWithCrossJoin() - { - assertQuery("SELECT a FROM (VALUES (1),(2)) t(a) WHERE a IN " + - "(SELECT b FROM (VALUES (ARRAY[2])) AS t1 (a) CROSS JOIN UNNEST(a) AS t2(b))", "SELECT 2"); - } - - @Test - public void testDuplicateFields() - { - assertQuery( - "SELECT * FROM (SELECT orderkey, orderkey FROM orders)", - "SELECT orderkey, orderkey FROM orders"); - } - - @Test - public void testWildcardFromSubquery() - { - assertQuery("SELECT * FROM (SELECT orderkey X FROM orders)"); - } + // now try with order by fields swapped + assertQueryOrdered("SELECT orderkey, custkey, orderstatus FROM orders ORDER BY custkey ASC, orderkey ASC LIMIT 10"); + assertQueryOrdered("SELECT orderkey, custkey, orderstatus FROM orders ORDER BY custkey ASC, orderkey DESC LIMIT 10"); + assertQueryOrdered("SELECT orderkey, custkey, orderstatus FROM orders ORDER BY custkey DESC, orderkey ASC LIMIT 10"); + assertQueryOrdered("SELECT orderkey, custkey, orderstatus FROM orders ORDER BY custkey DESC, orderkey DESC LIMIT 10"); - @Test - public void testCaseInsensitiveAttribute() - { - assertQuery("SELECT x FROM (SELECT orderkey X FROM orders)"); - } + // nulls first + assertQueryOrdered("SELECT orderkey, custkey, orderstatus FROM orders ORDER BY nullif(orderkey, 3) ASC NULLS FIRST, custkey ASC LIMIT 10"); + assertQueryOrdered("SELECT orderkey, custkey, orderstatus FROM orders ORDER BY nullif(orderkey, 3) DESC NULLS FIRST, custkey ASC LIMIT 10"); - @Test - public void testCaseInsensitiveAliasedRelation() - { - assertQuery("SELECT A.* FROM orders a"); - } + // nulls last + assertQueryOrdered("SELECT orderkey, custkey, orderstatus FROM orders ORDER BY nullif(orderkey, 3) ASC NULLS LAST LIMIT 10"); + assertQueryOrdered("SELECT orderkey, custkey, orderstatus FROM orders ORDER BY nullif(orderkey, 3) 
DESC NULLS LAST, custkey ASC LIMIT 10"); - @Test - public void testCaseInsensitiveRowFieldReference() - { - assertQuery("SELECT a.Col0 FROM (VALUES row(cast(ROW(1,2) AS ROW(col0 integer, col1 integer)))) AS t (a)", "SELECT 1"); + // assure that default is nulls last + assertQueryOrdered( + "SELECT orderkey, custkey, orderstatus FROM orders ORDER BY nullif(orderkey, 3) ASC, custkey ASC LIMIT 10", + "SELECT orderkey, custkey, orderstatus FROM orders ORDER BY nullif(orderkey, 3) ASC NULLS LAST, custkey ASC LIMIT 10"); } @Test - public void testSubqueryBody() + public void testLimitPushDown() { - assertQuery("(SELECT orderkey, custkey FROM orders)"); - } + MaterializedResult actual = computeActual( + "(TABLE orders ORDER BY orderkey) UNION ALL " + + "SELECT * FROM orders WHERE orderstatus = 'F' UNION ALL " + + "(TABLE orders ORDER BY orderkey LIMIT 20) UNION ALL " + + "(TABLE orders LIMIT 5) UNION ALL " + + "TABLE orders LIMIT 10"); + MaterializedResult all = computeExpected("SELECT * FROM orders", actual.getTypes()); - @Test - public void testSubqueryBodyOrderLimit() - { - assertQueryOrdered("(SELECT orderkey AS a, custkey AS b FROM orders) ORDER BY a LIMIT 1"); + assertEquals(actual.getMaterializedRows().size(), 10); + assertContains(all, actual); } @Test - public void testSubqueryBodyProjectedOrderby() + public void testScalarSubquery() { - assertQueryOrdered("(SELECT orderkey, custkey FROM orders) ORDER BY orderkey * -1"); - } + // nested + assertQuery("SELECT (SELECT (SELECT (SELECT 1)))"); - @Test - public void testSubqueryBodyDoubleOrderby() - { - assertQueryOrdered("(SELECT orderkey, custkey FROM orders ORDER BY custkey) ORDER BY orderkey"); - } + // aggregation + assertQuery("SELECT * FROM lineitem WHERE orderkey = \n" + + "(SELECT max(orderkey) FROM orders)"); - @Test - public void testNodeRoster() - { - List result = computeActual("SELECT * FROM system.runtime.nodes").getMaterializedRows(); - assertEquals(result.size(), getNodeCount()); - } + // no output + 
assertQuery("SELECT * FROM lineitem WHERE orderkey = \n" + + "(SELECT orderkey FROM orders WHERE 0=1)"); - @Test - public void testCountOnInternalTables() - { - List rows = computeActual("SELECT count(*) FROM system.runtime.nodes").getMaterializedRows(); - assertEquals(((Long) rows.get(0).getField(0)).longValue(), getNodeCount()); - } + // no output matching with null test + assertQuery("SELECT * FROM lineitem WHERE \n" + + "(SELECT orderkey FROM orders WHERE 0=1) " + + "is null"); + assertQuery("SELECT * FROM lineitem WHERE \n" + + "(SELECT orderkey FROM orders WHERE 0=1) " + + "is not null"); - @Test - public void testTransactionsTable() - { - List result = computeActual("SELECT * FROM system.runtime.transactions").getMaterializedRows(); - assertTrue(result.size() >= 1); // At least one row for the current transaction. - } + // subquery results and in in-predicate + assertQuery("SELECT (SELECT 1) IN (1, 2, 3)"); + assertQuery("SELECT (SELECT 1) IN ( 2, 3)"); - @Test - public void testDefaultExplainTextFormat() - { - String query = "SELECT * FROM orders"; - MaterializedResult result = computeActual("EXPLAIN " + query); - assertEquals(getOnlyElement(result.getOnlyColumnAsSet()), getExplainPlan(query, LOGICAL)); - } + // multiple subqueries + assertQuery("SELECT (SELECT 1) = (SELECT 3)"); + assertQuery("SELECT (SELECT 1) < (SELECT 3)"); + assertQuery("SELECT COUNT(*) FROM lineitem WHERE " + + "(SELECT min(orderkey) FROM orders)" + + "<" + + "(SELECT max(orderkey) FROM orders)"); + assertQuery("SELECT (SELECT 1), (SELECT 2), (SELECT 3)"); - @Test - public void testDefaultExplainGraphvizFormat() - { - String query = "SELECT * FROM orders"; - MaterializedResult result = computeActual("EXPLAIN (FORMAT GRAPHVIZ) " + query); - assertEquals(getOnlyElement(result.getOnlyColumnAsSet()), getGraphvizExplainPlan(query, LOGICAL)); - } + // distinct + assertQuery("SELECT DISTINCT orderkey FROM lineitem " + + "WHERE orderkey BETWEEN" + + " (SELECT avg(orderkey) FROM orders) - 10 " 
+ + " AND" + + " (SELECT avg(orderkey) FROM orders) + 10"); - @Test - public void testLogicalExplain() - { - String query = "SELECT * FROM orders"; - MaterializedResult result = computeActual("EXPLAIN (TYPE LOGICAL) " + query); - assertEquals(getOnlyElement(result.getOnlyColumnAsSet()), getExplainPlan(query, LOGICAL)); - } + // subqueries with joins + assertQuery("SELECT o1.orderkey, COUNT(*) " + + "FROM orders o1 " + + "INNER JOIN (SELECT * FROM orders ORDER BY orderkey LIMIT 10) o2 " + + "ON o1.orderkey " + + "BETWEEN (SELECT avg(orderkey) FROM orders) - 10 AND (SELECT avg(orderkey) FROM orders) + 10 " + + "GROUP BY o1.orderkey"); + assertQuery("SELECT o1.orderkey, COUNT(*) " + + "FROM (SELECT * FROM orders ORDER BY orderkey LIMIT 5) o1 " + + "LEFT JOIN (SELECT * FROM orders ORDER BY orderkey LIMIT 10) o2 " + + "ON o1.orderkey " + + "BETWEEN (SELECT avg(orderkey) FROM orders) - 10 AND (SELECT avg(orderkey) FROM orders) + 10 " + + "GROUP BY o1.orderkey"); + assertQuery("SELECT o1.orderkey, COUNT(*) " + + "FROM orders o1 RIGHT JOIN (SELECT * FROM orders ORDER BY orderkey LIMIT 10) o2 " + + "ON o1.orderkey " + + "BETWEEN (SELECT avg(orderkey) FROM orders) - 10 AND (SELECT avg(orderkey) FROM orders) + 10 " + + "GROUP BY o1.orderkey"); + assertQuery("SELECT DISTINCT COUNT(*) " + + "FROM (SELECT * FROM orders ORDER BY orderkey LIMIT 5) o1 " + + "FULL JOIN (SELECT * FROM orders ORDER BY orderkey LIMIT 10) o2 " + + "ON o1.orderkey " + + "BETWEEN (SELECT avg(orderkey) FROM orders) - 10 AND (SELECT avg(orderkey) FROM orders) + 10 " + + "GROUP BY o1.orderkey", + "VALUES 1, 10"); - @Test - public void testIoExplain() - { - String query = "SELECT * FROM orders"; - MaterializedResult result = computeActual("EXPLAIN (TYPE IO) " + query); - assertEquals(getOnlyElement(result.getOnlyColumnAsSet()), getExplainPlan(query, IO)); - } + // subqueries with ORDER BY + assertQuery("SELECT orderkey, totalprice FROM orders ORDER BY (SELECT 2)"); - @Test - public void 
testLogicalExplainTextFormat() - { - String query = "SELECT * FROM orders"; - MaterializedResult result = computeActual("EXPLAIN (TYPE LOGICAL, FORMAT TEXT) " + query); - assertEquals(getOnlyElement(result.getOnlyColumnAsSet()), getExplainPlan(query, LOGICAL)); - } + // subquery returns multiple rows + String multipleRowsErrorMsg = "Scalar sub-query has returned multiple rows"; + assertQueryFails("SELECT * FROM lineitem WHERE orderkey = (\n" + + "SELECT orderkey FROM orders ORDER BY totalprice)", + multipleRowsErrorMsg); + assertQueryFails("SELECT orderkey, totalprice FROM orders ORDER BY (VALUES 1, 2)", + multipleRowsErrorMsg); - @Test - public void testLogicalExplainGraphvizFormat() - { - String query = "SELECT * FROM orders"; - MaterializedResult result = computeActual("EXPLAIN (TYPE LOGICAL, FORMAT GRAPHVIZ) " + query); - assertEquals(getOnlyElement(result.getOnlyColumnAsSet()), getGraphvizExplainPlan(query, LOGICAL)); - } + // exposes a bug in optimize hash generation because EnforceSingleNode does not + // support more than one column from the underlying query + assertQuery("SELECT custkey, (SELECT DISTINCT custkey FROM orders ORDER BY custkey LIMIT 1) FROM orders"); - @Test - public void testDistributedExplain() - { - String query = "SELECT * FROM orders"; - MaterializedResult result = computeActual("EXPLAIN (TYPE DISTRIBUTED) " + query); - assertEquals(getOnlyElement(result.getOnlyColumnAsSet()), getExplainPlan(query, DISTRIBUTED)); - } - - @Test - public void testDistributedExplainTextFormat() - { - String query = "SELECT * FROM orders"; - MaterializedResult result = computeActual("EXPLAIN (TYPE DISTRIBUTED, FORMAT TEXT) " + query); - assertEquals(getOnlyElement(result.getOnlyColumnAsSet()), getExplainPlan(query, DISTRIBUTED)); - } - - @Test - public void testDistributedExplainGraphvizFormat() - { - String query = "SELECT * FROM orders"; - MaterializedResult result = computeActual("EXPLAIN (TYPE DISTRIBUTED, FORMAT GRAPHVIZ) " + query); - 
assertEquals(getOnlyElement(result.getOnlyColumnAsSet()), getGraphvizExplainPlan(query, DISTRIBUTED)); - } - - @Test - public void testExplainValidate() - { - MaterializedResult result = computeActual("EXPLAIN (TYPE VALIDATE) SELECT 1"); - assertEquals(result.getOnlyValue(), true); - } - - @Test(expectedExceptions = Exception.class, expectedExceptionsMessageRegExp = "line 1:32: Column 'x' cannot be resolved") - public void testExplainValidateThrows() - { - computeActual("EXPLAIN (TYPE VALIDATE) SELECT x"); - } - - @Test - public void testExplainOfExplain() - { - String query = "EXPLAIN SELECT * FROM orders"; - MaterializedResult result = computeActual("EXPLAIN " + query); - assertEquals(getOnlyElement(result.getOnlyColumnAsSet()), getExplainPlan(query, LOGICAL)); - } - - @Test - public void testExplainOfExplainAnalyze() - { - String query = "EXPLAIN ANALYZE SELECT * FROM orders"; - MaterializedResult result = computeActual("EXPLAIN " + query); - assertEquals(getOnlyElement(result.getOnlyColumnAsSet()), getExplainPlan(query, LOGICAL)); - } - - @Test - public void testExplainDdl() - { - assertExplainDdl("CREATE TABLE foo (pk bigint)", "CREATE TABLE foo"); - assertExplainDdl("CREATE VIEW foo AS SELECT * FROM orders", "CREATE VIEW foo"); - assertExplainDdl("DROP TABLE orders"); - assertExplainDdl("DROP VIEW view"); - assertExplainDdl("ALTER TABLE orders RENAME TO new_name"); - assertExplainDdl("ALTER TABLE orders RENAME COLUMN orderkey TO new_column_name"); - assertExplainDdl("SET SESSION foo = 'bar'"); - assertExplainDdl("PREPARE my_query FROM SELECT * FROM orders", "PREPARE my_query"); - assertExplainDdl("DEALLOCATE PREPARE my_query"); - assertExplainDdl("RESET SESSION foo"); - assertExplainDdl("START TRANSACTION"); - assertExplainDdl("COMMIT"); - assertExplainDdl("ROLLBACK"); - } - - private void assertExplainDdl(String query) - { - assertExplainDdl(query, query); - } - - private void assertExplainDdl(String query, String expected) - { - MaterializedResult result = 
computeActual("EXPLAIN " + query); - assertEquals(getOnlyElement(result.getOnlyColumnAsSet()), expected); - } - - @Test - public void testExplainExecute() - { - Session session = Session.builder(getSession()) - .addPreparedStatement("my_query", "SELECT * FROM orders") - .build(); - MaterializedResult result = computeActual(session, "EXPLAIN (TYPE LOGICAL) EXECUTE my_query"); - assertEquals(getOnlyElement(result.getOnlyColumnAsSet()), getExplainPlan("SELECT * FROM orders", LOGICAL)); - } - - @Test - public void testExplainExecuteWithUsing() - { - Session session = Session.builder(getSession()) - .addPreparedStatement("my_query", "SELECT * FROM orders WHERE orderkey < ?") - .build(); - MaterializedResult result = computeActual(session, "EXPLAIN (TYPE LOGICAL) EXECUTE my_query USING 7"); - assertEquals(getOnlyElement(result.getOnlyColumnAsSet()), getExplainPlan("SELECT * FROM orders WHERE orderkey < 7", LOGICAL)); - } - - @Test - public void testExplainSetSessionWithUsing() - { - Session session = Session.builder(getSession()) - .addPreparedStatement("my_query", "SET SESSION foo = ?") - .build(); - MaterializedResult result = computeActual(session, "EXPLAIN (TYPE LOGICAL) EXECUTE my_query USING 7"); - assertEquals( - getOnlyElement(result.getOnlyColumnAsSet()), - "SET SESSION foo = ?\n" + - "Parameters: [7]"); - } - - @Test - public void testShowCatalogs() - { - MaterializedResult result = computeActual("SHOW CATALOGS"); - assertTrue(result.getOnlyColumnAsSet().contains(getSession().getCatalog().get())); - } - - @Test - public void testShowCatalogsLike() - { - MaterializedResult result = computeActual(format("SHOW CATALOGS LIKE '%s'", getSession().getCatalog().get())); - assertEquals(result.getOnlyColumnAsSet(), ImmutableSet.of(getSession().getCatalog().get())); - } - - @Test - public void testShowSchemas() - { - MaterializedResult result = computeActual("SHOW SCHEMAS"); - 
assertTrue(result.getOnlyColumnAsSet().containsAll(ImmutableSet.of(getSession().getSchema().get(), INFORMATION_SCHEMA))); - } - - @Test - public void testShowSchemasFrom() - { - MaterializedResult result = computeActual(format("SHOW SCHEMAS FROM %s", getSession().getCatalog().get())); - assertTrue(result.getOnlyColumnAsSet().containsAll(ImmutableSet.of(getSession().getSchema().get(), INFORMATION_SCHEMA))); - } - - @Test - public void testShowSchemasLike() - { - MaterializedResult result = computeActual(format("SHOW SCHEMAS LIKE '%s'", getSession().getSchema().get())); - assertEquals(result.getOnlyColumnAsSet(), ImmutableSet.of(getSession().getSchema().get())); - } - - @Test - public void testShowSchemasLikeWithEscape() - { - assertQueryFails("SHOW SCHEMAS IN foo LIKE '%$_%' ESCAPE", "line 1:39: mismatched input ''. Expecting: "); - assertQueryFails("SHOW SCHEMAS LIKE 't$_%' ESCAPE ''", "Escape string must be a single character"); - assertQueryFails("SHOW SCHEMAS LIKE 't$_%' ESCAPE '$$'", "Escape string must be a single character"); - - Set allSchemas = computeActual("SHOW SCHEMAS").getOnlyColumnAsSet(); - assertEquals(allSchemas, computeActual("SHOW SCHEMAS LIKE '%_%'").getOnlyColumnAsSet()); - Set result = computeActual("SHOW SCHEMAS LIKE '%$_%' ESCAPE '$'").getOnlyColumnAsSet(); - assertNotEquals(allSchemas, result); - assertThat(result).contains("information_schema").allMatch(schemaName -> ((String) schemaName).contains("_")); - } - - @Test - public void testShowTables() - { - Set expectedTables = TpchTable.getTables().stream() - .map(TpchTable::getTableName) - .collect(toImmutableSet()); - - MaterializedResult result = computeActual("SHOW TABLES"); - assertTrue(result.getOnlyColumnAsSet().containsAll(expectedTables)); - } - - @Test - public void testShowTablesFrom() - { - Set expectedTables = TpchTable.getTables().stream() - .map(TpchTable::getTableName) - .collect(toImmutableSet()); - - String catalog = getSession().getCatalog().get(); - String schema = 
getSession().getSchema().get(); - - MaterializedResult result = computeActual("SHOW TABLES FROM " + schema); - assertTrue(result.getOnlyColumnAsSet().containsAll(expectedTables)); - - result = computeActual("SHOW TABLES FROM " + catalog + "." + schema); - assertTrue(result.getOnlyColumnAsSet().containsAll(expectedTables)); - - assertQueryFails("SHOW TABLES FROM UNKNOWN", "line 1:1: Schema 'unknown' does not exist"); - assertQueryFails("SHOW TABLES FROM UNKNOWNCATALOG.UNKNOWNSCHEMA", "line 1:1: Catalog 'unknowncatalog' does not exist"); - } - - @Test - public void testShowTablesLike() - { - assertThat(computeActual("SHOW TABLES LIKE 'or%'").getOnlyColumnAsSet()) - .contains("orders") - .allMatch(tableName -> ((String) tableName).startsWith("or")); - } - - @Test - public void testShowTablesLikeWithEscape() - { - assertQueryFails("SHOW TABLES IN a LIKE '%$_%' ESCAPE", "line 1:36: mismatched input ''. Expecting: "); - assertQueryFails("SHOW TABLES LIKE 't$_%' ESCAPE ''", "Escape string must be a single character"); - assertQueryFails("SHOW TABLES LIKE 't$_%' ESCAPE '$$'", "Escape string must be a single character"); - - Set allTables = computeActual("SHOW TABLES FROM information_schema").getOnlyColumnAsSet(); - assertEquals(allTables, computeActual("SHOW TABLES FROM information_schema LIKE '%_%'").getOnlyColumnAsSet()); - Set result = computeActual("SHOW TABLES FROM information_schema LIKE '%$_%' ESCAPE '$'").getOnlyColumnAsSet(); - assertNotEquals(allTables, result); - assertThat(result).contains("table_privileges").allMatch(schemaName -> ((String) schemaName).contains("_")); - } - - @Test - public void testShowColumns() - { - MaterializedResult actual = computeActual("SHOW COLUMNS FROM orders"); - - MaterializedResult expectedUnparametrizedVarchar = resultBuilder(getSession(), VARCHAR, VARCHAR, VARCHAR, VARCHAR) - .row("orderkey", "bigint", "", "") - .row("custkey", "bigint", "", "") - .row("orderstatus", "varchar", "", "") - .row("totalprice", "double", "", "") - 
.row("orderdate", "date", "", "") - .row("orderpriority", "varchar", "", "") - .row("clerk", "varchar", "", "") - .row("shippriority", "integer", "", "") - .row("comment", "varchar", "", "") - .build(); - - MaterializedResult expectedParametrizedVarchar = resultBuilder(getSession(), VARCHAR, VARCHAR, VARCHAR, VARCHAR) - .row("orderkey", "bigint", "", "") - .row("custkey", "bigint", "", "") - .row("orderstatus", "varchar(1)", "", "") - .row("totalprice", "double", "", "") - .row("orderdate", "date", "", "") - .row("orderpriority", "varchar(15)", "", "") - .row("clerk", "varchar(15)", "", "") - .row("shippriority", "integer", "", "") - .row("comment", "varchar(79)", "", "") - .build(); - - // Until we migrate all connectors to parametrized varchar we check two options - assertTrue(actual.equals(expectedParametrizedVarchar) || actual.equals(expectedUnparametrizedVarchar), - format("%s does not matche neither of %s and %s", actual, expectedParametrizedVarchar, expectedUnparametrizedVarchar)); - } - - @Test - public void testAtTimeZone() - { - // TODO the expected values here are non-sensical due to https://github.com/prestosql/presto/issues/37 - assertEquals(computeScalar("SELECT TIMESTAMP '2012-10-31 01:00' AT TIME ZONE INTERVAL '07:09' hour to minute"), zonedDateTime("2012-10-30 18:09:00.000 +07:09")); - assertEquals(computeScalar("SELECT TIMESTAMP '2012-10-31 01:00' AT TIME ZONE 'Asia/Oral'"), zonedDateTime("2012-10-30 16:00:00.000 Asia/Oral")); - assertEquals(computeScalar("SELECT MIN(x) AT TIME ZONE 'America/Chicago' FROM (VALUES TIMESTAMP '1970-01-01 00:01:00+00:00') t(x)"), zonedDateTime("1969-12-31 18:01:00.000 America/Chicago")); - assertEquals(computeScalar("SELECT TIMESTAMP '2012-10-31 01:00' AT TIME ZONE '+07:09'"), zonedDateTime("2012-10-30 18:09:00.000 +07:09")); - assertEquals(computeScalar("SELECT TIMESTAMP '2012-10-31 01:00 UTC' AT TIME ZONE 'America/Los_Angeles'"), zonedDateTime("2012-10-30 18:00:00.000 America/Los_Angeles")); - 
assertEquals(computeScalar("SELECT TIMESTAMP '2012-10-31 01:00' AT TIME ZONE 'America/Los_Angeles'"), zonedDateTime("2012-10-30 04:00:00.000 America/Los_Angeles")); - assertEquals(computeActual("SELECT x AT TIME ZONE 'America/Los_Angeles' FROM (values TIMESTAMP '1970-01-01 00:01:00+00:00', TIMESTAMP '1970-01-01 08:01:00+08:00', TIMESTAMP '1969-12-31 16:01:00-08:00') t(x)").getOnlyColumnAsSet(), - ImmutableSet.of(zonedDateTime("1969-12-31 16:01:00.000 America/Los_Angeles"))); - assertEquals(computeActual("SELECT x AT TIME ZONE 'America/Los_Angeles' FROM (values TIMESTAMP '1970-01-01 00:01:00', TIMESTAMP '1970-01-01 08:01:00', TIMESTAMP '1969-12-31 16:01:00') t(x)").getOnlyColumn().collect(toList()), - ImmutableList.of(zonedDateTime("1970-01-01 03:01:00.000 America/Los_Angeles"), zonedDateTime("1970-01-01 11:01:00.000 America/Los_Angeles"), zonedDateTime("1969-12-31 19:01:00.000 America/Los_Angeles"))); - assertEquals(computeScalar("SELECT min(x) AT TIME ZONE 'America/Los_Angeles' FROM (values TIMESTAMP '1970-01-01 00:01:00+00:00', TIMESTAMP '1970-01-01 08:01:00+08:00', TIMESTAMP '1969-12-31 16:01:00-08:00') t(x)"), - zonedDateTime("1969-12-31 16:01:00.000 America/Los_Angeles")); - - // with chained AT TIME ZONE - assertEquals(computeScalar("SELECT TIMESTAMP '2012-10-31 01:00' AT TIME ZONE 'America/Los_Angeles' AT TIME ZONE 'UTC'"), zonedDateTime("2012-10-30 11:00:00.000 UTC")); - assertEquals(computeScalar("SELECT TIMESTAMP '2012-10-31 01:00' AT TIME ZONE 'Asia/Tokyo' AT TIME ZONE 'America/Los_Angeles'"), zonedDateTime("2012-10-30 04:00:00.000 America/Los_Angeles")); - assertEquals(computeScalar("SELECT TIMESTAMP '2012-10-31 01:00' AT TIME ZONE 'America/Los_Angeles' AT TIME ZONE 'Asia/Shanghai'"), zonedDateTime("2012-10-30 19:00:00.000 Asia/Shanghai")); - assertEquals(computeScalar("SELECT min(x) AT TIME ZONE 'America/Los_Angeles' AT TIME ZONE 'UTC' FROM (values TIMESTAMP '1970-01-01 00:01:00+00:00', TIMESTAMP '1970-01-01 08:01:00+08:00', TIMESTAMP '1969-12-31 
16:01:00-08:00') t(x)"), - zonedDateTime("1970-01-01 00:01:00.000 UTC")); - - // with AT TIME ZONE in VALUES - assertEquals(computeScalar("SELECT * FROM (VALUES TIMESTAMP '2012-10-31 01:00' AT TIME ZONE 'Asia/Oral')"), zonedDateTime("2012-10-30 16:00:00.000 Asia/Oral")); - } - - private ZonedDateTime zonedDateTime(String value) - { - return ZONED_DATE_TIME_FORMAT.parse(value, ZonedDateTime::from); - } - - @Test - public void testShowFunctions() - { - MaterializedResult result = computeActual("SHOW FUNCTIONS"); - ImmutableMultimap functions = Multimaps.index(result.getMaterializedRows(), input -> { - assertEquals(input.getFieldCount(), 6); - return (String) input.getField(0); - }); - - assertTrue(functions.containsKey("avg"), "Expected function names " + functions + " to contain 'avg'"); - assertEquals(functions.get("avg").asList().size(), 6); - assertEquals(functions.get("avg").asList().get(0).getField(1), "decimal(p,s)"); - assertEquals(functions.get("avg").asList().get(0).getField(2), "decimal(p,s)"); - assertEquals(functions.get("avg").asList().get(0).getField(3), "aggregate"); - assertEquals(functions.get("avg").asList().get(1).getField(1), "double"); - assertEquals(functions.get("avg").asList().get(1).getField(2), "bigint"); - assertEquals(functions.get("avg").asList().get(1).getField(3), "aggregate"); - assertEquals(functions.get("avg").asList().get(2).getField(1), "double"); - assertEquals(functions.get("avg").asList().get(2).getField(2), "double"); - assertEquals(functions.get("avg").asList().get(2).getField(3), "aggregate"); - assertEquals(functions.get("avg").asList().get(3).getField(1), "interval day to second"); - assertEquals(functions.get("avg").asList().get(3).getField(2), "interval day to second"); - assertEquals(functions.get("avg").asList().get(3).getField(3), "aggregate"); - assertEquals(functions.get("avg").asList().get(4).getField(1), "interval year to month"); - assertEquals(functions.get("avg").asList().get(4).getField(2), "interval year to 
month"); - assertEquals(functions.get("avg").asList().get(4).getField(3), "aggregate"); - assertEquals(functions.get("avg").asList().get(5).getField(1), "real"); - assertEquals(functions.get("avg").asList().get(5).getField(2), "real"); - assertEquals(functions.get("avg").asList().get(5).getField(3), "aggregate"); - - assertTrue(functions.containsKey("abs"), "Expected function names " + functions + " to contain 'abs'"); - assertEquals(functions.get("abs").asList().get(0).getField(3), "scalar"); - assertEquals(functions.get("abs").asList().get(0).getField(4), true); - - assertTrue(functions.containsKey("rand"), "Expected function names " + functions + " to contain 'rand'"); - assertEquals(functions.get("rand").asList().get(0).getField(3), "scalar"); - assertEquals(functions.get("rand").asList().get(0).getField(4), false); - - assertTrue(functions.containsKey("rank"), "Expected function names " + functions + " to contain 'rank'"); - assertEquals(functions.get("rank").asList().get(0).getField(3), "window"); - - assertTrue(functions.containsKey("rank"), "Expected function names " + functions + " to contain 'split_part'"); - assertEquals(functions.get("split_part").asList().get(0).getField(1), "varchar(x)"); - assertEquals(functions.get("split_part").asList().get(0).getField(2), "varchar(x), varchar(y), bigint"); - assertEquals(functions.get("split_part").asList().get(0).getField(3), "scalar"); - - assertFalse(functions.containsKey("like"), "Expected function names " + functions + " not to contain 'like'"); - } - - @Test - public void testInformationSchemaFiltering() - { - assertQuery( - "SELECT table_name FROM information_schema.tables WHERE table_name = 'orders' LIMIT 1", - "SELECT 'orders' table_name"); - assertQuery( - "SELECT table_name FROM information_schema.columns WHERE data_type = 'bigint' AND table_name = 'customer' and column_name = 'custkey' LIMIT 1", - "SELECT 'customer' table_name"); - } - - @Test - public void testInformationSchemaUppercaseName() - { - 
assertQuery( - "SELECT table_name FROM information_schema.tables WHERE table_catalog = 'LOCAL'", - "SELECT '' WHERE false"); - assertQuery( - "SELECT table_name FROM information_schema.tables WHERE table_schema = 'TINY'", - "SELECT '' WHERE false"); - assertQuery( - "SELECT table_name FROM information_schema.tables WHERE table_name = 'ORDERS'", - "SELECT '' WHERE false"); - } - - @Test - public void testSelectColumnOfNulls() - { - // Currently nulls can confuse the local planner, so select some - assertQueryOrdered("SELECT CAST(NULL AS VARCHAR), CAST(NULL AS BIGINT) FROM orders ORDER BY 1"); - } - - @Test - public void testSelectCaseInsensitive() - { - assertQuery("SELECT ORDERKEY FROM ORDERS"); - assertQuery("SELECT OrDeRkEy FROM OrDeRs"); - } - - @Test - public void testShowSession() - { - Session session = new Session( - getSession().getQueryId(), - Optional.empty(), - getSession().isClientTransactionSupport(), - getSession().getIdentity(), - getSession().getSource(), - getSession().getCatalog(), - getSession().getSchema(), - getSession().getPath(), - getSession().getTraceToken(), - getSession().getTimeZoneKey(), - getSession().getLocale(), - getSession().getRemoteUserAddress(), - getSession().getUserAgent(), - getSession().getClientInfo(), - getSession().getClientTags(), - getSession().getClientCapabilities(), - getSession().getResourceEstimates(), - getSession().getStartTime(), - ImmutableMap.builder() - .put("test_string", "foo string") - .put("test_long", "424242") - .build(), - ImmutableMap.of(), - ImmutableMap.of(TESTING_CATALOG, ImmutableMap.builder() - .put("connector_string", "bar string") - .put("connector_long", "11") - .build()), - getQueryRunner().getMetadata().getSessionPropertyManager(), - getSession().getPreparedStatements()); - MaterializedResult result = computeActual(session, "SHOW SESSION"); - - ImmutableMap properties = Maps.uniqueIndex(result.getMaterializedRows(), input -> { - assertEquals(input.getFieldCount(), 5); - return (String) 
input.getField(0); - }); - - assertEquals(properties.get("test_string"), new MaterializedRow(1, "test_string", "foo string", "test default", "varchar", "test string property")); - assertEquals(properties.get("test_long"), new MaterializedRow(1, "test_long", "424242", "42", "bigint", "test long property")); - assertEquals(properties.get(TESTING_CATALOG + ".connector_string"), - new MaterializedRow(1, TESTING_CATALOG + ".connector_string", "bar string", "connector default", "varchar", "connector string property")); - assertEquals(properties.get(TESTING_CATALOG + ".connector_long"), - new MaterializedRow(1, TESTING_CATALOG + ".connector_long", "11", "33", "bigint", "connector long property")); - } - - @Test - public void testTry() - { - // divide by zero - assertQuery( - "SELECT linenumber, sum(TRY(100/(CAST (tax*10 AS BIGINT)))) FROM lineitem GROUP BY linenumber", - "SELECT linenumber, sum(100/(CAST (tax*10 AS BIGINT))) FROM lineitem WHERE CAST(tax*10 AS BIGINT) <> 0 GROUP BY linenumber"); - - // invalid cast - assertQuery( - "SELECT TRY(CAST(IF(round(totalprice) % 2 = 0, CAST(totalprice AS VARCHAR), '^&$' || CAST(totalprice AS VARCHAR)) AS DOUBLE)) FROM orders", - "SELECT CASE WHEN round(totalprice) % 2 = 0 THEN totalprice ELSE null END FROM orders"); - - // invalid function argument - assertQuery( - "SELECT COUNT(TRY(to_base(100, CAST(round(totalprice/100) AS BIGINT)))) FROM orders", - "SELECT SUM(CASE WHEN CAST(round(totalprice/100) AS BIGINT) BETWEEN 2 AND 36 THEN 1 ELSE 0 END) FROM orders"); - - // as part of a complex expression - assertQuery( - "SELECT COUNT(CAST(orderkey AS VARCHAR) || TRY(to_base(100, CAST(round(totalprice/100) AS BIGINT)))) FROM orders", - "SELECT SUM(CASE WHEN CAST(round(totalprice/100) AS BIGINT) BETWEEN 2 AND 36 THEN 1 ELSE 0 END) FROM orders"); - - // missing function argument - assertQueryFails("SELECT TRY()", "line 1:8: The 'try' function must have exactly one argument"); - - // check that TRY is not pushed down - 
assertQueryFails("SELECT TRY(x) IS NULL FROM (SELECT 1/y AS x FROM (VALUES 1, 2, 3, 0, 4) t(y))", "Division by zero"); - assertQuery("SELECT x IS NULL FROM (SELECT TRY(1/y) AS x FROM (VALUES 3, 0, 4) t(y))", "VALUES false, true, false"); - - // test try with lambda function - assertQuery("SELECT TRY(apply(5, x -> x + 1) / 0)", "SELECT NULL"); - assertQuery("SELECT TRY(apply(5 + RANDOM(1), x -> x + 1) / 0)", "SELECT NULL"); - assertQuery("SELECT apply(5 + RANDOM(1), x -> x + TRY(1 / 0))", "SELECT NULL"); - - // test try with invalid JSON - assertQuery("SELECT JSON_FORMAT(TRY(JSON 'INVALID'))", "SELECT NULL"); - assertQuery("SELECT JSON_FORMAT(TRY (JSON_PARSE('INVALID')))", "SELECT NULL"); - - // tests that might be constant folded - assertQuery("SELECT TRY(CAST(NULL AS BIGINT))", "SELECT NULL"); - assertQuery("SELECT TRY(CAST('123' AS BIGINT))", "SELECT 123L"); - assertQuery("SELECT TRY(CAST('foo' AS BIGINT))", "SELECT NULL"); - assertQuery("SELECT TRY(CAST('foo' AS BIGINT)) + TRY(CAST('123' AS BIGINT))", "SELECT NULL"); - assertQuery("SELECT TRY(CAST(CAST(123 AS VARCHAR) AS BIGINT))", "SELECT 123L"); - assertQuery("SELECT COALESCE(CAST(CONCAT('123', CAST(123 AS VARCHAR)) AS BIGINT), 0)", "SELECT 123123L"); - assertQuery("SELECT TRY(CAST(CONCAT('hello', CAST(123 AS VARCHAR)) AS BIGINT))", "SELECT NULL"); - assertQuery("SELECT COALESCE(TRY(CAST(CONCAT('a', CAST(123 AS VARCHAR)) AS INTEGER)), 0)", "SELECT 0"); - assertQuery("SELECT COALESCE(TRY(CAST(CONCAT('a', CAST(123 AS VARCHAR)) AS BIGINT)), 0)", "SELECT 0L"); - assertQuery("SELECT 123 + TRY(ABS(-9223372036854775807 - 1))", "SELECT NULL"); - assertQuery("SELECT JSON_FORMAT(TRY(JSON '[]')) || '123'", "SELECT '[]123'"); - assertQuery("SELECT JSON_FORMAT(TRY(JSON 'INVALID')) || '123'", "SELECT NULL"); - assertQuery("SELECT TRY(2/1)", "SELECT 2"); - assertQuery("SELECT TRY(2/0)", "SELECT null"); - assertQuery("SELECT COALESCE(TRY(2/0), 0)", "SELECT 0"); - assertQuery("SELECT TRY(ABS(-2))", "SELECT 2"); - } - - @Test - 
public void testTryNoMergeProjections() - { - // no regexp specified because the JVM optimizes away exception message constructor if run enough times - assertQueryFails("SELECT TRY(x) FROM (SELECT 1/y AS x FROM (VALUES 1, 2, 3, 0, 4) t(y))", ".*"); - } - - @Test - public void testNoFrom() - { - assertQuery("SELECT 1 + 2, 3 + 4"); - } - - @Test - public void testTopN() - { - assertQuery("SELECT n.name, r.name FROM nation n LEFT JOIN region r ON n.regionkey = r.regionkey ORDER BY n.name LIMIT 1"); - } - - @Test - public void testTopNByMultipleFields() - { - assertQueryOrdered("SELECT orderkey, custkey, orderstatus FROM orders ORDER BY orderkey ASC, custkey ASC LIMIT 10"); - assertQueryOrdered("SELECT orderkey, custkey, orderstatus FROM orders ORDER BY orderkey ASC, custkey DESC LIMIT 10"); - assertQueryOrdered("SELECT orderkey, custkey, orderstatus FROM orders ORDER BY orderkey DESC, custkey ASC LIMIT 10"); - assertQueryOrdered("SELECT orderkey, custkey, orderstatus FROM orders ORDER BY orderkey DESC, custkey DESC LIMIT 10"); - - // now try with order by fields swapped - assertQueryOrdered("SELECT orderkey, custkey, orderstatus FROM orders ORDER BY custkey ASC, orderkey ASC LIMIT 10"); - assertQueryOrdered("SELECT orderkey, custkey, orderstatus FROM orders ORDER BY custkey ASC, orderkey DESC LIMIT 10"); - assertQueryOrdered("SELECT orderkey, custkey, orderstatus FROM orders ORDER BY custkey DESC, orderkey ASC LIMIT 10"); - assertQueryOrdered("SELECT orderkey, custkey, orderstatus FROM orders ORDER BY custkey DESC, orderkey DESC LIMIT 10"); - - // nulls first - assertQueryOrdered("SELECT orderkey, custkey, orderstatus FROM orders ORDER BY nullif(orderkey, 3) ASC NULLS FIRST, custkey ASC LIMIT 10"); - assertQueryOrdered("SELECT orderkey, custkey, orderstatus FROM orders ORDER BY nullif(orderkey, 3) DESC NULLS FIRST, custkey ASC LIMIT 10"); - - // nulls last - assertQueryOrdered("SELECT orderkey, custkey, orderstatus FROM orders ORDER BY nullif(orderkey, 3) ASC NULLS 
LAST LIMIT 10"); - assertQueryOrdered("SELECT orderkey, custkey, orderstatus FROM orders ORDER BY nullif(orderkey, 3) DESC NULLS LAST, custkey ASC LIMIT 10"); - - // assure that default is nulls last - assertQueryOrdered( - "SELECT orderkey, custkey, orderstatus FROM orders ORDER BY nullif(orderkey, 3) ASC, custkey ASC LIMIT 10", - "SELECT orderkey, custkey, orderstatus FROM orders ORDER BY nullif(orderkey, 3) ASC NULLS LAST, custkey ASC LIMIT 10"); - } - - @Test - public void testExchangeWithProjectionPushDown() - { - assertQuery( - "SELECT * FROM \n" + - " (SELECT orderkey + 1 orderkey FROM (SELECT * FROM orders ORDER BY orderkey LIMIT 100)) o \n" + - "JOIN \n" + - " (SELECT orderkey + 1 orderkey FROM (SELECT * FROM orders ORDER BY orderkey LIMIT 100)) o1 \n" + - "ON (o.orderkey = o1.orderkey)"); - } - - @Test - public void testUnionWithProjectionPushDown() - { - assertQuery("SELECT key + 5, status FROM (SELECT orderkey key, orderstatus status FROM orders UNION ALL SELECT orderkey key, linestatus status FROM lineitem)"); - } - - @Test - public void testUnion() - { - assertQuery("SELECT orderkey FROM orders UNION SELECT custkey FROM orders"); - assertQuery("SELECT 123 UNION DISTINCT SELECT 123 UNION ALL SELECT 123"); - assertQuery("SELECT NULL UNION SELECT NULL"); - assertQuery("SELECT NULL, NULL UNION ALL SELECT NULL, NULL FROM nation"); - assertQuery("SELECT 'x', 'y' UNION ALL SELECT name, name FROM nation"); - - // mixed single-node vs fixed vs source-distributed - assertQuery("SELECT orderkey FROM orders UNION ALL SELECT 123 UNION ALL (SELECT custkey FROM orders GROUP BY custkey)"); - } - - @Test - public void testUnionDistinct() - { - assertQuery("SELECT orderkey FROM orders UNION DISTINCT SELECT custkey FROM orders"); - } - - @Test - public void testUnionAll() - { - assertQuery("SELECT orderkey FROM orders UNION ALL SELECT custkey FROM orders"); - } - - @Test - public void testUnionArray() - { - assertQuery("SELECT a[1] FROM (SELECT ARRAY[1] UNION ALL SELECT 
ARRAY[1]) t(a) LIMIT 1", "SELECT 1"); - } - - @Test - public void testChainedUnionsWithOrder() - { - assertQueryOrdered( - "SELECT orderkey FROM orders UNION (SELECT custkey FROM orders UNION SELECT linenumber FROM lineitem) UNION ALL SELECT orderkey FROM lineitem ORDER BY orderkey"); - } - - @Test - public void testUnionWithTopN() - { - assertQuery("SELECT * FROM (" + - " SELECT regionkey FROM nation " + - " UNION ALL " + - " SELECT nationkey FROM nation" + - ") t(a) " + - "ORDER BY a LIMIT 1", - "SELECT 0"); - } - - @Test - public void testUnionWithJoin() - { - assertQuery( - "SELECT * FROM (" + - " SELECT orderdate ds, orderkey FROM orders " + - " UNION ALL " + - " SELECT shipdate ds, orderkey FROM lineitem) a " + - "JOIN orders o ON (a.orderkey = o.orderkey)"); - } - - @Test - public void testUnionWithAggregation() - { - assertQuery( - "SELECT regionkey, count(*) FROM (" + - " SELECT regionkey FROM nation " + - " UNION ALL " + - " SELECT * FROM (VALUES 2, 100) t(regionkey)) " + - "GROUP BY regionkey", - "SELECT * FROM (VALUES (0, 5), (1, 5), (2, 6), (3, 5), (4, 5), (100, 1))"); - - assertQuery( - "SELECT ds, count(*) FROM (" + - " SELECT orderdate ds, orderkey FROM orders " + - " UNION ALL " + - " SELECT shipdate ds, orderkey FROM lineitem) a " + - "GROUP BY ds"); - assertQuery( - "SELECT ds, count(*) FROM (" + - " SELECT orderdate ds, orderkey FROM orders " + - " UNION " + - " SELECT shipdate ds, orderkey FROM lineitem) a " + - "GROUP BY ds"); - assertQuery( - "SELECT ds, count(DISTINCT orderkey) FROM (" + - " SELECT orderdate ds, orderkey FROM orders " + - " UNION " + - " SELECT shipdate ds, orderkey FROM lineitem) a " + - "GROUP BY ds"); - assertQuery( - "SELECT clerk, count(DISTINCT orderstatus) FROM (" + - "SELECT * FROM orders WHERE orderkey=0 " + - " UNION ALL " + - "SELECT * FROM orders WHERE orderkey<>0) " + - "GROUP BY clerk"); - assertQuery( - "SELECT count(clerk) FROM (" + - "SELECT clerk FROM orders WHERE orderkey=0 " + - " UNION ALL " + - "SELECT 
clerk FROM orders WHERE orderkey<>0) " + - "GROUP BY clerk"); - assertQuery( - "SELECT count(orderkey), sum(sc) FROM (" + - " SELECT sum(custkey) sc, orderkey FROM (" + - " SELECT custkey,orderkey, orderkey+1 FROM orders WHERE orderkey=0" + - " UNION ALL " + - " SELECT custkey,orderkey,orderkey+1 FROM orders WHERE orderkey<>0) " + - " GROUP BY orderkey)"); - - assertQuery( - "SELECT count(orderkey), sum(sc) FROM (\n" + - " SELECT sum(custkey) sc, orderkey FROM (\n" + - " SELECT custkey, orderkey, orderkey+1, orderstatus FROM orders WHERE orderkey=0\n" + - " UNION ALL \n" + - " SELECT custkey, orderkey, orderkey+1, orderstatus FROM orders WHERE orderkey<>0) \n" + - " GROUP BY GROUPING SETS ((orderkey, orderstatus), (orderkey)))", - "SELECT count(orderkey), sum(sc) FROM (\n" + - " SELECT sum(custkey) sc, orderkey FROM (\n" + - " SELECT custkey, orderkey, orderkey+1, orderstatus FROM orders WHERE orderkey=0\n" + - " UNION ALL \n" + - " SELECT custkey, orderkey, orderkey+1, orderstatus FROM orders WHERE orderkey<>0) \n" + - " GROUP BY orderkey, orderstatus \n" + - " \n" + - " UNION ALL \n" + - " \n" + - " SELECT sum(custkey) sc, orderkey FROM (\n" + - " SELECT custkey, orderkey, orderkey+1, orderstatus FROM orders WHERE orderkey=0\n" + - " UNION ALL \n" + - " SELECT custkey, orderkey, orderkey+1, orderstatus FROM orders WHERE orderkey<>0) \n" + - " GROUP BY orderkey)"); - } - - @Test - public void testUnionWithUnionAndAggregation() - { - assertQuery( - "SELECT count(*) FROM (" + - "SELECT 1 FROM nation GROUP BY regionkey " + - "UNION ALL " + - "SELECT 1 FROM (" + - " SELECT 1 FROM nation " + - " UNION ALL " + - " SELECT 1 FROM nation))"); - assertQuery( - "SELECT count(*) FROM (" + - "SELECT 1 FROM (" + - " SELECT 1 FROM nation " + - " UNION ALL " + - " SELECT 1 FROM nation)" + - "UNION ALL " + - "SELECT 1 FROM nation GROUP BY regionkey)"); - } - - @Test - public void testUnionWithAggregationAndTableScan() - { - assertQuery( - "SELECT orderkey, 1 FROM orders " + - 
"UNION ALL " + - "SELECT orderkey, count(*) FROM orders GROUP BY 1", - "SELECT orderkey, 1 FROM orders " + - "UNION ALL " + - "SELECT orderkey, count(*) FROM orders GROUP BY orderkey"); - - assertQuery( - "SELECT orderkey, count(*) FROM orders GROUP BY 1 " + - "UNION ALL " + - "SELECT orderkey, 1 FROM orders", - "SELECT orderkey, count(*) FROM orders GROUP BY orderkey " + - "UNION ALL " + - "SELECT orderkey, 1 FROM orders"); - } - - @Test - public void testUnionWithAggregationAndJoin() - { - assertQuery( - "SELECT * FROM ( " + - "SELECT orderkey, count(*) FROM (" + - " SELECT orderdate ds, orderkey FROM orders " + - " UNION ALL " + - " SELECT shipdate ds, orderkey FROM lineitem) a " + - "GROUP BY orderkey) t " + - "JOIN orders o " + - "ON (o.orderkey = t.orderkey)"); - } - - @Test - public void testUnionWithJoinOnNonTranslateableSymbols() - { - assertQuery("SELECT *\n" + - "FROM (SELECT orderdate ds, orderkey\n" + - " FROM orders\n" + - " UNION ALL\n" + - " SELECT shipdate ds, orderkey\n" + - " FROM lineitem) a\n" + - "JOIN orders o\n" + - "ON (substr(cast(a.ds AS VARCHAR), 6, 2) = substr(cast(o.orderdate AS VARCHAR), 6, 2) AND a.orderkey = o.orderkey)"); - } - - @Test - public void testSubqueryUnion() - { - assertQueryOrdered("SELECT * FROM (SELECT orderkey FROM orders UNION SELECT custkey FROM orders UNION SELECT orderkey FROM orders) ORDER BY orderkey LIMIT 1000"); - } - - @Test - public void testUnionWithFilterNotInSelect() - { - assertQuery("SELECT orderkey, orderdate FROM orders WHERE custkey < 1000 UNION ALL SELECT orderkey, shipdate FROM lineitem WHERE linenumber < 2000"); - assertQuery("SELECT orderkey, orderdate FROM orders UNION ALL SELECT orderkey, shipdate FROM lineitem WHERE linenumber < 2000"); - assertQuery("SELECT orderkey, orderdate FROM orders WHERE custkey < 1000 UNION ALL SELECT orderkey, shipdate FROM lineitem"); - } - - @Test - public void testSelectOnlyUnion() - { - assertQuery("SELECT 123, 'foo' UNION ALL SELECT 999, 'bar'"); - } - - @Test 
- public void testMultiColumnUnionAll() - { - assertQuery("SELECT * FROM orders UNION ALL SELECT * FROM orders"); - } - - @Test - public void testUnionRequiringCoercion() - { - assertQuery("VALUES 1 UNION ALL VALUES 1.0, 2", "SELECT * FROM (VALUES 1) UNION ALL SELECT * FROM (VALUES 1.0, 2)"); - assertQuery("(VALUES 1) UNION ALL (VALUES 1.0, 2)", "SELECT * FROM (VALUES 1) UNION ALL SELECT * FROM (VALUES 1.0, 2)"); - assertQuery("SELECT 0, 0 UNION ALL SELECT 1.0, 0"); // This test case generates a RelationPlan whose .outputSymbols is different .root.outputSymbols - assertQuery("SELECT 0, 0, 0, 0 UNION ALL SELECT 0.0, 0.0, 0, 0"); // This test case generates a RelationPlan where multiple positions share the same symbol - assertQuery("SELECT * FROM (VALUES 1) UNION ALL SELECT * FROM (VALUES 1.0, 2)"); - - assertQuery("SELECT * FROM (VALUES 1) UNION SELECT * FROM (VALUES 1.0, 2)", "VALUES 1.0, 2.0"); // H2 produces incorrect result for the original query: 1.0 1.0 2.0 - assertQuery("SELECT * FROM (VALUES (2, 2)) UNION SELECT * FROM (VALUES (1, 1.0))"); - assertQuery("SELECT * FROM (VALUES (NULL, NULL)) UNION SELECT * FROM (VALUES (1, 1.0))"); - assertQuery("SELECT * FROM (VALUES (NULL, NULL)) UNION ALL SELECT * FROM (VALUES (NULL, 1.0))"); - - // Test for https://github.com/prestodb/presto/issues/7496 - // Cast varchar(1) -> varchar(4) for orderstatus in first source of union was not added. It was not done for type-only coercions. - // Then as a result of predicate pushdown orderstatus (without cast) was compared with CAST('aaa' AS varchar(4)) which trigger checkArgument that - // both types of comparison should be equal in DomainTranslator. 
- assertQuery("SELECT a FROM " + - "(" + - " (SELECT orderstatus AS a FROM orders LIMIT 1) " + - "UNION ALL " + - " SELECT 'aaaa' AS a" + - ") " + - "WHERE a = 'aaa'"); - } - - @Test - public void testTableQuery() - { - assertQuery("TABLE orders", "SELECT * FROM orders"); - } - - @Test - public void testTableQueryOrderLimit() - { - assertQueryOrdered("TABLE orders ORDER BY orderkey LIMIT 10", "SELECT * FROM orders ORDER BY orderkey LIMIT 10"); - } - - @Test - public void testTableQueryInUnion() - { - assertQuery("(SELECT * FROM orders ORDER BY orderkey LIMIT 10) UNION ALL TABLE orders", "(SELECT * FROM orders ORDER BY orderkey LIMIT 10) UNION ALL SELECT * FROM orders"); - } - - @Test - public void testTableAsSubquery() - { - assertQueryOrdered("(TABLE orders) ORDER BY orderkey", "(SELECT * FROM orders) ORDER BY orderkey"); - } - - @Test - public void testLimitPushDown() - { - MaterializedResult actual = computeActual( - "(TABLE orders ORDER BY orderkey) UNION ALL " + - "SELECT * FROM orders WHERE orderstatus = 'F' UNION ALL " + - "(TABLE orders ORDER BY orderkey LIMIT 20) UNION ALL " + - "(TABLE orders LIMIT 5) UNION ALL " + - "TABLE orders LIMIT 10"); - MaterializedResult all = computeExpected("SELECT * FROM orders", actual.getTypes()); - - assertEquals(actual.getMaterializedRows().size(), 10); - assertContains(all, actual); - } - - @Test - public void testUnaliasSymbolReferencesWithUnion() - { - assertQuery("SELECT 1, 1, 'a', 'a' UNION ALL SELECT 1, 2, 'a', 'b'"); - } - - @Test - public void testSameInPredicateInProjectionAndFilter() - { - assertQuery("SELECT x IN (SELECT * FROM (VALUES 1))\n" + - "FROM (VALUES 1) t(x)\n" + - "WHERE x IN (SELECT * FROM (VALUES 1))", - "SELECT 1"); - - assertQuery("SELECT x IN (SELECT * FROM (VALUES 1))\n" + - "FROM (VALUES 2) t(x)\n" + - "WHERE x IN (SELECT * FROM (VALUES 1))", - "SELECT 1 WHERE false"); - } - - @Test - public void testScalarSubquery() - { - // nested - assertQuery("SELECT (SELECT (SELECT (SELECT 1)))"); - - // 
aggregation - assertQuery("SELECT * FROM lineitem WHERE orderkey = \n" + - "(SELECT max(orderkey) FROM orders)"); - - // no output - assertQuery("SELECT * FROM lineitem WHERE orderkey = \n" + - "(SELECT orderkey FROM orders WHERE 0=1)"); - - // no output matching with null test - assertQuery("SELECT * FROM lineitem WHERE \n" + - "(SELECT orderkey FROM orders WHERE 0=1) " + - "is null"); - assertQuery("SELECT * FROM lineitem WHERE \n" + - "(SELECT orderkey FROM orders WHERE 0=1) " + - "is not null"); - - // subquery results and in in-predicate - assertQuery("SELECT (SELECT 1) IN (1, 2, 3)"); - assertQuery("SELECT (SELECT 1) IN ( 2, 3)"); - - // multiple subqueries - assertQuery("SELECT (SELECT 1) = (SELECT 3)"); - assertQuery("SELECT (SELECT 1) < (SELECT 3)"); - assertQuery("SELECT COUNT(*) FROM lineitem WHERE " + - "(SELECT min(orderkey) FROM orders)" + - "<" + - "(SELECT max(orderkey) FROM orders)"); - assertQuery("SELECT (SELECT 1), (SELECT 2), (SELECT 3)"); - - // distinct - assertQuery("SELECT DISTINCT orderkey FROM lineitem " + - "WHERE orderkey BETWEEN" + - " (SELECT avg(orderkey) FROM orders) - 10 " + - " AND" + - " (SELECT avg(orderkey) FROM orders) + 10"); - - // subqueries with joins - assertQuery("SELECT o1.orderkey, COUNT(*) " + - "FROM orders o1 " + - "INNER JOIN (SELECT * FROM orders ORDER BY orderkey LIMIT 10) o2 " + - "ON o1.orderkey " + - "BETWEEN (SELECT avg(orderkey) FROM orders) - 10 AND (SELECT avg(orderkey) FROM orders) + 10 " + - "GROUP BY o1.orderkey"); - assertQuery("SELECT o1.orderkey, COUNT(*) " + - "FROM (SELECT * FROM orders ORDER BY orderkey LIMIT 5) o1 " + - "LEFT JOIN (SELECT * FROM orders ORDER BY orderkey LIMIT 10) o2 " + - "ON o1.orderkey " + - "BETWEEN (SELECT avg(orderkey) FROM orders) - 10 AND (SELECT avg(orderkey) FROM orders) + 10 " + - "GROUP BY o1.orderkey"); - assertQuery("SELECT o1.orderkey, COUNT(*) " + - "FROM orders o1 RIGHT JOIN (SELECT * FROM orders ORDER BY orderkey LIMIT 10) o2 " + - "ON o1.orderkey " + - "BETWEEN 
(SELECT avg(orderkey) FROM orders) - 10 AND (SELECT avg(orderkey) FROM orders) + 10 " + - "GROUP BY o1.orderkey"); - assertQuery("SELECT DISTINCT COUNT(*) " + - "FROM (SELECT * FROM orders ORDER BY orderkey LIMIT 5) o1 " + - "FULL JOIN (SELECT * FROM orders ORDER BY orderkey LIMIT 10) o2 " + - "ON o1.orderkey " + - "BETWEEN (SELECT avg(orderkey) FROM orders) - 10 AND (SELECT avg(orderkey) FROM orders) + 10 " + - "GROUP BY o1.orderkey", - "VALUES 1, 10"); - - // subqueries with ORDER BY - assertQuery("SELECT orderkey, totalprice FROM orders ORDER BY (SELECT 2)"); - - // subquery returns multiple rows - String multipleRowsErrorMsg = "Scalar sub-query has returned multiple rows"; - assertQueryFails("SELECT * FROM lineitem WHERE orderkey = (\n" + - "SELECT orderkey FROM orders ORDER BY totalprice)", - multipleRowsErrorMsg); - assertQueryFails("SELECT orderkey, totalprice FROM orders ORDER BY (VALUES 1, 2)", - multipleRowsErrorMsg); - - // exposes a bug in optimize hash generation because EnforceSingleNode does not - // support more than one column from the underlying query - assertQuery("SELECT custkey, (SELECT DISTINCT custkey FROM orders ORDER BY custkey LIMIT 1) FROM orders"); - - // cast scalar sub-query - assertQuery("SELECT 1.0/(SELECT 1), CAST(1.0 AS REAL)/(SELECT 1), 1/(SELECT 1)"); - assertQuery("SELECT 1.0 = (SELECT 1) AND 1 = (SELECT 1), 2.0 = (SELECT 1) WHERE 1.0 = (SELECT 1) AND 1 = (SELECT 1)"); - assertQuery("SELECT 1.0 = (SELECT 1), 2.0 = (SELECT 1), CAST(2.0 AS REAL) = (SELECT 1) WHERE 1.0 = (SELECT 1)"); - - // coerce correlated symbols - assertQuery("SELECT * FROM (VALUES 1) t(a) WHERE 1=(SELECT count(*) WHERE 1.0 = a)", "SELECT 1"); - assertQuery("SELECT * FROM (VALUES 1.0) t(a) WHERE 1=(SELECT count(*) WHERE 1 = a)", "SELECT 1.0"); - } - - @Test - public void testMultipleOccurrencesOfCorrelatedSymbol() - { - @Language("SQL") String expected = - "VALUES " + - "('AFRICA', 'MOZAMBIQUE'), " + - "('AMERICA', 'UNITED STATES'), " + - "('ASIA', 'VIETNAM'), 
" + - "('EUROPE', 'UNITED KINGDOM'), " + - "('MIDDLE EAST', 'SAUDI ARABIA')"; - - // correlated symbol used twice, no coercion - assertQuery( - "SELECT region.name, (SELECT max(name) FROM nation WHERE regionkey * 2 = region.regionkey * 2 AND regionkey = region.regionkey) FROM region", - expected); - - // correlated symbol used twice, first occurrence coerced to double - assertQuery( - "SELECT region.name, (SELECT max(name) FROM nation WHERE CAST(regionkey AS double) = region.regionkey AND regionkey = region.regionkey) FROM region", - expected); - - // correlated symbol used twice, second occurrence coerced to double - assertQuery( - "SELECT region.name, (SELECT max(name) FROM nation WHERE regionkey = region.regionkey AND CAST(regionkey AS double) = region.regionkey) FROM region", - expected); - - // different coercions - assertQuery( - "SELECT region.name, " + - "(SELECT max(name) FROM nation " + - "WHERE CAST(regionkey AS double) = region.regionkey " + // region.regionkey coerced to double - "AND regionkey = region.regionkey " + // no coercion - "AND regionkey * 1.0 = region.regionkey) " + // region.regionkey coerced to decimal - "FROM region", - expected); - } - - @Test - public void testExistsSubquery() - { - // nested - assertQuery("SELECT EXISTS(SELECT NOT EXISTS(SELECT EXISTS(SELECT 1)))"); - - // aggregation - assertQuery("SELECT COUNT(*) FROM lineitem WHERE " + - "EXISTS(SELECT max(orderkey) FROM orders)"); - assertQuery("SELECT COUNT(*) FROM lineitem WHERE " + - "NOT EXISTS(SELECT max(orderkey) FROM orders)"); - assertQuery("SELECT COUNT(*) FROM lineitem WHERE " + - "NOT EXISTS(SELECT orderkey FROM orders WHERE false)"); - - // no output - assertQuery("SELECT COUNT(*) FROM lineitem WHERE " + - "EXISTS(SELECT orderkey FROM orders WHERE false)"); - assertQuery("SELECT COUNT(*) FROM lineitem WHERE " + - "NOT EXISTS(SELECT orderkey FROM orders WHERE false)"); - - // exists with in-predicate - assertQuery("SELECT (EXISTS(SELECT 1)) IN (false)", "SELECT false"); 
- assertQuery("SELECT (NOT EXISTS(SELECT 1)) IN (false)", "SELECT true"); - - assertQuery("SELECT (EXISTS(SELECT 1)) IN (true, false)", "SELECT true"); - assertQuery("SELECT (NOT EXISTS(SELECT 1)) IN (true, false)", "SELECT true"); - - assertQuery("SELECT (EXISTS(SELECT 1 WHERE false)) IN (true, false)", "SELECT true"); - assertQuery("SELECT (NOT EXISTS(SELECT 1 WHERE false)) IN (true, false)", "SELECT true"); - - assertQuery("SELECT (EXISTS(SELECT 1 WHERE false)) IN (false)", "SELECT true"); - assertQuery("SELECT (NOT EXISTS(SELECT 1 WHERE false)) IN (false)", "SELECT false"); - - // multiple exists - assertQuery("SELECT (EXISTS(SELECT 1)) = (EXISTS(SELECT 1)) WHERE NOT EXISTS(SELECT 1)", "SELECT true WHERE false"); - assertQuery("SELECT (EXISTS(SELECT 1)) = (EXISTS(SELECT 3)) WHERE NOT EXISTS(SELECT 1 WHERE false)", "SELECT true"); - assertQuery("SELECT COUNT(*) FROM lineitem WHERE " + - "(EXISTS(SELECT min(orderkey) FROM orders))" + - "=" + - "(NOT EXISTS(SELECT orderkey FROM orders WHERE false))", - "SELECT count(*) FROM lineitem"); - assertQuery("SELECT EXISTS(SELECT 1), EXISTS(SELECT 1), EXISTS(SELECT 3), NOT EXISTS(SELECT 1), NOT EXISTS(SELECT 1 WHERE false)"); - - // distinct - assertQuery("SELECT DISTINCT orderkey FROM lineitem " + - "WHERE EXISTS(SELECT avg(orderkey) FROM orders)"); - - // subqueries used with joins - QueryTemplate.Parameter joinType = parameter("join_type"); - QueryTemplate.Parameter condition = parameter("condition"); - QueryTemplate queryTemplate = queryTemplate( - "SELECT o1.orderkey, COUNT(*) " + - "FROM orders o1 %join_type% JOIN (SELECT * FROM orders LIMIT 10) o2 ON %condition% " + - "GROUP BY o1.orderkey ORDER BY o1.orderkey LIMIT 5", - joinType, - condition); - List conditions = condition.of( - "EXISTS(SELECT avg(orderkey) FROM orders)", - "(SELECT avg(orderkey) FROM orders) > 3"); - for (QueryTemplate.Parameter actualCondition : conditions) { - for (QueryTemplate.Parameter actualJoinType : joinType.of("", "LEFT", "RIGHT")) { - 
assertQuery(queryTemplate.replace(actualJoinType, actualCondition)); - } - assertQuery( - queryTemplate.replace(joinType.of("FULL"), actualCondition), - "VALUES (1, 10), (2, 10), (3, 10), (4, 10), (5, 10)"); - } - - // subqueries with ORDER BY - assertQuery("SELECT orderkey, totalprice FROM orders ORDER BY EXISTS(SELECT 2)"); - assertQuery("SELECT orderkey, totalprice FROM orders ORDER BY NOT(EXISTS(SELECT 2))"); - } - - @Test - public void testScalarSubqueryWithGroupBy() - { - // using the same subquery in query - assertQuery("SELECT linenumber, min(orderkey), (SELECT max(orderkey) FROM orders WHERE orderkey < 7)" + - "FROM lineitem " + - "GROUP BY linenumber"); - - assertQuery("SELECT linenumber, min(orderkey), (SELECT max(orderkey) FROM orders WHERE orderkey < 7)" + - "FROM lineitem " + - "GROUP BY linenumber, (SELECT max(orderkey) FROM orders WHERE orderkey < 7)"); - - assertQuery("SELECT linenumber, min(orderkey) " + - "FROM lineitem " + - "GROUP BY linenumber, (SELECT max(orderkey) FROM orders WHERE orderkey < 7)"); - - assertQuery("SELECT linenumber, min(orderkey) " + - "FROM lineitem " + - "GROUP BY linenumber " + - "HAVING min(orderkey) < (SELECT avg(orderkey) FROM orders WHERE orderkey < 7)"); - - assertQuery("SELECT linenumber, min(orderkey), (SELECT max(orderkey) FROM orders WHERE orderkey < 7)" + - "FROM lineitem " + - "GROUP BY linenumber, (SELECT max(orderkey) FROM orders WHERE orderkey < 7)" + - "HAVING min(orderkey) < (SELECT max(orderkey) FROM orders WHERE orderkey < 7)"); - - // using different subqueries - assertQuery("SELECT linenumber, min(orderkey), (SELECT max(orderkey) FROM orders WHERE orderkey < 7)" + - "FROM lineitem " + - "GROUP BY linenumber, (SELECT sum(orderkey) FROM orders WHERE orderkey < 7)"); - - assertQuery("SELECT linenumber, max(orderkey), (SELECT min(orderkey) FROM orders WHERE orderkey < 5)" + - "FROM lineitem " + - "GROUP BY linenumber " + - "HAVING sum(orderkey) > (SELECT min(orderkey) FROM orders WHERE orderkey < 7)"); - 
- assertQuery("SELECT linenumber, min(orderkey), (SELECT max(orderkey) FROM orders WHERE orderkey < 7)" + - "FROM lineitem " + - "GROUP BY linenumber, (SELECT count(orderkey) FROM orders WHERE orderkey < 7)" + - "HAVING min(orderkey) < (SELECT sum(orderkey) FROM orders WHERE orderkey < 7)"); - } - - @Test - public void testOutputInEnforceSingleRow() - { - assertQuery("SELECT count(*) FROM (SELECT (SELECT 1))"); - assertQuery("SELECT * FROM (SELECT (SELECT 1))"); - assertQueryFails( - "SELECT * FROM (SELECT (SELECT 1, 2))", - "line 1:23: Multiple columns returned by subquery are not yet supported. Found 2"); - } - - @Test - public void testExistsSubqueryWithGroupBy() - { - // using the same subquery in query - assertQuery("SELECT linenumber, min(orderkey), EXISTS(SELECT orderkey FROM orders WHERE orderkey < 7)" + - "FROM lineitem " + - "GROUP BY linenumber"); - - assertQuery("SELECT linenumber, min(orderkey), EXISTS(SELECT orderkey FROM orders WHERE orderkey < 7)" + - "FROM lineitem " + - "GROUP BY linenumber, EXISTS(SELECT orderkey FROM orders WHERE orderkey < 7)"); - - assertQuery("SELECT linenumber, min(orderkey) " + - "FROM lineitem " + - "GROUP BY linenumber, EXISTS(SELECT orderkey FROM orders WHERE orderkey < 7)"); - - assertQuery("SELECT linenumber, min(orderkey) " + - "FROM lineitem " + - "GROUP BY linenumber " + - "HAVING EXISTS(SELECT orderkey FROM orders WHERE orderkey < 7)"); - - assertQuery("SELECT linenumber, min(orderkey), EXISTS(SELECT orderkey FROM orders WHERE orderkey < 7)" + - "FROM lineitem " + - "GROUP BY linenumber, EXISTS(SELECT orderkey FROM orders WHERE orderkey < 7)" + - "HAVING EXISTS(SELECT orderkey FROM orders WHERE orderkey < 7)"); - - // using different subqueries - assertQuery("SELECT linenumber, min(orderkey), EXISTS(SELECT orderkey FROM orders WHERE orderkey < 7)" + - "FROM lineitem " + - "GROUP BY linenumber, EXISTS(SELECT orderkey FROM orders WHERE orderkey < 17)"); - - assertQuery("SELECT linenumber, max(orderkey), EXISTS(SELECT 
orderkey FROM orders WHERE orderkey < 5)" + - "FROM lineitem " + - "GROUP BY linenumber " + - "HAVING EXISTS(SELECT orderkey FROM orders WHERE orderkey < 7)"); - - assertQuery("SELECT linenumber, min(orderkey), EXISTS(SELECT orderkey FROM orders WHERE orderkey < 17)" + - "FROM lineitem " + - "GROUP BY linenumber, EXISTS(SELECT orderkey FROM orders WHERE orderkey < 17)" + - "HAVING EXISTS(SELECT orderkey FROM orders WHERE orderkey < 27)"); - } - - @Test - public void testCorrelatedScalarSubqueries() - { - assertQuery("SELECT (SELECT n.nationkey) FROM nation n"); - assertQuery("SELECT (SELECT 2 * n.nationkey) FROM nation n"); - assertQuery("SELECT nationkey FROM nation n WHERE 2 = (SELECT 2 * n.nationkey)"); - assertQuery("SELECT nationkey FROM nation n ORDER BY (SELECT 2 * n.nationkey)"); - - // group by - assertQuery("SELECT max(n.regionkey), 2 * n.nationkey, (SELECT n.nationkey) FROM nation n GROUP BY n.nationkey"); - assertQuery( - "SELECT max(l.quantity), 2 * l.orderkey FROM lineitem l GROUP BY l.orderkey HAVING max(l.quantity) < (SELECT l.orderkey)"); - assertQuery("SELECT max(l.quantity), 2 * l.orderkey FROM lineitem l GROUP BY l.orderkey, (SELECT l.orderkey)"); - - // join - assertQuery("SELECT * FROM nation n1 JOIN nation n2 ON n1.nationkey = (SELECT n2.nationkey)"); - assertQueryFails( - "SELECT (SELECT l3.* FROM lineitem l2 CROSS JOIN (SELECT l1.orderkey) l3 LIMIT 1) FROM lineitem l1", - UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); - - // subrelation - assertQuery( - "SELECT 1 FROM nation n WHERE 2 * nationkey - 1 = (SELECT * FROM (SELECT n.nationkey))", - "SELECT 1"); // h2 fails to parse this query - - // two level of nesting - assertQuery("SELECT * FROM nation n WHERE 2 = (SELECT (SELECT 2 * n.nationkey))"); - - // redundant LIMIT in subquery - assertQuery("SELECT (SELECT count(*) FROM (VALUES (7,1)) t(orderkey, value) WHERE orderkey = corr_key LIMIT 1) FROM (values 7) t(corr_key)"); - - // explicit LIMIT in subquery - assertQuery("SELECT (SELECT 
count(*) FROM (VALUES (7,1)) t(orderkey, value) WHERE orderkey = corr_key GROUP BY value LIMIT 1) FROM (values 7) t(corr_key)"); - // Limit(1) and non-constant output symbol of the subquery (count) - assertQueryFails("SELECT (SELECT count(*) FROM (VALUES (7,1), (7,2)) t(orderkey, value) WHERE orderkey = corr_key GROUP BY value LIMIT 1) FROM (values 7) t(corr_key)", - UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); - } - - @Test - public void testCorrelatedNonAggregationScalarSubqueries() - { - String subqueryReturnedTooManyRows = "Scalar sub-query has returned multiple rows"; - - assertQuery("SELECT (SELECT 1 WHERE a = 2) FROM (VALUES 1) t(a)", "SELECT null"); - assertQuery("SELECT (SELECT 2 WHERE a = 1) FROM (VALUES 1) t(a)", "SELECT 2"); - assertQueryFails( - "SELECT (SELECT 2 FROM (VALUES 3, 4) WHERE a = 1) FROM (VALUES 1) t(a)", - subqueryReturnedTooManyRows); - - // multiple subquery output projections - assertQueryFails( - "SELECT name FROM nation n WHERE 'AFRICA' = (SELECT 'bleh' FROM region WHERE regionkey > n.regionkey)", - subqueryReturnedTooManyRows); - assertQueryFails( - "SELECT name FROM nation n WHERE 'AFRICA' = (SELECT name FROM region WHERE regionkey > n.regionkey)", - subqueryReturnedTooManyRows); - assertQueryFails( - "SELECT name FROM nation n WHERE 1 = (SELECT 1 FROM region WHERE regionkey > n.regionkey)", - subqueryReturnedTooManyRows); - - // correlation used in subquery output - assertQueryFails( - "SELECT name FROM nation n WHERE 'AFRICA' = (SELECT n.name FROM region WHERE regionkey > n.regionkey)", - UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); - - assertQuery( - "SELECT (SELECT 2 WHERE o.orderkey = 1) FROM orders o ORDER BY orderkey LIMIT 5", - "VALUES 2, null, null, null, null"); - // outputs plain correlated orderkey symbol which causes ambiguity with outer query orderkey symbol - assertQueryFails( - "SELECT (SELECT o.orderkey WHERE o.orderkey = 1) FROM orders o ORDER BY orderkey LIMIT 5", - UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); - 
assertQueryFails( - "SELECT (SELECT o.orderkey * 2 WHERE o.orderkey = 1) FROM orders o ORDER BY orderkey LIMIT 5", - UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); - // correlation used outside the subquery - assertQueryFails( - "SELECT o.orderkey, (SELECT o.orderkey * 2 WHERE o.orderkey = 1) FROM orders o ORDER BY orderkey LIMIT 5", - UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); - - // aggregation with having -// TODO: https://github.com/prestodb/presto/issues/8456 -// assertQuery("SELECT (SELECT avg(totalprice) FROM orders GROUP BY custkey, orderdate HAVING avg(totalprice) < a) FROM (VALUES 900) t(a)"); - - // correlation in predicate - assertQuery("SELECT name FROM nation n WHERE 'AFRICA' = (SELECT name FROM region WHERE regionkey = n.regionkey)"); - - // same correlation in predicate and projection - assertQueryFails( - "SELECT nationkey FROM nation n WHERE " + - "(SELECT n.regionkey * 2 FROM region r WHERE n.regionkey = r.regionkey) > 6", - UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); - - // different correlation in predicate and projection - assertQueryFails( - "SELECT nationkey FROM nation n WHERE " + - "(SELECT n.nationkey * 2 FROM region r WHERE n.regionkey = r.regionkey) > 6", - UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); - - // correlation used in subrelation - assertQuery( - "SELECT nationkey FROM nation n WHERE " + - "(SELECT regionkey * 2 FROM (SELECT regionkey FROM region r WHERE n.regionkey = r.regionkey)) > 6 " + - "ORDER BY 1 LIMIT 3", - "VALUES 4, 10, 11"); // h2 didn't make it - - // with duplicated rows - assertQuery( - "SELECT (SELECT name FROM nation WHERE nationkey = a) FROM (VALUES 1, 1, 2, 3) t(a)", - "VALUES 'ARGENTINA', 'ARGENTINA', 'BRAZIL', 'CANADA'"); // h2 didn't make it - - // returning null when nothing matched - assertQuery( - "SELECT (SELECT name FROM nation WHERE nationkey = a) FROM (VALUES 31) t(a)", - "VALUES null"); - - assertQuery( - "SELECT (SELECT r.name FROM nation n, region r WHERE r.regionkey = n.regionkey AND n.nationkey = 
a) FROM (VALUES 1) t(a)", - "VALUES 'AMERICA'"); - } - - @Test - public void testCorrelatedScalarSubqueriesWithScalarAggregationAndEqualityPredicatesInWhere() - { - assertQuery("SELECT (SELECT count(*) WHERE o.orderkey = 1) FROM orders o"); - assertQuery("SELECT count(*) FROM orders o WHERE 1 = (SELECT count(*) WHERE o.orderkey = 0)"); - assertQuery("SELECT * FROM orders o ORDER BY (SELECT count(*) WHERE o.orderkey = 0)"); - assertQuery( - "SELECT count(*) FROM nation n WHERE " + - "(SELECT count(*) FROM region r WHERE n.regionkey = r.regionkey) > 1"); - assertQueryFails( - "SELECT count(*) FROM nation n WHERE " + - "(SELECT avg(a) FROM (SELECT count(*) FROM region r WHERE n.regionkey = r.regionkey) t(a)) > 1", - UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); - - // with duplicated rows - assertQuery( - "SELECT (SELECT count(*) WHERE a = 1) FROM (VALUES 1, 1, 2, 3) t(a)", - "VALUES true, true, false, false"); - - // group by - assertQuery( - "SELECT max(o.totalprice), o.orderkey, (SELECT count(*) WHERE o.orderkey = 0) " + - "FROM orders o GROUP BY o.orderkey"); - assertQuery( - "SELECT max(o.totalprice), o.orderkey " + - "FROM orders o GROUP BY o.orderkey HAVING 1 = (SELECT count(*) WHERE o.orderkey = 0)"); - assertQuery( - "SELECT max(o.totalprice), o.orderkey FROM orders o " + - "GROUP BY o.orderkey, (SELECT count(*) WHERE o.orderkey = 0)"); - - // join - assertQuery( - "SELECT count(*) " + - "FROM (SELECT * FROM orders ORDER BY orderkey LIMIT 10) o1 " + - "JOIN (SELECT * FROM orders ORDER BY orderkey LIMIT 5) o2 " + - "ON NOT 1 = (SELECT count(*) WHERE o1.orderkey = o2.orderkey)"); - assertQueryFails( - "SELECT count(*) FROM orders o1 LEFT JOIN orders o2 " + - "ON NOT 1 = (SELECT count(*) WHERE o1.orderkey = o2.orderkey)", - "line .*: Correlated subquery in given context is not supported"); - - // subrelation - assertQuery( - "SELECT count(*) FROM orders o " + - "WHERE 1 = (SELECT * FROM (SELECT (SELECT count(*) WHERE o.orderkey = 0)))", - "SELECT count(*) FROM 
orders o WHERE o.orderkey = 0"); - } - - @Test - public void testCorrelatedScalarSubqueriesWithScalarAggregation() - { - // projection - assertQuery( - "SELECT (SELECT round(3 * avg(i.a)) FROM (VALUES 1, 1, 1, 2, 2, 3, 4) i(a) WHERE i.a < o.a AND i.a < 4) " + - "FROM (VALUES 0, 3, 3, 5) o(a)", - "VALUES null, 4, 4, 5"); - - assertQuery( - "SELECT count(*) FROM orders o " + - "WHERE (SELECT avg(i.orderkey) FROM orders i " + - "WHERE o.orderkey < i.orderkey AND i.orderkey % 10000 = 0) > 100", - "VALUES 14999"); // h2 is slow - - // order by - assertQuery( - "SELECT orderkey FROM orders o " + - "ORDER BY " + - " (SELECT avg(i.orderkey) FROM orders i WHERE o.orderkey < i.orderkey AND i.orderkey % 10000 = 0), " + - " orderkey " + - "LIMIT 1", - "VALUES 1"); // h2 is slow - - // group by - assertQuery( - "SELECT max(o.orderdate), o.orderkey, " + - "(SELECT avg(i.orderkey) FROM orders i WHERE o.orderkey < i.orderkey AND i.orderkey % 10000 = 0) " + - "FROM orders o GROUP BY o.orderkey ORDER BY o.orderkey LIMIT 1", - "VALUES ('1996-01-02', 1, 40000)"); // h2 is slow - assertQuery( - "SELECT max(o.orderdate), o.orderkey " + - "FROM orders o " + - "GROUP BY o.orderkey " + - "HAVING 40000 < (SELECT avg(i.orderkey) FROM orders i WHERE o.orderkey < i.orderkey AND i.orderkey % 10000 = 0)" + - "ORDER BY o.orderkey LIMIT 1", - "VALUES ('1996-07-24', 20000)"); // h2 is slow - assertQuery( - "SELECT max(o.orderdate), o.orderkey FROM orders o " + - "GROUP BY o.orderkey, (SELECT avg(i.orderkey) FROM orders i WHERE o.orderkey < i.orderkey AND i.orderkey % 10000 = 0)" + - "ORDER BY o.orderkey LIMIT 1", - "VALUES ('1996-01-02', 1)"); // h2 is slow - - // join - assertQuery( - "SELECT count(*) " + - "FROM (SELECT * FROM orders ORDER BY orderkey LIMIT 10) o1 " + - "JOIN (SELECT * FROM orders ORDER BY orderkey LIMIT 5) o2 " + - "ON NOT 1 = (SELECT avg(i.orderkey) FROM orders i WHERE o1.orderkey < o2.orderkey AND i.orderkey % 10000 = 0)"); - assertQueryFails( - "SELECT count(*) FROM orders o1 
LEFT JOIN orders o2 " + - "ON NOT 1 = (SELECT avg(i.orderkey) FROM orders i WHERE o1.orderkey < o2.orderkey)", - "line .*: Correlated subquery in given context is not supported"); - - // subrelation - assertQuery( - "SELECT count(*) FROM orders o " + - "WHERE 100 < (SELECT * " + - "FROM (SELECT (SELECT avg(i.orderkey) FROM orders i WHERE o.orderkey < i.orderkey AND i.orderkey % 10000 = 0)))", - "VALUES 14999"); // h2 is slow - - // consecutive correlated subqueries with scalar aggregation - assertQuery("SELECT " + - "(SELECT avg(regionkey) " + - " FROM nation n2" + - " WHERE n2.nationkey = n1.nationkey)," + - "(SELECT avg(regionkey)" + - " FROM nation n3" + - " WHERE n3.nationkey = n1.nationkey)" + - "FROM nation n1"); - assertQuery("SELECT" + - "(SELECT avg(regionkey)" + - " FROM nation n2 " + - " WHERE n2.nationkey = n1.nationkey)," + - "(SELECT avg(regionkey)+1 " + - " FROM nation n3 " + - " WHERE n3.nationkey = n1.nationkey)" + - "FROM nation n1"); - - // count in subquery - assertQuery("SELECT * " + - "FROM (VALUES (0), (1), (2), (7)) AS v1(c1) " + - "WHERE v1.c1 > (SELECT count(c1) FROM (VALUES (0), (1), (2)) AS v2(c1) WHERE v1.c1 = v2.c1)", - "VALUES (2), (7)"); - - // count rows - assertQuery("SELECT (SELECT count(*) FROM (VALUES (1, true), (null, true)) inner_table(a, b) WHERE inner_table.b = outer_table.b) FROM (VALUES (true)) outer_table(b)", - "VALUES (2)"); - - // count rows - assertQuery("SELECT (SELECT count() FROM (VALUES (1, true), (null, true)) inner_table(a, b) WHERE inner_table.b = outer_table.b) FROM (VALUES (true)) outer_table(b)", - "VALUES (2)"); - - // count non null values - assertQuery("SELECT (SELECT count(a) FROM (VALUES (1, true), (null, true)) inner_table(a, b) WHERE inner_table.b = outer_table.b) FROM (VALUES (true)) outer_table(b)", - "VALUES (1)"); - } - - @Test - public void testCorrelatedInPredicateSubqueries() - { - assertQuery("SELECT orderkey, clerk IN (SELECT clerk FROM orders s WHERE s.custkey = o.custkey AND s.orderkey < 
o.orderkey) FROM orders o"); - assertQuery("SELECT orderkey FROM orders o WHERE clerk IN (SELECT clerk FROM orders s WHERE s.custkey = o.custkey AND s.orderkey < o.orderkey)"); - - // all cases of IN (as one test query to avoid pruning, over-eager push down) - assertQuery( - "SELECT t1.a, t1.b, " + - " t1.b in (SELECT t2.b " + - " FROM (values (2, 3), (2, 4), (3, 0), (30,NULL)) t2(a, b) " + - " WHERE t1.a - 5 <= t2.a and t2.a <= t1.a and 0 <= t2.a) " + - "from (values (1,1), (2,4), (3,5), (4,NULL), (30,2), (40,NULL) ) t1(a, b) " + - "order by t1.a", - "VALUES (1,1,FALSE), (2,4,TRUE), (3,5,FALSE), (4,NULL,NULL), (30,2,NULL), (40,NULL,FALSE)"); - - // subquery with LIMIT (correlated filter below any unhandled node type) - assertQueryFails( - "SELECT orderkey FROM orders o WHERE clerk IN (SELECT clerk FROM orders s WHERE s.custkey = o.custkey AND s.orderkey < o.orderkey ORDER BY 1 LIMIT 1)", - UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); - - assertQueryFails("SELECT 1 IN (SELECT l.orderkey) FROM lineitem l", UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); - assertQueryFails("SELECT 1 IN (SELECT 2 * l.orderkey) FROM lineitem l", UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); - assertQueryFails("SELECT * FROM lineitem l WHERE 1 IN (SELECT 2 * l.orderkey)", UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); - assertQueryFails("SELECT * FROM lineitem l ORDER BY 1 IN (SELECT 2 * l.orderkey)", UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); - - // group by - assertQueryFails("SELECT max(l.quantity), 2 * l.orderkey, 1 IN (SELECT l.orderkey) FROM lineitem l GROUP BY l.orderkey", UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); - assertQueryFails("SELECT max(l.quantity), 2 * l.orderkey FROM lineitem l GROUP BY l.orderkey HAVING max(l.quantity) IN (SELECT l.orderkey)", UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); - assertQueryFails("SELECT max(l.quantity), 2 * l.orderkey FROM lineitem l GROUP BY l.orderkey, 1 IN (SELECT l.orderkey)", UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); - - // join - 
assertQueryFails("SELECT * FROM lineitem l1 JOIN lineitem l2 ON l1.orderkey IN (SELECT l2.orderkey)", UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); - - // subrelation - assertQueryFails( - "SELECT * FROM lineitem l WHERE (SELECT * FROM (SELECT 1 IN (SELECT 2 * l.orderkey)))", - UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); - - // two level of nesting - assertQueryFails("SELECT * FROM lineitem l WHERE true IN (SELECT 1 IN (SELECT 2 * l.orderkey))", UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); - } - - @Test - public void testCorrelatedExistsSubqueriesWithPrunedCorrelationSymbols() - { - assertQuery("SELECT EXISTS(SELECT o.orderkey) FROM orders o"); - assertQuery("SELECT count(*) FROM orders o WHERE EXISTS(SELECT o.orderkey)"); - assertQuery("SELECT * FROM orders o ORDER BY EXISTS(SELECT o.orderkey)"); - - // group by - assertQuery( - "SELECT max(o.totalprice), o.orderkey, EXISTS(SELECT o.orderkey) FROM orders o GROUP BY o.orderkey"); - assertQuery( - "SELECT max(o.totalprice), o.orderkey " + - "FROM orders o GROUP BY o.orderkey HAVING EXISTS (SELECT o.orderkey)"); - assertQuery( - "SELECT max(o.totalprice), o.orderkey FROM orders o GROUP BY o.orderkey, EXISTS (SELECT o.orderkey)"); - - // join - assertQuery( - "SELECT * FROM orders o JOIN (SELECT * FROM lineitem ORDER BY orderkey LIMIT 2) l " + - "ON NOT EXISTS(SELECT o.orderkey = l.orderkey)"); - - // subrelation - assertQuery( - "SELECT count(*) FROM orders o WHERE (SELECT * FROM (SELECT EXISTS(SELECT o.orderkey)))", - "VALUES 15000"); - } - - @Test - public void testCorrelatedExistsSubqueriesWithEqualityPredicatesInWhere() - { - assertQuery("SELECT EXISTS(SELECT 1 WHERE o.orderkey = 1) FROM orders o"); - assertQuery("SELECT EXISTS(SELECT null WHERE o.orderkey = 1) FROM orders o"); - assertQuery("SELECT count(*) FROM orders o WHERE EXISTS(SELECT 1 WHERE o.orderkey = 0)"); - assertQuery("SELECT * FROM orders o ORDER BY EXISTS(SELECT 1 WHERE o.orderkey = 0)"); - assertQuery( - "SELECT count(*) FROM orders o " + - "WHERE 
EXISTS (SELECT avg(l.orderkey) FROM lineitem l WHERE o.orderkey = l.orderkey)"); - assertQuery( - "SELECT count(*) FROM orders o " + - "WHERE EXISTS (SELECT avg(l.orderkey) FROM lineitem l WHERE o.orderkey = l.orderkey GROUP BY l.linenumber)"); - assertQueryFails( - "SELECT count(*) FROM orders o " + - "WHERE EXISTS (SELECT count(*) FROM lineitem l WHERE o.orderkey = l.orderkey HAVING count(*) > 3)", - UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); - - // with duplicated rows - assertQuery( - "SELECT EXISTS(SELECT 1 WHERE a = 1) FROM (VALUES 1, 1, 2, 3) t(a)", - "VALUES true, true, false, false"); - - // group by - assertQuery( - "SELECT max(o.totalprice), o.orderkey, EXISTS(SELECT 1 WHERE o.orderkey = 0) " + - "FROM orders o GROUP BY o.orderkey"); - assertQuery( - "SELECT max(o.totalprice), o.orderkey " + - "FROM orders o GROUP BY o.orderkey HAVING EXISTS (SELECT 1 WHERE o.orderkey = 0)"); - assertQuery( - "SELECT max(o.totalprice), o.orderkey " + - "FROM orders o GROUP BY o.orderkey, EXISTS (SELECT 1 WHERE o.orderkey = 0)"); - - // join - assertQuery( - "SELECT count(*) " + - "FROM (SELECT * FROM orders ORDER BY orderkey LIMIT 10) o1 " + - "JOIN (SELECT * FROM orders ORDER BY orderkey LIMIT 5) o2 " + - "ON NOT EXISTS(SELECT 1 WHERE o1.orderkey = o2.orderkey)"); - assertQueryFails( - "SELECT count(*) FROM orders o1 LEFT JOIN orders o2 " + - "ON NOT EXISTS(SELECT 1 WHERE o1.orderkey = o2.orderkey)", - "line .*: Correlated subquery in given context is not supported"); - - // subrelation - assertQuery( - "SELECT count(*) FROM orders o WHERE (SELECT * FROM (SELECT EXISTS(SELECT 1 WHERE o.orderkey = 0)))", - "SELECT count(*) FROM orders o WHERE o.orderkey = 0"); - - // not exists - assertQuery( - "SELECT count(*) FROM customer WHERE NOT EXISTS(SELECT * FROM orders WHERE orders.custkey=customer.custkey)", - "VALUES 500"); - } - - @Test - public void testCorrelatedExistsSubqueries() - { - // projection - assertQuery( - "SELECT EXISTS(SELECT 1 FROM (VALUES 1, 1, 1, 2, 2, 3, 
4) i(a) WHERE i.a < o.a AND i.a < 4) " + - "FROM (VALUES 0, 3, 3, 5) o(a)", - "VALUES false, true, true, true"); - assertQuery( - "SELECT EXISTS(SELECT 1 WHERE l.orderkey > 0 OR l.orderkey != 3) " + - "FROM lineitem l LIMIT 1"); - - assertQuery( - "SELECT count(*) FROM orders o " + - "WHERE EXISTS(SELECT 1 FROM orders i WHERE o.orderkey < i.orderkey AND i.orderkey % 1000 = 0)", - "VALUES 14999"); // h2 is slow - assertQuery( - "SELECT count(*) FROM lineitem l " + - "WHERE EXISTS(SELECT 1 WHERE l.orderkey > 0 OR l.orderkey != 3)"); - - // order by - assertQuery( - "SELECT orderkey FROM orders o ORDER BY " + - "EXISTS(SELECT 1 FROM orders i WHERE o.orderkey < i.orderkey AND i.orderkey % 10000 = 0)" + - "LIMIT 1", - "VALUES 60000"); // h2 is slow - assertQuery( - "SELECT orderkey FROM lineitem l ORDER BY " + - "EXISTS(SELECT 1 WHERE l.orderkey > 0 OR l.orderkey != 3)"); - - // group by - assertQuery( - "SELECT max(o.orderdate), o.orderkey, " + - "EXISTS(SELECT 1 FROM orders i WHERE o.orderkey < i.orderkey AND i.orderkey % 10000 = 0) " + - "FROM orders o GROUP BY o.orderkey ORDER BY o.orderkey LIMIT 1", - "VALUES ('1996-01-02', 1, true)"); // h2 is slow - assertQuery( - "SELECT max(o.orderdate), o.orderkey " + - "FROM orders o " + - "GROUP BY o.orderkey " + - "HAVING EXISTS(SELECT 1 FROM orders i WHERE o.orderkey < i.orderkey AND i.orderkey % 10000 = 0)" + - "ORDER BY o.orderkey LIMIT 1", - "VALUES ('1996-01-02', 1)"); // h2 is slow - assertQuery( - "SELECT max(o.orderdate), o.orderkey FROM orders o " + - "GROUP BY o.orderkey, EXISTS(SELECT 1 FROM orders i WHERE o.orderkey < i.orderkey AND i.orderkey % 10000 = 0)" + - "ORDER BY o.orderkey LIMIT 1", - "VALUES ('1996-01-02', 1)"); // h2 is slow - assertQuery( - "SELECT max(l.quantity), l.orderkey, EXISTS(SELECT 1 WHERE l.orderkey > 0 OR l.orderkey != 3) FROM lineitem l " + - "GROUP BY l.orderkey"); - assertQuery( - "SELECT max(l.quantity), l.orderkey FROM lineitem l " + - "GROUP BY l.orderkey " + - "HAVING EXISTS (SELECT 
1 WHERE l.orderkey > 0 OR l.orderkey != 3)"); - assertQuery( - "SELECT max(l.quantity), l.orderkey FROM lineitem l " + - "GROUP BY l.orderkey, EXISTS (SELECT 1 WHERE l.orderkey > 0 OR l.orderkey != 3)"); - - // join - assertQuery( - "SELECT count(*) " + - "FROM (SELECT * FROM orders ORDER BY orderkey LIMIT 10) o1 " + - "JOIN (SELECT * FROM orders ORDER BY orderkey LIMIT 5) o2 " + - "ON NOT EXISTS(SELECT 1 FROM orders i WHERE o1.orderkey < o2.orderkey AND i.orderkey % 10000 = 0)"); - assertQueryFails( - "SELECT count(*) FROM orders o1 LEFT JOIN orders o2 " + - "ON NOT EXISTS(SELECT 1 FROM orders i WHERE o1.orderkey < o2.orderkey)", - "line .*: Correlated subquery in given context is not supported"); - - // subrelation - assertQuery( - "SELECT count(*) FROM orders o " + - "WHERE (SELECT * FROM (SELECT EXISTS(SELECT 1 FROM orders i WHERE o.orderkey < i.orderkey AND i.orderkey % 10000 = 0)))", - "VALUES 14999"); // h2 is slow - assertQuery( - "SELECT count(*) FROM orders o " + - "WHERE (SELECT * FROM (SELECT EXISTS(SELECT 1 WHERE o.orderkey > 10 OR o.orderkey != 3)))", - "VALUES 14999"); - } - - @Test - public void testTwoCorrelatedExistsSubqueries() - { - // This is simplified TPC-H q21 - assertQuery("SELECT\n" + - " count(*) AS numwait\n" + - "FROM\n" + - " nation l1\n" + - "WHERE\n" + - " EXISTS(\n" + - " SELECT *\n" + - " FROM\n" + - " nation l2\n" + - " WHERE\n" + - " l2.nationkey = l1.nationkey\n" + - " )\n" + - " AND NOT EXISTS(\n" + - " SELECT *\n" + - " FROM\n" + - " nation l3\n" + - " WHERE\n" + - " l3.nationkey= l1.nationkey\n" + - " )\n", - "VALUES 0"); // EXISTS predicates are contradictory - } - - @Test - public void testPredicatePushdown() - { - assertQuery("" + - "SELECT *\n" + - "FROM (\n" + - " SELECT orderkey+1 AS a FROM orders WHERE orderstatus = 'F' UNION ALL \n" + - " SELECT orderkey FROM orders WHERE orderkey % 2 = 0 UNION ALL \n" + - " (SELECT orderkey+custkey FROM orders ORDER BY orderkey LIMIT 10)\n" + - ") \n" + - "WHERE a < 20 OR a > 100 \n" 
+ - "ORDER BY a"); - } - - @Test - public void testGroupByKeyPredicatePushdown() - { - assertQuery("" + - "SELECT *\n" + - "FROM (\n" + - " SELECT custkey1, orderstatus1, SUM(totalprice1) totalprice, MAX(custkey2) maxcustkey\n" + - " FROM (\n" + - " SELECT *\n" + - " FROM (\n" + - " SELECT custkey custkey1, orderstatus orderstatus1, CAST(totalprice AS BIGINT) totalprice1, orderkey orderkey1\n" + - " FROM orders\n" + - " ) orders1 \n" + - " JOIN (\n" + - " SELECT custkey custkey2, orderstatus orderstatus2, CAST(totalprice AS BIGINT) totalprice2, orderkey orderkey2\n" + - " FROM orders\n" + - " ) orders2 ON orders1.orderkey1 = orders2.orderkey2\n" + - " ) \n" + - " GROUP BY custkey1, orderstatus1\n" + - ")\n" + - "WHERE custkey1 = maxcustkey\n" + - "AND maxcustkey % 2 = 0 \n" + - "AND orderstatus1 = 'F'\n" + - "AND totalprice > 10000\n" + - "ORDER BY custkey1, orderstatus1, totalprice, maxcustkey"); - } - - @Test - public void testTrivialNonDeterministicPredicatePushdown() - { - assertQuery("SELECT COUNT(*) WHERE rand() >= 0"); - } - - @Test - public void testNonDeterministicTableScanPredicatePushdown() - { - MaterializedResult materializedResult = computeActual("" + - "SELECT COUNT(*)\n" + - "FROM (\n" + - " SELECT *\n" + - " FROM lineitem\n" + - " LIMIT 1000\n" + - ")\n" + - "WHERE rand() > 0.5"); - MaterializedRow row = getOnlyElement(materializedResult.getMaterializedRows()); - assertEquals(row.getFieldCount(), 1); - long count = (Long) row.getField(0); - // Technically non-deterministic unit test but has essentially a next to impossible chance of a false positive - assertTrue(count > 0 && count < 1000); - } - - @Test - public void testNonDeterministicAggregationPredicatePushdown() - { - MaterializedResult materializedResult = computeActual("" + - "SELECT COUNT(*)\n" + - "FROM (\n" + - " SELECT orderkey, COUNT(*)\n" + - " FROM lineitem\n" + - " GROUP BY orderkey\n" + - " LIMIT 1000\n" + - ")\n" + - "WHERE rand() > 0.5"); - MaterializedRow row = 
getOnlyElement(materializedResult.getMaterializedRows()); - assertEquals(row.getFieldCount(), 1); - long count = (Long) row.getField(0); - // Technically non-deterministic unit test but has essentially a next to impossible chance of a false positive - assertTrue(count > 0 && count < 1000); - } + // cast scalar sub-query + assertQuery("SELECT 1.0/(SELECT 1), CAST(1.0 AS REAL)/(SELECT 1), 1/(SELECT 1)"); + assertQuery("SELECT 1.0 = (SELECT 1) AND 1 = (SELECT 1), 2.0 = (SELECT 1) WHERE 1.0 = (SELECT 1) AND 1 = (SELECT 1)"); + assertQuery("SELECT 1.0 = (SELECT 1), 2.0 = (SELECT 1), CAST(2.0 AS REAL) = (SELECT 1) WHERE 1.0 = (SELECT 1)"); - @Test - public void testUnionAllPredicateMoveAroundWithOverlappingProjections() - { - assertQuery("" + - "SELECT COUNT(*)\n" + - "FROM (\n" + - " SELECT orderkey AS x, orderkey AS y\n" + - " FROM orders\n" + - " WHERE orderkey % 3 = 0\n" + - " UNION ALL\n" + - " SELECT orderkey AS x, orderkey AS y\n" + - " FROM orders\n" + - " WHERE orderkey % 2 = 0\n" + - ") a\n" + - "JOIN (\n" + - " SELECT orderkey AS x, orderkey AS y\n" + - " FROM orders\n" + - ") b\n" + - "ON a.x = b.x"); + // coerce correlated symbols + assertQuery("SELECT * FROM (VALUES 1) t(a) WHERE 1=(SELECT count(*) WHERE 1.0 = a)", "SELECT 1"); + assertQuery("SELECT * FROM (VALUES 1.0) t(a) WHERE 1=(SELECT count(*) WHERE 1 = a)", "SELECT 1.0"); } @Test - public void testTableSampleBernoulliBoundaryValues() + public void testExistsSubquery() { - MaterializedResult fullSample = computeActual("SELECT orderkey FROM orders TABLESAMPLE BERNOULLI (100)"); - MaterializedResult emptySample = computeActual("SELECT orderkey FROM orders TABLESAMPLE BERNOULLI (0)"); - MaterializedResult all = computeExpected("SELECT orderkey FROM orders", fullSample.getTypes()); + // nested + assertQuery("SELECT EXISTS(SELECT NOT EXISTS(SELECT EXISTS(SELECT 1)))"); - assertContains(all, fullSample); - assertEquals(emptySample.getMaterializedRows().size(), 0); - } + // aggregation + assertQuery("SELECT 
COUNT(*) FROM lineitem WHERE " + + "EXISTS(SELECT max(orderkey) FROM orders)"); + assertQuery("SELECT COUNT(*) FROM lineitem WHERE " + + "NOT EXISTS(SELECT max(orderkey) FROM orders)"); + assertQuery("SELECT COUNT(*) FROM lineitem WHERE " + + "NOT EXISTS(SELECT orderkey FROM orders WHERE false)"); - @Test - public void testTableSampleBernoulli() - { - DescriptiveStatistics stats = new DescriptiveStatistics(); + // no output + assertQuery("SELECT COUNT(*) FROM lineitem WHERE " + + "EXISTS(SELECT orderkey FROM orders WHERE false)"); + assertQuery("SELECT COUNT(*) FROM lineitem WHERE " + + "NOT EXISTS(SELECT orderkey FROM orders WHERE false)"); - int total = computeExpected("SELECT orderkey FROM orders", ImmutableList.of(BIGINT)).getMaterializedRows().size(); + // exists with in-predicate + assertQuery("SELECT (EXISTS(SELECT 1)) IN (false)", "SELECT false"); + assertQuery("SELECT (NOT EXISTS(SELECT 1)) IN (false)", "SELECT true"); - for (int i = 0; i < 100; i++) { - List values = computeActual("SELECT orderkey FROM orders TABLESAMPLE BERNOULLI (50)").getMaterializedRows(); + assertQuery("SELECT (EXISTS(SELECT 1)) IN (true, false)", "SELECT true"); + assertQuery("SELECT (NOT EXISTS(SELECT 1)) IN (true, false)", "SELECT true"); - assertEquals(values.size(), ImmutableSet.copyOf(values).size(), "TABLESAMPLE produced duplicate rows"); - stats.addValue(values.size() * 1.0 / total); - } + assertQuery("SELECT (EXISTS(SELECT 1 WHERE false)) IN (true, false)", "SELECT true"); + assertQuery("SELECT (NOT EXISTS(SELECT 1 WHERE false)) IN (true, false)", "SELECT true"); - double mean = stats.getGeometricMean(); - assertTrue(mean > 0.45 && mean < 0.55, format("Expected mean sampling rate to be ~0.5, but was %s", mean)); - } + assertQuery("SELECT (EXISTS(SELECT 1 WHERE false)) IN (false)", "SELECT true"); + assertQuery("SELECT (NOT EXISTS(SELECT 1 WHERE false)) IN (false)", "SELECT false"); - @Test - public void testFunctionNotRegistered() - { - assertQueryFails( - "SELECT 
length(1)", - "\\Qline 1:8: Unexpected parameters (integer) for function length. Expected:\\E.*"); - } + // multiple exists + assertQuery("SELECT (EXISTS(SELECT 1)) = (EXISTS(SELECT 1)) WHERE NOT EXISTS(SELECT 1)", "SELECT true WHERE false"); + assertQuery("SELECT (EXISTS(SELECT 1)) = (EXISTS(SELECT 3)) WHERE NOT EXISTS(SELECT 1 WHERE false)", "SELECT true"); + assertQuery("SELECT COUNT(*) FROM lineitem WHERE " + + "(EXISTS(SELECT min(orderkey) FROM orders))" + + "=" + + "(NOT EXISTS(SELECT orderkey FROM orders WHERE false))", + "SELECT count(*) FROM lineitem"); + assertQuery("SELECT EXISTS(SELECT 1), EXISTS(SELECT 1), EXISTS(SELECT 3), NOT EXISTS(SELECT 1), NOT EXISTS(SELECT 1 WHERE false)"); - @Test - public void testFunctionArgumentTypeConstraint() - { - assertQueryFails( - "SELECT greatest(rgb(255, 0, 0))", - "\\Qline 1:8: Unexpected parameters (color) for function greatest. Expected: greatest(E) E:orderable\\E.*"); - } + // distinct + assertQuery("SELECT DISTINCT orderkey FROM lineitem " + + "WHERE EXISTS(SELECT avg(orderkey) FROM orders)"); - @Test - public void testTypeMismatch() - { - assertQueryFails("SELECT 1 <> 'x'", "\\Qline 1:10: '<>' cannot be applied to integer, varchar(1)\\E"); - } + // subqueries used with joins + QueryTemplate.Parameter joinType = parameter("join_type"); + QueryTemplate.Parameter condition = parameter("condition"); + QueryTemplate queryTemplate = queryTemplate( + "SELECT o1.orderkey, COUNT(*) " + + "FROM orders o1 %join_type% JOIN (SELECT * FROM orders LIMIT 10) o2 ON %condition% " + + "GROUP BY o1.orderkey ORDER BY o1.orderkey LIMIT 5", + joinType, + condition); + List conditions = condition.of( + "EXISTS(SELECT avg(orderkey) FROM orders)", + "(SELECT avg(orderkey) FROM orders) > 3"); + for (QueryTemplate.Parameter actualCondition : conditions) { + for (QueryTemplate.Parameter actualJoinType : joinType.of("", "LEFT", "RIGHT")) { + assertQuery(queryTemplate.replace(actualJoinType, actualCondition)); + } + assertQuery( + 
queryTemplate.replace(joinType.of("FULL"), actualCondition), + "VALUES (1, 10), (2, 10), (3, 10), (4, 10), (5, 10)"); + } - @Test - public void testInvalidType() - { - assertQueryFails("SELECT CAST(null AS array(foo))", "\\Qline 1:8: Unknown type: array(foo)\\E"); + // subqueries with ORDER BY + assertQuery("SELECT orderkey, totalprice FROM orders ORDER BY EXISTS(SELECT 2)"); + assertQuery("SELECT orderkey, totalprice FROM orders ORDER BY NOT(EXISTS(SELECT 2))"); } @Test - public void testInvalidTypeInfixOperator() + public void testScalarSubqueryWithGroupBy() { - // Comment on why error message references varchar(214783647) instead of varchar(2) which seems expected result type for concatenation in expression. - // Currently variable argument functions do not play well with arguments using parametrized types. - // The variable argument functions mechanism requires that all the arguments are of exactly same type. We cannot enforce that base must match but parameters may differ. - assertQueryFails("SELECT ('a' || 'z') + (3 * 4) / 5", "\\Qline 1:21: '+' cannot be applied to varchar, integer\\E"); - } + // using the same subquery in query + assertQuery("SELECT linenumber, min(orderkey), (SELECT max(orderkey) FROM orders WHERE orderkey < 7)" + + "FROM lineitem " + + "GROUP BY linenumber"); - @Test - public void testInvalidTypeBetweenOperator() - { - assertQueryFails("SELECT 'a' BETWEEN 3 AND 'z'", "\\Qline 1:12: Cannot check if varchar(1) is BETWEEN integer and varchar(1)\\E"); - } + assertQuery("SELECT linenumber, min(orderkey), (SELECT max(orderkey) FROM orders WHERE orderkey < 7)" + + "FROM lineitem " + + "GROUP BY linenumber, (SELECT max(orderkey) FROM orders WHERE orderkey < 7)"); - @Test - public void testInvalidTypeArray() - { - assertQueryFails("SELECT ARRAY[1, 2, 'a']", "\\Qline 1:20: All ARRAY elements must be the same type: integer\\E"); - } + assertQuery("SELECT linenumber, min(orderkey) " + + "FROM lineitem " + + "GROUP BY linenumber, (SELECT max(orderkey) 
FROM orders WHERE orderkey < 7)"); - @Test - public void testArrayShuffle() - { - List expected = IntStream.rangeClosed(1, 500).boxed().collect(toList()); - Set> distinctResults = new HashSet<>(); + assertQuery("SELECT linenumber, min(orderkey) " + + "FROM lineitem " + + "GROUP BY linenumber " + + "HAVING min(orderkey) < (SELECT avg(orderkey) FROM orders WHERE orderkey < 7)"); - distinctResults.add(expected); - for (int i = 0; i < 3; i++) { - MaterializedResult results = computeActual(format("SELECT shuffle(ARRAY %s) FROM orders LIMIT 10", expected)); - List rows = results.getMaterializedRows(); - assertEquals(rows.size(), 10); + assertQuery("SELECT linenumber, min(orderkey), (SELECT max(orderkey) FROM orders WHERE orderkey < 7)" + + "FROM lineitem " + + "GROUP BY linenumber, (SELECT max(orderkey) FROM orders WHERE orderkey < 7)" + + "HAVING min(orderkey) < (SELECT max(orderkey) FROM orders WHERE orderkey < 7)"); - for (MaterializedRow row : rows) { - @SuppressWarnings("unchecked") - List actual = (List) row.getField(0); + // using different subqueries + assertQuery("SELECT linenumber, min(orderkey), (SELECT max(orderkey) FROM orders WHERE orderkey < 7)" + + "FROM lineitem " + + "GROUP BY linenumber, (SELECT sum(orderkey) FROM orders WHERE orderkey < 7)"); - // check if the result is a correct permutation - assertEqualsIgnoreOrder(actual, expected); + assertQuery("SELECT linenumber, max(orderkey), (SELECT min(orderkey) FROM orders WHERE orderkey < 5)" + + "FROM lineitem " + + "GROUP BY linenumber " + + "HAVING sum(orderkey) > (SELECT min(orderkey) FROM orders WHERE orderkey < 7)"); - distinctResults.add(actual); - } - } - assertTrue(distinctResults.size() >= 24, "shuffle must produce at least 24 distinct results"); + assertQuery("SELECT linenumber, min(orderkey), (SELECT max(orderkey) FROM orders WHERE orderkey < 7)" + + "FROM lineitem " + + "GROUP BY linenumber, (SELECT count(orderkey) FROM orders WHERE orderkey < 7)" + + "HAVING min(orderkey) < (SELECT 
sum(orderkey) FROM orders WHERE orderkey < 7)"); } @Test - public void testNonReservedTimeWords() + public void testExistsSubqueryWithGroupBy() { - assertQuery( - "SELECT TIME, TIMESTAMP, DATE, INTERVAL FROM (SELECT 1 TIME, 2 TIMESTAMP, 3 DATE, 4 INTERVAL)", - "VALUES (1, 2, 3, 4)"); - } + // using the same subquery in query + assertQuery("SELECT linenumber, min(orderkey), EXISTS(SELECT orderkey FROM orders WHERE orderkey < 7)" + + "FROM lineitem " + + "GROUP BY linenumber"); - @Test - public void testCustomAdd() - { - assertQuery( - "SELECT custom_add(orderkey, custkey) FROM orders", - "SELECT orderkey + custkey FROM orders"); - } + assertQuery("SELECT linenumber, min(orderkey), EXISTS(SELECT orderkey FROM orders WHERE orderkey < 7)" + + "FROM lineitem " + + "GROUP BY linenumber, EXISTS(SELECT orderkey FROM orders WHERE orderkey < 7)"); - @Test - public void testCustomSum() - { - @Language("SQL") String sql = "SELECT orderstatus, custom_sum(orderkey) FROM orders GROUP BY orderstatus"; - assertQuery(sql, sql.replace("custom_sum", "sum")); - } + assertQuery("SELECT linenumber, min(orderkey) " + + "FROM lineitem " + + "GROUP BY linenumber, EXISTS(SELECT orderkey FROM orders WHERE orderkey < 7)"); - @Test - public void testCustomRank() - { - @Language("SQL") String sql = "" + - "SELECT orderstatus, clerk, sales\n" + - ", custom_rank() OVER (PARTITION BY orderstatus ORDER BY sales DESC) rnk\n" + - "FROM (\n" + - " SELECT orderstatus, clerk, sum(totalprice) sales\n" + - " FROM orders\n" + - " GROUP BY orderstatus, clerk\n" + - ")\n" + - "ORDER BY orderstatus, clerk"; + assertQuery("SELECT linenumber, min(orderkey) " + + "FROM lineitem " + + "GROUP BY linenumber " + + "HAVING EXISTS(SELECT orderkey FROM orders WHERE orderkey < 7)"); - assertEquals(computeActual(sql), computeActual(sql.replace("custom_rank", "rank"))); - } + assertQuery("SELECT linenumber, min(orderkey), EXISTS(SELECT orderkey FROM orders WHERE orderkey < 7)" + + "FROM lineitem " + + "GROUP BY linenumber, 
EXISTS(SELECT orderkey FROM orders WHERE orderkey < 7)" + + "HAVING EXISTS(SELECT orderkey FROM orders WHERE orderkey < 7)"); - @Test - public void testApproxSetBigint() - { - MaterializedResult actual = computeActual("SELECT cardinality(approx_set(custkey)) FROM orders"); + // using different subqueries + assertQuery("SELECT linenumber, min(orderkey), EXISTS(SELECT orderkey FROM orders WHERE orderkey < 7)" + + "FROM lineitem " + + "GROUP BY linenumber, EXISTS(SELECT orderkey FROM orders WHERE orderkey < 17)"); - MaterializedResult expected = resultBuilder(getSession(), BIGINT) - .row(1002L) - .build(); + assertQuery("SELECT linenumber, max(orderkey), EXISTS(SELECT orderkey FROM orders WHERE orderkey < 5)" + + "FROM lineitem " + + "GROUP BY linenumber " + + "HAVING EXISTS(SELECT orderkey FROM orders WHERE orderkey < 7)"); - assertEquals(actual.getMaterializedRows(), expected.getMaterializedRows()); + assertQuery("SELECT linenumber, min(orderkey), EXISTS(SELECT orderkey FROM orders WHERE orderkey < 17)" + + "FROM lineitem " + + "GROUP BY linenumber, EXISTS(SELECT orderkey FROM orders WHERE orderkey < 17)" + + "HAVING EXISTS(SELECT orderkey FROM orders WHERE orderkey < 27)"); } @Test - public void testApproxSetVarchar() + public void testCorrelatedScalarSubqueries() { - MaterializedResult actual = computeActual("SELECT cardinality(approx_set(CAST(custkey AS VARCHAR))) FROM orders"); + assertQuery("SELECT (SELECT n.nationkey) FROM nation n"); + assertQuery("SELECT (SELECT 2 * n.nationkey) FROM nation n"); + assertQuery("SELECT nationkey FROM nation n WHERE 2 = (SELECT 2 * n.nationkey)"); + assertQuery("SELECT nationkey FROM nation n ORDER BY (SELECT 2 * n.nationkey)"); - MaterializedResult expected = resultBuilder(getSession(), BIGINT) - .row(1024L) - .build(); + // group by + assertQuery("SELECT max(n.regionkey), 2 * n.nationkey, (SELECT n.nationkey) FROM nation n GROUP BY n.nationkey"); + assertQuery( + "SELECT max(l.quantity), 2 * l.orderkey FROM lineitem l GROUP 
BY l.orderkey HAVING max(l.quantity) < (SELECT l.orderkey)"); + assertQuery("SELECT max(l.quantity), 2 * l.orderkey FROM lineitem l GROUP BY l.orderkey, (SELECT l.orderkey)"); - assertEquals(actual.getMaterializedRows(), expected.getMaterializedRows()); - } + // join + assertQuery("SELECT * FROM nation n1 JOIN nation n2 ON n1.nationkey = (SELECT n2.nationkey)"); + assertQueryFails( + "SELECT (SELECT l3.* FROM lineitem l2 CROSS JOIN (SELECT l1.orderkey) l3 LIMIT 1) FROM lineitem l1", + UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); - @Test - public void testApproxSetDouble() - { - MaterializedResult actual = computeActual("SELECT cardinality(approx_set(CAST(custkey AS DOUBLE))) FROM orders"); + // subrelation + assertQuery( + "SELECT 1 FROM nation n WHERE 2 * nationkey - 1 = (SELECT * FROM (SELECT n.nationkey))", + "SELECT 1"); // h2 fails to parse this query - MaterializedResult expected = resultBuilder(getSession(), BIGINT) - .row(1014L) - .build(); + // two level of nesting + assertQuery("SELECT * FROM nation n WHERE 2 = (SELECT (SELECT 2 * n.nationkey))"); + + // redundant LIMIT in subquery + assertQuery("SELECT (SELECT count(*) FROM (VALUES (7,1)) t(orderkey, value) WHERE orderkey = corr_key LIMIT 1) FROM (values 7) t(corr_key)"); - assertEquals(actual.getMaterializedRows(), expected.getMaterializedRows()); + // explicit LIMIT in subquery + assertQuery("SELECT (SELECT count(*) FROM (VALUES (7,1)) t(orderkey, value) WHERE orderkey = corr_key GROUP BY value LIMIT 1) FROM (values 7) t(corr_key)"); + // Limit(1) and non-constant output symbol of the subquery (count) + assertQueryFails("SELECT (SELECT count(*) FROM (VALUES (7,1), (7,2)) t(orderkey, value) WHERE orderkey = corr_key GROUP BY value LIMIT 1) FROM (values 7) t(corr_key)", + UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); } @Test - public void testApproxSetBigintGroupBy() + public void testCorrelatedNonAggregationScalarSubqueries() { - MaterializedResult actual = computeActual("" + - "SELECT orderstatus, 
cardinality(approx_set(custkey)) " + - "FROM orders " + - "GROUP BY orderstatus"); + String subqueryReturnedTooManyRows = "Scalar sub-query has returned multiple rows"; - MaterializedResult expected = resultBuilder(getSession(), actual.getTypes()) - .row("O", 1001L) - .row("F", 998L) - .row("P", 304L) - .build(); + assertQuery("SELECT (SELECT 1 WHERE a = 2) FROM (VALUES 1) t(a)", "SELECT null"); + assertQuery("SELECT (SELECT 2 WHERE a = 1) FROM (VALUES 1) t(a)", "SELECT 2"); + assertQueryFails( + "SELECT (SELECT 2 FROM (VALUES 3, 4) WHERE a = 1) FROM (VALUES 1) t(a)", + subqueryReturnedTooManyRows); + + // multiple subquery output projections + assertQueryFails( + "SELECT name FROM nation n WHERE 'AFRICA' = (SELECT 'bleh' FROM region WHERE regionkey > n.regionkey)", + subqueryReturnedTooManyRows); + assertQueryFails( + "SELECT name FROM nation n WHERE 'AFRICA' = (SELECT name FROM region WHERE regionkey > n.regionkey)", + subqueryReturnedTooManyRows); + assertQueryFails( + "SELECT name FROM nation n WHERE 1 = (SELECT 1 FROM region WHERE regionkey > n.regionkey)", + subqueryReturnedTooManyRows); - assertEqualsIgnoreOrder(actual.getMaterializedRows(), expected.getMaterializedRows()); - } + // correlation used in subquery output + assertQueryFails( + "SELECT name FROM nation n WHERE 'AFRICA' = (SELECT n.name FROM region WHERE regionkey > n.regionkey)", + UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); - @Test - public void testApproxSetVarcharGroupBy() - { - MaterializedResult actual = computeActual("" + - "SELECT orderstatus, cardinality(approx_set(CAST(custkey AS VARCHAR))) " + - "FROM orders " + - "GROUP BY orderstatus"); + assertQuery( + "SELECT (SELECT 2 WHERE o.orderkey = 1) FROM orders o ORDER BY orderkey LIMIT 5", + "VALUES 2, null, null, null, null"); + // outputs plain correlated orderkey symbol which causes ambiguity with outer query orderkey symbol + assertQueryFails( + "SELECT (SELECT o.orderkey WHERE o.orderkey = 1) FROM orders o ORDER BY orderkey LIMIT 5", + 
UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); + assertQueryFails( + "SELECT (SELECT o.orderkey * 2 WHERE o.orderkey = 1) FROM orders o ORDER BY orderkey LIMIT 5", + UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); + // correlation used outside the subquery + assertQueryFails( + "SELECT o.orderkey, (SELECT o.orderkey * 2 WHERE o.orderkey = 1) FROM orders o ORDER BY orderkey LIMIT 5", + UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); - MaterializedResult expected = resultBuilder(getSession(), actual.getTypes()) - .row("O", 1021L) - .row("F", 1019L) - .row("P", 304L) - .build(); + // aggregation with having +// TODO: https://github.com/prestodb/presto/issues/8456 +// assertQuery("SELECT (SELECT avg(totalprice) FROM orders GROUP BY custkey, orderdate HAVING avg(totalprice) < a) FROM (VALUES 900) t(a)"); - assertEqualsIgnoreOrder(actual.getMaterializedRows(), expected.getMaterializedRows()); - } + // correlation in predicate + assertQuery("SELECT name FROM nation n WHERE 'AFRICA' = (SELECT name FROM region WHERE regionkey = n.regionkey)"); - @Test - public void testApproxSetDoubleGroupBy() - { - MaterializedResult actual = computeActual("" + - "SELECT orderstatus, cardinality(approx_set(CAST(custkey AS DOUBLE))) " + - "FROM orders " + - "GROUP BY orderstatus"); + // same correlation in predicate and projection + assertQueryFails( + "SELECT nationkey FROM nation n WHERE " + + "(SELECT n.regionkey * 2 FROM region r WHERE n.regionkey = r.regionkey) > 6", + UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); - MaterializedResult expected = resultBuilder(getSession(), actual.getTypes()) - .row("O", 1011L) - .row("F", 1011L) - .row("P", 304L) - .build(); + // different correlation in predicate and projection + assertQueryFails( + "SELECT nationkey FROM nation n WHERE " + + "(SELECT n.nationkey * 2 FROM region r WHERE n.regionkey = r.regionkey) > 6", + UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); - assertEqualsIgnoreOrder(actual.getMaterializedRows(), expected.getMaterializedRows()); - } + // 
correlation used in subrelation + assertQuery( + "SELECT nationkey FROM nation n WHERE " + + "(SELECT regionkey * 2 FROM (SELECT regionkey FROM region r WHERE n.regionkey = r.regionkey)) > 6 " + + "ORDER BY 1 LIMIT 3", + "VALUES 4, 10, 11"); // h2 didn't make it - @Test - public void testApproxSetWithNulls() - { - MaterializedResult actual = computeActual("SELECT cardinality(approx_set(IF(orderstatus = 'O', custkey))) FROM orders"); + // with duplicated rows + assertQuery( + "SELECT (SELECT name FROM nation WHERE nationkey = a) FROM (VALUES 1, 1, 2, 3) t(a)", + "VALUES 'ARGENTINA', 'ARGENTINA', 'BRAZIL', 'CANADA'"); // h2 didn't make it - MaterializedResult expected = resultBuilder(getSession(), actual.getTypes()) - .row(1001L) - .build(); + // returning null when nothing matched + assertQuery( + "SELECT (SELECT name FROM nation WHERE nationkey = a) FROM (VALUES 31) t(a)", + "VALUES null"); - assertEquals(actual.getMaterializedRows(), expected.getMaterializedRows()); + assertQuery( + "SELECT (SELECT r.name FROM nation n, region r WHERE r.regionkey = n.regionkey AND n.nationkey = a) FROM (VALUES 1) t(a)", + "VALUES 'AMERICA'"); } @Test - public void testApproxSetOnlyNulls() + public void testCorrelatedScalarSubqueriesWithScalarAggregationAndEqualityPredicatesInWhere() { - MaterializedResult actual = computeActual("SELECT cardinality(approx_set(null)) FROM orders"); - - MaterializedResult expected = resultBuilder(getSession(), actual.getTypes()) - .row(new Object[] {null}) - .build(); + assertQuery("SELECT (SELECT count(*) WHERE o.orderkey = 1) FROM orders o"); + assertQuery("SELECT count(*) FROM orders o WHERE 1 = (SELECT count(*) WHERE o.orderkey = 0)"); + assertQuery("SELECT * FROM orders o ORDER BY (SELECT count(*) WHERE o.orderkey = 0)"); + assertQuery( + "SELECT count(*) FROM nation n WHERE " + + "(SELECT count(*) FROM region r WHERE n.regionkey = r.regionkey) > 1"); + assertQueryFails( + "SELECT count(*) FROM nation n WHERE " + + "(SELECT avg(a) FROM (SELECT 
count(*) FROM region r WHERE n.regionkey = r.regionkey) t(a)) > 1", + UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); - assertEquals(actual.getMaterializedRows(), expected.getMaterializedRows()); - } + // with duplicated rows + assertQuery( + "SELECT (SELECT count(*) WHERE a = 1) FROM (VALUES 1, 1, 2, 3) t(a)", + "VALUES true, true, false, false"); - @Test - public void testApproxSetGroupByWithOnlyNullsInOneGroup() - { - MaterializedResult actual = computeActual("" + - "SELECT orderstatus, cardinality(approx_set(IF(orderstatus != 'O', custkey))) " + - "FROM orders " + - "GROUP BY orderstatus"); + // group by + assertQuery( + "SELECT max(o.totalprice), o.orderkey, (SELECT count(*) WHERE o.orderkey = 0) " + + "FROM orders o GROUP BY o.orderkey"); + assertQuery( + "SELECT max(o.totalprice), o.orderkey " + + "FROM orders o GROUP BY o.orderkey HAVING 1 = (SELECT count(*) WHERE o.orderkey = 0)"); + assertQuery( + "SELECT max(o.totalprice), o.orderkey FROM orders o " + + "GROUP BY o.orderkey, (SELECT count(*) WHERE o.orderkey = 0)"); - MaterializedResult expected = resultBuilder(getSession(), actual.getTypes()) - .row("O", null) - .row("F", 998L) - .row("P", 304L) - .build(); + // join + assertQuery( + "SELECT count(*) " + + "FROM (SELECT * FROM orders ORDER BY orderkey LIMIT 10) o1 " + + "JOIN (SELECT * FROM orders ORDER BY orderkey LIMIT 5) o2 " + + "ON NOT 1 = (SELECT count(*) WHERE o1.orderkey = o2.orderkey)"); + assertQueryFails( + "SELECT count(*) FROM orders o1 LEFT JOIN orders o2 " + + "ON NOT 1 = (SELECT count(*) WHERE o1.orderkey = o2.orderkey)", + "line .*: Correlated subquery in given context is not supported"); - assertEqualsIgnoreOrder(actual.getMaterializedRows(), expected.getMaterializedRows()); + // subrelation + assertQuery( + "SELECT count(*) FROM orders o " + + "WHERE 1 = (SELECT * FROM (SELECT (SELECT count(*) WHERE o.orderkey = 0)))", + "SELECT count(*) FROM orders o WHERE o.orderkey = 0"); } @Test - public void testApproxSetGroupByWithNulls() + public 
void testCorrelatedScalarSubqueriesWithScalarAggregation() { - MaterializedResult actual = computeActual("" + - "SELECT orderstatus, cardinality(approx_set(IF(custkey % 2 <> 0, custkey))) " + - "FROM orders " + - "GROUP BY orderstatus"); - - MaterializedResult expected = resultBuilder(getSession(), actual.getTypes()) - .row("O", 499L) - .row("F", 496L) - .row("P", 153L) - .build(); + // projection + assertQuery( + "SELECT (SELECT round(3 * avg(i.a)) FROM (VALUES 1, 1, 1, 2, 2, 3, 4) i(a) WHERE i.a < o.a AND i.a < 4) " + + "FROM (VALUES 0, 3, 3, 5) o(a)", + "VALUES null, 4, 4, 5"); - assertEqualsIgnoreOrder(actual.getMaterializedRows(), expected.getMaterializedRows()); - } + assertQuery( + "SELECT count(*) FROM orders o " + + "WHERE (SELECT avg(i.orderkey) FROM orders i " + + "WHERE o.orderkey < i.orderkey AND i.orderkey % 10000 = 0) > 100", + "VALUES 14999"); // h2 is slow - @Test - public void testMergeHyperLogLog() - { - MaterializedResult actual = computeActual("SELECT cardinality(merge(create_hll(custkey))) FROM orders"); + // order by + assertQuery( + "SELECT orderkey FROM orders o " + + "ORDER BY " + + " (SELECT avg(i.orderkey) FROM orders i WHERE o.orderkey < i.orderkey AND i.orderkey % 10000 = 0), " + + " orderkey " + + "LIMIT 1", + "VALUES 1"); // h2 is slow - MaterializedResult expected = resultBuilder(getSession(), BIGINT) - .row(1002L) - .build(); + // group by + assertQuery( + "SELECT max(o.orderdate), o.orderkey, " + + "(SELECT avg(i.orderkey) FROM orders i WHERE o.orderkey < i.orderkey AND i.orderkey % 10000 = 0) " + + "FROM orders o GROUP BY o.orderkey ORDER BY o.orderkey LIMIT 1", + "VALUES ('1996-01-02', 1, 40000)"); // h2 is slow + assertQuery( + "SELECT max(o.orderdate), o.orderkey " + + "FROM orders o " + + "GROUP BY o.orderkey " + + "HAVING 40000 < (SELECT avg(i.orderkey) FROM orders i WHERE o.orderkey < i.orderkey AND i.orderkey % 10000 = 0)" + + "ORDER BY o.orderkey LIMIT 1", + "VALUES ('1996-07-24', 20000)"); // h2 is slow + assertQuery( + 
"SELECT max(o.orderdate), o.orderkey FROM orders o " + + "GROUP BY o.orderkey, (SELECT avg(i.orderkey) FROM orders i WHERE o.orderkey < i.orderkey AND i.orderkey % 10000 = 0)" + + "ORDER BY o.orderkey LIMIT 1", + "VALUES ('1996-01-02', 1)"); // h2 is slow - assertEquals(actual.getMaterializedRows(), expected.getMaterializedRows()); - } + // join + assertQuery( + "SELECT count(*) " + + "FROM (SELECT * FROM orders ORDER BY orderkey LIMIT 10) o1 " + + "JOIN (SELECT * FROM orders ORDER BY orderkey LIMIT 5) o2 " + + "ON NOT 1 = (SELECT avg(i.orderkey) FROM orders i WHERE o1.orderkey < o2.orderkey AND i.orderkey % 10000 = 0)"); + assertQueryFails( + "SELECT count(*) FROM orders o1 LEFT JOIN orders o2 " + + "ON NOT 1 = (SELECT avg(i.orderkey) FROM orders i WHERE o1.orderkey < o2.orderkey)", + "line .*: Correlated subquery in given context is not supported"); - @Test - public void testMergeHyperLogLogGroupBy() - { - MaterializedResult actual = computeActual("" + - "SELECT orderstatus, cardinality(merge(create_hll(custkey))) " + - "FROM orders " + - "GROUP BY orderstatus"); + // subrelation + assertQuery( + "SELECT count(*) FROM orders o " + + "WHERE 100 < (SELECT * " + + "FROM (SELECT (SELECT avg(i.orderkey) FROM orders i WHERE o.orderkey < i.orderkey AND i.orderkey % 10000 = 0)))", + "VALUES 14999"); // h2 is slow - MaterializedResult expected = resultBuilder(getSession(), actual.getTypes()) - .row("O", 1001L) - .row("F", 998L) - .row("P", 304L) - .build(); + // consecutive correlated subqueries with scalar aggregation + assertQuery("SELECT " + + "(SELECT avg(regionkey) " + + " FROM nation n2" + + " WHERE n2.nationkey = n1.nationkey)," + + "(SELECT avg(regionkey)" + + " FROM nation n3" + + " WHERE n3.nationkey = n1.nationkey)" + + "FROM nation n1"); + assertQuery("SELECT" + + "(SELECT avg(regionkey)" + + " FROM nation n2 " + + " WHERE n2.nationkey = n1.nationkey)," + + "(SELECT avg(regionkey)+1 " + + " FROM nation n3 " + + " WHERE n3.nationkey = n1.nationkey)" + + "FROM 
nation n1"); - assertEqualsIgnoreOrder(actual.getMaterializedRows(), expected.getMaterializedRows()); - } + // count in subquery + assertQuery("SELECT * " + + "FROM (VALUES (0), (1), (2), (7)) AS v1(c1) " + + "WHERE v1.c1 > (SELECT count(c1) FROM (VALUES (0), (1), (2)) AS v2(c1) WHERE v1.c1 = v2.c1)", + "VALUES (2), (7)"); - @Test - public void testMergeHyperLogLogWithNulls() - { - MaterializedResult actual = computeActual("SELECT cardinality(merge(create_hll(IF(orderstatus = 'O', custkey)))) FROM orders"); + // count rows + assertQuery("SELECT (SELECT count(*) FROM (VALUES (1, true), (null, true)) inner_table(a, b) WHERE inner_table.b = outer_table.b) FROM (VALUES (true)) outer_table(b)", + "VALUES (2)"); - MaterializedResult expected = resultBuilder(getSession(), BIGINT) - .row(1001L) - .build(); + // count rows + assertQuery("SELECT (SELECT count() FROM (VALUES (1, true), (null, true)) inner_table(a, b) WHERE inner_table.b = outer_table.b) FROM (VALUES (true)) outer_table(b)", + "VALUES (2)"); - assertEquals(actual.getMaterializedRows(), expected.getMaterializedRows()); + // count non null values + assertQuery("SELECT (SELECT count(a) FROM (VALUES (1, true), (null, true)) inner_table(a, b) WHERE inner_table.b = outer_table.b) FROM (VALUES (true)) outer_table(b)", + "VALUES (1)"); } @Test - public void testMergeHyperLogLogGroupByWithNulls() + public void testCorrelatedInPredicateSubqueries() { - MaterializedResult actual = computeActual("" + - "SELECT orderstatus, cardinality(merge(create_hll(IF(orderstatus != 'O', custkey)))) " + - "FROM orders " + - "GROUP BY orderstatus"); + assertQuery("SELECT orderkey, clerk IN (SELECT clerk FROM orders s WHERE s.custkey = o.custkey AND s.orderkey < o.orderkey) FROM orders o"); + assertQuery("SELECT orderkey FROM orders o WHERE clerk IN (SELECT clerk FROM orders s WHERE s.custkey = o.custkey AND s.orderkey < o.orderkey)"); - MaterializedResult expected = resultBuilder(getSession(), actual.getTypes()) - .row("O", null) - 
.row("F", 998L) - .row("P", 304L) - .build(); + // all cases of IN (as one test query to avoid pruning, over-eager push down) + assertQuery( + "SELECT t1.a, t1.b, " + + " t1.b in (SELECT t2.b " + + " FROM (values (2, 3), (2, 4), (3, 0), (30,NULL)) t2(a, b) " + + " WHERE t1.a - 5 <= t2.a and t2.a <= t1.a and 0 <= t2.a) " + + "from (values (1,1), (2,4), (3,5), (4,NULL), (30,2), (40,NULL) ) t1(a, b) " + + "order by t1.a", + "VALUES (1,1,FALSE), (2,4,TRUE), (3,5,FALSE), (4,NULL,NULL), (30,2,NULL), (40,NULL,FALSE)"); - assertEqualsIgnoreOrder(actual.getMaterializedRows(), expected.getMaterializedRows()); - } + // subquery with LIMIT (correlated filter below any unhandled node type) + assertQueryFails( + "SELECT orderkey FROM orders o WHERE clerk IN (SELECT clerk FROM orders s WHERE s.custkey = o.custkey AND s.orderkey < o.orderkey ORDER BY 1 LIMIT 1)", + UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); - @Test - public void testMergeHyperLogLogOnlyNulls() - { - MaterializedResult actual = computeActual("SELECT cardinality(merge(CAST (null AS HyperLogLog))) FROM orders"); + assertQueryFails("SELECT 1 IN (SELECT l.orderkey) FROM lineitem l", UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); + assertQueryFails("SELECT 1 IN (SELECT 2 * l.orderkey) FROM lineitem l", UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); + assertQueryFails("SELECT * FROM lineitem l WHERE 1 IN (SELECT 2 * l.orderkey)", UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); + assertQueryFails("SELECT * FROM lineitem l ORDER BY 1 IN (SELECT 2 * l.orderkey)", UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); - MaterializedResult expected = resultBuilder(getSession(), BIGINT) - .row(new Object[] {null}) - .build(); + // group by + assertQueryFails("SELECT max(l.quantity), 2 * l.orderkey, 1 IN (SELECT l.orderkey) FROM lineitem l GROUP BY l.orderkey", UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); + assertQueryFails("SELECT max(l.quantity), 2 * l.orderkey FROM lineitem l GROUP BY l.orderkey HAVING max(l.quantity) IN (SELECT l.orderkey)", 
UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); + assertQueryFails("SELECT max(l.quantity), 2 * l.orderkey FROM lineitem l GROUP BY l.orderkey, 1 IN (SELECT l.orderkey)", UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); - assertEquals(actual.getMaterializedRows(), expected.getMaterializedRows()); - } + // join + assertQueryFails("SELECT * FROM lineitem l1 JOIN lineitem l2 ON l1.orderkey IN (SELECT l2.orderkey)", UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); - @Test - public void testEmptyApproxSet() - { - MaterializedResult actual = computeActual("SELECT cardinality(empty_approx_set())"); - MaterializedResult expected = resultBuilder(getSession(), BIGINT) - .row(0L) - .build(); - assertEquals(actual.getMaterializedRows(), expected.getMaterializedRows()); + // subrelation + assertQueryFails( + "SELECT * FROM lineitem l WHERE (SELECT * FROM (SELECT 1 IN (SELECT 2 * l.orderkey)))", + UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); + + // two level of nesting + assertQueryFails("SELECT * FROM lineitem l WHERE true IN (SELECT 1 IN (SELECT 2 * l.orderkey))", UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); } @Test - public void testMergeEmptyApproxSet() + public void testCorrelatedExistsSubqueriesWithPrunedCorrelationSymbols() { - MaterializedResult actual = computeActual("SELECT cardinality(merge(empty_approx_set())) FROM orders"); - MaterializedResult expected = resultBuilder(getSession(), BIGINT) - .row(0L) - .build(); - assertEquals(actual.getMaterializedRows(), expected.getMaterializedRows()); - } + assertQuery("SELECT EXISTS(SELECT o.orderkey) FROM orders o"); + assertQuery("SELECT count(*) FROM orders o WHERE EXISTS(SELECT o.orderkey)"); + assertQuery("SELECT * FROM orders o ORDER BY EXISTS(SELECT o.orderkey)"); - @Test - public void testMergeEmptyNonEmptyApproxSet() - { - MaterializedResult actual = computeActual("SELECT cardinality(merge(c)) FROM (SELECT create_hll(custkey) c FROM orders UNION ALL SELECT empty_approx_set())"); - MaterializedResult expected = 
resultBuilder(getSession(), BIGINT) - .row(1002L) - .build(); - assertEquals(actual.getMaterializedRows(), expected.getMaterializedRows()); + // group by + assertQuery( + "SELECT max(o.totalprice), o.orderkey, EXISTS(SELECT o.orderkey) FROM orders o GROUP BY o.orderkey"); + assertQuery( + "SELECT max(o.totalprice), o.orderkey " + + "FROM orders o GROUP BY o.orderkey HAVING EXISTS (SELECT o.orderkey)"); + assertQuery( + "SELECT max(o.totalprice), o.orderkey FROM orders o GROUP BY o.orderkey, EXISTS (SELECT o.orderkey)"); + + // join + assertQuery( + "SELECT * FROM orders o JOIN (SELECT * FROM lineitem ORDER BY orderkey LIMIT 2) l " + + "ON NOT EXISTS(SELECT o.orderkey = l.orderkey)"); + + // subrelation + assertQuery( + "SELECT count(*) FROM orders o WHERE (SELECT * FROM (SELECT EXISTS(SELECT o.orderkey)))", + "VALUES 15000"); } @Test - public void testP4ApproxSetBigint() + public void testCorrelatedExistsSubqueriesWithEqualityPredicatesInWhere() { - MaterializedResult actual = computeActual("SELECT cardinality(cast(approx_set(custkey) AS P4HYPERLOGLOG)) FROM orders"); + assertQuery("SELECT EXISTS(SELECT 1 WHERE o.orderkey = 1) FROM orders o"); + assertQuery("SELECT EXISTS(SELECT null WHERE o.orderkey = 1) FROM orders o"); + assertQuery("SELECT count(*) FROM orders o WHERE EXISTS(SELECT 1 WHERE o.orderkey = 0)"); + assertQuery("SELECT * FROM orders o ORDER BY EXISTS(SELECT 1 WHERE o.orderkey = 0)"); + assertQuery( + "SELECT count(*) FROM orders o " + + "WHERE EXISTS (SELECT avg(l.orderkey) FROM lineitem l WHERE o.orderkey = l.orderkey)"); + assertQuery( + "SELECT count(*) FROM orders o " + + "WHERE EXISTS (SELECT avg(l.orderkey) FROM lineitem l WHERE o.orderkey = l.orderkey GROUP BY l.linenumber)"); + assertQueryFails( + "SELECT count(*) FROM orders o " + + "WHERE EXISTS (SELECT count(*) FROM lineitem l WHERE o.orderkey = l.orderkey HAVING count(*) > 3)", + UNSUPPORTED_CORRELATED_SUBQUERY_ERROR_MSG); - MaterializedResult expected = resultBuilder(getSession(), 
BIGINT) - .row(1002L) - .build(); + // with duplicated rows + assertQuery( + "SELECT EXISTS(SELECT 1 WHERE a = 1) FROM (VALUES 1, 1, 2, 3) t(a)", + "VALUES true, true, false, false"); - assertEquals(actual.getMaterializedRows(), expected.getMaterializedRows()); - } + // group by + assertQuery( + "SELECT max(o.totalprice), o.orderkey, EXISTS(SELECT 1 WHERE o.orderkey = 0) " + + "FROM orders o GROUP BY o.orderkey"); + assertQuery( + "SELECT max(o.totalprice), o.orderkey " + + "FROM orders o GROUP BY o.orderkey HAVING EXISTS (SELECT 1 WHERE o.orderkey = 0)"); + assertQuery( + "SELECT max(o.totalprice), o.orderkey " + + "FROM orders o GROUP BY o.orderkey, EXISTS (SELECT 1 WHERE o.orderkey = 0)"); - @Test - public void testP4ApproxSetVarchar() - { - MaterializedResult actual = computeActual("SELECT cardinality(cast(approx_set(CAST(custkey AS VARCHAR)) AS P4HYPERLOGLOG)) FROM orders"); + // join + assertQuery( + "SELECT count(*) " + + "FROM (SELECT * FROM orders ORDER BY orderkey LIMIT 10) o1 " + + "JOIN (SELECT * FROM orders ORDER BY orderkey LIMIT 5) o2 " + + "ON NOT EXISTS(SELECT 1 WHERE o1.orderkey = o2.orderkey)"); + assertQueryFails( + "SELECT count(*) FROM orders o1 LEFT JOIN orders o2 " + + "ON NOT EXISTS(SELECT 1 WHERE o1.orderkey = o2.orderkey)", + "line .*: Correlated subquery in given context is not supported"); - MaterializedResult expected = resultBuilder(getSession(), BIGINT) - .row(1024L) - .build(); + // subrelation + assertQuery( + "SELECT count(*) FROM orders o WHERE (SELECT * FROM (SELECT EXISTS(SELECT 1 WHERE o.orderkey = 0)))", + "SELECT count(*) FROM orders o WHERE o.orderkey = 0"); - assertEquals(actual.getMaterializedRows(), expected.getMaterializedRows()); + // not exists + assertQuery( + "SELECT count(*) FROM customer WHERE NOT EXISTS(SELECT * FROM orders WHERE orders.custkey=customer.custkey)", + "VALUES 500"); } @Test - public void testP4ApproxSetDouble() + public void testCorrelatedExistsSubqueries() { - MaterializedResult actual = 
computeActual("SELECT cardinality(cast(approx_set(CAST(custkey AS DOUBLE)) AS P4HYPERLOGLOG)) FROM orders"); + // projection + assertQuery( + "SELECT EXISTS(SELECT 1 FROM (VALUES 1, 1, 1, 2, 2, 3, 4) i(a) WHERE i.a < o.a AND i.a < 4) " + + "FROM (VALUES 0, 3, 3, 5) o(a)", + "VALUES false, true, true, true"); + assertQuery( + "SELECT EXISTS(SELECT 1 WHERE l.orderkey > 0 OR l.orderkey != 3) " + + "FROM lineitem l LIMIT 1"); - MaterializedResult expected = resultBuilder(getSession(), BIGINT) - .row(1014L) - .build(); + assertQuery( + "SELECT count(*) FROM orders o " + + "WHERE EXISTS(SELECT 1 FROM orders i WHERE o.orderkey < i.orderkey AND i.orderkey % 1000 = 0)", + "VALUES 14999"); // h2 is slow + assertQuery( + "SELECT count(*) FROM lineitem l " + + "WHERE EXISTS(SELECT 1 WHERE l.orderkey > 0 OR l.orderkey != 3)"); - assertEquals(actual.getMaterializedRows(), expected.getMaterializedRows()); - } + // order by + assertQuery( + "SELECT orderkey FROM orders o ORDER BY " + + "EXISTS(SELECT 1 FROM orders i WHERE o.orderkey < i.orderkey AND i.orderkey % 10000 = 0)" + + "LIMIT 1", + "VALUES 60000"); // h2 is slow + assertQuery( + "SELECT orderkey FROM lineitem l ORDER BY " + + "EXISTS(SELECT 1 WHERE l.orderkey > 0 OR l.orderkey != 3)"); - @Test - public void testP4ApproxSetBigintGroupBy() - { - MaterializedResult actual = computeActual("" + - "SELECT orderstatus, cardinality(cast(approx_set(custkey) AS P4HYPERLOGLOG)) " + - "FROM orders " + - "GROUP BY orderstatus"); + // group by + assertQuery( + "SELECT max(o.orderdate), o.orderkey, " + + "EXISTS(SELECT 1 FROM orders i WHERE o.orderkey < i.orderkey AND i.orderkey % 10000 = 0) " + + "FROM orders o GROUP BY o.orderkey ORDER BY o.orderkey LIMIT 1", + "VALUES ('1996-01-02', 1, true)"); // h2 is slow + assertQuery( + "SELECT max(o.orderdate), o.orderkey " + + "FROM orders o " + + "GROUP BY o.orderkey " + + "HAVING EXISTS(SELECT 1 FROM orders i WHERE o.orderkey < i.orderkey AND i.orderkey % 10000 = 0)" + + "ORDER BY o.orderkey 
LIMIT 1", + "VALUES ('1996-01-02', 1)"); // h2 is slow + assertQuery( + "SELECT max(o.orderdate), o.orderkey FROM orders o " + + "GROUP BY o.orderkey, EXISTS(SELECT 1 FROM orders i WHERE o.orderkey < i.orderkey AND i.orderkey % 10000 = 0)" + + "ORDER BY o.orderkey LIMIT 1", + "VALUES ('1996-01-02', 1)"); // h2 is slow + assertQuery( + "SELECT max(l.quantity), l.orderkey, EXISTS(SELECT 1 WHERE l.orderkey > 0 OR l.orderkey != 3) FROM lineitem l " + + "GROUP BY l.orderkey"); + assertQuery( + "SELECT max(l.quantity), l.orderkey FROM lineitem l " + + "GROUP BY l.orderkey " + + "HAVING EXISTS (SELECT 1 WHERE l.orderkey > 0 OR l.orderkey != 3)"); + assertQuery( + "SELECT max(l.quantity), l.orderkey FROM lineitem l " + + "GROUP BY l.orderkey, EXISTS (SELECT 1 WHERE l.orderkey > 0 OR l.orderkey != 3)"); - MaterializedResult expected = resultBuilder(getSession(), actual.getTypes()) - .row("O", 1001L) - .row("F", 998L) - .row("P", 308L) - .build(); + // join + assertQuery( + "SELECT count(*) " + + "FROM (SELECT * FROM orders ORDER BY orderkey LIMIT 10) o1 " + + "JOIN (SELECT * FROM orders ORDER BY orderkey LIMIT 5) o2 " + + "ON NOT EXISTS(SELECT 1 FROM orders i WHERE o1.orderkey < o2.orderkey AND i.orderkey % 10000 = 0)"); + assertQueryFails( + "SELECT count(*) FROM orders o1 LEFT JOIN orders o2 " + + "ON NOT EXISTS(SELECT 1 FROM orders i WHERE o1.orderkey < o2.orderkey)", + "line .*: Correlated subquery in given context is not supported"); - assertEqualsIgnoreOrder(actual.getMaterializedRows(), expected.getMaterializedRows()); + // subrelation + assertQuery( + "SELECT count(*) FROM orders o " + + "WHERE (SELECT * FROM (SELECT EXISTS(SELECT 1 FROM orders i WHERE o.orderkey < i.orderkey AND i.orderkey % 10000 = 0)))", + "VALUES 14999"); // h2 is slow + assertQuery( + "SELECT count(*) FROM orders o " + + "WHERE (SELECT * FROM (SELECT EXISTS(SELECT 1 WHERE o.orderkey > 10 OR o.orderkey != 3)))", + "VALUES 14999"); } @Test - public void testP4ApproxSetVarcharGroupBy() + public 
void testTwoCorrelatedExistsSubqueries() { - MaterializedResult actual = computeActual("" + - "SELECT orderstatus, cardinality(cast(approx_set(CAST(custkey AS VARCHAR)) AS P4HYPERLOGLOG)) " + - "FROM orders " + - "GROUP BY orderstatus"); - - MaterializedResult expected = resultBuilder(getSession(), actual.getTypes()) - .row("O", 1021L) - .row("F", 1019L) - .row("P", 302L) - .build(); - - assertEqualsIgnoreOrder(actual.getMaterializedRows(), expected.getMaterializedRows()); + // This is simplified TPC-H q21 + assertQuery("SELECT\n" + + " count(*) AS numwait\n" + + "FROM\n" + + " nation l1\n" + + "WHERE\n" + + " EXISTS(\n" + + " SELECT *\n" + + " FROM\n" + + " nation l2\n" + + " WHERE\n" + + " l2.nationkey = l1.nationkey\n" + + " )\n" + + " AND NOT EXISTS(\n" + + " SELECT *\n" + + " FROM\n" + + " nation l3\n" + + " WHERE\n" + + " l3.nationkey= l1.nationkey\n" + + " )\n", + "VALUES 0"); // EXISTS predicates are contradictory } @Test - public void testP4ApproxSetDoubleGroupBy() + public void testPredicatePushdown() { - MaterializedResult actual = computeActual("" + - "SELECT orderstatus, cardinality(cast(approx_set(CAST(custkey AS DOUBLE)) AS P4HYPERLOGLOG)) " + - "FROM orders " + - "GROUP BY orderstatus"); - - MaterializedResult expected = resultBuilder(getSession(), actual.getTypes()) - .row("O", 1011L) - .row("F", 1011L) - .row("P", 306L) - .build(); - - assertEqualsIgnoreOrder(actual.getMaterializedRows(), expected.getMaterializedRows()); + assertQuery("" + + "SELECT *\n" + + "FROM (\n" + + " SELECT orderkey+1 AS a FROM orders WHERE orderstatus = 'F' UNION ALL \n" + + " SELECT orderkey FROM orders WHERE orderkey % 2 = 0 UNION ALL \n" + + " (SELECT orderkey+custkey FROM orders ORDER BY orderkey LIMIT 10)\n" + + ") \n" + + "WHERE a < 20 OR a > 100 \n" + + "ORDER BY a"); } @Test - public void testP4ApproxSetWithNulls() + public void testGroupByKeyPredicatePushdown() { - MaterializedResult actual = computeActual("SELECT cardinality(cast(approx_set(IF(orderstatus = 'O', 
custkey)) AS P4HYPERLOGLOG)) FROM orders"); - - MaterializedResult expected = resultBuilder(getSession(), actual.getTypes()) - .row(1001L) - .build(); - - assertEquals(actual.getMaterializedRows(), expected.getMaterializedRows()); + assertQuery("" + + "SELECT *\n" + + "FROM (\n" + + " SELECT custkey1, orderstatus1, SUM(totalprice1) totalprice, MAX(custkey2) maxcustkey\n" + + " FROM (\n" + + " SELECT *\n" + + " FROM (\n" + + " SELECT custkey custkey1, orderstatus orderstatus1, CAST(totalprice AS BIGINT) totalprice1, orderkey orderkey1\n" + + " FROM orders\n" + + " ) orders1 \n" + + " JOIN (\n" + + " SELECT custkey custkey2, orderstatus orderstatus2, CAST(totalprice AS BIGINT) totalprice2, orderkey orderkey2\n" + + " FROM orders\n" + + " ) orders2 ON orders1.orderkey1 = orders2.orderkey2\n" + + " ) \n" + + " GROUP BY custkey1, orderstatus1\n" + + ")\n" + + "WHERE custkey1 = maxcustkey\n" + + "AND maxcustkey % 2 = 0 \n" + + "AND orderstatus1 = 'F'\n" + + "AND totalprice > 10000\n" + + "ORDER BY custkey1, orderstatus1, totalprice, maxcustkey"); } @Test - public void testP4ApproxSetOnlyNulls() + public void testNonDeterministicTableScanPredicatePushdown() { - MaterializedResult actual = computeActual("SELECT cardinality(cast(approx_set(null) AS P4HYPERLOGLOG)) FROM orders"); - - MaterializedResult expected = resultBuilder(getSession(), actual.getTypes()) - .row(new Object[] {null}) - .build(); - - assertEquals(actual.getMaterializedRows(), expected.getMaterializedRows()); + MaterializedResult materializedResult = computeActual("" + + "SELECT COUNT(*)\n" + + "FROM (\n" + + " SELECT *\n" + + " FROM lineitem\n" + + " LIMIT 1000\n" + + ")\n" + + "WHERE rand() > 0.5"); + MaterializedRow row = getOnlyElement(materializedResult.getMaterializedRows()); + assertEquals(row.getFieldCount(), 1); + long count = (Long) row.getField(0); + // Technically non-deterministic unit test but has essentially a next to impossible chance of a false positive + assertTrue(count > 0 && count < 
1000); } @Test - public void testP4ApproxSetGroupByWithOnlyNullsInOneGroup() + public void testNonDeterministicAggregationPredicatePushdown() { - MaterializedResult actual = computeActual("" + - "SELECT orderstatus, cardinality(cast(approx_set(IF(orderstatus != 'O', custkey)) AS P4HYPERLOGLOG)) " + - "FROM orders " + - "GROUP BY orderstatus"); - - MaterializedResult expected = resultBuilder(getSession(), actual.getTypes()) - .row("O", null) - .row("F", 998L) - .row("P", 308L) - .build(); - - assertEqualsIgnoreOrder(actual.getMaterializedRows(), expected.getMaterializedRows()); + MaterializedResult materializedResult = computeActual("" + + "SELECT COUNT(*)\n" + + "FROM (\n" + + " SELECT orderkey, COUNT(*)\n" + + " FROM lineitem\n" + + " GROUP BY orderkey\n" + + " LIMIT 1000\n" + + ")\n" + + "WHERE rand() > 0.5"); + MaterializedRow row = getOnlyElement(materializedResult.getMaterializedRows()); + assertEquals(row.getFieldCount(), 1); + long count = (Long) row.getField(0); + // Technically non-deterministic unit test but has essentially a next to impossible chance of a false positive + assertTrue(count > 0 && count < 1000); } @Test - public void testP4ApproxSetGroupByWithNulls() + public void testUnionAllPredicateMoveAroundWithOverlappingProjections() { - MaterializedResult actual = computeActual("" + - "SELECT orderstatus, cardinality(cast(approx_set(IF(custkey % 2 <> 0, custkey)) AS P4HYPERLOGLOG)) " + - "FROM orders " + - "GROUP BY orderstatus"); - - MaterializedResult expected = resultBuilder(getSession(), actual.getTypes()) - .row("O", 495L) - .row("F", 491L) - .row("P", 153L) - .build(); - - assertEqualsIgnoreOrder(actual.getMaterializedRows(), expected.getMaterializedRows()); + assertQuery("" + + "SELECT COUNT(*)\n" + + "FROM (\n" + + " SELECT orderkey AS x, orderkey AS y\n" + + " FROM orders\n" + + " WHERE orderkey % 3 = 0\n" + + " UNION ALL\n" + + " SELECT orderkey AS x, orderkey AS y\n" + + " FROM orders\n" + + " WHERE orderkey % 2 = 0\n" + + ") a\n" + + 
"JOIN (\n" + + " SELECT orderkey AS x, orderkey AS y\n" + + " FROM orders\n" + + ") b\n" + + "ON a.x = b.x"); } @Test - public void testValuesWithNonTrivialType() + public void testTableSampleBernoulliBoundaryValues() { - MaterializedResult actual = computeActual("VALUES (0E0/0E0, 1E0/0E0, -1E0/0E0)"); - - List rows = actual.getMaterializedRows(); - assertEquals(rows.size(), 1); + MaterializedResult fullSample = computeActual("SELECT orderkey FROM orders TABLESAMPLE BERNOULLI (100)"); + MaterializedResult emptySample = computeActual("SELECT orderkey FROM orders TABLESAMPLE BERNOULLI (0)"); + MaterializedResult all = computeExpected("SELECT orderkey FROM orders", fullSample.getTypes()); - MaterializedRow row = rows.get(0); - assertTrue(((Double) row.getField(0)).isNaN()); - assertEquals(row.getField(1), Double.POSITIVE_INFINITY); - assertEquals(row.getField(2), Double.NEGATIVE_INFINITY); + assertContains(all, fullSample); + assertEquals(emptySample.getMaterializedRows().size(), 0); } @Test - public void testValuesWithTimestamp() + public void testTableSampleBernoulli() { - MaterializedResult actual = computeActual("VALUES (current_timestamp, now())"); - - List rows = actual.getMaterializedRows(); - assertEquals(rows.size(), 1); + DescriptiveStatistics stats = new DescriptiveStatistics(); - MaterializedRow row = rows.get(0); - assertEquals(row.getField(0), row.getField(1)); - } + int total = computeExpected("SELECT orderkey FROM orders", ImmutableList.of(BIGINT)).getMaterializedRows().size(); - @Test - public void testValuesWithUnusedColumns() - { - MaterializedResult actual = computeActual("SELECT foo FROM (values (1, 2)) a(foo, bar)"); + for (int i = 0; i < 100; i++) { + List values = computeActual("SELECT orderkey FROM orders TABLESAMPLE BERNOULLI (50)").getMaterializedRows(); - MaterializedResult expected = resultBuilder(getSession(), actual.getTypes()) - .row(1) - .build(); + assertEquals(values.size(), ImmutableSet.copyOf(values).size(), "TABLESAMPLE produced 
duplicate rows"); + stats.addValue(values.size() * 1.0 / total); + } - assertEquals(actual.getMaterializedRows(), expected.getMaterializedRows()); + double mean = stats.getGeometricMean(); + assertTrue(mean > 0.45 && mean < 0.55, format("Expected mean sampling rate to be ~0.5, but was %s", mean)); } @Test @@ -4718,536 +1792,6 @@ public void testAccessControl() assertAccessDenied("SHOW STATS FOR (SELECT * FROM nation)", "Cannot show stats for columns \\[nationkey, regionkey, name, comment\\] in table or view .*.nation.*", privilege("nation", SELECT_COLUMN)); } - @Test - public void testEmptyInputForUnnest() - { - assertQuery("SELECT val FROM (SELECT DISTINCT vals FROM (values (array[2])) t(vals) WHERE false) tmp CROSS JOIN unnest(tmp.vals) tt(val)", "SELECT 1 WHERE 1=2"); - } - - @Test - public void testCoercions() - { - // VARCHAR - assertQuery("SELECT length(NULL)"); - assertQuery("SELECT CAST('abc' AS VARCHAR(255)) || CAST('abc' AS VARCHAR(252))"); - assertQuery("SELECT CAST('abc' AS VARCHAR(255)) || 'abc'"); - - // DECIMAL - DECIMAL - assertQuery("SELECT CAST(1.1 AS DECIMAL(38,1)) + NULL"); - assertQuery("SELECT CAST(292 AS DECIMAL(38,1)) + CAST(292.1 AS DECIMAL(5,1))"); - assertEqualsIgnoreOrder( - computeActual("SELECT ARRAY[CAST(282 AS DECIMAL(22,1)), CAST(282 AS DECIMAL(10,1))] || CAST(292 AS DECIMAL(5,1))"), - computeActual("SELECT ARRAY[CAST(282 AS DECIMAL(22,1)), CAST(282 AS DECIMAL(10,1)), CAST(292 AS DECIMAL(5,1))]")); - - // BIGINT - DECIMAL - assertQuery("SELECT CAST(1.1 AS DECIMAL(38,1)) + CAST(292 AS BIGINT)"); - assertQuery("SELECT CAST(292 AS DECIMAL(38,1)) = CAST(292 AS BIGINT)"); - assertEqualsIgnoreOrder( - computeActual("SELECT ARRAY[CAST(282 AS DECIMAL(22,1)), CAST(282 AS DECIMAL(10,1))] || CAST(292 AS BIGINT)"), - computeActual("SELECT ARRAY[CAST(282 AS DECIMAL(22,1)), CAST(282 AS DECIMAL(10,1)), CAST(292 AS DECIMAL(19,0))]")); - - // DECIMAL - DECIMAL - assertQuery("SELECT CAST(1.1 AS DECIMAL(38,1)) + CAST(1.1 AS DOUBLE)"); - 
assertQuery("SELECT CAST(1.1 AS DECIMAL(38,1)) = CAST(1.1 AS DOUBLE)"); - assertQuery("SELECT SIN(CAST(1.1 AS DECIMAL(38,1)))"); - assertEqualsIgnoreOrder( - computeActual("SELECT ARRAY[CAST(282.1 AS DOUBLE), CAST(283.2 AS DOUBLE)] || CAST(101.3 AS DECIMAL(5,1))"), - computeActual("SELECT ARRAY[CAST(282.1 AS DOUBLE), CAST(283.2 AS DOUBLE), CAST(101.3 AS DOUBLE)]")); - - // INTEGER - DECIMAL - assertQuery("SELECT CAST(1.1 AS DECIMAL(38,1)) + CAST(292 AS INTEGER)"); - assertQuery("SELECT CAST(292 AS DECIMAL(38,1)) = CAST(292 AS INTEGER)"); - assertEqualsIgnoreOrder( - computeActual("SELECT ARRAY[CAST(282 AS DECIMAL(22,1)), CAST(282 AS DECIMAL(10,1))] || CAST(292 AS INTEGER)"), - computeActual("SELECT ARRAY[CAST(282 AS DECIMAL(22,1)), CAST(282 AS DECIMAL(10,1)), CAST(292 AS DECIMAL(19,0))]")); - - // TINYINT - DECIMAL - assertQuery("SELECT CAST(1.1 AS DECIMAL(38,1)) + CAST(CAST(121 AS DECIMAL(30,1)) AS TINYINT)"); - assertQuery("SELECT CAST(292 AS DECIMAL(38,1)) = CAST(CAST(121 AS DECIMAL(30,1)) AS TINYINT)"); - - // SMALLINT - DECIMAL - assertQuery("SELECT CAST(1.1 AS DECIMAL(38,1)) + CAST(CAST(121 AS DECIMAL(30,1)) AS SMALLINT)"); - assertQuery("SELECT CAST(292 AS DECIMAL(38,1)) = CAST(CAST(121 AS DECIMAL(30,1)) AS SMALLINT)"); - - // Complex coercions across joins - assertQuery("SELECT * FROM (" + - " SELECT t2.x || t2.z cc FROM (" + - " SELECT *" + - " FROM (VALUES (CAST('a' AS VARCHAR), CAST('c' AS VARCHAR))) t(x, z)" + - " ) t2" + - " JOIN (" + - " SELECT *" + - " FROM (VALUES (CAST('a' AS VARCHAR), CAST('c' AS VARCHAR))) u(x, z)" + - " WHERE z='c'" + - " ) u2" + - " ON t2.z = u2.z" + - ") tt " + - "WHERE cc = 'ac'", - "SELECT 'ac'"); - - assertQuery("SELECT * FROM (" + - " SELECT greatest (t.x, t.z) cc FROM (" + - " SELECT *" + - " FROM (VALUES (VARCHAR 'a', VARCHAR 'c')) t(x, z)" + - " ) t" + - " JOIN (" + - " SELECT *" + - " FROM (VALUES (VARCHAR 'a', VARCHAR 'c')) u(x, z)" + - " WHERE z='c'" + - " ) u" + - " ON t.z = u.z" + - ")" + - "WHERE cc = 'c'", - 
"SELECT 'c'"); - - assertQuery("SELECT cc[1], cc[2] FROM (" + - " SELECT * FROM (" + - " SELECT array[t.x, t.z] cc FROM (" + - " SELECT *" + - " FROM (VALUES (VARCHAR 'a', VARCHAR 'c')) t(x, z)" + - " ) t" + - " JOIN (" + - " SELECT *" + - " FROM (VALUES (VARCHAR 'a', VARCHAR 'c')) u(x, z)" + - " WHERE z='c'" + - " ) u" + - " ON t.z = u.z)" + - " WHERE cc = array['a', 'c'])", - "SELECT 'a', 'c'"); - - assertQuery("SELECT c = 'x'" + - "FROM (" + - " SELECT 'x' AS c" + - " UNION ALL" + - " SELECT 'yy' AS c" + - ")"); - } - - @Test - public void testExecute() - { - Session session = Session.builder(getSession()) - .addPreparedStatement("my_query", "SELECT 123, 'abc'") - .build(); - assertQuery(session, "EXECUTE my_query", "SELECT 123, 'abc'"); - } - - @Test - public void testExecuteUsing() - { - String query = "SELECT a + 1, count(?) FROM (VALUES 1, 2, 3, 2) t1(a) JOIN (VALUES 1, 2, 3, 4) t2(b) ON b < ? WHERE a < ? GROUP BY a + 1 HAVING count(1) > ?"; - Session session = Session.builder(getSession()) - .addPreparedStatement("my_query", query) - .build(); - assertQuery(session, - "EXECUTE my_query USING 1, 5, 4, 0", - "VALUES (2, 4), (3, 8), (4, 4)"); - } - - @Test - public void testExecuteUsingComplexJoinCriteria() - { - String query = "SELECT * FROM (VALUES 1) t(a) JOIN (VALUES 2) u(a) ON t.a + u.a < ?"; - Session session = Session.builder(getSession()) - .addPreparedStatement("my_query", query) - .build(); - assertQuery(session, - "EXECUTE my_query USING 5", - "VALUES (1, 2)"); - } - - @Test - public void testExecuteUsingWithSubquery() - { - String query = "SELECT ? 
in (SELECT orderkey FROM orders)"; - Session session = Session.builder(getSession()) - .addPreparedStatement("my_query", query) - .build(); - - assertQuery(session, - "EXECUTE my_query USING 10", - "SELECT 10 in (SELECT orderkey FROM orders)"); - } - - @Test - public void testExecuteUsingWithSubqueryInJoin() - { - String query = "SELECT * " + - "FROM " + - " (VALUES ?,2,3) t(x) " + - " JOIN " + - " (VALUES 1,2,3) t2(y) " + - " ON " + - "(x in (VALUES 1,2,?)) = (y in (VALUES 1,2,3)) AND (x in (VALUES 1,?)) = (y in (VALUES 1,2))"; - - Session session = Session.builder(getSession()) - .addPreparedStatement("my_query", query) - .build(); - assertQuery(session, - "EXECUTE my_query USING 1, 3, 2", - "VALUES (1,1), (1,2), (2,2), (2,1), (3,3)"); - } - - @Test - public void testExecuteWithParametersInGroupBy() - { - String query = "SELECT a + ?, count(1) FROM (VALUES 1, 2, 3, 2) t(a) GROUP BY a + ?"; - Session session = Session.builder(getSession()) - .addPreparedStatement("my_query", query) - .build(); - - assertQueryFails( - session, - "EXECUTE my_query USING 1, 1", - "\\Qline 1:10: '(a + ?)' must be an aggregate expression or appear in GROUP BY clause\\E"); - } - - @Test - public void testExecuteUsingWithWithClause() - { - String query = "WITH src AS (SELECT * FROM (VALUES (1, 4),(2, 5), (3, 6)) AS t(id1, id2) WHERE id2 = ?)" + - " SELECT * from src WHERE id1 between ? and ?"; - - Session session = Session.builder(getSession()) - .addPreparedStatement("my_query", query) - .build(); - assertQuery(session, - "EXECUTE my_query USING 6, 0, 10", - "VALUES (3, 6)"); - } - - @Test - public void testExecuteUsingWithFunctionsAsParameters() - { - String query = "SELECT a + ? 
FROM (VALUES 1, 2, 3, 4) AS t(a)"; - - Session session = Session.builder(getSession()) - .addPreparedStatement("my_query", query) - .build(); - assertQuery(session, - "EXECUTE my_query USING abs(-2) ", - "VALUES 3, 4, 5, 6"); - } - - @Test - public void testExecuteNoSuchQuery() - { - assertQueryFails("EXECUTE my_query", "Prepared statement not found: my_query"); - } - - @Test - public void testParametersNonPreparedStatement() - { - assertQueryFails( - "SELECT ?, 1", - "line 1:1: Incorrect number of parameters: expected 1 but found 0"); - } - - @Test - public void testDescribeInput() - { - Session session = Session.builder(getSession()) - .addPreparedStatement("my_query", "SELECT ? FROM nation WHERE nationkey = ? and name < ?") - .build(); - MaterializedResult actual = computeActual(session, "DESCRIBE INPUT my_query"); - MaterializedResult expected = resultBuilder(session, BIGINT, VARCHAR) - .row(0, "unknown") - .row(1, "bigint") - .row(2, "varchar") - .build(); - assertEqualsIgnoreOrder(actual, expected); - } - - @Test - public void testDescribeInputWithAggregation() - { - Session session = Session.builder(getSession()) - .addPreparedStatement("my_query", "SELECT count(*) + ? 
FROM nation") - .build(); - MaterializedResult actual = computeActual(session, "DESCRIBE INPUT my_query"); - MaterializedResult expected = resultBuilder(session, BIGINT, VARCHAR) - .row(0, "bigint") - .build(); - assertEqualsIgnoreOrder(actual, expected); - } - - @Test - public void testDescribeInputNoParameters() - { - Session session = Session.builder(getSession()) - .addPreparedStatement("my_query", "SELECT * FROM nation") - .build(); - MaterializedResult actual = computeActual(session, "DESCRIBE INPUT my_query"); - MaterializedResult expected = resultBuilder(session, UNKNOWN, UNKNOWN).build(); - assertEquals(actual, expected); - } - - @Test - public void testDescribeInputNoSuchQuery() - { - assertQueryFails("DESCRIBE INPUT my_query", "Prepared statement not found: my_query"); - } - - @Test - public void testQuantifiedComparison() - { - assertQuery("SELECT nationkey, name, regionkey FROM nation WHERE regionkey = ANY (SELECT regionkey FROM region WHERE name IN ('ASIA', 'EUROPE'))"); - assertQuery("SELECT nationkey, name, regionkey FROM nation WHERE regionkey = ALL (SELECT regionkey FROM region WHERE name IN ('ASIA', 'EUROPE'))"); - - assertQuery("SELECT nationkey, name, regionkey FROM nation WHERE regionkey <> ANY (SELECT regionkey FROM region WHERE name IN ('ASIA', 'EUROPE'))"); - assertQuery("SELECT nationkey, name, regionkey FROM nation WHERE regionkey <> ALL (SELECT regionkey FROM region WHERE name IN ('ASIA', 'EUROPE'))"); - - assertQuery("SELECT nationkey, name, regionkey FROM nation WHERE regionkey = ALL (SELECT regionkey FROM region WHERE name IN ('ASIA'))"); - assertQuery("SELECT nationkey, name, regionkey FROM nation WHERE regionkey <> ALL (SELECT regionkey FROM region WHERE name IN ('ASIA'))"); - assertQuery("SELECT nationkey, name, regionkey FROM nation WHERE regionkey = ANY (SELECT regionkey FROM region WHERE name IN ('EUROPE'))"); - assertQuery("SELECT nationkey, name, regionkey FROM nation WHERE regionkey <> ANY (SELECT regionkey FROM region WHERE 
name IN ('EUROPE'))"); - - assertQuery("SELECT nationkey, name, regionkey FROM nation WHERE regionkey < SOME (SELECT regionkey FROM region WHERE name IN ('ASIA', 'EUROPE'))"); - assertQuery("SELECT nationkey, name, regionkey FROM nation WHERE regionkey <= ANY (SELECT regionkey FROM region WHERE name IN ('ASIA', 'EUROPE'))"); - assertQuery("SELECT nationkey, name, regionkey FROM nation WHERE regionkey > ANY (SELECT regionkey FROM region WHERE name IN ('ASIA', 'EUROPE'))"); - assertQuery("SELECT nationkey, name, regionkey FROM nation WHERE regionkey >= SOME (SELECT regionkey FROM region WHERE name IN ('ASIA', 'EUROPE'))"); - - assertQuery("SELECT nationkey, name, regionkey FROM nation WHERE regionkey < ALL (SELECT regionkey FROM region WHERE name IN ('ASIA', 'EUROPE'))"); - assertQuery("SELECT nationkey, name, regionkey FROM nation WHERE regionkey <= ALL (SELECT regionkey FROM region WHERE name IN ('ASIA', 'EUROPE'))"); - assertQuery("SELECT nationkey, name, regionkey FROM nation WHERE regionkey > ALL (SELECT regionkey FROM region WHERE name IN ('ASIA', 'EUROPE'))"); - assertQuery("SELECT nationkey, name, regionkey FROM nation WHERE regionkey >= ALL (SELECT regionkey FROM region WHERE name IN ('ASIA', 'EUROPE'))"); - - // subquery with coercion - assertQuery("SELECT 1.0 < ALL(SELECT 1), 1 < ALL(SELECT 1)"); - assertQuery("SELECT 1.0 < ANY(SELECT 1), 1 < ANY(SELECT 1)"); - assertQuery("SELECT 1.0 <= ALL(SELECT 1) WHERE 1 <= ALL(SELECT 1)"); - assertQuery("SELECT 1.0 <= ANY(SELECT 1) WHERE 1 <= ANY(SELECT 1)"); - assertQuery("SELECT 1.0 <= ALL(SELECT 1), 1 <= ALL(SELECT 1) WHERE 1 <= ALL(SELECT 1)"); - assertQuery("SELECT 1.0 <= ANY(SELECT 1), 1 <= ANY(SELECT 1) WHERE 1 <= ANY(SELECT 1)"); - assertQuery("SELECT 1.0 = ALL(SELECT 1) WHERE 1 = ALL(SELECT 1)"); - assertQuery("SELECT 1.0 = ANY(SELECT 1) WHERE 1 = ANY(SELECT 1)"); - assertQuery("SELECT 1.0 = ALL(SELECT 1), 2 = ALL(SELECT 1) WHERE 1 = ALL(SELECT 1)"); - assertQuery("SELECT 1.0 = ANY(SELECT 1), 2 = ANY(SELECT 
1) WHERE 1 = ANY(SELECT 1)"); - - // subquery with supertype coercion - assertQuery("SELECT CAST(1 AS decimal(3,2)) < ALL(SELECT CAST(1 AS decimal(3,1)))"); - assertQuery("SELECT CAST(1 AS decimal(3,2)) < ANY(SELECT CAST(1 AS decimal(3,1)))"); - assertQuery("SELECT CAST(1 AS decimal(3,2)) <= ALL(SELECT CAST(1 AS decimal(3,1)))"); - assertQuery("SELECT CAST(1 AS decimal(3,2)) <= ANY(SELECT CAST(1 AS decimal(3,1)))"); - assertQuery("SELECT CAST(1 AS decimal(3,2)) = ALL(SELECT CAST(1 AS decimal(3,1)))"); - assertQuery("SELECT CAST(1 AS decimal(3,2)) = ANY(SELECT CAST(1 AS decimal(3,1)))", "SELECT true"); - assertQuery("SELECT CAST(1 AS decimal(3,2)) <> ALL(SELECT CAST(1 AS decimal(3,1)))"); - assertQuery("SELECT CAST(1 AS decimal(3,2)) <> ANY(SELECT CAST(1 AS decimal(3,1)))"); - } - - @Test(dataProvider = "quantified_comparisons_corner_cases") - public void testQuantifiedComparisonCornerCases(String query) - { - assertQuery(query); - } - - @DataProvider(name = "quantified_comparisons_corner_cases") - public Object[][] qualifiedComparisonsCornerCases() - { - //the %subquery% is wrapped in a SELECT so that H2 does not blow up on the VALUES subquery - return queryTemplate("SELECT %value% %operator% %quantifier% (SELECT * FROM (%subquery%))") - .replaceAll( - parameter("subquery").of( - "SELECT 1 WHERE false", - "SELECT CAST(NULL AS INTEGER)", - "VALUES (1), (NULL)"), - parameter("quantifier").of("ALL", "ANY"), - parameter("value").of("1", "NULL"), - parameter("operator").of("=", "!=", "<", ">", "<=", ">=")) - .collect(toDataProvider()); - } - - @Test - public void testPreparedStatementWithSubqueries() - { - List leftValues = parameter("left").of( - "", "1 = ", - "EXISTS", - "1 IN", - "1 = ANY", "1 = ALL", - "2 <> ANY", "2 <> ALL", - "0 < ALL", "0 < ANY", - "1 <= ALL", "1 <= ANY"); - - queryTemplate("SELECT %left% (SELECT 1 WHERE 2 = ?)") - .replaceAll(leftValues) - .forEach(query -> { - Session session = Session.builder(getSession()) - .addPreparedStatement("my_query", 
query) - .build(); - assertQuery(session, "EXECUTE my_query USING 2", "SELECT true"); - }); - } - - @Test - public void testDescribeOutput() - { - Session session = Session.builder(getSession()) - .addPreparedStatement("my_query", "SELECT * FROM nation") - .build(); - - MaterializedResult actual = computeActual(session, "DESCRIBE OUTPUT my_query"); - MaterializedResult expected = resultBuilder(session, VARCHAR, VARCHAR, VARCHAR, VARCHAR, VARCHAR, BIGINT, BOOLEAN) - .row("nationkey", session.getCatalog().get(), session.getSchema().get(), "nation", "bigint", 8, false) - .row("name", session.getCatalog().get(), session.getSchema().get(), "nation", "varchar(25)", 0, false) - .row("regionkey", session.getCatalog().get(), session.getSchema().get(), "nation", "bigint", 8, false) - .row("comment", session.getCatalog().get(), session.getSchema().get(), "nation", "varchar(152)", 0, false) - .build(); - assertEqualsIgnoreOrder(actual, expected); - } - - @Test - public void testDescribeOutputNamedAndUnnamed() - { - Session session = Session.builder(getSession()) - .addPreparedStatement("my_query", "SELECT 1, name, regionkey AS my_alias FROM nation") - .build(); - - MaterializedResult actual = computeActual(session, "DESCRIBE OUTPUT my_query"); - MaterializedResult expected = resultBuilder(session, VARCHAR, VARCHAR, VARCHAR, VARCHAR, VARCHAR, BIGINT, BOOLEAN) - .row("_col0", "", "", "", "integer", 4, false) - .row("name", session.getCatalog().get(), session.getSchema().get(), "nation", "varchar(25)", 0, false) - .row("my_alias", session.getCatalog().get(), session.getSchema().get(), "nation", "bigint", 8, true) - .build(); - assertEqualsIgnoreOrder(actual, expected); - } - - @Test - public void testDescribeOutputNonSelect() - { - assertDescribeOutputRowCount("CREATE TABLE foo AS SELECT * FROM nation"); - assertDescribeOutputRowCount("DELETE FROM orders"); - - assertDescribeOutputEmpty("CALL foo()"); - assertDescribeOutputEmpty("SET SESSION optimize_hash_generation=false"); - 
assertDescribeOutputEmpty("RESET SESSION optimize_hash_generation"); - assertDescribeOutputEmpty("START TRANSACTION"); - assertDescribeOutputEmpty("COMMIT"); - assertDescribeOutputEmpty("ROLLBACK"); - assertDescribeOutputEmpty("GRANT INSERT ON foo TO bar"); - assertDescribeOutputEmpty("REVOKE INSERT ON foo FROM bar"); - assertDescribeOutputEmpty("CREATE SCHEMA foo"); - assertDescribeOutputEmpty("CREATE SCHEMA foo AUTHORIZATION bar"); - assertDescribeOutputEmpty("CREATE SCHEMA foo AUTHORIZATION bar WITH ( x = 'y' )"); - assertDescribeOutputEmpty("ALTER SCHEMA foo RENAME TO bar"); - assertDescribeOutputEmpty("ALTER SCHEMA foo SET AUTHORIZATION bar"); - assertDescribeOutputEmpty("DROP SCHEMA foo"); - assertDescribeOutputEmpty("CREATE TABLE foo (x bigint)"); - assertDescribeOutputEmpty("ALTER TABLE foo ADD COLUMN y bigint"); - assertDescribeOutputEmpty("ALTER TABLE foo RENAME TO bar"); - assertDescribeOutputEmpty("DROP TABLE foo"); - assertDescribeOutputEmpty("CREATE VIEW foo AS SELECT * FROM nation"); - assertDescribeOutputEmpty("DROP VIEW foo"); - assertDescribeOutputEmpty("PREPARE test FROM SELECT * FROM orders"); - assertDescribeOutputEmpty("EXECUTE test"); - assertDescribeOutputEmpty("DEALLOCATE PREPARE test"); - } - - private void assertDescribeOutputRowCount(@Language("SQL") String sql) - { - Session session = Session.builder(getSession()) - .addPreparedStatement("my_query", sql) - .build(); - - MaterializedResult actual = computeActual(session, "DESCRIBE OUTPUT my_query"); - MaterializedResult expected = resultBuilder(session, VARCHAR, VARCHAR, VARCHAR, VARCHAR, VARCHAR, BIGINT, BOOLEAN) - .row("rows", "", "", "", "bigint", 8, false) - .build(); - assertEqualsIgnoreOrder(actual, expected); - } - - private void assertDescribeOutputEmpty(@Language("SQL") String sql) - { - Session session = Session.builder(getSession()) - .addPreparedStatement("my_query", sql) - .build(); - - MaterializedResult actual = computeActual(session, "DESCRIBE OUTPUT my_query"); - 
MaterializedResult expected = resultBuilder(session, VARCHAR, VARCHAR, VARCHAR, VARCHAR, VARCHAR, BIGINT, BOOLEAN) - .build(); - assertEqualsIgnoreOrder(actual, expected); - } - - @Test - public void testDescribeOutputShowTables() - { - Session session = Session.builder(getSession()) - .addPreparedStatement("my_query", "SHOW TABLES") - .build(); - - MaterializedResult actual = computeActual(session, "DESCRIBE OUTPUT my_query"); - MaterializedResult expected = resultBuilder(session, VARCHAR, VARCHAR, VARCHAR, VARCHAR, VARCHAR, BIGINT, BOOLEAN) - .row("Table", session.getCatalog().get(), "information_schema", "tables", "varchar", 0, true) - .build(); - assertEqualsIgnoreOrder(actual, expected); - } - - @Test - public void testDescribeOutputOnAliasedColumnsAndExpressions() - { - Session session = Session.builder(getSession()) - .addPreparedStatement("my_query", "SELECT count(*) AS this_is_aliased, 1 + 2 FROM nation") - .build(); - - MaterializedResult actual = computeActual(session, "DESCRIBE OUTPUT my_query"); - MaterializedResult expected = resultBuilder(session, VARCHAR, VARCHAR, VARCHAR, VARCHAR, VARCHAR, BIGINT, BOOLEAN) - .row("this_is_aliased", "", "", "", "bigint", 8, true) - .row("_col1", "", "", "", "integer", 4, false) - .build(); - assertEqualsIgnoreOrder(actual, expected); - } - - @Test - public void testDescribeOutputNoSuchQuery() - { - assertQueryFails("DESCRIBE OUTPUT my_query", "Prepared statement not found: my_query"); - } - - @Test - public void testSubqueriesWithDisjunction() - { - List projections = parameter("projection").of("count(*)", "*", "%condition%"); - List conditions = parameter("condition").of( - "nationkey IN (SELECT 1) OR TRUE", - "EXISTS(SELECT 1) OR TRUE"); - - queryTemplate("SELECT %projection% FROM nation WHERE %condition%") - .replaceAll(projections, conditions) - .forEach(this::assertQuery); - - queryTemplate("SELECT %projection% FROM nation WHERE (%condition%) AND nationkey <3") - .replaceAll(projections, conditions) - 
.forEach(this::assertQuery); - - assertQuery( - "SELECT count(*) FROM nation WHERE (SELECT true FROM (SELECT 1) t(a) WHERE a = nationkey) OR TRUE", - "SELECT 25"); - assertQuery( - "SELECT (SELECT true FROM (SELECT 1) t(a) WHERE a = nationkey) " + - "FROM nation " + - "WHERE (SELECT true FROM (SELECT 1) t(a) WHERE a = nationkey) OR TRUE " + - "ORDER BY nationkey " + - "LIMIT 2", - "VALUES true, null"); - } - - @Test - public void testAssignUniqueId() - { - String unionLineitem25Times = IntStream.range(0, 25) - .mapToObj(i -> "SELECT * FROM lineitem") - .collect(joining(" UNION ALL ")); - - assertQuery( - "SELECT count(*) FROM (" + - "SELECT * FROM (" + - " SELECT (SELECT count(*) WHERE c = 1) " + - " FROM (SELECT CASE orderkey WHEN 1 THEN orderkey ELSE 1 END " + - " FROM (" + unionLineitem25Times + ")) o(c)) result(a) " + - "WHERE a = 1)", - "VALUES 1504375"); - } - @Test public void testCorrelatedJoin() { @@ -5355,46 +1899,30 @@ public void testPruningCountAggregationOverScalar() } @Test - public void testDefaultDecimalLiteralSwitch() + public void testSubqueriesWithDisjunction() { - Session decimalLiteral = Session.builder(getSession()) - .setSystemProperty(SystemSessionProperties.PARSE_DECIMAL_LITERALS_AS_DOUBLE, "false") - .build(); - MaterializedResult decimalColumnResult = computeActual(decimalLiteral, "SELECT 1.0"); - - assertEquals(decimalColumnResult.getRowCount(), 1); - assertEquals(decimalColumnResult.getTypes().get(0), createDecimalType(2, 1)); - assertEquals(decimalColumnResult.getMaterializedRows().get(0).getField(0), new BigDecimal("1.0")); - - Session doubleLiteral = Session.builder(getSession()) - .setSystemProperty(SystemSessionProperties.PARSE_DECIMAL_LITERALS_AS_DOUBLE, "true") - .build(); - MaterializedResult doubleColumnResult = computeActual(doubleLiteral, "SELECT 1.0"); + List projections = parameter("projection").of("count(*)", "*", "%condition%"); + List conditions = parameter("condition").of( + "nationkey IN (SELECT 1) OR TRUE", + 
"EXISTS(SELECT 1) OR TRUE"); - assertEquals(doubleColumnResult.getRowCount(), 1); - assertEquals(doubleColumnResult.getTypes().get(0), DOUBLE); - assertEquals(doubleColumnResult.getMaterializedRows().get(0).getField(0), 1.0); - } + queryTemplate("SELECT %projection% FROM nation WHERE %condition%") + .replaceAll(projections, conditions) + .forEach(this::assertQuery); - @Test - public void testForcePartitioningMarkDistinctInput() - { - Session session = Session.builder(getSession()) - .setSystemProperty(IGNORE_DOWNSTREAM_PREFERENCES, "false") - .build(); + queryTemplate("SELECT %projection% FROM nation WHERE (%condition%) AND nationkey <3") + .replaceAll(projections, conditions) + .forEach(this::assertQuery); assertQuery( - session, - "SELECT count(orderkey), count(distinct orderkey), custkey , count(1) FROM ( SELECT * FROM (VALUES (1, 2)) as t(custkey, orderkey) UNION ALL SELECT 3, 4) GROUP BY 3", - "VALUES (1, 1, 1, 1), (1, 1, 3, 1)"); - - session = Session.builder(getSession()) - .setSystemProperty(IGNORE_DOWNSTREAM_PREFERENCES, "true") - .build(); - + "SELECT count(*) FROM nation WHERE (SELECT true FROM (SELECT 1) t(a) WHERE a = nationkey) OR TRUE", + "SELECT 25"); assertQuery( - session, - "SELECT count(orderkey), count(distinct orderkey), custkey , count(1) FROM ( SELECT * FROM (VALUES (1, 2)) as t(custkey, orderkey) UNION ALL SELECT 3, 4) GROUP BY 3", - "VALUES (1, 1, 1, 1), (1, 1, 3, 1)"); + "SELECT (SELECT true FROM (SELECT 1) t(a) WHERE a = nationkey) " + + "FROM nation " + + "WHERE (SELECT true FROM (SELECT 1) t(a) WHERE a = nationkey) OR TRUE " + + "ORDER BY nationkey " + + "LIMIT 2", + "VALUES true, null"); } } diff --git a/presto-tests/pom.xml b/presto-tests/pom.xml index 5056972755c7..7d66f1d300e0 100644 --- a/presto-tests/pom.xml +++ b/presto-tests/pom.xml @@ -37,6 +37,11 @@ presto-spi + + io.prestosql + presto-parser + + io.prestosql presto-testing diff --git a/presto-tests/src/test/java/io/prestosql/tests/AbstractTestEngineOnlyQueries.java 
b/presto-tests/src/test/java/io/prestosql/tests/AbstractTestEngineOnlyQueries.java index bf037c9100b9..15b82f972a94 100644 --- a/presto-tests/src/test/java/io/prestosql/tests/AbstractTestEngineOnlyQueries.java +++ b/presto-tests/src/test/java/io/prestosql/tests/AbstractTestEngineOnlyQueries.java @@ -13,12 +13,30 @@ */ package io.prestosql.tests; +import com.google.common.collect.ArrayListMultimap; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableMultimap; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Maps; +import com.google.common.collect.Multimap; +import com.google.common.collect.Multimaps; +import com.google.common.collect.Ordering; import io.prestosql.Session; +import io.prestosql.SystemSessionProperties; +import io.prestosql.spi.type.SqlTimestampWithTimeZone; import io.prestosql.spi.type.TimeZoneKey; import io.prestosql.testing.AbstractTestQueryFramework; +import io.prestosql.testing.MaterializedResult; +import io.prestosql.testing.MaterializedRow; +import io.prestosql.testing.assertions.Assert; +import io.prestosql.type.SqlIntervalDayTime; +import io.prestosql.type.SqlIntervalYearMonth; import org.intellij.lang.annotations.Language; +import org.testng.annotations.DataProvider; import org.testng.annotations.Test; +import java.math.BigDecimal; import java.time.LocalDate; import java.time.LocalDateTime; import java.time.LocalTime; @@ -27,14 +45,47 @@ import java.time.ZoneOffset; import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; +import java.util.HashSet; +import java.util.List; import java.util.Objects; +import java.util.Optional; +import java.util.Set; +import java.util.stream.IntStream; import static com.google.common.base.Preconditions.checkState; +import static com.google.common.collect.Iterables.getOnlyElement; +import static io.prestosql.SystemSessionProperties.IGNORE_DOWNSTREAM_PREFERENCES; +import static 
io.prestosql.spi.type.BigintType.BIGINT; +import static io.prestosql.spi.type.BooleanType.BOOLEAN; +import static io.prestosql.spi.type.DecimalType.createDecimalType; +import static io.prestosql.spi.type.DoubleType.DOUBLE; +import static io.prestosql.spi.type.VarcharType.VARCHAR; +import static io.prestosql.sql.tree.ExplainType.Type.DISTRIBUTED; +import static io.prestosql.sql.tree.ExplainType.Type.IO; +import static io.prestosql.sql.tree.ExplainType.Type.LOGICAL; +import static io.prestosql.testing.MaterializedResult.resultBuilder; +import static io.prestosql.testing.QueryAssertions.assertContains; +import static io.prestosql.testing.QueryAssertions.assertEqualsIgnoreOrder; +import static io.prestosql.testing.TestingSession.TESTING_CATALOG; +import static io.prestosql.testing.TestngUtils.toDataProvider; +import static io.prestosql.tests.QueryTemplate.parameter; +import static io.prestosql.tests.QueryTemplate.queryTemplate; +import static io.prestosql.type.UnknownType.UNKNOWN; +import static java.lang.String.format; +import static java.util.stream.Collectors.joining; +import static java.util.stream.Collectors.toList; +import static java.util.stream.IntStream.range; +import static org.assertj.core.api.Assertions.assertThat; import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertFalse; +import static org.testng.Assert.assertNotEquals; +import static org.testng.Assert.assertTrue; public abstract class AbstractTestEngineOnlyQueries extends AbstractTestQueryFramework { + private static final DateTimeFormatter ZONED_DATE_TIME_FORMAT = DateTimeFormatter.ofPattern(SqlTimestampWithTimeZone.JSON_FORMAT); + @Test public void testTimeLiterals() { @@ -95,4 +146,3450 @@ public void testLocallyUnrepresentableTimeLiterals() assertEquals(computeScalar(sql), localTimeThatDidNotOccurOn19700101); // this tests Presto and the QueryRunner assertQuery(sql); // this tests H2QueryRunner } + + @Test + public void testNodeRoster() + { + List result = 
computeActual("SELECT * FROM system.runtime.nodes").getMaterializedRows(); + assertEquals(result.size(), getNodeCount()); + } + + @Test + public void testTransactionsTable() + { + List result = computeActual("SELECT * FROM system.runtime.transactions").getMaterializedRows(); + assertTrue(result.size() >= 1); // At least one row for the current transaction. + } + + @Test + public void testCountOnInternalTables() + { + List rows = computeActual("SELECT count(*) FROM system.runtime.nodes").getMaterializedRows(); + assertEquals(((Long) rows.get(0).getField(0)).longValue(), getNodeCount()); + } + + @Test + public void testParsingError() + { + assertQueryFails("SELECT foo FROM", "line 1:16: mismatched input ''. Expecting: .*"); + } + + @Test + public void testSelectLargeInterval() + { + MaterializedResult result = computeActual("SELECT INTERVAL '30' DAY"); + assertEquals(result.getRowCount(), 1); + assertEquals(result.getMaterializedRows().get(0).getField(0), new SqlIntervalDayTime(30, 0, 0, 0, 0)); + + result = computeActual("SELECT INTERVAL '" + Short.MAX_VALUE + "' YEAR"); + assertEquals(result.getRowCount(), 1); + assertEquals(result.getMaterializedRows().get(0).getField(0), new SqlIntervalYearMonth(Short.MAX_VALUE, 0)); + } + + @Test + public void testNoFrom() + { + assertQuery("SELECT 1 + 2, 3 + 4"); + } + + @Test + public void testSelectNull() + { + assertQuery("SELECT NULL"); + } + + @Test + public void testExceptAllFails() + { + assertQueryFails("SELECT * FROM (VALUES 1, 2, 3, 4) EXCEPT ALL SELECT * FROM (VALUES 3, 4)", "line 1:35: EXCEPT ALL not yet implemented"); + } + + @Test + public void testIntersectAllFails() + { + assertQueryFails("SELECT * FROM (VALUES 1, 2, 3, 4) INTERSECT ALL SELECT * FROM (VALUES 3, 4)", "line 1:35: INTERSECT ALL not yet implemented"); + } + + @Test + public void testLargeInArray() + { + String arrayValues = range(0, 5000) + .mapToObj(i -> format("ARRAY[%s, %s, %s]", i, i + 1, i + 2)) + .collect(joining(", ")); + assertQuery("SELECT 
ARRAY[0, 0, 0] in (ARRAY[0, 0, 0], " + arrayValues + ")", "values true"); + assertQuery("SELECT ARRAY[0, 0, 0] in (" + arrayValues + ")", "values false"); + } + + @Test + public void testAssignUniqueId() + { + String unionLineitem25Times = range(0, 25) + .mapToObj(i -> "SELECT * FROM lineitem") + .collect(joining(" UNION ALL ")); + + assertQuery( + "SELECT count(*) FROM (" + + "SELECT * FROM (" + + " SELECT (SELECT count(*) WHERE c = 1) " + + " FROM (SELECT CASE orderkey WHEN 1 THEN orderkey ELSE 1 END " + + " FROM (" + unionLineitem25Times + ")) o(c)) result(a) " + + "WHERE a = 1)", + "VALUES 1504375"); + } + + @Test + public void testAtTimeZone() + { + // TODO the expected values here are non-sensical due to https://github.com/prestosql/presto/issues/37 + assertEquals(computeScalar("SELECT TIMESTAMP '2012-10-31 01:00' AT TIME ZONE INTERVAL '07:09' hour to minute"), zonedDateTime("2012-10-30 18:09:00.000 +07:09")); + assertEquals(computeScalar("SELECT TIMESTAMP '2012-10-31 01:00' AT TIME ZONE 'Asia/Oral'"), zonedDateTime("2012-10-30 16:00:00.000 Asia/Oral")); + assertEquals(computeScalar("SELECT MIN(x) AT TIME ZONE 'America/Chicago' FROM (VALUES TIMESTAMP '1970-01-01 00:01:00+00:00') t(x)"), zonedDateTime("1969-12-31 18:01:00.000 America/Chicago")); + assertEquals(computeScalar("SELECT TIMESTAMP '2012-10-31 01:00' AT TIME ZONE '+07:09'"), zonedDateTime("2012-10-30 18:09:00.000 +07:09")); + assertEquals(computeScalar("SELECT TIMESTAMP '2012-10-31 01:00 UTC' AT TIME ZONE 'America/Los_Angeles'"), zonedDateTime("2012-10-30 18:00:00.000 America/Los_Angeles")); + assertEquals(computeScalar("SELECT TIMESTAMP '2012-10-31 01:00' AT TIME ZONE 'America/Los_Angeles'"), zonedDateTime("2012-10-30 04:00:00.000 America/Los_Angeles")); + assertEquals(computeActual("SELECT x AT TIME ZONE 'America/Los_Angeles' FROM (values TIMESTAMP '1970-01-01 00:01:00+00:00', TIMESTAMP '1970-01-01 08:01:00+08:00', TIMESTAMP '1969-12-31 16:01:00-08:00') t(x)").getOnlyColumnAsSet(), + 
ImmutableSet.of(zonedDateTime("1969-12-31 16:01:00.000 America/Los_Angeles"))); + assertEquals(computeActual("SELECT x AT TIME ZONE 'America/Los_Angeles' FROM (values TIMESTAMP '1970-01-01 00:01:00', TIMESTAMP '1970-01-01 08:01:00', TIMESTAMP '1969-12-31 16:01:00') t(x)").getOnlyColumn().collect(toList()), + ImmutableList.of(zonedDateTime("1970-01-01 03:01:00.000 America/Los_Angeles"), zonedDateTime("1970-01-01 11:01:00.000 America/Los_Angeles"), zonedDateTime("1969-12-31 19:01:00.000 America/Los_Angeles"))); + assertEquals(computeScalar("SELECT min(x) AT TIME ZONE 'America/Los_Angeles' FROM (values TIMESTAMP '1970-01-01 00:01:00+00:00', TIMESTAMP '1970-01-01 08:01:00+08:00', TIMESTAMP '1969-12-31 16:01:00-08:00') t(x)"), + zonedDateTime("1969-12-31 16:01:00.000 America/Los_Angeles")); + + // with chained AT TIME ZONE + assertEquals(computeScalar("SELECT TIMESTAMP '2012-10-31 01:00' AT TIME ZONE 'America/Los_Angeles' AT TIME ZONE 'UTC'"), zonedDateTime("2012-10-30 11:00:00.000 UTC")); + assertEquals(computeScalar("SELECT TIMESTAMP '2012-10-31 01:00' AT TIME ZONE 'Asia/Tokyo' AT TIME ZONE 'America/Los_Angeles'"), zonedDateTime("2012-10-30 04:00:00.000 America/Los_Angeles")); + assertEquals(computeScalar("SELECT TIMESTAMP '2012-10-31 01:00' AT TIME ZONE 'America/Los_Angeles' AT TIME ZONE 'Asia/Shanghai'"), zonedDateTime("2012-10-30 19:00:00.000 Asia/Shanghai")); + assertEquals(computeScalar("SELECT min(x) AT TIME ZONE 'America/Los_Angeles' AT TIME ZONE 'UTC' FROM (values TIMESTAMP '1970-01-01 00:01:00+00:00', TIMESTAMP '1970-01-01 08:01:00+08:00', TIMESTAMP '1969-12-31 16:01:00-08:00') t(x)"), + zonedDateTime("1970-01-01 00:01:00.000 UTC")); + + // with AT TIME ZONE in VALUES + assertEquals(computeScalar("SELECT * FROM (VALUES TIMESTAMP '2012-10-31 01:00' AT TIME ZONE 'Asia/Oral')"), zonedDateTime("2012-10-30 16:00:00.000 Asia/Oral")); + } + + @Test + public void testIn() + { + assertQuery("SELECT x FROM (values 3, 100) t(x) WHERE x IN (2147483649)", "SELECT * WHERE 
false"); + assertQuery("SELECT x FROM (values 3, 100, 2147483648, 2147483649, 2147483650) t(x) WHERE x IN (2147483648, 2147483650)", "values 2147483648, 2147483650"); + assertQuery("SELECT x FROM (values 3, 100, 2147483648, 2147483649, 2147483650) t(x) WHERE x IN (3, 4, 2147483648, 2147483650)", "values 3, 2147483648, 2147483650"); + assertQuery("SELECT x FROM (values 1, 2, 3) t(x) WHERE x IN (1 + CAST(rand() < 0 AS bigint), 2 + CAST(rand() < 0 AS bigint))", "values 1, 2"); + assertQuery("SELECT x FROM (values 1, 2, 3, 4) t(x) WHERE x IN (1 + CAST(rand() < 0 AS bigint), 2 + CAST(rand() < 0 AS bigint), 4)", "values 1, 2, 4"); + assertQuery("SELECT x FROM (values 1, 2, 3, 4) t(x) WHERE x IN (4, 2, 1)", "values 1, 2, 4"); + assertQuery("SELECT x FROM (values 1, 2, 3, 2147483648) t(x) WHERE x IN (1 + CAST(rand() < 0 AS bigint), 2 + CAST(rand() < 0 AS bigint), 2147483648)", "values 1, 2, 2147483648"); + assertQuery("SELECT x IN (0) FROM (values 4294967296) t(x)", "values false"); + assertQuery("SELECT x IN (0, 4294967297 + CAST(rand() < 0 AS bigint)) FROM (values 4294967296, 4294967297) t(x)", "values false, true"); + assertQuery("SELECT NULL in (1, 2, 3)", "values null"); + assertQuery("SELECT 1 in (1, NULL, 3)", "values true"); + assertQuery("SELECT 2 in (1, NULL, 3)", "values null"); + assertQuery("SELECT x FROM (values DATE '1970-01-01', DATE '1970-01-03') t(x) WHERE x IN (DATE '1970-01-01')", "values DATE '1970-01-01'"); + assertEquals( + computeActual("SELECT x FROM (values TIMESTAMP '1970-01-01 00:01:00+00:00', TIMESTAMP '1970-01-01 08:01:00+08:00', TIMESTAMP '1970-01-01 00:01:00+08:00') t(x) WHERE x IN (TIMESTAMP '1970-01-01 00:01:00+00:00')") + .getOnlyColumn().collect(toList()), + ImmutableList.of(zonedDateTime("1970-01-01 00:01:00.000 UTC"), zonedDateTime("1970-01-01 08:01:00.000 +08:00"))); + assertQuery("SELECT COUNT(*) FROM (values 1) t(x) WHERE x IN (null, 0)", "SELECT 0"); + assertQuery("SELECT d IN (DECIMAL '2.0', DECIMAL '30.0') FROM (VALUES (2.0E0)) 
t(d)", "SELECT true"); // coercion with type only coercion inside IN list + } + + @Test + public void testNullOnLhsOfInPredicateAllowed() + { + assertQuery("SELECT NULL IN (1, 2, 3)", "SELECT NULL"); + assertQuery("SELECT NULL IN (SELECT 1)", "SELECT NULL"); + assertQuery("SELECT NULL IN (SELECT 1 WHERE FALSE)", "SELECT FALSE"); + assertQuery("SELECT x FROM (VALUES NULL) t(x) WHERE x IN (SELECT 1)", "SELECT 33 WHERE FALSE"); + assertQuery("SELECT NULL IN (SELECT CAST(NULL AS BIGINT))", "SELECT NULL"); + assertQuery("SELECT NULL IN (SELECT NULL WHERE FALSE)", "SELECT FALSE"); + assertQuery("SELECT NULL IN ((SELECT 1) UNION ALL (SELECT NULL))", "SELECT NULL"); + assertQuery("SELECT x IN (SELECT TRUE) FROM (SELECT * FROM (VALUES CAST(NULL AS BOOLEAN)) t(x) WHERE (x OR NULL) IS NULL)", "SELECT NULL"); + assertQuery("SELECT x IN (SELECT 1) FROM (SELECT * FROM (VALUES CAST(NULL AS INTEGER)) t(x) WHERE (x + 10 IS NULL) OR X = 2)", "SELECT NULL"); + assertQuery("SELECT x IN (SELECT 1 WHERE FALSE) FROM (SELECT * FROM (VALUES CAST(NULL AS INTEGER)) t(x) WHERE (x + 10 IS NULL) OR X = 2)", "SELECT FALSE"); + } + + @Test + public void testInSubqueryWithCrossJoin() + { + assertQuery("SELECT a FROM (VALUES (1),(2)) t(a) WHERE a IN " + + "(SELECT b FROM (VALUES (ARRAY[2])) AS t1 (a) CROSS JOIN UNNEST(a) AS t2(b))", "SELECT 2"); + } + + @Test + public void testInUncorrelatedSubquery() + { + assertQuery( + "SELECT CASE WHEN false THEN 1 IN (VALUES 2) END", + "SELECT NULL"); + assertQuery( + "SELECT x FROM (VALUES 2) t(x) WHERE MAP(ARRAY[8589934592], ARRAY[x]) IN (VALUES MAP(ARRAY[8589934592],ARRAY[2]))", + "SELECT 2"); + assertQuery( + "SELECT a IN (VALUES 2), a FROM (VALUES (2)) t(a)", + "SELECT TRUE, 2"); + } + + @Test + public void testCaseInsensitiveRowFieldReference() + { + assertQuery("SELECT a.Col0 FROM (VALUES row(cast(ROW(1,2) AS ROW(col0 integer, col1 integer)))) AS t (a)", "SELECT 1"); + } + + @Test + public void testCaseWithSupertypeCast() + { + assertQuery(" SELECT CASE 
x WHEN 1 THEN CAST(1 AS decimal(4,1)) WHEN 2 THEN CAST(1 AS decimal(4,2)) ELSE CAST(1 AS decimal(4,3)) END FROM (values 1) t(x)", "SELECT 1.000"); + } + + @Test + public void testChecksum() + { + assertQuery("SELECT to_hex(checksum(0))", "SELECT '0000000000000000'"); + } + + @Test + public void testCoercions() + { + // VARCHAR + assertQuery("SELECT length(NULL)"); + assertQuery("SELECT CAST('abc' AS VARCHAR(255)) || CAST('abc' AS VARCHAR(252))"); + assertQuery("SELECT CAST('abc' AS VARCHAR(255)) || 'abc'"); + + // DECIMAL - DECIMAL + assertQuery("SELECT CAST(1.1 AS DECIMAL(38,1)) + NULL"); + assertQuery("SELECT CAST(292 AS DECIMAL(38,1)) + CAST(292.1 AS DECIMAL(5,1))"); + assertEqualsIgnoreOrder( + computeActual("SELECT ARRAY[CAST(282 AS DECIMAL(22,1)), CAST(282 AS DECIMAL(10,1))] || CAST(292 AS DECIMAL(5,1))"), + computeActual("SELECT ARRAY[CAST(282 AS DECIMAL(22,1)), CAST(282 AS DECIMAL(10,1)), CAST(292 AS DECIMAL(5,1))]")); + + // BIGINT - DECIMAL + assertQuery("SELECT CAST(1.1 AS DECIMAL(38,1)) + CAST(292 AS BIGINT)"); + assertQuery("SELECT CAST(292 AS DECIMAL(38,1)) = CAST(292 AS BIGINT)"); + assertEqualsIgnoreOrder( + computeActual("SELECT ARRAY[CAST(282 AS DECIMAL(22,1)), CAST(282 AS DECIMAL(10,1))] || CAST(292 AS BIGINT)"), + computeActual("SELECT ARRAY[CAST(282 AS DECIMAL(22,1)), CAST(282 AS DECIMAL(10,1)), CAST(292 AS DECIMAL(19,0))]")); + + // DECIMAL - DECIMAL + assertQuery("SELECT CAST(1.1 AS DECIMAL(38,1)) + CAST(1.1 AS DOUBLE)"); + assertQuery("SELECT CAST(1.1 AS DECIMAL(38,1)) = CAST(1.1 AS DOUBLE)"); + assertQuery("SELECT SIN(CAST(1.1 AS DECIMAL(38,1)))"); + assertEqualsIgnoreOrder( + computeActual("SELECT ARRAY[CAST(282.1 AS DOUBLE), CAST(283.2 AS DOUBLE)] || CAST(101.3 AS DECIMAL(5,1))"), + computeActual("SELECT ARRAY[CAST(282.1 AS DOUBLE), CAST(283.2 AS DOUBLE), CAST(101.3 AS DOUBLE)]")); + + // INTEGER - DECIMAL + assertQuery("SELECT CAST(1.1 AS DECIMAL(38,1)) + CAST(292 AS INTEGER)"); + assertQuery("SELECT CAST(292 AS DECIMAL(38,1)) = CAST(292 
AS INTEGER)"); + assertEqualsIgnoreOrder( + computeActual("SELECT ARRAY[CAST(282 AS DECIMAL(22,1)), CAST(282 AS DECIMAL(10,1))] || CAST(292 AS INTEGER)"), + computeActual("SELECT ARRAY[CAST(282 AS DECIMAL(22,1)), CAST(282 AS DECIMAL(10,1)), CAST(292 AS DECIMAL(19,0))]")); + + // TINYINT - DECIMAL + assertQuery("SELECT CAST(1.1 AS DECIMAL(38,1)) + CAST(CAST(121 AS DECIMAL(30,1)) AS TINYINT)"); + assertQuery("SELECT CAST(292 AS DECIMAL(38,1)) = CAST(CAST(121 AS DECIMAL(30,1)) AS TINYINT)"); + + // SMALLINT - DECIMAL + assertQuery("SELECT CAST(1.1 AS DECIMAL(38,1)) + CAST(CAST(121 AS DECIMAL(30,1)) AS SMALLINT)"); + assertQuery("SELECT CAST(292 AS DECIMAL(38,1)) = CAST(CAST(121 AS DECIMAL(30,1)) AS SMALLINT)"); + + // Complex coercions across joins + assertQuery("SELECT * FROM (" + + " SELECT t2.x || t2.z cc FROM (" + + " SELECT *" + + " FROM (VALUES (CAST('a' AS VARCHAR), CAST('c' AS VARCHAR))) t(x, z)" + + " ) t2" + + " JOIN (" + + " SELECT *" + + " FROM (VALUES (CAST('a' AS VARCHAR), CAST('c' AS VARCHAR))) u(x, z)" + + " WHERE z='c'" + + " ) u2" + + " ON t2.z = u2.z" + + ") tt " + + "WHERE cc = 'ac'", + "SELECT 'ac'"); + + assertQuery("SELECT * FROM (" + + " SELECT greatest (t.x, t.z) cc FROM (" + + " SELECT *" + + " FROM (VALUES (VARCHAR 'a', VARCHAR 'c')) t(x, z)" + + " ) t" + + " JOIN (" + + " SELECT *" + + " FROM (VALUES (VARCHAR 'a', VARCHAR 'c')) u(x, z)" + + " WHERE z='c'" + + " ) u" + + " ON t.z = u.z" + + ")" + + "WHERE cc = 'c'", + "SELECT 'c'"); + + assertQuery("SELECT cc[1], cc[2] FROM (" + + " SELECT * FROM (" + + " SELECT array[t.x, t.z] cc FROM (" + + " SELECT *" + + " FROM (VALUES (VARCHAR 'a', VARCHAR 'c')) t(x, z)" + + " ) t" + + " JOIN (" + + " SELECT *" + + " FROM (VALUES (VARCHAR 'a', VARCHAR 'c')) u(x, z)" + + " WHERE z='c'" + + " ) u" + + " ON t.z = u.z)" + + " WHERE cc = array['a', 'c'])", + "SELECT 'a', 'c'"); + + assertQuery("SELECT c = 'x'" + + "FROM (" + + " SELECT 'x' AS c" + + " UNION ALL" + + " SELECT 'yy' AS c" + + ")"); + } + + 
@Test + public void testConcatOperator() + { + assertQuery("SELECT '12' || '34'"); + } + + @Test + public void testEmptyInputForUnnest() + { + assertQuery("SELECT val FROM (SELECT DISTINCT vals FROM (values (array[2])) t(vals) WHERE false) tmp CROSS JOIN unnest(tmp.vals) tt(val)", "SELECT 1 WHERE 1=2"); + } + + @Test + public void testQuantifiedComparison() + { + assertQuery("SELECT nationkey, name, regionkey FROM nation WHERE regionkey = ANY (SELECT regionkey FROM region WHERE name IN ('ASIA', 'EUROPE'))"); + assertQuery("SELECT nationkey, name, regionkey FROM nation WHERE regionkey = ALL (SELECT regionkey FROM region WHERE name IN ('ASIA', 'EUROPE'))"); + + assertQuery("SELECT nationkey, name, regionkey FROM nation WHERE regionkey <> ANY (SELECT regionkey FROM region WHERE name IN ('ASIA', 'EUROPE'))"); + assertQuery("SELECT nationkey, name, regionkey FROM nation WHERE regionkey <> ALL (SELECT regionkey FROM region WHERE name IN ('ASIA', 'EUROPE'))"); + + assertQuery("SELECT nationkey, name, regionkey FROM nation WHERE regionkey = ALL (SELECT regionkey FROM region WHERE name IN ('ASIA'))"); + assertQuery("SELECT nationkey, name, regionkey FROM nation WHERE regionkey <> ALL (SELECT regionkey FROM region WHERE name IN ('ASIA'))"); + assertQuery("SELECT nationkey, name, regionkey FROM nation WHERE regionkey = ANY (SELECT regionkey FROM region WHERE name IN ('EUROPE'))"); + assertQuery("SELECT nationkey, name, regionkey FROM nation WHERE regionkey <> ANY (SELECT regionkey FROM region WHERE name IN ('EUROPE'))"); + + assertQuery("SELECT nationkey, name, regionkey FROM nation WHERE regionkey < SOME (SELECT regionkey FROM region WHERE name IN ('ASIA', 'EUROPE'))"); + assertQuery("SELECT nationkey, name, regionkey FROM nation WHERE regionkey <= ANY (SELECT regionkey FROM region WHERE name IN ('ASIA', 'EUROPE'))"); + assertQuery("SELECT nationkey, name, regionkey FROM nation WHERE regionkey > ANY (SELECT regionkey FROM region WHERE name IN ('ASIA', 'EUROPE'))"); + 
assertQuery("SELECT nationkey, name, regionkey FROM nation WHERE regionkey >= SOME (SELECT regionkey FROM region WHERE name IN ('ASIA', 'EUROPE'))"); + + assertQuery("SELECT nationkey, name, regionkey FROM nation WHERE regionkey < ALL (SELECT regionkey FROM region WHERE name IN ('ASIA', 'EUROPE'))"); + assertQuery("SELECT nationkey, name, regionkey FROM nation WHERE regionkey <= ALL (SELECT regionkey FROM region WHERE name IN ('ASIA', 'EUROPE'))"); + assertQuery("SELECT nationkey, name, regionkey FROM nation WHERE regionkey > ALL (SELECT regionkey FROM region WHERE name IN ('ASIA', 'EUROPE'))"); + assertQuery("SELECT nationkey, name, regionkey FROM nation WHERE regionkey >= ALL (SELECT regionkey FROM region WHERE name IN ('ASIA', 'EUROPE'))"); + + // subquery with coercion + assertQuery("SELECT 1.0 < ALL(SELECT 1), 1 < ALL(SELECT 1)"); + assertQuery("SELECT 1.0 < ANY(SELECT 1), 1 < ANY(SELECT 1)"); + assertQuery("SELECT 1.0 <= ALL(SELECT 1) WHERE 1 <= ALL(SELECT 1)"); + assertQuery("SELECT 1.0 <= ANY(SELECT 1) WHERE 1 <= ANY(SELECT 1)"); + assertQuery("SELECT 1.0 <= ALL(SELECT 1), 1 <= ALL(SELECT 1) WHERE 1 <= ALL(SELECT 1)"); + assertQuery("SELECT 1.0 <= ANY(SELECT 1), 1 <= ANY(SELECT 1) WHERE 1 <= ANY(SELECT 1)"); + assertQuery("SELECT 1.0 = ALL(SELECT 1) WHERE 1 = ALL(SELECT 1)"); + assertQuery("SELECT 1.0 = ANY(SELECT 1) WHERE 1 = ANY(SELECT 1)"); + assertQuery("SELECT 1.0 = ALL(SELECT 1), 2 = ALL(SELECT 1) WHERE 1 = ALL(SELECT 1)"); + assertQuery("SELECT 1.0 = ANY(SELECT 1), 2 = ANY(SELECT 1) WHERE 1 = ANY(SELECT 1)"); + + // subquery with supertype coercion + assertQuery("SELECT CAST(1 AS decimal(3,2)) < ALL(SELECT CAST(1 AS decimal(3,1)))"); + assertQuery("SELECT CAST(1 AS decimal(3,2)) < ANY(SELECT CAST(1 AS decimal(3,1)))"); + assertQuery("SELECT CAST(1 AS decimal(3,2)) <= ALL(SELECT CAST(1 AS decimal(3,1)))"); + assertQuery("SELECT CAST(1 AS decimal(3,2)) <= ANY(SELECT CAST(1 AS decimal(3,1)))"); + assertQuery("SELECT CAST(1 AS decimal(3,2)) = ALL(SELECT 
CAST(1 AS decimal(3,1)))"); + assertQuery("SELECT CAST(1 AS decimal(3,2)) = ANY(SELECT CAST(1 AS decimal(3,1)))", "SELECT true"); + assertQuery("SELECT CAST(1 AS decimal(3,2)) <> ALL(SELECT CAST(1 AS decimal(3,1)))"); + assertQuery("SELECT CAST(1 AS decimal(3,2)) <> ANY(SELECT CAST(1 AS decimal(3,1)))"); + } + + @Test(dataProvider = "quantified_comparisons_corner_cases") + public void testQuantifiedComparisonCornerCases(String query) + { + assertQuery(query); + } + + @DataProvider(name = "quantified_comparisons_corner_cases") + public Object[][] qualifiedComparisonsCornerCases() + { + //the %subquery% is wrapped in a SELECT so that H2 does not blow up on the VALUES subquery + return queryTemplate("SELECT %value% %operator% %quantifier% (SELECT * FROM (%subquery%))") + .replaceAll( + parameter("subquery").of( + "SELECT 1 WHERE false", + "SELECT CAST(NULL AS INTEGER)", + "VALUES (1), (NULL)"), + parameter("quantifier").of("ALL", "ANY"), + parameter("value").of("1", "NULL"), + parameter("operator").of("=", "!=", "<", ">", "<=", ">=")) + .collect(toDataProvider()); + } + + @Test + public void testInvalidColumn() + { + assertQueryFails( + "SELECT * FROM lineitem l JOIN (SELECT orderkey_1, custkey FROM orders) o on l.orderkey = o.orderkey_1", + "line 1:39: Column 'orderkey_1' cannot be resolved"); + } + + @Test + public void testInvalidCast() + { + assertQueryFails( + "SELECT CAST(1 AS DATE)", + "line 1:8: Cannot cast integer to date"); + } + + @Test + public void testInvalidCastInMultilineQuery() + { + assertQueryFails( + "SELECT CAST(totalprice AS BIGINT),\n" + + "CAST(2015 AS DATE),\n" + + "CAST(orderkey AS DOUBLE) FROM orders", + "line 2:1: Cannot cast integer to date"); + } + + @Test + public void testTryInvalidCast() + { + assertQuery("SELECT TRY(CAST('a' AS BIGINT))", + "SELECT NULL"); + } + + @Test + public void testDefaultDecimalLiteralSwitch() + { + Session decimalLiteral = Session.builder(getSession()) + 
.setSystemProperty(SystemSessionProperties.PARSE_DECIMAL_LITERALS_AS_DOUBLE, "false") + .build(); + MaterializedResult decimalColumnResult = computeActual(decimalLiteral, "SELECT 1.0"); + + assertEquals(decimalColumnResult.getRowCount(), 1); + assertEquals(decimalColumnResult.getTypes().get(0), createDecimalType(2, 1)); + assertEquals(decimalColumnResult.getMaterializedRows().get(0).getField(0), new BigDecimal("1.0")); + + Session doubleLiteral = Session.builder(getSession()) + .setSystemProperty(SystemSessionProperties.PARSE_DECIMAL_LITERALS_AS_DOUBLE, "true") + .build(); + MaterializedResult doubleColumnResult = computeActual(doubleLiteral, "SELECT 1.0"); + + assertEquals(doubleColumnResult.getRowCount(), 1); + assertEquals(doubleColumnResult.getTypes().get(0), DOUBLE); + assertEquals(doubleColumnResult.getMaterializedRows().get(0).getField(0), 1.0); + } + + @Test + public void testExecute() + { + Session session = Session.builder(getSession()) + .addPreparedStatement("my_query", "SELECT 123, 'abc'") + .build(); + assertQuery(session, "EXECUTE my_query", "SELECT 123, 'abc'"); + } + + @Test + public void testExecuteNoSuchQuery() + { + assertQueryFails("EXECUTE my_query", "Prepared statement not found: my_query"); + } + + @Test + public void testExecuteUsing() + { + String query = "SELECT a + 1, count(?) FROM (VALUES 1, 2, 3, 2) t1(a) JOIN (VALUES 1, 2, 3, 4) t2(b) ON b < ? WHERE a < ? 
GROUP BY a + 1 HAVING count(1) > ?"; + Session session = Session.builder(getSession()) + .addPreparedStatement("my_query", query) + .build(); + assertQuery(session, + "EXECUTE my_query USING 1, 5, 4, 0", + "VALUES (2, 4), (3, 8), (4, 4)"); + } + + @Test + public void testExecuteUsingComplexJoinCriteria() + { + String query = "SELECT * FROM (VALUES 1) t(a) JOIN (VALUES 2) u(a) ON t.a + u.a < ?"; + Session session = Session.builder(getSession()) + .addPreparedStatement("my_query", query) + .build(); + assertQuery(session, + "EXECUTE my_query USING 5", + "VALUES (1, 2)"); + } + + @Test + public void testExecuteUsingWithSubquery() + { + String query = "SELECT ? in (SELECT orderkey FROM orders)"; + Session session = Session.builder(getSession()) + .addPreparedStatement("my_query", query) + .build(); + + assertQuery(session, + "EXECUTE my_query USING 10", + "SELECT 10 in (SELECT orderkey FROM orders)"); + } + + @Test + public void testExecuteUsingWithSubqueryInJoin() + { + String query = "SELECT * " + + "FROM " + + " (VALUES ?,2,3) t(x) " + + " JOIN " + + " (VALUES 1,2,3) t2(y) " + + " ON " + + "(x in (VALUES 1,2,?)) = (y in (VALUES 1,2,3)) AND (x in (VALUES 1,?)) = (y in (VALUES 1,2))"; + + Session session = Session.builder(getSession()) + .addPreparedStatement("my_query", query) + .build(); + assertQuery(session, + "EXECUTE my_query USING 1, 3, 2", + "VALUES (1,1), (1,2), (2,2), (2,1), (3,3)"); + } + + @Test + public void testExecuteWithParametersInGroupBy() + { + String query = "SELECT a + ?, count(1) FROM (VALUES 1, 2, 3, 2) t(a) GROUP BY a + ?"; + Session session = Session.builder(getSession()) + .addPreparedStatement("my_query", query) + .build(); + + assertQueryFails( + session, + "EXECUTE my_query USING 1, 1", + "\\Qline 1:10: '(a + ?)' must be an aggregate expression or appear in GROUP BY clause\\E"); + } + + @Test + public void testExecuteUsingWithWithClause() + { + String query = "WITH src AS (SELECT * FROM (VALUES (1, 4),(2, 5), (3, 6)) AS t(id1, id2) WHERE 
id2 = ?)" + + " SELECT * from src WHERE id1 between ? and ?"; + + Session session = Session.builder(getSession()) + .addPreparedStatement("my_query", query) + .build(); + assertQuery(session, + "EXECUTE my_query USING 6, 0, 10", + "VALUES (3, 6)"); + } + + @Test + public void testExecuteUsingWithFunctionsAsParameters() + { + String query = "SELECT a + ? FROM (VALUES 1, 2, 3, 4) AS t(a)"; + + Session session = Session.builder(getSession()) + .addPreparedStatement("my_query", query) + .build(); + assertQuery(session, + "EXECUTE my_query USING abs(-2) ", + "VALUES 3, 4, 5, 6"); + } + + @Test + public void testParametersNonPreparedStatement() + { + assertQueryFails( + "SELECT ?, 1", + "line 1:1: Incorrect number of parameters: expected 1 but found 0"); + } + + @Test + public void testDescribeInput() + { + Session session = Session.builder(getSession()) + .addPreparedStatement("my_query", "SELECT ? FROM nation WHERE nationkey = ? and name < ?") + .build(); + MaterializedResult actual = computeActual(session, "DESCRIBE INPUT my_query"); + MaterializedResult expected = resultBuilder(session, BIGINT, VARCHAR) + .row(0, "unknown") + .row(1, "bigint") + .row(2, "varchar") + .build(); + assertEqualsIgnoreOrder(actual, expected); + } + + @Test + public void testDescribeInputWithAggregation() + { + Session session = Session.builder(getSession()) + .addPreparedStatement("my_query", "SELECT count(*) + ? 
FROM nation") + .build(); + MaterializedResult actual = computeActual(session, "DESCRIBE INPUT my_query"); + MaterializedResult expected = resultBuilder(session, BIGINT, VARCHAR) + .row(0, "bigint") + .build(); + assertEqualsIgnoreOrder(actual, expected); + } + + @Test + public void testDescribeInputNoParameters() + { + Session session = Session.builder(getSession()) + .addPreparedStatement("my_query", "SELECT * FROM nation") + .build(); + MaterializedResult actual = computeActual(session, "DESCRIBE INPUT my_query"); + MaterializedResult expected = resultBuilder(session, UNKNOWN, UNKNOWN).build(); + Assert.assertEquals(actual, expected); + } + + @Test + public void testDescribeInputNoSuchQuery() + { + assertQueryFails("DESCRIBE INPUT my_query", "Prepared statement not found: my_query"); + } + + @Test + public void testDescribeOutput() + { + Session session = Session.builder(getSession()) + .addPreparedStatement("my_query", "SELECT * FROM nation") + .build(); + + MaterializedResult actual = computeActual(session, "DESCRIBE OUTPUT my_query"); + MaterializedResult expected = resultBuilder(session, VARCHAR, VARCHAR, VARCHAR, VARCHAR, VARCHAR, BIGINT, BOOLEAN) + .row("nationkey", session.getCatalog().get(), session.getSchema().get(), "nation", "bigint", 8, false) + .row("name", session.getCatalog().get(), session.getSchema().get(), "nation", "varchar(25)", 0, false) + .row("regionkey", session.getCatalog().get(), session.getSchema().get(), "nation", "bigint", 8, false) + .row("comment", session.getCatalog().get(), session.getSchema().get(), "nation", "varchar(152)", 0, false) + .build(); + assertEqualsIgnoreOrder(actual, expected); + } + + @Test + public void testDescribeOutputNamedAndUnnamed() + { + Session session = Session.builder(getSession()) + .addPreparedStatement("my_query", "SELECT 1, name, regionkey AS my_alias FROM nation") + .build(); + + MaterializedResult actual = computeActual(session, "DESCRIBE OUTPUT my_query"); + MaterializedResult expected = 
resultBuilder(session, VARCHAR, VARCHAR, VARCHAR, VARCHAR, VARCHAR, BIGINT, BOOLEAN) + .row("_col0", "", "", "", "integer", 4, false) + .row("name", session.getCatalog().get(), session.getSchema().get(), "nation", "varchar(25)", 0, false) + .row("my_alias", session.getCatalog().get(), session.getSchema().get(), "nation", "bigint", 8, true) + .build(); + assertEqualsIgnoreOrder(actual, expected); + } + + @Test + public void testDescribeOutputNonSelect() + { + assertDescribeOutputRowCount("CREATE TABLE foo AS SELECT * FROM nation"); + assertDescribeOutputRowCount("DELETE FROM orders"); + + assertDescribeOutputEmpty("CALL foo()"); + assertDescribeOutputEmpty("SET SESSION optimize_hash_generation=false"); + assertDescribeOutputEmpty("RESET SESSION optimize_hash_generation"); + assertDescribeOutputEmpty("START TRANSACTION"); + assertDescribeOutputEmpty("COMMIT"); + assertDescribeOutputEmpty("ROLLBACK"); + assertDescribeOutputEmpty("GRANT INSERT ON foo TO bar"); + assertDescribeOutputEmpty("REVOKE INSERT ON foo FROM bar"); + assertDescribeOutputEmpty("CREATE SCHEMA foo"); + assertDescribeOutputEmpty("CREATE SCHEMA foo AUTHORIZATION bar"); + assertDescribeOutputEmpty("CREATE SCHEMA foo AUTHORIZATION bar WITH ( x = 'y' )"); + assertDescribeOutputEmpty("ALTER SCHEMA foo RENAME TO bar"); + assertDescribeOutputEmpty("ALTER SCHEMA foo SET AUTHORIZATION bar"); + assertDescribeOutputEmpty("DROP SCHEMA foo"); + assertDescribeOutputEmpty("CREATE TABLE foo (x bigint)"); + assertDescribeOutputEmpty("ALTER TABLE foo ADD COLUMN y bigint"); + assertDescribeOutputEmpty("ALTER TABLE foo RENAME TO bar"); + assertDescribeOutputEmpty("DROP TABLE foo"); + assertDescribeOutputEmpty("CREATE VIEW foo AS SELECT * FROM nation"); + assertDescribeOutputEmpty("DROP VIEW foo"); + assertDescribeOutputEmpty("PREPARE test FROM SELECT * FROM orders"); + assertDescribeOutputEmpty("EXECUTE test"); + assertDescribeOutputEmpty("DEALLOCATE PREPARE test"); + } + + private void 
assertDescribeOutputRowCount(@Language("SQL") String sql) + { + Session session = Session.builder(getSession()) + .addPreparedStatement("my_query", sql) + .build(); + + MaterializedResult actual = computeActual(session, "DESCRIBE OUTPUT my_query"); + MaterializedResult expected = resultBuilder(session, VARCHAR, VARCHAR, VARCHAR, VARCHAR, VARCHAR, BIGINT, BOOLEAN) + .row("rows", "", "", "", "bigint", 8, false) + .build(); + assertEqualsIgnoreOrder(actual, expected); + } + + private void assertDescribeOutputEmpty(@Language("SQL") String sql) + { + Session session = Session.builder(getSession()) + .addPreparedStatement("my_query", sql) + .build(); + + MaterializedResult actual = computeActual(session, "DESCRIBE OUTPUT my_query"); + MaterializedResult expected = resultBuilder(session, VARCHAR, VARCHAR, VARCHAR, VARCHAR, VARCHAR, BIGINT, BOOLEAN) + .build(); + assertEqualsIgnoreOrder(actual, expected); + } + + @Test + public void testDescribeOutputShowTables() + { + Session session = Session.builder(getSession()) + .addPreparedStatement("my_query", "SHOW TABLES") + .build(); + + MaterializedResult actual = computeActual(session, "DESCRIBE OUTPUT my_query"); + MaterializedResult expected = resultBuilder(session, VARCHAR, VARCHAR, VARCHAR, VARCHAR, VARCHAR, BIGINT, BOOLEAN) + .row("Table", session.getCatalog().get(), "information_schema", "tables", "varchar", 0, true) + .build(); + assertEqualsIgnoreOrder(actual, expected); + } + + @Test + public void testDescribeOutputOnAliasedColumnsAndExpressions() + { + Session session = Session.builder(getSession()) + .addPreparedStatement("my_query", "SELECT count(*) AS this_is_aliased, 1 + 2 FROM nation") + .build(); + + MaterializedResult actual = computeActual(session, "DESCRIBE OUTPUT my_query"); + MaterializedResult expected = resultBuilder(session, VARCHAR, VARCHAR, VARCHAR, VARCHAR, VARCHAR, BIGINT, BOOLEAN) + .row("this_is_aliased", "", "", "", "bigint", 8, true) + .row("_col1", "", "", "", "integer", 4, false) + .build(); + 
assertEqualsIgnoreOrder(actual, expected); + } + + @Test + public void testDescribeOutputNoSuchQuery() + { + assertQueryFails("DESCRIBE OUTPUT my_query", "Prepared statement not found: my_query"); + } + + @Test + public void testPreparedStatementWithSubqueries() + { + List leftValues = parameter("left").of( + "", "1 = ", + "EXISTS", + "1 IN", + "1 = ANY", "1 = ALL", + "2 <> ANY", "2 <> ALL", + "0 < ALL", "0 < ANY", + "1 <= ALL", "1 <= ANY"); + + queryTemplate("SELECT %left% (SELECT 1 WHERE 2 = ?)") + .replaceAll(leftValues) + .forEach(query -> { + Session session = Session.builder(getSession()) + .addPreparedStatement("my_query", query) + .build(); + assertQuery(session, "EXECUTE my_query USING 2", "SELECT true"); + }); + } + + @Test + public void testFunctionNotRegistered() + { + assertQueryFails( + "SELECT length(1)", + "\\Qline 1:8: Unexpected parameters (integer) for function length. Expected:\\E.*"); + } + + @Test + public void testFunctionArgumentTypeConstraint() + { + assertQueryFails( + "SELECT greatest(rgb(255, 0, 0))", + "\\Qline 1:8: Unexpected parameters (color) for function greatest. Expected: greatest(E) E:orderable\\E.*"); + } + + @Test + public void testTypeMismatch() + { + assertQueryFails("SELECT 1 <> 'x'", "\\Qline 1:10: '<>' cannot be applied to integer, varchar(1)\\E"); + } + + @Test + public void testInvalidType() + { + assertQueryFails("SELECT CAST(null AS array(foo))", "\\Qline 1:8: Unknown type: array(foo)\\E"); + } + + @Test + public void testInvalidTypeInfixOperator() + { + // Comment on why error message references varchar(214783647) instead of varchar(2) which seems expected result type for concatenation in expression. + // Currently variable argument functions do not play well with arguments using parametrized types. + // The variable argument functions mechanism requires that all the arguments are of exactly same type. We cannot enforce that base must match but parameters may differ. 
+ assertQueryFails("SELECT ('a' || 'z') + (3 * 4) / 5", "\\Qline 1:21: '+' cannot be applied to varchar, integer\\E"); + } + + @Test + public void testInvalidTypeBetweenOperator() + { + assertQueryFails("SELECT 'a' BETWEEN 3 AND 'z'", "\\Qline 1:12: Cannot check if varchar(1) is BETWEEN integer and varchar(1)\\E"); + } + + @Test + public void testInvalidTypeArray() + { + assertQueryFails("SELECT ARRAY[1, 2, 'a']", "\\Qline 1:20: All ARRAY elements must be the same type: integer\\E"); + } + + @Test + public void testArrayShuffle() + { + List expected = IntStream.rangeClosed(1, 500).boxed().collect(toList()); + Set> distinctResults = new HashSet<>(); + + distinctResults.add(expected); + for (int i = 0; i < 3; i++) { + MaterializedResult results = computeActual(format("SELECT shuffle(ARRAY %s) FROM orders LIMIT 10", expected)); + List rows = results.getMaterializedRows(); + assertEquals(rows.size(), 10); + + for (MaterializedRow row : rows) { + @SuppressWarnings("unchecked") + List actual = (List) row.getField(0); + + // check if the result is a correct permutation + assertEqualsIgnoreOrder(actual, expected); + + distinctResults.add(actual); + } + } + assertTrue(distinctResults.size() >= 24, "shuffle must produce at least 24 distinct results"); + } + + @Test + public void testNonReservedTimeWords() + { + assertQuery( + "SELECT TIME, TIMESTAMP, DATE, INTERVAL FROM (SELECT 1 TIME, 2 TIMESTAMP, 3 DATE, 4 INTERVAL)", + "VALUES (1, 2, 3, 4)"); + } + + @Test + public void testCustomAdd() + { + assertQuery( + "SELECT custom_add(orderkey, custkey) FROM orders", + "SELECT orderkey + custkey FROM orders"); + } + + @Test + public void testCustomSum() + { + @Language("SQL") String sql = "SELECT orderstatus, custom_sum(orderkey) FROM orders GROUP BY orderstatus"; + assertQuery(sql, sql.replace("custom_sum", "sum")); + } + + @Test + public void testCustomRank() + { + @Language("SQL") String sql = "" + + "SELECT orderstatus, clerk, sales\n" + + ", custom_rank() OVER (PARTITION BY 
orderstatus ORDER BY sales DESC) rnk\n" + + "FROM (\n" + + " SELECT orderstatus, clerk, sum(totalprice) sales\n" + + " FROM orders\n" + + " GROUP BY orderstatus, clerk\n" + + ")\n" + + "ORDER BY orderstatus, clerk"; + + Assert.assertEquals(computeActual(sql), computeActual(sql.replace("custom_rank", "rank"))); + } + + @Test + public void testApproxSetBigint() + { + MaterializedResult actual = computeActual("SELECT cardinality(approx_set(custkey)) FROM orders"); + + MaterializedResult expected = resultBuilder(getSession(), BIGINT) + .row(1002L) + .build(); + + assertEquals(actual.getMaterializedRows(), expected.getMaterializedRows()); + } + + @Test + public void testApproxSetVarchar() + { + MaterializedResult actual = computeActual("SELECT cardinality(approx_set(CAST(custkey AS VARCHAR))) FROM orders"); + + MaterializedResult expected = resultBuilder(getSession(), BIGINT) + .row(1024L) + .build(); + + assertEquals(actual.getMaterializedRows(), expected.getMaterializedRows()); + } + + @Test + public void testApproxSetDouble() + { + MaterializedResult actual = computeActual("SELECT cardinality(approx_set(CAST(custkey AS DOUBLE))) FROM orders"); + + MaterializedResult expected = resultBuilder(getSession(), BIGINT) + .row(1014L) + .build(); + + assertEquals(actual.getMaterializedRows(), expected.getMaterializedRows()); + } + + @Test + public void testApproxSetBigintGroupBy() + { + MaterializedResult actual = computeActual("" + + "SELECT orderstatus, cardinality(approx_set(custkey)) " + + "FROM orders " + + "GROUP BY orderstatus"); + + MaterializedResult expected = resultBuilder(getSession(), actual.getTypes()) + .row("O", 1001L) + .row("F", 998L) + .row("P", 304L) + .build(); + + assertEqualsIgnoreOrder(actual.getMaterializedRows(), expected.getMaterializedRows()); + } + + @Test + public void testApproxSetVarcharGroupBy() + { + MaterializedResult actual = computeActual("" + + "SELECT orderstatus, cardinality(approx_set(CAST(custkey AS VARCHAR))) " + + "FROM orders " + + 
"GROUP BY orderstatus"); + + MaterializedResult expected = resultBuilder(getSession(), actual.getTypes()) + .row("O", 1021L) + .row("F", 1019L) + .row("P", 304L) + .build(); + + assertEqualsIgnoreOrder(actual.getMaterializedRows(), expected.getMaterializedRows()); + } + + @Test + public void testApproxSetDoubleGroupBy() + { + MaterializedResult actual = computeActual("" + + "SELECT orderstatus, cardinality(approx_set(CAST(custkey AS DOUBLE))) " + + "FROM orders " + + "GROUP BY orderstatus"); + + MaterializedResult expected = resultBuilder(getSession(), actual.getTypes()) + .row("O", 1011L) + .row("F", 1011L) + .row("P", 304L) + .build(); + + assertEqualsIgnoreOrder(actual.getMaterializedRows(), expected.getMaterializedRows()); + } + + @Test + public void testApproxSetWithNulls() + { + MaterializedResult actual = computeActual("SELECT cardinality(approx_set(IF(orderstatus = 'O', custkey))) FROM orders"); + + MaterializedResult expected = resultBuilder(getSession(), actual.getTypes()) + .row(1001L) + .build(); + + assertEquals(actual.getMaterializedRows(), expected.getMaterializedRows()); + } + + @Test + public void testApproxSetOnlyNulls() + { + MaterializedResult actual = computeActual("SELECT cardinality(approx_set(null)) FROM orders"); + + MaterializedResult expected = resultBuilder(getSession(), actual.getTypes()) + .row(new Object[] {null}) + .build(); + + assertEquals(actual.getMaterializedRows(), expected.getMaterializedRows()); + } + + @Test + public void testApproxSetGroupByWithOnlyNullsInOneGroup() + { + MaterializedResult actual = computeActual("" + + "SELECT orderstatus, cardinality(approx_set(IF(orderstatus != 'O', custkey))) " + + "FROM orders " + + "GROUP BY orderstatus"); + + MaterializedResult expected = resultBuilder(getSession(), actual.getTypes()) + .row("O", null) + .row("F", 998L) + .row("P", 304L) + .build(); + + assertEqualsIgnoreOrder(actual.getMaterializedRows(), expected.getMaterializedRows()); + } + + @Test + public void 
testApproxSetGroupByWithNulls() + { + MaterializedResult actual = computeActual("" + + "SELECT orderstatus, cardinality(approx_set(IF(custkey % 2 <> 0, custkey))) " + + "FROM orders " + + "GROUP BY orderstatus"); + + MaterializedResult expected = resultBuilder(getSession(), actual.getTypes()) + .row("O", 499L) + .row("F", 496L) + .row("P", 153L) + .build(); + + assertEqualsIgnoreOrder(actual.getMaterializedRows(), expected.getMaterializedRows()); + } + + @Test + public void testMergeHyperLogLog() + { + MaterializedResult actual = computeActual("SELECT cardinality(merge(create_hll(custkey))) FROM orders"); + + MaterializedResult expected = resultBuilder(getSession(), BIGINT) + .row(1002L) + .build(); + + assertEquals(actual.getMaterializedRows(), expected.getMaterializedRows()); + } + + @Test + public void testMergeHyperLogLogGroupBy() + { + MaterializedResult actual = computeActual("" + + "SELECT orderstatus, cardinality(merge(create_hll(custkey))) " + + "FROM orders " + + "GROUP BY orderstatus"); + + MaterializedResult expected = resultBuilder(getSession(), actual.getTypes()) + .row("O", 1001L) + .row("F", 998L) + .row("P", 304L) + .build(); + + assertEqualsIgnoreOrder(actual.getMaterializedRows(), expected.getMaterializedRows()); + } + + @Test + public void testMergeHyperLogLogWithNulls() + { + MaterializedResult actual = computeActual("SELECT cardinality(merge(create_hll(IF(orderstatus = 'O', custkey)))) FROM orders"); + + MaterializedResult expected = resultBuilder(getSession(), BIGINT) + .row(1001L) + .build(); + + assertEquals(actual.getMaterializedRows(), expected.getMaterializedRows()); + } + + @Test + public void testMergeHyperLogLogGroupByWithNulls() + { + MaterializedResult actual = computeActual("" + + "SELECT orderstatus, cardinality(merge(create_hll(IF(orderstatus != 'O', custkey)))) " + + "FROM orders " + + "GROUP BY orderstatus"); + + MaterializedResult expected = resultBuilder(getSession(), actual.getTypes()) + .row("O", null) + .row("F", 998L) + 
.row("P", 304L) + .build(); + + assertEqualsIgnoreOrder(actual.getMaterializedRows(), expected.getMaterializedRows()); + } + + @Test + public void testMergeHyperLogLogOnlyNulls() + { + MaterializedResult actual = computeActual("SELECT cardinality(merge(CAST (null AS HyperLogLog))) FROM orders"); + + MaterializedResult expected = resultBuilder(getSession(), BIGINT) + .row(new Object[] {null}) + .build(); + + assertEquals(actual.getMaterializedRows(), expected.getMaterializedRows()); + } + + @Test + public void testEmptyApproxSet() + { + MaterializedResult actual = computeActual("SELECT cardinality(empty_approx_set())"); + MaterializedResult expected = resultBuilder(getSession(), BIGINT) + .row(0L) + .build(); + assertEquals(actual.getMaterializedRows(), expected.getMaterializedRows()); + } + + @Test + public void testMergeEmptyApproxSet() + { + MaterializedResult actual = computeActual("SELECT cardinality(merge(empty_approx_set())) FROM orders"); + MaterializedResult expected = resultBuilder(getSession(), BIGINT) + .row(0L) + .build(); + assertEquals(actual.getMaterializedRows(), expected.getMaterializedRows()); + } + + @Test + public void testMergeEmptyNonEmptyApproxSet() + { + MaterializedResult actual = computeActual("SELECT cardinality(merge(c)) FROM (SELECT create_hll(custkey) c FROM orders UNION ALL SELECT empty_approx_set())"); + MaterializedResult expected = resultBuilder(getSession(), BIGINT) + .row(1002L) + .build(); + assertEquals(actual.getMaterializedRows(), expected.getMaterializedRows()); + } + + @Test + public void testP4ApproxSetBigint() + { + MaterializedResult actual = computeActual("SELECT cardinality(cast(approx_set(custkey) AS P4HYPERLOGLOG)) FROM orders"); + + MaterializedResult expected = resultBuilder(getSession(), BIGINT) + .row(1002L) + .build(); + + assertEquals(actual.getMaterializedRows(), expected.getMaterializedRows()); + } + + @Test + public void testP4ApproxSetVarchar() + { + MaterializedResult actual = computeActual("SELECT 
cardinality(cast(approx_set(CAST(custkey AS VARCHAR)) AS P4HYPERLOGLOG)) FROM orders"); + + MaterializedResult expected = resultBuilder(getSession(), BIGINT) + .row(1024L) + .build(); + + assertEquals(actual.getMaterializedRows(), expected.getMaterializedRows()); + } + + @Test + public void testP4ApproxSetDouble() + { + MaterializedResult actual = computeActual("SELECT cardinality(cast(approx_set(CAST(custkey AS DOUBLE)) AS P4HYPERLOGLOG)) FROM orders"); + + MaterializedResult expected = resultBuilder(getSession(), BIGINT) + .row(1014L) + .build(); + + assertEquals(actual.getMaterializedRows(), expected.getMaterializedRows()); + } + + @Test + public void testP4ApproxSetBigintGroupBy() + { + MaterializedResult actual = computeActual("" + + "SELECT orderstatus, cardinality(cast(approx_set(custkey) AS P4HYPERLOGLOG)) " + + "FROM orders " + + "GROUP BY orderstatus"); + + MaterializedResult expected = resultBuilder(getSession(), actual.getTypes()) + .row("O", 1001L) + .row("F", 998L) + .row("P", 308L) + .build(); + + assertEqualsIgnoreOrder(actual.getMaterializedRows(), expected.getMaterializedRows()); + } + + @Test + public void testP4ApproxSetVarcharGroupBy() + { + MaterializedResult actual = computeActual("" + + "SELECT orderstatus, cardinality(cast(approx_set(CAST(custkey AS VARCHAR)) AS P4HYPERLOGLOG)) " + + "FROM orders " + + "GROUP BY orderstatus"); + + MaterializedResult expected = resultBuilder(getSession(), actual.getTypes()) + .row("O", 1021L) + .row("F", 1019L) + .row("P", 302L) + .build(); + + assertEqualsIgnoreOrder(actual.getMaterializedRows(), expected.getMaterializedRows()); + } + + @Test + public void testP4ApproxSetDoubleGroupBy() + { + MaterializedResult actual = computeActual("" + + "SELECT orderstatus, cardinality(cast(approx_set(CAST(custkey AS DOUBLE)) AS P4HYPERLOGLOG)) " + + "FROM orders " + + "GROUP BY orderstatus"); + + MaterializedResult expected = resultBuilder(getSession(), actual.getTypes()) + .row("O", 1011L) + .row("F", 1011L) + 
.row("P", 306L) + .build(); + + assertEqualsIgnoreOrder(actual.getMaterializedRows(), expected.getMaterializedRows()); + } + + @Test + public void testP4ApproxSetWithNulls() + { + MaterializedResult actual = computeActual("SELECT cardinality(cast(approx_set(IF(orderstatus = 'O', custkey)) AS P4HYPERLOGLOG)) FROM orders"); + + MaterializedResult expected = resultBuilder(getSession(), actual.getTypes()) + .row(1001L) + .build(); + + assertEquals(actual.getMaterializedRows(), expected.getMaterializedRows()); + } + + @Test + public void testP4ApproxSetOnlyNulls() + { + MaterializedResult actual = computeActual("SELECT cardinality(cast(approx_set(null) AS P4HYPERLOGLOG)) FROM orders"); + + MaterializedResult expected = resultBuilder(getSession(), actual.getTypes()) + .row(new Object[] {null}) + .build(); + + assertEquals(actual.getMaterializedRows(), expected.getMaterializedRows()); + } + + @Test + public void testP4ApproxSetGroupByWithOnlyNullsInOneGroup() + { + MaterializedResult actual = computeActual("" + + "SELECT orderstatus, cardinality(cast(approx_set(IF(orderstatus != 'O', custkey)) AS P4HYPERLOGLOG)) " + + "FROM orders " + + "GROUP BY orderstatus"); + + MaterializedResult expected = resultBuilder(getSession(), actual.getTypes()) + .row("O", null) + .row("F", 998L) + .row("P", 308L) + .build(); + + assertEqualsIgnoreOrder(actual.getMaterializedRows(), expected.getMaterializedRows()); + } + + @Test + public void testP4ApproxSetGroupByWithNulls() + { + MaterializedResult actual = computeActual("" + + "SELECT orderstatus, cardinality(cast(approx_set(IF(custkey % 2 <> 0, custkey)) AS P4HYPERLOGLOG)) " + + "FROM orders " + + "GROUP BY orderstatus"); + + MaterializedResult expected = resultBuilder(getSession(), actual.getTypes()) + .row("O", 495L) + .row("F", 491L) + .row("P", 153L) + .build(); + + assertEqualsIgnoreOrder(actual.getMaterializedRows(), expected.getMaterializedRows()); + } + + @Test + public void testValues() + { + assertQuery("VALUES 1, 2, 3, 4"); + 
assertQuery("VALUES 1, 3, 2, 4 ORDER BY 1", "SELECT * FROM (VALUES 1, 3, 2, 4) ORDER BY 1"); + assertQuery("VALUES (1.1, 2, 'foo'), (sin(3.3), 2+2, 'bar')"); + assertQuery("VALUES (1.1, 2), (sin(3.3), 2+2) ORDER BY 1", "VALUES (sin(3.3), 2+2), (1.1, 2)"); + assertQuery("VALUES (1.1, 2), (sin(3.3), 2+2) LIMIT 1", "VALUES (1.1, 2)"); + assertQuery("SELECT * FROM (VALUES (1.1, 2), (sin(3.3), 2+2))"); + assertQuery("SELECT 1.1 in (VALUES (1.1), (2.2))", "VALUES (TRUE)"); + + assertQuery("" + + "WITH a AS (VALUES (1.1, 2), (sin(3.3), 2+2)) " + + "SELECT * FROM a", + "VALUES (1.1, 2), (sin(3.3), 2+2)"); + + // implicit coersions + assertQuery("VALUES 1, 2.2, 3, 4.4"); + assertQuery("VALUES (1, 2), (3.3, 4.4)"); + assertQuery("VALUES true, 1.0 in (1, 2, 3)"); + } + + @Test + public void testValuesWithNonTrivialType() + { + MaterializedResult actual = computeActual("VALUES (0E0/0E0, 1E0/0E0, -1E0/0E0)"); + + List rows = actual.getMaterializedRows(); + assertEquals(rows.size(), 1); + + MaterializedRow row = rows.get(0); + assertTrue(((Double) row.getField(0)).isNaN()); + assertEquals(row.getField(1), Double.POSITIVE_INFINITY); + assertEquals(row.getField(2), Double.NEGATIVE_INFINITY); + } + + @Test + public void testValuesWithTimestamp() + { + MaterializedResult actual = computeActual("VALUES (current_timestamp, now())"); + + List rows = actual.getMaterializedRows(); + assertEquals(rows.size(), 1); + + MaterializedRow row = rows.get(0); + assertEquals(row.getField(0), row.getField(1)); + } + + @Test + public void testValuesWithUnusedColumns() + { + MaterializedResult actual = computeActual("SELECT foo FROM (values (1, 2)) a(foo, bar)"); + + MaterializedResult expected = resultBuilder(getSession(), actual.getTypes()) + .row(1) + .build(); + + assertEquals(actual.getMaterializedRows(), expected.getMaterializedRows()); + } + + @Test + public void testArrays() + { + assertQuery("SELECT a[1] FROM (SELECT ARRAY[orderkey] AS a FROM orders ORDER BY orderkey) t", "SELECT orderkey 
FROM orders"); + assertQuery("SELECT a[1 + CAST(round(rand()) AS BIGINT)] FROM (SELECT ARRAY[orderkey, orderkey] AS a FROM orders ORDER BY orderkey) t", "SELECT orderkey FROM orders"); + assertQuery("SELECT a[1] + 1 FROM (SELECT ARRAY[orderkey] AS a FROM orders ORDER BY orderkey) t", "SELECT orderkey + 1 FROM orders"); + assertQuery("SELECT a[1] FROM (SELECT ARRAY[orderkey + 1] AS a FROM orders ORDER BY orderkey) t", "SELECT orderkey + 1 FROM orders"); + assertQuery("SELECT a[1][1] FROM (SELECT ARRAY[ARRAY[orderkey + 1]] AS a FROM orders ORDER BY orderkey) t", "SELECT orderkey + 1 FROM orders"); + assertQuery("SELECT CARDINALITY(a) FROM (SELECT ARRAY[orderkey, orderkey + 1] AS a FROM orders ORDER BY orderkey) t", "SELECT 2 FROM orders"); + } + + @Test + public void testArrayAgg() + { + assertQuery("SELECT clerk, cardinality(array_agg(orderkey)) FROM orders GROUP BY clerk", "SELECT clerk, count(*) FROM orders GROUP BY clerk"); + } + + @Test + public void testReduceAgg() + { + assertQuery( + "SELECT x, reduce_agg(y, 1, (a, b) -> a * b, (a, b) -> a * b) " + + "FROM (VALUES (1, 5), (1, 6), (1, 7), (2, 8), (2, 9), (3, 10)) AS t(x, y) " + + "GROUP BY x", + "VALUES (1, 5 * 6 * 7), (2, 8 * 9), (3, 10)"); + assertQuery( + "SELECT x, reduce_agg(y, 0, (a, b) -> a + b, (a, b) -> a + b) " + + "FROM (VALUES (1, 5), (1, 6), (1, 7), (2, 8), (2, 9), (3, 10)) AS t(x, y) " + + "GROUP BY x", + "VALUES (1, 5 + 6 + 7), (2, 8 + 9), (3, 10)"); + + assertQuery( + "SELECT x, reduce_agg(y, 1, (a, b) -> a * b, (a, b) -> a * b) " + + "FROM (VALUES (1, CAST(5 AS DOUBLE)), (1, 6), (1, 7), (2, 8), (2, 9), (3, 10)) AS t(x, y) " + + "GROUP BY x", + "VALUES (1, CAST(5 AS DOUBLE) * 6 * 7), (2, 8 * 9), (3, 10)"); + assertQuery( + "SELECT x, reduce_agg(y, 0, (a, b) -> a + b, (a, b) -> a + b) " + + "FROM (VALUES (1, CAST(5 AS DOUBLE)), (1, 6), (1, 7), (2, 8), (2, 9), (3, 10)) AS t(x, y) " + + "GROUP BY x", + "VALUES (1, CAST(5 AS DOUBLE) + 6 + 7), (2, 8 + 9), (3, 10)"); + } + + @Test + public void 
testRows() + { + // Using JSON_FORMAT(CAST(_ AS JSON)) because H2 does not support ROW type + assertQuery("SELECT JSON_FORMAT(CAST(ROW(1 + 2, CONCAT('a', 'b')) AS JSON))", "SELECT '[3,\"ab\"]'"); + assertQuery("SELECT JSON_FORMAT(CAST(ROW(a + b) AS JSON)) FROM (VALUES (1, 2)) AS t(a, b)", "SELECT '[3]'"); + assertQuery("SELECT JSON_FORMAT(CAST(ROW(1, ROW(9, a, ARRAY[], NULL), ROW(1, 2)) AS JSON)) FROM (VALUES ('a')) t(a)", "SELECT '[1,[9,\"a\",[],null],[1,2]]'"); + assertQuery("SELECT JSON_FORMAT(CAST(ROW(ROW(ROW(ROW(ROW(a, b), c), d), e), f) AS JSON)) FROM (VALUES (ROW(0, 1), 2, '3', NULL, ARRAY[5], ARRAY[])) t(a, b, c, d, e, f)", + "SELECT '[[[[[[0,1],2],\"3\"],null],[5]],[]]'"); + assertQuery("SELECT JSON_FORMAT(CAST(ARRAY_AGG(ROW(a, b)) AS JSON)) FROM (VALUES (1, 2), (3, 4), (5, 6)) t(a, b)", "SELECT '[[1,2],[3,4],[5,6]]'"); + assertQuery("SELECT CONTAINS(ARRAY_AGG(ROW(a, b)), ROW(1, 2)) FROM (VALUES (1, 2), (3, 4), (5, 6)) t(a, b)", "SELECT TRUE"); + assertQuery("SELECT JSON_FORMAT(CAST(ARRAY_AGG(ROW(c, d)) AS JSON)) FROM (VALUES (ARRAY[1, 3, 5], ARRAY[2, 4, 6])) AS t(a, b) CROSS JOIN UNNEST(a, b) AS u(c, d)", + "SELECT '[[1,2],[3,4],[5,6]]'"); + assertQuery("SELECT JSON_FORMAT(CAST(ROW(x, y, z) AS JSON)) FROM (VALUES ROW(1, NULL, '3')) t(x,y,z)", "SELECT '[1,null,\"3\"]'"); + assertQuery("SELECT JSON_FORMAT(CAST(ROW(x, y, z) AS JSON)) FROM (VALUES ROW(1, CAST(NULL AS INTEGER), '3')) t(x,y,z)", "SELECT '[1,null,\"3\"]'"); + } + + @Test + public void testMaps() + { + assertQuery("SELECT m[max_key] FROM (SELECT map_agg(orderkey, orderkey) m, max(orderkey) max_key FROM orders)", "SELECT max(orderkey) FROM orders"); + // Make sure that even if the map constructor throws with the NULL key the block builders are left in a consistent state + // and the TRY() call eventually succeeds and return NULL values. 
+ assertQuery("SELECT JSON_FORMAT(CAST(TRY(MAP(ARRAY[NULL], ARRAY[x])) AS JSON)) FROM (VALUES 1, 2) t(x)", "SELECT * FROM (VALUES NULL, NULL)"); + } + + @Test + public void testSpecialFloatingPointValues() + { + MaterializedResult actual = computeActual("SELECT nan(), infinity(), -infinity()"); + MaterializedRow row = getOnlyElement(actual.getMaterializedRows()); + assertEquals(row.getField(0), Double.NaN); + assertEquals(row.getField(1), Double.POSITIVE_INFINITY); + assertEquals(row.getField(2), Double.NEGATIVE_INFINITY); + } + + @Test + public void testOutputInEnforceSingleRow() + { + assertQuery("SELECT count(*) FROM (SELECT (SELECT 1))"); + assertQuery("SELECT * FROM (SELECT (SELECT 1))"); + assertQueryFails( + "SELECT * FROM (SELECT (SELECT 1, 2))", + "line 1:23: Multiple columns returned by subquery are not yet supported. Found 2"); + } + + @Test + public void testRowNumberNoOptimization() + { + MaterializedResult actual = computeActual("" + + "SELECT orderkey, orderstatus FROM (\n" + + " SELECT row_number() OVER () rn, orderkey, orderstatus\n" + + " FROM orders\n" + + ") WHERE NOT rn <= 10"); + MaterializedResult all = computeExpected("SELECT orderkey, orderstatus FROM orders", actual.getTypes()); + assertEquals(actual.getMaterializedRows().size(), all.getMaterializedRows().size() - 10); + assertContains(all, actual); + + actual = computeActual("" + + "SELECT orderkey, orderstatus FROM (\n" + + " SELECT row_number() OVER () rn, orderkey, orderstatus\n" + + " FROM orders\n" + + ") WHERE rn - 5 <= 10"); + all = computeExpected("SELECT orderkey, orderstatus FROM orders", actual.getTypes()); + assertEquals(actual.getMaterializedRows().size(), 15); + assertContains(all, actual); + } + + @Test + public void testRowNumberLimit() + { + MaterializedResult actual = computeActual("" + + "SELECT row_number() OVER (PARTITION BY orderstatus) rn, orderstatus\n" + + "FROM orders\n" + + "LIMIT 10"); + assertEquals(actual.getMaterializedRows().size(), 10); + + actual = 
computeActual("" + + "SELECT row_number() OVER (PARTITION BY orderstatus ORDER BY orderkey) rn\n" + + "FROM orders\n" + + "LIMIT 10"); + assertEquals(actual.getMaterializedRows().size(), 10); + + actual = computeActual("" + + "SELECT row_number() OVER () rn, orderstatus\n" + + "FROM orders\n" + + "LIMIT 10"); + assertEquals(actual.getMaterializedRows().size(), 10); + + actual = computeActual("" + + "SELECT row_number() OVER (ORDER BY orderkey) rn\n" + + "FROM orders\n" + + "LIMIT 10"); + assertEquals(actual.getMaterializedRows().size(), 10); + } + + @Test + public void testRowNumberMultipleFilters() + { + MaterializedResult actual = computeActual("" + + "SELECT * FROM (" + + " SELECT a, row_number() OVER (PARTITION BY a ORDER BY a) rn\n" + + " FROM (VALUES (1), (1), (1), (2), (2), (3)) t (a)) t " + + "WHERE rn < 3 AND rn % 2 = 0 AND a = 2 LIMIT 2"); + MaterializedResult expected = resultBuilder(getSession(), BIGINT, BIGINT) + .row(2, 2L) + .build(); + assertEqualsIgnoreOrder(actual.getMaterializedRows(), expected.getMaterializedRows()); + } + + @Test + public void testRowNumberSpecialFilters() + { + // Test "row_number() = negative number" filter with ORDER BY. This should create a Window Node with a Filter Node on top and return 0 rows. + assertQueryReturnsEmptyResult("" + + "SELECT * FROM (" + + " SELECT a, row_number() OVER (PARTITION BY a ORDER BY a) rn\n" + + " FROM (VALUES (1), (1), (1), (2), (2), (3)) t (a)) t " + + "WHERE rn = -1"); + + // Test "row_number() <= negative number" filter with ORDER BY. This should create a Window Node with a Filter Node on top and return 0 rows. + assertQueryReturnsEmptyResult("" + + "SELECT * FROM (" + + " SELECT a, row_number() OVER (PARTITION BY a ORDER BY a) rn\n" + + " FROM (VALUES (1), (1), (1), (2), (2), (3)) t (a)) t " + + "WHERE rn <= -1"); + + // Test "row_number() = 0" filter with ORDER BY. This should create a Window Node with a Filter Node on top and return 0 rows. 
+ assertQueryReturnsEmptyResult("" + + "SELECT * FROM (" + + " SELECT a, row_number() OVER (PARTITION BY a ORDER BY a) rn\n" + + " FROM (VALUES (1), (1), (1), (2), (2), (3)) t (a)) t " + + "WHERE rn = 0"); + + // Test "row_number() = negative number" filter without ORDER BY. This should create a RowNumber Node with a Filter Node on top and return 0 rows. + assertQueryReturnsEmptyResult("" + + "SELECT * FROM (" + + " SELECT a, row_number() OVER (PARTITION BY a) rn\n" + + " FROM (VALUES (1), (1), (1), (2), (2), (3)) t (a)) t " + + "WHERE rn = -1"); + + // Test "row_number() <= negative number" filter without ORDER BY. This should create a RowNumber Node with a Filter Node on top and return 0 rows. + assertQueryReturnsEmptyResult("" + + "SELECT * FROM (" + + " SELECT a, row_number() OVER (PARTITION BY a) rn\n" + + " FROM (VALUES (1), (1), (1), (2), (2), (3)) t (a)) t " + + "WHERE rn <= -1"); + + // Test "row_number() = 0" filter without ORDER BY. This should create a RowNumber Node with a Filter Node on top and return 0 rows. 
+ assertQueryReturnsEmptyResult("" + + "SELECT * FROM (" + + " SELECT a, row_number() OVER (PARTITION BY a) rn\n" + + " FROM (VALUES (1), (1), (1), (2), (2), (3)) t (a)) t " + + "WHERE rn = 0"); + } + + @Test + public void testRowNumberFilterAndLimit() + { + MaterializedResult actual = computeActual("" + + "SELECT * FROM (" + + "SELECT a, row_number() OVER (PARTITION BY a ORDER BY a) rn\n" + + "FROM (VALUES (1), (2), (1), (2)) t (a)) t WHERE rn < 2 LIMIT 2"); + + MaterializedResult expected = resultBuilder(getSession(), BIGINT, BIGINT) + .row(1, 1L) + .row(2, 1L) + .build(); + assertEqualsIgnoreOrder(actual.getMaterializedRows(), expected.getMaterializedRows()); + + actual = computeActual("" + + "SELECT * FROM (" + + "SELECT a, row_number() OVER (PARTITION BY a) rn\n" + + "FROM (VALUES (1), (2), (1), (2), (1)) t (a)) t WHERE rn < 3 LIMIT 2"); + + expected = resultBuilder(getSession(), BIGINT, BIGINT) + .row(1, 1L) + .row(1, 2L) + .row(2, 1L) + .row(2, 2L) + .build(); + assertEquals(actual.getMaterializedRows().size(), 2); + assertContains(expected, actual); + } + + @Test + public void testRowNumberUnpartitionedFilter() + { + MaterializedResult actual = computeActual("" + + "SELECT orderkey, orderstatus FROM (\n" + + " SELECT row_number() OVER () rn, orderkey, orderstatus\n" + + " FROM orders\n" + + ") WHERE rn <= 5 AND orderstatus != 'Z'"); + MaterializedResult all = computeExpected("SELECT orderkey, orderstatus FROM orders", actual.getTypes()); + assertEquals(actual.getMaterializedRows().size(), 5); + assertContains(all, actual); + + actual = computeActual("" + + "SELECT orderkey, orderstatus FROM (\n" + + " SELECT row_number() OVER () rn, orderkey, orderstatus\n" + + " FROM orders\n" + + ") WHERE rn < 5"); + all = computeExpected("SELECT orderkey, orderstatus FROM orders", actual.getTypes()); + + assertEquals(actual.getMaterializedRows().size(), 4); + assertContains(all, actual); + + actual = computeActual("" + + "SELECT orderkey, orderstatus FROM (\n" + + " 
SELECT row_number() OVER () rn, orderkey, orderstatus\n" + + " FROM orders\n" + + ") LIMIT 5"); + all = computeExpected("SELECT orderkey, orderstatus FROM orders", actual.getTypes()); + + assertEquals(actual.getMaterializedRows().size(), 5); + assertContains(all, actual); + } + + @Test + public void testRowNumberPartitionedFilter() + { + MaterializedResult actual = computeActual("" + + "SELECT orderkey, orderstatus FROM (\n" + + " SELECT row_number() OVER (PARTITION BY orderstatus) rn, orderkey, orderstatus\n" + + " FROM orders\n" + + ") WHERE rn <= 5"); + MaterializedResult all = computeExpected("SELECT orderkey, orderstatus FROM orders", actual.getTypes()); + + // there are 3 DISTINCT orderstatus, so expect 15 rows. + assertEquals(actual.getMaterializedRows().size(), 15); + assertContains(all, actual); + + // Test for unreferenced outputs + actual = computeActual("" + + "SELECT orderkey FROM (\n" + + " SELECT row_number() OVER (PARTITION BY orderstatus) rn, orderkey\n" + + " FROM orders\n" + + ") WHERE rn <= 5"); + all = computeExpected("SELECT orderkey FROM orders", actual.getTypes()); + + // there are 3 distinct orderstatus, so expect 15 rows. 
+ assertEquals(actual.getMaterializedRows().size(), 15); + assertContains(all, actual); + } + + @Test + public void testRowNumberUnpartitionedFilterLimit() + { + assertQuery("" + + "SELECT row_number() OVER ()\n" + + "FROM lineitem JOIN orders ON lineitem.orderkey = orders.orderkey\n" + + "WHERE orders.orderkey = 10000\n" + + "LIMIT 20"); + } + + @Test + public void testRowNumberPropertyDerivation() + { + assertQuery( + "SELECT orderkey, orderstatus, SUM(rn) OVER (PARTITION BY orderstatus) c " + + "FROM ( " + + " SELECT orderkey, orderstatus, row_number() OVER (PARTITION BY orderstatus) rn " + + " FROM ( " + + " SELECT * FROM orders ORDER BY orderkey LIMIT 10 " + + " ) " + + ")", + "VALUES " + + "(1, 'O', 21), " + + "(2, 'O', 21), " + + "(3, 'F', 10), " + + "(4, 'O', 21), " + + "(5, 'F', 10), " + + "(6, 'F', 10), " + + "(7, 'O', 21), " + + "(32, 'O', 21), " + + "(33, 'F', 10), " + + "(34, 'O', 21)"); + } + + @Test + public void testTopNUnpartitionedWindow() + { + MaterializedResult actual = computeActual("" + + "SELECT * FROM (\n" + + " SELECT row_number() OVER (ORDER BY orderkey) rn, orderkey, orderstatus\n" + + " FROM orders\n" + + ") WHERE rn <= 5"); + String sql = "SELECT row_number() OVER (), orderkey, orderstatus FROM orders ORDER BY orderkey LIMIT 5"; + MaterializedResult expected = computeExpected(sql, actual.getTypes()); + Assert.assertEquals(actual, expected); + } + + @Test + public void testTopNUnpartitionedLargeWindow() + { + MaterializedResult actual = computeActual("" + + "SELECT * FROM (\n" + + " SELECT row_number() OVER (ORDER BY orderkey) rn, orderkey, orderstatus\n" + + " FROM orders\n" + + ") WHERE rn <= 10000"); + String sql = "SELECT row_number() OVER (), orderkey, orderstatus FROM orders ORDER BY orderkey LIMIT 10000"; + MaterializedResult expected = computeExpected(sql, actual.getTypes()); + assertEqualsIgnoreOrder(actual, expected); + } + + @Test + public void testTopNPartitionedWindow() + { + assertQuery( + "SELECT * FROM ( " + + " SELECT 
row_number() OVER (PARTITION BY orderstatus ORDER BY orderkey) rn, orderkey, orderstatus " + + " FROM orders " + + ") WHERE rn <= 2", + "VALUES " + + "(1, 1, 'O'), " + + "(2, 2, 'O'), " + + "(1, 3, 'F'), " + + "(2, 5, 'F'), " + + "(1, 65, 'P'), " + + "(2, 197, 'P')"); + + // Test for unreferenced outputs + assertQuery( + "SELECT * FROM ( " + + " SELECT row_number() OVER (PARTITION BY orderstatus ORDER BY orderkey) rn, orderkey " + + " FROM orders " + + ") WHERE rn <= 2", + "VALUES " + + "(1, 1), " + + "(2, 2), " + + "(1, 3), " + + "(2, 5), " + + "(1, 65), " + + "(2, 197)"); + + assertQuery( + "SELECT * FROM ( " + + " SELECT row_number() OVER (PARTITION BY orderstatus ORDER BY orderkey) rn, orderstatus " + + " FROM orders " + + ") WHERE rn <= 2", + "VALUES " + + "(1, 'O'), " + + "(2, 'O'), " + + "(1, 'F'), " + + "(2, 'F'), " + + "(1, 'P'), " + + "(2, 'P')"); + } + + @Test + public void testTopNUnpartitionedWindowWithEqualityFilter() + { + assertQuery( + "SELECT * FROM ( " + + " SELECT row_number() OVER (ORDER BY orderkey) rn, orderkey, orderstatus " + + " FROM orders " + + ") WHERE rn = 2", + "VALUES (2, 2, 'O')"); + } + + @Test + public void testTopNUnpartitionedWindowWithCompositeFilter() + { + assertQuery( + "SELECT * FROM ( " + + " SELECT row_number() OVER (ORDER BY orderkey) rn, orderkey, orderstatus " + + " FROM orders " + + ") WHERE rn = 1 OR rn IN (3, 4) OR rn BETWEEN 6 AND 7", + "VALUES " + + "(1, 1, 'O'), " + + "(3, 3, 'F'), " + + "(4, 4, 'O'), " + + "(6, 6, 'F'), " + + "(7, 7, 'O')"); + } + + @Test + public void testTopNPartitionedWindowWithEqualityFilter() + { + assertQuery( + "SELECT * FROM ( " + + " SELECT row_number() OVER (PARTITION BY orderstatus ORDER BY orderkey) rn, orderkey, orderstatus " + + " FROM orders " + + ") WHERE rn = 2", + "VALUES " + + "(2, 2, 'O'), " + + "(2, 5, 'F'), " + + "(2, 197, 'P')"); + + // Test for unreferenced outputs + assertQuery( + "SELECT * FROM ( " + + " SELECT row_number() OVER (PARTITION BY orderstatus ORDER BY 
orderkey) rn, orderkey " + + " FROM orders " + + ") WHERE rn = 2", + "VALUES (2, 2), (2, 5), (2, 197)"); + + assertQuery( + "SELECT * FROM ( " + + " SELECT row_number() OVER (PARTITION BY orderstatus ORDER BY orderkey) rn, orderstatus " + + " FROM orders " + + ") WHERE rn = 2", + "VALUES (2, 'O'), (2, 'F'), (2, 'P')"); + } + + @Test + public void testScalarFunction() + { + assertQuery("SELECT SUBSTR('Quadratically', 5, 6)"); + } + + @Test + public void testUnaliasedSubqueries() + { + assertQuery("SELECT orderkey FROM (SELECT orderkey FROM orders)"); + } + + @Test + public void testUnaliasedSubqueries1() + { + assertQuery("SELECT a FROM (SELECT orderkey a FROM orders)"); + } + + @Test + public void testWith() + { + assertQuery("" + + "WITH a AS (SELECT * FROM orders) " + + "SELECT * FROM a", + "SELECT * FROM orders"); + } + + @Test + public void testWithQualifiedPrefix() + { + assertQuery("WITH a AS (SELECT 123) SELECT a.* FROM a", "SELECT 123"); + } + + @Test + public void testWithAliased() + { + assertQuery("WITH a AS (SELECT * FROM orders) SELECT * FROM a x", "SELECT * FROM orders"); + } + + @Test + public void testReferenceToWithQueryInFromClause() + { + assertQuery( + "WITH a AS (SELECT * FROM orders)" + + "SELECT * FROM (" + + " SELECT * FROM a" + + ")", + "SELECT * FROM orders"); + } + + @Test + public void testWithChaining() + { + assertQuery("" + + "WITH a AS (SELECT orderkey n FROM orders)\n" + + ", b AS (SELECT n + 1 n FROM a)\n" + + ", c AS (SELECT n + 1 n FROM b)\n" + + "SELECT n + 1 FROM c", + "SELECT orderkey + 3 FROM orders"); + } + + @Test + public void testWithNestedSubqueries() + { + assertQuery("" + + "WITH a AS (\n" + + " WITH aa AS (SELECT 123 x FROM orders LIMIT 1)\n" + + " SELECT x y FROM aa\n" + + "), b AS (\n" + + " WITH bb AS (\n" + + " WITH bbb AS (SELECT y FROM a)\n" + + " SELECT bbb.* FROM bbb\n" + + " )\n" + + " SELECT y z FROM bb\n" + + ")\n" + + "SELECT *\n" + + "FROM (\n" + + " WITH q AS (SELECT z w FROM b)\n" + + " SELECT j.*, 
k.*\n" + + " FROM a j\n" + + " JOIN q k ON (j.y = k.w)\n" + + ") t", "" + + "SELECT 123, 123 FROM orders LIMIT 1"); + } + + @Test + public void testWithColumnAliasing() + { + assertQuery("WITH a (id) AS (SELECT 123) SELECT id FROM a", "SELECT 123"); + + assertQuery( + "WITH t (a, b, c) AS (SELECT 1, custkey x, orderkey FROM orders) SELECT c, b, a FROM t", + "SELECT orderkey, custkey, 1 FROM orders"); + } + + @Test + public void testWithHiding() + { + assertQuery("" + + "WITH a AS (SELECT 1), " + + " b AS (" + + " WITH a AS (SELECT 2)" + + " SELECT * FROM a" + + " )" + + "SELECT * FROM b", + "SELECT 2"); + assertQueryFails( + "WITH a AS (VALUES 1), " + + " a AS (VALUES 2)" + + "SELECT * FROM a", + "line 1:28: WITH query name 'a' specified more than once"); + } + + @Test + public void testWithRecursive() + { + assertQueryFails( + "WITH RECURSIVE a AS (SELECT 123) SELECT * FROM a", + "line 1:1: Recursive WITH queries are not supported"); + } + + @Test + public void testCaseNoElse() + { + assertQuery("SELECT orderkey, CASE orderstatus WHEN 'O' THEN 'a' END FROM orders"); + } + + @Test + public void testCaseNoElseInconsistentResultType() + { + assertQueryFails( + "SELECT orderkey, CASE orderstatus WHEN 'O' THEN 'a' WHEN '1' THEN 2 END FROM orders", + "\\Qline 1:67: All CASE results must be the same type: varchar(1)\\E"); + } + + @Test + public void testIfExpression() + { + assertQuery( + "SELECT sum(IF(orderstatus = 'F', totalprice, 0.0)) FROM orders", + "SELECT sum(CASE WHEN orderstatus = 'F' THEN totalprice ELSE 0.0 END) FROM orders"); + assertQuery( + "SELECT sum(IF(orderstatus = 'Z', totalprice)) FROM orders", + "SELECT sum(CASE WHEN orderstatus = 'Z' THEN totalprice END) FROM orders"); + assertQuery( + "SELECT sum(IF(orderstatus = 'F', NULL, totalprice)) FROM orders", + "SELECT sum(CASE WHEN orderstatus = 'F' THEN NULL ELSE totalprice END) FROM orders"); + assertQuery( + "SELECT IF(orderstatus = 'Z', orderkey / 0, orderkey) FROM orders", + "SELECT CASE WHEN 
orderstatus = 'Z' THEN orderkey / 0 ELSE orderkey END FROM orders"); + assertQuery( + "SELECT sum(IF(NULLIF(orderstatus, 'F') <> 'F', totalprice, 5.1)) FROM orders", + "SELECT sum(CASE WHEN NULLIF(orderstatus, 'F') <> 'F' THEN totalprice ELSE 5.1 END) FROM orders"); + + // coercions to supertype + assertQuery("SELECT if(true, CAST(1 AS decimal(2,1)), 1)", "SELECT 1.0"); + } + + @Test + public void testDuplicateFields() + { + assertQuery( + "SELECT * FROM (SELECT orderkey, orderkey FROM orders)", + "SELECT orderkey, orderkey FROM orders"); + } + + @Test + public void testWildcardFromSubquery() + { + assertQuery("SELECT * FROM (SELECT orderkey X FROM orders)"); + } + + @Test + public void testCaseInsensitiveAttribute() + { + assertQuery("SELECT x FROM (SELECT orderkey X FROM orders)"); + } + + @Test + public void testCaseInsensitiveAliasedRelation() + { + assertQuery("SELECT A.* FROM orders a"); + } + + @Test + public void testSubqueryBody() + { + assertQuery("(SELECT orderkey, custkey FROM orders)"); + } + + @Test + public void testSubqueryBodyOrderLimit() + { + assertQueryOrdered("(SELECT orderkey AS a, custkey AS b FROM orders) ORDER BY a LIMIT 1"); + } + + @Test + public void testSubqueryBodyProjectedOrderby() + { + assertQueryOrdered("(SELECT orderkey, custkey FROM orders) ORDER BY orderkey * -1"); + } + + @Test + public void testSubqueryBodyDoubleOrderby() + { + assertQueryOrdered("(SELECT orderkey, custkey FROM orders ORDER BY custkey) ORDER BY orderkey"); + } + + @Test + public void testLambdaCapture() + { + // Test for lambda expression without capture can be found in TestLambdaExpression + + assertQuery("SELECT apply(0, x -> x + c1) FROM (VALUES 1) t(c1)", "VALUES 1"); + assertQuery("SELECT apply(0, x -> x + t.c1) FROM (VALUES 1) t(c1)", "VALUES 1"); + assertQuery("SELECT apply(c1, x -> x + c2) FROM (VALUES (1, 2), (3, 4), (5, 6)) t(c1, c2)", "VALUES 3, 7, 11"); + assertQuery("SELECT apply(c1 + 10, x -> apply(x + 100, y -> c1)) FROM (VALUES 1) t(c1)", 
"VALUES 1"); + assertQuery("SELECT apply(c1 + 10, x -> apply(x + 100, y -> t.c1)) FROM (VALUES 1) t(c1)", "VALUES 1"); + assertQuery("SELECT apply(CAST(ROW(10) AS ROW(x INTEGER)), r -> r.x)", "VALUES 10"); + assertQuery("SELECT apply(CAST(ROW(10) AS ROW(x INTEGER)), r -> r.x) FROM (VALUES 1) u(x)", "VALUES 10"); + assertQuery("SELECT apply(CAST(ROW(10) AS ROW(x INTEGER)), r -> r.x) FROM (VALUES 1) r(x)", "VALUES 10"); + assertQuery("SELECT apply(CAST(ROW(10) AS ROW(x INTEGER)), r -> apply(3, y -> y + r.x)) FROM (VALUES 1) u(x)", "VALUES 13"); + assertQuery("SELECT apply(CAST(ROW(10) AS ROW(x INTEGER)), r -> apply(3, y -> y + r.x)) FROM (VALUES 1) r(x)", "VALUES 13"); + assertQuery("SELECT apply(CAST(ROW(10) AS ROW(x INTEGER)), r -> apply(3, y -> y + r.x)) FROM (VALUES 'a') r(x)", "VALUES 13"); + assertQuery("SELECT apply(CAST(ROW(10) AS ROW(x INTEGER)), z -> apply(3, y -> y + r.x)) FROM (VALUES 1) r(x)", "VALUES 4"); + + // reference lambda variable of the not-immediately-enclosing lambda + assertQuery("SELECT apply(1, x -> apply(10, y -> x)) FROM (VALUES 1000) t(x)", "VALUES 1"); + assertQuery("SELECT apply(1, x -> apply(10, y -> x)) FROM (VALUES 'abc') t(x)", "VALUES 1"); + assertQuery("SELECT apply(1, x -> apply(10, y -> apply(100, z -> x))) FROM (VALUES 1000) t(x)", "VALUES 1"); + assertQuery("SELECT apply(1, x -> apply(10, y -> apply(100, z -> x))) FROM (VALUES 'abc') t(x)", "VALUES 1"); + + // in join post-filter + assertQuery("SELECT * FROM (VALUES true) t(x) left JOIN (VALUES 1001) t2(y) ON (apply(false, z -> apply(false, y -> x)))", "SELECT true, 1001"); + } + + @Test + public void testLambdaInAggregationContext() + { + assertQuery("SELECT apply(sum(x), i -> i * i) FROM (VALUES 1, 2, 3, 4, 5) t(x)", "SELECT 225"); + assertQuery("SELECT apply(x, i -> i - 1), sum(y) FROM (VALUES (1, 10), (1, 20), (2, 50)) t(x,y) GROUP BY x", "VALUES (0, 30), (1, 50)"); + assertQuery("SELECT x, apply(sum(y), i -> i * 10) FROM (VALUES (1, 10), (1, 20), (2, 50)) t(x,y) GROUP BY 
x", "VALUES (1, 300), (2, 500)"); + assertQuery("SELECT apply(8, x -> x + 1) FROM (VALUES (1, 2)) t(x,y) GROUP BY y", "SELECT 9"); + + assertQuery("SELECT apply(CAST(ROW(1) AS ROW(someField BIGINT)), x -> x.someField) FROM (VALUES (1,2)) t(x,y) GROUP BY y", "SELECT 1"); + assertQuery("SELECT apply(sum(x), x -> x * x) FROM (VALUES 1, 2, 3, 4, 5) t(x)", "SELECT 225"); + // nested lambda expression uses the same variable name + assertQuery("SELECT apply(sum(x), x -> apply(x, x -> x * x)) FROM (VALUES 1, 2, 3, 4, 5) t(x)", "SELECT 225"); + } + + @Test + public void testLambdaInSubqueryContext() + { + assertQuery("SELECT apply(x, i -> i * i) FROM (SELECT 10 x)", "SELECT 100"); + assertQuery("SELECT apply((SELECT 10), i -> i * i)", "SELECT 100"); + + // with capture + assertQuery("SELECT apply(x, i -> i * x) FROM (SELECT 10 x)", "SELECT 100"); + assertQuery("SELECT apply(x, y -> y * x) FROM (SELECT 10 x, 3 y)", "SELECT 100"); + assertQuery("SELECT apply(x, z -> y * x) FROM (SELECT 10 x, 3 y)", "SELECT 30"); + } + + @Test + public void testLambdaInValuesAndUnnest() + { + assertQuery("SELECT * FROM UNNEST(transform(sequence(1, 5), x -> x * x))", "SELECT * FROM (VALUES 1, 4, 9, 16, 25)"); + assertQuery("SELECT x[5] FROM (VALUES transform(sequence(1, 5), x -> x * x)) t(x)", "SELECT 25"); + } + + @Test + public void testTryLambdaRepeated() + { + assertQuery("SELECT x + x FROM (SELECT apply(a, i -> i * i) x FROM (VALUES 3) t(a))", "SELECT 18"); + assertQuery("SELECT apply(a, i -> i * i) + apply(a, i -> i * i) FROM (VALUES 3) t(a)", "SELECT 18"); + assertQuery("SELECT apply(a, i -> i * i), apply(a, i -> i * i) FROM (VALUES 3) t(a)", "SELECT 9, 9"); + assertQuery("SELECT try(10 / a) + try(10 / a) FROM (VALUES 5) t(a)", "SELECT 4"); + assertQuery("SELECT try(10 / a), try(10 / a) FROM (VALUES 5) t(a)", "SELECT 2, 2"); + } + + @Test + public void testTryNoMergeProjections() + { + // no regexp specified because the JVM optimizes away exception message constructor if run enough times 
+ assertQueryFails("SELECT TRY(x) FROM (SELECT 1/y AS x FROM (VALUES 1, 2, 3, 0, 4) t(y))", ".*"); + } + + @Test + public void testNonDeterministicFilter() + { + MaterializedResult materializedResult = computeActual("SELECT u FROM ( SELECT if(rand() > 0.5, 0, 1) AS u ) WHERE u <> u"); + assertEquals(materializedResult.getRowCount(), 0); + + materializedResult = computeActual("SELECT u, v FROM ( SELECT if(rand() > 0.5, 0, 1) AS u, 4*4 AS v ) WHERE u <> u and v > 10"); + assertEquals(materializedResult.getRowCount(), 0); + + materializedResult = computeActual("SELECT u, v, w FROM ( SELECT if(rand() > 0.5, 0, 1) AS u, 4*4 AS v, 'abc' AS w ) WHERE v > 10"); + assertEquals(materializedResult.getRowCount(), 1); + } + + @Test + public void testNonDeterministicProjection() + { + MaterializedResult materializedResult = computeActual("SELECT r, r + 1 FROM (SELECT rand(100) r FROM orders) LIMIT 10"); + assertEquals(materializedResult.getRowCount(), 10); + for (MaterializedRow materializedRow : materializedResult) { + assertEquals(materializedRow.getFieldCount(), 2); + assertEquals(((Number) materializedRow.getField(0)).intValue() + 1, materializedRow.getField(1)); + } + } + + @Test + public void testMapSubscript() + { + assertQuery("SELECT map(array[1], array['aa'])[1]", "SELECT 'aa'"); + assertQuery("SELECT map(array['a'], array['aa'])['a']", "SELECT 'aa'"); + assertQuery("SELECT map(array[array[1,1]], array['a'])[array[1,1]]", "SELECT 'a'"); + assertQuery("SELECT map(array[(1,2)], array['a'])[(1,2)]", "SELECT 'a'"); + } + + @Test + public void testRowSubscript() + { + // Subscript on Row with unnamed fields + assertQuery("SELECT ROW (1, 'a', true)[2]", "SELECT 'a'"); + assertQuery("SELECT r[2] FROM (VALUES (ROW (ROW (1, 'a', true)))) AS v(r)", "SELECT 'a'"); + assertQuery("SELECT r[1], r[2] FROM (SELECT ROW (name, regionkey) FROM nation ORDER BY name LIMIT 1) t(r)", "VALUES ('ALGERIA', 0)"); + + // Subscript on Row with named fields + assertQuery("SELECT (CAST (ROW (1, 'a', 
2 ) AS ROW (field1 bigint, field2 varchar(1), field3 bigint)))[2]", "SELECT 'a'"); + + // Subscript on nested Row + assertQuery("SELECT ROW (1, 'a', ROW (false, 2, 'b'))[3][3]", "SELECT 'b'"); + + // Row subscript in filter condition + assertQuery("SELECT orderstatus FROM orders WHERE ROW (orderkey, custkey)[1] = 100", "SELECT 'O'"); + + // Row subscript in join condition + assertQuery("SELECT n.name, r.name FROM nation n JOIN region r ON ROW (n.name, n.regionkey)[2] = ROW (r.name, r.regionkey)[2] ORDER BY n.name LIMIT 1", "VALUES ('ALGERIA', 'AFRICA')"); + } + + @Test + public void testVarbinary() + { + assertQuery("SELECT LENGTH(x) FROM (SELECT from_base64('gw==') AS x)", "SELECT 1"); + assertQuery("SELECT LENGTH(from_base64('gw=='))", "SELECT 1"); + } + + @Test + public void testRowFieldAccessor() + { + //Dereference only + assertQuery("SELECT a.col0 FROM (VALUES ROW (CAST(ROW(1, 2) AS ROW(col0 integer, col1 integer)))) AS t (a)", "SELECT 1"); + assertQuery("SELECT a.col0 FROM (VALUES ROW (CAST(ROW(1.0E0, 2.0E0) AS ROW(col0 integer, col1 integer)))) AS t (a)", "SELECT 1.0"); + assertQuery("SELECT a.col0 FROM (VALUES ROW (CAST(ROW(TRUE, FALSE) AS ROW(col0 boolean, col1 boolean)))) AS t (a)", "SELECT TRUE"); + assertQuery("SELECT a.col1 FROM (VALUES ROW (CAST(ROW(1.0, 'kittens') AS ROW(col0 varchar, col1 varchar)))) AS t (a)", "SELECT 'kittens'"); + assertQuery("SELECT a.col2.col1 FROM (VALUES ROW(CAST(ROW(1.0, ARRAY[2], row(3, 4.0)) AS ROW(col0 double, col1 array(int), col2 row(col0 integer, col1 double))))) t(a)", "SELECT 4.0"); + + // mixture of row field reference and table field reference + assertQuery("SELECT CAST(row(1, t.x) AS row(col0 bigint, col1 bigint)).col1 FROM (VALUES 1, 2, 3) t(x)", "SELECT * FROM (VALUES 1, 2, 3)"); + assertQuery("SELECT Y.col1 FROM (SELECT CAST(row(1, t.x) AS row(col0 bigint, col1 bigint)) AS Y FROM (VALUES 1, 2, 3) t(x)) test_t", "SELECT * FROM (VALUES 1, 2, 3)"); + + // Subscript + Dereference + assertQuery("SELECT a.col1[2] 
FROM (VALUES ROW(CAST(ROW(1.0, ARRAY[22, 33, 44, 55], row(3, 4.0E0)) AS ROW(col0 double, col1 array(integer), col2 row(col0 integer, col1 double))))) t(a)", "SELECT 33"); + assertQuery("SELECT a.col1[2].col0, a.col1[2].col1 FROM (VALUES ROW(cast(row(1.0, ARRAY[row(31, 4.1E0), row(32, 4.2E0)], row(3, 4.0E0)) AS ROW(col0 double, col1 array(row(col0 integer, col1 double)), col2 row(col0 integer, col1 double))))) t(a)", "SELECT 32, 4.2"); + + assertQuery("SELECT CAST(row(11, 12) AS row(col0 bigint, col1 bigint)).col0", "SELECT 11"); + } + + @Test + public void testRowFieldAccessorInAggregate() + { + assertQuery("SELECT a.col0, SUM(a.col1[2]), SUM(a.col2.col0), SUM(a.col2.col1) FROM " + + "(VALUES " + + "ROW(CAST(ROW(1.0, ARRAY[2, 13, 4], row(11, 4.1E0)) AS ROW(col0 double, col1 array(integer), col2 row(col0 integer, col1 double)))), " + + "ROW(CAST(ROW(2.0, ARRAY[2, 23, 4], row(12, 14.0E0)) AS ROW(col0 double, col1 array(integer), col2 row(col0 integer, col1 double)))), " + + "ROW(CAST(ROW(1.0, ARRAY[22, 33, 44], row(13, 5.0E0)) AS ROW(col0 double, col1 array(integer), col2 row(col0 integer, col1 double))))) t(a) " + + "GROUP BY a.col0", + "SELECT * FROM VALUES (1.0, 46, 24, 9.1), (2.0, 23, 12, 14.0)"); + + assertQuery("SELECT a.col2.col0, SUM(a.col0), SUM(a.col1[2]), SUM(a.col2.col1) FROM " + + "(VALUES " + + "ROW(CAST(ROW(1.0, ARRAY[2, 13, 4], row(11, 4.1E0)) AS ROW(col0 double, col1 array(integer), col2 row(col0 integer, col1 double)))), " + + "ROW(CAST(ROW(2.0, ARRAY[2, 23, 4], row(11, 14.0E0)) AS ROW(col0 double, col1 array(integer), col2 row(col0 integer, col1 double)))), " + + "ROW(CAST(ROW(7.0, ARRAY[22, 33, 44], row(13, 5.0E0)) AS ROW(col0 double, col1 array(integer), col2 row(col0 integer, col1 double))))) t(a) " + + "GROUP BY a.col2.col0", + "SELECT * FROM VALUES (11, 3.0, 36, 18.1), (13, 7.0, 33, 5.0)"); + + assertQuery("SELECT a.col1[1].col0, SUM(a.col0), SUM(a.col1[1].col1), SUM(a.col1[2].col0), SUM(a.col2.col1) FROM " + + "(VALUES " + + 
"ROW(CAST(ROW(1.0, ARRAY[row(31, 4.5E0), row(12, 4.2E0)], row(3, 4.0E0)) AS ROW(col0 double, col1 array(row(col0 integer, col1 double)), col2 row(col0 integer, col1 double)))), " + + "ROW(CAST(ROW(3.1, ARRAY[row(41, 3.1E0), row(32, 4.2E0)], row(6, 6.0E0)) AS ROW(col0 double, col1 array(row(col0 integer, col1 double)), col2 row(col0 integer, col1 double)))), " + + "ROW(CAST(ROW(2.2, ARRAY[row(31, 4.2E0), row(22, 4.2E0)], row(5, 4.0E0)) AS ROW(col0 double, col1 array(row(col0 integer, col1 double)), col2 row(col0 integer, col1 double))))) t(a) " + + "GROUP BY a.col1[1].col0", + "SELECT * FROM VALUES (31, 3.2, 8.7, 34, 8.0), (41, 3.1, 3.1, 32, 6.0)"); + + assertQuery("SELECT a.col1[1].col0, SUM(a.col0), SUM(a.col1[1].col1), SUM(a.col1[2].col0), SUM(a.col2.col1) FROM " + + "(VALUES " + + "ROW(CAST(ROW(2.2, ARRAY[row(31, 4.2E0), row(22, 4.2E0)], row(5, 4.0E0)) AS ROW(col0 double, col1 array(row(col0 integer, col1 double)), col2 row(col0 integer, col1 double)))), " + + "ROW(CAST(ROW(1.0, ARRAY[row(31, 4.5E0), row(12, 4.2E0)], row(3, 4.1E0)) AS ROW(col0 double, col1 array(row(col0 integer, col1 double)), col2 row(col0 integer, col1 double)))), " + + "ROW(CAST(ROW(3.1, ARRAY[row(41, 3.1E0), row(32, 4.2E0)], row(6, 6.0E0)) AS ROW(col0 double, col1 array(row(col0 integer, col1 double)), col2 row(col0 integer, col1 double)))), " + + "ROW(CAST(ROW(3.3, ARRAY[row(41, 3.1E0), row(32, 4.2E0)], row(6, 6.0E0)) AS ROW(col0 double, col1 array(row(col0 integer, col1 double)), col2 row(col0 integer, col1 double)))) " + + ") t(a) " + + "GROUP BY a.col1[1]", + "SELECT * FROM VALUES (31, 2.2, 4.2, 22, 4.0), (31, 1.0, 4.5, 12, 4.1), (41, 6.4, 6.2, 64, 12.0)"); + + assertQuery("SELECT a.col1[2], SUM(a.col0), SUM(a.col1[1]), SUM(a.col2.col1) FROM " + + "(VALUES " + + "ROW(CAST(ROW(1.0, ARRAY[2, 13, 4], row(11, 4.1E0)) AS ROW(col0 double, col1 array(integer), col2 row(col0 integer, col1 double)))), " + + "ROW(CAST(ROW(2.0, ARRAY[2, 13, 4], row(12, 14.0E0)) AS ROW(col0 double, col1 
array(integer), col2 row(col0 integer, col1 double)))), " + + "ROW(CAST(ROW(7.0, ARRAY[22, 33, 44], row(13, 5.0E0)) AS ROW(col0 double, col1 array(integer), col2 row(col0 integer, col1 double))))) t(a) " + + "GROUP BY a.col1[2]", + "SELECT * FROM VALUES (13, 3.0, 4, 18.1), (33, 7.0, 22, 5.0)"); + + assertQuery("SELECT a.col2.col0, SUM(a.col2.col1) FROM " + + "(VALUES " + + "ROW(CAST(ROW(2.2, ARRAY[row(31, 4.2E0), row(22, 4.2E0)], row(5, 4.0E0)) AS ROW(col0 double, col1 array(row(col0 integer, col1 double)), col2 row(col0 integer, col1 double)))), " + + "ROW(CAST(ROW(1.0, ARRAY[row(31, 4.5E0), row(12, 4.2E0)], row(3, 4.1E0)) AS ROW(col0 double, col1 array(row(col0 integer, col1 double)), col2 row(col0 integer, col1 double)))), " + + "ROW(CAST(ROW(3.1, ARRAY[row(41, 3.1E0), row(32, 4.2E0)], row(6, 6.0E0)) AS ROW(col0 double, col1 array(row(col0 integer, col1 double)), col2 row(col0 integer, col1 double)))), " + + "ROW(CAST(ROW(3.3, ARRAY[row(41, 3.1E0), row(32, 4.2E0)], row(6, 6.0E0)) AS ROW(col0 double, col1 array(row(col0 integer, col1 double)), col2 row(col0 integer, col1 double)))) " + + ") t(a) " + + "GROUP BY a.col2", + "SELECT * FROM VALUES (5, 4.0), (3, 4.1), (6, 12.0)"); + + assertQuery("SELECT a.col2.col0, a.col0, SUM(a.col2.col1) FROM " + + "(VALUES " + + "ROW(CAST(ROW(1.0, ARRAY[2, 13, 4], row(11, 4.1E0)) AS ROW(col0 double, col1 array(integer), col2 row(col0 integer, col1 double)))), " + + "ROW(CAST(ROW(2.0, ARRAY[2, 23, 4], row(11, 14.0E0)) AS ROW(col0 double, col1 array(integer), col2 row(col0 integer, col1 double)))), " + + "ROW(CAST(ROW(1.5, ARRAY[2, 13, 4], row(11, 4.1E0)) AS ROW(col0 double, col1 array(integer), col2 row(col0 integer, col1 double)))), " + + "ROW(CAST(ROW(1.5, ARRAY[2, 13, 4], row(11, 4.1E0)) AS ROW(col0 double, col1 array(integer), col2 row(col0 integer, col1 double)))), " + + "ROW(CAST(ROW(7.0, ARRAY[22, 33, 44], row(13, 5.0E0)) AS ROW(col0 double, col1 array(integer), col2 row(col0 integer, col1 double))))) t(a) " + + "WHERE 
a.col1[2] < 30 " + + "GROUP BY 1, 2 ORDER BY 1", + "SELECT * FROM VALUES (11, 1.0, 4.1), (11, 1.5, 8.2), (11, 2.0, 14.0)"); + + assertQuery("SELECT a[1].col0, COUNT(1) FROM " + + "(VALUES " + + "(ROW(CAST(ARRAY[row(31, 4.2E0), row(22, 4.2E0)] AS ARRAY(ROW(col0 integer, col1 double))))), " + + "(ROW(CAST(ARRAY[row(31, 4.5E0), row(12, 4.2E0)] AS ARRAY(ROW(col0 integer, col1 double))))), " + + "(ROW(CAST(ARRAY[row(41, 3.1E0), row(32, 4.2E0)] AS ARRAY(ROW(col0 integer, col1 double))))), " + + "(ROW(CAST(ARRAY[row(31, 3.1E0), row(32, 4.2E0)] AS ARRAY(ROW(col0 integer, col1 double))))) " + + ") t(a) " + + "GROUP BY 1 " + + "ORDER BY 2 DESC", + "SELECT * FROM VALUES (31, 3), (41, 1)"); + } + + @Test + public void testRowFieldAccessorInJoin() + { + assertQuery("" + + "SELECT t.a.col1, custkey, orderkey FROM " + + "(VALUES " + + "ROW(CAST(ROW(1, 11) AS ROW(col0 integer, col1 integer))), " + + "ROW(CAST(ROW(2, 22) AS ROW(col0 integer, col1 integer))), " + + "ROW(CAST(ROW(3, 33) AS ROW(col0 integer, col1 integer)))) t(a) " + + "INNER JOIN orders " + + "ON t.a.col0 = orders.orderkey", + "SELECT * FROM VALUES (11, 370, 1), (22, 781, 2), (33, 1234, 3)"); + } + + @Test + public void testRowCast() + { + assertQuery("SELECT CAST(row(1, 2) AS row(aa bigint, bb boolean)).aa", "SELECT 1"); + assertQuery("SELECT CAST(row(1, 2) AS row(aa bigint, bb boolean)).bb", "SELECT true"); + assertQuery("SELECT CAST(row(1, 2) AS row(aa bigint, bb varchar)).bb", "SELECT '2'"); + assertQuery("SELECT CAST(row(true, array[0, 2]) AS row(aa boolean, bb array(boolean))).bb[1]", "SELECT false"); + assertQuery("SELECT CAST(row(0.1, array[0, 2], row(1, 0.5)) AS row(aa bigint, bb array(boolean), cc row(dd varchar, ee varchar))).cc.ee", "SELECT '0.5'"); + assertQuery("SELECT CAST(array[row(0.1, array[0, 2], row(1, 0.5))] AS array)[1].cc.ee", "SELECT '0.5'"); + } + + @Test + public void testDereferenceInSubquery() + { + assertQuery("" + + "SELECT x " + + "FROM (" + + " SELECT a.x" + + " FROM (VALUES 1, 2, 3) 
a(x)" + + ") " + + "GROUP BY x", + "SELECT * FROM VALUES 1, 2, 3"); + + assertQuery("" + + "SELECT t2.*, max(t1.b) AS max_b " + + "FROM (VALUES (1, 'a'), (2, 'b'), (1, 'c'), (3, 'd')) t1(a, b) " + + "INNER JOIN " + + "(VALUES 1, 2, 3, 4) t2(a) " + + "ON t1.a = t2.a " + + "GROUP BY t2.a", + "SELECT * FROM VALUES (1, 'c'), (2, 'b'), (3, 'd')"); + + assertQuery("" + + "SELECT t2.*, max(t1.b1) AS max_b1 " + + "FROM (VALUES (1, 'a'), (2, 'b'), (1, 'c'), (3, 'd')) t1(a1, b1) " + + "INNER JOIN " + + "(VALUES (1, 11, 111), (2, 22, 222), (3, 33, 333), (4, 44, 444)) t2(a2, b2, c2) " + + "ON t1.a1 = t2.a2 " + + "GROUP BY t2.a2, t2.b2, t2.c2", + "SELECT * FROM VALUES (1, 11, 111, 'c'), (2, 22, 222, 'b'), (3, 33, 333, 'd')"); + + assertQuery("" + + "SELECT custkey, orders2 " + + "FROM (" + + " SELECT x.custkey, SUM(x.orders) + 1 orders2 " + + " FROM ( " + + " SELECT x.custkey, COUNT(x.orderkey) orders " + + " FROM orders x " + + " WHERE x.custkey < 100 " + + " GROUP BY x.custkey " + + " ) x " + + " GROUP BY x.custkey" + + ") " + + "ORDER BY custkey"); + } + + @Test + public void testDereferenceInFunctionCall() + { + assertQuery("" + + "SELECT COUNT(DISTINCT custkey) " + + "FROM ( " + + " SELECT x.custkey " + + " FROM orders x " + + " WHERE custkey < 100 " + + ") t"); + } + + @Test + public void testDereferenceInComparison() + { + assertQuery("" + + "SELECT orders.custkey, orders.orderkey " + + "FROM orders " + + "WHERE orders.custkey > orders.orderkey AND orders.custkey < 200.3"); + } + + @Test + public void testMissingRowFieldInGroupBy() + { + assertQueryFails( + "SELECT a.col0, count(*) FROM (VALUES ROW(cast(ROW(1, 1) AS ROW(col0 integer, col1 integer)))) t(a)", + "line 1:8: 'a.col0' must be an aggregate expression or appear in GROUP BY clause"); + } + + @Test + public void testWhereWithRowField() + { + assertQuery("SELECT a.col0 FROM (VALUES ROW(CAST(ROW(1, 2) AS ROW(col0 integer, col1 integer)))) AS t (a) WHERE a.col0 > 0", "SELECT 1"); + assertQuery("SELECT SUM(a.col0) 
FROM (VALUES ROW(CAST(ROW(1, 2) AS ROW(col0 integer, col1 integer)))) AS t (a) WHERE a.col0 <= 0", "SELECT null"); + + assertQuery("SELECT a.col0 FROM (VALUES ROW(CAST(ROW(1, 2) AS ROW(col0 integer, col1 integer)))) AS t (a) WHERE a.col0 < a.col1", "SELECT 1"); + assertQuery("SELECT SUM(a.col0) FROM (VALUES ROW(CAST(ROW(1, 2) AS ROW(col0 integer, col1 integer)))) AS t (a) WHERE a.col0 < a.col1", "SELECT 1"); + assertQuery("SELECT SUM(a.col0) FROM (VALUES ROW(CAST(ROW(1, 2) AS ROW(col0 integer, col1 integer)))) AS t (a) WHERE a.col0 > a.col1", "SELECT null"); + } + + @Test + public void testUnnest() + { + assertQuery("SELECT 1 FROM (VALUES (ARRAY[1])) AS t (a) CROSS JOIN UNNEST(a)", "SELECT 1"); + assertQuery("SELECT x[1] FROM UNNEST(ARRAY[ARRAY[1, 2, 3]]) t(x)", "SELECT 1"); + assertQuery("SELECT x[1][2] FROM UNNEST(ARRAY[ARRAY[ARRAY[1, 2, 3]]]) t(x)", "SELECT 2"); + assertQuery("SELECT x[2] FROM UNNEST(ARRAY[MAP(ARRAY[1,2], ARRAY['hello', 'hi'])]) t(x)", "SELECT 'hi'"); + assertQuery("SELECT * FROM UNNEST(ARRAY[1, 2, 3])", "SELECT * FROM VALUES (1), (2), (3)"); + assertQuery("SELECT a FROM UNNEST(ARRAY[1, 2, 3]) t(a)", "SELECT * FROM VALUES (1), (2), (3)"); + assertQuery("SELECT a, b FROM UNNEST(ARRAY[1, 2], ARRAY[3, 4]) t(a, b)", "SELECT * FROM VALUES (1, 3), (2, 4)"); + assertQuery("SELECT a, b FROM UNNEST(ARRAY[1, 2, 3], ARRAY[4, 5]) t(a, b)", "SELECT * FROM VALUES (1, 4), (2, 5), (3, NULL)"); + assertQuery("SELECT a FROM UNNEST(ARRAY[1, 2, 3], ARRAY[4, 5]) t(a, b)", "SELECT * FROM VALUES 1, 2, 3"); + assertQuery("SELECT b FROM UNNEST(ARRAY[1, 2, 3], ARRAY[4, 5]) t(a, b)", "SELECT * FROM VALUES 4, 5, NULL"); + assertQuery("SELECT count(*) FROM UNNEST(ARRAY[1, 2, 3], ARRAY[4, 5])", "SELECT 3"); + assertQuery("SELECT a FROM UNNEST(ARRAY['kittens', 'puppies']) t(a)", "SELECT * FROM VALUES ('kittens'), ('puppies')"); + assertQuery("" + + "SELECT c " + + "FROM UNNEST(ARRAY[1, 2, 3], ARRAY[4, 5]) t(a, b) " + + "CROSS JOIN (values (8), (9)) t2(c)", + "SELECT * FROM 
VALUES 8, 8, 8, 9, 9, 9"); + assertQuery("" + + "SELECT a.custkey, t.e " + + "FROM (SELECT custkey, ARRAY[1, 2, 3] AS my_array FROM orders ORDER BY orderkey LIMIT 1) a " + + "CROSS JOIN UNNEST(my_array) t(e)", + "SELECT * FROM (SELECT custkey FROM orders ORDER BY orderkey LIMIT 1) CROSS JOIN (VALUES (1), (2), (3))"); + assertQuery("" + + "SELECT a.custkey, t.e " + + "FROM (SELECT custkey, ARRAY[1, 2, 3] AS my_array FROM orders ORDER BY orderkey LIMIT 1) a, " + + "UNNEST(my_array) t(e)", + "SELECT * FROM (SELECT custkey FROM orders ORDER BY orderkey LIMIT 1) CROSS JOIN (VALUES (1), (2), (3))"); + assertQuery("SELECT * FROM UNNEST(ARRAY[0, 1]) CROSS JOIN UNNEST(ARRAY[0, 1]) CROSS JOIN UNNEST(ARRAY[0, 1])", + "SELECT * FROM VALUES (0, 0, 0), (0, 0, 1), (0, 1, 0), (0, 1, 1), (1, 0, 0), (1, 0, 1), (1, 1, 0), (1, 1, 1)"); + assertQuery("SELECT * FROM UNNEST(ARRAY[0, 1]), UNNEST(ARRAY[0, 1]), UNNEST(ARRAY[0, 1])", + "SELECT * FROM VALUES (0, 0, 0), (0, 0, 1), (0, 1, 0), (0, 1, 1), (1, 0, 0), (1, 0, 1), (1, 1, 0), (1, 1, 1)"); + assertQuery("SELECT a, b FROM UNNEST(MAP(ARRAY[1,2], ARRAY['cat', 'dog'])) t(a, b)", "SELECT * FROM VALUES (1, 'cat'), (2, 'dog')"); + assertQuery("SELECT a, b FROM UNNEST(MAP(ARRAY[1,2], ARRAY['cat', NULL])) t(a, b)", "SELECT * FROM VALUES (1, 'cat'), (2, NULL)"); + + assertQuery("SELECT 1 FROM (VALUES (ARRAY[1])) AS t (a) CROSS JOIN UNNEST(a) WITH ORDINALITY", "SELECT 1"); + assertQuery("SELECT * FROM UNNEST(ARRAY[1, 2, 3]) WITH ORDINALITY", "SELECT * FROM VALUES (1, 1), (2, 2), (3, 3)"); + assertQuery("SELECT b FROM UNNEST(ARRAY[10, 20, 30]) WITH ORDINALITY t(a, b)", "SELECT * FROM VALUES (1), (2), (3)"); + assertQuery("SELECT a, b, c FROM UNNEST(ARRAY[10, 20, 30], ARRAY[4, 5]) WITH ORDINALITY t(a, b, c)", "SELECT * FROM VALUES (10, 4, 1), (20, 5, 2), (30, NULL, 3)"); + assertQuery("SELECT a, b FROM UNNEST(ARRAY['kittens', 'puppies']) WITH ORDINALITY t(a, b)", "SELECT * FROM VALUES ('kittens', 1), ('puppies', 2)"); + assertQuery("" + + "SELECT c 
" + + "FROM UNNEST(ARRAY[1, 2, 3], ARRAY[4, 5]) WITH ORDINALITY t(a, b, c) " + + "CROSS JOIN (values (8), (9)) t2(d)", + "SELECT * FROM VALUES 1, 1, 2, 2, 3, 3"); + assertQuery("" + + "SELECT a.custkey, t.e, t.f " + + "FROM (SELECT custkey, ARRAY[10, 20, 30] AS my_array FROM orders ORDER BY orderkey LIMIT 1) a " + + "CROSS JOIN UNNEST(my_array) WITH ORDINALITY t(e, f)", + "SELECT * FROM (SELECT custkey FROM orders ORDER BY orderkey LIMIT 1) CROSS JOIN (VALUES (10, 1), (20, 2), (30, 3))"); + assertQuery("" + + "SELECT a.custkey, t.e, t.f " + + "FROM (SELECT custkey, ARRAY[10, 20, 30] AS my_array FROM orders ORDER BY orderkey LIMIT 1) a, " + + "UNNEST(my_array) WITH ORDINALITY t(e, f)", + "SELECT * FROM (SELECT custkey FROM orders ORDER BY orderkey LIMIT 1) CROSS JOIN (VALUES (10, 1), (20, 2), (30, 3))"); + + assertQuery("SELECT * FROM orders, UNNEST(ARRAY[1])", "SELECT orders.*, 1 FROM orders"); + } + + @Test + public void testMaxMinStringWithNulls() + { + assertQuery("SELECT custkey, MAX(NULLIF(orderstatus, 'O')), MIN(NULLIF(orderstatus, 'O')) FROM orders GROUP BY custkey"); + } + + @Test + public void testApproxPercentile() + { + MaterializedResult raw = computeActual("SELECT orderstatus, orderkey, totalprice FROM orders"); + + Multimap orderKeyByStatus = ArrayListMultimap.create(); + Multimap totalPriceByStatus = ArrayListMultimap.create(); + for (MaterializedRow row : raw.getMaterializedRows()) { + orderKeyByStatus.put((String) row.getField(0), ((Number) row.getField(1)).longValue()); + totalPriceByStatus.put((String) row.getField(0), (Double) row.getField(2)); + } + + MaterializedResult actual = computeActual("" + + "SELECT orderstatus, " + + " approx_percentile(orderkey, 0.5), " + + " approx_percentile(totalprice, 0.5)," + + " approx_percentile(orderkey, 2, 0.5)," + + " approx_percentile(totalprice, 2, 0.5)," + + " approx_percentile(orderkey, .2, 0.5)," + + " approx_percentile(totalprice, .2, 0.5)\n" + + "FROM orders\n" + + "GROUP BY orderstatus"); + + for 
(MaterializedRow row : actual.getMaterializedRows()) { + String status = (String) row.getField(0); + Long orderKey = ((Number) row.getField(1)).longValue(); + Double totalPrice = (Double) row.getField(2); + Long orderKeyWeighted = ((Number) row.getField(3)).longValue(); + Double totalPriceWeighted = (Double) row.getField(4); + Long orderKeyFractionalWeighted = ((Number) row.getField(5)).longValue(); + Double totalPriceFractionalWeighted = (Double) row.getField(6); + + List orderKeys = Ordering.natural().sortedCopy(orderKeyByStatus.get(status)); + List totalPrices = Ordering.natural().sortedCopy(totalPriceByStatus.get(status)); + + // verify real rank of returned value is within 1% of requested rank + assertTrue(orderKey >= orderKeys.get((int) (0.49 * orderKeys.size()))); + assertTrue(orderKey <= orderKeys.get((int) (0.51 * orderKeys.size()))); + + assertTrue(orderKeyWeighted >= orderKeys.get((int) (0.49 * orderKeys.size()))); + assertTrue(orderKeyWeighted <= orderKeys.get((int) (0.51 * orderKeys.size()))); + + assertTrue(orderKeyFractionalWeighted >= orderKeys.get((int) (0.49 * orderKeys.size()))); + assertTrue(orderKeyFractionalWeighted <= orderKeys.get((int) (0.51 * orderKeys.size()))); + + assertTrue(totalPrice >= totalPrices.get((int) (0.49 * totalPrices.size()))); + assertTrue(totalPrice <= totalPrices.get((int) (0.51 * totalPrices.size()))); + + assertTrue(totalPriceWeighted >= totalPrices.get((int) (0.49 * totalPrices.size()))); + assertTrue(totalPriceWeighted <= totalPrices.get((int) (0.51 * totalPrices.size()))); + + assertTrue(totalPriceFractionalWeighted >= totalPrices.get((int) (0.49 * totalPrices.size()))); + assertTrue(totalPriceFractionalWeighted <= totalPrices.get((int) (0.51 * totalPrices.size()))); + } + } + + @Test + public void testWhereNull() + { + // This query is has this strange shape to force the compiler to leave a true on the stack + // with the null flag set so if the filter method is not handling nulls correctly, this + // query will 
fail + assertQuery("SELECT custkey FROM orders WHERE custkey = custkey AND CAST(nullif(custkey, custkey) AS boolean) AND CAST(nullif(custkey, custkey) AS boolean)"); + } + + @Test + public void testDistinctWithOrderByNotInSelect() + { + assertQueryFails( + "SELECT DISTINCT custkey FROM orders ORDER BY orderkey LIMIT 10", + "line 1:1: For SELECT DISTINCT, ORDER BY expressions must appear in select list"); + } + + @Test + public void testGroupByOrderByLimit() + { + assertQueryOrdered("SELECT custkey, SUM(totalprice) FROM orders GROUP BY custkey ORDER BY SUM(totalprice) DESC LIMIT 10"); + } + + @Test + public void testLimitZero() + { + assertQuery("SELECT custkey, totalprice FROM orders LIMIT 0"); + } + + @Test + public void testLimitAll() + { + assertQuery("SELECT custkey, totalprice FROM orders LIMIT ALL", "SELECT custkey, totalprice FROM orders"); + } + + @Test + public void testOffset() + { + String values = "(VALUES ('A', 3), ('D', 2), ('C', 1), ('B', 4)) AS t(x, y)"; + + MaterializedResult actual = computeActual("SELECT x FROM " + values + " OFFSET 2 ROWS"); + MaterializedResult all = computeExpected("SELECT x FROM " + values, actual.getTypes()); + + assertEquals(actual.getMaterializedRows().size(), 2); + assertNotEquals(actual.getMaterializedRows().get(0), actual.getMaterializedRows().get(1)); + assertContains(all, actual); + } + + @Test + public void testOffsetWithFetch() + { + String values = "(VALUES ('A', 3), ('D', 2), ('C', 1), ('B', 4)) AS t(x, y)"; + + MaterializedResult actual = computeActual("SELECT x FROM " + values + " OFFSET 2 ROWS FETCH NEXT ROW ONLY"); + MaterializedResult all = computeExpected("SELECT x FROM " + values, actual.getTypes()); + + assertEquals(actual.getMaterializedRows().size(), 1); + assertContains(all, actual); + } + + @Test + public void testOffsetWithOrderBy() + { + String values = "(VALUES ('A', 3), ('D', 2), ('C', 1), ('B', 4)) AS t(x, y)"; + + assertQuery("SELECT x FROM " + values + " ORDER BY y OFFSET 2 ROWS", "VALUES 'A', 
'B'"); + assertQuery("SELECT x FROM " + values + " ORDER BY y OFFSET 2 ROWS FETCH NEXT 1 ROW ONLY", "VALUES 'A'"); + } + + @Test + public void testOffsetEmptyResult() + { + assertQueryReturnsEmptyResult("SELECT name FROM nation OFFSET 100 ROWS"); + assertQueryReturnsEmptyResult("SELECT name FROM nation ORDER BY regionkey OFFSET 100 ROWS"); + assertQueryReturnsEmptyResult("SELECT name FROM nation OFFSET 100 ROWS LIMIT 20"); + assertQueryReturnsEmptyResult("SELECT name FROM nation ORDER BY regionkey OFFSET 100 ROWS LIMIT 20"); + } + + @Test + public void testFetchFirstWithTies() + { + String values = "(VALUES 1, 1, 1, 0, 0, 0, 2, 2, 2) AS t(x)"; + + assertQuery("SELECT x FROM " + values + " ORDER BY x FETCH FIRST 4 ROWS WITH TIES", "VALUES 0, 0, 0, 1, 1, 1"); + assertQuery("SELECT x FROM " + values + " ORDER BY x FETCH FIRST ROW WITH TIES", "VALUES 0, 0, 0"); + assertQuery("SELECT x FROM " + values + " ORDER BY x FETCH FIRST 20 ROWS WITH TIES", "VALUES 0, 0, 0, 1, 1, 1, 2, 2, 2"); + + assertQuery("SELECT x FROM " + values + " ORDER BY x OFFSET 2 ROWS FETCH NEXT 2 ROWS WITH TIES", "VALUES 0, 1, 1, 1"); + + assertQueryReturnsEmptyResult("SELECT x FROM " + values + " ORDER BY x OFFSET 20 ROWS FETCH NEXT 2 ROWS WITH TIES"); + + assertQueryFails("SELECT x FROM " + values + " FETCH FIRST 4 ROWS WITH TIES", "line 1:58: FETCH FIRST WITH TIES clause requires ORDER BY"); + assertQueryFails( + "SELECT x FROM (SELECT a FROM (VALUES 3, 2, 1, 1, 0) t(a) ORDER BY a) t1(x) FETCH FIRST 2 ROWS WITH TIES", + "line 1:76: FETCH FIRST WITH TIES clause requires ORDER BY"); + + String valuesMultiColumn = "(VALUES ('b', 0), ('b', 0), ('a', 1), ('a', 0), ('b', 1)) AS t(x, y)"; + + // if ORDER BY uses multiple symbols, then TIES are resolved basing on multiple symbols too + assertQuery("SELECT x, y FROM " + valuesMultiColumn + " ORDER BY x, y FETCH FIRST 3 ROWS WITH TIES", "VALUES ('a', 0), ('a', 1), ('b', 0), ('b', 0)"); + assertQuery("SELECT x, y FROM " + valuesMultiColumn + " ORDER BY x 
DESC, y FETCH FIRST ROW WITH TIES", "VALUES ('b', 0), ('b', 0)"); + } + + @Test + public void testForcePartitioningMarkDistinctInput() + { + Session session = Session.builder(getSession()) + .setSystemProperty(IGNORE_DOWNSTREAM_PREFERENCES, "false") + .build(); + + assertQuery( + session, + "SELECT count(orderkey), count(distinct orderkey), custkey , count(1) FROM ( SELECT * FROM (VALUES (1, 2)) as t(custkey, orderkey) UNION ALL SELECT 3, 4) GROUP BY 3", + "VALUES (1, 1, 1, 1), (1, 1, 3, 1)"); + + session = Session.builder(getSession()) + .setSystemProperty(IGNORE_DOWNSTREAM_PREFERENCES, "true") + .build(); + + assertQuery( + session, + "SELECT count(orderkey), count(distinct orderkey), custkey , count(1) FROM ( SELECT * FROM (VALUES (1, 2)) as t(custkey, orderkey) UNION ALL SELECT 3, 4) GROUP BY 3", + "VALUES (1, 1, 1, 1), (1, 1, 3, 1)"); + } + + @Test + public void testUnaliasSymbolReferencesWithUnion() + { + assertQuery("SELECT 1, 1, 'a', 'a' UNION ALL SELECT 1, 2, 'a', 'b'"); + } + + @Test + public void testSameInPredicateInProjectionAndFilter() + { + assertQuery("SELECT x IN (SELECT * FROM (VALUES 1))\n" + + "FROM (VALUES 1) t(x)\n" + + "WHERE x IN (SELECT * FROM (VALUES 1))", + "SELECT 1"); + + assertQuery("SELECT x IN (SELECT * FROM (VALUES 1))\n" + + "FROM (VALUES 2) t(x)\n" + + "WHERE x IN (SELECT * FROM (VALUES 1))", + "SELECT 1 WHERE false"); + } + + @Test + public void testTrivialNonDeterministicPredicatePushdown() + { + assertQuery("SELECT COUNT(*) WHERE rand() >= 0"); + } + + @Test + public void testMultipleOccurrencesOfCorrelatedSymbol() + { + @Language("SQL") String expected = + "VALUES " + + "('AFRICA', 'MOZAMBIQUE'), " + + "('AMERICA', 'UNITED STATES'), " + + "('ASIA', 'VIETNAM'), " + + "('EUROPE', 'UNITED KINGDOM'), " + + "('MIDDLE EAST', 'SAUDI ARABIA')"; + + // correlated symbol used twice, no coercion + assertQuery( + "SELECT region.name, (SELECT max(name) FROM nation WHERE regionkey * 2 = region.regionkey * 2 AND regionkey = region.regionkey) 
FROM region", + expected); + + // correlated symbol used twice, first occurrence coerced to double + assertQuery( + "SELECT region.name, (SELECT max(name) FROM nation WHERE CAST(regionkey AS double) = region.regionkey AND regionkey = region.regionkey) FROM region", + expected); + + // correlated symbol used twice, second occurrence coerced to double + assertQuery( + "SELECT region.name, (SELECT max(name) FROM nation WHERE regionkey = region.regionkey AND CAST(regionkey AS double) = region.regionkey) FROM region", + expected); + + // different coercions + assertQuery( + "SELECT region.name, " + + "(SELECT max(name) FROM nation " + + "WHERE CAST(regionkey AS double) = region.regionkey " + // region.regionkey coerced to double + "AND regionkey = region.regionkey " + // no coercion + "AND regionkey * 1.0 = region.regionkey) " + // region.regionkey coerced to decimal + "FROM region", + expected); + } + + @Test + public void testGrouping() + { + assertQuery( + "SELECT a, b AS t, sum(c), grouping(a, b) + grouping(a) " + + "FROM (VALUES ('h', 'j', 11), ('k', 'l', 7)) AS t (a, b, c) " + + "GROUP BY GROUPING SETS ( (a), (b)) " + + "ORDER BY grouping(b) ASC", + "VALUES (NULL, 'j', 11, 3), (NULL, 'l', 7, 3), ('h', NULL, 11, 1), ('k', NULL, 7, 1)"); + + assertQuery( + "SELECT a, sum(b), grouping(a) FROM (VALUES ('h', 11, 0), ('k', 7, 0)) AS t (a, b, c) GROUP BY GROUPING SETS (a)", + "VALUES ('h', 11, 0), ('k', 7, 0)"); + + assertQuery( + "SELECT a, b, sum(c), grouping(a, b) FROM (VALUES ('h', 'j', 11), ('k', 'l', 7) ) AS t (a, b, c) GROUP BY GROUPING SETS ( (a), (b)) HAVING grouping(a, b) > 1 ", + "VALUES (NULL, 'j', 11, 2), (NULL, 'l', 7, 2)"); + + assertQuery("SELECT a, grouping(a) * 1.0 FROM (VALUES (1) ) AS t (a) GROUP BY a", + "VALUES (1, 0.0)"); + + assertQuery("SELECT a, grouping(a), grouping(a) FROM (VALUES (1) ) AS t (a) GROUP BY a", + "VALUES (1, 0, 0)"); + + assertQuery("SELECT grouping(a) FROM (VALUES ('h', 'j', 11), ('k', 'l', 7)) AS t (a, b, c) GROUP BY GROUPING 
SETS (a,c), c*2", + "VALUES (0), (1), (0), (1)"); + } + + @Test + public void testGroupingWithFortyArguments() + { + // This test ensures we correctly pick the bigint implementation version of the grouping + // function which supports up to 62 columns. Semantically it is exactly the same as + // TestGroupingOperationFunction#testMoreThanThirtyTwoArguments. That test is a little easier to + // understand and verify. + String fortyLetterSequence = "aa, ab, ac, ad, ae, af, ag, ah, ai, aj, ak, al, am, an, ao, ap, aq, ar, asa, at, au, av, aw, ax, ay, az, " + + "ba, bb, bc, bd, be, bf, bg, bh, bi, bj, bk, bl, bm, bn"; + String fortyIntegers = "1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, " + + "31, 32, 33, 34, 35, 36, 37, 38, 39, 40"; + // 20, 2, 13, 33, 40, 9 , 14 (corresponding indices from Left to right in the above fortyLetterSequence) + String groupingSet1 = "at, ab, am, bg, bn, ai, an"; + // 28, 4, 5, 29, 31, 10 (corresponding indices from left to right in the above fortyLetterSequence) + String groupingSet2 = "bb, ad, ae, bc, be, aj"; + String query = format( + "SELECT grouping(%s) FROM (VALUES (%s)) AS t(%s) GROUP BY GROUPING SETS ((%s), (%s), (%s))", + fortyLetterSequence, + fortyIntegers, + fortyLetterSequence, + fortyLetterSequence, + groupingSet1, + groupingSet2); + + assertQuery(query, "VALUES (0), (822283861886), (995358664191)"); + } + + @Test + public void testGroupingInTableSubquery() + { + // In addition to testing grouping() in subqueries, the following tests also + // ensure correct behavior in the case of alternating GROUPING SETS and GROUP BY + // clauses in the same plan. This is significant because grouping() with GROUP BY + // works only with a special re-write that should not happen in the presence of + // GROUPING SETS. 
+ + // Inner query has a single GROUP BY and outer query has GROUPING SETS + assertQuery( + "SELECT orderkey, custkey, sum(agg_price) AS outer_sum, grouping(orderkey, custkey), g " + + "FROM " + + " (SELECT orderkey, custkey, sum(totalprice) AS agg_price, grouping(custkey, orderkey) AS g " + + " FROM orders " + + " GROUP BY orderkey, custkey " + + " ORDER BY agg_price ASC " + + " LIMIT 5) AS t " + + "GROUP BY GROUPING SETS ((orderkey, custkey), g) " + + "ORDER BY outer_sum", + "VALUES (35271, 334, 874.89, 0, NULL), " + + " (28647, 1351, 924.33, 0, NULL), " + + " (58145, 862, 929.03, 0, NULL), " + + " (8354, 634, 974.04, 0, NULL), " + + " (37415, 301, 986.63, 0, NULL), " + + " (NULL, NULL, 4688.92, 3, 0)"); + + // Inner query has GROUPING SETS and outer query has GROUP BY + assertQuery( + "SELECT orderkey, custkey, g, sum(agg_price) AS outer_sum, grouping(orderkey, custkey) " + + "FROM " + + " (SELECT orderkey, custkey, sum(totalprice) AS agg_price, grouping(custkey, orderkey) AS g " + + " FROM orders " + + " GROUP BY GROUPING SETS ((custkey), (orderkey)) " + + " ORDER BY agg_price ASC " + + " LIMIT 5) AS t " + + "GROUP BY orderkey, custkey, g", + "VALUES (28647, NULL, 2, 924.33, 0), " + + " (8354, NULL, 2, 974.04, 0), " + + " (37415, NULL, 2, 986.63, 0), " + + " (58145, NULL, 2, 929.03, 0), " + + " (35271, NULL, 2, 874.89, 0)"); + + // Inner query has GROUPING SETS but no grouping and outer query has a simple GROUP BY + assertQuery( + "SELECT orderkey, custkey, sum(agg_price) AS outer_sum, grouping(orderkey, custkey) " + + "FROM " + + " (SELECT orderkey, custkey, sum(totalprice) AS agg_price " + + " FROM orders " + + " GROUP BY GROUPING SETS ((custkey), (orderkey)) " + + " ORDER BY agg_price ASC NULLS FIRST) AS t " + + "GROUP BY orderkey, custkey " + + "ORDER BY outer_sum ASC NULLS FIRST " + + "LIMIT 5", + "VALUES (35271, NULL, 874.89, 0), " + + " (28647, NULL, 924.33, 0), " + + " (58145, NULL, 929.03, 0), " + + " (8354, NULL, 974.04, 0), " + + " (37415, NULL, 
986.63, 0)"); + } + + @Test + public void testShowSession() + { + Session session = new Session( + getSession().getQueryId(), + Optional.empty(), + getSession().isClientTransactionSupport(), + getSession().getIdentity(), + getSession().getSource(), + getSession().getCatalog(), + getSession().getSchema(), + getSession().getPath(), + getSession().getTraceToken(), + getSession().getTimeZoneKey(), + getSession().getLocale(), + getSession().getRemoteUserAddress(), + getSession().getUserAgent(), + getSession().getClientInfo(), + getSession().getClientTags(), + getSession().getClientCapabilities(), + getSession().getResourceEstimates(), + getSession().getStartTime(), + ImmutableMap.builder() + .put("test_string", "foo string") + .put("test_long", "424242") + .build(), + ImmutableMap.of(), + ImmutableMap.of(TESTING_CATALOG, ImmutableMap.builder() + .put("connector_string", "bar string") + .put("connector_long", "11") + .build()), + getQueryRunner().getMetadata().getSessionPropertyManager(), + getSession().getPreparedStatements()); + MaterializedResult result = computeActual(session, "SHOW SESSION"); + + ImmutableMap properties = Maps.uniqueIndex(result.getMaterializedRows(), input -> { + assertEquals(input.getFieldCount(), 5); + return (String) input.getField(0); + }); + + assertEquals(properties.get("test_string"), new MaterializedRow(1, "test_string", "foo string", "test default", "varchar", "test string property")); + assertEquals(properties.get("test_long"), new MaterializedRow(1, "test_long", "424242", "42", "bigint", "test long property")); + assertEquals(properties.get(TESTING_CATALOG + ".connector_string"), + new MaterializedRow(1, TESTING_CATALOG + ".connector_string", "bar string", "connector default", "varchar", "connector string property")); + assertEquals(properties.get(TESTING_CATALOG + ".connector_long"), + new MaterializedRow(1, TESTING_CATALOG + ".connector_long", "11", "33", "bigint", "connector long property")); + } + + @Test + public void testTry() + { 
+ // divide by zero + assertQuery( + "SELECT linenumber, sum(TRY(100/(CAST (tax*10 AS BIGINT)))) FROM lineitem GROUP BY linenumber", + "SELECT linenumber, sum(100/(CAST (tax*10 AS BIGINT))) FROM lineitem WHERE CAST(tax*10 AS BIGINT) <> 0 GROUP BY linenumber"); + + // invalid cast + assertQuery( + "SELECT TRY(CAST(IF(round(totalprice) % 2 = 0, CAST(totalprice AS VARCHAR), '^&$' || CAST(totalprice AS VARCHAR)) AS DOUBLE)) FROM orders", + "SELECT CASE WHEN round(totalprice) % 2 = 0 THEN totalprice ELSE null END FROM orders"); + + // invalid function argument + assertQuery( + "SELECT COUNT(TRY(to_base(100, CAST(round(totalprice/100) AS BIGINT)))) FROM orders", + "SELECT SUM(CASE WHEN CAST(round(totalprice/100) AS BIGINT) BETWEEN 2 AND 36 THEN 1 ELSE 0 END) FROM orders"); + + // as part of a complex expression + assertQuery( + "SELECT COUNT(CAST(orderkey AS VARCHAR) || TRY(to_base(100, CAST(round(totalprice/100) AS BIGINT)))) FROM orders", + "SELECT SUM(CASE WHEN CAST(round(totalprice/100) AS BIGINT) BETWEEN 2 AND 36 THEN 1 ELSE 0 END) FROM orders"); + + // missing function argument + assertQueryFails("SELECT TRY()", "line 1:8: The 'try' function must have exactly one argument"); + + // check that TRY is not pushed down + assertQueryFails("SELECT TRY(x) IS NULL FROM (SELECT 1/y AS x FROM (VALUES 1, 2, 3, 0, 4) t(y))", "Division by zero"); + assertQuery("SELECT x IS NULL FROM (SELECT TRY(1/y) AS x FROM (VALUES 3, 0, 4) t(y))", "VALUES false, true, false"); + + // test try with lambda function + assertQuery("SELECT TRY(apply(5, x -> x + 1) / 0)", "SELECT NULL"); + assertQuery("SELECT TRY(apply(5 + RANDOM(1), x -> x + 1) / 0)", "SELECT NULL"); + assertQuery("SELECT apply(5 + RANDOM(1), x -> x + TRY(1 / 0))", "SELECT NULL"); + + // test try with invalid JSON + assertQuery("SELECT JSON_FORMAT(TRY(JSON 'INVALID'))", "SELECT NULL"); + assertQuery("SELECT JSON_FORMAT(TRY (JSON_PARSE('INVALID')))", "SELECT NULL"); + + // tests that might be constant folded + assertQuery("SELECT 
TRY(CAST(NULL AS BIGINT))", "SELECT NULL"); + assertQuery("SELECT TRY(CAST('123' AS BIGINT))", "SELECT 123L"); + assertQuery("SELECT TRY(CAST('foo' AS BIGINT))", "SELECT NULL"); + assertQuery("SELECT TRY(CAST('foo' AS BIGINT)) + TRY(CAST('123' AS BIGINT))", "SELECT NULL"); + assertQuery("SELECT TRY(CAST(CAST(123 AS VARCHAR) AS BIGINT))", "SELECT 123L"); + assertQuery("SELECT COALESCE(CAST(CONCAT('123', CAST(123 AS VARCHAR)) AS BIGINT), 0)", "SELECT 123123L"); + assertQuery("SELECT TRY(CAST(CONCAT('hello', CAST(123 AS VARCHAR)) AS BIGINT))", "SELECT NULL"); + assertQuery("SELECT COALESCE(TRY(CAST(CONCAT('a', CAST(123 AS VARCHAR)) AS INTEGER)), 0)", "SELECT 0"); + assertQuery("SELECT COALESCE(TRY(CAST(CONCAT('a', CAST(123 AS VARCHAR)) AS BIGINT)), 0)", "SELECT 0L"); + assertQuery("SELECT 123 + TRY(ABS(-9223372036854775807 - 1))", "SELECT NULL"); + assertQuery("SELECT JSON_FORMAT(TRY(JSON '[]')) || '123'", "SELECT '[]123'"); + assertQuery("SELECT JSON_FORMAT(TRY(JSON 'INVALID')) || '123'", "SELECT NULL"); + assertQuery("SELECT TRY(2/1)", "SELECT 2"); + assertQuery("SELECT TRY(2/0)", "SELECT null"); + assertQuery("SELECT COALESCE(TRY(2/0), 0)", "SELECT 0"); + assertQuery("SELECT TRY(ABS(-2))", "SELECT 2"); + } + + @Test + public void testExchangeWithProjectionPushDown() + { + assertQuery( + "SELECT * FROM \n" + + " (SELECT orderkey + 1 orderkey FROM (SELECT * FROM orders ORDER BY orderkey LIMIT 100)) o \n" + + "JOIN \n" + + " (SELECT orderkey + 1 orderkey FROM (SELECT * FROM orders ORDER BY orderkey LIMIT 100)) o1 \n" + + "ON (o.orderkey = o1.orderkey)"); + } + + @Test + public void testUnionWithProjectionPushDown() + { + assertQuery("SELECT key + 5, status FROM (SELECT orderkey key, orderstatus status FROM orders UNION ALL SELECT orderkey key, linestatus status FROM lineitem)"); + } + + @Test + public void testUnion() + { + assertQuery("SELECT orderkey FROM orders UNION SELECT custkey FROM orders"); + assertQuery("SELECT 123 UNION DISTINCT SELECT 123 UNION ALL SELECT 
123"); + assertQuery("SELECT NULL UNION SELECT NULL"); + assertQuery("SELECT NULL, NULL UNION ALL SELECT NULL, NULL FROM nation"); + assertQuery("SELECT 'x', 'y' UNION ALL SELECT name, name FROM nation"); + + // mixed single-node vs fixed vs source-distributed + assertQuery("SELECT orderkey FROM orders UNION ALL SELECT 123 UNION ALL (SELECT custkey FROM orders GROUP BY custkey)"); + } + + @Test + public void testUnionDistinct() + { + assertQuery("SELECT orderkey FROM orders UNION DISTINCT SELECT custkey FROM orders"); + } + + @Test + public void testUnionAll() + { + assertQuery("SELECT orderkey FROM orders UNION ALL SELECT custkey FROM orders"); + } + + @Test + public void testUnionArray() + { + assertQuery("SELECT a[1] FROM (SELECT ARRAY[1] UNION ALL SELECT ARRAY[1]) t(a) LIMIT 1", "SELECT 1"); + } + + @Test + public void testChainedUnionsWithOrder() + { + assertQueryOrdered( + "SELECT orderkey FROM orders UNION (SELECT custkey FROM orders UNION SELECT linenumber FROM lineitem) UNION ALL SELECT orderkey FROM lineitem ORDER BY orderkey"); + } + + @Test + public void testUnionWithTopN() + { + assertQuery("SELECT * FROM (" + + " SELECT regionkey FROM nation " + + " UNION ALL " + + " SELECT nationkey FROM nation" + + ") t(a) " + + "ORDER BY a LIMIT 1", + "SELECT 0"); + } + + @Test + public void testUnionWithJoin() + { + assertQuery( + "SELECT * FROM (" + + " SELECT orderdate ds, orderkey FROM orders " + + " UNION ALL " + + " SELECT shipdate ds, orderkey FROM lineitem) a " + + "JOIN orders o ON (a.orderkey = o.orderkey)"); + } + + @Test + public void testUnionWithAggregation() + { + assertQuery( + "SELECT regionkey, count(*) FROM (" + + " SELECT regionkey FROM nation " + + " UNION ALL " + + " SELECT * FROM (VALUES 2, 100) t(regionkey)) " + + "GROUP BY regionkey", + "SELECT * FROM (VALUES (0, 5), (1, 5), (2, 6), (3, 5), (4, 5), (100, 1))"); + + assertQuery( + "SELECT ds, count(*) FROM (" + + " SELECT orderdate ds, orderkey FROM orders " + + " UNION ALL " + + " SELECT 
shipdate ds, orderkey FROM lineitem) a " + + "GROUP BY ds"); + assertQuery( + "SELECT ds, count(*) FROM (" + + " SELECT orderdate ds, orderkey FROM orders " + + " UNION " + + " SELECT shipdate ds, orderkey FROM lineitem) a " + + "GROUP BY ds"); + assertQuery( + "SELECT ds, count(DISTINCT orderkey) FROM (" + + " SELECT orderdate ds, orderkey FROM orders " + + " UNION " + + " SELECT shipdate ds, orderkey FROM lineitem) a " + + "GROUP BY ds"); + assertQuery( + "SELECT clerk, count(DISTINCT orderstatus) FROM (" + + "SELECT * FROM orders WHERE orderkey=0 " + + " UNION ALL " + + "SELECT * FROM orders WHERE orderkey<>0) " + + "GROUP BY clerk"); + assertQuery( + "SELECT count(clerk) FROM (" + + "SELECT clerk FROM orders WHERE orderkey=0 " + + " UNION ALL " + + "SELECT clerk FROM orders WHERE orderkey<>0) " + + "GROUP BY clerk"); + assertQuery( + "SELECT count(orderkey), sum(sc) FROM (" + + " SELECT sum(custkey) sc, orderkey FROM (" + + " SELECT custkey,orderkey, orderkey+1 FROM orders WHERE orderkey=0" + + " UNION ALL " + + " SELECT custkey,orderkey,orderkey+1 FROM orders WHERE orderkey<>0) " + + " GROUP BY orderkey)"); + + assertQuery( + "SELECT count(orderkey), sum(sc) FROM (\n" + + " SELECT sum(custkey) sc, orderkey FROM (\n" + + " SELECT custkey, orderkey, orderkey+1, orderstatus FROM orders WHERE orderkey=0\n" + + " UNION ALL \n" + + " SELECT custkey, orderkey, orderkey+1, orderstatus FROM orders WHERE orderkey<>0) \n" + + " GROUP BY GROUPING SETS ((orderkey, orderstatus), (orderkey)))", + "SELECT count(orderkey), sum(sc) FROM (\n" + + " SELECT sum(custkey) sc, orderkey FROM (\n" + + " SELECT custkey, orderkey, orderkey+1, orderstatus FROM orders WHERE orderkey=0\n" + + " UNION ALL \n" + + " SELECT custkey, orderkey, orderkey+1, orderstatus FROM orders WHERE orderkey<>0) \n" + + " GROUP BY orderkey, orderstatus \n" + + " \n" + + " UNION ALL \n" + + " \n" + + " SELECT sum(custkey) sc, orderkey FROM (\n" + + " SELECT custkey, orderkey, orderkey+1, orderstatus FROM 
orders WHERE orderkey=0\n" + + " UNION ALL \n" + + " SELECT custkey, orderkey, orderkey+1, orderstatus FROM orders WHERE orderkey<>0) \n" + + " GROUP BY orderkey)"); + } + + @Test + public void testUnionWithUnionAndAggregation() + { + assertQuery( + "SELECT count(*) FROM (" + + "SELECT 1 FROM nation GROUP BY regionkey " + + "UNION ALL " + + "SELECT 1 FROM (" + + " SELECT 1 FROM nation " + + " UNION ALL " + + " SELECT 1 FROM nation))"); + assertQuery( + "SELECT count(*) FROM (" + + "SELECT 1 FROM (" + + " SELECT 1 FROM nation " + + " UNION ALL " + + " SELECT 1 FROM nation)" + + "UNION ALL " + + "SELECT 1 FROM nation GROUP BY regionkey)"); + } + + @Test + public void testUnionWithAggregationAndTableScan() + { + assertQuery( + "SELECT orderkey, 1 FROM orders " + + "UNION ALL " + + "SELECT orderkey, count(*) FROM orders GROUP BY 1", + "SELECT orderkey, 1 FROM orders " + + "UNION ALL " + + "SELECT orderkey, count(*) FROM orders GROUP BY orderkey"); + + assertQuery( + "SELECT orderkey, count(*) FROM orders GROUP BY 1 " + + "UNION ALL " + + "SELECT orderkey, 1 FROM orders", + "SELECT orderkey, count(*) FROM orders GROUP BY orderkey " + + "UNION ALL " + + "SELECT orderkey, 1 FROM orders"); + } + + @Test + public void testUnionWithAggregationAndJoin() + { + assertQuery( + "SELECT * FROM ( " + + "SELECT orderkey, count(*) FROM (" + + " SELECT orderdate ds, orderkey FROM orders " + + " UNION ALL " + + " SELECT shipdate ds, orderkey FROM lineitem) a " + + "GROUP BY orderkey) t " + + "JOIN orders o " + + "ON (o.orderkey = t.orderkey)"); + } + + @Test + public void testUnionWithJoinOnNonTranslateableSymbols() + { + assertQuery("SELECT *\n" + + "FROM (SELECT orderdate ds, orderkey\n" + + " FROM orders\n" + + " UNION ALL\n" + + " SELECT shipdate ds, orderkey\n" + + " FROM lineitem) a\n" + + "JOIN orders o\n" + + "ON (substr(cast(a.ds AS VARCHAR), 6, 2) = substr(cast(o.orderdate AS VARCHAR), 6, 2) AND a.orderkey = o.orderkey)"); + } + + @Test + public void testSubqueryUnion() + { + 
assertQueryOrdered("SELECT * FROM (SELECT orderkey FROM orders UNION SELECT custkey FROM orders UNION SELECT orderkey FROM orders) ORDER BY orderkey LIMIT 1000"); + } + + @Test + public void testUnionWithFilterNotInSelect() + { + assertQuery("SELECT orderkey, orderdate FROM orders WHERE custkey < 1000 UNION ALL SELECT orderkey, shipdate FROM lineitem WHERE linenumber < 2000"); + assertQuery("SELECT orderkey, orderdate FROM orders UNION ALL SELECT orderkey, shipdate FROM lineitem WHERE linenumber < 2000"); + assertQuery("SELECT orderkey, orderdate FROM orders WHERE custkey < 1000 UNION ALL SELECT orderkey, shipdate FROM lineitem"); + } + + @Test + public void testSelectOnlyUnion() + { + assertQuery("SELECT 123, 'foo' UNION ALL SELECT 999, 'bar'"); + } + + @Test + public void testMultiColumnUnionAll() + { + assertQuery("SELECT * FROM orders UNION ALL SELECT * FROM orders"); + } + + @Test + public void testUnionRequiringCoercion() + { + assertQuery("VALUES 1 UNION ALL VALUES 1.0, 2", "SELECT * FROM (VALUES 1) UNION ALL SELECT * FROM (VALUES 1.0, 2)"); + assertQuery("(VALUES 1) UNION ALL (VALUES 1.0, 2)", "SELECT * FROM (VALUES 1) UNION ALL SELECT * FROM (VALUES 1.0, 2)"); + assertQuery("SELECT 0, 0 UNION ALL SELECT 1.0, 0"); // This test case generates a RelationPlan whose .outputSymbols is different .root.outputSymbols + assertQuery("SELECT 0, 0, 0, 0 UNION ALL SELECT 0.0, 0.0, 0, 0"); // This test case generates a RelationPlan where multiple positions share the same symbol + assertQuery("SELECT * FROM (VALUES 1) UNION ALL SELECT * FROM (VALUES 1.0, 2)"); + + assertQuery("SELECT * FROM (VALUES 1) UNION SELECT * FROM (VALUES 1.0, 2)", "VALUES 1.0, 2.0"); // H2 produces incorrect result for the original query: 1.0 1.0 2.0 + assertQuery("SELECT * FROM (VALUES (2, 2)) UNION SELECT * FROM (VALUES (1, 1.0))"); + assertQuery("SELECT * FROM (VALUES (NULL, NULL)) UNION SELECT * FROM (VALUES (1, 1.0))"); + assertQuery("SELECT * FROM (VALUES (NULL, NULL)) UNION ALL SELECT * 
FROM (VALUES (NULL, 1.0))"); + + // Test for https://github.com/prestodb/presto/issues/7496 + // Cast varchar(1) -> varchar(4) for orderstatus in first source of union was not added. It was not done for type-only coercions. + // Then as a result of predicate pushdown orderstatus (without cast) was compared with CAST('aaa' AS varchar(4)) which trigger checkArgument that + // both types of comparison should be equal in DomainTranslator. + assertQuery("SELECT a FROM " + + "(" + + " (SELECT orderstatus AS a FROM orders LIMIT 1) " + + "UNION ALL " + + " SELECT 'aaaa' AS a" + + ") " + + "WHERE a = 'aaa'"); + } + + @Test + public void testTableQuery() + { + assertQuery("TABLE orders", "SELECT * FROM orders"); + } + + @Test + public void testTableQueryOrderLimit() + { + assertQueryOrdered("TABLE orders ORDER BY orderkey LIMIT 10", "SELECT * FROM orders ORDER BY orderkey LIMIT 10"); + } + + @Test + public void testTableQueryInUnion() + { + assertQuery("(SELECT * FROM orders ORDER BY orderkey LIMIT 10) UNION ALL TABLE orders", "(SELECT * FROM orders ORDER BY orderkey LIMIT 10) UNION ALL SELECT * FROM orders"); + } + + @Test + public void testTableAsSubquery() + { + assertQueryOrdered("(TABLE orders) ORDER BY orderkey", "(SELECT * FROM orders) ORDER BY orderkey"); + } + + @Test + public void testVariance() + { + // int64 + assertQuery("SELECT VAR_SAMP(custkey) FROM orders"); + assertQuery("SELECT VAR_SAMP(custkey) FROM (SELECT custkey FROM orders ORDER BY custkey LIMIT 2) T"); + assertQuery("SELECT VAR_SAMP(custkey) FROM (SELECT custkey FROM orders ORDER BY custkey LIMIT 1) T"); + assertQuery("SELECT VAR_SAMP(custkey) FROM (SELECT custkey FROM orders LIMIT 0) T"); + + // double + assertQuery("SELECT VAR_SAMP(totalprice) FROM orders"); + assertQuery("SELECT VAR_SAMP(totalprice) FROM (SELECT totalprice FROM orders ORDER BY totalprice LIMIT 2) T"); + assertQuery("SELECT VAR_SAMP(totalprice) FROM (SELECT totalprice FROM orders ORDER BY totalprice LIMIT 1) T"); + 
assertQuery("SELECT VAR_SAMP(totalprice) FROM (SELECT totalprice FROM orders LIMIT 0) T"); + } + + @Test + public void testVariancePop() + { + // int64 + assertQuery("SELECT VAR_POP(custkey) FROM orders"); + assertQuery("SELECT VAR_POP(custkey) FROM (SELECT custkey FROM orders ORDER BY custkey LIMIT 2) T"); + assertQuery("SELECT VAR_POP(custkey) FROM (SELECT custkey FROM orders ORDER BY custkey LIMIT 1) T"); + assertQuery("SELECT VAR_POP(custkey) FROM (SELECT custkey FROM orders LIMIT 0) T"); + + // double + assertQuery("SELECT VAR_POP(totalprice) FROM orders"); + assertQuery("SELECT VAR_POP(totalprice) FROM (SELECT totalprice FROM orders ORDER BY totalprice LIMIT 2) T"); + assertQuery("SELECT VAR_POP(totalprice) FROM (SELECT totalprice FROM orders ORDER BY totalprice LIMIT 1) T"); + assertQuery("SELECT VAR_POP(totalprice) FROM (SELECT totalprice FROM orders LIMIT 0) T"); + } + + @Test + public void testStdDev() + { + // int64 + assertQuery("SELECT STDDEV_SAMP(custkey) FROM orders"); + assertQuery("SELECT STDDEV_SAMP(custkey) FROM (SELECT custkey FROM orders ORDER BY custkey LIMIT 2) T"); + assertQuery("SELECT STDDEV_SAMP(custkey) FROM (SELECT custkey FROM orders ORDER BY custkey LIMIT 1) T"); + assertQuery("SELECT STDDEV_SAMP(custkey) FROM (SELECT custkey FROM orders LIMIT 0) T"); + + // double + assertQuery("SELECT STDDEV_SAMP(totalprice) FROM orders"); + assertQuery("SELECT STDDEV_SAMP(totalprice) FROM (SELECT totalprice FROM orders ORDER BY totalprice LIMIT 2) T"); + assertQuery("SELECT STDDEV_SAMP(totalprice) FROM (SELECT totalprice FROM orders ORDER BY totalprice LIMIT 1) T"); + assertQuery("SELECT STDDEV_SAMP(totalprice) FROM (SELECT totalprice FROM orders LIMIT 0) T"); + } + + @Test + public void testStdDevPop() + { + // int64 + assertQuery("SELECT STDDEV_POP(custkey) FROM orders"); + assertQuery("SELECT STDDEV_POP(custkey) FROM (SELECT custkey FROM orders ORDER BY custkey LIMIT 2) T"); + assertQuery("SELECT STDDEV_POP(custkey) FROM (SELECT custkey FROM 
orders ORDER BY custkey LIMIT 1) T"); + assertQuery("SELECT STDDEV_POP(custkey) FROM (SELECT custkey FROM orders LIMIT 0) T"); + + // double + assertQuery("SELECT STDDEV_POP(totalprice) FROM orders"); + assertQuery("SELECT STDDEV_POP(totalprice) FROM (SELECT totalprice FROM orders ORDER BY totalprice LIMIT 2) T"); + assertQuery("SELECT STDDEV_POP(totalprice) FROM (SELECT totalprice FROM orders ORDER BY totalprice LIMIT 1) T"); + assertQuery("SELECT STDDEV_POP(totalprice) FROM (SELECT totalprice FROM orders LIMIT 0) T"); + } + + @Test + public void testDefaultExplainTextFormat() + { + String query = "SELECT * FROM orders"; + MaterializedResult result = computeActual("EXPLAIN " + query); + assertEquals(getOnlyElement(result.getOnlyColumnAsSet()), getExplainPlan(query, LOGICAL)); + } + + @Test + public void testDefaultExplainGraphvizFormat() + { + String query = "SELECT * FROM orders"; + MaterializedResult result = computeActual("EXPLAIN (FORMAT GRAPHVIZ) " + query); + assertEquals(getOnlyElement(result.getOnlyColumnAsSet()), getGraphvizExplainPlan(query, LOGICAL)); + } + + @Test + public void testLogicalExplain() + { + String query = "SELECT * FROM orders"; + MaterializedResult result = computeActual("EXPLAIN (TYPE LOGICAL) " + query); + assertEquals(getOnlyElement(result.getOnlyColumnAsSet()), getExplainPlan(query, LOGICAL)); + } + + @Test + public void testIoExplain() + { + String query = "SELECT * FROM orders"; + MaterializedResult result = computeActual("EXPLAIN (TYPE IO) " + query); + assertEquals(getOnlyElement(result.getOnlyColumnAsSet()), getExplainPlan(query, IO)); + } + + @Test + public void testLogicalExplainTextFormat() + { + String query = "SELECT * FROM orders"; + MaterializedResult result = computeActual("EXPLAIN (TYPE LOGICAL, FORMAT TEXT) " + query); + assertEquals(getOnlyElement(result.getOnlyColumnAsSet()), getExplainPlan(query, LOGICAL)); + } + + @Test + public void testLogicalExplainGraphvizFormat() + { + String query = "SELECT * FROM orders"; + 
MaterializedResult result = computeActual("EXPLAIN (TYPE LOGICAL, FORMAT GRAPHVIZ) " + query); + assertEquals(getOnlyElement(result.getOnlyColumnAsSet()), getGraphvizExplainPlan(query, LOGICAL)); + } + + @Test + public void testDistributedExplain() + { + String query = "SELECT * FROM orders"; + MaterializedResult result = computeActual("EXPLAIN (TYPE DISTRIBUTED) " + query); + assertEquals(getOnlyElement(result.getOnlyColumnAsSet()), getExplainPlan(query, DISTRIBUTED)); + } + + @Test + public void testDistributedExplainTextFormat() + { + String query = "SELECT * FROM orders"; + MaterializedResult result = computeActual("EXPLAIN (TYPE DISTRIBUTED, FORMAT TEXT) " + query); + assertEquals(getOnlyElement(result.getOnlyColumnAsSet()), getExplainPlan(query, DISTRIBUTED)); + } + + @Test + public void testDistributedExplainGraphvizFormat() + { + String query = "SELECT * FROM orders"; + MaterializedResult result = computeActual("EXPLAIN (TYPE DISTRIBUTED, FORMAT GRAPHVIZ) " + query); + assertEquals(getOnlyElement(result.getOnlyColumnAsSet()), getGraphvizExplainPlan(query, DISTRIBUTED)); + } + + @Test + public void testExplainOfExplain() + { + String query = "EXPLAIN SELECT * FROM orders"; + MaterializedResult result = computeActual("EXPLAIN " + query); + assertEquals(getOnlyElement(result.getOnlyColumnAsSet()), getExplainPlan(query, LOGICAL)); + } + + @Test + public void testExplainOfExplainAnalyze() + { + String query = "EXPLAIN ANALYZE SELECT * FROM orders"; + MaterializedResult result = computeActual("EXPLAIN " + query); + assertEquals(getOnlyElement(result.getOnlyColumnAsSet()), getExplainPlan(query, LOGICAL)); + } + + @Test + public void testExplainDdl() + { + assertExplainDdl("CREATE TABLE foo (pk bigint)", "CREATE TABLE foo"); + assertExplainDdl("CREATE VIEW foo AS SELECT * FROM orders", "CREATE VIEW foo"); + assertExplainDdl("DROP TABLE orders"); + assertExplainDdl("DROP VIEW view"); + assertExplainDdl("ALTER TABLE orders RENAME TO new_name"); + 
assertExplainDdl("ALTER TABLE orders RENAME COLUMN orderkey TO new_column_name"); + assertExplainDdl("SET SESSION foo = 'bar'"); + assertExplainDdl("PREPARE my_query FROM SELECT * FROM orders", "PREPARE my_query"); + assertExplainDdl("DEALLOCATE PREPARE my_query"); + assertExplainDdl("RESET SESSION foo"); + assertExplainDdl("START TRANSACTION"); + assertExplainDdl("COMMIT"); + assertExplainDdl("ROLLBACK"); + } + + private void assertExplainDdl(String query) + { + assertExplainDdl(query, query); + } + + private void assertExplainDdl(String query, String expected) + { + MaterializedResult result = computeActual("EXPLAIN " + query); + assertEquals(getOnlyElement(result.getOnlyColumnAsSet()), expected); + } + + @Test + public void testExplainValidate() + { + MaterializedResult result = computeActual("EXPLAIN (TYPE VALIDATE) SELECT 1"); + assertEquals(result.getOnlyValue(), true); + } + + @Test + public void testExplainValidateThrows() + { + assertQueryFails("EXPLAIN (TYPE VALIDATE) SELECT x", "line 1:32: Column 'x' cannot be resolved"); + } + + @Test + public void testExplainExecute() + { + Session session = Session.builder(getSession()) + .addPreparedStatement("my_query", "SELECT * FROM orders") + .build(); + MaterializedResult result = computeActual(session, "EXPLAIN (TYPE LOGICAL) EXECUTE my_query"); + assertEquals(getOnlyElement(result.getOnlyColumnAsSet()), getExplainPlan("SELECT * FROM orders", LOGICAL)); + } + + @Test + public void testExplainExecuteWithUsing() + { + Session session = Session.builder(getSession()) + .addPreparedStatement("my_query", "SELECT * FROM orders WHERE orderkey < ?") + .build(); + MaterializedResult result = computeActual(session, "EXPLAIN (TYPE LOGICAL) EXECUTE my_query USING 7"); + assertEquals(getOnlyElement(result.getOnlyColumnAsSet()), getExplainPlan("SELECT * FROM orders WHERE orderkey < 7", LOGICAL)); + } + + @Test + public void testExplainSetSessionWithUsing() + { + Session session = Session.builder(getSession()) + 
.addPreparedStatement("my_query", "SET SESSION foo = ?") + .build(); + MaterializedResult result = computeActual(session, "EXPLAIN (TYPE LOGICAL) EXECUTE my_query USING 7"); + assertEquals( + getOnlyElement(result.getOnlyColumnAsSet()), + "SET SESSION foo = ?\n" + + "Parameters: [7]"); + } + + @Test + public void testShowTablesLikeWithEscape() + { + assertQueryFails("SHOW TABLES IN a LIKE '%$_%' ESCAPE", "line 1:36: mismatched input ''. Expecting: "); + assertQueryFails("SHOW TABLES LIKE 't$_%' ESCAPE ''", "Escape string must be a single character"); + assertQueryFails("SHOW TABLES LIKE 't$_%' ESCAPE '$$'", "Escape string must be a single character"); + + Set allTables = computeActual("SHOW TABLES FROM information_schema").getOnlyColumnAsSet(); + assertEquals(allTables, computeActual("SHOW TABLES FROM information_schema LIKE '%_%'").getOnlyColumnAsSet()); + Set result = computeActual("SHOW TABLES FROM information_schema LIKE '%$_%' ESCAPE '$'").getOnlyColumnAsSet(); + assertNotEquals(allTables, result); + assertThat(result).contains("table_privileges").allMatch(schemaName -> ((String) schemaName).contains("_")); + } + + @Test + public void testShowCatalogs() + { + MaterializedResult result = computeActual("SHOW CATALOGS"); + assertTrue(result.getOnlyColumnAsSet().contains(getSession().getCatalog().get())); + } + + @Test + public void testShowCatalogsLike() + { + MaterializedResult result = computeActual(format("SHOW CATALOGS LIKE '%s'", getSession().getCatalog().get())); + assertEquals(result.getOnlyColumnAsSet(), ImmutableSet.of(getSession().getCatalog().get())); + } + + @Test + public void testShowFunctions() + { + MaterializedResult result = computeActual("SHOW FUNCTIONS"); + ImmutableMultimap functions = Multimaps.index(result.getMaterializedRows(), input -> { + assertEquals(input.getFieldCount(), 6); + return (String) input.getField(0); + }); + + assertTrue(functions.containsKey("avg"), "Expected function names " + functions + " to contain 'avg'"); + 
assertEquals(functions.get("avg").asList().size(), 6); + assertEquals(functions.get("avg").asList().get(0).getField(1), "decimal(p,s)"); + assertEquals(functions.get("avg").asList().get(0).getField(2), "decimal(p,s)"); + assertEquals(functions.get("avg").asList().get(0).getField(3), "aggregate"); + assertEquals(functions.get("avg").asList().get(1).getField(1), "double"); + assertEquals(functions.get("avg").asList().get(1).getField(2), "bigint"); + assertEquals(functions.get("avg").asList().get(1).getField(3), "aggregate"); + assertEquals(functions.get("avg").asList().get(2).getField(1), "double"); + assertEquals(functions.get("avg").asList().get(2).getField(2), "double"); + assertEquals(functions.get("avg").asList().get(2).getField(3), "aggregate"); + assertEquals(functions.get("avg").asList().get(3).getField(1), "interval day to second"); + assertEquals(functions.get("avg").asList().get(3).getField(2), "interval day to second"); + assertEquals(functions.get("avg").asList().get(3).getField(3), "aggregate"); + assertEquals(functions.get("avg").asList().get(4).getField(1), "interval year to month"); + assertEquals(functions.get("avg").asList().get(4).getField(2), "interval year to month"); + assertEquals(functions.get("avg").asList().get(4).getField(3), "aggregate"); + assertEquals(functions.get("avg").asList().get(5).getField(1), "real"); + assertEquals(functions.get("avg").asList().get(5).getField(2), "real"); + assertEquals(functions.get("avg").asList().get(5).getField(3), "aggregate"); + + assertTrue(functions.containsKey("abs"), "Expected function names " + functions + " to contain 'abs'"); + assertEquals(functions.get("abs").asList().get(0).getField(3), "scalar"); + assertEquals(functions.get("abs").asList().get(0).getField(4), true); + + assertTrue(functions.containsKey("rand"), "Expected function names " + functions + " to contain 'rand'"); + assertEquals(functions.get("rand").asList().get(0).getField(3), "scalar"); + 
assertEquals(functions.get("rand").asList().get(0).getField(4), false); + + assertTrue(functions.containsKey("rank"), "Expected function names " + functions + " to contain 'rank'"); + assertEquals(functions.get("rank").asList().get(0).getField(3), "window"); + + assertTrue(functions.containsKey("rank"), "Expected function names " + functions + " to contain 'split_part'"); + assertEquals(functions.get("split_part").asList().get(0).getField(1), "varchar(x)"); + assertEquals(functions.get("split_part").asList().get(0).getField(2), "varchar(x), varchar(y), bigint"); + assertEquals(functions.get("split_part").asList().get(0).getField(3), "scalar"); + + assertFalse(functions.containsKey("like"), "Expected function names " + functions + " not to contain 'like'"); + } + + private static ZonedDateTime zonedDateTime(String value) + { + return ZONED_DATE_TIME_FORMAT.parse(value, ZonedDateTime::from); + } } diff --git a/presto-tests/src/test/java/io/prestosql/tests/TestDistributedEngineOnlyQueries.java b/presto-tests/src/test/java/io/prestosql/tests/TestDistributedEngineOnlyQueries.java index c87edf7dc307..1ce8b98726e2 100644 --- a/presto-tests/src/test/java/io/prestosql/tests/TestDistributedEngineOnlyQueries.java +++ b/presto-tests/src/test/java/io/prestosql/tests/TestDistributedEngineOnlyQueries.java @@ -13,9 +13,11 @@ */ package io.prestosql.tests; +import com.google.common.base.Strings; import io.prestosql.Session; import io.prestosql.testing.QueryRunner; import io.prestosql.tests.tpch.TpchQueryRunnerBuilder; +import org.intellij.lang.annotations.Language; import org.testng.annotations.Test; public class TestDistributedEngineOnlyQueries @@ -59,4 +61,45 @@ public void testDuplicatedRowCreateTable() assertQueryFails("CREATE TABLE test (a integer, OrderKey integer, LIKE orders INCLUDING PROPERTIES)", "line 1:49: Column name 'orderkey' specified more than once"); } + + @Test + public void testTooLongQuery() + { + // Generate a super-long query: SELECT x,x,x,x,x,... 
FROM (VALUES 1,2,3,4,5) t(x) + @Language("SQL") String longQuery = "SELECT x" + Strings.repeat(",x", 500_000) + " FROM (VALUES 1,2,3,4,5) t(x)"; + assertQueryFails(longQuery, "Query text length \\(1000037\\) exceeds the maximum length \\(1000000\\)"); + } + + @Test + public void testTooManyStages() + { + @Language("SQL") String query = "WITH\n" + + " t1 AS (SELECT nationkey AS x FROM nation where name='UNITED STATES'),\n" + + " t2 AS (SELECT a.x+b.x+c.x+d.x AS x FROM t1 a, t1 b, t1 c, t1 d),\n" + + " t3 AS (SELECT a.x+b.x+c.x+d.x AS x FROM t2 a, t2 b, t2 c, t2 d),\n" + + " t4 AS (SELECT a.x+b.x+c.x+d.x AS x FROM t3 a, t3 b, t3 c, t3 d),\n" + + " t5 AS (SELECT a.x+b.x+c.x+d.x AS x FROM t4 a, t4 b, t4 c, t4 d)\n" + + "SELECT x FROM t5\n"; + assertQueryFails(query, "Number of stages in the query \\([0-9]+\\) exceeds the allowed maximum \\([0-9]+\\).*"); + } + + @Test + public void testRowSubscriptWithReservedKeyword() + { + // Subscript over field named after reserved keyword. This test needs to run in distributed + // mode, as it uncovers a problem during deserialization plan expressions + assertQuery( + "SELECT cast(row(1) AS row(\"cross\" bigint))[1]", + "VALUES 1"); + } + + @Test + public void testRowTypeWithReservedKeyword() + { + // This test is here because it only reproduces the issue (https://github.com/prestosql/presto/issues/1962) + // when running in distributed mode + assertQuery( + "SELECT cast(row(1) AS row(\"cross\" bigint)).\"cross\"", + "VALUES 1"); + } } diff --git a/presto-tests/src/test/java/io/prestosql/tests/TestDistributedSpilledQueries.java b/presto-tests/src/test/java/io/prestosql/tests/TestDistributedSpilledQueries.java index 7c07c674329f..edf96ce8ceea 100644 --- a/presto-tests/src/test/java/io/prestosql/tests/TestDistributedSpilledQueries.java +++ b/presto-tests/src/test/java/io/prestosql/tests/TestDistributedSpilledQueries.java @@ -69,11 +69,4 @@ public static DistributedQueryRunner createSpillingQueryRunner() throw e; } } - - @Override - 
public void testAssignUniqueId() - { - // TODO: disabled until https://github.com/prestodb/presto/issues/8926 is resolved - // due to long running query test created many spill files on disk. - } } diff --git a/presto-tests/src/test/java/io/prestosql/tests/TestTpchDistributedQueries.java b/presto-tests/src/test/java/io/prestosql/tests/TestTpchDistributedQueries.java index 7f1d0246acc2..50b4a53ee646 100644 --- a/presto-tests/src/test/java/io/prestosql/tests/TestTpchDistributedQueries.java +++ b/presto-tests/src/test/java/io/prestosql/tests/TestTpchDistributedQueries.java @@ -13,7 +13,6 @@ */ package io.prestosql.tests; -import com.google.common.base.Strings; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import io.airlift.json.JsonCodec; @@ -29,7 +28,6 @@ import io.prestosql.testing.QueryRunner; import io.prestosql.tests.tpch.TpchQueryRunnerBuilder; import io.prestosql.type.TypeDeserializer; -import org.intellij.lang.annotations.Language; import org.testng.annotations.Test; import java.util.Optional; @@ -39,7 +37,6 @@ import static io.prestosql.spi.type.VarcharType.createVarcharType; import static io.prestosql.testing.TestingSession.testSessionBuilder; import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertTrue; public class TestTpchDistributedQueries extends AbstractTestQueries @@ -52,7 +49,6 @@ protected QueryRunner createQueryRunner() } @Test - @Override public void testIoExplain() { String query = "SELECT * FROM orders"; @@ -88,14 +84,6 @@ public void testIoExplain() new IoPlanPrinter.IoPlan(ImmutableSet.of(input), Optional.empty(), totalEstimate)); } - @Test - public void testTooLongQuery() - { - // Generate a super-long query: SELECT x,x,x,x,x,... 
FROM (VALUES 1,2,3,4,5) t(x) - @Language("SQL") String longQuery = "SELECT x" + Strings.repeat(",x", 500_000) + " FROM (VALUES 1,2,3,4,5) t(x)"; - assertQueryFails(longQuery, "Query text length \\(1000037\\) exceeds the maximum length \\(1000000\\)"); - } - @Test public void testAnalyzePropertiesSystemTable() { @@ -109,35 +97,6 @@ public void testAnalyze() assertQueryFails("ANALYZE orders WITH (foo = 'bar')", ".* does not support analyze property 'foo'.*"); } - @Test - public void testTooManyStages() - { - @Language("SQL") String query = "WITH\n" + - " t1 AS (SELECT nationkey AS x FROM nation where name='UNITED STATES'),\n" + - " t2 AS (SELECT a.x+b.x+c.x+d.x AS x FROM t1 a, t1 b, t1 c, t1 d),\n" + - " t3 AS (SELECT a.x+b.x+c.x+d.x AS x FROM t2 a, t2 b, t2 c, t2 d),\n" + - " t4 AS (SELECT a.x+b.x+c.x+d.x AS x FROM t3 a, t3 b, t3 c, t3 d),\n" + - " t5 AS (SELECT a.x+b.x+c.x+d.x AS x FROM t4 a, t4 b, t4 c, t4 d)\n" + - "SELECT x FROM t5\n"; - assertQueryFails(query, "Number of stages in the query \\([0-9]+\\) exceeds the allowed maximum \\([0-9]+\\).*"); - } - - @Test - public void testTableSampleSystem() - { - int total = computeActual("SELECT orderkey FROM orders").getMaterializedRows().size(); - - boolean sampleSizeFound = false; - for (int i = 0; i < 100; i++) { - int sampleSize = computeActual("SELECT orderkey FROM ORDERS TABLESAMPLE SYSTEM (50)").getMaterializedRows().size(); - if (sampleSize > 0 && sampleSize < total) { - sampleSizeFound = true; - break; - } - } - assertTrue(sampleSizeFound, "Table sample returned unexpected number of rows"); - } - @Test @Override public void testShowTables() @@ -149,26 +108,6 @@ public void testShowTables() assertQueryFails("SHOW TABLES FROM sf0", "line 1:1: Schema 'sf0' does not exist"); } - @Test - public void testRowSubscriptWithReservedKeyword() - { - // Subscript over field named after reserved keyword. 
This test needs to run in distributed - // mode, as it uncovers a problem during deserialization plan expressions - assertQuery( - "SELECT cast(row(1) AS row(\"cross\" bigint))[1]", - "VALUES 1"); - } - - @Test - public void testRowTypeWithReservedKeyword() - { - // This test is here because it only reproduces the issue (https://github.com/prestosql/presto/issues/1962) - // when running in distributed mode - assertQuery( - "SELECT cast(row(1) AS row(\"cross\" bigint)).\"cross\"", - "VALUES 1"); - } - private Session createSession(String schemaName) { return testSessionBuilder() diff --git a/presto-thrift/src/test/java/io/prestosql/plugin/thrift/integration/TestThriftDistributedQueries.java b/presto-thrift/src/test/java/io/prestosql/plugin/thrift/integration/TestThriftDistributedQueries.java index 9b2e26601918..b69af411b4e2 100644 --- a/presto-thrift/src/test/java/io/prestosql/plugin/thrift/integration/TestThriftDistributedQueries.java +++ b/presto-thrift/src/test/java/io/prestosql/plugin/thrift/integration/TestThriftDistributedQueries.java @@ -28,10 +28,4 @@ protected QueryRunner createQueryRunner() { return createThriftQueryRunner(3, false, ImmutableMap.of()); } - - @Override - public void testAssignUniqueId() - { - // this test can take a long time - } } From f922c858734065b2a5a744aa2ef964c61a1dfc47 Mon Sep 17 00:00:00 2001 From: skrzypo987 Date: Tue, 31 Mar 2020 09:00:28 +0200 Subject: [PATCH 009/519] Add access control check while listing views --- .../hive/TestHiveIntegrationSmokeTest.java | 31 +++++++++++++++++++ .../InformationSchemaPageSource.java | 8 ++--- .../prestosql/metadata/MetadataListing.java | 13 ++++++++ .../testing/TestingAccessControlManager.java | 19 ++++++++++++ 4 files changed, 67 insertions(+), 4 deletions(-) diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java index 82ba9bad101c..1292e4a3a8c5 100644 --- 
a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java @@ -4733,6 +4733,37 @@ public void testShowColumnMetadata() assertUpdate("DROP TABLE " + tableName); } + @Test + public void testShowViews() + { + String viewName = "test_show_views"; + + Session testSession = testSessionBuilder() + .setIdentity(Identity.ofUser("test_view_access_owner")) + .setCatalog(getSession().getCatalog().get()) + .setSchema(getSession().getSchema().get()) + .build(); + + assertUpdate("CREATE VIEW " + viewName + " AS SELECT abs(1) as whatever"); + + String showViews = format("SELECT * FROM information_schema.views WHERE table_name = '%s'", viewName); + assertQuery( + format("SELECT table_name FROM information_schema.views WHERE table_name = '%s'", viewName), + format("VALUES '%s'", viewName)); + + executeExclusively(() -> { + try { + getQueryRunner().getAccessControl().denyTables(table -> false); + assertQueryReturnsEmptyResult(testSession, showViews); + } + finally { + getQueryRunner().getAccessControl().reset(); + } + }); + + assertUpdate("DROP VIEW " + viewName); + } + @Test public void testShowTablePrivileges() { diff --git a/presto-main/src/main/java/io/prestosql/connector/informationschema/InformationSchemaPageSource.java b/presto-main/src/main/java/io/prestosql/connector/informationschema/InformationSchemaPageSource.java index a3712abbbe73..880a002239c3 100644 --- a/presto-main/src/main/java/io/prestosql/connector/informationschema/InformationSchemaPageSource.java +++ b/presto-main/src/main/java/io/prestosql/connector/informationschema/InformationSchemaPageSource.java @@ -17,7 +17,6 @@ import com.google.common.collect.ImmutableList; import io.prestosql.Session; import io.prestosql.metadata.Metadata; -import io.prestosql.metadata.QualifiedObjectName; import io.prestosql.metadata.QualifiedTablePrefix; import io.prestosql.security.AccessControl; import 
io.prestosql.spi.Page; @@ -53,6 +52,7 @@ import static com.google.common.collect.Sets.union; import static io.prestosql.connector.informationschema.InformationSchemaMetadata.defaultPrefixes; import static io.prestosql.connector.informationschema.InformationSchemaMetadata.isTablesEnumeratingTable; +import static io.prestosql.metadata.MetadataListing.getViews; import static io.prestosql.metadata.MetadataListing.listSchemas; import static io.prestosql.metadata.MetadataListing.listTableColumns; import static io.prestosql.metadata.MetadataListing.listTablePrivileges; @@ -288,11 +288,11 @@ private void addTablesRecords(QualifiedTablePrefix prefix) private void addViewsRecords(QualifiedTablePrefix prefix) { - for (Map.Entry entry : metadata.getViews(session, prefix).entrySet()) { + for (Map.Entry entry : getViews(session, metadata, accessControl, prefix).entrySet()) { addRecord( - entry.getKey().getCatalogName(), + prefix.getCatalogName(), entry.getKey().getSchemaName(), - entry.getKey().getObjectName(), + entry.getKey().getTableName(), entry.getValue().getOriginalSql()); if (isLimitExhausted()) { return; diff --git a/presto-main/src/main/java/io/prestosql/metadata/MetadataListing.java b/presto-main/src/main/java/io/prestosql/metadata/MetadataListing.java index 6052beaf4073..f1c32ffc66d3 100644 --- a/presto-main/src/main/java/io/prestosql/metadata/MetadataListing.java +++ b/presto-main/src/main/java/io/prestosql/metadata/MetadataListing.java @@ -22,6 +22,7 @@ import io.prestosql.security.AccessControl; import io.prestosql.spi.connector.CatalogSchemaTableName; import io.prestosql.spi.connector.ColumnMetadata; +import io.prestosql.spi.connector.ConnectorViewDefinition; import io.prestosql.spi.connector.SchemaTableName; import io.prestosql.spi.security.GrantInfo; @@ -75,6 +76,18 @@ public static Set listViews(Session session, Metadata metadata, return accessControl.filterTables(session.toSecurityContext(), prefix.getCatalogName(), tableNames); } + public static Map 
getViews(Session session, Metadata metadata, AccessControl accessControl, QualifiedTablePrefix prefix) + { + Map views = metadata.getViews(session, prefix).entrySet().stream() + .collect(toImmutableMap(entry -> entry.getKey().asSchemaTableName(), Entry::getValue)); + + Set accessible = accessControl.filterTables(session.toSecurityContext(), prefix.getCatalogName(), views.keySet()); + + return views.entrySet().stream() + .filter(entry -> accessible.contains(entry.getKey())) + .collect(toImmutableMap(Entry::getKey, Entry::getValue)); + } + public static Set listTablePrivileges(Session session, Metadata metadata, AccessControl accessControl, QualifiedTablePrefix prefix) { List grants = metadata.listTablePrivileges(session, prefix); diff --git a/presto-main/src/main/java/io/prestosql/testing/TestingAccessControlManager.java b/presto-main/src/main/java/io/prestosql/testing/TestingAccessControlManager.java index 4e5d9abfaaa4..398c31a9a82c 100644 --- a/presto-main/src/main/java/io/prestosql/testing/TestingAccessControlManager.java +++ b/presto-main/src/main/java/io/prestosql/testing/TestingAccessControlManager.java @@ -20,6 +20,7 @@ import io.prestosql.security.SecurityContext; import io.prestosql.spi.connector.CatalogSchemaName; import io.prestosql.spi.connector.CatalogSchemaTableName; +import io.prestosql.spi.connector.SchemaTableName; import io.prestosql.spi.security.Identity; import io.prestosql.spi.security.ViewExpression; import io.prestosql.spi.type.Type; @@ -112,6 +113,7 @@ public class TestingAccessControlManager private final Map> rowFilters = new HashMap<>(); private final Map> columnMasks = new HashMap<>(); private Predicate deniedCatalogs = s -> true; + private Predicate deniedTables = s -> true; @Inject public TestingAccessControlManager(TransactionManager transactionManager) @@ -155,6 +157,7 @@ public void reset() { denyPrivileges.clear(); deniedCatalogs = s -> true; + deniedTables = s -> true; rowFilters.clear(); columnMasks.clear(); } @@ -164,6 +167,11 @@ 
public void denyCatalogs(Predicate deniedCatalogs) this.deniedCatalogs = this.deniedCatalogs.and(deniedCatalogs); } + public void denyTables(Predicate deniedTables) + { + this.deniedTables = this.deniedTables.and(deniedTables); + } + @Override public Set filterCatalogs(Identity identity, Set catalogs) { @@ -174,6 +182,17 @@ public Set filterCatalogs(Identity identity, Set catalogs) .collect(toImmutableSet())); } + @Override + public Set filterTables(SecurityContext context, String catalogName, Set tableNames) + { + return super.filterTables( + context, + catalogName, + tableNames.stream() + .filter(this.deniedTables) + .collect(toImmutableSet())); + } + @Override public void checkCanImpersonateUser(Identity identity, String userName) { From 728be1ac2446b3d2e6f177e91a63c9eea5a5589a Mon Sep 17 00:00:00 2001 From: Pawel Palucha Date: Wed, 25 Mar 2020 15:35:57 +0100 Subject: [PATCH 010/519] Add support for Glue endpoint URL --- .../hive/metastore/glue/GlueHiveMetastore.java | 11 +++++++++-- .../metastore/glue/GlueHiveMetastoreConfig.java | 14 ++++++++++++++ .../glue/TestGlueHiveMetastoreConfig.java | 3 +++ 3 files changed, 26 insertions(+), 2 deletions(-) diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/glue/GlueHiveMetastore.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/glue/GlueHiveMetastore.java index 5ea38436144b..3d4695970f2c 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/glue/GlueHiveMetastore.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/glue/GlueHiveMetastore.java @@ -21,6 +21,7 @@ import com.amazonaws.auth.DefaultAWSCredentialsProviderChain; import com.amazonaws.auth.InstanceProfileCredentialsProvider; import com.amazonaws.auth.STSAssumeRoleSessionCredentialsProvider; +import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration; import com.amazonaws.services.glue.AWSGlueAsync; import com.amazonaws.services.glue.AWSGlueAsyncClientBuilder; import 
com.amazonaws.services.glue.model.AlreadyExistsException; @@ -112,6 +113,7 @@ import java.util.concurrent.Future; import java.util.function.Function; +import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Strings.isNullOrEmpty; import static com.google.common.collect.Comparators.lexicographical; import static com.google.common.collect.ImmutableMap.toImmutableMap; @@ -182,8 +184,13 @@ private static AWSGlueAsync createAsyncGlueClient(GlueHiveMetastoreConfig config ClientConfiguration clientConfig = new ClientConfiguration().withMaxConnections(config.getMaxGlueConnections()); AWSGlueAsyncClientBuilder asyncGlueClientBuilder = AWSGlueAsyncClientBuilder.standard() .withClientConfiguration(clientConfig); - - if (config.getGlueRegion().isPresent()) { + if (config.getGlueEndpointUrl().isPresent()) { + checkArgument(config.getGlueRegion().isPresent(), "Glue region must be set when Glue endpoint URL is set"); + asyncGlueClientBuilder.setEndpointConfiguration(new EndpointConfiguration( + config.getGlueEndpointUrl().get(), + config.getGlueRegion().get())); + } + else if (config.getGlueRegion().isPresent()) { asyncGlueClientBuilder.setRegion(config.getGlueRegion().get()); } else if (config.getPinGlueClientToCurrentRegion()) { diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/glue/GlueHiveMetastoreConfig.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/glue/GlueHiveMetastoreConfig.java index b6868d6243b3..556e82cbc943 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/glue/GlueHiveMetastoreConfig.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/glue/GlueHiveMetastoreConfig.java @@ -25,6 +25,7 @@ public class GlueHiveMetastoreConfig { private Optional glueRegion = Optional.empty(); + private Optional glueEndpointUrl = Optional.empty(); private boolean pinGlueClientToCurrentRegion; private int maxGlueConnections = 5; private Optional 
defaultWarehouseDir = Optional.empty(); @@ -50,6 +51,19 @@ public GlueHiveMetastoreConfig setGlueRegion(String region) return this; } + public Optional getGlueEndpointUrl() + { + return glueEndpointUrl; + } + + @Config("hive.metastore.glue.endpoint-url") + @ConfigDescription("Glue API endpoint URL") + public GlueHiveMetastoreConfig setGlueEndpointUrl(String glueEndpointUrl) + { + this.glueEndpointUrl = Optional.ofNullable(glueEndpointUrl); + return this; + } + public boolean getPinGlueClientToCurrentRegion() { return pinGlueClientToCurrentRegion; diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/metastore/glue/TestGlueHiveMetastoreConfig.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/metastore/glue/TestGlueHiveMetastoreConfig.java index cf66c06af7b2..c24e6a344902 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/metastore/glue/TestGlueHiveMetastoreConfig.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/metastore/glue/TestGlueHiveMetastoreConfig.java @@ -29,6 +29,7 @@ public void testDefaults() { assertRecordedDefaults(recordDefaults(GlueHiveMetastoreConfig.class) .setGlueRegion(null) + .setGlueEndpointUrl(null) .setPinGlueClientToCurrentRegion(false) .setMaxGlueConnections(5) .setDefaultWarehouseDir(null) @@ -47,6 +48,7 @@ public void testExplicitPropertyMapping() { Map properties = new ImmutableMap.Builder() .put("hive.metastore.glue.region", "us-east-1") + .put("hive.metastore.glue.endpoint-url", "http://foo.bar") .put("hive.metastore.glue.pin-client-to-current-region", "true") .put("hive.metastore.glue.max-connections", "10") .put("hive.metastore.glue.default-warehouse-dir", "/location") @@ -62,6 +64,7 @@ public void testExplicitPropertyMapping() GlueHiveMetastoreConfig expected = new GlueHiveMetastoreConfig() .setGlueRegion("us-east-1") + .setGlueEndpointUrl("http://foo.bar") .setPinGlueClientToCurrentRegion(true) .setMaxGlueConnections(10) .setDefaultWarehouseDir("/location") From 
9e6a3f7b77888a345d1e42a742e3a20e11a82218 Mon Sep 17 00:00:00 2001 From: David Phillips Date: Mon, 30 Mar 2020 00:39:15 -0700 Subject: [PATCH 011/519] Remove unused bindings from IcebergModule --- .../main/java/io/prestosql/plugin/iceberg/IcebergModule.java | 5 ----- 1 file changed, 5 deletions(-) diff --git a/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergModule.java b/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergModule.java index 2af32422b012..e084c888cef7 100644 --- a/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergModule.java +++ b/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergModule.java @@ -18,10 +18,7 @@ import com.google.inject.Scopes; import io.prestosql.plugin.hive.FileFormatDataSourceStats; import io.prestosql.plugin.hive.HiveHdfsModule; -import io.prestosql.plugin.hive.HiveLocationService; import io.prestosql.plugin.hive.HiveNodePartitioningProvider; -import io.prestosql.plugin.hive.HiveTransactionManager; -import io.prestosql.plugin.hive.LocationService; import io.prestosql.plugin.hive.orc.OrcReaderConfig; import io.prestosql.plugin.hive.orc.OrcWriterConfig; import io.prestosql.plugin.hive.parquet.ParquetReaderConfig; @@ -61,9 +58,7 @@ public void configure(Binder binder) configBinder(binder).bindConfig(ParquetReaderConfig.class); configBinder(binder).bindConfig(ParquetWriterConfig.class); - binder.bind(LocationService.class).to(HiveLocationService.class).in(Scopes.SINGLETON); binder.bind(IcebergMetadataFactory.class).in(Scopes.SINGLETON); - binder.bind(HiveTransactionManager.class).in(Scopes.SINGLETON); jsonCodecBinder(binder).bindJsonCodec(CommitTaskData.class); From f64f1b6a1a5c08f9d527ca9125bacafb7dad76ce Mon Sep 17 00:00:00 2001 From: James Petty Date: Tue, 31 Mar 2020 08:43:38 -0400 Subject: [PATCH 012/519] Use JSON generator Base64 conversion in BlockJsonSerde --- .../java/io/prestosql/block/BlockJsonSerde.java | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) 
diff --git a/presto-main/src/main/java/io/prestosql/block/BlockJsonSerde.java b/presto-main/src/main/java/io/prestosql/block/BlockJsonSerde.java index b17c5fce6c3a..563347f205ed 100644 --- a/presto-main/src/main/java/io/prestosql/block/BlockJsonSerde.java +++ b/presto-main/src/main/java/io/prestosql/block/BlockJsonSerde.java @@ -13,6 +13,7 @@ */ package io.prestosql.block; +import com.fasterxml.jackson.core.Base64Variants; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.databind.DeserializationContext; @@ -20,6 +21,7 @@ import com.fasterxml.jackson.databind.JsonSerializer; import com.fasterxml.jackson.databind.SerializerProvider; import io.airlift.slice.DynamicSliceOutput; +import io.airlift.slice.Slice; import io.airlift.slice.SliceOutput; import io.airlift.slice.Slices; import io.prestosql.spi.block.Block; @@ -28,10 +30,10 @@ import javax.inject.Inject; import java.io.IOException; -import java.util.Base64; import static io.prestosql.block.BlockSerdeUtil.readBlock; import static io.prestosql.block.BlockSerdeUtil.writeBlock; +import static java.lang.Math.toIntExact; import static java.util.Objects.requireNonNull; public final class BlockJsonSerde @@ -53,10 +55,11 @@ public Serializer(BlockEncodingSerde blockEncodingSerde) public void serialize(Block block, JsonGenerator jsonGenerator, SerializerProvider serializerProvider) throws IOException { - SliceOutput output = new DynamicSliceOutput(64); + // Encoding name is length prefixed as are many block encodings + SliceOutput output = new DynamicSliceOutput(toIntExact(block.getSizeInBytes() + block.getEncodingName().length() + (2 * Integer.BYTES))); writeBlock(blockEncodingSerde, output, block); - String encoded = Base64.getEncoder().encodeToString(output.slice().getBytes()); - jsonGenerator.writeString(encoded); + Slice slice = output.slice(); + jsonGenerator.writeBinary(Base64Variants.MIME_NO_LINEFEEDS, slice.byteArray(), 
slice.byteArrayOffset(), slice.length()); } } @@ -75,7 +78,7 @@ public Deserializer(BlockEncodingSerde blockEncodingSerde) public Block deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException { - byte[] decoded = Base64.getDecoder().decode(jsonParser.readValueAs(String.class)); + byte[] decoded = jsonParser.getBinaryValue(Base64Variants.MIME_NO_LINEFEEDS); return readBlock(blockEncodingSerde, Slices.wrappedBuffer(decoded)); } } From 53c5dfcba1a874cbfc84fff7104808e0ba1972a6 Mon Sep 17 00:00:00 2001 From: James Petty Date: Tue, 31 Mar 2020 08:48:42 -0400 Subject: [PATCH 013/519] Avoid Slice.getBytes() in parquet reader --- .../io/prestosql/parquet/dictionary/BinaryDictionary.java | 6 ++++-- .../io/prestosql/parquet/reader/PrimitiveColumnReader.java | 3 +-- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/presto-parquet/src/main/java/io/prestosql/parquet/dictionary/BinaryDictionary.java b/presto-parquet/src/main/java/io/prestosql/parquet/dictionary/BinaryDictionary.java index 2430486a3eab..bcbc72b16175 100644 --- a/presto-parquet/src/main/java/io/prestosql/parquet/dictionary/BinaryDictionary.java +++ b/presto-parquet/src/main/java/io/prestosql/parquet/dictionary/BinaryDictionary.java @@ -13,6 +13,7 @@ */ package io.prestosql.parquet.dictionary; +import io.airlift.slice.Slice; import io.prestosql.parquet.DictionaryPage; import org.apache.parquet.io.api.Binary; @@ -37,9 +38,10 @@ public BinaryDictionary(DictionaryPage dictionaryPage, Integer length) throws IOException { super(dictionaryPage.getEncoding()); - byte[] dictionaryBytes = dictionaryPage.getSlice().getBytes(); content = new Binary[dictionaryPage.getDictionarySize()]; - int offset = 0; + Slice dictionarySlice = dictionaryPage.getSlice(); + byte[] dictionaryBytes = dictionarySlice.byteArray(); + int offset = dictionarySlice.byteArrayOffset(); if (length == null) { for (int i = 0; i < content.length; i++) { int len = readIntLittleEndian(dictionaryBytes, 
offset); diff --git a/presto-parquet/src/main/java/io/prestosql/parquet/reader/PrimitiveColumnReader.java b/presto-parquet/src/main/java/io/prestosql/parquet/reader/PrimitiveColumnReader.java index 6c659a451982..fab9c9f9dc32 100644 --- a/presto-parquet/src/main/java/io/prestosql/parquet/reader/PrimitiveColumnReader.java +++ b/presto-parquet/src/main/java/io/prestosql/parquet/reader/PrimitiveColumnReader.java @@ -35,7 +35,6 @@ import org.apache.parquet.io.ParquetDecodingException; import org.apache.parquet.schema.OriginalType; -import java.io.ByteArrayInputStream; import java.io.IOException; import java.util.Optional; @@ -286,7 +285,7 @@ private LevelReader buildLevelRLEReader(int maxLevel, Slice slice) if (maxLevel == 0) { return new LevelNullReader(); } - return new LevelRLEReader(new RunLengthBitPackingHybridDecoder(BytesUtils.getWidthFromMaxInt(maxLevel), new ByteArrayInputStream(slice.getBytes()))); + return new LevelRLEReader(new RunLengthBitPackingHybridDecoder(BytesUtils.getWidthFromMaxInt(maxLevel), slice.getInput())); } private ValuesReader initDataReader(ParquetEncoding dataEncoding, int valueCount, ByteBufferInputStream in) From 021dda102ca94e25cf89239fd9a75a9851c9913a Mon Sep 17 00:00:00 2001 From: James Petty Date: Tue, 31 Mar 2020 09:06:10 -0400 Subject: [PATCH 014/519] Optimize Slice Functions to avoid Slice.getBytes() Avoids copies in slice functions that otherwise perform unnecessary data copies. Specifically optimizes: - LIKE and JONI regex_like - Hash functions (Hmac and digest hashes eg: sha1, md5) - Base64 encoding / decoding functions Queries over short string fields found in the tpch 1000 data set appear to show a 1-5% improvement, although these are highly workload dependent. 
--- .../operator/scalar/HmacFunctions.java | 22 +++++++++--- .../operator/scalar/JoniRegexpFunctions.java | 14 ++++++-- .../operator/scalar/VarbinaryFunctions.java | 36 ++++++++++++++++--- .../java/io/prestosql/type/LikeFunctions.java | 19 ++++++---- .../io/prestosql/sql/TestLikeFunctions.java | 12 +++++++ 5 files changed, 84 insertions(+), 19 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/operator/scalar/HmacFunctions.java b/presto-main/src/main/java/io/prestosql/operator/scalar/HmacFunctions.java index dd837de71ba0..ab5ab8c1fbc4 100644 --- a/presto-main/src/main/java/io/prestosql/operator/scalar/HmacFunctions.java +++ b/presto-main/src/main/java/io/prestosql/operator/scalar/HmacFunctions.java @@ -13,6 +13,8 @@ */ package io.prestosql.operator.scalar; +import com.google.common.hash.HashCode; +import com.google.common.hash.HashFunction; import com.google.common.hash.Hashing; import io.airlift.slice.Slice; import io.prestosql.spi.function.Description; @@ -31,7 +33,7 @@ private HmacFunctions() {} @SqlType(StandardTypes.VARBINARY) public static Slice hmacMd5(@SqlType(StandardTypes.VARBINARY) Slice slice, @SqlType(StandardTypes.VARBINARY) Slice key) { - return wrappedBuffer(Hashing.hmacMd5(key.getBytes()).hashBytes(slice.getBytes()).asBytes()); + return computeHash(Hashing.hmacMd5(key.getBytes()), slice); } @Description("Compute HMAC with SHA1") @@ -39,7 +41,7 @@ public static Slice hmacMd5(@SqlType(StandardTypes.VARBINARY) Slice slice, @SqlT @SqlType(StandardTypes.VARBINARY) public static Slice hmacSha1(@SqlType(StandardTypes.VARBINARY) Slice slice, @SqlType(StandardTypes.VARBINARY) Slice key) { - return wrappedBuffer(Hashing.hmacSha1(key.getBytes()).hashBytes(slice.getBytes()).asBytes()); + return computeHash(Hashing.hmacSha1(key.getBytes()), slice); } @Description("Compute HMAC with SHA256") @@ -47,7 +49,7 @@ public static Slice hmacSha1(@SqlType(StandardTypes.VARBINARY) Slice slice, @Sql @SqlType(StandardTypes.VARBINARY) public static Slice 
hmacSha256(@SqlType(StandardTypes.VARBINARY) Slice slice, @SqlType(StandardTypes.VARBINARY) Slice key) { - return wrappedBuffer(Hashing.hmacSha256(key.getBytes()).hashBytes(slice.getBytes()).asBytes()); + return computeHash(Hashing.hmacSha256(key.getBytes()), slice); } @Description("Compute HMAC with SHA512") @@ -55,6 +57,18 @@ public static Slice hmacSha256(@SqlType(StandardTypes.VARBINARY) Slice slice, @S @SqlType(StandardTypes.VARBINARY) public static Slice hmacSha512(@SqlType(StandardTypes.VARBINARY) Slice slice, @SqlType(StandardTypes.VARBINARY) Slice key) { - return wrappedBuffer(Hashing.hmacSha512(key.getBytes()).hashBytes(slice.getBytes()).asBytes()); + return computeHash(Hashing.hmacSha512(key.getBytes()), slice); + } + + static Slice computeHash(HashFunction hash, Slice data) + { + HashCode result; + if (data.hasByteArray()) { + result = hash.hashBytes(data.byteArray(), data.byteArrayOffset(), data.length()); + } + else { + result = hash.hashBytes(data.getBytes()); + } + return wrappedBuffer(result.asBytes()); } } diff --git a/presto-main/src/main/java/io/prestosql/operator/scalar/JoniRegexpFunctions.java b/presto-main/src/main/java/io/prestosql/operator/scalar/JoniRegexpFunctions.java index 522177a8da17..c95e324d9af3 100644 --- a/presto-main/src/main/java/io/prestosql/operator/scalar/JoniRegexpFunctions.java +++ b/presto-main/src/main/java/io/prestosql/operator/scalar/JoniRegexpFunctions.java @@ -54,9 +54,17 @@ private JoniRegexpFunctions() {} @SqlType(StandardTypes.BOOLEAN) public static boolean regexpLike(@SqlType("varchar(x)") Slice source, @SqlType(JoniRegexpType.NAME) JoniRegexp pattern) { - Matcher m = pattern.matcher(source.getBytes()); - int offset = m.search(0, source.length(), Option.DEFAULT); - return offset != -1; + Matcher matcher; + int offset; + if (source.hasByteArray()) { + offset = source.byteArrayOffset(); + matcher = pattern.regex().matcher(source.byteArray(), offset, offset + source.length()); + } + else { + offset = 0; + matcher = 
pattern.matcher(source.getBytes()); + } + return matcher.search(offset, offset + source.length(), Option.DEFAULT) != -1; } private static int getNextStart(Slice source, Matcher matcher) diff --git a/presto-main/src/main/java/io/prestosql/operator/scalar/VarbinaryFunctions.java b/presto-main/src/main/java/io/prestosql/operator/scalar/VarbinaryFunctions.java index e2a3b5bc949d..3e44731ee32b 100644 --- a/presto-main/src/main/java/io/prestosql/operator/scalar/VarbinaryFunctions.java +++ b/presto-main/src/main/java/io/prestosql/operator/scalar/VarbinaryFunctions.java @@ -31,6 +31,7 @@ import java.util.zip.CRC32; import static io.airlift.slice.Slices.EMPTY_SLICE; +import static io.prestosql.operator.scalar.HmacFunctions.computeHash; import static io.prestosql.spi.StandardErrorCode.INVALID_FUNCTION_ARGUMENT; import static io.prestosql.util.Failures.checkCondition; @@ -51,6 +52,9 @@ public static long length(@SqlType(StandardTypes.VARBINARY) Slice slice) @SqlType(StandardTypes.VARCHAR) public static Slice toBase64(@SqlType(StandardTypes.VARBINARY) Slice slice) { + if (slice.hasByteArray()) { + return Slices.wrappedBuffer(Base64.getEncoder().encode(slice.toByteBuffer())); + } return Slices.wrappedBuffer(Base64.getEncoder().encode(slice.getBytes())); } @@ -61,6 +65,9 @@ public static Slice toBase64(@SqlType(StandardTypes.VARBINARY) Slice slice) public static Slice fromBase64Varchar(@SqlType("varchar(x)") Slice slice) { try { + if (slice.hasByteArray()) { + return Slices.wrappedBuffer(Base64.getDecoder().decode(slice.toByteBuffer())); + } return Slices.wrappedBuffer(Base64.getDecoder().decode(slice.getBytes())); } catch (IllegalArgumentException e) { @@ -74,6 +81,9 @@ public static Slice fromBase64Varchar(@SqlType("varchar(x)") Slice slice) public static Slice fromBase64Varbinary(@SqlType(StandardTypes.VARBINARY) Slice slice) { try { + if (slice.hasByteArray()) { + return Slices.wrappedBuffer(Base64.getDecoder().decode(slice.toByteBuffer())); + } return 
Slices.wrappedBuffer(Base64.getDecoder().decode(slice.getBytes())); } catch (IllegalArgumentException e) { @@ -86,6 +96,9 @@ public static Slice fromBase64Varbinary(@SqlType(StandardTypes.VARBINARY) Slice @SqlType(StandardTypes.VARCHAR) public static Slice toBase64Url(@SqlType(StandardTypes.VARBINARY) Slice slice) { + if (slice.hasByteArray()) { + return Slices.wrappedBuffer(Base64.getUrlEncoder().encode(slice.toByteBuffer())); + } return Slices.wrappedBuffer(Base64.getUrlEncoder().encode(slice.getBytes())); } @@ -96,6 +109,9 @@ public static Slice toBase64Url(@SqlType(StandardTypes.VARBINARY) Slice slice) public static Slice fromBase64UrlVarchar(@SqlType("varchar(x)") Slice slice) { try { + if (slice.hasByteArray()) { + return Slices.wrappedBuffer(Base64.getUrlDecoder().decode(slice.toByteBuffer())); + } return Slices.wrappedBuffer(Base64.getUrlDecoder().decode(slice.getBytes())); } catch (IllegalArgumentException e) { @@ -109,6 +125,9 @@ public static Slice fromBase64UrlVarchar(@SqlType("varchar(x)") Slice slice) public static Slice fromBase64UrlVarbinary(@SqlType(StandardTypes.VARBINARY) Slice slice) { try { + if (slice.hasByteArray()) { + return Slices.wrappedBuffer(Base64.getUrlDecoder().decode(slice.toByteBuffer())); + } return Slices.wrappedBuffer(Base64.getUrlDecoder().decode(slice.getBytes())); } catch (IllegalArgumentException e) { @@ -121,7 +140,14 @@ public static Slice fromBase64UrlVarbinary(@SqlType(StandardTypes.VARBINARY) Sli @SqlType(StandardTypes.VARCHAR) public static Slice toHex(@SqlType(StandardTypes.VARBINARY) Slice slice) { - return Slices.utf8Slice(BaseEncoding.base16().encode(slice.getBytes())); + String encoded; + if (slice.hasByteArray()) { + encoded = BaseEncoding.base16().encode(slice.byteArray(), slice.byteArrayOffset(), slice.length()); + } + else { + encoded = BaseEncoding.base16().encode(slice.getBytes()); + } + return Slices.utf8Slice(encoded); } @Description("Decode hex encoded binary data") @@ -226,7 +252,7 @@ public static 
double fromIEEE754Binary64(@SqlType(StandardTypes.VARBINARY) Slice @SqlType(StandardTypes.VARBINARY) public static Slice md5(@SqlType(StandardTypes.VARBINARY) Slice slice) { - return Slices.wrappedBuffer(Hashing.md5().hashBytes(slice.getBytes()).asBytes()); + return computeHash(Hashing.md5(), slice); } @Description("Compute sha1 hash") @@ -234,7 +260,7 @@ public static Slice md5(@SqlType(StandardTypes.VARBINARY) Slice slice) @SqlType(StandardTypes.VARBINARY) public static Slice sha1(@SqlType(StandardTypes.VARBINARY) Slice slice) { - return Slices.wrappedBuffer(Hashing.sha1().hashBytes(slice.getBytes()).asBytes()); + return computeHash(Hashing.sha1(), slice); } @Description("Compute sha256 hash") @@ -242,7 +268,7 @@ public static Slice sha1(@SqlType(StandardTypes.VARBINARY) Slice slice) @SqlType(StandardTypes.VARBINARY) public static Slice sha256(@SqlType(StandardTypes.VARBINARY) Slice slice) { - return Slices.wrappedBuffer(Hashing.sha256().hashBytes(slice.getBytes()).asBytes()); + return computeHash(Hashing.sha256(), slice); } @Description("Compute sha512 hash") @@ -250,7 +276,7 @@ public static Slice sha256(@SqlType(StandardTypes.VARBINARY) Slice slice) @SqlType(StandardTypes.VARBINARY) public static Slice sha512(@SqlType(StandardTypes.VARBINARY) Slice slice) { - return Slices.wrappedBuffer(Hashing.sha512().hashBytes(slice.getBytes()).asBytes()); + return computeHash(Hashing.sha512(), slice); } private static int hexDigitCharToInt(byte b) diff --git a/presto-main/src/main/java/io/prestosql/type/LikeFunctions.java b/presto-main/src/main/java/io/prestosql/type/LikeFunctions.java index f026c36d53ff..0b44ad5a3151 100644 --- a/presto-main/src/main/java/io/prestosql/type/LikeFunctions.java +++ b/presto-main/src/main/java/io/prestosql/type/LikeFunctions.java @@ -14,6 +14,7 @@ package io.prestosql.type; import io.airlift.jcodings.specific.NonStrictUTF8Encoding; +import io.airlift.joni.Matcher; import io.airlift.joni.Option; import io.airlift.joni.Regex; import 
io.airlift.joni.Syntax; @@ -69,8 +70,17 @@ public static boolean likeVarchar(@SqlType("varchar(x)") Slice value, @SqlType(L { // Joni can infinite loop with UTF8Encoding when invalid UTF-8 is encountered. // NonStrictUTF8Encoding must be used to avoid this issue. - byte[] bytes = value.getBytes(); - return regexMatches(pattern, bytes); + Matcher matcher; + int offset; + if (value.hasByteArray()) { + offset = value.byteArrayOffset(); + matcher = pattern.regex().matcher(value.byteArray(), offset, offset + value.length()); + } + else { + offset = 0; + matcher = pattern.matcher(value.getBytes()); + } + return matcher.match(offset, offset + value.length(), Option.NONE) != -1; } @ScalarFunction(value = LIKE_PATTERN_FUNCTION_NAME, hidden = true) @@ -159,11 +169,6 @@ private static void checkEscape(boolean condition) checkCondition(condition, INVALID_FUNCTION_ARGUMENT, "Escape character must be followed by '%%', '_' or the escape character itself"); } - private static boolean regexMatches(JoniRegexp regex, byte[] bytes) - { - return regex.matcher(bytes).match(0, bytes.length, Option.NONE) != -1; - } - @SuppressWarnings("NestedSwitchStatement") private static JoniRegexp likePattern(String patternString, char escapeChar, boolean shouldEscape) { diff --git a/presto-main/src/test/java/io/prestosql/sql/TestLikeFunctions.java b/presto-main/src/test/java/io/prestosql/sql/TestLikeFunctions.java index 8d611519a397..87084eb0643e 100644 --- a/presto-main/src/test/java/io/prestosql/sql/TestLikeFunctions.java +++ b/presto-main/src/test/java/io/prestosql/sql/TestLikeFunctions.java @@ -36,11 +36,20 @@ public class TestLikeFunctions extends AbstractTestFunctions { + private static Slice offsetHeapSlice(String value) + { + Slice source = Slices.utf8Slice(value); + Slice result = Slices.allocate(source.length() + 5); + result.setBytes(2, source); + return result.slice(2, source.length()); + } + @Test public void testLikeBasic() { JoniRegexp regex = 
LikeFunctions.compileLikePattern(utf8Slice("f%b__")); assertTrue(likeVarchar(utf8Slice("foobar"), regex)); + assertTrue(likeVarchar(offsetHeapSlice("foobar"), regex)); assertFunction("'foob' LIKE 'f%b__'", BOOLEAN, false); assertFunction("'foob' LIKE 'f%b'", BOOLEAN, true); @@ -51,8 +60,11 @@ public void testLikeChar() { JoniRegexp regex = LikeFunctions.compileLikePattern(utf8Slice("f%b__")); assertTrue(likeChar(6L, utf8Slice("foobar"), regex)); + assertTrue(likeChar(6L, offsetHeapSlice("foobar"), regex)); assertTrue(likeChar(6L, utf8Slice("foob"), regex)); + assertTrue(likeChar(6L, offsetHeapSlice("foob"), regex)); assertFalse(likeChar(7L, utf8Slice("foob"), regex)); + assertFalse(likeChar(7L, offsetHeapSlice("foob"), regex)); assertFunction("cast('foob' as char(6)) LIKE 'f%b__'", BOOLEAN, true); assertFunction("cast('foob' as char(7)) LIKE 'f%b__'", BOOLEAN, false); From f2bf3703052466dc27a378ae70fa2a4e13e15565 Mon Sep 17 00:00:00 2001 From: James Petty Date: Tue, 31 Mar 2020 09:16:33 -0400 Subject: [PATCH 015/519] Remove copy from InMemoryRecordSet --- .../main/java/io/prestosql/spi/connector/InMemoryRecordSet.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/presto-spi/src/main/java/io/prestosql/spi/connector/InMemoryRecordSet.java b/presto-spi/src/main/java/io/prestosql/spi/connector/InMemoryRecordSet.java index f67c2380fb4c..95b266613ca5 100644 --- a/presto-spi/src/main/java/io/prestosql/spi/connector/InMemoryRecordSet.java +++ b/presto-spi/src/main/java/io/prestosql/spi/connector/InMemoryRecordSet.java @@ -295,7 +295,7 @@ else if (value instanceof Block) { completedBytes += ((Block) value).getSizeInBytes(); } else if (value instanceof Slice) { - completedBytes += ((Slice) value).getBytes().length; + completedBytes += ((Slice) value).length(); } else { throw new IllegalArgumentException("Unknown type: " + value.getClass()); From ee91f737e99dd64a07340d6279c63f2f3a8ef2f8 Mon Sep 17 00:00:00 2001 From: David Phillips Date: Sat, 14 Mar 2020 
14:30:07 -0700 Subject: [PATCH 016/519] Allow setting identity for TestingConnectorSession --- .../io/prestosql/testing/TestingConnectorSession.java | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/presto-main/src/main/java/io/prestosql/testing/TestingConnectorSession.java b/presto-main/src/main/java/io/prestosql/testing/TestingConnectorSession.java index 9440eeb3519b..43829aefa7b5 100644 --- a/presto-main/src/main/java/io/prestosql/testing/TestingConnectorSession.java +++ b/presto-main/src/main/java/io/prestosql/testing/TestingConnectorSession.java @@ -159,7 +159,7 @@ public static Builder builder() public static class Builder { - private final ConnectorIdentity identity = ConnectorIdentity.ofUser("user"); + private ConnectorIdentity identity = ConnectorIdentity.ofUser("user"); private final Optional source = Optional.of("test"); private TimeZoneKey timeZoneKey = UTC_KEY; private final Locale locale = ENGLISH; @@ -169,6 +169,12 @@ public static class Builder private Map propertyValues = ImmutableMap.of(); private boolean isLegacyTimestamp = new FeaturesConfig().isLegacyTimestamp(); + public Builder setIdentity(ConnectorIdentity identity) + { + this.identity = requireNonNull(identity, "identity is null"); + return this; + } + public Builder setTimeZoneKey(TimeZoneKey timeZoneKey) { this.timeZoneKey = requireNonNull(timeZoneKey, "timeZoneKey is null"); From 76598483378bc4396c510daeec2c324235379b5d Mon Sep 17 00:00:00 2001 From: David Phillips Date: Sun, 15 Mar 2020 23:31:09 -0700 Subject: [PATCH 017/519] Remove config hive.s3.use-instance-credentials This flag was added before IAM role support and is incompatible with role support, as it means "always use instance credentials". After the introduction of DefaultAWSCredentialsProviderChain, instance credentials can still be used even if this flag is false, as they are the last item in the default chain. 
Given the confusing nature of this flag, and the incompatibilty with existing role support and the upcoming security mapping feature, it seems best to remove the flag entirely. --- .../src/main/sphinx/connector/hive.rst | 8 ++------ .../plugin/hive/s3/HiveS3Config.java | 15 ++------------- .../s3/PrestoS3ConfigurationInitializer.java | 4 ---- .../plugin/hive/s3/PrestoS3FileSystem.java | 10 ---------- .../hive/s3select/PrestoS3ClientFactory.java | 7 ------- .../plugin/hive/s3/TestHiveS3Config.java | 3 --- .../hive/s3/TestPrestoS3FileSystem.java | 19 ------------------- 7 files changed, 4 insertions(+), 62 deletions(-) diff --git a/presto-docs/src/main/sphinx/connector/hive.rst b/presto-docs/src/main/sphinx/connector/hive.rst index 822da9a1d355..878d278d2b55 100644 --- a/presto-docs/src/main/sphinx/connector/hive.rst +++ b/presto-docs/src/main/sphinx/connector/hive.rst @@ -334,9 +334,6 @@ S3 Configuration Properties ============================================ ================================================================= Property Name Description ============================================ ================================================================= -``hive.s3.use-instance-credentials`` Use the EC2 metadata service to retrieve API credentials, - defaults to ``true``. This works with IAM roles in EC2. - ``hive.s3.aws-access-key`` Default AWS access key to use. ``hive.s3.aws-secret-key`` Default AWS secret key to use. @@ -406,9 +403,8 @@ S3 Credentials ^^^^^^^^^^^^^^ If you are running Presto on Amazon EC2, using EMR or another facility, -it is highly recommended that you set ``hive.s3.use-instance-credentials`` -to ``true`` and use IAM Roles for EC2 to govern access to S3. If this is -the case, your EC2 instances need to be assigned an IAM Role which +it is recommended that you use IAM Roles for EC2 to govern access to S3. 
+To enable this, your EC2 instances need to be assigned an IAM Role which grants appropriate access to the data stored in the S3 bucket(s) you wish to use. It is also possible to configure an IAM role with ``hive.s3.iam-role`` that is used for accessing any S3 bucket. This is much cleaner than diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/HiveS3Config.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/HiveS3Config.java index c3cd31340cad..c019dd387c97 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/HiveS3Config.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/HiveS3Config.java @@ -17,6 +17,7 @@ import io.airlift.configuration.Config; import io.airlift.configuration.ConfigDescription; import io.airlift.configuration.ConfigSecuritySensitive; +import io.airlift.configuration.DefunctConfig; import io.airlift.units.DataSize; import io.airlift.units.Duration; import io.airlift.units.MinDataSize; @@ -30,6 +31,7 @@ import static io.airlift.units.DataSize.Unit.MEGABYTE; +@DefunctConfig("hive.s3.use-instance-credentials") public class HiveS3Config { private String s3AwsAccessKey; @@ -39,7 +41,6 @@ public class HiveS3Config private PrestoS3SignerType s3SignerType; private String s3SignerClass; private boolean s3PathStyleAccess; - private boolean s3UseInstanceCredentials = true; private String s3IamRole; private boolean s3SslEnabled = true; private boolean s3SseEnabled; @@ -151,18 +152,6 @@ public HiveS3Config setS3PathStyleAccess(boolean s3PathStyleAccess) return this; } - public boolean isS3UseInstanceCredentials() - { - return s3UseInstanceCredentials; - } - - @Config("hive.s3.use-instance-credentials") - public HiveS3Config setS3UseInstanceCredentials(boolean s3UseInstanceCredentials) - { - this.s3UseInstanceCredentials = s3UseInstanceCredentials; - return this; - } - public String getS3IamRole() { return s3IamRole; diff --git 
a/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/PrestoS3ConfigurationInitializer.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/PrestoS3ConfigurationInitializer.java index 7f4531d81a16..7fe4ffc6c7bf 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/PrestoS3ConfigurationInitializer.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/PrestoS3ConfigurationInitializer.java @@ -51,7 +51,6 @@ import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_STAGING_DIRECTORY; import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_STORAGE_CLASS; import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_USER_AGENT_PREFIX; -import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_USE_INSTANCE_CREDENTIALS; public class PrestoS3ConfigurationInitializer implements ConfigurationInitializer @@ -62,7 +61,6 @@ public class PrestoS3ConfigurationInitializer private final PrestoS3StorageClass s3StorageClass; private final PrestoS3SignerType signerType; private final boolean pathStyleAccess; - private final boolean useInstanceCredentials; private final String iamRole; private final boolean sslEnabled; private final boolean sseEnabled; @@ -97,7 +95,6 @@ public PrestoS3ConfigurationInitializer(HiveS3Config config) this.signerType = config.getS3SignerType(); this.signerClass = config.getS3SignerClass(); this.pathStyleAccess = config.isS3PathStyleAccess(); - this.useInstanceCredentials = config.isS3UseInstanceCredentials(); this.iamRole = config.getS3IamRole(); this.sslEnabled = config.isS3SslEnabled(); this.sseEnabled = config.isS3SseEnabled(); @@ -147,7 +144,6 @@ public void initializeConfiguration(Configuration config) config.set(S3_SIGNER_CLASS, signerClass); } config.setBoolean(S3_PATH_STYLE_ACCESS, pathStyleAccess); - config.setBoolean(S3_USE_INSTANCE_CREDENTIALS, useInstanceCredentials); if (iamRole != null) { config.set(S3_IAM_ROLE, iamRole); } diff --git 
a/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/PrestoS3FileSystem.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/PrestoS3FileSystem.java index 663fb55b9b85..cd2e24b44cc6 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/PrestoS3FileSystem.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/PrestoS3FileSystem.java @@ -23,7 +23,6 @@ import com.amazonaws.auth.AWSStaticCredentialsProvider; import com.amazonaws.auth.BasicAWSCredentials; import com.amazonaws.auth.DefaultAWSCredentialsProviderChain; -import com.amazonaws.auth.InstanceProfileCredentialsProvider; import com.amazonaws.auth.STSAssumeRoleSessionCredentialsProvider; import com.amazonaws.auth.Signer; import com.amazonaws.auth.SignerFactory; @@ -143,7 +142,6 @@ public class PrestoS3FileSystem public static final String S3_KMS_KEY_ID = "presto.s3.kms-key-id"; public static final String S3_ENCRYPTION_MATERIALS_PROVIDER = "presto.s3.encryption-materials-provider"; public static final String S3_PIN_CLIENT_TO_CURRENT_REGION = "presto.s3.pin-client-to-current-region"; - public static final String S3_USE_INSTANCE_CREDENTIALS = "presto.s3.use-instance-credentials"; public static final String S3_MULTIPART_MIN_PART_SIZE = "presto.s3.multipart.min-part-size"; public static final String S3_MULTIPART_MIN_FILE_SIZE = "presto.s3.multipart.min-file-size"; public static final String S3_STAGING_DIRECTORY = "presto.s3.staging-directory"; @@ -188,7 +186,6 @@ public class PrestoS3FileSystem private int maxAttempts; private Duration maxBackoffTime; private Duration maxRetryTime; - private boolean useInstanceCredentials; private String iamRole; private boolean pinS3ClientToCurrentRegion; private boolean sseEnabled; @@ -232,10 +229,7 @@ public void initialize(URI uri, Configuration conf) this.multiPartUploadMinFileSize = conf.getLong(S3_MULTIPART_MIN_FILE_SIZE, defaults.getS3MultipartMinFileSize().toBytes()); this.multiPartUploadMinPartSize = conf.getLong(S3_MULTIPART_MIN_PART_SIZE, 
defaults.getS3MultipartMinPartSize().toBytes()); this.isPathStyleAccess = conf.getBoolean(S3_PATH_STYLE_ACCESS, defaults.isS3PathStyleAccess()); - this.useInstanceCredentials = conf.getBoolean(S3_USE_INSTANCE_CREDENTIALS, defaults.isS3UseInstanceCredentials()); this.iamRole = conf.get(S3_IAM_ROLE, defaults.getS3IamRole()); - verify(!(useInstanceCredentials && this.iamRole != null), - "Invalid configuration: either use instance credentials or specify an iam role"); this.pinS3ClientToCurrentRegion = conf.getBoolean(S3_PIN_CLIENT_TO_CURRENT_REGION, defaults.isPinS3ClientToCurrentRegion()); verify(!pinS3ClientToCurrentRegion || conf.get(S3_ENDPOINT) == null, "Invalid configuration: either endpoint can be set or S3 client can be pinned to the current region"); @@ -800,10 +794,6 @@ private AWSCredentialsProvider createAwsCredentialsProvider(URI uri, Configurati return new AWSStaticCredentialsProvider(credentials.get()); } - if (useInstanceCredentials) { - return InstanceProfileCredentialsProvider.getInstance(); - } - if (iamRole != null) { return new STSAssumeRoleSessionCredentialsProvider.Builder(this.iamRole, "presto-session").build(); } diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/s3select/PrestoS3ClientFactory.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3select/PrestoS3ClientFactory.java index 2c27b8e4892c..12d0a25d12ee 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/s3select/PrestoS3ClientFactory.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3select/PrestoS3ClientFactory.java @@ -20,7 +20,6 @@ import com.amazonaws.auth.AWSStaticCredentialsProvider; import com.amazonaws.auth.BasicAWSCredentials; import com.amazonaws.auth.DefaultAWSCredentialsProviderChain; -import com.amazonaws.auth.InstanceProfileCredentialsProvider; import com.amazonaws.metrics.RequestMetricCollector; import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.AmazonS3Builder; @@ -53,7 +52,6 @@ import static 
io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_SOCKET_TIMEOUT; import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_SSL_ENABLED; import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_USER_AGENT_PREFIX; -import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_USE_INSTANCE_CREDENTIALS; import static java.lang.Math.toIntExact; import static java.lang.String.format; @@ -150,11 +148,6 @@ private static AWSCredentialsProvider getAwsCredentialsProvider(Configuration co return new AWSStaticCredentialsProvider(credentials.get()); } - boolean useInstanceCredentials = conf.getBoolean(S3_USE_INSTANCE_CREDENTIALS, defaults.isS3UseInstanceCredentials()); - if (useInstanceCredentials) { - return InstanceProfileCredentialsProvider.getInstance(); - } - String providerClass = conf.get(S3_CREDENTIALS_PROVIDER); if (!isNullOrEmpty(providerClass)) { return getCustomAWSCredentialsProvider(conf, providerClass); diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/s3/TestHiveS3Config.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/s3/TestHiveS3Config.java index 858966eb0edd..5e35db3a7f1c 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/s3/TestHiveS3Config.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/s3/TestHiveS3Config.java @@ -40,7 +40,6 @@ public void testDefaults() .setS3SignerType(null) .setS3SignerClass(null) .setS3PathStyleAccess(false) - .setS3UseInstanceCredentials(true) .setS3IamRole(null) .setS3StorageClass(PrestoS3StorageClass.STANDARD) .setS3SslEnabled(true) @@ -76,7 +75,6 @@ public void testExplicitPropertyMappings() .put("hive.s3.signer-type", "S3SignerType") .put("hive.s3.signer-class", "com.amazonaws.services.s3.internal.AWSS3V4Signer") .put("hive.s3.path-style-access", "true") - .put("hive.s3.use-instance-credentials", "false") .put("hive.s3.iam-role", "roleArn") .put("hive.s3.storage-class", "INTELLIGENT_TIERING") .put("hive.s3.ssl.enabled", "false") @@ -109,7 +107,6 @@ public void 
testExplicitPropertyMappings() .setS3SignerType(PrestoS3SignerType.S3SignerType) .setS3SignerClass("com.amazonaws.services.s3.internal.AWSS3V4Signer") .setS3PathStyleAccess(true) - .setS3UseInstanceCredentials(false) .setS3IamRole("roleArn") .setS3StorageClass(PrestoS3StorageClass.INTELLIGENT_TIERING) .setS3SslEnabled(false) diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/s3/TestPrestoS3FileSystem.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/s3/TestPrestoS3FileSystem.java index 70f707809744..402e3c640770 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/s3/TestPrestoS3FileSystem.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/s3/TestPrestoS3FileSystem.java @@ -19,7 +19,6 @@ import com.amazonaws.auth.AWSCredentialsProvider; import com.amazonaws.auth.AWSStaticCredentialsProvider; import com.amazonaws.auth.DefaultAWSCredentialsProviderChain; -import com.amazonaws.auth.InstanceProfileCredentialsProvider; import com.amazonaws.auth.STSAssumeRoleSessionCredentialsProvider; import com.amazonaws.services.s3.AmazonS3Client; import com.amazonaws.services.s3.AmazonS3EncryptionClient; @@ -73,7 +72,6 @@ import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_STAGING_DIRECTORY; import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_USER_AGENT_PREFIX; import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_USER_AGENT_SUFFIX; -import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_USE_INSTANCE_CREDENTIALS; import static java.net.HttpURLConnection.HTTP_FORBIDDEN; import static java.net.HttpURLConnection.HTTP_INTERNAL_ERROR; import static java.net.HttpURLConnection.HTTP_NOT_FOUND; @@ -139,26 +137,12 @@ public void testEndpointWithPinToCurrentRegionConfiguration() } } - @Test - public void testInstanceCredentialsEnabled() - throws Exception - { - Configuration config = new Configuration(false); - // instance credentials are enabled by default - - try (PrestoS3FileSystem fs = new 
PrestoS3FileSystem()) { - fs.initialize(new URI("s3n://test-bucket/"), config); - assertInstanceOf(getAwsCredentialsProvider(fs), InstanceProfileCredentialsProvider.class); - } - } - @Test public void testAssumeRoleCredentials() throws Exception { Configuration config = new Configuration(false); config.set(S3_IAM_ROLE, "role"); - config.setBoolean(S3_USE_INSTANCE_CREDENTIALS, false); try (PrestoS3FileSystem fs = new PrestoS3FileSystem()) { fs.initialize(new URI("s3n://test-bucket/"), config); @@ -171,7 +155,6 @@ public void testDefaultCredentials() throws Exception { Configuration config = new Configuration(false); - config.setBoolean(S3_USE_INSTANCE_CREDENTIALS, false); try (PrestoS3FileSystem fs = new PrestoS3FileSystem()) { fs.initialize(new URI("s3n://test-bucket/"), config); @@ -451,7 +434,6 @@ public void testCustomCredentialsProvider() throws Exception { Configuration config = new Configuration(false); - config.set(S3_USE_INSTANCE_CREDENTIALS, "false"); config.set(S3_CREDENTIALS_PROVIDER, TestCredentialsProvider.class.getName()); try (PrestoS3FileSystem fs = new PrestoS3FileSystem()) { fs.initialize(new URI("s3n://test-bucket/"), config); @@ -464,7 +446,6 @@ public void testCustomCredentialsClassCannotBeFound() throws Exception { Configuration config = new Configuration(false); - config.set(S3_USE_INSTANCE_CREDENTIALS, "false"); config.set(S3_CREDENTIALS_PROVIDER, "com.example.DoesNotExist"); try (PrestoS3FileSystem fs = new PrestoS3FileSystem()) { fs.initialize(new URI("s3n://test-bucket/"), config); From ad7631cccfd245b62c7a5959773d192c65bfe22e Mon Sep 17 00:00:00 2001 From: David Phillips Date: Sun, 15 Mar 2020 23:45:37 -0700 Subject: [PATCH 018/519] Remove config hive.metastore.glue.use-instance-credentials This is not needed as it is part of the default credentials chain. 
--- .../hive/metastore/glue/GlueHiveMetastore.java | 4 ---- .../metastore/glue/GlueHiveMetastoreConfig.java | 15 ++------------- .../glue/TestGlueHiveMetastoreConfig.java | 3 --- 3 files changed, 2 insertions(+), 20 deletions(-) diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/glue/GlueHiveMetastore.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/glue/GlueHiveMetastore.java index 3d4695970f2c..e4c1164c6204 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/glue/GlueHiveMetastore.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/glue/GlueHiveMetastore.java @@ -19,7 +19,6 @@ import com.amazonaws.auth.AWSStaticCredentialsProvider; import com.amazonaws.auth.BasicAWSCredentials; import com.amazonaws.auth.DefaultAWSCredentialsProviderChain; -import com.amazonaws.auth.InstanceProfileCredentialsProvider; import com.amazonaws.auth.STSAssumeRoleSessionCredentialsProvider; import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration; import com.amazonaws.services.glue.AWSGlueAsync; @@ -208,9 +207,6 @@ private static AWSCredentialsProvider getAwsCredentialsProvider(GlueHiveMetastor return new AWSStaticCredentialsProvider( new BasicAWSCredentials(config.getAwsAccessKey().get(), config.getAwsSecretKey().get())); } - if (config.isUseInstanceCredentials()) { - return InstanceProfileCredentialsProvider.getInstance(); - } if (config.getIamRole().isPresent()) { return new STSAssumeRoleSessionCredentialsProvider .Builder(config.getIamRole().get(), "presto-session") diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/glue/GlueHiveMetastoreConfig.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/glue/GlueHiveMetastoreConfig.java index 556e82cbc943..605229f00fa1 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/glue/GlueHiveMetastoreConfig.java +++ 
b/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/glue/GlueHiveMetastoreConfig.java @@ -16,12 +16,14 @@ import io.airlift.configuration.Config; import io.airlift.configuration.ConfigDescription; import io.airlift.configuration.ConfigSecuritySensitive; +import io.airlift.configuration.DefunctConfig; import javax.validation.constraints.Max; import javax.validation.constraints.Min; import java.util.Optional; +@DefunctConfig("hive.metastore.glue.use-instance-credentials") public class GlueHiveMetastoreConfig { private Optional glueRegion = Optional.empty(); @@ -33,7 +35,6 @@ public class GlueHiveMetastoreConfig private Optional awsAccessKey = Optional.empty(); private Optional awsSecretKey = Optional.empty(); private Optional awsCredentialsProvider = Optional.empty(); - private boolean useInstanceCredentials; private Optional catalogId = Optional.empty(); private int partitionSegments = 5; private int getPartitionThreads = 20; @@ -157,18 +158,6 @@ public GlueHiveMetastoreConfig setCatalogId(String catalogId) return this; } - public boolean isUseInstanceCredentials() - { - return useInstanceCredentials; - } - - @Config("hive.metastore.glue.use-instance-credentials") - public GlueHiveMetastoreConfig setUseInstanceCredentials(boolean useInstanceCredentials) - { - this.useInstanceCredentials = useInstanceCredentials; - return this; - } - public Optional getAwsCredentialsProvider() { return awsCredentialsProvider; diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/metastore/glue/TestGlueHiveMetastoreConfig.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/metastore/glue/TestGlueHiveMetastoreConfig.java index c24e6a344902..4dfb80e09241 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/metastore/glue/TestGlueHiveMetastoreConfig.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/metastore/glue/TestGlueHiveMetastoreConfig.java @@ -38,7 +38,6 @@ public void testDefaults() .setAwsSecretKey(null) 
.setAwsCredentialsProvider(null) .setCatalogId(null) - .setUseInstanceCredentials(false) .setPartitionSegments(5) .setGetPartitionThreads(20)); } @@ -57,7 +56,6 @@ public void testExplicitPropertyMapping() .put("hive.metastore.glue.aws-secret-key", "DEF") .put("hive.metastore.glue.aws-credentials-provider", "custom") .put("hive.metastore.glue.catalogid", "0123456789") - .put("hive.metastore.glue.use-instance-credentials", "true") .put("hive.metastore.glue.partitions-segments", "10") .put("hive.metastore.glue.get-partition-threads", "42") .build(); @@ -73,7 +71,6 @@ public void testExplicitPropertyMapping() .setAwsSecretKey("DEF") .setAwsCredentialsProvider("custom") .setCatalogId("0123456789") - .setUseInstanceCredentials(true) .setPartitionSegments(10) .setGetPartitionThreads(42); From 69709fcf6a0aa8e930a0739d18beaf2989d07805 Mon Sep 17 00:00:00 2001 From: David Phillips Date: Sun, 15 Mar 2020 23:48:46 -0700 Subject: [PATCH 019/519] Remove config elasticsearch.aws.use-instance-credentials This is not needed as it is part of the default credentials chain. --- .../src/main/sphinx/connector/elasticsearch.rst | 1 - .../elasticsearch/AwsSecurityConfig.java | 15 ++------------- .../elasticsearch/client/ElasticsearchClient.java | 4 ---- .../elasticsearch/TestAwsSecurityConfig.java | 7 ++----- 4 files changed, 4 insertions(+), 23 deletions(-) diff --git a/presto-docs/src/main/sphinx/connector/elasticsearch.rst b/presto-docs/src/main/sphinx/connector/elasticsearch.rst index a634e8277861..d36263a48534 100644 --- a/presto-docs/src/main/sphinx/connector/elasticsearch.rst +++ b/presto-docs/src/main/sphinx/connector/elasticsearch.rst @@ -273,6 +273,5 @@ Property Name Description ``elasticsearch.aws.region`` AWS region or the Elasticsearch endpoint. This option is required. ``elasticsearch.aws.access-key`` AWS access key to use to connect to the Elasticsearch domain. ``elasticsearch.aws.secret-key`` AWS secret key to use to connect to the Elasticsearch domain. 
-``elasticsearch.aws.use-instance-credentials`` Use the EC2 metadata service to retrieve API credentials. ================================================ ================================================================== diff --git a/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/AwsSecurityConfig.java b/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/AwsSecurityConfig.java index 2f67cfcd7de8..9190ad7cf744 100644 --- a/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/AwsSecurityConfig.java +++ b/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/AwsSecurityConfig.java @@ -14,16 +14,17 @@ package io.prestosql.elasticsearch; import io.airlift.configuration.Config; +import io.airlift.configuration.DefunctConfig; import javax.validation.constraints.NotNull; import java.util.Optional; +@DefunctConfig("elasticsearch.aws.use-instance-credentials") public class AwsSecurityConfig { private String accessKey; private String secretKey; - private boolean useAwsInstanceCredentials; private String region; @NotNull @@ -52,18 +53,6 @@ public AwsSecurityConfig setSecretKey(String key) return this; } - public boolean isUseInstanceCredentials() - { - return useAwsInstanceCredentials; - } - - @Config("elasticsearch.aws.use-instance-credentials") - public AwsSecurityConfig setUseInstanceCredentials(boolean use) - { - this.useAwsInstanceCredentials = use; - return this; - } - public String getRegion() { return region; diff --git a/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/client/ElasticsearchClient.java b/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/client/ElasticsearchClient.java index dac466f627cc..24b5c0a6d327 100644 --- a/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/client/ElasticsearchClient.java +++ b/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/client/ElasticsearchClient.java @@ -17,7 +17,6 @@ import com.amazonaws.auth.AWSStaticCredentialsProvider; import 
com.amazonaws.auth.BasicAWSCredentials; import com.amazonaws.auth.DefaultAWSCredentialsProviderChain; -import com.amazonaws.auth.InstanceProfileCredentialsProvider; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.NullNode; @@ -219,9 +218,6 @@ private static AWSCredentialsProvider getAwsCredentialsProvider(AwsSecurityConfi config.getAccessKey().get(), config.getSecretKey().get())); } - if (config.isUseInstanceCredentials()) { - return InstanceProfileCredentialsProvider.getInstance(); - } return DefaultAWSCredentialsProviderChain.getInstance(); } diff --git a/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/TestAwsSecurityConfig.java b/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/TestAwsSecurityConfig.java index 12d8142f9964..b6c4ca80c37c 100644 --- a/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/TestAwsSecurityConfig.java +++ b/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/TestAwsSecurityConfig.java @@ -30,8 +30,7 @@ public void testDefaults() assertRecordedDefaults(recordDefaults(AwsSecurityConfig.class) .setAccessKey(null) .setSecretKey(null) - .setRegion(null) - .setUseInstanceCredentials(false)); + .setRegion(null)); } @Test @@ -41,14 +40,12 @@ public void testExplicitPropertyMappings() .put("elasticsearch.aws.access-key", "access") .put("elasticsearch.aws.secret-key", "secret") .put("elasticsearch.aws.region", "region") - .put("elasticsearch.aws.use-instance-credentials", "true") .build(); AwsSecurityConfig expected = new AwsSecurityConfig() .setAccessKey("access") .setSecretKey("secret") - .setRegion("region") - .setUseInstanceCredentials(true); + .setRegion("region"); assertFullMapping(properties, expected); } From 6bf2649f482139f0d3204ed8cb2b81a24041f206 Mon Sep 17 00:00:00 2001 From: David Phillips Date: Sun, 15 Mar 2020 23:00:46 -0700 Subject: [PATCH 020/519] Add S3 security mapping --- 
.../src/main/sphinx/connector/hive.rst | 96 ++++++ .../plugin/hive/s3/HiveS3Module.java | 15 + .../plugin/hive/s3/S3SecurityMapping.java | 130 +++++++ .../hive/s3/S3SecurityMappingConfig.java | 81 +++++ ...3SecurityMappingConfigurationProvider.java | 143 ++++++++ .../plugin/hive/s3/S3SecurityMappings.java | 43 +++ .../plugin/hive/s3/TestS3SecurityMapping.java | 326 ++++++++++++++++++ .../hive/s3/TestS3SecurityMappingConfig.java | 57 +++ .../plugin/hive/s3/security-mapping.json | 53 +++ 9 files changed, 944 insertions(+) create mode 100644 presto-hive/src/main/java/io/prestosql/plugin/hive/s3/S3SecurityMapping.java create mode 100644 presto-hive/src/main/java/io/prestosql/plugin/hive/s3/S3SecurityMappingConfig.java create mode 100644 presto-hive/src/main/java/io/prestosql/plugin/hive/s3/S3SecurityMappingConfigurationProvider.java create mode 100644 presto-hive/src/main/java/io/prestosql/plugin/hive/s3/S3SecurityMappings.java create mode 100644 presto-hive/src/test/java/io/prestosql/plugin/hive/s3/TestS3SecurityMapping.java create mode 100644 presto-hive/src/test/java/io/prestosql/plugin/hive/s3/TestS3SecurityMappingConfig.java create mode 100644 presto-hive/src/test/resources/io/prestosql/plugin/hive/s3/security-mapping.json diff --git a/presto-docs/src/main/sphinx/connector/hive.rst b/presto-docs/src/main/sphinx/connector/hive.rst index 878d278d2b55..ff11569ebc29 100644 --- a/presto-docs/src/main/sphinx/connector/hive.rst +++ b/presto-docs/src/main/sphinx/connector/hive.rst @@ -429,6 +429,102 @@ or credentials for a specific use case (e.g., bucket/user specific credentials). This Hadoop configuration property must be set in the Hadoop configuration files referenced by the ``hive.config.resources`` Hive connector property. +S3 Security Mapping +^^^^^^^^^^^^^^^^^^^ + +Presto supports flexible security mapping for S3, allowing for separate +credentials or IAM roles for specific users or buckets/paths. 
The IAM role +for a specific query can be selected from a list of allowed roles by providing +it as an *extra credential*. + +Each security mapping entry may specify one or more match criteria. If multiple +criteria are specified, all criteria must match. Available match criteria: + +* ``user``: Regular expression to match against username. Example: ``alice|bob`` + +* ``group``: Regular expression to match against any of the groups that the user + belongs to. Example: ``finance|sales`` + +* ``prefix``: S3 URL prefix. It can specify an entire bucket or a path within a + bucket. The URL must start with ``s3://`` but will also match ``s3a`` or ``s3n``. + Example: ``s3://bucket-name/abc/xyz/`` + +The security mapping must provide one or more configuration settings: + +* ``accessKey`` and ``secretKey``: AWS access key and secret key. This overrides + any globally configured credentials, such as access key or instance credentials. + +* ``iamRole``: IAM role to use if no user provided role is specified as an + extra credential. This overrides any globally configured IAM role. This role + is allowed to be specified as an extra credential, although specifying it + explicitly has no effect, as it would be used anyway. + +* ``allowedIamRoles``: IAM roles that are allowed to be specified as an extra + credential. This is useful because a particular AWS account may have permissions + to use many roles, but a specific user should only be allowed to use a subset + of those roles. + +The security mapping entries are processed in the order listed in the configuration +file. More specific mappings should thus be specified before less specific mappings. +For example, the mapping list might have URL prefix ``s3://abc/xyz/`` followed by +``s3://abc/`` to allow different configuration for a specific path within a bucket +than for other paths within the bucket. You can set default configuration by not +including any match criteria for the last entry in the list. 
+ +Example JSON configuration file: + +.. code-block:: json + + { + "mappings": [ + { + "prefix": "s3://bucket-name/abc/", + "iamRole": "arn:aws:iam::123456789101:role/test_path" + }, + { + "user": "bob|charlie", + "iamRole": "arn:aws:iam::123456789101:role/test_default", + "allowedIamRoles": [ + "arn:aws:iam::123456789101:role/test1", + "arn:aws:iam::123456789101:role/test2", + "arn:aws:iam::123456789101:role/test3" + ] + }, + { + "prefix": "s3://special-bucket/", + "accessKey": "AKIAxxxaccess", + "secretKey": "iXbXxxxsecret" + }, + { + "user": "test.*", + "iamRole": "arn:aws:iam::123456789101:role/test_users" + }, + { + "group": "finance", + "iamRole": "arn:aws:iam::123456789101:role/finance_users" + }, + { + "iamRole": "arn:aws:iam::123456789101:role/default" + } + ] + } + +======================================================= ================================================================= +Property Name Description +======================================================= ================================================================= +``hive.s3.security-mapping.config-file`` The JSON configuration file containing security mappings. + +``hive.s3.security-mapping.iam-role-credential-name`` The name of the *extra credential* used to provide the IAM role. + +``hive.s3.security-mapping.refresh-period`` How often to refresh the security mapping configuration. + +``hive.s3.security-mapping.colon-replacement`` The character or characters to be used in place of the colon + (``:``) character when specifying an IAM role name as an + extra credential. Any instances of this replacement value in the + extra credential value will be converted to a colon. Choose a + value that is not used in any of your IAM ARNs. 
+======================================================= ================================================================= + Tuning Properties ^^^^^^^^^^^^^^^^^ diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/HiveS3Module.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/HiveS3Module.java index ee2ec9dc5f75..68ee66dd6bce 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/HiveS3Module.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/HiveS3Module.java @@ -19,9 +19,12 @@ import io.airlift.configuration.AbstractConfigurationAwareModule; import io.prestosql.plugin.base.CatalogName; import io.prestosql.plugin.hive.ConfigurationInitializer; +import io.prestosql.plugin.hive.DynamicConfigurationProvider; +import io.prestosql.plugin.hive.HiveConfig; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.JavaUtils; +import static com.google.common.base.Preconditions.checkArgument; import static com.google.inject.multibindings.Multibinder.newSetBinder; import static io.airlift.configuration.ConfigBinder.configBinder; import static org.weakref.jmx.guice.ExportBinder.newExporter; @@ -36,6 +39,8 @@ protected void setup(Binder binder) { S3FileSystemType type = buildConfigObject(HiveS3TypeConfig.class).getS3FileSystemType(); if (type == S3FileSystemType.PRESTO) { + bindSecurityMapping(binder); + newSetBinder(binder, ConfigurationInitializer.class).addBinding().to(PrestoS3ConfigurationInitializer.class).in(Scopes.SINGLETON); configBinder(binder).bindConfig(HiveS3Config.class); @@ -56,6 +61,16 @@ else if (type == S3FileSystemType.HADOOP_DEFAULT) { } } + private void bindSecurityMapping(Binder binder) + { + if (buildConfigObject(S3SecurityMappingConfig.class).getConfigFile().isPresent()) { + checkArgument(!buildConfigObject(HiveConfig.class).isS3SelectPushdownEnabled(), "S3 security mapping is not compatible with S3 Select pushdown"); + + newSetBinder(binder, 
DynamicConfigurationProvider.class).addBinding() + .to(S3SecurityMappingConfigurationProvider.class).in(Scopes.SINGLETON); + } + } + private static void validateEmrFsClass() { // verify that the class exists diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/S3SecurityMapping.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/S3SecurityMapping.java new file mode 100644 index 000000000000..8f2c13a06d72 --- /dev/null +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/S3SecurityMapping.java @@ -0,0 +1,130 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.plugin.hive.s3; + +import com.amazonaws.auth.BasicAWSCredentials; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.common.collect.ImmutableList; +import io.prestosql.spi.security.ConnectorIdentity; + +import java.net.URI; +import java.util.Collection; +import java.util.List; +import java.util.Optional; +import java.util.function.Predicate; +import java.util.regex.Pattern; + +import static com.google.common.base.MoreObjects.toStringHelper; +import static com.google.common.base.Preconditions.checkArgument; +import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.extractBucketName; +import static java.util.Objects.requireNonNull; + +public class S3SecurityMapping +{ + private final Predicate user; + private final Predicate> group; + private final Predicate prefix; + private final List allowedIamRoles; + private final Optional iamRole; + private final Optional credentials; + + @JsonCreator + public S3SecurityMapping( + @JsonProperty("user") Optional user, + @JsonProperty("group") Optional group, + @JsonProperty("prefix") Optional prefix, + @JsonProperty("iamRole") Optional iamRole, + @JsonProperty("allowedIamRoles") Optional> allowedIamRoles, + @JsonProperty("accessKey") Optional accessKey, + @JsonProperty("secretKey") Optional secretKey) + { + this.user = requireNonNull(user, "user is null") + .map(S3SecurityMapping::toPredicate) + .orElse(x -> true); + this.group = requireNonNull(group, "group is null") + .map(S3SecurityMapping::toPredicate) + .map(S3SecurityMapping::anyMatch) + .orElse(x -> true); + this.prefix = requireNonNull(prefix, "prefix is null") + .map(S3SecurityMapping::prefixPredicate) + .orElse(x -> true); + + this.iamRole = requireNonNull(iamRole, "iamRole is null"); + + this.allowedIamRoles = requireNonNull(allowedIamRoles, "allowedIamRoles is null") + .orElse(ImmutableList.of()); + + requireNonNull(accessKey, "accessKey is null"); + 
requireNonNull(secretKey, "secretKey is null"); + checkArgument(accessKey.isPresent() == secretKey.isPresent(), "accessKey and secretKey must be provided together"); + this.credentials = accessKey.map(access -> new BasicAWSCredentials(access, secretKey.get())); + + checkArgument(!this.allowedIamRoles.isEmpty() || iamRole.isPresent() || credentials.isPresent(), "must provide role and/or credentials"); + } + + public boolean matches(ConnectorIdentity identity, URI uri) + { + return user.test(identity.getUser()) + && group.test(identity.getGroups()) + && prefix.test(uri); + } + + public Optional getIamRole() + { + return iamRole; + } + + public List getAllowedIamRoles() + { + return allowedIamRoles; + } + + public Optional getCredentials() + { + return credentials; + } + + @Override + public String toString() + { + return toStringHelper(this) + .add("user", user) + .add("group", group) + .add("prefix", prefix) + .add("iamRole", iamRole) + .add("allowedIamRoles", allowedIamRoles) + .add("credentials", credentials) + .toString(); + } + + private static Predicate prefixPredicate(URI prefix) + { + checkArgument("s3".equals(prefix.getScheme()), "prefix URI scheme is not 's3': %s", prefix); + checkArgument(prefix.getQuery() == null, "prefix URI must not contain query: %s", prefix); + checkArgument(prefix.getFragment() == null, "prefix URI must not contain fragment: %s", prefix); + return value -> extractBucketName(prefix).equals(extractBucketName(value)) && + value.getPath().startsWith(prefix.getPath()); + } + + private static Predicate toPredicate(Pattern pattern) + { + return value -> pattern.matcher(value).matches(); + } + + private static Predicate> anyMatch(Predicate predicate) + { + return values -> values.stream().anyMatch(predicate); + } +} diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/S3SecurityMappingConfig.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/S3SecurityMappingConfig.java new file mode 100644 index 
000000000000..07937d0e5888 --- /dev/null +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/S3SecurityMappingConfig.java @@ -0,0 +1,81 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.plugin.hive.s3; + +import io.airlift.configuration.Config; +import io.airlift.configuration.ConfigDescription; +import io.airlift.units.Duration; + +import java.io.File; +import java.util.Optional; + +public class S3SecurityMappingConfig +{ + private File configFile; + private String roleCredentialName; + private Duration refreshPeriod; + private String colonReplacement; + + public Optional getConfigFile() + { + return Optional.ofNullable(configFile); + } + + @Config("hive.s3.security-mapping.config-file") + @ConfigDescription("JSON configuration file containing security mappings") + public S3SecurityMappingConfig setConfigFile(File configFile) + { + this.configFile = configFile; + return this; + } + + public Optional getRoleCredentialName() + { + return Optional.ofNullable(roleCredentialName); + } + + @Config("hive.s3.security-mapping.iam-role-credential-name") + @ConfigDescription("Name of the extra credential used to provide IAM role") + public S3SecurityMappingConfig setRoleCredentialName(String roleCredentialName) + { + this.roleCredentialName = roleCredentialName; + return this; + } + + public Optional getRefreshPeriod() + { + return Optional.ofNullable(refreshPeriod); + } + + @Config("hive.s3.security-mapping.refresh-period") + 
@ConfigDescription("How often to refresh the security mapping configuration") + public S3SecurityMappingConfig setRefreshPeriod(Duration refreshPeriod) + { + this.refreshPeriod = refreshPeriod; + return this; + } + + public Optional getColonReplacement() + { + return Optional.ofNullable(colonReplacement); + } + + @Config("hive.s3.security-mapping.colon-replacement") + @ConfigDescription("Value used in place of colon for IAM role name in extra credentials") + public S3SecurityMappingConfig setColonReplacement(String colonReplacement) + { + this.colonReplacement = colonReplacement; + return this; + } +} diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/S3SecurityMappingConfigurationProvider.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/S3SecurityMappingConfigurationProvider.java new file mode 100644 index 000000000000..5c760b96f3f2 --- /dev/null +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/S3SecurityMappingConfigurationProvider.java @@ -0,0 +1,143 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.plugin.hive.s3; + +import com.google.common.base.Suppliers; +import com.google.common.collect.ImmutableSet; +import com.google.common.hash.Hasher; +import com.google.common.hash.Hashing; +import io.airlift.log.Logger; +import io.prestosql.plugin.hive.DynamicConfigurationProvider; +import io.prestosql.plugin.hive.HdfsEnvironment.HdfsContext; +import io.prestosql.spi.security.AccessDeniedException; +import org.apache.hadoop.conf.Configuration; + +import javax.inject.Inject; + +import java.io.File; +import java.net.URI; +import java.util.Optional; +import java.util.Set; +import java.util.function.Supplier; + +import static com.google.common.base.Verify.verify; +import static io.prestosql.plugin.base.util.JsonUtils.parseJson; +import static io.prestosql.plugin.hive.DynamicConfigurationProvider.setCacheKey; +import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_ACCESS_KEY; +import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_IAM_ROLE; +import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_SECRET_KEY; +import static java.nio.charset.StandardCharsets.UTF_8; +import static java.util.Objects.requireNonNull; +import static java.util.concurrent.TimeUnit.MILLISECONDS; + +public class S3SecurityMappingConfigurationProvider + implements DynamicConfigurationProvider +{ + private static final Logger log = Logger.get(S3SecurityMappingConfigurationProvider.class); + + private static final Set SCHEMES = ImmutableSet.of("s3", "s3a", "s3n"); + + private final Supplier mappings; + private final Optional roleCredentialName; + private final Optional colonReplacement; + + @Inject + public S3SecurityMappingConfigurationProvider(S3SecurityMappingConfig config) + { + this(getMappings(config), config.getRoleCredentialName(), config.getColonReplacement()); + } + + private static Supplier getMappings(S3SecurityMappingConfig config) + { + File configFile = config.getConfigFile().orElseThrow(() -> new IllegalArgumentException("config file 
not set")); + Supplier supplier = () -> parseJson(configFile.toPath(), S3SecurityMappings.class); + if (!config.getRefreshPeriod().isPresent()) { + return Suppliers.memoize(supplier::get); + } + return Suppliers.memoizeWithExpiration( + () -> { + log.info("Refreshing S3 security mapping configuration from %s", configFile); + return supplier.get(); + }, + config.getRefreshPeriod().get().toMillis(), + MILLISECONDS); + } + + public S3SecurityMappingConfigurationProvider(Supplier mappings, Optional roleCredentialName, Optional colonReplacement) + { + this.mappings = requireNonNull(mappings, "mappings is null"); + this.roleCredentialName = requireNonNull(roleCredentialName, "roleCredentialName is null"); + this.colonReplacement = requireNonNull(colonReplacement, "colonReplacement is null"); + } + + @Override + public void updateConfiguration(Configuration configuration, HdfsContext context, URI uri) + { + if (!SCHEMES.contains(uri.getScheme())) { + return; + } + + S3SecurityMapping mapping = mappings.get().getMapping(context.getIdentity(), uri) + .orElseThrow(() -> new AccessDeniedException("No matching S3 security mapping")); + + Hasher hasher = Hashing.sha256().newHasher(); + + mapping.getCredentials().ifPresent(credentials -> { + configuration.set(S3_ACCESS_KEY, credentials.getAWSAccessKeyId()); + configuration.set(S3_SECRET_KEY, credentials.getAWSSecretKey()); + hasher.putString(credentials.getAWSAccessKeyId(), UTF_8); + hasher.putString(credentials.getAWSSecretKey(), UTF_8); + }); + + selectRole(mapping, context).ifPresent(role -> { + configuration.set(S3_IAM_ROLE, role); + hasher.putString(role, UTF_8); + }); + + setCacheKey(configuration, hasher.hash().toString()); + } + + private Optional selectRole(S3SecurityMapping mapping, HdfsContext context) + { + Optional optionalSelected = getRoleFromExtraCredential(context); + + if (!optionalSelected.isPresent()) { + if (!mapping.getAllowedIamRoles().isEmpty() && !mapping.getIamRole().isPresent()) { + throw new 
AccessDeniedException("No S3 role selected and mapping has no default role"); + } + verify(mapping.getIamRole().isPresent() || mapping.getCredentials().isPresent(), "mapping must have role or credential"); + return mapping.getIamRole(); + } + + String selected = optionalSelected.get(); + + // selected role must match default or be allowed + if (!selected.equals(mapping.getIamRole().orElse(null)) && + !mapping.getAllowedIamRoles().contains(selected)) { + throw new AccessDeniedException("Selected S3 role is not allowed: " + selected); + } + + return optionalSelected; + } + + private Optional getRoleFromExtraCredential(HdfsContext context) + { + Optional extraCredentialRole = roleCredentialName.map(name -> context.getIdentity().getExtraCredentials().get(name)); + + if (colonReplacement.isPresent()) { + return extraCredentialRole.map(role -> role.replace(colonReplacement.get(), ":")); + } + return extraCredentialRole; + } +} diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/S3SecurityMappings.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/S3SecurityMappings.java new file mode 100644 index 000000000000..180010b09edb --- /dev/null +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/S3SecurityMappings.java @@ -0,0 +1,43 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.plugin.hive.s3; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.common.collect.ImmutableList; +import io.prestosql.spi.security.ConnectorIdentity; + +import java.net.URI; +import java.util.List; +import java.util.Optional; + +import static java.util.Objects.requireNonNull; + +public class S3SecurityMappings +{ + private final List mappings; + + @JsonCreator + public S3SecurityMappings(@JsonProperty("mappings") List mappings) + { + this.mappings = ImmutableList.copyOf(requireNonNull(mappings, "mappings is null")); + } + + public Optional getMapping(ConnectorIdentity identity, URI uri) + { + return mappings.stream() + .filter(mapping -> mapping.matches(identity, uri)) + .findFirst(); + } +} diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/s3/TestS3SecurityMapping.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/s3/TestS3SecurityMapping.java new file mode 100644 index 000000000000..0308ac761e46 --- /dev/null +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/s3/TestS3SecurityMapping.java @@ -0,0 +1,326 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.plugin.hive.s3; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import io.prestosql.plugin.hive.DynamicConfigurationProvider; +import io.prestosql.plugin.hive.HdfsEnvironment.HdfsContext; +import io.prestosql.plugin.hive.HiveConfig; +import io.prestosql.plugin.hive.HiveSessionProperties; +import io.prestosql.spi.connector.ConnectorSession; +import io.prestosql.spi.security.AccessDeniedException; +import io.prestosql.spi.security.ConnectorIdentity; +import io.prestosql.testing.TestingConnectorSession; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.Path; +import org.testng.annotations.Test; + +import java.io.File; +import java.util.Optional; +import java.util.Set; + +import static com.google.common.io.Resources.getResource; +import static io.prestosql.plugin.hive.HiveTestUtils.getHiveSessionProperties; +import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_ACCESS_KEY; +import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_IAM_ROLE; +import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_SECRET_KEY; +import static io.prestosql.plugin.hive.s3.TestS3SecurityMapping.MappingResult.credentials; +import static io.prestosql.plugin.hive.s3.TestS3SecurityMapping.MappingResult.role; +import static io.prestosql.plugin.hive.s3.TestS3SecurityMapping.MappingSelector.empty; +import static io.prestosql.plugin.hive.s3.TestS3SecurityMapping.MappingSelector.path; +import static java.util.Objects.requireNonNull; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNull; + +public class TestS3SecurityMapping +{ + private static final HiveSessionProperties HIVE_SESSION_PROPERTIES = getHiveSessionProperties(new HiveConfig()); + + private static final String IAM_ROLE_CREDENTIAL_NAME = 
"IAM_ROLE_CREDENTIAL_NAME"; + private static final String DEFAULT_PATH = "s3://default"; + private static final String DEFAULT_USER = "testuser"; + + @Test + public void testMapping() + { + S3SecurityMappingConfig mappingConfig = new S3SecurityMappingConfig() + .setConfigFile(new File(getResource(getClass(), "security-mapping.json").getPath())) + .setRoleCredentialName(IAM_ROLE_CREDENTIAL_NAME) + .setColonReplacement("#"); + + DynamicConfigurationProvider provider = new S3SecurityMappingConfigurationProvider(mappingConfig); + + // matches prefix -- mapping provides credentials + assertMapping( + provider, + path("s3://foo/data/test.csv"), + credentials("AKIAxxxaccess", "iXbXxxxsecret")); + + // matches prefix exactly -- mapping provides credentials + assertMapping( + provider, + path("s3://foo"), + credentials("AKIAxxxaccess", "iXbXxxxsecret")); + + // no role selected and mapping has no default role + assertMappingFails( + provider, + path("s3://bar/test"), + "No S3 role selected and mapping has no default role"); + + // matches prefix and user selected one of allowed roles + assertMapping( + provider, + path("s3://bar/test").withExtraCredentialIamRole("arn:aws:iam::123456789101:role/allow_bucket_2"), + role("arn:aws:iam::123456789101:role/allow_bucket_2")); + + // user selected role not in allowed list + assertMappingFails( + provider, + path("s3://bar/test").withUser("bob").withExtraCredentialIamRole("bogus"), + "Selected S3 role is not allowed: bogus"); + + // verify that colon replacement works + String roleWithoutColon = "arn#aws#iam##123456789101#role/allow_bucket_2"; + assertThat(roleWithoutColon).doesNotContain(":"); + assertMapping( + provider, + path("s3://bar/test").withExtraCredentialIamRole(roleWithoutColon), + role("arn:aws:iam::123456789101:role/allow_bucket_2")); + + // matches prefix -- default role used + assertMapping( + provider, + path("s3://bar/abc/data/test.csv"), + role("arn:aws:iam::123456789101:role/allow_path")); + + // matches empty 
rule at end -- default role used + assertMapping( + provider, + empty(), + role("arn:aws:iam::123456789101:role/default")); + + // matches prefix -- default role used + assertMapping( + provider, + path("s3://xyz/default"), + role("arn:aws:iam::123456789101:role/allow_default")); + + // matches prefix and user selected one of allowed roles + assertMapping( + provider, + path("s3://xyz/foo").withExtraCredentialIamRole("arn:aws:iam::123456789101:role/allow_foo"), + role("arn:aws:iam::123456789101:role/allow_foo")); + + // matches prefix and user selected one of allowed roles + assertMapping( + provider, + path("s3://xyz/bar").withExtraCredentialIamRole("arn:aws:iam::123456789101:role/allow_bar"), + role("arn:aws:iam::123456789101:role/allow_bar")); + + // matches user -- default role used + assertMapping( + provider, + empty().withUser("alice"), + role("alice_role")); + + // matches user and user selected default role + assertMapping( + provider, + empty().withUser("alice").withExtraCredentialIamRole("alice_role"), + role("alice_role")); + + // matches user and selected role not allowed + assertMappingFails( + provider, + empty().withUser("alice").withExtraCredentialIamRole("bogus"), + "Selected S3 role is not allowed: bogus"); + + // verify that first matching rule is used + // matches prefix earlier in file and selected role not allowed + assertMappingFails( + provider, + path("s3://bar/test").withUser("alice").withExtraCredentialIamRole("alice_role"), + "Selected S3 role is not allowed: alice_role"); + + // matches user regex -- default role used + assertMapping( + provider, + empty().withUser("bob"), + role("bob_and_charlie_role")); + + // matches group -- default role used + assertMapping( + provider, + empty().withGroups("finance"), + role("finance_role")); + + // matches group regex -- default role used + assertMapping( + provider, + empty().withGroups("eng"), + role("hr_and_eng_group")); + + // verify that all constraints must match + // matches user but not 
group -- uses empty mapping at end + assertMapping( + provider, + empty().withUser("danny"), + role("arn:aws:iam::123456789101:role/default")); + + // matches group but not user -- uses empty mapping at end + assertMapping( + provider, + empty().withGroups("hq"), + role("arn:aws:iam::123456789101:role/default")); + + // matches user and group + assertMapping( + provider, + empty().withUser("danny").withGroups("hq"), + role("danny_hq_role")); + } + + private static void assertMapping(DynamicConfigurationProvider provider, MappingSelector selector, MappingResult mappingResult) + { + Configuration configuration = new Configuration(false); + + assertNull(configuration.get(S3_ACCESS_KEY)); + assertNull(configuration.get(S3_SECRET_KEY)); + assertNull(configuration.get(S3_IAM_ROLE)); + + applyMapping(provider, selector, configuration); + + assertEquals(configuration.get(S3_ACCESS_KEY), mappingResult.getAccessKey().orElse(null)); + assertEquals(configuration.get(S3_SECRET_KEY), mappingResult.getSecretKey().orElse(null)); + assertEquals(configuration.get(S3_IAM_ROLE), mappingResult.getRole().orElse(null)); + } + + private static void assertMappingFails(DynamicConfigurationProvider provider, MappingSelector selector, String message) + { + Configuration configuration = new Configuration(false); + + assertThatThrownBy(() -> applyMapping(provider, selector, configuration)) + .isInstanceOf(AccessDeniedException.class) + .hasMessage("Access Denied: " + message); + } + + private static void applyMapping(DynamicConfigurationProvider provider, MappingSelector selector, Configuration configuration) + { + provider.updateConfiguration(configuration, selector.getHdfsContext(), selector.getPath().toUri()); + } + + public static class MappingSelector + { + public static MappingSelector empty() + { + return path(DEFAULT_PATH); + } + + public static MappingSelector path(String path) + { + return new MappingSelector(DEFAULT_USER, ImmutableSet.of(), new Path(path), Optional.empty()); + } + + 
private final String user; + private final Set groups; + private final Path path; + private final Optional extraCredentialIamRole; + + private MappingSelector(String user, Set groups, Path path, Optional extraCredentialIamRole) + { + this.user = requireNonNull(user, "user is null"); + this.groups = ImmutableSet.copyOf(requireNonNull(groups, "groups is null")); + this.path = requireNonNull(path, "path is null"); + this.extraCredentialIamRole = requireNonNull(extraCredentialIamRole, "extraCredentialIamRole is null"); + } + + public Path getPath() + { + return path; + } + + public MappingSelector withExtraCredentialIamRole(String role) + { + return new MappingSelector(user, groups, path, Optional.of(role)); + } + + public MappingSelector withUser(String user) + { + return new MappingSelector(user, groups, path, extraCredentialIamRole); + } + + public MappingSelector withGroups(String... groups) + { + return new MappingSelector(user, ImmutableSet.copyOf(groups), path, extraCredentialIamRole); + } + + public HdfsContext getHdfsContext() + { + ImmutableMap.Builder extraCredentials = ImmutableMap.builder(); + extraCredentialIamRole.ifPresent(role -> extraCredentials.put(IAM_ROLE_CREDENTIAL_NAME, role)); + + ConnectorSession connectorSession = TestingConnectorSession.builder() + .setIdentity(ConnectorIdentity.forUser(user) + .withGroups(groups) + .withExtraCredentials(extraCredentials.build()) + .build()) + .setPropertyMetadata(HIVE_SESSION_PROPERTIES.getSessionProperties()) + .build(); + return new HdfsContext(connectorSession, "schema"); + } + } + + public static class MappingResult + { + public static MappingResult credentials(String accessKey, String secretKey) + { + return new MappingResult(Optional.of(accessKey), Optional.of(secretKey), Optional.empty()); + } + + public static MappingResult role(String role) + { + return new MappingResult(Optional.empty(), Optional.empty(), Optional.of(role)); + } + + private final Optional accessKey; + private final Optional 
secretKey; + private final Optional role; + + private MappingResult(Optional accessKey, Optional secretKey, Optional role) + { + this.accessKey = requireNonNull(accessKey, "accessKey is null"); + this.secretKey = requireNonNull(secretKey, "secretKey is null"); + this.role = requireNonNull(role, "role is null"); + } + + public Optional getAccessKey() + { + return accessKey; + } + + public Optional getSecretKey() + { + return secretKey; + } + + public Optional getRole() + { + return role; + } + } +} diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/s3/TestS3SecurityMappingConfig.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/s3/TestS3SecurityMappingConfig.java new file mode 100644 index 000000000000..b46b36079e92 --- /dev/null +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/s3/TestS3SecurityMappingConfig.java @@ -0,0 +1,57 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.plugin.hive.s3; + +import com.google.common.collect.ImmutableMap; +import io.airlift.units.Duration; +import org.testng.annotations.Test; + +import java.io.File; +import java.util.Map; + +import static io.airlift.configuration.testing.ConfigAssertions.assertFullMapping; +import static io.airlift.configuration.testing.ConfigAssertions.assertRecordedDefaults; +import static io.airlift.configuration.testing.ConfigAssertions.recordDefaults; + +public class TestS3SecurityMappingConfig +{ + @Test + public void testDefaults() + { + assertRecordedDefaults(recordDefaults(S3SecurityMappingConfig.class) + .setConfigFile(null) + .setRoleCredentialName(null) + .setRefreshPeriod(null) + .setColonReplacement(null)); + } + + @Test + public void testExplicitPropertyMappings() + { + Map properties = new ImmutableMap.Builder() + .put("hive.s3.security-mapping.config-file", "/test/mapping.json") + .put("hive.s3.security-mapping.iam-role-credential-name", "credential-name") + .put("hive.s3.security-mapping.refresh-period", "1s") + .put("hive.s3.security-mapping.colon-replacement", "#") + .build(); + + S3SecurityMappingConfig expected = new S3SecurityMappingConfig() + .setConfigFile(new File("/test/mapping.json")) + .setRoleCredentialName("credential-name") + .setRefreshPeriod(Duration.valueOf("1s")) + .setColonReplacement("#"); + + assertFullMapping(properties, expected); + } +} diff --git a/presto-hive/src/test/resources/io/prestosql/plugin/hive/s3/security-mapping.json b/presto-hive/src/test/resources/io/prestosql/plugin/hive/s3/security-mapping.json new file mode 100644 index 000000000000..6496f9a974aa --- /dev/null +++ b/presto-hive/src/test/resources/io/prestosql/plugin/hive/s3/security-mapping.json @@ -0,0 +1,53 @@ +{ + "mappings": [ + { + "prefix": "s3://bar/abc", + "iamRole": "arn:aws:iam::123456789101:role/allow_path" + }, + { + "prefix": "s3://bar", + "allowedIamRoles": [ + "arn:aws:iam::123456789101:role/allow_bucket_1", + 
"arn:aws:iam::123456789101:role/allow_bucket_2", + "arn:aws:iam::123456789101:role/allow_bucket_3" + ] + }, + { + "prefix": "s3://xyz", + "iamRole": "arn:aws:iam::123456789101:role/allow_default", + "allowedIamRoles": [ + "arn:aws:iam::123456789101:role/allow_foo", + "arn:aws:iam::123456789101:role/allow_bar" + ] + }, + { + "prefix": "s3://foo", + "accessKey": "AKIAxxxaccess", + "secretKey": "iXbXxxxsecret" + }, + { + "user": "alice", + "iamRole": "alice_role" + }, + { + "user": "bob|charlie", + "iamRole": "bob_and_charlie_role" + }, + { + "group": "finance", + "iamRole": "finance_role" + }, + { + "group": "hr|eng", + "iamRole": "hr_and_eng_group" + }, + { + "user": "danny", + "group": "hq", + "iamRole": "danny_hq_role" + }, + { + "iamRole": "arn:aws:iam::123456789101:role/default" + } + ] +} From 20f15139d5a1858601d54ed633dabcb629a10506 Mon Sep 17 00:00:00 2001 From: Karol Sobczak Date: Wed, 1 Apr 2020 13:01:23 +0200 Subject: [PATCH 021/519] Fix Glue config null check --- .../plugin/hive/metastore/glue/GlueHiveMetastore.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/glue/GlueHiveMetastore.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/glue/GlueHiveMetastore.java index e4c1164c6204..6dc486f38ac2 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/glue/GlueHiveMetastore.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/glue/GlueHiveMetastore.java @@ -168,9 +168,10 @@ public GlueHiveMetastore( GlueColumnStatisticsProvider columnStatisticsProvider, @ForGlueHiveMetastore Executor executor) { + requireNonNull(glueConfig, "glueConfig is null"); this.hdfsEnvironment = requireNonNull(hdfsEnvironment, "hdfsEnvironment is null"); this.hdfsContext = new HdfsContext(ConnectorIdentity.ofUser(DEFAULT_METASTORE_USER)); - this.glueClient = requireNonNull(createAsyncGlueClient(glueConfig), "glueClient is null"); + this.glueClient = 
createAsyncGlueClient(glueConfig); this.defaultDir = glueConfig.getDefaultWarehouseDir(); this.catalogId = glueConfig.getCatalogId().orElse(null); this.partitionSegments = glueConfig.getPartitionSegments(); From 6c9adf57de0899f1ab23e93fead9e745467c6fd4 Mon Sep 17 00:00:00 2001 From: Karol Sobczak Date: Wed, 1 Apr 2020 13:01:47 +0200 Subject: [PATCH 022/519] Static import newOptionalBinder --- .../plugin/hive/metastore/glue/GlueMetastoreModule.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/glue/GlueMetastoreModule.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/glue/GlueMetastoreModule.java index b3657d7203e7..a9860b417379 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/glue/GlueMetastoreModule.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/glue/GlueMetastoreModule.java @@ -18,7 +18,6 @@ import com.google.inject.Scopes; import com.google.inject.Singleton; import com.google.inject.multibindings.Multibinder; -import com.google.inject.multibindings.OptionalBinder; import io.airlift.concurrent.BoundedExecutor; import io.airlift.configuration.AbstractConfigurationAwareModule; import io.prestosql.plugin.base.CatalogName; @@ -35,6 +34,7 @@ import static com.google.common.util.concurrent.MoreExecutors.directExecutor; import static com.google.inject.multibindings.Multibinder.newSetBinder; +import static com.google.inject.multibindings.OptionalBinder.newOptionalBinder; import static io.airlift.concurrent.Threads.daemonThreadsNamed; import static io.airlift.configuration.ConfigBinder.configBinder; import static java.util.concurrent.Executors.newCachedThreadPool; @@ -48,7 +48,7 @@ protected void setup(Binder binder) { configBinder(binder).bindConfig(GlueHiveMetastoreConfig.class); - OptionalBinder.newOptionalBinder(binder, GlueColumnStatisticsProvider.class) + newOptionalBinder(binder, GlueColumnStatisticsProvider.class) 
.setDefault().to(DisabledGlueColumnStatisticsProvider.class).in(Scopes.SINGLETON); if (buildConfigObject(HiveConfig.class).getRecordingPath() != null) { From e70158597a72250860c75a25367430f17003bc14 Mon Sep 17 00:00:00 2001 From: Karol Sobczak Date: Tue, 22 Jan 2019 14:59:27 +0100 Subject: [PATCH 023/519] Add support for custom Glue request handlers --- .../plugin/hive/metastore/glue/GlueHiveMetastore.java | 11 ++++++++--- .../hive/metastore/glue/GlueMetastoreModule.java | 4 ++++ .../hive/metastore/glue/TestHiveGlueMetastore.java | 2 +- 3 files changed, 13 insertions(+), 4 deletions(-) diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/glue/GlueHiveMetastore.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/glue/GlueHiveMetastore.java index 6dc486f38ac2..65c75f0b1f22 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/glue/GlueHiveMetastore.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/glue/GlueHiveMetastore.java @@ -21,6 +21,7 @@ import com.amazonaws.auth.DefaultAWSCredentialsProviderChain; import com.amazonaws.auth.STSAssumeRoleSessionCredentialsProvider; import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration; +import com.amazonaws.handlers.RequestHandler2; import com.amazonaws.services.glue.AWSGlueAsync; import com.amazonaws.services.glue.AWSGlueAsyncClientBuilder; import com.amazonaws.services.glue.model.AlreadyExistsException; @@ -166,12 +167,13 @@ public GlueHiveMetastore( HdfsEnvironment hdfsEnvironment, GlueHiveMetastoreConfig glueConfig, GlueColumnStatisticsProvider columnStatisticsProvider, - @ForGlueHiveMetastore Executor executor) + @ForGlueHiveMetastore Executor executor, + @ForGlueHiveMetastore Optional requestHandler) { requireNonNull(glueConfig, "glueConfig is null"); this.hdfsEnvironment = requireNonNull(hdfsEnvironment, "hdfsEnvironment is null"); this.hdfsContext = new HdfsContext(ConnectorIdentity.ofUser(DEFAULT_METASTORE_USER)); - 
this.glueClient = createAsyncGlueClient(glueConfig); + this.glueClient = createAsyncGlueClient(glueConfig, requestHandler); this.defaultDir = glueConfig.getDefaultWarehouseDir(); this.catalogId = glueConfig.getCatalogId().orElse(null); this.partitionSegments = glueConfig.getPartitionSegments(); @@ -179,11 +181,14 @@ public GlueHiveMetastore( this.columnStatisticsProvider = requireNonNull(columnStatisticsProvider, "columnStatisticsProvider is null"); } - private static AWSGlueAsync createAsyncGlueClient(GlueHiveMetastoreConfig config) + private static AWSGlueAsync createAsyncGlueClient(GlueHiveMetastoreConfig config, Optional requestHandler) { ClientConfiguration clientConfig = new ClientConfiguration().withMaxConnections(config.getMaxGlueConnections()); AWSGlueAsyncClientBuilder asyncGlueClientBuilder = AWSGlueAsyncClientBuilder.standard() .withClientConfiguration(clientConfig); + + requestHandler.ifPresent(asyncGlueClientBuilder::setRequestHandlers); + if (config.getGlueEndpointUrl().isPresent()) { checkArgument(config.getGlueRegion().isPresent(), "Glue region must be set when Glue endpoint URL is set"); asyncGlueClientBuilder.setEndpointConfiguration(new EndpointConfiguration( diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/glue/GlueMetastoreModule.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/glue/GlueMetastoreModule.java index a9860b417379..14f9dda45a16 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/glue/GlueMetastoreModule.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/glue/GlueMetastoreModule.java @@ -13,7 +13,9 @@ */ package io.prestosql.plugin.hive.metastore.glue; +import com.amazonaws.handlers.RequestHandler2; import com.google.inject.Binder; +import com.google.inject.Key; import com.google.inject.Provides; import com.google.inject.Scopes; import com.google.inject.Singleton; @@ -51,6 +53,8 @@ protected void setup(Binder binder) newOptionalBinder(binder, 
GlueColumnStatisticsProvider.class) .setDefault().to(DisabledGlueColumnStatisticsProvider.class).in(Scopes.SINGLETON); + newOptionalBinder(binder, Key.get(RequestHandler2.class, ForGlueHiveMetastore.class)); + if (buildConfigObject(HiveConfig.class).getRecordingPath() != null) { binder.bind(HiveMetastore.class) .annotatedWith(ForRecordingHiveMetastore.class) diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/metastore/glue/TestHiveGlueMetastore.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/metastore/glue/TestHiveGlueMetastore.java index ae90375658e6..86272efd81f7 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/metastore/glue/TestHiveGlueMetastore.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/metastore/glue/TestHiveGlueMetastore.java @@ -56,7 +56,7 @@ protected HiveMetastore createMetastore(File tempDir) glueConfig.setDefaultWarehouseDir(tempDir.toURI().toString()); Executor executor = new BoundedExecutor(this.executor, 10); - return new GlueHiveMetastore(HDFS_ENVIRONMENT, glueConfig, new DisabledGlueColumnStatisticsProvider(), executor); + return new GlueHiveMetastore(HDFS_ENVIRONMENT, glueConfig, new DisabledGlueColumnStatisticsProvider(), executor, Optional.empty()); } @Override From ed282b32f529fee7dd954df3306d49bafab5b7e6 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Thu, 2 Apr 2020 00:24:17 +0200 Subject: [PATCH 024/519] Avoid calling toIntExact on int --- .../java/io/prestosql/execution/buffer/PagesSerdeUtil.java | 3 +-- .../src/main/java/io/prestosql/sql/gen/InCodeGenerator.java | 2 +- .../src/main/java/io/prestosql/util/FastutilSetHelper.java | 3 +-- .../src/main/java/io/prestosql/orc/OrcRecordReader.java | 2 +- presto-orc/src/main/java/io/prestosql/orc/StripeReader.java | 4 ++-- .../io/prestosql/orc/stream/AbstractDiskOrcDataReader.java | 3 +-- .../io/prestosql/orc/stream/UncompressedOrcChunkLoader.java | 3 +-- .../main/java/io/prestosql/parquet/reader/PageReader.java | 5 ++--- 
.../main/java/io/prestosql/parquet/reader/ParquetReader.java | 2 +- .../raptor/legacy/storage/organization/TemporalFunction.java | 2 +- .../src/test/java/io/prestosql/rcfile/RcFileTester.java | 2 +- 11 files changed, 13 insertions(+), 18 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/execution/buffer/PagesSerdeUtil.java b/presto-main/src/main/java/io/prestosql/execution/buffer/PagesSerdeUtil.java index 951ea100c347..841d4ea4037d 100644 --- a/presto-main/src/main/java/io/prestosql/execution/buffer/PagesSerdeUtil.java +++ b/presto-main/src/main/java/io/prestosql/execution/buffer/PagesSerdeUtil.java @@ -25,7 +25,6 @@ import static io.prestosql.block.BlockSerdeUtil.readBlock; import static io.prestosql.block.BlockSerdeUtil.writeBlock; -import static java.lang.Math.toIntExact; import static java.util.Arrays.asList; import static java.util.Objects.requireNonNull; @@ -67,7 +66,7 @@ private static SerializedPage readSerializedPage(SliceInput sliceInput) PageCodecMarker.MarkerSet markers = PageCodecMarker.MarkerSet.fromByteValue(sliceInput.readByte()); int uncompressedSizeInBytes = sliceInput.readInt(); int sizeInBytes = sliceInput.readInt(); - Slice slice = sliceInput.readSlice(toIntExact((sizeInBytes))); + Slice slice = sliceInput.readSlice(sizeInBytes); return new SerializedPage(slice, markers, positionCount, uncompressedSizeInBytes); } diff --git a/presto-main/src/main/java/io/prestosql/sql/gen/InCodeGenerator.java b/presto-main/src/main/java/io/prestosql/sql/gen/InCodeGenerator.java index e491c8a3cde4..f16a6fd63be9 100644 --- a/presto-main/src/main/java/io/prestosql/sql/gen/InCodeGenerator.java +++ b/presto-main/src/main/java/io/prestosql/sql/gen/InCodeGenerator.java @@ -143,7 +143,7 @@ public BytecodeNode generateExpression(ResolvedFunction resolvedFunction, Byteco break; case HASH_SWITCH: try { - int hashCode = toIntExact(Long.hashCode((Long) hashCodeFunction.invoke(object))); + int hashCode = Long.hashCode((Long) hashCodeFunction.invoke(object)); 
hashBucketsBuilder.put(hashCode, testBytecode); } catch (Throwable throwable) { diff --git a/presto-main/src/main/java/io/prestosql/util/FastutilSetHelper.java b/presto-main/src/main/java/io/prestosql/util/FastutilSetHelper.java index 945a6d1581cf..f24bacf2a795 100644 --- a/presto-main/src/main/java/io/prestosql/util/FastutilSetHelper.java +++ b/presto-main/src/main/java/io/prestosql/util/FastutilSetHelper.java @@ -36,7 +36,6 @@ import static io.prestosql.spi.function.OperatorType.EQUAL; import static io.prestosql.spi.function.OperatorType.HASH_CODE; import static java.lang.Boolean.TRUE; -import static java.lang.Math.toIntExact; public final class FastutilSetHelper { @@ -190,7 +189,7 @@ private ObjectStrategy(Metadata metadata, Type type) public int hashCode(Object value) { try { - return toIntExact(Long.hashCode((long) hashCodeHandle.invokeExact(value))); + return Long.hashCode((long) hashCodeHandle.invokeExact(value)); } catch (Throwable t) { throwIfInstanceOf(t, Error.class); diff --git a/presto-orc/src/main/java/io/prestosql/orc/OrcRecordReader.java b/presto-orc/src/main/java/io/prestosql/orc/OrcRecordReader.java index 31060bbd3c05..084486f573bf 100644 --- a/presto-orc/src/main/java/io/prestosql/orc/OrcRecordReader.java +++ b/presto-orc/src/main/java/io/prestosql/orc/OrcRecordReader.java @@ -380,7 +380,7 @@ public Page nextPage() // single fixed width column are: 1, 16, 256, 1024, 1024,..., 1024, 256 and the 256 was because there is only // 256 rows left in this row group, then the nextBatchSize should be 1024 instead of 512. So we need to grow the // nextBatchSize before limiting the currentBatchSize by currentGroupRowCount - nextRowInGroup. 
- currentBatchSize = toIntExact(min(nextBatchSize, maxBatchSize)); + currentBatchSize = min(nextBatchSize, maxBatchSize); nextBatchSize = min(currentBatchSize * BATCH_SIZE_GROWTH_FACTOR, MAX_BATCH_SIZE); currentBatchSize = toIntExact(min(currentBatchSize, currentGroupRowCount - nextRowInGroup)); diff --git a/presto-orc/src/main/java/io/prestosql/orc/StripeReader.java b/presto-orc/src/main/java/io/prestosql/orc/StripeReader.java index 2949124444bf..449b4fec9b80 100644 --- a/presto-orc/src/main/java/io/prestosql/orc/StripeReader.java +++ b/presto-orc/src/main/java/io/prestosql/orc/StripeReader.java @@ -438,7 +438,7 @@ private Map> readColumnIndexes(Map selectRowGroups(StripeInformation stripe, Map> columnIndexes) { - int rowsInStripe = toIntExact(stripe.getNumberOfRows()); + int rowsInStripe = stripe.getNumberOfRows(); int groupsInStripe = ceil(rowsInStripe, rowsInRowGroup); ImmutableSet.Builder selectedRowGroups = ImmutableSet.builder(); @@ -485,7 +485,7 @@ private static Map getDiskRanges(List streams) ImmutableMap.Builder streamDiskRanges = ImmutableMap.builder(); long stripeOffset = 0; for (Stream stream : streams) { - int streamLength = toIntExact(stream.getLength()); + int streamLength = stream.getLength(); // ignore zero byte streams if (streamLength > 0) { streamDiskRanges.put(new StreamId(stream), new DiskRange(stripeOffset, streamLength)); diff --git a/presto-orc/src/main/java/io/prestosql/orc/stream/AbstractDiskOrcDataReader.java b/presto-orc/src/main/java/io/prestosql/orc/stream/AbstractDiskOrcDataReader.java index dbddd9b0fcf9..90268c8d3489 100644 --- a/presto-orc/src/main/java/io/prestosql/orc/stream/AbstractDiskOrcDataReader.java +++ b/presto-orc/src/main/java/io/prestosql/orc/stream/AbstractDiskOrcDataReader.java @@ -24,7 +24,6 @@ import static com.google.common.base.MoreObjects.toStringHelper; import static java.lang.Math.min; -import static java.lang.Math.toIntExact; import static java.util.Objects.requireNonNull; public abstract class 
AbstractDiskOrcDataReader @@ -74,7 +73,7 @@ public final int getMaxBufferSize() public final Slice seekBuffer(int newPosition) throws IOException { - int newBufferSize = toIntExact(min(dataSize - newPosition, maxBufferSize)); + int newBufferSize = min(dataSize - newPosition, maxBufferSize); if (buffer == null || buffer.length < newBufferSize) { buffer = new byte[newBufferSize]; } diff --git a/presto-orc/src/main/java/io/prestosql/orc/stream/UncompressedOrcChunkLoader.java b/presto-orc/src/main/java/io/prestosql/orc/stream/UncompressedOrcChunkLoader.java index f931dd51ca70..6eaa31cb9fd4 100644 --- a/presto-orc/src/main/java/io/prestosql/orc/stream/UncompressedOrcChunkLoader.java +++ b/presto-orc/src/main/java/io/prestosql/orc/stream/UncompressedOrcChunkLoader.java @@ -25,7 +25,6 @@ import static io.prestosql.orc.checkpoint.InputStreamCheckpoint.createInputStreamCheckpoint; import static io.prestosql.orc.checkpoint.InputStreamCheckpoint.decodeCompressedBlockOffset; import static io.prestosql.orc.checkpoint.InputStreamCheckpoint.decodeDecompressedOffset; -import static java.lang.Math.toIntExact; import static java.util.Objects.requireNonNull; public final class UncompressedOrcChunkLoader @@ -53,7 +52,7 @@ public OrcDataSourceId getOrcDataSourceId() private int getCurrentCompressedOffset() { - return hasNextChunk() ? 0 : toIntExact(dataReader.getSize()); + return hasNextChunk() ? 
0 : dataReader.getSize(); } @Override diff --git a/presto-parquet/src/main/java/io/prestosql/parquet/reader/PageReader.java b/presto-parquet/src/main/java/io/prestosql/parquet/reader/PageReader.java index 3e50bb9e8271..67510f61ef54 100644 --- a/presto-parquet/src/main/java/io/prestosql/parquet/reader/PageReader.java +++ b/presto-parquet/src/main/java/io/prestosql/parquet/reader/PageReader.java @@ -24,7 +24,6 @@ import java.util.List; import static io.prestosql.parquet.ParquetCompressionUtils.decompress; -import static java.lang.Math.toIntExact; class PageReader { @@ -72,9 +71,9 @@ public DataPage readPage() if (!dataPageV2.isCompressed()) { return dataPageV2; } - int uncompressedSize = toIntExact(dataPageV2.getUncompressedSize() + int uncompressedSize = dataPageV2.getUncompressedSize() - dataPageV2.getDefinitionLevels().length() - - dataPageV2.getRepetitionLevels().length()); + - dataPageV2.getRepetitionLevels().length(); return new DataPageV2( dataPageV2.getRowCount(), dataPageV2.getNullCount(), diff --git a/presto-parquet/src/main/java/io/prestosql/parquet/reader/ParquetReader.java b/presto-parquet/src/main/java/io/prestosql/parquet/reader/ParquetReader.java index 522ac81af344..760d757730e5 100644 --- a/presto-parquet/src/main/java/io/prestosql/parquet/reader/ParquetReader.java +++ b/presto-parquet/src/main/java/io/prestosql/parquet/reader/ParquetReader.java @@ -137,7 +137,7 @@ public int nextBatch() return -1; } - batchSize = toIntExact(min(nextBatchSize, maxBatchSize)); + batchSize = min(nextBatchSize, maxBatchSize); nextBatchSize = min(batchSize * BATCH_SIZE_GROWTH_FACTOR, MAX_VECTOR_LENGTH); batchSize = toIntExact(min(batchSize, currentGroupRowCount - nextRowInGroup)); diff --git a/presto-raptor-legacy/src/main/java/io/prestosql/plugin/raptor/legacy/storage/organization/TemporalFunction.java b/presto-raptor-legacy/src/main/java/io/prestosql/plugin/raptor/legacy/storage/organization/TemporalFunction.java index 97942fc34dd0..423c1c9050e1 100644 --- 
a/presto-raptor-legacy/src/main/java/io/prestosql/plugin/raptor/legacy/storage/organization/TemporalFunction.java +++ b/presto-raptor-legacy/src/main/java/io/prestosql/plugin/raptor/legacy/storage/organization/TemporalFunction.java @@ -86,7 +86,7 @@ private static int determineDay(long rangeStart, long rangeEnd) int startDay = toIntExact(Duration.ofMillis(rangeStart).toDays()); int endDay = toIntExact(Duration.ofMillis(rangeEnd).toDays()); if (startDay == endDay) { - return toIntExact(startDay); + return startDay; } if ((endDay - startDay) > 1) { diff --git a/presto-rcfile/src/test/java/io/prestosql/rcfile/RcFileTester.java b/presto-rcfile/src/test/java/io/prestosql/rcfile/RcFileTester.java index 493d3a8ac213..ca4493509ef7 100644 --- a/presto-rcfile/src/test/java/io/prestosql/rcfile/RcFileTester.java +++ b/presto-rcfile/src/test/java/io/prestosql/rcfile/RcFileTester.java @@ -459,7 +459,7 @@ private static void assertFileContentsNew( Iterator iterator = expectedValues.iterator(); int totalCount = 0; - for (int batchSize = recordReader.advance(); batchSize >= 0; batchSize = toIntExact(recordReader.advance())) { + for (int batchSize = recordReader.advance(); batchSize >= 0; batchSize = recordReader.advance()) { totalCount += batchSize; if (readLastBatchOnly && totalCount == expectedValues.size()) { assertEquals(advance(iterator, batchSize), batchSize); From acc95522d79c165eebf2cbba19540ce70d2389a6 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Wed, 1 Apr 2020 23:33:00 +0200 Subject: [PATCH 025/519] Use type adequate to payload --- .../src/main/java/io/prestosql/server/PagesResponseWriter.java | 3 +-- .../src/main/java/io/prestosql/server/TaskResource.java | 3 +-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/server/PagesResponseWriter.java b/presto-main/src/main/java/io/prestosql/server/PagesResponseWriter.java index 7d4487c1682c..0d334a8d0fa0 100644 --- 
a/presto-main/src/main/java/io/prestosql/server/PagesResponseWriter.java +++ b/presto-main/src/main/java/io/prestosql/server/PagesResponseWriter.java @@ -17,7 +17,6 @@ import io.airlift.slice.OutputStreamSliceOutput; import io.airlift.slice.SliceOutput; import io.prestosql.execution.buffer.SerializedPage; -import io.prestosql.spi.Page; import javax.ws.rs.Produces; import javax.ws.rs.WebApplicationException; @@ -58,7 +57,7 @@ public class PagesResponseWriter public boolean isWriteable(Class type, Type genericType, Annotation[] annotations, MediaType mediaType) { return List.class.isAssignableFrom(type) && - TypeToken.of(genericType).resolveType(LIST_GENERIC_TOKEN).getRawType().equals(Page.class) && + TypeToken.of(genericType).resolveType(LIST_GENERIC_TOKEN).getRawType().equals(SerializedPage.class) && mediaType.isCompatible(PRESTO_PAGES_TYPE); } diff --git a/presto-main/src/main/java/io/prestosql/server/TaskResource.java b/presto-main/src/main/java/io/prestosql/server/TaskResource.java index ca2b6bdcc640..2d76c7d1f88a 100644 --- a/presto-main/src/main/java/io/prestosql/server/TaskResource.java +++ b/presto-main/src/main/java/io/prestosql/server/TaskResource.java @@ -31,7 +31,6 @@ import io.prestosql.execution.buffer.OutputBuffers.OutputBufferId; import io.prestosql.execution.buffer.SerializedPage; import io.prestosql.metadata.SessionPropertyManager; -import io.prestosql.spi.Page; import org.weakref.jmx.Managed; import org.weakref.jmx.Nested; @@ -267,7 +266,7 @@ public void getResults( status = Status.NO_CONTENT; } else { - entity = new GenericEntity<>(serializedPages, new TypeToken>() {}.getType()); + entity = new GenericEntity<>(serializedPages, new TypeToken>() {}.getType()); status = Status.OK; } From 6800e80aaab4b9f37a33b806235a593be171fdf8 Mon Sep 17 00:00:00 2001 From: David Phillips Date: Thu, 2 Apr 2020 12:51:54 -0700 Subject: [PATCH 026/519] Capitalize title for Group Provider --- presto-docs/src/main/sphinx/develop/group-provider.rst | 2 +- 1 file changed, 
1 insertion(+), 1 deletion(-) diff --git a/presto-docs/src/main/sphinx/develop/group-provider.rst b/presto-docs/src/main/sphinx/develop/group-provider.rst index da162c8512d1..07ed0145e7ea 100644 --- a/presto-docs/src/main/sphinx/develop/group-provider.rst +++ b/presto-docs/src/main/sphinx/develop/group-provider.rst @@ -1,5 +1,5 @@ ============== -Group provider +Group Provider ============== Presto can map user names onto groups for easier access control management. From 0652e04e6973d283fbe84631cfbfcf0c7096a355 Mon Sep 17 00:00:00 2001 From: Vlad Rozov Date: Wed, 5 Feb 2020 19:00:10 -0800 Subject: [PATCH 027/519] Add iceberg to product tests suite --- .../product/launcher/env/common/Hadoop.java | 4 +- .../launcher/env/environment/Multinode.java | 4 +- .../env/environment/MultinodeTls.java | 4 +- .../env/environment/MultinodeTlsKerberos.java | 7 +- .../SinglenodeHdfsImpersonation.java | 4 +- .../SinglenodeHiveImpersonation.java | 4 +- .../SinglenodeKerberosHdfsImpersonation.java | 4 +- ...deKerberosHdfsImpersonationCrossRealm.java | 5 ++ ...osHdfsImpersonationWithWireEncryption.java | 7 +- ...SinglenodeKerberosHdfsNoImpersonation.java | 4 +- .../SinglenodeKerberosHiveImpersonation.java | 4 +- ...inglenodeKerberosKmsHdfsImpersonation.java | 5 ++ ...glenodeKerberosKmsHdfsNoImpersonation.java | 5 ++ .../env/environment/TwoKerberosHives.java | 10 +++ .../env/environment/TwoMixedHives.java | 8 ++ .../common/hadoop/iceberg.properties | 4 + .../multinode-tls-kerberos/iceberg.properties | 15 ++++ .../iceberg.properties | 8 ++ .../iceberg.properties | 8 ++ .../iceberg.properties | 22 ++++++ .../iceberg.properties | 17 +++++ .../iceberg.properties | 15 ++++ .../iceberg.properties | 15 ++++ .../iceberg.properties | 16 ++++ .../iceberg.properties | 21 ++++++ .../iceberg.properties | 16 ++++ .../two-kerberos-hives/iceberg1.properties | 16 ++++ .../two-kerberos-hives/iceberg2.properties | 15 ++++ .../two-mixed-hives/iceberg1.properties | 16 ++++ 
.../two-mixed-hives/iceberg2.properties | 5 ++ .../presto/etc/catalog/iceberg.properties | 3 + .../presto/etc/catalog/iceberg1.properties | 3 + .../presto/etc/catalog/iceberg2.properties | 3 + .../java/io/prestosql/tests/TestGroups.java | 1 + .../tests/iceberg/TestIcebergCreateTable.java | 74 +++++++++++++++++++ 35 files changed, 361 insertions(+), 11 deletions(-) create mode 100644 presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/common/hadoop/iceberg.properties create mode 100644 presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/multinode-tls-kerberos/iceberg.properties create mode 100644 presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-hdfs-impersonation/iceberg.properties create mode 100644 presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-hive-impersonation/iceberg.properties create mode 100644 presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-kerberos-hdfs-impersonation-cross-realm/iceberg.properties create mode 100644 presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-kerberos-hdfs-impersonation-with-wire-encryption/iceberg.properties create mode 100644 presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-kerberos-hdfs-impersonation/iceberg.properties create mode 100644 presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-kerberos-hdfs-no-impersonation/iceberg.properties create mode 100644 presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-kerberos-hive-impersonation/iceberg.properties create mode 100644 
presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-kerberos-kms-hdfs-impersonation/iceberg.properties create mode 100644 presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-kerberos-kms-hdfs-no-impersonation/iceberg.properties create mode 100644 presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/two-kerberos-hives/iceberg1.properties create mode 100644 presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/two-kerberos-hives/iceberg2.properties create mode 100644 presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/two-mixed-hives/iceberg1.properties create mode 100644 presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/two-mixed-hives/iceberg2.properties create mode 100644 presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/presto/etc/catalog/iceberg.properties create mode 100644 presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/presto/etc/catalog/iceberg1.properties create mode 100644 presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/presto/etc/catalog/iceberg2.properties create mode 100644 presto-product-tests/src/main/java/io/prestosql/tests/iceberg/TestIcebergCreateTable.java diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/common/Hadoop.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/common/Hadoop.java index 69cf1335f08c..3ef5928684dc 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/common/Hadoop.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/common/Hadoop.java @@ -33,6 +33,7 @@ public final class Hadoop implements 
EnvironmentExtender { public static final String CONTAINER_PRESTO_HIVE_PROPERTIES = CONTAINER_PRESTO_ETC + "/catalog/hive.properties"; + public static final String CONTAINER_PRESTO_ICEBERG_PROPERTIES = CONTAINER_PRESTO_ETC + "/catalog/iceberg.properties"; private final DockerFiles dockerFiles; @@ -56,7 +57,8 @@ public void extendEnvironment(Environment.Builder builder) builder.addContainer("hadoop-master", createHadoopMaster()); builder.configureContainer("presto-master", container -> container - .withFileSystemBind(dockerFiles.getDockerFilesHostPath("common/hadoop/hive.properties"), CONTAINER_PRESTO_HIVE_PROPERTIES, READ_ONLY)); + .withFileSystemBind(dockerFiles.getDockerFilesHostPath("common/hadoop/hive.properties"), CONTAINER_PRESTO_HIVE_PROPERTIES, READ_ONLY) + .withFileSystemBind(dockerFiles.getDockerFilesHostPath("common/hadoop/iceberg.properties"), CONTAINER_PRESTO_ICEBERG_PROPERTIES, READ_ONLY)); } @SuppressWarnings("resource") diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/Multinode.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/Multinode.java index f50c44daa793..6177d58a74d1 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/Multinode.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/Multinode.java @@ -29,6 +29,7 @@ import java.io.File; import static io.prestosql.tests.product.launcher.env.common.Hadoop.CONTAINER_PRESTO_HIVE_PROPERTIES; +import static io.prestosql.tests.product.launcher.env.common.Hadoop.CONTAINER_PRESTO_ICEBERG_PROPERTIES; import static io.prestosql.tests.product.launcher.env.common.Standard.CONTAINER_PRESTO_CONFIG_PROPERTIES; import static io.prestosql.tests.product.launcher.env.common.Standard.CONTAINER_PRESTO_JVM_CONFIG; import static 
io.prestosql.tests.product.launcher.env.common.Standard.createPrestoContainer; @@ -78,7 +79,8 @@ private DockerContainer createPrestoWorker() DockerContainer container = createPrestoContainer(dockerFiles, pathResolver, serverPackage, "prestodev/centos7-oj11:" + imagesVersion) .withFileSystemBind(dockerFiles.getDockerFilesHostPath("conf/environment/multinode/multinode-worker-jvm.config"), CONTAINER_PRESTO_JVM_CONFIG, READ_ONLY) .withFileSystemBind(dockerFiles.getDockerFilesHostPath("conf/environment/multinode/multinode-worker-config.properties"), CONTAINER_PRESTO_CONFIG_PROPERTIES, READ_ONLY) - .withFileSystemBind(dockerFiles.getDockerFilesHostPath("common/hadoop/hive.properties"), CONTAINER_PRESTO_HIVE_PROPERTIES, READ_ONLY); + .withFileSystemBind(dockerFiles.getDockerFilesHostPath("common/hadoop/hive.properties"), CONTAINER_PRESTO_HIVE_PROPERTIES, READ_ONLY) + .withFileSystemBind(dockerFiles.getDockerFilesHostPath("common/hadoop/iceberg.properties"), CONTAINER_PRESTO_ICEBERG_PROPERTIES, READ_ONLY); return container; } diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/MultinodeTls.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/MultinodeTls.java index 90fe16915e49..5827e3208bd9 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/MultinodeTls.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/MultinodeTls.java @@ -29,6 +29,7 @@ import java.io.File; import static io.prestosql.tests.product.launcher.env.common.Hadoop.CONTAINER_PRESTO_HIVE_PROPERTIES; +import static io.prestosql.tests.product.launcher.env.common.Hadoop.CONTAINER_PRESTO_ICEBERG_PROPERTIES; import static io.prestosql.tests.product.launcher.env.common.Standard.CONTAINER_PRESTO_CONFIG_PROPERTIES; import static 
io.prestosql.tests.product.launcher.env.common.Standard.CONTAINER_TEMPTO_PROFILE_CONFIG; import static io.prestosql.tests.product.launcher.env.common.Standard.createPrestoContainer; @@ -88,7 +89,8 @@ private void addPrestoWorker(Environment.Builder builder, String workerName) .withCreateContainerCmdModifier(createContainerCmd -> createContainerCmd.withDomainName("docker.cluster")) .withNetworkAliases(workerName + ".docker.cluster") .withFileSystemBind(dockerFiles.getDockerFilesHostPath("conf/environment/multinode-tls/config-worker.properties"), CONTAINER_PRESTO_CONFIG_PROPERTIES, READ_ONLY) - .withFileSystemBind(dockerFiles.getDockerFilesHostPath("common/hadoop/hive.properties"), CONTAINER_PRESTO_HIVE_PROPERTIES, READ_ONLY); + .withFileSystemBind(dockerFiles.getDockerFilesHostPath("common/hadoop/hive.properties"), CONTAINER_PRESTO_HIVE_PROPERTIES, READ_ONLY) + .withFileSystemBind(dockerFiles.getDockerFilesHostPath("common/hadoop/iceberg.properties"), CONTAINER_PRESTO_ICEBERG_PROPERTIES, READ_ONLY); builder.addContainer(workerName, container); } diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/MultinodeTlsKerberos.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/MultinodeTlsKerberos.java index b9e6d1f0f70a..78b123876aa9 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/MultinodeTlsKerberos.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/MultinodeTlsKerberos.java @@ -32,6 +32,7 @@ import static com.google.common.base.Verify.verify; import static io.prestosql.tests.product.launcher.env.common.Hadoop.CONTAINER_PRESTO_HIVE_PROPERTIES; +import static io.prestosql.tests.product.launcher.env.common.Hadoop.CONTAINER_PRESTO_ICEBERG_PROPERTIES; import static io.prestosql.tests.product.launcher.env.common.Standard.CONTAINER_PRESTO_CONFIG_PROPERTIES; 
import static io.prestosql.tests.product.launcher.env.common.Standard.createPrestoContainer; import static java.util.Objects.requireNonNull; @@ -73,7 +74,8 @@ protected void extendEnvironment(Environment.Builder builder) verify(Objects.equals(container.getDockerImageName(), prestoDockerImageName), "Expected image '%s', but is '%s'", prestoDockerImageName, container.getDockerImageName()); container .withFileSystemBind(dockerFiles.getDockerFilesHostPath("conf/environment/multinode-tls-kerberos/config-master.properties"), CONTAINER_PRESTO_CONFIG_PROPERTIES, READ_ONLY) - .withFileSystemBind(dockerFiles.getDockerFilesHostPath("conf/environment/multinode-tls-kerberos/hive.properties"), CONTAINER_PRESTO_HIVE_PROPERTIES, READ_ONLY); + .withFileSystemBind(dockerFiles.getDockerFilesHostPath("conf/environment/multinode-tls-kerberos/hive.properties"), CONTAINER_PRESTO_HIVE_PROPERTIES, READ_ONLY) + .withFileSystemBind(dockerFiles.getDockerFilesHostPath("conf/environment/multinode-tls-kerberos/iceberg.properties"), CONTAINER_PRESTO_ICEBERG_PROPERTIES, READ_ONLY); }); addPrestoWorker(builder, "presto-worker-1"); @@ -87,7 +89,8 @@ private void addPrestoWorker(Environment.Builder builder, String workerName) .withCreateContainerCmdModifier(createContainerCmd -> createContainerCmd.withDomainName("docker.cluster")) .withNetworkAliases(workerName + ".docker.cluster") .withFileSystemBind(dockerFiles.getDockerFilesHostPath("conf/environment/multinode-tls-kerberos/config-worker.properties"), CONTAINER_PRESTO_CONFIG_PROPERTIES, READ_ONLY) - .withFileSystemBind(dockerFiles.getDockerFilesHostPath("conf/environment/multinode-tls-kerberos/hive.properties"), CONTAINER_PRESTO_HIVE_PROPERTIES, READ_ONLY); + .withFileSystemBind(dockerFiles.getDockerFilesHostPath("conf/environment/multinode-tls-kerberos/hive.properties"), CONTAINER_PRESTO_HIVE_PROPERTIES, READ_ONLY) + .withFileSystemBind(dockerFiles.getDockerFilesHostPath("conf/environment/multinode-tls-kerberos/iceberg.properties"), 
CONTAINER_PRESTO_ICEBERG_PROPERTIES, READ_ONLY); builder.addContainer(workerName, container); } diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeHdfsImpersonation.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeHdfsImpersonation.java index fe741638262c..63731e746610 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeHdfsImpersonation.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeHdfsImpersonation.java @@ -24,6 +24,7 @@ import javax.inject.Inject; import static io.prestosql.tests.product.launcher.env.common.Hadoop.CONTAINER_PRESTO_HIVE_PROPERTIES; +import static io.prestosql.tests.product.launcher.env.common.Hadoop.CONTAINER_PRESTO_ICEBERG_PROPERTIES; import static java.util.Objects.requireNonNull; import static org.testcontainers.containers.BindMode.READ_ONLY; @@ -44,6 +45,7 @@ public SinglenodeHdfsImpersonation(DockerFiles dockerFiles, Standard standard, H protected void extendEnvironment(Environment.Builder builder) { builder.configureContainer("presto-master", container -> container - .withFileSystemBind(dockerFiles.getDockerFilesHostPath("conf/environment/singlenode-hdfs-impersonation/hive.properties"), CONTAINER_PRESTO_HIVE_PROPERTIES, READ_ONLY)); + .withFileSystemBind(dockerFiles.getDockerFilesHostPath("conf/environment/singlenode-hdfs-impersonation/hive.properties"), CONTAINER_PRESTO_HIVE_PROPERTIES, READ_ONLY) + .withFileSystemBind(dockerFiles.getDockerFilesHostPath("conf/environment/singlenode-hdfs-impersonation/iceberg.properties"), CONTAINER_PRESTO_ICEBERG_PROPERTIES, READ_ONLY)); } } diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeHiveImpersonation.java 
b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeHiveImpersonation.java index 059263761f15..b1a3e3bb85d4 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeHiveImpersonation.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeHiveImpersonation.java @@ -24,6 +24,7 @@ import javax.inject.Inject; import static io.prestosql.tests.product.launcher.env.common.Hadoop.CONTAINER_PRESTO_HIVE_PROPERTIES; +import static io.prestosql.tests.product.launcher.env.common.Hadoop.CONTAINER_PRESTO_ICEBERG_PROPERTIES; import static java.util.Objects.requireNonNull; import static org.testcontainers.containers.BindMode.READ_ONLY; @@ -44,6 +45,7 @@ public SinglenodeHiveImpersonation(DockerFiles dockerFiles, Standard standard, H protected void extendEnvironment(Environment.Builder builder) { builder.configureContainer("presto-master", container -> container - .withFileSystemBind(dockerFiles.getDockerFilesHostPath("conf/environment/singlenode-hive-impersonation/hive.properties"), CONTAINER_PRESTO_HIVE_PROPERTIES, READ_ONLY)); + .withFileSystemBind(dockerFiles.getDockerFilesHostPath("conf/environment/singlenode-hive-impersonation/hive.properties"), CONTAINER_PRESTO_HIVE_PROPERTIES, READ_ONLY) + .withFileSystemBind(dockerFiles.getDockerFilesHostPath("conf/environment/singlenode-hive-impersonation/iceberg.properties"), CONTAINER_PRESTO_ICEBERG_PROPERTIES, READ_ONLY)); } } diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeKerberosHdfsImpersonation.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeKerberosHdfsImpersonation.java index 18631d7a1448..4ca84d2cba9a 100644 --- 
a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeKerberosHdfsImpersonation.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeKerberosHdfsImpersonation.java @@ -25,6 +25,7 @@ import javax.inject.Inject; import static io.prestosql.tests.product.launcher.env.common.Hadoop.CONTAINER_PRESTO_HIVE_PROPERTIES; +import static io.prestosql.tests.product.launcher.env.common.Hadoop.CONTAINER_PRESTO_ICEBERG_PROPERTIES; import static java.util.Objects.requireNonNull; import static org.testcontainers.containers.BindMode.READ_ONLY; @@ -45,6 +46,7 @@ public SinglenodeKerberosHdfsImpersonation(DockerFiles dockerFiles, Standard sta protected void extendEnvironment(Environment.Builder builder) { builder.configureContainer("presto-master", container -> container - .withFileSystemBind(dockerFiles.getDockerFilesHostPath("conf/environment/singlenode-kerberos-hdfs-impersonation/hive.properties"), CONTAINER_PRESTO_HIVE_PROPERTIES, READ_ONLY)); + .withFileSystemBind(dockerFiles.getDockerFilesHostPath("conf/environment/singlenode-kerberos-hdfs-impersonation/hive.properties"), CONTAINER_PRESTO_HIVE_PROPERTIES, READ_ONLY) + .withFileSystemBind(dockerFiles.getDockerFilesHostPath("conf/environment/singlenode-kerberos-hdfs-impersonation/iceberg.properties"), CONTAINER_PRESTO_ICEBERG_PROPERTIES, READ_ONLY)); } } diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeKerberosHdfsImpersonationCrossRealm.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeKerberosHdfsImpersonationCrossRealm.java index 0a8e003dc67a..db010b1ff267 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeKerberosHdfsImpersonationCrossRealm.java +++ 
b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeKerberosHdfsImpersonationCrossRealm.java @@ -25,6 +25,7 @@ import javax.inject.Inject; import static io.prestosql.tests.product.launcher.env.common.Hadoop.CONTAINER_PRESTO_HIVE_PROPERTIES; +import static io.prestosql.tests.product.launcher.env.common.Hadoop.CONTAINER_PRESTO_ICEBERG_PROPERTIES; import static java.util.Objects.requireNonNull; import static org.testcontainers.containers.BindMode.READ_ONLY; @@ -50,6 +51,10 @@ protected void extendEnvironment(Environment.Builder builder) .withFileSystemBind( dockerFiles.getDockerFilesHostPath("conf/environment/singlenode-kerberos-hdfs-impersonation-cross-realm/hive.properties"), CONTAINER_PRESTO_HIVE_PROPERTIES, + READ_ONLY) + .withFileSystemBind( + dockerFiles.getDockerFilesHostPath("conf/environment/singlenode-kerberos-hdfs-impersonation-cross-realm/iceberg.properties"), + CONTAINER_PRESTO_ICEBERG_PROPERTIES, READ_ONLY); }); } diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeKerberosHdfsImpersonationWithWireEncryption.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeKerberosHdfsImpersonationWithWireEncryption.java index a5a6be82c218..5bb50e684d31 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeKerberosHdfsImpersonationWithWireEncryption.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeKerberosHdfsImpersonationWithWireEncryption.java @@ -25,6 +25,7 @@ import javax.inject.Inject; import static io.prestosql.tests.product.launcher.env.common.Hadoop.CONTAINER_PRESTO_HIVE_PROPERTIES; +import static io.prestosql.tests.product.launcher.env.common.Hadoop.CONTAINER_PRESTO_ICEBERG_PROPERTIES; import static java.util.Objects.requireNonNull; import static 
org.testcontainers.containers.BindMode.READ_ONLY; @@ -62,7 +63,11 @@ protected void extendEnvironment(Environment.Builder builder) .withFileSystemBind( dockerFiles.getDockerFilesHostPath("conf/environment/singlenode-kerberos-hdfs-impersonation-with-wire-encryption/hive.properties"), CONTAINER_PRESTO_HIVE_PROPERTIES, - READ_ONLY); + READ_ONLY) + .withFileSystemBind( + dockerFiles.getDockerFilesHostPath("conf/environment/singlenode-kerberos-hdfs-impersonation-with-wire-encryption/iceberg.properties"), + CONTAINER_PRESTO_ICEBERG_PROPERTIES, + READ_ONLY); }); } } diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeKerberosHdfsNoImpersonation.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeKerberosHdfsNoImpersonation.java index 73a853ed0ae2..4d781b31f251 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeKerberosHdfsNoImpersonation.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeKerberosHdfsNoImpersonation.java @@ -25,6 +25,7 @@ import javax.inject.Inject; import static io.prestosql.tests.product.launcher.env.common.Hadoop.CONTAINER_PRESTO_HIVE_PROPERTIES; +import static io.prestosql.tests.product.launcher.env.common.Hadoop.CONTAINER_PRESTO_ICEBERG_PROPERTIES; import static java.util.Objects.requireNonNull; import static org.testcontainers.containers.BindMode.READ_ONLY; @@ -45,6 +46,7 @@ public SinglenodeKerberosHdfsNoImpersonation(DockerFiles dockerFiles, Standard s protected void extendEnvironment(Environment.Builder builder) { builder.configureContainer("presto-master", container -> container - .withFileSystemBind(dockerFiles.getDockerFilesHostPath("conf/environment/singlenode-kerberos-hdfs-no-impersonation/hive.properties"), CONTAINER_PRESTO_HIVE_PROPERTIES, READ_ONLY)); + 
.withFileSystemBind(dockerFiles.getDockerFilesHostPath("conf/environment/singlenode-kerberos-hdfs-no-impersonation/hive.properties"), CONTAINER_PRESTO_HIVE_PROPERTIES, READ_ONLY) + .withFileSystemBind(dockerFiles.getDockerFilesHostPath("conf/environment/singlenode-kerberos-hdfs-no-impersonation/iceberg.properties"), CONTAINER_PRESTO_ICEBERG_PROPERTIES, READ_ONLY)); } } diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeKerberosHiveImpersonation.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeKerberosHiveImpersonation.java index 3d9d39c36c89..38422c08d3d8 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeKerberosHiveImpersonation.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeKerberosHiveImpersonation.java @@ -25,6 +25,7 @@ import javax.inject.Inject; import static io.prestosql.tests.product.launcher.env.common.Hadoop.CONTAINER_PRESTO_HIVE_PROPERTIES; +import static io.prestosql.tests.product.launcher.env.common.Hadoop.CONTAINER_PRESTO_ICEBERG_PROPERTIES; import static java.util.Objects.requireNonNull; import static org.testcontainers.containers.BindMode.READ_ONLY; @@ -45,6 +46,7 @@ public SinglenodeKerberosHiveImpersonation(DockerFiles dockerFiles, Standard sta protected void extendEnvironment(Environment.Builder builder) { builder.configureContainer("presto-master", container -> container - .withFileSystemBind(dockerFiles.getDockerFilesHostPath("conf/environment/singlenode-kerberos-hive-impersonation/hive.properties"), CONTAINER_PRESTO_HIVE_PROPERTIES, READ_ONLY)); + .withFileSystemBind(dockerFiles.getDockerFilesHostPath("conf/environment/singlenode-kerberos-hive-impersonation/hive.properties"), CONTAINER_PRESTO_HIVE_PROPERTIES, READ_ONLY) + 
.withFileSystemBind(dockerFiles.getDockerFilesHostPath("conf/environment/singlenode-kerberos-hive-impersonation/iceberg.properties"), CONTAINER_PRESTO_ICEBERG_PROPERTIES, READ_ONLY)); } } diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeKerberosKmsHdfsImpersonation.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeKerberosKmsHdfsImpersonation.java index b07bf40f9b97..3284bf7dc246 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeKerberosKmsHdfsImpersonation.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeKerberosKmsHdfsImpersonation.java @@ -26,6 +26,7 @@ import javax.inject.Inject; import static io.prestosql.tests.product.launcher.env.common.Hadoop.CONTAINER_PRESTO_HIVE_PROPERTIES; +import static io.prestosql.tests.product.launcher.env.common.Hadoop.CONTAINER_PRESTO_ICEBERG_PROPERTIES; import static java.util.Objects.requireNonNull; import static org.testcontainers.containers.BindMode.READ_ONLY; @@ -63,6 +64,10 @@ protected void extendEnvironment(Environment.Builder builder) .withFileSystemBind( dockerFiles.getDockerFilesHostPath("conf/environment/singlenode-kerberos-kms-hdfs-impersonation/hive.properties"), CONTAINER_PRESTO_HIVE_PROPERTIES, + READ_ONLY) + .withFileSystemBind( + dockerFiles.getDockerFilesHostPath("conf/environment/singlenode-kerberos-kms-hdfs-impersonation/iceberg.properties"), + CONTAINER_PRESTO_ICEBERG_PROPERTIES, READ_ONLY); }); } diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeKerberosKmsHdfsNoImpersonation.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeKerberosKmsHdfsNoImpersonation.java index 04e3847a2df8..b0ec677ac611 100644 --- 
a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeKerberosKmsHdfsNoImpersonation.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeKerberosKmsHdfsNoImpersonation.java @@ -26,6 +26,7 @@ import javax.inject.Inject; import static io.prestosql.tests.product.launcher.env.common.Hadoop.CONTAINER_PRESTO_HIVE_PROPERTIES; +import static io.prestosql.tests.product.launcher.env.common.Hadoop.CONTAINER_PRESTO_ICEBERG_PROPERTIES; import static java.util.Objects.requireNonNull; import static org.testcontainers.containers.BindMode.READ_ONLY; @@ -51,6 +52,10 @@ protected void extendEnvironment(Environment.Builder builder) .withFileSystemBind( dockerFiles.getDockerFilesHostPath("conf/environment/singlenode-kerberos-kms-hdfs-no-impersonation/hive.properties"), CONTAINER_PRESTO_HIVE_PROPERTIES, + READ_ONLY) + .withFileSystemBind( + dockerFiles.getDockerFilesHostPath("conf/environment/singlenode-kerberos-kms-hdfs-no-impersonation/iceberg.properties"), + CONTAINER_PRESTO_ICEBERG_PROPERTIES, READ_ONLY); }); } diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/TwoKerberosHives.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/TwoKerberosHives.java index 838bd9842d1b..0a0687a53ae5 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/TwoKerberosHives.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/TwoKerberosHives.java @@ -102,6 +102,16 @@ protected void extendEnvironment(Environment.Builder builder) .withFileSystemBind( dockerFiles.getDockerFilesHostPath("conf/environment/two-kerberos-hives/hive2.properties"), CONTAINER_PRESTO_ETC + "/catalog/hive2.properties", + READ_ONLY) + + .withFileSystemBind( + 
dockerFiles.getDockerFilesHostPath("conf/environment/two-kerberos-hives/iceberg1.properties"), + CONTAINER_PRESTO_ETC + "/catalog/iceberg1.properties", + READ_ONLY) + + .withFileSystemBind( + dockerFiles.getDockerFilesHostPath("conf/environment/two-kerberos-hives/iceberg2.properties"), + CONTAINER_PRESTO_ETC + "/catalog/iceberg2.properties", READ_ONLY); }); diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/TwoMixedHives.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/TwoMixedHives.java index c5b6dbe777ad..2e05647a3b5f 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/TwoMixedHives.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/TwoMixedHives.java @@ -73,6 +73,14 @@ protected void extendEnvironment(Environment.Builder builder) dockerFiles.getDockerFilesHostPath("conf/environment/two-mixed-hives/hive2.properties"), CONTAINER_PRESTO_ETC + "/catalog/hive2.properties", READ_ONLY); + container.withFileSystemBind( + dockerFiles.getDockerFilesHostPath("conf/environment/two-mixed-hives/iceberg1.properties"), + CONTAINER_PRESTO_ETC + "/catalog/iceberg1.properties", + READ_ONLY); + container.withFileSystemBind( + dockerFiles.getDockerFilesHostPath("conf/environment/two-mixed-hives/iceberg2.properties"), + CONTAINER_PRESTO_ETC + "/catalog/iceberg2.properties", + READ_ONLY); }); builder.addContainer("hadoop-master-2", createHadoopMaster2()); diff --git a/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/common/hadoop/iceberg.properties b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/common/hadoop/iceberg.properties new file mode 100644 index 000000000000..7ec547c05f48 --- /dev/null +++ 
b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/common/hadoop/iceberg.properties @@ -0,0 +1,4 @@ +connector.name=iceberg +hive.metastore.uri=thrift://hadoop-master:9083 +hive.config.resources=/docker/presto-product-tests/conf/presto/etc/hive-default-fs-site.xml +iceberg.file-format=PARQUET diff --git a/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/multinode-tls-kerberos/iceberg.properties b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/multinode-tls-kerberos/iceberg.properties new file mode 100644 index 000000000000..88356e4e91b1 --- /dev/null +++ b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/multinode-tls-kerberos/iceberg.properties @@ -0,0 +1,15 @@ +connector.name=iceberg +hive.metastore.uri=thrift://hadoop-master:9083 +hive.config.resources=/docker/presto-product-tests/conf/presto/etc/hive-default-fs-site.xml + +hive.metastore.authentication.type=KERBEROS +hive.metastore.service.principal=hive/hadoop-master@LABS.TERADATA.COM +hive.metastore.client.principal=hive/hadoop-master@LABS.TERADATA.COM +hive.metastore.client.keytab=/etc/hive/conf/hive.keytab + +hive.hdfs.authentication.type=KERBEROS +hive.hdfs.impersonation.enabled=false +hive.hdfs.presto.principal=hdfs/hadoop-master@LABS.TERADATA.COM +hive.hdfs.presto.keytab=/etc/hadoop/conf/hdfs.keytab + +iceberg.file-format=PARQUET diff --git a/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-hdfs-impersonation/iceberg.properties b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-hdfs-impersonation/iceberg.properties new file mode 100644 index 000000000000..6cfd95f0a6eb --- /dev/null +++ b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-hdfs-impersonation/iceberg.properties @@ 
-0,0 +1,8 @@ +connector.name=iceberg +hive.metastore.uri=thrift://hadoop-master:9083 +hive.config.resources=/docker/presto-product-tests/conf/presto/etc/hive-default-fs-site.xml + +hive.hdfs.authentication.type=NONE +hive.hdfs.impersonation.enabled=true + +iceberg.file-format=PARQUET diff --git a/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-hive-impersonation/iceberg.properties b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-hive-impersonation/iceberg.properties new file mode 100644 index 000000000000..9230951cec25 --- /dev/null +++ b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-hive-impersonation/iceberg.properties @@ -0,0 +1,8 @@ +connector.name=iceberg +hive.metastore.uri=thrift://hadoop-master:9083 +hive.config.resources=/docker/presto-product-tests/conf/presto/etc/hive-default-fs-site.xml +hive.metastore.thrift.impersonation.enabled=true +hive.hdfs.authentication.type=NONE +hive.hdfs.impersonation.enabled=true + +iceberg.file-format=PARQUET diff --git a/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-kerberos-hdfs-impersonation-cross-realm/iceberg.properties b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-kerberos-hdfs-impersonation-cross-realm/iceberg.properties new file mode 100644 index 000000000000..1b7fe714c478 --- /dev/null +++ b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-kerberos-hdfs-impersonation-cross-realm/iceberg.properties @@ -0,0 +1,22 @@ +# +# WARNING +# ^^^^^^^ +# This configuration file is for development only and should NOT be used +# in production. For example configuration, see the Presto documentation. 
+# + +connector.name=iceberg +hive.metastore.uri=thrift://hadoop-master:9083 + +hive.metastore.authentication.type=KERBEROS +hive.metastore.service.principal=hive/hadoop-master@LABS.TERADATA.COM +hive.metastore.client.principal=hive/hadoop-master@OTHERLABS.TERADATA.COM +hive.metastore.client.keytab=/etc/hive/conf/hive-other.keytab +hive.config.resources = /etc/hadoop/conf/core-site.xml + +hive.hdfs.authentication.type=KERBEROS +hive.hdfs.impersonation.enabled=true +hive.hdfs.presto.principal=hdfs/hadoop-master@OTHERLABS.TERADATA.COM +hive.hdfs.presto.keytab=/etc/hadoop/conf/hdfs-other.keytab + +iceberg.file-format=PARQUET diff --git a/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-kerberos-hdfs-impersonation-with-wire-encryption/iceberg.properties b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-kerberos-hdfs-impersonation-with-wire-encryption/iceberg.properties new file mode 100644 index 000000000000..e42fd3abf7cc --- /dev/null +++ b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-kerberos-hdfs-impersonation-with-wire-encryption/iceberg.properties @@ -0,0 +1,17 @@ +connector.name=iceberg +hive.metastore.uri=thrift://hadoop-master:9083 +hive.config.resources=/docker/presto-product-tests/conf/presto/etc/hive-default-fs-site.xml + +hive.metastore.authentication.type=KERBEROS +hive.metastore.service.principal=hive/hadoop-master@LABS.TERADATA.COM +hive.metastore.client.principal=hive/_HOST@LABS.TERADATA.COM +hive.metastore.client.keytab=/etc/presto/conf/hive-presto-master.keytab + +hive.hdfs.authentication.type=KERBEROS +hive.hdfs.impersonation.enabled=true +hive.hdfs.presto.principal=presto-server/_HOST@LABS.TERADATA.COM +hive.hdfs.presto.keytab=/etc/presto/conf/presto-server.keytab + +hive.hdfs.wire-encryption.enabled=true + +iceberg.file-format=PARQUET diff --git 
a/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-kerberos-hdfs-impersonation/iceberg.properties b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-kerberos-hdfs-impersonation/iceberg.properties new file mode 100644 index 000000000000..ff4cc8f826cb --- /dev/null +++ b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-kerberos-hdfs-impersonation/iceberg.properties @@ -0,0 +1,15 @@ +connector.name=iceberg +hive.metastore.uri=thrift://hadoop-master:9083 +hive.config.resources=/docker/presto-product-tests/conf/presto/etc/hive-default-fs-site.xml + +hive.metastore.authentication.type=KERBEROS +hive.metastore.service.principal=hive/hadoop-master@LABS.TERADATA.COM +hive.metastore.client.principal=hive/_HOST@LABS.TERADATA.COM +hive.metastore.client.keytab=/etc/presto/conf/hive-presto-master.keytab + +hive.hdfs.authentication.type=KERBEROS +hive.hdfs.impersonation.enabled=true +hive.hdfs.presto.principal=presto-server/_HOST@LABS.TERADATA.COM +hive.hdfs.presto.keytab=/etc/presto/conf/presto-server.keytab + +iceberg.file-format=PARQUET diff --git a/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-kerberos-hdfs-no-impersonation/iceberg.properties b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-kerberos-hdfs-no-impersonation/iceberg.properties new file mode 100644 index 000000000000..88356e4e91b1 --- /dev/null +++ b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-kerberos-hdfs-no-impersonation/iceberg.properties @@ -0,0 +1,15 @@ +connector.name=iceberg +hive.metastore.uri=thrift://hadoop-master:9083 +hive.config.resources=/docker/presto-product-tests/conf/presto/etc/hive-default-fs-site.xml + +hive.metastore.authentication.type=KERBEROS 
+hive.metastore.service.principal=hive/hadoop-master@LABS.TERADATA.COM +hive.metastore.client.principal=hive/hadoop-master@LABS.TERADATA.COM +hive.metastore.client.keytab=/etc/hive/conf/hive.keytab + +hive.hdfs.authentication.type=KERBEROS +hive.hdfs.impersonation.enabled=false +hive.hdfs.presto.principal=hdfs/hadoop-master@LABS.TERADATA.COM +hive.hdfs.presto.keytab=/etc/hadoop/conf/hdfs.keytab + +iceberg.file-format=PARQUET diff --git a/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-kerberos-hive-impersonation/iceberg.properties b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-kerberos-hive-impersonation/iceberg.properties new file mode 100644 index 000000000000..fbf1e30e1d69 --- /dev/null +++ b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-kerberos-hive-impersonation/iceberg.properties @@ -0,0 +1,16 @@ +connector.name=iceberg +hive.metastore.uri=thrift://hadoop-master:9083 +hive.config.resources=/docker/presto-product-tests/conf/presto/etc/hive-default-fs-site.xml + +hive.metastore.authentication.type=KERBEROS +hive.metastore.thrift.impersonation.enabled=true +hive.metastore.service.principal=hive/hadoop-master@LABS.TERADATA.COM +hive.metastore.client.principal=hive/_HOST@LABS.TERADATA.COM +hive.metastore.client.keytab=/etc/presto/conf/hive-presto-master.keytab + +hive.hdfs.authentication.type=KERBEROS +hive.hdfs.impersonation.enabled=true +hive.hdfs.presto.principal=presto-server/_HOST@LABS.TERADATA.COM +hive.hdfs.presto.keytab=/etc/presto/conf/presto-server.keytab + +iceberg.file-format=PARQUET diff --git a/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-kerberos-kms-hdfs-impersonation/iceberg.properties 
b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-kerberos-kms-hdfs-impersonation/iceberg.properties new file mode 100644 index 000000000000..e131a1e9b19b --- /dev/null +++ b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-kerberos-kms-hdfs-impersonation/iceberg.properties @@ -0,0 +1,21 @@ +connector.name=iceberg +hive.metastore.uri=thrift://hadoop-master:9083 + +hive.metastore.authentication.type=KERBEROS +hive.metastore.service.principal=hive/_HOST@LABS.TERADATA.COM +# When using HDFS impersonation we talk to HDFS as session user configured in tempto (which is 'hive') +# However, TODO we don't have Metastore impersonation yet; when DROP TABLE is issued, +# Metastore needs to be able to delete the table files on HDFS, otherwise they be silently left behind, +# potentially causing some further tests to fail. For this reason, `hive.metastore.client.principal` needs +# to match session user configured in tempto. 
+hive.metastore.client.principal=hive/hadoop-master@LABS.TERADATA.COM +hive.metastore.client.keytab=/etc/hive/conf/hive.keytab + +hive.hdfs.authentication.type=KERBEROS +hive.hdfs.impersonation.enabled=true +hive.hdfs.presto.principal=presto-server/_HOST@LABS.TERADATA.COM +hive.hdfs.presto.keytab=/etc/presto/conf/presto-server.keytab +hive.fs.cache.max-size=10 +hive.config.resources=/etc/hadoop/conf/core-site.xml,/docker/presto-product-tests/conf/environment/singlenode-kerberos-kms-hdfs-impersonation/hive-disable-key-provider-cache-site.xml + +iceberg.file-format=PARQUET diff --git a/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-kerberos-kms-hdfs-no-impersonation/iceberg.properties b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-kerberos-kms-hdfs-no-impersonation/iceberg.properties new file mode 100644 index 000000000000..3663d9507964 --- /dev/null +++ b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-kerberos-kms-hdfs-no-impersonation/iceberg.properties @@ -0,0 +1,16 @@ +connector.name=iceberg +hive.metastore.uri=thrift://hadoop-master:9083 + +hive.metastore.authentication.type=KERBEROS +hive.metastore.service.principal=hive/_HOST@LABS.TERADATA.COM +hive.metastore.client.principal=presto-server/_HOST@LABS.TERADATA.COM +hive.metastore.client.keytab=/etc/presto/conf/presto-server.keytab + +hive.hdfs.authentication.type=KERBEROS +hive.hdfs.impersonation.enabled=false +hive.hdfs.presto.principal=presto-server/_HOST@LABS.TERADATA.COM +hive.hdfs.presto.keytab=/etc/presto/conf/presto-server.keytab +hive.fs.cache.max-size=10 +hive.config.resources=/etc/hadoop/conf/core-site.xml,/docker/presto-product-tests/conf/environment/singlenode-kerberos-kms-hdfs-no-impersonation/hive-disable-key-provider-cache-site.xml + +iceberg.file-format=PARQUET diff --git 
a/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/two-kerberos-hives/iceberg1.properties b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/two-kerberos-hives/iceberg1.properties new file mode 100644 index 000000000000..fbf1e30e1d69 --- /dev/null +++ b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/two-kerberos-hives/iceberg1.properties @@ -0,0 +1,16 @@ +connector.name=iceberg +hive.metastore.uri=thrift://hadoop-master:9083 +hive.config.resources=/docker/presto-product-tests/conf/presto/etc/hive-default-fs-site.xml + +hive.metastore.authentication.type=KERBEROS +hive.metastore.thrift.impersonation.enabled=true +hive.metastore.service.principal=hive/hadoop-master@LABS.TERADATA.COM +hive.metastore.client.principal=hive/_HOST@LABS.TERADATA.COM +hive.metastore.client.keytab=/etc/presto/conf/hive-presto-master.keytab + +hive.hdfs.authentication.type=KERBEROS +hive.hdfs.impersonation.enabled=true +hive.hdfs.presto.principal=presto-server/_HOST@LABS.TERADATA.COM +hive.hdfs.presto.keytab=/etc/presto/conf/presto-server.keytab + +iceberg.file-format=PARQUET diff --git a/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/two-kerberos-hives/iceberg2.properties b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/two-kerberos-hives/iceberg2.properties new file mode 100644 index 000000000000..77c782d17633 --- /dev/null +++ b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/two-kerberos-hives/iceberg2.properties @@ -0,0 +1,15 @@ +connector.name=iceberg +hive.metastore.uri=thrift://hadoop-master-2:9083 +hive.config.resources=/docker/presto-product-tests/conf/environment/two-kerberos-hives/hive2-default-fs-site.xml,\ + /docker/presto-product-tests/conf/environment/two-kerberos-hives/auth-to-local.xml + 
+hive.metastore.authentication.type=KERBEROS +hive.metastore.service.principal=hive/hadoop-master-2@OTHERREALM.COM +hive.metastore.client.principal=hive/_HOST@OTHERREALM.COM +hive.metastore.client.keytab=/etc/presto/conf/other-hive-presto-master.keytab + +hive.hdfs.authentication.type=KERBEROS +hive.hdfs.presto.principal=presto-server/_HOST@OTHERREALM.COM +hive.hdfs.presto.keytab=/etc/presto/conf/other-presto-server.keytab + +iceberg.file-format=PARQUET diff --git a/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/two-mixed-hives/iceberg1.properties b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/two-mixed-hives/iceberg1.properties new file mode 100644 index 000000000000..fbf1e30e1d69 --- /dev/null +++ b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/two-mixed-hives/iceberg1.properties @@ -0,0 +1,16 @@ +connector.name=iceberg +hive.metastore.uri=thrift://hadoop-master:9083 +hive.config.resources=/docker/presto-product-tests/conf/presto/etc/hive-default-fs-site.xml + +hive.metastore.authentication.type=KERBEROS +hive.metastore.thrift.impersonation.enabled=true +hive.metastore.service.principal=hive/hadoop-master@LABS.TERADATA.COM +hive.metastore.client.principal=hive/_HOST@LABS.TERADATA.COM +hive.metastore.client.keytab=/etc/presto/conf/hive-presto-master.keytab + +hive.hdfs.authentication.type=KERBEROS +hive.hdfs.impersonation.enabled=true +hive.hdfs.presto.principal=presto-server/_HOST@LABS.TERADATA.COM +hive.hdfs.presto.keytab=/etc/presto/conf/presto-server.keytab + +iceberg.file-format=PARQUET diff --git a/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/two-mixed-hives/iceberg2.properties b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/two-mixed-hives/iceberg2.properties new file mode 100644 index 000000000000..318b918a4a61 --- 
/dev/null +++ b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/two-mixed-hives/iceberg2.properties @@ -0,0 +1,5 @@ +connector.name=iceberg +hive.metastore.uri=thrift://hadoop-master-2:9083 +hive.config.resources=/docker/presto-product-tests/conf/environment/two-mixed-hives/hive2-default-fs-site.xml + +iceberg.file-format=PARQUET diff --git a/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/presto/etc/catalog/iceberg.properties b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/presto/etc/catalog/iceberg.properties new file mode 100644 index 000000000000..a2ff2918dee5 --- /dev/null +++ b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/presto/etc/catalog/iceberg.properties @@ -0,0 +1,3 @@ +# This file exists so that we can install iceberg connector only in tests using it. +# Docker does not allow to re-bind a file inside a folder that is also a volume, unless the file already exists. +connector.name=noop diff --git a/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/presto/etc/catalog/iceberg1.properties b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/presto/etc/catalog/iceberg1.properties new file mode 100644 index 000000000000..a2ff2918dee5 --- /dev/null +++ b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/presto/etc/catalog/iceberg1.properties @@ -0,0 +1,3 @@ +# This file exists so that we can install iceberg connector only in tests using it. +# Docker does not allow to re-bind a file inside a folder that is also a volume, unless the file already exists. 
+connector.name=noop diff --git a/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/presto/etc/catalog/iceberg2.properties b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/presto/etc/catalog/iceberg2.properties new file mode 100644 index 000000000000..a2ff2918dee5 --- /dev/null +++ b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/presto/etc/catalog/iceberg2.properties @@ -0,0 +1,3 @@ +# This file exists so that we can install iceberg connector only in tests using it. +# Docker does not allow to re-bind a file inside a folder that is also a volume, unless the file already exists. +connector.name=noop diff --git a/presto-product-tests/src/main/java/io/prestosql/tests/TestGroups.java b/presto-product-tests/src/main/java/io/prestosql/tests/TestGroups.java index 2f320c08737b..2ca7d2e1a864 100644 --- a/presto-product-tests/src/main/java/io/prestosql/tests/TestGroups.java +++ b/presto-product-tests/src/main/java/io/prestosql/tests/TestGroups.java @@ -66,6 +66,7 @@ public final class TestGroups public static final String BIG_QUERY = "big_query"; public static final String KAFKA = "kafka"; public static final String TWO_HIVES = "two_hives"; + public static final String ICEBERG = "iceberg"; private TestGroups() {} } diff --git a/presto-product-tests/src/main/java/io/prestosql/tests/iceberg/TestIcebergCreateTable.java b/presto-product-tests/src/main/java/io/prestosql/tests/iceberg/TestIcebergCreateTable.java new file mode 100644 index 000000000000..4037d2b3a417 --- /dev/null +++ b/presto-product-tests/src/main/java/io/prestosql/tests/iceberg/TestIcebergCreateTable.java @@ -0,0 +1,74 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.tests.iceberg; + +import io.prestosql.tempto.ProductTest; +import io.prestosql.tempto.query.QueryExecutor; +import org.testng.annotations.Test; + +import java.sql.SQLException; + +import static io.prestosql.tempto.assertions.QueryAssert.Row.row; +import static io.prestosql.tempto.assertions.QueryAssert.assertThat; +import static io.prestosql.tests.TestGroups.ICEBERG; +import static io.prestosql.tests.TestGroups.STORAGE_FORMATS; +import static io.prestosql.tests.utils.QueryExecutors.onPresto; + +public class TestIcebergCreateTable + extends ProductTest +{ + @Test(groups = {ICEBERG, STORAGE_FORMATS}) + public void testCreateTable() + throws SQLException + { + QueryExecutor queryExecutor = onPresto(); + queryExecutor.executeQuery("CREATE SCHEMA iceberg.iceberg"); + queryExecutor.executeQuery("use iceberg.iceberg"); + queryExecutor.executeQuery("CREATE TABLE test_create_table(a bigint, b varchar)"); + queryExecutor.executeQuery("INSERT INTO test_create_table(a, b) VALUES " + + "(NULL, NULL), " + + "(-42, 'abc'), " + + "(9223372036854775807, 'abcdefghijklmnopqrstuvwxyz')"); + assertThat(queryExecutor.executeQuery("SELECT * FROM test_create_table")) + .containsOnly( + row(null, null), + row(-42, "abc"), + row(9223372036854775807L, "abcdefghijklmnopqrstuvwxyz")); + queryExecutor.executeQuery("DROP TABLE test_create_table"); + queryExecutor.executeQuery("DROP SCHEMA iceberg.iceberg"); + } + + @Test(groups = {ICEBERG, STORAGE_FORMATS}) + public void testCreateTableAsSelect() + throws SQLException + { + QueryExecutor queryExecutor 
= onPresto(); + queryExecutor.executeQuery("CREATE SCHEMA iceberg.iceberg"); + queryExecutor.executeQuery("use iceberg.iceberg"); + queryExecutor.executeQuery("" + + "CREATE TABLE test_create_table_as_select AS " + + "SELECT * FROM (VALUES " + + " (NULL, NULL), " + + " (-42, 'abc'), " + + " (9223372036854775807, 'abcdefghijklmnopqrstuvwxyz')" + + ") t(a, b)"); + assertThat(queryExecutor.executeQuery("SELECT * FROM test_create_table_as_select")) + .containsOnly( + row(null, null), + row(-42, "abc"), + row(9223372036854775807L, "abcdefghijklmnopqrstuvwxyz")); + queryExecutor.executeQuery("DROP TABLE test_create_table_as_select"); + queryExecutor.executeQuery("DROP SCHEMA iceberg.iceberg"); + } +} From 80c1013de0bfd1e499128d44969c8e80a3e3e1cb Mon Sep 17 00:00:00 2001 From: Vlad Rozov Date: Fri, 20 Mar 2020 08:50:03 -0700 Subject: [PATCH 028/519] Temporarily exclude iceberg from Kerberos product tests --- presto-product-tests/bin/product-tests-suite-2.sh | 4 ++-- presto-product-tests/bin/product-tests-suite-3.sh | 2 +- presto-product-tests/bin/product-tests-suite-5.sh | 2 +- presto-product-tests/bin/product-tests-suite-6-non-generic.sh | 4 ++-- presto-product-tests/bin/product-tests-suite-7-non-generic.sh | 2 +- 5 files changed, 7 insertions(+), 7 deletions(-) diff --git a/presto-product-tests/bin/product-tests-suite-2.sh b/presto-product-tests/bin/product-tests-suite-2.sh index aa1547f7722e..5ac0c2402421 100755 --- a/presto-product-tests/bin/product-tests-suite-2.sh +++ b/presto-product-tests/bin/product-tests-suite-2.sh @@ -11,7 +11,7 @@ presto-product-tests-launcher/bin/run-launcher test run \ presto-product-tests-launcher/bin/run-launcher test run \ --environment singlenode-kerberos-hdfs-no-impersonation \ - -- -g storage_formats,hdfs_no_impersonation \ + -- -g storage_formats,hdfs_no_impersonation -x iceberg \ || suite_exit_code=1 presto-product-tests-launcher/bin/run-launcher test run \ @@ -21,7 +21,7 @@ presto-product-tests-launcher/bin/run-launcher test run \ 
presto-product-tests-launcher/bin/run-launcher test run \ --environment singlenode-kerberos-hdfs-impersonation \ - -- -g storage_formats,cli,hdfs_impersonation,authorization,hive_file_header \ + -- -g storage_formats,cli,hdfs_impersonation,authorization,hive_file_header -x iceberg \ || suite_exit_code=1 echo "$0: exiting with ${suite_exit_code}" diff --git a/presto-product-tests/bin/product-tests-suite-3.sh b/presto-product-tests/bin/product-tests-suite-3.sh index d73104cf7d3f..cdfa7ec6b7e9 100755 --- a/presto-product-tests/bin/product-tests-suite-3.sh +++ b/presto-product-tests/bin/product-tests-suite-3.sh @@ -16,7 +16,7 @@ presto-product-tests-launcher/bin/run-launcher test run \ presto-product-tests-launcher/bin/run-launcher test run \ --environment singlenode-kerberos-hdfs-impersonation-with-wire-encryption \ - -- -g storage_formats,cli,hdfs_impersonation,authorization \ + -- -g storage_formats,cli,hdfs_impersonation,authorization -x iceberg \ || suite_exit_code=1 echo "$0: exiting with ${suite_exit_code}" diff --git a/presto-product-tests/bin/product-tests-suite-5.sh b/presto-product-tests/bin/product-tests-suite-5.sh index d277c8611e5f..e80767791af3 100755 --- a/presto-product-tests/bin/product-tests-suite-5.sh +++ b/presto-product-tests/bin/product-tests-suite-5.sh @@ -11,7 +11,7 @@ presto-product-tests-launcher/bin/run-launcher test run \ presto-product-tests-launcher/bin/run-launcher test run \ --environment singlenode-kerberos-hive-impersonation \ - -- -g storage_formats,hdfs_impersonation,authorization \ + -- -g storage_formats,hdfs_impersonation,authorization -x iceberg \ || suite_exit_code=1 echo "$0: exiting with ${suite_exit_code}" diff --git a/presto-product-tests/bin/product-tests-suite-6-non-generic.sh b/presto-product-tests/bin/product-tests-suite-6-non-generic.sh index d90c243bcd38..9eab67f9f34a 100755 --- a/presto-product-tests/bin/product-tests-suite-6-non-generic.sh +++ b/presto-product-tests/bin/product-tests-suite-6-non-generic.sh @@ -26,13 
+26,13 @@ presto-product-tests-launcher/bin/run-launcher test run \ # We have docker images with KMS on CDH only. TODO (https://github.com/prestosql/presto/issues/1652) create images with HDP and KMS presto-product-tests-launcher/bin/run-launcher test run \ --environment singlenode-kerberos-kms-hdfs-no-impersonation \ - -- -g storage_formats \ + -- -g storage_formats -x iceberg \ || suite_exit_code=1 # We have docker images with KMS on CDH only. TODO (https://github.com/prestosql/presto/issues/1652) create images with HDP and KMS presto-product-tests-launcher/bin/run-launcher test run \ --environment singlenode-kerberos-kms-hdfs-impersonation \ - -- -g storage_formats \ + -- -g storage_formats -x iceberg \ || suite_exit_code=1 presto-product-tests-launcher/bin/run-launcher test run \ diff --git a/presto-product-tests/bin/product-tests-suite-7-non-generic.sh b/presto-product-tests/bin/product-tests-suite-7-non-generic.sh index ddbe51209891..864a69348537 100755 --- a/presto-product-tests/bin/product-tests-suite-7-non-generic.sh +++ b/presto-product-tests/bin/product-tests-suite-7-non-generic.sh @@ -33,7 +33,7 @@ presto-product-tests-launcher/bin/run-launcher test run \ # Environment not set up on CDH. 
(TODO run on HDP 2.6 and HDP 3.1) presto-product-tests-launcher/bin/run-launcher test run \ --environment singlenode-kerberos-hdfs-impersonation-cross-realm \ - -- -g storage_formats,cli,hdfs_impersonation \ + -- -g storage_formats,cli,hdfs_impersonation -x iceberg \ || suite_exit_code=1 presto-product-tests-launcher/bin/run-launcher test run \ From a311584a28d8840a00c5106ecce92977ffb34d73 Mon Sep 17 00:00:00 2001 From: Manfred Moser Date: Thu, 2 Apr 2020 15:07:39 -0700 Subject: [PATCH 029/519] Add ref anchors --- presto-docs/src/main/sphinx/connector/hive.rst | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/presto-docs/src/main/sphinx/connector/hive.rst b/presto-docs/src/main/sphinx/connector/hive.rst index ff11569ebc29..10aeaf58ff08 100644 --- a/presto-docs/src/main/sphinx/connector/hive.rst +++ b/presto-docs/src/main/sphinx/connector/hive.rst @@ -318,6 +318,8 @@ Property Name Description Catalog. ==================================================== ============================================================ +.. _hive-s3: + Amazon S3 Configuration ----------------------- @@ -399,6 +401,8 @@ Property Name Description or partition. Defaults to ``false``. ============================================ ================================================================= +.. _hive-s3-credentials: + S3 Credentials ^^^^^^^^^^^^^^ @@ -429,6 +433,8 @@ or credentials for a specific use case (e.g., bucket/user specific credentials). This Hadoop configuration property must be set in the Hadoop configuration files referenced by the ``hive.config.resources`` Hive connector property. +.. 
_hive-s3-security-mapping: + S3 Security Mapping ^^^^^^^^^^^^^^^^^^^ From 1a08b547ac31b3c96b1b54be57f4dc742d1b4b6a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Grzegorz=20Kokosi=C5=84ski?= Date: Thu, 2 Apr 2020 11:20:04 +0200 Subject: [PATCH 030/519] Remove unused method --- .../io/prestosql/server/BasicQueryInfo.java | 21 ------------------- 1 file changed, 21 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/server/BasicQueryInfo.java b/presto-main/src/main/java/io/prestosql/server/BasicQueryInfo.java index 30f288037f7c..189e21d1ef57 100644 --- a/presto-main/src/main/java/io/prestosql/server/BasicQueryInfo.java +++ b/presto-main/src/main/java/io/prestosql/server/BasicQueryInfo.java @@ -15,7 +15,6 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; -import io.prestosql.Session; import io.prestosql.SessionRepresentation; import io.prestosql.execution.QueryInfo; import io.prestosql.execution.QueryState; @@ -32,9 +31,6 @@ import java.util.Optional; import static com.google.common.base.MoreObjects.toStringHelper; -import static io.prestosql.execution.QueryState.FAILED; -import static io.prestosql.memory.LocalMemoryManager.GENERAL_POOL; -import static io.prestosql.server.BasicQueryStats.immediateFailureQueryStats; import static java.util.Objects.requireNonNull; /** @@ -102,23 +98,6 @@ public BasicQueryInfo(QueryInfo queryInfo) queryInfo.getErrorCode()); } - public static BasicQueryInfo immediateFailureQueryInfo(Session session, String query, URI self, Optional resourceGroupId, ErrorCode errorCode) - { - return new BasicQueryInfo( - session.getQueryId(), - session.toSessionRepresentation(), - resourceGroupId, - FAILED, - GENERAL_POOL, - false, - self, - query, - Optional.empty(), - immediateFailureQueryStats(), - errorCode == null ? 
null : errorCode.getType(), - errorCode); - } - @JsonProperty public QueryId getQueryId() { From 614eb50b7430da5b87bc83d39553682cdc25bad9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Grzegorz=20Kokosi=C5=84ski?= Date: Thu, 2 Apr 2020 11:20:05 +0200 Subject: [PATCH 031/519] Remove unused tableHandles field --- .../execution/SqlQueryExecution.java | 30 ++----------------- 1 file changed, 2 insertions(+), 28 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/execution/SqlQueryExecution.java b/presto-main/src/main/java/io/prestosql/execution/SqlQueryExecution.java index 2b08a8419713..53d902075f5c 100644 --- a/presto-main/src/main/java/io/prestosql/execution/SqlQueryExecution.java +++ b/presto-main/src/main/java/io/prestosql/execution/SqlQueryExecution.java @@ -13,8 +13,6 @@ */ package io.prestosql.execution; -import com.google.common.collect.ImmutableMultimap; -import com.google.common.collect.Multimap; import com.google.common.util.concurrent.ListenableFuture; import io.airlift.concurrent.SetThreadName; import io.airlift.log.Logger; @@ -44,7 +42,6 @@ import io.prestosql.server.protocol.Slug; import io.prestosql.spi.PrestoException; import io.prestosql.spi.QueryId; -import io.prestosql.spi.connector.ConnectorTableHandle; import io.prestosql.split.SplitManager; import io.prestosql.split.SplitSource; import io.prestosql.sql.analyzer.Analysis; @@ -406,23 +403,7 @@ private PlanRoot doPlanQuery() SubPlan fragmentedPlan = planFragmenter.createSubPlans(stateMachine.getSession(), plan, false, stateMachine.getWarningCollector()); boolean explainAnalyze = analysis.getStatement() instanceof Explain && ((Explain) analysis.getStatement()).isAnalyze(); - return new PlanRoot(fragmentedPlan, !explainAnalyze, extractTableHandles(analysis)); - } - - private static Multimap extractTableHandles(Analysis analysis) - { - ImmutableMultimap.Builder tableHandles = ImmutableMultimap.builder(); - - for (TableHandle tableHandle : analysis.getTables()) { - 
tableHandles.put(tableHandle.getCatalogName(), tableHandle.getConnectorHandle()); - } - - if (analysis.getInsert().isPresent()) { - TableHandle target = analysis.getInsert().get().getTarget(); - tableHandles.put(target.getCatalogName(), target.getConnectorHandle()); - } - - return tableHandles.build(); + return new PlanRoot(fragmentedPlan, !explainAnalyze); } private void planDistribution(PlanRoot plan) @@ -622,13 +603,11 @@ private static class PlanRoot { private final SubPlan root; private final boolean summarizeTaskInfos; - private final Multimap tableHandles; - public PlanRoot(SubPlan root, boolean summarizeTaskInfos, Multimap tableHandles) + public PlanRoot(SubPlan root, boolean summarizeTaskInfos) { this.root = requireNonNull(root, "root is null"); this.summarizeTaskInfos = summarizeTaskInfos; - this.tableHandles = ImmutableMultimap.copyOf(requireNonNull(tableHandles, "tableHandles is null")); } public SubPlan getRoot() @@ -640,11 +619,6 @@ public boolean isSummarizeTaskInfos() { return summarizeTaskInfos; } - - public Multimap getTableHandles() - { - return tableHandles; - } } public static class SqlQueryExecutionFactory From f12c107d147307b0b6110fb64f579fcc7d55ee13 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Grzegorz=20Kokosi=C5=84ski?= Date: Thu, 2 Apr 2020 11:20:08 +0200 Subject: [PATCH 032/519] Add updateType to query event Thanks to that event listener will know if query was SELECT, DELETE or any other DDL statement. 
--- .../src/main/java/io/prestosql/event/QueryMonitor.java | 3 +++ .../java/io/prestosql/execution/QueryStateMachine.java | 1 + .../main/java/io/prestosql/server/BasicQueryInfo.java | 10 ++++++++++ .../prestosql/execution/MockManagedQueryExecution.java | 1 + .../io/prestosql/spi/eventlistener/QueryMetadata.java | 9 +++++++++ .../memory/TestClusterMemoryLeakDetector.java | 1 + 6 files changed, 25 insertions(+) diff --git a/presto-main/src/main/java/io/prestosql/event/QueryMonitor.java b/presto-main/src/main/java/io/prestosql/event/QueryMonitor.java index 738e02dd4258..a9101b21c707 100644 --- a/presto-main/src/main/java/io/prestosql/event/QueryMonitor.java +++ b/presto-main/src/main/java/io/prestosql/event/QueryMonitor.java @@ -123,6 +123,7 @@ public void queryCreatedEvent(BasicQueryInfo queryInfo) queryInfo.getQueryId().toString(), queryInfo.getSession().getTransactionId().map(TransactionId::toString), queryInfo.getQuery(), + queryInfo.getUpdateType(), queryInfo.getPreparedQuery(), QUEUED.toString(), ImmutableList.of(), @@ -139,6 +140,7 @@ public void queryImmediateFailureEvent(BasicQueryInfo queryInfo, ExecutionFailur queryInfo.getQueryId().toString(), queryInfo.getSession().getTransactionId().map(TransactionId::toString), queryInfo.getQuery(), + queryInfo.getUpdateType(), queryInfo.getPreparedQuery(), queryInfo.getState().toString(), ImmutableList.of(), @@ -209,6 +211,7 @@ private QueryMetadata createQueryMetadata(QueryInfo queryInfo) queryInfo.getQueryId().toString(), queryInfo.getSession().getTransactionId().map(TransactionId::toString), queryInfo.getQuery(), + Optional.ofNullable(queryInfo.getUpdateType()), queryInfo.getPreparedQuery(), queryInfo.getState().toString(), queryInfo.getReferencedTables(), diff --git a/presto-main/src/main/java/io/prestosql/execution/QueryStateMachine.java b/presto-main/src/main/java/io/prestosql/execution/QueryStateMachine.java index fe3990eed101..210abe2654f8 100644 --- 
a/presto-main/src/main/java/io/prestosql/execution/QueryStateMachine.java +++ b/presto-main/src/main/java/io/prestosql/execution/QueryStateMachine.java @@ -370,6 +370,7 @@ public BasicQueryInfo getBasicQueryInfo(Optional rootStage) stageStats.isScheduled(), self, query, + Optional.ofNullable(updateType.get()), preparedQuery, queryStats, errorCode == null ? null : errorCode.getType(), diff --git a/presto-main/src/main/java/io/prestosql/server/BasicQueryInfo.java b/presto-main/src/main/java/io/prestosql/server/BasicQueryInfo.java index 189e21d1ef57..04afcae9057d 100644 --- a/presto-main/src/main/java/io/prestosql/server/BasicQueryInfo.java +++ b/presto-main/src/main/java/io/prestosql/server/BasicQueryInfo.java @@ -48,6 +48,7 @@ public class BasicQueryInfo private final boolean scheduled; private final URI self; private final String query; + private final Optional updateType; private final Optional preparedQuery; private final BasicQueryStats queryStats; private final ErrorType errorType; @@ -63,6 +64,7 @@ public BasicQueryInfo( @JsonProperty("scheduled") boolean scheduled, @JsonProperty("self") URI self, @JsonProperty("query") String query, + @JsonProperty("updateType") Optional updateType, @JsonProperty("preparedQuery") Optional preparedQuery, @JsonProperty("queryStats") BasicQueryStats queryStats, @JsonProperty("errorType") ErrorType errorType, @@ -78,6 +80,7 @@ public BasicQueryInfo( this.scheduled = scheduled; this.self = requireNonNull(self, "self is null"); this.query = requireNonNull(query, "query is null"); + this.updateType = requireNonNull(updateType, "updateType is null"); this.preparedQuery = requireNonNull(preparedQuery, "preparedQuery is null"); this.queryStats = requireNonNull(queryStats, "queryStats is null"); } @@ -92,6 +95,7 @@ public BasicQueryInfo(QueryInfo queryInfo) queryInfo.isScheduled(), queryInfo.getSelf(), queryInfo.getQuery(), + Optional.ofNullable(queryInfo.getUpdateType()), queryInfo.getPreparedQuery(), new 
BasicQueryStats(queryInfo.getQueryStats()), queryInfo.getErrorType(), @@ -146,6 +150,12 @@ public String getQuery() return query; } + @JsonProperty + public Optional getUpdateType() + { + return updateType; + } + @JsonProperty public Optional getPreparedQuery() { diff --git a/presto-main/src/test/java/io/prestosql/execution/MockManagedQueryExecution.java b/presto-main/src/test/java/io/prestosql/execution/MockManagedQueryExecution.java index 673473894772..2b621e92ce9c 100644 --- a/presto-main/src/test/java/io/prestosql/execution/MockManagedQueryExecution.java +++ b/presto-main/src/test/java/io/prestosql/execution/MockManagedQueryExecution.java @@ -117,6 +117,7 @@ public BasicQueryInfo getBasicQueryInfo() URI.create("http://test"), "SELECT 1", Optional.empty(), + Optional.empty(), new BasicQueryStats( new DateTime(1), new DateTime(2), diff --git a/presto-spi/src/main/java/io/prestosql/spi/eventlistener/QueryMetadata.java b/presto-spi/src/main/java/io/prestosql/spi/eventlistener/QueryMetadata.java index fa3834085ddf..9039fd42532d 100644 --- a/presto-spi/src/main/java/io/prestosql/spi/eventlistener/QueryMetadata.java +++ b/presto-spi/src/main/java/io/prestosql/spi/eventlistener/QueryMetadata.java @@ -27,6 +27,7 @@ public class QueryMetadata private final Optional transactionId; private final String query; + private final Optional updateType; private final Optional preparedQuery; private final String queryState; @@ -43,6 +44,7 @@ public QueryMetadata( String queryId, Optional transactionId, String query, + Optional updateType, Optional preparedQuery, String queryState, List tables, @@ -54,6 +56,7 @@ public QueryMetadata( this.queryId = requireNonNull(queryId, "queryId is null"); this.transactionId = requireNonNull(transactionId, "transactionId is null"); this.query = requireNonNull(query, "query is null"); + this.updateType = requireNonNull(updateType, "updateType is null"); this.preparedQuery = requireNonNull(preparedQuery, "preparedQuery is null"); this.queryState = 
requireNonNull(queryState, "queryState is null"); this.tables = requireNonNull(tables, "tables is null"); @@ -81,6 +84,12 @@ public String getQuery() return query; } + @JsonProperty + public Optional getUpdateType() + { + return updateType; + } + @JsonProperty public Optional getPreparedQuery() { diff --git a/presto-tests/src/test/java/io/prestosql/memory/TestClusterMemoryLeakDetector.java b/presto-tests/src/test/java/io/prestosql/memory/TestClusterMemoryLeakDetector.java index c4f33c87e38a..ed2c3740f12f 100644 --- a/presto-tests/src/test/java/io/prestosql/memory/TestClusterMemoryLeakDetector.java +++ b/presto-tests/src/test/java/io/prestosql/memory/TestClusterMemoryLeakDetector.java @@ -78,6 +78,7 @@ private static BasicQueryInfo createQueryInfo(String queryId, QueryState state) URI.create("1"), "", Optional.empty(), + Optional.empty(), new BasicQueryStats( DateTime.parse("1991-09-06T05:00-05:30"), DateTime.parse("1991-09-06T05:01-05:30"), From bc41898ca1e4357c2c0eeb185f58e8e655b88a2c Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Thu, 2 Apr 2020 15:28:38 +0200 Subject: [PATCH 033/519] Synchronize containers output Previously container output would be written to stdout descriptor without synchroniazation. That allowed one container output to be arbitrarily mixed with others. Now the output is synchronized line-by-line. 
--- .../product/launcher/env/Environment.java | 32 ++++++++++++++----- .../testcontainers/PrintingLogConsumer.java | 17 ++-------- 2 files changed, 27 insertions(+), 22 deletions(-) diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/Environment.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/Environment.java index df1561d9ebbf..c04dd295d745 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/Environment.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/Environment.java @@ -22,6 +22,11 @@ import org.testcontainers.shaded.com.google.common.collect.ImmutableList; import org.testcontainers.shaded.com.google.common.collect.ImmutableMap; +import java.io.FileDescriptor; +import java.io.FileOutputStream; +import java.io.PrintStream; +import java.io.UnsupportedEncodingException; +import java.nio.charset.Charset; import java.util.HashMap; import java.util.Map; import java.util.Optional; @@ -90,7 +95,6 @@ public Builder addContainer(String name, DockerContainer container) String containerName = "ptl-" + name; container - .withLogConsumer(new PrintingLogConsumer(format("%-20s| ", containerName))) .withNetwork(network) .withNetworkAliases(name) .withLabel(PRODUCT_TEST_LAUNCHER_STARTED_LABEL_NAME, PRODUCT_TEST_LAUNCHER_STARTED_LABEL_VALUE) @@ -121,14 +125,26 @@ public Builder removeContainer(String name) public Environment build() { + // write directly to System.out, bypassing logging & io.airlift.log.Logging#rewireStdStreams + PrintStream out; + try { + //noinspection resource + out = new PrintStream(new FileOutputStream(FileDescriptor.out), true, Charset.defaultCharset().name()); + } + catch (UnsupportedEncodingException e) { + throw new RuntimeException(e); + } + containers.forEach((name, container) -> { - container.withCreateContainerCmdModifier(createContainerCmd -> { - Map binds = new HashMap<>(); 
- for (Bind bind : firstNonNull(createContainerCmd.getBinds(), new Bind[0])) { - binds.put(bind.getVolume().getPath(), bind); // last bind wins - } - createContainerCmd.withBinds(binds.values().toArray(new Bind[0])); - }); + container + .withLogConsumer(new PrintingLogConsumer(out, format("%-20s| ", name))) + .withCreateContainerCmdModifier(createContainerCmd -> { + Map binds = new HashMap<>(); + for (Bind bind : firstNonNull(createContainerCmd.getBinds(), new Bind[0])) { + binds.put(bind.getVolume().getPath(), bind); // last bind wins + } + createContainerCmd.withBinds(binds.values().toArray(new Bind[0])); + }); }); return new Environment(containers); diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/testcontainers/PrintingLogConsumer.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/testcontainers/PrintingLogConsumer.java index bc0dcd84e6c6..17b399101b65 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/testcontainers/PrintingLogConsumer.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/testcontainers/PrintingLogConsumer.java @@ -17,11 +17,7 @@ import org.testcontainers.containers.output.BaseConsumer; import org.testcontainers.containers.output.OutputFrame; -import java.io.FileDescriptor; -import java.io.FileOutputStream; import java.io.PrintStream; -import java.io.UnsupportedEncodingException; -import java.nio.charset.Charset; import static java.util.Objects.requireNonNull; import static org.testcontainers.containers.output.OutputFrame.OutputType.END; @@ -31,21 +27,14 @@ public final class PrintingLogConsumer { private static final Logger log = Logger.get(PrintingLogConsumer.class); + private final PrintStream out; private final String prefix; - private final PrintStream out; - public PrintingLogConsumer(String prefix) + public PrintingLogConsumer(PrintStream out, String prefix) { + this.out = 
requireNonNull(out, "out is null"); this.prefix = requireNonNull(prefix, "prefix is null"); - - try { - // write directly to System.out, bypassing logging & io.airlift.log.Logging#rewireStdStreams - this.out = new PrintStream(new FileOutputStream(FileDescriptor.out), true, Charset.defaultCharset().name()); - } - catch (UnsupportedEncodingException e) { - throw new RuntimeException(e); - } } @Override From 89fc425a0ac1cc0d5cab4f90da48032e84ab24fa Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Thu, 2 Apr 2020 15:28:47 +0200 Subject: [PATCH 034/519] Make sure containers output is flushed --- .../product/launcher/testcontainers/PrintingLogConsumer.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/testcontainers/PrintingLogConsumer.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/testcontainers/PrintingLogConsumer.java index 17b399101b65..9b23420a7cce 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/testcontainers/PrintingLogConsumer.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/testcontainers/PrintingLogConsumer.java @@ -30,7 +30,6 @@ public final class PrintingLogConsumer private final PrintStream out; private final String prefix; - public PrintingLogConsumer(PrintStream out, String prefix) { this.out = requireNonNull(out, "out is null"); @@ -52,5 +51,6 @@ public void accept(OutputFrame outputFrame) if (outputFrame.getType() == END) { out.println(prefix + "(exited)"); } + out.flush(); } } From c20edd716943c69df89b39101c21a57bd6223a75 Mon Sep 17 00:00:00 2001 From: praveenkrishna Date: Thu, 2 Apr 2020 23:31:40 +0530 Subject: [PATCH 035/519] Remove redundant Kudu tests --- .github/workflows/module-tests.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/module-tests.yml 
b/.github/workflows/module-tests.yml index 36cd42689c2c..fc03212f9c8b 100644 --- a/.github/workflows/module-tests.yml +++ b/.github/workflows/module-tests.yml @@ -80,7 +80,8 @@ jobs: !presto-redis, !presto-sqlserver,!presto-postgresql,!presto-mysql, !presto-phoenix,!presto-iceberg, - !presto-docs,!presto-server,!presto-server-rpm' + !presto-docs,!presto-server,!presto-server-rpm, + !presto-kudu' x: runs-on: ubuntu-latest From ecc4dbc00ac536b958c325a86d708102e871cc96 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Fri, 3 Apr 2020 16:49:41 +0200 Subject: [PATCH 036/519] Reorder arguments in the method The new order is more reasonable. The method was added after last release, so it is a safe change to do now. --- .../main/java/io/prestosql/plugin/jdbc/BaseJdbcClient.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/BaseJdbcClient.java b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/BaseJdbcClient.java index ef6b1f7658d9..f1921a8411a1 100644 --- a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/BaseJdbcClient.java +++ b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/BaseJdbcClient.java @@ -430,7 +430,7 @@ protected JdbcOutputTableHandle createTable(ConnectorSession session, ConnectorT columnList.add(getColumnSql(session, column, columnName)); } - String sql = createTableSql(tableName, remoteSchema, catalog, columnList.build()); + String sql = createTableSql(catalog, remoteSchema, tableName, columnList.build()); execute(connection, sql); return new JdbcOutputTableHandle( @@ -444,7 +444,7 @@ protected JdbcOutputTableHandle createTable(ConnectorSession session, ConnectorT } } - protected String createTableSql(String tableName, String remoteSchema, String catalog, List columns) + protected String createTableSql(String catalog, String remoteSchema, String tableName, List columns) { return format("CREATE TABLE %s (%s)", quoted(catalog, remoteSchema, tableName), 
join(", ", columns)); } From af99ba9ed518d7d5bd97c838ffb61888e03c369c Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Fri, 3 Apr 2020 16:50:32 +0200 Subject: [PATCH 037/519] Add legacy config The functionality was available under different name before. --- .../java/io/prestosql/plugin/jdbc/TypeHandlingJdbcConfig.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/TypeHandlingJdbcConfig.java b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/TypeHandlingJdbcConfig.java index 3dd24a6da7fc..c91592de65bc 100644 --- a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/TypeHandlingJdbcConfig.java +++ b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/TypeHandlingJdbcConfig.java @@ -15,6 +15,7 @@ import io.airlift.configuration.Config; import io.airlift.configuration.ConfigDescription; +import io.airlift.configuration.LegacyConfig; import javax.validation.constraints.NotNull; @@ -29,6 +30,7 @@ public UnsupportedTypeHandling getUnsupportedTypeHandling() } @Config("unsupported-type-handling") + @LegacyConfig("unsupported-type.handling-strategy") @ConfigDescription("Unsupported type handling strategy") public TypeHandlingJdbcConfig setUnsupportedTypeHandling(UnsupportedTypeHandling unsupportedTypeHandling) { From 8e3cbbc613bfb7dd9ca0a8fadfbe3e995285e43d Mon Sep 17 00:00:00 2001 From: David Phillips Date: Thu, 2 Apr 2020 16:22:35 -0700 Subject: [PATCH 038/519] Free disk space before running product tests --- .github/workflows/product-tests.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/workflows/product-tests.yml b/.github/workflows/product-tests.yml index 163bd74813dd..fc5007e27551 100644 --- a/.github/workflows/product-tests.yml +++ b/.github/workflows/product-tests.yml @@ -50,6 +50,11 @@ jobs: run: | export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" ./bin/retry ./mvnw install ${MAVEN_FAST_INSTALL} -pl '!presto-docs,!presto-server-rpm' + - name: Free Disk Space + run: | + docker 
image prune -af + sudo apt-get clean + rm -rf ~/.m2/repository - name: Product Tests run: | source presto-product-tests/conf/product-tests-${{ matrix.config }}.sh && From 1d9cc7d64d60807639c17d1096f5b999fc515f6f Mon Sep 17 00:00:00 2001 From: praveenkrishna Date: Fri, 3 Apr 2020 17:21:27 +0530 Subject: [PATCH 039/519] Free disk space in checks workflow --- .github/workflows/checks.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml index 9106d14bf9ff..6ec1772d25b3 100644 --- a/.github/workflows/checks.yml +++ b/.github/workflows/checks.yml @@ -31,7 +31,9 @@ jobs: - name: Test Server RPM run: | export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" - ./bin/retry ./mvnw install -B -P ci -pl presto-server-rpm + ./bin/retry ./mvnw verify -B -P ci -pl presto-server-rpm + - name: Free Disk Space + run: ./mvnw clean -pl '!presto-server,!presto-cli' - name: Test Docker Image run: docker/build-local.sh From b0be04faab1de7ea92d8e75a98e54fe97631ab9d Mon Sep 17 00:00:00 2001 From: praveenkrishna Date: Sun, 29 Mar 2020 19:31:46 +0530 Subject: [PATCH 040/519] Add support for reading decimal in InMemoryRecordSet --- .../io/prestosql/spi/connector/InMemoryRecordSet.java | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/presto-spi/src/main/java/io/prestosql/spi/connector/InMemoryRecordSet.java b/presto-spi/src/main/java/io/prestosql/spi/connector/InMemoryRecordSet.java index 95b266613ca5..33a009e4f31a 100644 --- a/presto-spi/src/main/java/io/prestosql/spi/connector/InMemoryRecordSet.java +++ b/presto-spi/src/main/java/io/prestosql/spi/connector/InMemoryRecordSet.java @@ -30,6 +30,8 @@ import static io.prestosql.spi.type.BigintType.BIGINT; import static io.prestosql.spi.type.BooleanType.BOOLEAN; import static io.prestosql.spi.type.DateType.DATE; +import static io.prestosql.spi.type.Decimals.isLongDecimal; +import static io.prestosql.spi.type.Decimals.isShortDecimal; import static 
io.prestosql.spi.type.DoubleType.DOUBLE; import static io.prestosql.spi.type.IntegerType.INTEGER; import static io.prestosql.spi.type.TimestampType.TIMESTAMP; @@ -243,6 +245,14 @@ else if (type instanceof RowType) { checkArgument(value instanceof Block, "Expected value %d to be an instance of Block, but is a %s", i, value.getClass().getSimpleName()); } + else if (isShortDecimal(type)) { + checkArgument(value instanceof Long, + "Expected value %d to be an instance of Long, but is a %s", i, value.getClass().getSimpleName()); + } + else if (isLongDecimal(type)) { + checkArgument(value instanceof Slice, + "Expected value %d to be an instance of Slice, but is a %s", i, value.getClass().getSimpleName()); + } else { throw new IllegalStateException("Unsupported column type " + types.get(i)); } From a3ee83fb706ce471fa9a514fed6371b598d1fc1c Mon Sep 17 00:00:00 2001 From: praveenkrishna Date: Sun, 22 Mar 2020 21:01:24 +0530 Subject: [PATCH 041/519] Add null check for HashStrategy of CustomObjects --- .../io/prestosql/util/FastutilSetHelper.java | 6 +++++ .../operator/scalar/FunctionAssertions.java | 22 ++++++++++++++++--- .../sql/gen/TestExpressionCompiler.java | 12 ++++++++++ 3 files changed, 37 insertions(+), 3 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/util/FastutilSetHelper.java b/presto-main/src/main/java/io/prestosql/util/FastutilSetHelper.java index f24bacf2a795..c20be518c5e7 100644 --- a/presto-main/src/main/java/io/prestosql/util/FastutilSetHelper.java +++ b/presto-main/src/main/java/io/prestosql/util/FastutilSetHelper.java @@ -188,6 +188,9 @@ private ObjectStrategy(Metadata metadata, Type type) @Override public int hashCode(Object value) { + if (value == null) { + return 0; + } try { return Long.hashCode((long) hashCodeHandle.invokeExact(value)); } @@ -201,6 +204,9 @@ public int hashCode(Object value) @Override public boolean equals(Object a, Object b) { + if (b == null || a == null) { + return a == null && b == null; + } try { Boolean result = 
(Boolean) equalsHandle.invokeExact(a, b); // FastutilHashSet is not intended be used for indeterminate values lookup diff --git a/presto-main/src/test/java/io/prestosql/operator/scalar/FunctionAssertions.java b/presto-main/src/test/java/io/prestosql/operator/scalar/FunctionAssertions.java index 70563efb636e..f21c51f6046a 100644 --- a/presto-main/src/test/java/io/prestosql/operator/scalar/FunctionAssertions.java +++ b/presto-main/src/test/java/io/prestosql/operator/scalar/FunctionAssertions.java @@ -53,6 +53,7 @@ import io.prestosql.spi.connector.RecordSet; import io.prestosql.spi.predicate.TupleDomain; import io.prestosql.spi.predicate.Utils; +import io.prestosql.spi.type.DecimalType; import io.prestosql.spi.type.RowType; import io.prestosql.spi.type.TimeZoneKey; import io.prestosql.spi.type.Type; @@ -77,6 +78,7 @@ import java.io.Closeable; import java.lang.reflect.Field; import java.lang.reflect.Modifier; +import java.math.BigDecimal; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -97,8 +99,10 @@ import static io.prestosql.block.BlockAssertions.createBooleansBlock; import static io.prestosql.block.BlockAssertions.createDoublesBlock; import static io.prestosql.block.BlockAssertions.createIntsBlock; +import static io.prestosql.block.BlockAssertions.createLongDecimalsBlock; import static io.prestosql.block.BlockAssertions.createLongsBlock; import static io.prestosql.block.BlockAssertions.createRowBlock; +import static io.prestosql.block.BlockAssertions.createShortDecimalsBlock; import static io.prestosql.block.BlockAssertions.createSlicesBlock; import static io.prestosql.block.BlockAssertions.createStringsBlock; import static io.prestosql.block.BlockAssertions.createTimestampsWithTimezoneBlock; @@ -109,6 +113,8 @@ import static io.prestosql.spi.type.BigintType.BIGINT; import static io.prestosql.spi.type.BooleanType.BOOLEAN; import static io.prestosql.spi.type.DateTimeEncoding.packDateTimeWithZone; +import static 
io.prestosql.spi.type.DecimalType.createDecimalType; +import static io.prestosql.spi.type.Decimals.encodeScaledValue; import static io.prestosql.spi.type.DoubleType.DOUBLE; import static io.prestosql.spi.type.IntegerType.INTEGER; import static io.prestosql.spi.type.TimestampWithTimeZoneType.TIMESTAMP_WITH_TIME_ZONE; @@ -142,6 +148,8 @@ public final class FunctionAssertions private static final int TEST_ROW_NUMBER_OF_FIELDS = 2500; private static final RowType TEST_ROW_TYPE = createTestRowType(TEST_ROW_NUMBER_OF_FIELDS); private static final Block TEST_ROW_DATA = createTestRowData(TEST_ROW_TYPE); + private static final DecimalType SHORT_DECIMAL_TYPE = createDecimalType(14); + private static final DecimalType LONG_DECIMAL_TYPE = createDecimalType(28); private static final Page SOURCE_PAGE = new Page( createLongsBlock(1234L), @@ -154,7 +162,9 @@ public final class FunctionAssertions createTimestampsWithTimezoneBlock(packDateTimeWithZone(new DateTime(1970, 1, 1, 0, 1, 0, 999, DateTimeZone.UTC).getMillis(), TimeZoneKey.getTimeZoneKey("Z"))), createSlicesBlock(Slices.wrappedBuffer((byte) 0xab)), createIntsBlock(1234), - TEST_ROW_DATA); + TEST_ROW_DATA, + createShortDecimalsBlock("1234"), + createLongDecimalsBlock("1234")); private static final Page ZERO_CHANNEL_PAGE = new Page(1); @@ -170,6 +180,8 @@ public final class FunctionAssertions .put(new Symbol("bound_binary_literal"), VARBINARY) .put(new Symbol("bound_integer"), INTEGER) .put(new Symbol("bound_row"), TEST_ROW_TYPE) + .put(new Symbol("bound_short_decimal"), SHORT_DECIMAL_TYPE) + .put(new Symbol("bound_long_decimal"), LONG_DECIMAL_TYPE) .build()); private static final Map INPUT_MAPPING = ImmutableMap.builder() @@ -184,6 +196,8 @@ public final class FunctionAssertions .put(new Symbol("bound_binary_literal"), 8) .put(new Symbol("bound_integer"), 9) .put(new Symbol("bound_row"), 10) + .put(new Symbol("bound_short_decimal"), 11) + .put(new Symbol("bound_long_decimal"), 12) .build(); private static final 
PageSourceProvider PAGE_SOURCE_PROVIDER = new TestPageSourceProvider(); @@ -850,7 +864,7 @@ public ConnectorPageSource createPageSource(Session session, Split split, TableH assertInstanceOf(split.getConnectorSplit(), FunctionAssertions.TestSplit.class); FunctionAssertions.TestSplit testSplit = (FunctionAssertions.TestSplit) split.getConnectorSplit(); if (testSplit.isRecordSet()) { - RecordSet records = InMemoryRecordSet.builder(ImmutableList.of(BIGINT, VARCHAR, DOUBLE, BOOLEAN, BIGINT, VARCHAR, VARCHAR, TIMESTAMP_WITH_TIME_ZONE, VARBINARY, INTEGER, TEST_ROW_TYPE)) + RecordSet records = InMemoryRecordSet.builder(ImmutableList.of(BIGINT, VARCHAR, DOUBLE, BOOLEAN, BIGINT, VARCHAR, VARCHAR, TIMESTAMP_WITH_TIME_ZONE, VARBINARY, INTEGER, TEST_ROW_TYPE, SHORT_DECIMAL_TYPE, LONG_DECIMAL_TYPE)) .addRow( 1234L, "hello", @@ -862,7 +876,9 @@ public ConnectorPageSource createPageSource(Session session, Split split, TableH packDateTimeWithZone(new DateTime(1970, 1, 1, 0, 1, 0, 999, DateTimeZone.UTC).getMillis(), TimeZoneKey.getTimeZoneKey("Z")), Slices.wrappedBuffer((byte) 0xab), 1234, - TEST_ROW_DATA.getObject(0, Block.class)) + TEST_ROW_DATA.getObject(0, Block.class), + new BigDecimal("1234").unscaledValue().longValue(), + encodeScaledValue(new BigDecimal("1234"))) .build(); return new RecordPageSource(records); } diff --git a/presto-main/src/test/java/io/prestosql/sql/gen/TestExpressionCompiler.java b/presto-main/src/test/java/io/prestosql/sql/gen/TestExpressionCompiler.java index 65e9350e5afe..55bc2410b602 100644 --- a/presto-main/src/test/java/io/prestosql/sql/gen/TestExpressionCompiler.java +++ b/presto-main/src/test/java/io/prestosql/sql/gen/TestExpressionCompiler.java @@ -1321,6 +1321,18 @@ public void testHugeIn() assertExecute("bound_timestamp_with_timezone in (" + timestampValues + ")", BOOLEAN, true); assertExecute("bound_timestamp_with_timezone in (TIMESTAMP '1970-01-01 01:01:00.0+02:00')", BOOLEAN, false); + String shortDecimalValues = range(2000, 7000) + 
.mapToObj(value -> format("decimal '%s'", value)) + .collect(joining(", ")); + assertExecute("bound_short_decimal in (1234, " + shortDecimalValues + ")", BOOLEAN, true); + assertExecute("bound_short_decimal in (" + shortDecimalValues + ")", BOOLEAN, false); + + String longDecimalValues = range(2000, 7000) + .mapToObj(value -> format("decimal '123456789012345678901234567890%s'", value)) + .collect(joining(", ")); + assertExecute("bound_long_decimal in (1234, " + longDecimalValues + ")", BOOLEAN, true); + assertExecute("bound_long_decimal in (" + longDecimalValues + ")", BOOLEAN, false); + Futures.allAsList(futures).get(); } From 8f8427b4fb678127c985ae4e489e441c912cebba Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Sat, 4 Apr 2020 08:44:12 +0200 Subject: [PATCH 042/519] Test configuration cleanup --- presto-main/etc/log.properties | 1 - .../conf/environment/multinode/multinode-master-jvm.config | 1 - .../conf/environment/multinode/multinode-worker-jvm.config | 1 - .../presto-product-tests/conf/presto/etc/catalog/tpch.properties | 1 - .../docker/presto-product-tests/conf/presto/etc/log.properties | 1 - 5 files changed, 5 deletions(-) diff --git a/presto-main/etc/log.properties b/presto-main/etc/log.properties index 28704cfad880..97f82d5268ba 100644 --- a/presto-main/etc/log.properties +++ b/presto-main/etc/log.properties @@ -7,5 +7,4 @@ io.prestosql=INFO com.sun.jersey.guice.spi.container.GuiceComponentProviderFactory=WARN -com.ning.http.client=WARN io.prestosql.server.PluginManager=DEBUG diff --git a/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/multinode/multinode-master-jvm.config b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/multinode/multinode-master-jvm.config index eeabec44ba22..365b917c5746 100644 --- a/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/multinode/multinode-master-jvm.config +++ 
b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/multinode/multinode-master-jvm.config @@ -14,5 +14,4 @@ -Djdk.nio.maxCachedBufferSize=0 -DHADOOP_USER_NAME=hive -Duser.timezone=Asia/Kathmandu -#-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -XX:ErrorFile=/docker/logs/product-tests-presto-jvm-error-file.log diff --git a/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/multinode/multinode-worker-jvm.config b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/multinode/multinode-worker-jvm.config index 323520ce585a..2a027e117e77 100644 --- a/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/multinode/multinode-worker-jvm.config +++ b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/multinode/multinode-worker-jvm.config @@ -13,5 +13,4 @@ -Djdk.nio.maxCachedBufferSize=0 -DHADOOP_USER_NAME=hive -Duser.timezone=Asia/Kathmandu -#-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -XX:ErrorFile=/docker/logs/product-tests-presto-jvm-error-file.log diff --git a/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/presto/etc/catalog/tpch.properties b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/presto/etc/catalog/tpch.properties index 599f5ec6e21f..75110c5acf14 100644 --- a/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/presto/etc/catalog/tpch.properties +++ b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/presto/etc/catalog/tpch.properties @@ -1,2 +1 @@ connector.name=tpch -tpch.splits-per-node=4 diff --git a/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/presto/etc/log.properties 
b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/presto/etc/log.properties index f420c3223744..9f90cfda2439 100644 --- a/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/presto/etc/log.properties +++ b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/presto/etc/log.properties @@ -1,2 +1 @@ com.sun.jersey.guice.spi.container.GuiceComponentProviderFactory=WARN -com.ning.http.client=DEBUG From 78807879984d8a6b027bedc99b9403e80b77cba9 Mon Sep 17 00:00:00 2001 From: David Phillips Date: Sat, 4 Apr 2020 12:29:21 -0700 Subject: [PATCH 043/519] Handle empty and arbitrary slices in Parquet reader --- .../parquet/dictionary/BinaryDictionary.java | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/presto-parquet/src/main/java/io/prestosql/parquet/dictionary/BinaryDictionary.java b/presto-parquet/src/main/java/io/prestosql/parquet/dictionary/BinaryDictionary.java index bcbc72b16175..19a928cfe6d1 100644 --- a/presto-parquet/src/main/java/io/prestosql/parquet/dictionary/BinaryDictionary.java +++ b/presto-parquet/src/main/java/io/prestosql/parquet/dictionary/BinaryDictionary.java @@ -39,9 +39,19 @@ public BinaryDictionary(DictionaryPage dictionaryPage, Integer length) { super(dictionaryPage.getEncoding()); content = new Binary[dictionaryPage.getDictionarySize()]; + + byte[] dictionaryBytes; + int offset; Slice dictionarySlice = dictionaryPage.getSlice(); - byte[] dictionaryBytes = dictionarySlice.byteArray(); - int offset = dictionarySlice.byteArrayOffset(); + if (dictionarySlice.hasByteArray()) { + dictionaryBytes = dictionarySlice.byteArray(); + offset = dictionarySlice.byteArrayOffset(); + } + else { + dictionaryBytes = dictionarySlice.getBytes(); + offset = 0; + } + if (length == null) { for (int i = 0; i < content.length; i++) { int len = readIntLittleEndian(dictionaryBytes, offset); From 13a3c523f2739a2373c251697f6826daaf193266 Mon Sep 17 
00:00:00 2001 From: Martin Traverso Date: Fri, 3 Apr 2020 10:38:32 -0700 Subject: [PATCH 044/519] Report field name when decoding Elasticsearch value fails --- .../ElasticsearchPageSource.java | 22 +++++++++---------- .../elasticsearch/decoders/ArrayDecoder.java | 8 +++++-- .../elasticsearch/decoders/BigintDecoder.java | 11 +++++++++- .../decoders/BooleanDecoder.java | 11 +++++++++- .../elasticsearch/decoders/DoubleDecoder.java | 11 +++++++++- .../decoders/IntegerDecoder.java | 11 +++++++++- .../elasticsearch/decoders/RealDecoder.java | 11 +++++++++- .../elasticsearch/decoders/RowDecoder.java | 8 +++++-- .../decoders/SmallintDecoder.java | 13 +++++++++-- .../decoders/TimestampDecoder.java | 12 +++++----- .../decoders/TinyintDecoder.java | 13 +++++++++-- .../decoders/VarbinaryDecoder.java | 11 +++++++++- .../decoders/VarcharDecoder.java | 11 +++++++++- 13 files changed, 122 insertions(+), 31 deletions(-) diff --git a/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/ElasticsearchPageSource.java b/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/ElasticsearchPageSource.java index 397269862b89..164e4005daa8 100644 --- a/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/ElasticsearchPageSource.java +++ b/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/ElasticsearchPageSource.java @@ -287,34 +287,34 @@ private List createDecoders(ConnectorSession session, List getter, BlockBuilder output) { @@ -36,7 +45,7 @@ else if (value instanceof Number) { BIGINT.writeLong(output, ((Number) value).longValue()); } else { - throw new PrestoException(TYPE_MISMATCH, "Expected a numeric value for BIGINT field"); + throw new PrestoException(TYPE_MISMATCH, format("Expected a numeric value for field '%s' of type BIGINT: %s [%s]", path, value, value.getClass().getSimpleName())); } } } diff --git a/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/BooleanDecoder.java 
b/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/BooleanDecoder.java index a48a9591ac2a..068024609543 100644 --- a/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/BooleanDecoder.java +++ b/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/BooleanDecoder.java @@ -21,10 +21,19 @@ import static io.prestosql.spi.StandardErrorCode.TYPE_MISMATCH; import static io.prestosql.spi.type.BooleanType.BOOLEAN; +import static java.lang.String.format; +import static java.util.Objects.requireNonNull; public class BooleanDecoder implements Decoder { + private final String path; + + public BooleanDecoder(String path) + { + this.path = requireNonNull(path, "path is null"); + } + @Override public void decode(SearchHit hit, Supplier getter, BlockBuilder output) { @@ -36,7 +45,7 @@ else if (value instanceof Boolean) { BOOLEAN.writeBoolean(output, (Boolean) value); } else { - throw new PrestoException(TYPE_MISMATCH, "Expected a boolean value for BOOLEAN field"); + throw new PrestoException(TYPE_MISMATCH, format("Expected a boolean value for field %s of type BOOLEAN: %s [%s]", path, value, value.getClass().getSimpleName())); } } } diff --git a/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/DoubleDecoder.java b/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/DoubleDecoder.java index 695983c38c00..3cb450fdd858 100644 --- a/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/DoubleDecoder.java +++ b/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/DoubleDecoder.java @@ -21,10 +21,19 @@ import static io.prestosql.spi.StandardErrorCode.TYPE_MISMATCH; import static io.prestosql.spi.type.DoubleType.DOUBLE; +import static java.lang.String.format; +import static java.util.Objects.requireNonNull; public class DoubleDecoder implements Decoder { + private final String path; + + public DoubleDecoder(String path) + { + this.path = 
requireNonNull(path, "path is null"); + } + @Override public void decode(SearchHit hit, Supplier getter, BlockBuilder output) { @@ -36,7 +45,7 @@ else if (value instanceof Number) { DOUBLE.writeDouble(output, ((Number) value).doubleValue()); } else { - throw new PrestoException(TYPE_MISMATCH, "Expected a numeric value for DOUBLE field"); + throw new PrestoException(TYPE_MISMATCH, format("Expected a numeric value for field %s of type DOUBLE: %s [%s]", path, value, value.getClass().getSimpleName())); } } } diff --git a/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/IntegerDecoder.java b/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/IntegerDecoder.java index b4a0033d076e..3799381f29ff 100644 --- a/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/IntegerDecoder.java +++ b/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/IntegerDecoder.java @@ -22,10 +22,19 @@ import static io.prestosql.spi.StandardErrorCode.TYPE_MISMATCH; import static io.prestosql.spi.type.IntegerType.INTEGER; import static java.lang.Math.toIntExact; +import static java.lang.String.format; +import static java.util.Objects.requireNonNull; public class IntegerDecoder implements Decoder { + private final String path; + + public IntegerDecoder(String path) + { + this.path = requireNonNull(path, "path is null"); + } + @Override public void decode(SearchHit hit, Supplier getter, BlockBuilder output) { @@ -37,7 +46,7 @@ else if (value instanceof Number) { INTEGER.writeLong(output, toIntExact(((Number) value).longValue())); } else { - throw new PrestoException(TYPE_MISMATCH, "Expected a numeric value for INTEGER field"); + throw new PrestoException(TYPE_MISMATCH, format("Expected a numeric value for field '%s' of type INTEGER: %s [%s]", path, value, value.getClass().getSimpleName())); } } } diff --git a/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/RealDecoder.java 
b/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/RealDecoder.java index 3f15cc8f2345..fbadc02db71f 100644 --- a/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/RealDecoder.java +++ b/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/RealDecoder.java @@ -21,10 +21,19 @@ import static io.prestosql.spi.StandardErrorCode.TYPE_MISMATCH; import static io.prestosql.spi.type.RealType.REAL; +import static java.lang.String.format; +import static java.util.Objects.requireNonNull; public class RealDecoder implements Decoder { + private final String path; + + public RealDecoder(String path) + { + this.path = requireNonNull(path, "path is null"); + } + @Override public void decode(SearchHit hit, Supplier getter, BlockBuilder output) { @@ -36,7 +45,7 @@ else if (value instanceof Number) { REAL.writeLong(output, Float.floatToRawIntBits(((Number) value).floatValue())); } else { - throw new PrestoException(TYPE_MISMATCH, "Expected a numeric value for REAL field"); + throw new PrestoException(TYPE_MISMATCH, format("Expected a numeric value for field %s of type REAL: %s [%s]", path, value, value.getClass().getSimpleName())); } } } diff --git a/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/RowDecoder.java b/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/RowDecoder.java index d3c36affc662..fc7033e4bd46 100644 --- a/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/RowDecoder.java +++ b/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/RowDecoder.java @@ -23,15 +23,19 @@ import static io.prestosql.elasticsearch.ElasticsearchPageSource.getField; import static io.prestosql.spi.StandardErrorCode.TYPE_MISMATCH; +import static java.lang.String.format; +import static java.util.Objects.requireNonNull; public class RowDecoder implements Decoder { + private final String path; private final List fieldNames; private final List decoders; - 
public RowDecoder(List fieldNames, List decoders) + public RowDecoder(String path, List fieldNames, List decoders) { + this.path = requireNonNull(path, "path is null"); this.fieldNames = fieldNames; this.decoders = decoders; } @@ -53,7 +57,7 @@ else if (data instanceof Map) { output.closeEntry(); } else { - throw new PrestoException(TYPE_MISMATCH, "Expected object for ROW field"); + throw new PrestoException(TYPE_MISMATCH, format("Expected object for field '%s' of type ROW: %s [%s]", path, data, data.getClass().getSimpleName())); } } } diff --git a/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/SmallintDecoder.java b/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/SmallintDecoder.java index de7b297ac2d7..e1994fdf252d 100644 --- a/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/SmallintDecoder.java +++ b/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/SmallintDecoder.java @@ -21,10 +21,19 @@ import static io.prestosql.spi.StandardErrorCode.TYPE_MISMATCH; import static io.prestosql.spi.type.SmallintType.SMALLINT; +import static java.lang.String.format; +import static java.util.Objects.requireNonNull; public class SmallintDecoder implements Decoder { + private final String path; + + public SmallintDecoder(String path) + { + this.path = requireNonNull(path, "path is null"); + } + @Override public void decode(SearchHit hit, Supplier getter, BlockBuilder output) { @@ -36,13 +45,13 @@ else if (value instanceof Number) { long decoded = ((Number) value).longValue(); if (decoded < Short.MIN_VALUE || decoded > Short.MAX_VALUE) { - throw new PrestoException(TYPE_MISMATCH, "Value out of range for SMALLINT field"); + throw new PrestoException(TYPE_MISMATCH, format("Value out of range for field '%s' of type SMALLINT: %s", path, decoded)); } SMALLINT.writeLong(output, decoded); } else { - throw new PrestoException(TYPE_MISMATCH, "Expected a numeric value for SMALLINT field"); + throw new 
PrestoException(TYPE_MISMATCH, format("Expected a numeric value for field '%s' of type SMALLINT: %s [%s]", path, value, value.getClass().getSimpleName())); } } } diff --git a/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/TimestampDecoder.java b/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/TimestampDecoder.java index b31627b98155..15e468ad0509 100644 --- a/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/TimestampDecoder.java +++ b/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/TimestampDecoder.java @@ -29,6 +29,7 @@ import static io.prestosql.spi.type.TimestampType.TIMESTAMP; import static java.lang.String.format; import static java.time.format.DateTimeFormatter.ISO_DATE_TIME; +import static java.util.Objects.requireNonNull; public class TimestampDecoder implements Decoder @@ -39,7 +40,7 @@ public class TimestampDecoder public TimestampDecoder(ConnectorSession session, String path) { - this.path = path; + this.path = requireNonNull(path, "path is null"); this.zoneId = ZoneId.of(session.getTimeZoneKey().getId()); } @@ -51,7 +52,7 @@ public void decode(SearchHit hit, Supplier getter, BlockBuilder output) if (documentField != null) { if (documentField.getValues().size() > 1) { - throw new PrestoException(TYPE_MISMATCH, "Expected single value for column: " + path); + throw new PrestoException(TYPE_MISMATCH, format("Expected single value for column '%s', found: %s", path, documentField.getValues().size())); } value = documentField.getValue(); } @@ -72,9 +73,10 @@ else if (value instanceof Number) { } else { throw new PrestoException(NOT_SUPPORTED, format( - "Unsupported representation for timestamp type: %s [%s]", - value.getClass().getSimpleName(), - value)); + "Unsupported representation for field '%s' of type TIMESTAMP: %s [%s]", + path, + value, + value.getClass().getSimpleName())); } long epochMillis = timestamp.atZone(zoneId) diff --git 
a/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/TinyintDecoder.java b/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/TinyintDecoder.java index 560d470befc1..1e5e63a0987a 100644 --- a/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/TinyintDecoder.java +++ b/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/TinyintDecoder.java @@ -21,10 +21,19 @@ import static io.prestosql.spi.StandardErrorCode.TYPE_MISMATCH; import static io.prestosql.spi.type.TinyintType.TINYINT; +import static java.lang.String.format; +import static java.util.Objects.requireNonNull; public class TinyintDecoder implements Decoder { + private final String path; + + public TinyintDecoder(String path) + { + this.path = requireNonNull(path, "path is null"); + } + @Override public void decode(SearchHit hit, Supplier getter, BlockBuilder output) { @@ -36,13 +45,13 @@ else if (value instanceof Number) { long decoded = ((Number) value).longValue(); if (decoded < Byte.MIN_VALUE || decoded > Byte.MAX_VALUE) { - throw new PrestoException(TYPE_MISMATCH, "Value out of range for TINYINT field"); + throw new PrestoException(TYPE_MISMATCH, format("Value out of range for field '%s' of type TINYINT: %s", path, decoded)); } TINYINT.writeLong(output, decoded); } else { - throw new PrestoException(TYPE_MISMATCH, "Expected a numeric value for TINYINT field"); + throw new PrestoException(TYPE_MISMATCH, format("Expected a numeric value for field '%s' of type TINYINT: %s [%s]", path, value, value.getClass().getSimpleName())); } } } diff --git a/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/VarbinaryDecoder.java b/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/VarbinaryDecoder.java index 84d4d6ddcf27..1a772d5d690e 100644 --- a/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/VarbinaryDecoder.java +++ 
b/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/VarbinaryDecoder.java @@ -23,10 +23,19 @@ import static io.prestosql.spi.StandardErrorCode.TYPE_MISMATCH; import static io.prestosql.spi.type.VarbinaryType.VARBINARY; +import static java.lang.String.format; +import static java.util.Objects.requireNonNull; public class VarbinaryDecoder implements Decoder { + private final String path; + + public VarbinaryDecoder(String path) + { + this.path = requireNonNull(path, "path is null"); + } + @Override public void decode(SearchHit hit, Supplier getter, BlockBuilder output) { @@ -38,7 +47,7 @@ else if (value instanceof String) { VARBINARY.writeSlice(output, Slices.wrappedBuffer(Base64.getDecoder().decode(value.toString()))); } else { - throw new PrestoException(TYPE_MISMATCH, "Expected a string value for VARBINARY field"); + throw new PrestoException(TYPE_MISMATCH, format("Expected a string value for field '%s' of type VARBINARY: %s [%s]", path, value, value.getClass().getSimpleName())); } } } diff --git a/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/VarcharDecoder.java b/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/VarcharDecoder.java index d4883a576559..9eeb2af8e810 100644 --- a/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/VarcharDecoder.java +++ b/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/VarcharDecoder.java @@ -22,10 +22,19 @@ import static io.prestosql.spi.StandardErrorCode.TYPE_MISMATCH; import static io.prestosql.spi.type.VarcharType.VARCHAR; +import static java.lang.String.format; +import static java.util.Objects.requireNonNull; public class VarcharDecoder implements Decoder { + private final String path; + + public VarcharDecoder(String path) + { + this.path = requireNonNull(path, "path is null"); + } + @Override public void decode(SearchHit hit, Supplier getter, BlockBuilder output) { @@ -37,7 +46,7 @@ else if (value instanceof String) { 
VARCHAR.writeSlice(output, Slices.utf8Slice(value.toString())); } else { - throw new PrestoException(TYPE_MISMATCH, "Expected a string value for VARCHAR field"); + throw new PrestoException(TYPE_MISMATCH, format("Expected a string value for field '%s' of type VARCHAR: %s [%s]", path, value, value.getClass().getSimpleName())); } } } From fb69cc4dd3f9b90df07dafd16eb29573c30dbfc6 Mon Sep 17 00:00:00 2001 From: Martin Traverso Date: Fri, 3 Apr 2020 10:05:01 -0700 Subject: [PATCH 045/519] Add instructions on how to start Elasticsearch --- .../io/prestosql/elasticsearch/ElasticsearchQueryRunner.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/ElasticsearchQueryRunner.java b/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/ElasticsearchQueryRunner.java index 90b17668e34c..701a942a4756 100644 --- a/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/ElasticsearchQueryRunner.java +++ b/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/ElasticsearchQueryRunner.java @@ -119,6 +119,9 @@ public static Session createSession() public static void main(String[] args) throws Exception { + // To start Elasticsearch: + // docker run -p 9200:9200 -e "discovery.type=single-node" docker.elastic.co/elasticsearch/elasticsearch:7.6.2 + Logging.initialize(); DistributedQueryRunner queryRunner = createElasticsearchQueryRunner( From 44438f2366ce20f39aa4c9d4a9cc5757eb40be34 Mon Sep 17 00:00:00 2001 From: Martin Traverso Date: Wed, 1 Apr 2020 10:45:20 -0700 Subject: [PATCH 046/519] Call applyProjection during table scan column pruning The existing code was limiting the columns in the TableScan node but wasn't providing this information to connectors. Some connectors can leverage this information during split generation. 
--- .../prestosql/sql/planner/PlanOptimizers.java | 14 +-- .../iterative/rule/PruneTableScanColumns.java | 90 ++++++++++++++++--- .../PruneUnreferencedOutputs.java | 44 ++++++--- .../sql/planner/assertions/BasePlanTest.java | 3 +- .../rule/TestPruneTableScanColumns.java | 4 +- .../optimizations/TestEliminateSorts.java | 5 +- .../optimizations/TestMergeWindows.java | 3 +- ...TestOptimizeMixedDistinctAggregations.java | 3 +- .../optimizations/TestReorderWindows.java | 2 +- .../TestSetFlatteningOptimizer.java | 3 +- .../TestThriftProjectionPushdown.java | 40 +++++++++ 11 files changed, 171 insertions(+), 40 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java b/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java index dd363ca56cd8..33b4fa215038 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java @@ -256,7 +256,7 @@ public PlanOptimizers( new PruneWindowColumns(), new PruneOffsetColumns(), new PruneLimitColumns(), - new PruneTableScanColumns()); + new PruneTableScanColumns(metadata, typeAnalyzer)); Set> projectionPushdownRules = ImmutableSet.of( new PushProjectionIntoTableScan(metadata, typeAnalyzer), @@ -370,7 +370,7 @@ public PlanOptimizers( new ImplementIntersectAsUnion(metadata), new ImplementExceptAsUnion(metadata))), new LimitPushDown(), // Run the LimitPushDown after flattening set operators to make it easier to do the set flattening - new PruneUnreferencedOutputs(), + new PruneUnreferencedOutputs(metadata, typeAnalyzer), inlineProjections, new IterativeOptimizer( ruleStats, @@ -433,7 +433,7 @@ public PlanOptimizers( // Temporary hack: separate optimizer step to avoid the sample node being replaced by filter before pushing // it to table scan node ImmutableSet.of(new ImplementBernoulliSampleAsFilter(metadata))), - new PruneUnreferencedOutputs(), + new PruneUnreferencedOutputs(metadata, 
typeAnalyzer), new IterativeOptimizer( ruleStats, statsCalculator, @@ -446,7 +446,7 @@ public PlanOptimizers( simplifyOptimizer, // Re-run the SimplifyExpressions to simplify any recomposed expressions from other optimizations projectionPushDown, new UnaliasSymbolReferences(metadata), // Run again because predicate pushdown and projection pushdown might add more projections - new PruneUnreferencedOutputs(), // Make sure to run this before index join. Filtered projections may not have all the columns. + new PruneUnreferencedOutputs(metadata, typeAnalyzer), // Make sure to run this before index join. Filtered projections may not have all the columns. new IndexJoinOptimizer(metadata), // Run this after projections and filters have been fully simplified and pushed down new IterativeOptimizer( ruleStats, @@ -465,7 +465,7 @@ public PlanOptimizers( .addAll(GatherAndMergeWindows.rules()) .build()), inlineProjections, - new PruneUnreferencedOutputs(), // Make sure to run this at the end to help clean the plan for logging/execution and not remove info that other optimizers might need at an earlier point + new PruneUnreferencedOutputs(metadata, typeAnalyzer), // Make sure to run this at the end to help clean the plan for logging/execution and not remove info that other optimizers might need at an earlier point new IterativeOptimizer( ruleStats, statsCalculator, @@ -487,7 +487,7 @@ public PlanOptimizers( estimatedExchangesCostCalculator, ImmutableSet.of(new PushPredicateIntoTableScan(metadata, typeAnalyzer))), projectionPushDown, - new PruneUnreferencedOutputs(), + new PruneUnreferencedOutputs(metadata, typeAnalyzer), new IterativeOptimizer( ruleStats, statsCalculator, @@ -577,7 +577,7 @@ public PlanOptimizers( builder.add(projectionPushDown); builder.add(inlineProjections); builder.add(new UnaliasSymbolReferences(metadata)); // Run unalias after merging projections to simplify projections more efficiently - builder.add(new PruneUnreferencedOutputs()); + builder.add(new 
PruneUnreferencedOutputs(metadata, typeAnalyzer)); builder.add(new IterativeOptimizer( ruleStats, diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneTableScanColumns.java b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneTableScanColumns.java index 1fc14343760a..f2099be3fbe2 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneTableScanColumns.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneTableScanColumns.java @@ -13,34 +13,104 @@ */ package io.prestosql.sql.planner.iterative.rule; +import com.google.common.collect.ImmutableMap; +import io.prestosql.Session; +import io.prestosql.metadata.Metadata; +import io.prestosql.metadata.TableHandle; +import io.prestosql.spi.connector.ColumnHandle; +import io.prestosql.spi.connector.ProjectionApplicationResult; +import io.prestosql.spi.expression.ConnectorExpression; +import io.prestosql.spi.expression.Variable; import io.prestosql.sql.planner.Symbol; +import io.prestosql.sql.planner.TypeAnalyzer; +import io.prestosql.sql.planner.TypeProvider; import io.prestosql.sql.planner.plan.PlanNode; import io.prestosql.sql.planner.plan.TableScanNode; +import java.util.List; +import java.util.Map; import java.util.Optional; import java.util.Set; +import java.util.function.Function; -import static com.google.common.collect.Maps.filterKeys; +import static com.google.common.collect.ImmutableList.toImmutableList; +import static com.google.common.collect.ImmutableMap.toImmutableMap; import static io.prestosql.sql.planner.plan.Patterns.tableScan; import static io.prestosql.util.MoreLists.filteredCopy; +import static java.util.Objects.requireNonNull; +/** + * TODO: this is a special case of PushProjectionIntoTableScan and should be merged with that rule. 
+ */ public class PruneTableScanColumns extends ProjectOffPushDownRule { - public PruneTableScanColumns() + private final Metadata metadata; + private final TypeAnalyzer typeAnalyzer; + + public PruneTableScanColumns(Metadata metadata, TypeAnalyzer typeAnalyzer) { super(tableScan()); + this.metadata = requireNonNull(metadata, "metadata is null"); + this.typeAnalyzer = requireNonNull(typeAnalyzer, "typeAnalyzer is null"); } @Override - protected Optional pushDownProjectOff(Context context, TableScanNode tableScanNode, Set referencedOutputs) + protected Optional pushDownProjectOff(Context context, TableScanNode node, Set referencedOutputs) { - return Optional.of( - new TableScanNode( - tableScanNode.getId(), - tableScanNode.getTable(), - filteredCopy(tableScanNode.getOutputSymbols(), referencedOutputs::contains), - filterKeys(tableScanNode.getAssignments(), referencedOutputs::contains), - tableScanNode.getEnforcedConstraint())); + Session session = context.getSession(); + TypeProvider types = context.getSymbolAllocator().getTypes(); + + return pruneColumns(metadata, typeAnalyzer, types, session, node, referencedOutputs); + } + + public static Optional pruneColumns(Metadata metadata, TypeAnalyzer typeAnalyzer, TypeProvider types, Session session, TableScanNode node, Set referencedOutputs) + { + List newOutputs = filteredCopy(node.getOutputSymbols(), referencedOutputs::contains); + + if (newOutputs.size() == node.getOutputSymbols().size()) { + return Optional.empty(); + } + + List projections = newOutputs.stream() + .map(symbol -> new Variable(symbol.getName(), types.get(symbol))) + .collect(toImmutableList()); + + TableHandle handle = node.getTable(); + Optional> result = metadata.applyProjection( + session, + handle, + projections, + newOutputs.stream() + .collect(toImmutableMap(Symbol::getName, node.getAssignments()::get))); + + Map newAssignments; + // Attempt to push down the constrained list of columns into the connector. 
+ // Bail out if the connector does anything other than limit the list of columns (e.g., if it synthesizes arbitrary expressions) + if (result.isPresent() && result.get().getProjections().stream().allMatch(Variable.class::isInstance)) { + handle = result.get().getHandle(); + + Map assignments = result.get().getAssignments().stream() + .collect(toImmutableMap(ProjectionApplicationResult.Assignment::getVariable, ProjectionApplicationResult.Assignment::getColumn)); + + ImmutableMap.Builder builder = ImmutableMap.builder(); + for (int i = 0; i < newOutputs.size(); i++) { + Variable variable = (Variable) result.get().getProjections().get(i); + builder.put(newOutputs.get(i), assignments.get(variable.getName())); + } + + newAssignments = builder.build(); + } + else { + newAssignments = newOutputs.stream() + .collect(toImmutableMap(Function.identity(), node.getAssignments()::get)); + } + + return Optional.of(new TableScanNode( + node.getId(), + handle, + newOutputs, + newAssignments, + node.getEnforcedConstraint())); } } diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PruneUnreferencedOutputs.java b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PruneUnreferencedOutputs.java index 0ced3c4bbd77..da2ba562bcb9 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PruneUnreferencedOutputs.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PruneUnreferencedOutputs.java @@ -22,6 +22,7 @@ import com.google.common.collect.Sets; import io.prestosql.Session; import io.prestosql.execution.warnings.WarningCollector; +import io.prestosql.metadata.Metadata; import io.prestosql.spi.connector.ColumnHandle; import io.prestosql.sql.planner.OrderingScheme; import io.prestosql.sql.planner.PartitioningScheme; @@ -29,7 +30,9 @@ import io.prestosql.sql.planner.Symbol; import io.prestosql.sql.planner.SymbolAllocator; import io.prestosql.sql.planner.SymbolsExtractor; +import 
io.prestosql.sql.planner.TypeAnalyzer; import io.prestosql.sql.planner.TypeProvider; +import io.prestosql.sql.planner.iterative.rule.PruneTableScanColumns; import io.prestosql.sql.planner.plan.AggregationNode; import io.prestosql.sql.planner.plan.AggregationNode.Aggregation; import io.prestosql.sql.planner.plan.ApplyNode; @@ -109,6 +112,15 @@ public class PruneUnreferencedOutputs implements PlanOptimizer { + private final Metadata metadata; + private final TypeAnalyzer typeAnalyzer; + + public PruneUnreferencedOutputs(Metadata metadata, TypeAnalyzer typeAnalyzer) + { + this.metadata = requireNonNull(metadata, "metadata is null"); + this.typeAnalyzer = requireNonNull(typeAnalyzer, "typeAnalyzer is null"); + } + @Override public PlanNode optimize(PlanNode plan, Session session, TypeProvider types, SymbolAllocator symbolAllocator, PlanNodeIdAllocator idAllocator, WarningCollector warningCollector) { @@ -118,12 +130,27 @@ public PlanNode optimize(PlanNode plan, Session session, TypeProvider types, Sym requireNonNull(symbolAllocator, "symbolAllocator is null"); requireNonNull(idAllocator, "idAllocator is null"); - return SimplePlanRewriter.rewriteWith(new Rewriter(), plan, ImmutableSet.of()); + return SimplePlanRewriter.rewriteWith(new Rewriter(metadata, types, typeAnalyzer, symbolAllocator, session), plan, ImmutableSet.of()); } private static class Rewriter extends SimplePlanRewriter> { + private final Metadata metadata; + private final TypeProvider types; + private final TypeAnalyzer typeAnalyzer; + private final SymbolAllocator symbolAllocator; + private final Session session; + + public Rewriter(Metadata metadata, TypeProvider types, TypeAnalyzer typeAnalyzer, SymbolAllocator symbolAllocator, Session session) + { + this.metadata = metadata; + this.types = types; + this.typeAnalyzer = typeAnalyzer; + this.symbolAllocator = symbolAllocator; + this.session = session; + } + @Override public PlanNode visitExplainAnalyze(ExplainAnalyzeNode node, RewriteContext> context) { 
@@ -424,19 +451,8 @@ public PlanNode visitWindow(WindowNode node, RewriteContext> context @Override public PlanNode visitTableScan(TableScanNode node, RewriteContext> context) { - List newOutputs = node.getOutputSymbols().stream() - .filter(context.get()::contains) - .collect(toImmutableList()); - - Map newAssignments = newOutputs.stream() - .collect(Collectors.toMap(Function.identity(), node.getAssignments()::get)); - - return new TableScanNode( - node.getId(), - node.getTable(), - newOutputs, - newAssignments, - node.getEnforcedConstraint()); + return PruneTableScanColumns.pruneColumns(metadata, typeAnalyzer, types, session, node, context.get()) + .orElse(node); } @Override diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/assertions/BasePlanTest.java b/presto-main/src/test/java/io/prestosql/sql/planner/assertions/BasePlanTest.java index c5ddeb43660d..18ffa9d00528 100644 --- a/presto-main/src/test/java/io/prestosql/sql/planner/assertions/BasePlanTest.java +++ b/presto-main/src/test/java/io/prestosql/sql/planner/assertions/BasePlanTest.java @@ -24,6 +24,7 @@ import io.prestosql.sql.planner.Plan; import io.prestosql.sql.planner.RuleStatsRecorder; import io.prestosql.sql.planner.SubPlan; +import io.prestosql.sql.planner.TypeAnalyzer; import io.prestosql.sql.planner.iterative.IterativeOptimizer; import io.prestosql.sql.planner.iterative.rule.RemoveRedundantIdentityProjections; import io.prestosql.sql.planner.optimizations.PlanOptimizer; @@ -156,7 +157,7 @@ protected void assertMinimallyOptimizedPlan(@Language("SQL") String sql, PlanMat { List optimizers = ImmutableList.of( new UnaliasSymbolReferences(getQueryRunner().getMetadata()), - new PruneUnreferencedOutputs(), + new PruneUnreferencedOutputs(queryRunner.getMetadata(), new TypeAnalyzer(queryRunner.getSqlParser(), queryRunner.getMetadata())), new IterativeOptimizer( new RuleStatsRecorder(), queryRunner.getStatsCalculator(), diff --git 
a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneTableScanColumns.java b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneTableScanColumns.java index fab37ad8f97f..9ba5fc74cd38 100644 --- a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneTableScanColumns.java +++ b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneTableScanColumns.java @@ -42,7 +42,7 @@ public class TestPruneTableScanColumns @Test public void testNotAllOutputsReferenced() { - tester().assertThat(new PruneTableScanColumns()) + tester().assertThat(new PruneTableScanColumns(tester().getMetadata(), tester().getTypeAnalyzer())) .on(p -> { Symbol orderdate = p.symbol("orderdate", DATE); Symbol totalprice = p.symbol("totalprice", DOUBLE); @@ -68,7 +68,7 @@ orderdate, new TpchColumnHandle(orderdate.getName(), DATE), @Test public void testAllOutputsReferenced() { - tester().assertThat(new PruneTableScanColumns()) + tester().assertThat(new PruneTableScanColumns(tester().getMetadata(), tester().getTypeAnalyzer())) .on(p -> p.project( Assignments.of(p.symbol("y"), expression("x")), diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/optimizations/TestEliminateSorts.java b/presto-main/src/test/java/io/prestosql/sql/planner/optimizations/TestEliminateSorts.java index 45675682d8de..a38f509979c9 100644 --- a/presto-main/src/test/java/io/prestosql/sql/planner/optimizations/TestEliminateSorts.java +++ b/presto-main/src/test/java/io/prestosql/sql/planner/optimizations/TestEliminateSorts.java @@ -88,10 +88,11 @@ public void testNotEliminateSorts() private void assertUnitPlan(@Language("SQL") String sql, PlanMatchPattern pattern) { + TypeAnalyzer typeAnalyzer = new TypeAnalyzer(new SqlParser(), getQueryRunner().getMetadata()); List optimizers = ImmutableList.of( new UnaliasSymbolReferences(getQueryRunner().getMetadata()), - new AddExchanges(getQueryRunner().getMetadata(), new TypeAnalyzer(new 
SqlParser(), getQueryRunner().getMetadata())), - new PruneUnreferencedOutputs(), + new AddExchanges(getQueryRunner().getMetadata(), typeAnalyzer), + new PruneUnreferencedOutputs(getQueryRunner().getMetadata(), typeAnalyzer), new IterativeOptimizer( new RuleStatsRecorder(), getQueryRunner().getStatsCalculator(), diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/optimizations/TestMergeWindows.java b/presto-main/src/test/java/io/prestosql/sql/planner/optimizations/TestMergeWindows.java index 2f72480125bd..cc764f18b8da 100644 --- a/presto-main/src/test/java/io/prestosql/sql/planner/optimizations/TestMergeWindows.java +++ b/presto-main/src/test/java/io/prestosql/sql/planner/optimizations/TestMergeWindows.java @@ -18,6 +18,7 @@ import com.google.common.collect.ImmutableSet; import io.prestosql.spi.block.SortOrder; import io.prestosql.sql.planner.RuleStatsRecorder; +import io.prestosql.sql.planner.TypeAnalyzer; import io.prestosql.sql.planner.assertions.BasePlanTest; import io.prestosql.sql.planner.assertions.ExpectedValueProvider; import io.prestosql.sql.planner.assertions.PlanMatchPattern; @@ -562,7 +563,7 @@ private void assertUnitPlan(@Language("SQL") String sql, PlanMatchPattern patter .add(new RemoveRedundantIdentityProjections()) .addAll(GatherAndMergeWindows.rules()) .build()), - new PruneUnreferencedOutputs()); + new PruneUnreferencedOutputs(getQueryRunner().getMetadata(), new TypeAnalyzer(getQueryRunner().getSqlParser(), getQueryRunner().getMetadata()))); assertPlan(sql, pattern, optimizers); } } diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/optimizations/TestOptimizeMixedDistinctAggregations.java b/presto-main/src/test/java/io/prestosql/sql/planner/optimizations/TestOptimizeMixedDistinctAggregations.java index 0a797a464e65..96cac6e6f3c6 100644 --- a/presto-main/src/test/java/io/prestosql/sql/planner/optimizations/TestOptimizeMixedDistinctAggregations.java +++ 
b/presto-main/src/test/java/io/prestosql/sql/planner/optimizations/TestOptimizeMixedDistinctAggregations.java @@ -18,6 +18,7 @@ import com.google.common.collect.ImmutableSet; import io.prestosql.SystemSessionProperties; import io.prestosql.sql.planner.RuleStatsRecorder; +import io.prestosql.sql.planner.TypeAnalyzer; import io.prestosql.sql.planner.assertions.BasePlanTest; import io.prestosql.sql.planner.assertions.ExpectedValueProvider; import io.prestosql.sql.planner.assertions.PlanMatchPattern; @@ -124,7 +125,7 @@ private void assertUnitPlan(String sql, PlanMatchPattern pattern) new SingleDistinctAggregationToGroupBy(), new MultipleDistinctAggregationToMarkDistinct())), new OptimizeMixedDistinctAggregations(getQueryRunner().getMetadata()), - new PruneUnreferencedOutputs()); + new PruneUnreferencedOutputs(getQueryRunner().getMetadata(), new TypeAnalyzer(getQueryRunner().getSqlParser(), getQueryRunner().getMetadata()))); assertPlan(sql, pattern, optimizers); } } diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/optimizations/TestReorderWindows.java b/presto-main/src/test/java/io/prestosql/sql/planner/optimizations/TestReorderWindows.java index 22dc0f1279d8..2a66e8753e99 100644 --- a/presto-main/src/test/java/io/prestosql/sql/planner/optimizations/TestReorderWindows.java +++ b/presto-main/src/test/java/io/prestosql/sql/planner/optimizations/TestReorderWindows.java @@ -333,7 +333,7 @@ private void assertUnitPlan(@Language("SQL") String sql, PlanMatchPattern patter new GatherAndMergeWindows.SwapAdjacentWindowsBySpecifications(0), new GatherAndMergeWindows.SwapAdjacentWindowsBySpecifications(1), new GatherAndMergeWindows.SwapAdjacentWindowsBySpecifications(2))), - new PruneUnreferencedOutputs()); + new PruneUnreferencedOutputs(getQueryRunner().getMetadata(), new TypeAnalyzer(getQueryRunner().getSqlParser(), getQueryRunner().getMetadata()))); assertPlan(sql, pattern, optimizers); } } diff --git 
a/presto-main/src/test/java/io/prestosql/sql/planner/optimizations/TestSetFlatteningOptimizer.java b/presto-main/src/test/java/io/prestosql/sql/planner/optimizations/TestSetFlatteningOptimizer.java index de509efceede..95cf4c0eb817 100644 --- a/presto-main/src/test/java/io/prestosql/sql/planner/optimizations/TestSetFlatteningOptimizer.java +++ b/presto-main/src/test/java/io/prestosql/sql/planner/optimizations/TestSetFlatteningOptimizer.java @@ -16,6 +16,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import io.prestosql.sql.planner.RuleStatsRecorder; +import io.prestosql.sql.planner.TypeAnalyzer; import io.prestosql.sql.planner.assertions.BasePlanTest; import io.prestosql.sql.planner.assertions.PlanMatchPattern; import io.prestosql.sql.planner.iterative.IterativeOptimizer; @@ -128,7 +129,7 @@ protected void assertPlan(String sql, PlanMatchPattern pattern) { List optimizers = ImmutableList.of( new UnaliasSymbolReferences(getQueryRunner().getMetadata()), - new PruneUnreferencedOutputs(), + new PruneUnreferencedOutputs(getQueryRunner().getMetadata(), new TypeAnalyzer(getQueryRunner().getSqlParser(), getQueryRunner().getMetadata())), new IterativeOptimizer( new RuleStatsRecorder(), getQueryRunner().getStatsCalculator(), diff --git a/presto-thrift/src/test/java/io/prestosql/plugin/thrift/integration/TestThriftProjectionPushdown.java b/presto-thrift/src/test/java/io/prestosql/plugin/thrift/integration/TestThriftProjectionPushdown.java index 0e15eba7b528..cec653607ed7 100644 --- a/presto-thrift/src/test/java/io/prestosql/plugin/thrift/integration/TestThriftProjectionPushdown.java +++ b/presto-thrift/src/test/java/io/prestosql/plugin/thrift/integration/TestThriftProjectionPushdown.java @@ -32,6 +32,7 @@ import io.prestosql.sql.parser.SqlParser; import io.prestosql.sql.planner.Symbol; import io.prestosql.sql.planner.TypeAnalyzer; +import io.prestosql.sql.planner.iterative.rule.PruneTableScanColumns; import 
io.prestosql.sql.planner.iterative.rule.PushProjectionIntoTableScan; import io.prestosql.sql.planner.iterative.rule.test.BaseRuleTest; import io.prestosql.sql.planner.plan.Assignments; @@ -199,4 +200,43 @@ public void testProjectionPushdown() TupleDomain.all(), ImmutableMap.of(columnName, equalTo(columnHandle))))); } + + @Test + public void testPruneColumns() + { + PruneTableScanColumns rule = new PruneTableScanColumns( + tester().getMetadata(), + new TypeAnalyzer(new SqlParser(), tester().getMetadata())); + + ThriftColumnHandle nationKeyColumn = new ThriftColumnHandle("nationKey", VARCHAR, "", false); + ThriftColumnHandle nameColumn = new ThriftColumnHandle("name", VARCHAR, "", false); + + tester().assertThat(rule) + .on(p -> { + Symbol nationKey = p.symbol(nationKeyColumn.getColumnName(), VARCHAR); + Symbol name = p.symbol(nameColumn.getColumnName(), VARCHAR); + return p.project( + Assignments.of( + p.symbol("expr", VARCHAR), + nationKey.toSymbolReference()), + p.tableScan( + NATION_TABLE, + ImmutableList.of(nationKey, name), + ImmutableMap.builder() + .put(nationKey, nationKeyColumn) + .put(name, nameColumn) + .build())); + }) + .withSession(SESSION) + .matches(project( + ImmutableMap.of("expr", expression(new SymbolReference(nationKeyColumn.getColumnName()))), + tableScan( + equalTo(new ThriftTableHandle( + TINY_SCHEMA, + "nation", + TupleDomain.all(), + Optional.of(ImmutableSet.of(nationKeyColumn)))), + TupleDomain.all(), + ImmutableMap.of(nationKeyColumn.getColumnName(), equalTo(nationKeyColumn))))); + } } From b99da44f7ea9bfbc8da00488cc73951606efd3cf Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Wed, 1 Apr 2020 18:10:48 +0200 Subject: [PATCH 047/519] Add column-pruning rules for set operation nodes Adds iterative rules for pruning ExceptNode's, IntersectNode's and UnionNode's children's columns not referenced in the node's mapping. 
--- .../prestosql/sql/planner/PlanOptimizers.java | 6 + .../rule/PruneExceptSourceColumns.java | 49 ++++++++ .../rule/PruneIntersectSourceColumns.java | 49 ++++++++ .../rule/PruneUnionSourceColumns.java | 49 ++++++++ .../rule/TestPruneExceptSourceColumns.java | 94 ++++++++++++++++ .../rule/TestPruneIntersectSourceColumns.java | 105 ++++++++++++++++++ .../rule/TestPruneUnionSourceColumns.java | 105 ++++++++++++++++++ 7 files changed, 457 insertions(+) create mode 100644 presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneExceptSourceColumns.java create mode 100644 presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneIntersectSourceColumns.java create mode 100644 presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneUnionSourceColumns.java create mode 100644 presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneExceptSourceColumns.java create mode 100644 presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneIntersectSourceColumns.java create mode 100644 presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneUnionSourceColumns.java diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java b/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java index 33b4fa215038..a011f040e578 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java @@ -67,8 +67,10 @@ import io.prestosql.sql.planner.iterative.rule.PruneCountAggregationOverScalar; import io.prestosql.sql.planner.iterative.rule.PruneDeleteSourceColumns; import io.prestosql.sql.planner.iterative.rule.PruneEnforceSingleRowColumns; +import io.prestosql.sql.planner.iterative.rule.PruneExceptSourceColumns; import io.prestosql.sql.planner.iterative.rule.PruneFilterColumns; import io.prestosql.sql.planner.iterative.rule.PruneIndexSourceColumns; +import 
io.prestosql.sql.planner.iterative.rule.PruneIntersectSourceColumns; import io.prestosql.sql.planner.iterative.rule.PruneJoinChildrenColumns; import io.prestosql.sql.planner.iterative.rule.PruneJoinColumns; import io.prestosql.sql.planner.iterative.rule.PruneLimitColumns; @@ -81,6 +83,7 @@ import io.prestosql.sql.planner.iterative.rule.PruneSemiJoinFilteringSourceColumns; import io.prestosql.sql.planner.iterative.rule.PruneTableScanColumns; import io.prestosql.sql.planner.iterative.rule.PruneTopNColumns; +import io.prestosql.sql.planner.iterative.rule.PruneUnionSourceColumns; import io.prestosql.sql.planner.iterative.rule.PruneValuesColumns; import io.prestosql.sql.planner.iterative.rule.PruneWindowColumns; import io.prestosql.sql.planner.iterative.rule.PushAggregationThroughOuterJoin; @@ -242,8 +245,10 @@ public PlanOptimizers( new PruneCorrelatedJoinColumns(), new PruneDeleteSourceColumns(), new PruneEnforceSingleRowColumns(), + new PruneExceptSourceColumns(), new PruneFilterColumns(), new PruneIndexSourceColumns(), + new PruneIntersectSourceColumns(), new PruneJoinChildrenColumns(), new PruneJoinColumns(), new PruneMarkDistinctColumns(), @@ -252,6 +257,7 @@ public PlanOptimizers( new PruneSemiJoinColumns(), new PruneSemiJoinFilteringSourceColumns(), new PruneTopNColumns(), + new PruneUnionSourceColumns(), new PruneValuesColumns(), new PruneWindowColumns(), new PruneOffsetColumns(), diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneExceptSourceColumns.java b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneExceptSourceColumns.java new file mode 100644 index 000000000000..f370915fe875 --- /dev/null +++ b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneExceptSourceColumns.java @@ -0,0 +1,49 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.sql.planner.iterative.rule; + +import com.google.common.collect.ImmutableSet; +import io.prestosql.matching.Captures; +import io.prestosql.matching.Pattern; +import io.prestosql.sql.planner.Symbol; +import io.prestosql.sql.planner.iterative.Rule; +import io.prestosql.sql.planner.plan.ExceptNode; + +import java.util.Set; + +import static io.prestosql.sql.planner.iterative.rule.Util.restrictChildOutputs; +import static io.prestosql.sql.planner.plan.Patterns.except; + +public class PruneExceptSourceColumns + implements Rule +{ + @Override + public Pattern getPattern() + { + return except(); + } + + @Override + public Result apply(ExceptNode node, Captures captures, Context context) + { + @SuppressWarnings("unchecked") + Set[] referencedInputs = new Set[node.getSources().size()]; + for (int i = 0; i < node.getSources().size(); i++) { + referencedInputs[i] = ImmutableSet.copyOf(node.sourceOutputLayout(i)); + } + return restrictChildOutputs(context.getIdAllocator(), node, referencedInputs) + .map(Rule.Result::ofPlanNode) + .orElse(Rule.Result.empty()); + } +} diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneIntersectSourceColumns.java b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneIntersectSourceColumns.java new file mode 100644 index 000000000000..874a9ba18b90 --- /dev/null +++ b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneIntersectSourceColumns.java @@ -0,0 +1,49 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + 
* you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.sql.planner.iterative.rule; + +import com.google.common.collect.ImmutableSet; +import io.prestosql.matching.Captures; +import io.prestosql.matching.Pattern; +import io.prestosql.sql.planner.Symbol; +import io.prestosql.sql.planner.iterative.Rule; +import io.prestosql.sql.planner.plan.IntersectNode; + +import java.util.Set; + +import static io.prestosql.sql.planner.iterative.rule.Util.restrictChildOutputs; +import static io.prestosql.sql.planner.plan.Patterns.intersect; + +public class PruneIntersectSourceColumns + implements Rule +{ + @Override + public Pattern getPattern() + { + return intersect(); + } + + @Override + public Result apply(IntersectNode node, Captures captures, Context context) + { + @SuppressWarnings("unchecked") + Set[] referencedInputs = new Set[node.getSources().size()]; + for (int i = 0; i < node.getSources().size(); i++) { + referencedInputs[i] = ImmutableSet.copyOf(node.sourceOutputLayout(i)); + } + return restrictChildOutputs(context.getIdAllocator(), node, referencedInputs) + .map(Rule.Result::ofPlanNode) + .orElse(Rule.Result.empty()); + } +} diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneUnionSourceColumns.java b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneUnionSourceColumns.java new file mode 100644 index 000000000000..d80198484c7f --- /dev/null +++ b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneUnionSourceColumns.java @@ -0,0 +1,49 @@ +/* 
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.sql.planner.iterative.rule; + +import com.google.common.collect.ImmutableSet; +import io.prestosql.matching.Captures; +import io.prestosql.matching.Pattern; +import io.prestosql.sql.planner.Symbol; +import io.prestosql.sql.planner.iterative.Rule; +import io.prestosql.sql.planner.plan.UnionNode; + +import java.util.Set; + +import static io.prestosql.sql.planner.iterative.rule.Util.restrictChildOutputs; +import static io.prestosql.sql.planner.plan.Patterns.union; + +public class PruneUnionSourceColumns + implements Rule +{ + @Override + public Pattern getPattern() + { + return union(); + } + + @Override + public Result apply(UnionNode node, Captures captures, Context context) + { + @SuppressWarnings("unchecked") + Set[] referencedInputs = new Set[node.getSources().size()]; + for (int i = 0; i < node.getSources().size(); i++) { + referencedInputs[i] = ImmutableSet.copyOf(node.sourceOutputLayout(i)); + } + return restrictChildOutputs(context.getIdAllocator(), node, referencedInputs) + .map(Rule.Result::ofPlanNode) + .orElse(Rule.Result.empty()); + } +} diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneExceptSourceColumns.java b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneExceptSourceColumns.java new file mode 100644 index 000000000000..3af0682bfcf8 --- /dev/null +++ 
b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneExceptSourceColumns.java @@ -0,0 +1,94 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.sql.planner.iterative.rule; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableListMultimap; +import com.google.common.collect.ImmutableMap; +import io.prestosql.sql.planner.Symbol; +import io.prestosql.sql.planner.iterative.rule.test.BaseRuleTest; +import org.testng.annotations.Test; + +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.except; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.expression; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.strictProject; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.values; + +public class TestPruneExceptSourceColumns + extends BaseRuleTest +{ + @Test + public void testPruneOneChild() + { + tester().assertThat(new PruneExceptSourceColumns()) + .on(p -> { + Symbol output = p.symbol("output"); + Symbol a = p.symbol("a"); + Symbol b = p.symbol("b"); + Symbol c = p.symbol("c"); + return p.except( + ImmutableListMultimap.of(output, a, output, c), + ImmutableList.of( + p.values(a, b), + p.values(c))); + }) + .matches(except( + strictProject( + ImmutableMap.of("a", expression("a")), + values("a", "b")), + values("c"))); + } + + @Test + public void testPruneAllChildren() + { + tester().assertThat(new 
PruneExceptSourceColumns()) + .on(p -> { + Symbol output = p.symbol("output"); + Symbol a = p.symbol("a"); + Symbol b = p.symbol("b"); + Symbol c = p.symbol("c"); + Symbol d = p.symbol("d"); + return p.except( + ImmutableListMultimap.of(output, a, output, c), + ImmutableList.of( + p.values(a, b), + p.values(c, d))); + }) + .matches(except( + strictProject( + ImmutableMap.of("a", expression("a")), + values("a", "b")), + strictProject( + ImmutableMap.of("c", expression("c")), + values("c", "d")))); + } + + @Test + public void testAllInputsReferenced() + { + tester().assertThat(new PruneExceptSourceColumns()) + .on(p -> { + Symbol output = p.symbol("output"); + Symbol a = p.symbol("a"); + Symbol b = p.symbol("b"); + return p.except( + ImmutableListMultimap.of(output, a, output, b), + ImmutableList.of( + p.values(a), + p.values(b))); + }) + .doesNotFire(); + } +} diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneIntersectSourceColumns.java b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneIntersectSourceColumns.java new file mode 100644 index 000000000000..a01589a39085 --- /dev/null +++ b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneIntersectSourceColumns.java @@ -0,0 +1,105 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.sql.planner.iterative.rule; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableListMultimap; +import com.google.common.collect.ImmutableMap; +import io.prestosql.sql.planner.Symbol; +import io.prestosql.sql.planner.iterative.rule.test.BaseRuleTest; +import org.testng.annotations.Test; + +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.expression; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.intersect; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.strictProject; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.values; + +public class TestPruneIntersectSourceColumns + extends BaseRuleTest +{ + @Test + public void testPruneOneChild() + { + tester().assertThat(new PruneIntersectSourceColumns()) + .on(p -> { + Symbol output = p.symbol("output"); + Symbol a = p.symbol("a"); + Symbol b = p.symbol("b"); + Symbol c = p.symbol("c"); + Symbol d = p.symbol("d"); + return p.intersect( + ImmutableListMultimap.of(output, a, output, c, output, d), + ImmutableList.of( + p.values(a, b), + p.values(c), + p.values(d))); + }) + .matches(intersect( + strictProject( + ImmutableMap.of("a", expression("a")), + values("a", "b")), + values("c"), + values("d"))); + } + + @Test + public void testPruneAllChildren() + { + tester().assertThat(new PruneIntersectSourceColumns()) + .on(p -> { + Symbol output = p.symbol("output"); + Symbol a = p.symbol("a"); + Symbol b = p.symbol("b"); + Symbol c = p.symbol("c"); + Symbol d = p.symbol("d"); + Symbol e = p.symbol("e"); + Symbol f = p.symbol("f"); + return p.intersect( + ImmutableListMultimap.of(output, a, output, c, output, e), + ImmutableList.of( + p.values(a, b), + p.values(c, d), + p.values(e, f))); + }) + .matches(intersect( + strictProject( + ImmutableMap.of("a", expression("a")), + values("a", "b")), + strictProject( + ImmutableMap.of("c", expression("c")), + values("c", "d")), + strictProject( 
+ ImmutableMap.of("e", expression("e")), + values("e", "f")))); + } + + @Test + public void testAllInputsReferenced() + { + tester().assertThat(new PruneIntersectSourceColumns()) + .on(p -> { + Symbol output = p.symbol("output"); + Symbol a = p.symbol("a"); + Symbol b = p.symbol("b"); + Symbol c = p.symbol("c"); + return p.intersect( + ImmutableListMultimap.of(output, a, output, b, output, c), + ImmutableList.of( + p.values(a), + p.values(b), + p.values(c))); + }) + .doesNotFire(); + } +} diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneUnionSourceColumns.java b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneUnionSourceColumns.java new file mode 100644 index 000000000000..12adad273d5e --- /dev/null +++ b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneUnionSourceColumns.java @@ -0,0 +1,105 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.sql.planner.iterative.rule; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableListMultimap; +import com.google.common.collect.ImmutableMap; +import io.prestosql.sql.planner.Symbol; +import io.prestosql.sql.planner.iterative.rule.test.BaseRuleTest; +import org.testng.annotations.Test; + +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.expression; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.strictProject; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.union; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.values; + +public class TestPruneUnionSourceColumns + extends BaseRuleTest +{ + @Test + public void testPruneOneChild() + { + tester().assertThat(new PruneUnionSourceColumns()) + .on(p -> { + Symbol output = p.symbol("output"); + Symbol a = p.symbol("a"); + Symbol b = p.symbol("b"); + Symbol c = p.symbol("c"); + Symbol d = p.symbol("d"); + return p.union( + ImmutableListMultimap.of(output, a, output, c, output, d), + ImmutableList.of( + p.values(a, b), + p.values(c), + p.values(d))); + }) + .matches(union( + strictProject( + ImmutableMap.of("a", expression("a")), + values("a", "b")), + values("c"), + values("d"))); + } + + @Test + public void testPruneAllChildren() + { + tester().assertThat(new PruneUnionSourceColumns()) + .on(p -> { + Symbol output = p.symbol("output"); + Symbol a = p.symbol("a"); + Symbol b = p.symbol("b"); + Symbol c = p.symbol("c"); + Symbol d = p.symbol("d"); + Symbol e = p.symbol("e"); + Symbol f = p.symbol("f"); + return p.union( + ImmutableListMultimap.of(output, a, output, c, output, e), + ImmutableList.of( + p.values(a, b), + p.values(c, d), + p.values(e, f))); + }) + .matches(union( + strictProject( + ImmutableMap.of("a", expression("a")), + values("a", "b")), + strictProject( + ImmutableMap.of("c", expression("c")), + values("c", "d")), + strictProject( + ImmutableMap.of("e", 
expression("e")), + values("e", "f")))); + } + + @Test + public void testAllInputsReferenced() + { + tester().assertThat(new PruneUnionSourceColumns()) + .on(p -> { + Symbol output = p.symbol("output"); + Symbol a = p.symbol("a"); + Symbol b = p.symbol("b"); + Symbol c = p.symbol("c"); + return p.union( + ImmutableListMultimap.of(output, a, output, b, output, c), + ImmutableList.of( + p.values(a), + p.values(b), + p.values(c))); + }) + .doesNotFire(); + } +} From 8fad160288ce9dc5fb0a1613edf7d6517f7c63fb Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Fri, 3 Apr 2020 16:11:28 +0200 Subject: [PATCH 048/519] Add Project-off rule for UnionNode --- .../prestosql/sql/planner/PlanOptimizers.java | 2 + .../iterative/rule/PruneUnionColumns.java | 72 +++++++++++++++++++ .../iterative/rule/TestPruneUnionColumns.java | 68 ++++++++++++++++++ 3 files changed, 142 insertions(+) create mode 100644 presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneUnionColumns.java create mode 100644 presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneUnionColumns.java diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java b/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java index a011f040e578..64ac36381ec6 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java @@ -83,6 +83,7 @@ import io.prestosql.sql.planner.iterative.rule.PruneSemiJoinFilteringSourceColumns; import io.prestosql.sql.planner.iterative.rule.PruneTableScanColumns; import io.prestosql.sql.planner.iterative.rule.PruneTopNColumns; +import io.prestosql.sql.planner.iterative.rule.PruneUnionColumns; import io.prestosql.sql.planner.iterative.rule.PruneUnionSourceColumns; import io.prestosql.sql.planner.iterative.rule.PruneValuesColumns; import io.prestosql.sql.planner.iterative.rule.PruneWindowColumns; 
@@ -257,6 +258,7 @@ public PlanOptimizers( new PruneSemiJoinColumns(), new PruneSemiJoinFilteringSourceColumns(), new PruneTopNColumns(), + new PruneUnionColumns(), new PruneUnionSourceColumns(), new PruneValuesColumns(), new PruneWindowColumns(), diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneUnionColumns.java b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneUnionColumns.java new file mode 100644 index 000000000000..bed617e889ff --- /dev/null +++ b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneUnionColumns.java @@ -0,0 +1,72 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.sql.planner.iterative.rule; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableListMultimap; +import io.prestosql.sql.planner.Symbol; +import io.prestosql.sql.planner.plan.PlanNode; +import io.prestosql.sql.planner.plan.UnionNode; + +import java.util.Map; +import java.util.Optional; +import java.util.Set; + +import static com.google.common.collect.ImmutableListMultimap.toImmutableListMultimap; +import static io.prestosql.sql.planner.plan.Patterns.union; + +/** + * Transforms + *
+ * - Project (a)
+ *      - Union
+ *        output mappings: {a->c, a->e, b->d, b->f}
+ *          - Source (c, d)
+ *          - Source (e, f)
+ * 
+ * into: + *
+ * - Project (a)
+ *      - Union
+ *        output mappings: {a->c, a->e}
+ *          - Source (c, d)
+ *          - Source (e, f)
+ * 
+ * Note: as a result of this rule, the UnionNode's sources + * are eligible for pruning outputs. This is accomplished + * by PruneUnionSourceColumns rule. + */ +public class PruneUnionColumns + extends ProjectOffPushDownRule +{ + public PruneUnionColumns() + { + super(union()); + } + + @Override + protected Optional pushDownProjectOff(Context context, UnionNode unionNode, Set referencedOutputs) + { + ImmutableListMultimap prunedOutputMappings = unionNode.getSymbolMapping().entries().stream() + .filter(entry -> referencedOutputs.contains(entry.getKey())) + .collect(toImmutableListMultimap(Map.Entry::getKey, Map.Entry::getValue)); + + return Optional.of( + new UnionNode( + unionNode.getId(), + unionNode.getSources(), + prunedOutputMappings, + ImmutableList.copyOf(prunedOutputMappings.keySet()))); + } +} diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneUnionColumns.java b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneUnionColumns.java new file mode 100644 index 000000000000..5b5cea5fae46 --- /dev/null +++ b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneUnionColumns.java @@ -0,0 +1,68 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.sql.planner.iterative.rule; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableListMultimap; +import com.google.common.collect.ImmutableMap; +import io.prestosql.sql.planner.Symbol; +import io.prestosql.sql.planner.iterative.rule.test.BaseRuleTest; +import io.prestosql.sql.planner.plan.Assignments; +import org.testng.annotations.Test; + +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.strictProject; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.union; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.values; + +public class TestPruneUnionColumns + extends BaseRuleTest +{ + @Test + public void testPruneInputColumns() + { + tester().assertThat(new PruneUnionColumns()) + .on(p -> { + Symbol a = p.symbol("a"); + Symbol b = p.symbol("b"); + Symbol c = p.symbol("c"); + return p.project( + Assignments.of(), + p.union( + ImmutableListMultimap.of(a, b, a, c), + ImmutableList.of( + p.values(b), + p.values(c)))); + }) + .matches( + strictProject( + ImmutableMap.of(), + union( + values("b"), + values("c")) + .withExactOutputs())); + } + + @Test + public void testAllOutputsReferenced() + { + tester().assertThat(new PruneUnionColumns()) + .on(p -> { + Symbol a = p.symbol("a"); + return p.project( + Assignments.identity(a), + p.enforceSingleRow(p.values(a))); + }) + .doesNotFire(); + } +} From 2fd1c54a8c2cfd22e4ce9e8228cd6f6a3e776faa Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Mon, 30 Mar 2020 16:01:05 +0200 Subject: [PATCH 049/519] Fix formatting --- .../planner/iterative/rule/RemoveRedundantDistinctLimit.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/RemoveRedundantDistinctLimit.java b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/RemoveRedundantDistinctLimit.java index 2c5dc3b2b24a..b7a3354ece49 
100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/RemoveRedundantDistinctLimit.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/RemoveRedundantDistinctLimit.java @@ -59,7 +59,8 @@ public Result apply(DistinctLimitNode node, Captures captures, Context context) return Result.ofPlanNode(node.getSource()); } if (isAtMost(node.getSource(), context.getLookup(), node.getLimit())) { - return Result.ofPlanNode(new AggregationNode(node.getId(), + return Result.ofPlanNode(new AggregationNode( + node.getId(), node.getSource(), ImmutableMap.of(), singleGroupingSet(node.getDistinctSymbols()), From 6496319429e91ef064e17a4db79ac4f6ecaf2f86 Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Sat, 4 Apr 2020 17:04:24 +0200 Subject: [PATCH 050/519] Add column-pruning rule for DistinctLimitNode --- .../prestosql/sql/planner/PlanOptimizers.java | 2 + .../rule/PruneDistinctLimitSourceColumns.java | 48 ++++++++++ .../assertions/DistinctLimitMatcher.java | 80 +++++++++++++++++ .../planner/assertions/PlanMatchPattern.java | 17 ++++ .../TestPruneDistinctLimitSourceColumns.java | 89 +++++++++++++++++++ .../iterative/rule/test/PlanBuilder.java | 7 +- 6 files changed, 242 insertions(+), 1 deletion(-) create mode 100644 presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneDistinctLimitSourceColumns.java create mode 100644 presto-main/src/test/java/io/prestosql/sql/planner/assertions/DistinctLimitMatcher.java create mode 100644 presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneDistinctLimitSourceColumns.java diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java b/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java index 64ac36381ec6..9e983b745556 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java @@ -66,6 
+66,7 @@ import io.prestosql.sql.planner.iterative.rule.PruneCorrelatedJoinColumns; import io.prestosql.sql.planner.iterative.rule.PruneCountAggregationOverScalar; import io.prestosql.sql.planner.iterative.rule.PruneDeleteSourceColumns; +import io.prestosql.sql.planner.iterative.rule.PruneDistinctLimitSourceColumns; import io.prestosql.sql.planner.iterative.rule.PruneEnforceSingleRowColumns; import io.prestosql.sql.planner.iterative.rule.PruneExceptSourceColumns; import io.prestosql.sql.planner.iterative.rule.PruneFilterColumns; @@ -245,6 +246,7 @@ public PlanOptimizers( new PruneAssignUniqueIdColumns(), new PruneCorrelatedJoinColumns(), new PruneDeleteSourceColumns(), + new PruneDistinctLimitSourceColumns(), new PruneEnforceSingleRowColumns(), new PruneExceptSourceColumns(), new PruneFilterColumns(), diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneDistinctLimitSourceColumns.java b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneDistinctLimitSourceColumns.java new file mode 100644 index 000000000000..1f7eb91f9769 --- /dev/null +++ b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneDistinctLimitSourceColumns.java @@ -0,0 +1,48 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.sql.planner.iterative.rule; + +import com.google.common.collect.ImmutableSet; +import io.prestosql.matching.Captures; +import io.prestosql.matching.Pattern; +import io.prestosql.sql.planner.Symbol; +import io.prestosql.sql.planner.iterative.Rule; +import io.prestosql.sql.planner.plan.DistinctLimitNode; + +import static io.prestosql.sql.planner.iterative.rule.Util.restrictChildOutputs; +import static io.prestosql.sql.planner.plan.Patterns.distinctLimit; + +public class PruneDistinctLimitSourceColumns + implements Rule +{ + private static final Pattern PATTERN = distinctLimit(); + + @Override + public Pattern getPattern() + { + return PATTERN; + } + + @Override + public Result apply(DistinctLimitNode distinctLimit, Captures captures, Context context) + { + ImmutableSet.Builder expectedInputs = ImmutableSet.builder(); + expectedInputs.addAll(distinctLimit.getOutputSymbols()); + distinctLimit.getHashSymbol().ifPresent(expectedInputs::add); + + return restrictChildOutputs(context.getIdAllocator(), distinctLimit, expectedInputs.build()) + .map(Result::ofPlanNode) + .orElse(Result.empty()); + } +} diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/assertions/DistinctLimitMatcher.java b/presto-main/src/test/java/io/prestosql/sql/planner/assertions/DistinctLimitMatcher.java new file mode 100644 index 000000000000..c541b141fafe --- /dev/null +++ b/presto-main/src/test/java/io/prestosql/sql/planner/assertions/DistinctLimitMatcher.java @@ -0,0 +1,80 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.sql.planner.assertions; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; +import io.prestosql.Session; +import io.prestosql.cost.StatsProvider; +import io.prestosql.metadata.Metadata; +import io.prestosql.sql.planner.plan.DistinctLimitNode; +import io.prestosql.sql.planner.plan.PlanNode; + +import java.util.List; +import java.util.Optional; + +import static com.google.common.base.MoreObjects.toStringHelper; +import static com.google.common.base.Preconditions.checkState; +import static com.google.common.collect.ImmutableSet.toImmutableSet; +import static io.prestosql.sql.planner.assertions.MatchResult.NO_MATCH; +import static java.util.Objects.requireNonNull; + +public class DistinctLimitMatcher + implements Matcher +{ + private final long limit; + private final List distinctSymbols; + private final Optional hashSymbol; + + public DistinctLimitMatcher(long limit, List distinctSymbols, Optional hashSymbol) + { + this.limit = limit; + this.distinctSymbols = ImmutableList.copyOf(requireNonNull(distinctSymbols, "distinctSymbols is null")); + this.hashSymbol = requireNonNull(hashSymbol, "hashSymbol is null"); + } + + @Override + public boolean shapeMatches(PlanNode node) + { + return node instanceof DistinctLimitNode; + } + + @Override + public MatchResult detailMatches(PlanNode node, StatsProvider stats, Session session, Metadata metadata, SymbolAliases symbolAliases) + { + checkState(shapeMatches(node), "Plan testing framework error: shapeMatches returned false in detailMatches in %s", this.getClass().getName()); + DistinctLimitNode distinctLimitNode = (DistinctLimitNode) node; + + if (distinctLimitNode.getLimit() != limit) { + return NO_MATCH; + } + + if (!distinctLimitNode.getHashSymbol().equals(hashSymbol.map(alias -> alias.toSymbol(symbolAliases)))) { + return NO_MATCH; + } + + return 
new MatchResult(ImmutableSet.copyOf(distinctLimitNode.getDistinctSymbols()) + .equals(distinctSymbols.stream().map(alias -> alias.toSymbol(symbolAliases)).collect(toImmutableSet()))); + } + + @Override + public String toString() + { + return toStringHelper(this) + .add("limit", limit) + .add("distinctSymbols", distinctSymbols) + .add("hashSymbol", hashSymbol) + .toString(); + } +} diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/assertions/PlanMatchPattern.java b/presto-main/src/test/java/io/prestosql/sql/planner/assertions/PlanMatchPattern.java index 2b3560c815bf..43b23cb7e5bf 100644 --- a/presto-main/src/test/java/io/prestosql/sql/planner/assertions/PlanMatchPattern.java +++ b/presto-main/src/test/java/io/prestosql/sql/planner/assertions/PlanMatchPattern.java @@ -36,6 +36,7 @@ import io.prestosql.sql.planner.plan.ApplyNode; import io.prestosql.sql.planner.plan.AssignUniqueId; import io.prestosql.sql.planner.plan.CorrelatedJoinNode; +import io.prestosql.sql.planner.plan.DistinctLimitNode; import io.prestosql.sql.planner.plan.EnforceSingleRowNode; import io.prestosql.sql.planner.plan.ExceptNode; import io.prestosql.sql.planner.plan.ExchangeNode; @@ -243,6 +244,22 @@ public static PlanMatchPattern aggregation( return result; } + public static PlanMatchPattern distinctLimit(long limit, List distinctSymbols, PlanMatchPattern source) + { + return node(DistinctLimitNode.class, source).with(new DistinctLimitMatcher( + limit, + toSymbolAliases(distinctSymbols), + Optional.empty())); + } + + public static PlanMatchPattern distinctLimit(long limit, List distinctSymbols, String hashSymbol, PlanMatchPattern source) + { + return node(DistinctLimitNode.class, source).with(new DistinctLimitMatcher( + limit, + toSymbolAliases(distinctSymbols), + Optional.of(new SymbolAlias(hashSymbol)))); + } + public static PlanMatchPattern markDistinct( String markerSymbol, List distinctSymbols, diff --git 
a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneDistinctLimitSourceColumns.java b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneDistinctLimitSourceColumns.java new file mode 100644 index 000000000000..9be770758a56 --- /dev/null +++ b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneDistinctLimitSourceColumns.java @@ -0,0 +1,89 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.sql.planner.iterative.rule; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import io.prestosql.sql.planner.Symbol; +import io.prestosql.sql.planner.iterative.rule.test.BaseRuleTest; +import org.testng.annotations.Test; + +import java.util.Optional; + +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.distinctLimit; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.expression; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.strictProject; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.values; + +public class TestPruneDistinctLimitSourceColumns + extends BaseRuleTest +{ + @Test + public void testPruneInputColumn() + { + tester().assertThat(new PruneDistinctLimitSourceColumns()) + .on(p -> { + Symbol a = p.symbol("a"); + Symbol b = p.symbol("b"); + return p.distinctLimit( + 5, + ImmutableList.of(a), + p.values(a, b)); + }) + .matches( + distinctLimit( 
+ 5, + ImmutableList.of("a"), + strictProject( + ImmutableMap.of("a", expression("a")), + values("a", "b")))); + + tester().assertThat(new PruneDistinctLimitSourceColumns()) + .on(p -> { + Symbol a = p.symbol("a"); + Symbol b = p.symbol("b"); + Symbol hashSymbol = p.symbol("hash_symbol"); + return p.distinctLimit( + 5, + ImmutableList.of(a), + Optional.of(hashSymbol), + p.values(a, b, hashSymbol)); + }) + .matches( + distinctLimit( + 5, + ImmutableList.of("a"), + "hash_symbol", + strictProject( + ImmutableMap.of("a", expression("a"), "hash_symbol", expression("hash_symbol")), + values("a", "b", "hash_symbol")))); + } + + @Test + public void allInputsNeeded() + { + tester().assertThat(new PruneDistinctLimitSourceColumns()) + .on(p -> { + Symbol a = p.symbol("a"); + Symbol b = p.symbol("b"); + Symbol hashSymbol = p.symbol("hash_symbol"); + return p.distinctLimit( + 5, + ImmutableList.of(a, b), + Optional.of(hashSymbol), + p.values(a, b, hashSymbol)); + }) + .doesNotFire(); + } +} diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/test/PlanBuilder.java b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/test/PlanBuilder.java index 849832b148aa..793cd1ae7c2c 100644 --- a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/test/PlanBuilder.java +++ b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/test/PlanBuilder.java @@ -293,6 +293,11 @@ public AggregationNode aggregation(Consumer aggregationBuild } public DistinctLimitNode distinctLimit(long count, List distinctSymbols, PlanNode source) + { + return distinctLimit(count, distinctSymbols, Optional.empty(), source); + } + + public DistinctLimitNode distinctLimit(long count, List distinctSymbols, Optional hashSymbol, PlanNode source) { return new DistinctLimitNode( idAllocator.getNextId(), @@ -300,7 +305,7 @@ public DistinctLimitNode distinctLimit(long count, List distinctSymbols, count, false, distinctSymbols, - Optional.empty()); + hashSymbol); 
} public class AggregationBuilder From 7138dcecfdcd9c7367e7d12d666a00f5aa0a6eba Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Grzegorz=20Kokosi=C5=84ski?= Date: Sun, 5 Apr 2020 10:56:49 +0200 Subject: [PATCH 051/519] Use unique table name in test --- .../AbstractTestDistributedQueries.java | 24 ++++++++++--------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java index c5bb60499721..3374e5f40435 100644 --- a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java +++ b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java @@ -490,7 +490,9 @@ public void testInsert() @Test public void testInsertWithCoercion() { - assertUpdate("CREATE TABLE test_insert_with_coercion (" + + String tableName = "test_insert_with_coercion_" + randomTableSuffix(); + + assertUpdate("CREATE TABLE " + tableName + " (" + "tinyint_column TINYINT, " + "integer_column INTEGER, " + "decimal_column DECIMAL(5, 3), " + @@ -500,14 +502,14 @@ public void testInsertWithCoercion() "unbounded_varchar_column VARCHAR, " + "date_column DATE)"); - assertUpdate("INSERT INTO test_insert_with_coercion (tinyint_column, integer_column, decimal_column, real_column) VALUES (1e0, 2e0, 3e0, 4e0)", 1); - assertUpdate("INSERT INTO test_insert_with_coercion (char_column, bounded_varchar_column, unbounded_varchar_column) VALUES (CAST('aa ' AS varchar), CAST('aa ' AS varchar), CAST('aa ' AS varchar))", 1); - assertUpdate("INSERT INTO test_insert_with_coercion (char_column, bounded_varchar_column, unbounded_varchar_column) VALUES (NULL, NULL, NULL)", 1); - assertUpdate("INSERT INTO test_insert_with_coercion (char_column, bounded_varchar_column, unbounded_varchar_column) VALUES (CAST(NULL AS varchar), CAST(NULL AS varchar), CAST(NULL AS varchar))", 1); - assertUpdate("INSERT INTO 
test_insert_with_coercion (date_column) VALUES (TIMESTAMP '2019-11-18 22:13:40')", 1); + assertUpdate("INSERT INTO " + tableName + " (tinyint_column, integer_column, decimal_column, real_column) VALUES (1e0, 2e0, 3e0, 4e0)", 1); + assertUpdate("INSERT INTO " + tableName + " (char_column, bounded_varchar_column, unbounded_varchar_column) VALUES (CAST('aa ' AS varchar), CAST('aa ' AS varchar), CAST('aa ' AS varchar))", 1); + assertUpdate("INSERT INTO " + tableName + " (char_column, bounded_varchar_column, unbounded_varchar_column) VALUES (NULL, NULL, NULL)", 1); + assertUpdate("INSERT INTO " + tableName + " (char_column, bounded_varchar_column, unbounded_varchar_column) VALUES (CAST(NULL AS varchar), CAST(NULL AS varchar), CAST(NULL AS varchar))", 1); + assertUpdate("INSERT INTO " + tableName + " (date_column) VALUES (TIMESTAMP '2019-11-18 22:13:40')", 1); assertQuery( - "SELECT * FROM test_insert_with_coercion", + "SELECT * FROM " + tableName + "", "VALUES " + "(1, 2, 3, 4, NULL, NULL, NULL, NULL), " + "(NULL, NULL, NULL, NULL, 'aa ', 'aa ', 'aa ', NULL), " + @@ -515,11 +517,11 @@ public void testInsertWithCoercion() "(NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL), " + "(NULL, NULL, NULL, NULL, NULL, NULL, NULL, DATE '2019-11-18')"); - assertQueryFails("INSERT INTO test_insert_with_coercion (integer_column) VALUES (3e9)", "Out of range for integer: 3.0E9"); - assertQueryFails("INSERT INTO test_insert_with_coercion (char_column) VALUES ('abcd')", "Cannot truncate non-space characters on INSERT"); - assertQueryFails("INSERT INTO test_insert_with_coercion (bounded_varchar_column) VALUES ('abcd')", "Cannot truncate non-space characters on INSERT"); + assertQueryFails("INSERT INTO " + tableName + " (integer_column) VALUES (3e9)", "Out of range for integer: 3.0E9"); + assertQueryFails("INSERT INTO " + tableName + " (char_column) VALUES ('abcd')", "Cannot truncate non-space characters on INSERT"); + assertQueryFails("INSERT INTO " + tableName + " (bounded_varchar_column) 
VALUES ('abcd')", "Cannot truncate non-space characters on INSERT"); - assertUpdate("DROP TABLE test_insert_with_coercion"); + assertUpdate("DROP TABLE " + tableName); } @Test From c984f64ed9cb591e14aaa9e69208e8d971ad5fdc Mon Sep 17 00:00:00 2001 From: David Phillips Date: Fri, 3 Apr 2020 20:33:13 -0700 Subject: [PATCH 052/519] Support exclusions in InterfaceTestUtils --- .../spi/testing/InterfaceTestUtils.java | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/presto-spi/src/test/java/io/prestosql/spi/testing/InterfaceTestUtils.java b/presto-spi/src/test/java/io/prestosql/spi/testing/InterfaceTestUtils.java index 0f1b12bed2ed..6a6e5756b806 100644 --- a/presto-spi/src/test/java/io/prestosql/spi/testing/InterfaceTestUtils.java +++ b/presto-spi/src/test/java/io/prestosql/spi/testing/InterfaceTestUtils.java @@ -13,12 +13,16 @@ */ package io.prestosql.spi.testing; +import com.google.common.collect.ImmutableSet; + import java.lang.reflect.Method; import java.lang.reflect.Modifier; +import java.util.Set; import java.util.function.Function; import static com.google.common.base.Defaults.defaultValue; import static com.google.common.base.Preconditions.checkArgument; +import static com.google.common.collect.Sets.difference; import static com.google.common.reflect.Reflection.newProxy; import static java.lang.String.format; import static org.testng.Assert.assertEquals; @@ -29,9 +33,14 @@ public final class InterfaceTestUtils private InterfaceTestUtils() {} public static void assertAllMethodsOverridden(Class iface, Class clazz) + { + assertAllMethodsOverridden(iface, clazz, ImmutableSet.of()); + } + + public static void assertAllMethodsOverridden(Class iface, Class clazz, Set exclusions) { checkArgument(iface.isAssignableFrom(clazz), "%s is not supertype of %s", iface, clazz); - for (Method method : iface.getMethods()) { + for (Method method : difference(ImmutableSet.copyOf(iface.getMethods()), exclusions)) { if 
(Modifier.isStatic(method.getModifiers())) { continue; } @@ -52,7 +61,12 @@ public static void assertAllMethodsOverridden(Class iface, C public static void assertProperForwardingMethodsAreCalled(Class iface, Function forwardingInstanceFactory) { - for (Method actualMethod : iface.getDeclaredMethods()) { + assertProperForwardingMethodsAreCalled(iface, forwardingInstanceFactory, ImmutableSet.of()); + } + + public static void assertProperForwardingMethodsAreCalled(Class iface, Function forwardingInstanceFactory, Set exclusions) + { + for (Method actualMethod : difference(ImmutableSet.copyOf(iface.getDeclaredMethods()), exclusions)) { Object[] actualArguments = new Object[actualMethod.getParameterCount()]; for (int i = 0; i < actualArguments.length; i++) { if (actualMethod.getParameterTypes()[i].isPrimitive()) { From 7e7bbc7f6a0aafdeb1862b5fa5ee9f6df9595a63 Mon Sep 17 00:00:00 2001 From: David Phillips Date: Wed, 1 Apr 2020 22:55:33 -0700 Subject: [PATCH 053/519] Move JdbcClientStats to upper level --- .../plugin/jdbc/jmx/JdbcClientStats.java | 245 +++++++++++++++ .../jdbc/jmx/StatisticsAwareJdbcClient.java | 281 ++---------------- 2 files changed, 271 insertions(+), 255 deletions(-) create mode 100644 presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/jmx/JdbcClientStats.java diff --git a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/jmx/JdbcClientStats.java b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/jmx/JdbcClientStats.java new file mode 100644 index 000000000000..665ed522df28 --- /dev/null +++ b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/jmx/JdbcClientStats.java @@ -0,0 +1,245 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.plugin.jdbc.jmx; + +import org.weakref.jmx.Managed; +import org.weakref.jmx.Nested; + +public final class JdbcClientStats +{ + private final JdbcApiStats abortReadConnection = new JdbcApiStats(); + private final JdbcApiStats addColumn = new JdbcApiStats(); + private final JdbcApiStats beginCreateTable = new JdbcApiStats(); + private final JdbcApiStats beginInsertTable = new JdbcApiStats(); + private final JdbcApiStats buildInsertSql = new JdbcApiStats(); + private final JdbcApiStats buildSql = new JdbcApiStats(); + private final JdbcApiStats commitCreateTable = new JdbcApiStats(); + private final JdbcApiStats createSchema = new JdbcApiStats(); + private final JdbcApiStats createTable = new JdbcApiStats(); + private final JdbcApiStats dropColumn = new JdbcApiStats(); + private final JdbcApiStats dropSchema = new JdbcApiStats(); + private final JdbcApiStats dropTable = new JdbcApiStats(); + private final JdbcApiStats finishInsertTable = new JdbcApiStats(); + private final JdbcApiStats getColumns = new JdbcApiStats(); + private final JdbcApiStats getConnectionWithHandle = new JdbcApiStats(); + private final JdbcApiStats getConnectionWithSplit = new JdbcApiStats(); + private final JdbcApiStats getPreparedStatement = new JdbcApiStats(); + private final JdbcApiStats getSchemaNames = new JdbcApiStats(); + private final JdbcApiStats getSplits = new JdbcApiStats(); + private final JdbcApiStats getTableHandle = new JdbcApiStats(); + private final JdbcApiStats getTableNames = new JdbcApiStats(); + private final JdbcApiStats 
getTableStatistics = new JdbcApiStats(); + private final JdbcApiStats renameColumn = new JdbcApiStats(); + private final JdbcApiStats renameTable = new JdbcApiStats(); + private final JdbcApiStats rollbackCreateTable = new JdbcApiStats(); + private final JdbcApiStats schemaExists = new JdbcApiStats(); + private final JdbcApiStats toPrestoType = new JdbcApiStats(); + private final JdbcApiStats toWriteMapping = new JdbcApiStats(); + + @Managed + @Nested + public JdbcApiStats getAbortReadConnection() + { + return abortReadConnection; + } + + @Managed + @Nested + public JdbcApiStats getAddColumn() + { + return addColumn; + } + + @Managed + @Nested + public JdbcApiStats getBeginCreateTable() + { + return beginCreateTable; + } + + @Managed + @Nested + public JdbcApiStats getBeginInsertTable() + { + return beginInsertTable; + } + + @Managed + @Nested + public JdbcApiStats getBuildInsertSql() + { + return buildInsertSql; + } + + @Managed + @Nested + public JdbcApiStats getBuildSql() + { + return buildSql; + } + + @Managed + @Nested + public JdbcApiStats getCommitCreateTable() + { + return commitCreateTable; + } + + @Managed + @Nested + public JdbcApiStats getCreateSchema() + { + return createSchema; + } + + @Managed + @Nested + public JdbcApiStats getCreateTable() + { + return createTable; + } + + @Managed + @Nested + public JdbcApiStats getDropColumn() + { + return dropColumn; + } + + @Managed + @Nested + public JdbcApiStats getDropSchema() + { + return dropSchema; + } + + @Managed + @Nested + public JdbcApiStats getDropTable() + { + return dropTable; + } + + @Managed + @Nested + public JdbcApiStats getFinishInsertTable() + { + return finishInsertTable; + } + + @Managed + @Nested + public JdbcApiStats getGetColumns() + { + return getColumns; + } + + @Managed + @Nested + public JdbcApiStats getGetConnectionWithHandle() + { + return getConnectionWithHandle; + } + + @Managed + @Nested + public JdbcApiStats getGetConnectionWithSplit() + { + return getConnectionWithSplit; + } 
+ + @Managed + @Nested + public JdbcApiStats getGetPreparedStatement() + { + return getPreparedStatement; + } + + @Managed + @Nested + public JdbcApiStats getGetSchemaNames() + { + return getSchemaNames; + } + + @Managed + @Nested + public JdbcApiStats getGetSplits() + { + return getSplits; + } + + @Managed + @Nested + public JdbcApiStats getGetTableHandle() + { + return getTableHandle; + } + + @Managed + @Nested + public JdbcApiStats getGetTableNames() + { + return getTableNames; + } + + @Managed + @Nested + public JdbcApiStats getGetTableStatistics() + { + return getTableStatistics; + } + + @Managed + @Nested + public JdbcApiStats getRenameColumn() + { + return renameColumn; + } + + @Managed + @Nested + public JdbcApiStats getRenameTable() + { + return renameTable; + } + + @Managed + @Nested + public JdbcApiStats getRollbackCreateTable() + { + return rollbackCreateTable; + } + + @Managed + @Nested + public JdbcApiStats getSchemaExists() + { + return schemaExists; + } + + @Managed + @Nested + public JdbcApiStats getToPrestoType() + { + return toPrestoType; + } + + @Managed + @Nested + public JdbcApiStats getToWriteMapping() + { + return toWriteMapping; + } +} diff --git a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/jmx/StatisticsAwareJdbcClient.java b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/jmx/StatisticsAwareJdbcClient.java index b7c5803adccb..a573e278220a 100644 --- a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/jmx/StatisticsAwareJdbcClient.java +++ b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/jmx/StatisticsAwareJdbcClient.java @@ -34,7 +34,6 @@ import io.prestosql.spi.type.Type; import org.weakref.jmx.Flatten; import org.weakref.jmx.Managed; -import org.weakref.jmx.Nested; import java.sql.Connection; import java.sql.PreparedStatement; @@ -72,162 +71,162 @@ public JdbcClientStats getStats() @Override public boolean schemaExists(JdbcIdentity identity, String schema) { - return stats.schemaExists.wrap(() -> 
delegate().schemaExists(identity, schema)); + return stats.getSchemaExists().wrap(() -> delegate().schemaExists(identity, schema)); } @Override public Set getSchemaNames(JdbcIdentity identity) { - return stats.getSchemaNames.wrap(() -> delegate().getSchemaNames(identity)); + return stats.getGetSchemaNames().wrap(() -> delegate().getSchemaNames(identity)); } @Override public List getTableNames(JdbcIdentity identity, Optional schema) { - return stats.getTableNames.wrap(() -> delegate().getTableNames(identity, schema)); + return stats.getGetTableNames().wrap(() -> delegate().getTableNames(identity, schema)); } @Override public Optional getTableHandle(JdbcIdentity identity, SchemaTableName schemaTableName) { - return stats.getTableHandle.wrap(() -> delegate().getTableHandle(identity, schemaTableName)); + return stats.getGetTableHandle().wrap(() -> delegate().getTableHandle(identity, schemaTableName)); } @Override public List getColumns(ConnectorSession session, JdbcTableHandle tableHandle) { - return stats.getColumns.wrap(() -> delegate().getColumns(session, tableHandle)); + return stats.getGetColumns().wrap(() -> delegate().getColumns(session, tableHandle)); } @Override public Optional toPrestoType(ConnectorSession session, Connection connection, JdbcTypeHandle typeHandle) { - return stats.toPrestoType.wrap(() -> delegate().toPrestoType(session, connection, typeHandle)); + return stats.getToPrestoType().wrap(() -> delegate().toPrestoType(session, connection, typeHandle)); } @Override public WriteMapping toWriteMapping(ConnectorSession session, Type type) { - return stats.toWriteMapping.wrap(() -> delegate().toWriteMapping(session, type)); + return stats.getToWriteMapping().wrap(() -> delegate().toWriteMapping(session, type)); } @Override public ConnectorSplitSource getSplits(ConnectorSession session, JdbcTableHandle layoutHandle) { - return stats.getSplits.wrap(() -> delegate().getSplits(session, layoutHandle)); + return stats.getGetSplits().wrap(() -> 
delegate().getSplits(session, layoutHandle)); } @Override public Connection getConnection(JdbcIdentity identity, JdbcSplit split) throws SQLException { - return stats.getConnectionWithSplit.wrap(() -> delegate().getConnection(identity, split)); + return stats.getGetConnectionWithSplit().wrap(() -> delegate().getConnection(identity, split)); } @Override public void abortReadConnection(Connection connection) throws SQLException { - stats.abortReadConnection.wrap(() -> delegate().abortReadConnection(connection)); + stats.getAbortReadConnection().wrap(() -> delegate().abortReadConnection(connection)); } @Override public PreparedStatement buildSql(ConnectorSession session, Connection connection, JdbcSplit split, JdbcTableHandle tableHandle, List columnHandles) throws SQLException { - return stats.buildSql.wrap(() -> delegate().buildSql(session, connection, split, tableHandle, columnHandles)); + return stats.getBuildSql().wrap(() -> delegate().buildSql(session, connection, split, tableHandle, columnHandles)); } @Override public void addColumn(ConnectorSession session, JdbcTableHandle handle, ColumnMetadata column) { - stats.addColumn.wrap(() -> delegate().addColumn(session, handle, column)); + stats.getAddColumn().wrap(() -> delegate().addColumn(session, handle, column)); } @Override public void dropColumn(JdbcIdentity identity, JdbcTableHandle handle, JdbcColumnHandle column) { - stats.dropColumn.wrap(() -> delegate().dropColumn(identity, handle, column)); + stats.getDropColumn().wrap(() -> delegate().dropColumn(identity, handle, column)); } @Override public void renameColumn(JdbcIdentity identity, JdbcTableHandle handle, JdbcColumnHandle jdbcColumn, String newColumnName) { - stats.renameColumn.wrap(() -> delegate().renameColumn(identity, handle, jdbcColumn, newColumnName)); + stats.getRenameColumn().wrap(() -> delegate().renameColumn(identity, handle, jdbcColumn, newColumnName)); } @Override public void renameTable(JdbcIdentity identity, JdbcTableHandle handle, 
SchemaTableName newTableName) { - stats.renameTable.wrap(() -> delegate().renameTable(identity, handle, newTableName)); + stats.getRenameTable().wrap(() -> delegate().renameTable(identity, handle, newTableName)); } @Override public void createTable(ConnectorSession session, ConnectorTableMetadata tableMetadata) { - stats.createTable.wrap(() -> delegate().createTable(session, tableMetadata)); + stats.getCreateTable().wrap(() -> delegate().createTable(session, tableMetadata)); } @Override public JdbcOutputTableHandle beginCreateTable(ConnectorSession session, ConnectorTableMetadata tableMetadata) { - return stats.beginCreateTable.wrap(() -> delegate().beginCreateTable(session, tableMetadata)); + return stats.getBeginCreateTable().wrap(() -> delegate().beginCreateTable(session, tableMetadata)); } @Override public void commitCreateTable(JdbcIdentity identity, JdbcOutputTableHandle handle) { - stats.commitCreateTable.wrap(() -> delegate().commitCreateTable(identity, handle)); + stats.getCommitCreateTable().wrap(() -> delegate().commitCreateTable(identity, handle)); } @Override public JdbcOutputTableHandle beginInsertTable(ConnectorSession session, JdbcTableHandle tableHandle, List columns) { - return stats.beginInsertTable.wrap(() -> delegate().beginInsertTable(session, tableHandle, columns)); + return stats.getBeginInsertTable().wrap(() -> delegate().beginInsertTable(session, tableHandle, columns)); } @Override public void finishInsertTable(JdbcIdentity identity, JdbcOutputTableHandle handle) { - stats.finishInsertTable.wrap(() -> delegate().finishInsertTable(identity, handle)); + stats.getFinishInsertTable().wrap(() -> delegate().finishInsertTable(identity, handle)); } @Override public void dropTable(JdbcIdentity identity, JdbcTableHandle jdbcTableHandle) { - stats.dropTable.wrap(() -> delegate().dropTable(identity, jdbcTableHandle)); + stats.getDropTable().wrap(() -> delegate().dropTable(identity, jdbcTableHandle)); } @Override public void 
rollbackCreateTable(JdbcIdentity identity, JdbcOutputTableHandle handle) { - stats.rollbackCreateTable.wrap(() -> delegate().rollbackCreateTable(identity, handle)); + stats.getRollbackCreateTable().wrap(() -> delegate().rollbackCreateTable(identity, handle)); } @Override public String buildInsertSql(JdbcOutputTableHandle handle) { - return stats.buildInsertSql.wrap(() -> delegate().buildInsertSql(handle)); + return stats.getBuildInsertSql().wrap(() -> delegate().buildInsertSql(handle)); } @Override public Connection getConnection(JdbcIdentity identity, JdbcOutputTableHandle handle) throws SQLException { - return stats.getConnectionWithHandle.wrap(() -> delegate().getConnection(identity, handle)); + return stats.getGetConnectionWithHandle().wrap(() -> delegate().getConnection(identity, handle)); } @Override public PreparedStatement getPreparedStatement(Connection connection, String sql) throws SQLException { - return stats.getPreparedStatement.wrap(() -> delegate().getPreparedStatement(connection, sql)); + return stats.getGetPreparedStatement().wrap(() -> delegate().getPreparedStatement(connection, sql)); } @Override public TableStatistics getTableStatistics(ConnectorSession session, JdbcTableHandle handle, TupleDomain tupleDomain) { - return stats.getTableStatistics.wrap(() -> delegate().getTableStatistics(session, handle, tupleDomain)); + return stats.getGetTableStatistics().wrap(() -> delegate().getTableStatistics(session, handle, tupleDomain)); } @Override @@ -241,232 +240,4 @@ public void dropSchema(JdbcIdentity identity, String schemaName) { stats.getDropSchema().wrap(() -> delegate().dropSchema(identity, schemaName)); } - - public static final class JdbcClientStats - { - private final JdbcApiStats schemaExists = new JdbcApiStats(); - private final JdbcApiStats getSchemaNames = new JdbcApiStats(); - private final JdbcApiStats getTableNames = new JdbcApiStats(); - private final JdbcApiStats getTableHandle = new JdbcApiStats(); - private final JdbcApiStats 
getColumns = new JdbcApiStats(); - private final JdbcApiStats toPrestoType = new JdbcApiStats(); - private final JdbcApiStats toWriteMapping = new JdbcApiStats(); - private final JdbcApiStats getSplits = new JdbcApiStats(); - private final JdbcApiStats getConnectionWithSplit = new JdbcApiStats(); - private final JdbcApiStats getConnectionWithHandle = new JdbcApiStats(); - private final JdbcApiStats abortReadConnection = new JdbcApiStats(); - private final JdbcApiStats buildSql = new JdbcApiStats(); - private final JdbcApiStats beginCreateTable = new JdbcApiStats(); - private final JdbcApiStats commitCreateTable = new JdbcApiStats(); - private final JdbcApiStats beginInsertTable = new JdbcApiStats(); - private final JdbcApiStats finishInsertTable = new JdbcApiStats(); - private final JdbcApiStats dropTable = new JdbcApiStats(); - private final JdbcApiStats rollbackCreateTable = new JdbcApiStats(); - private final JdbcApiStats buildInsertSql = new JdbcApiStats(); - private final JdbcApiStats getPreparedStatement = new JdbcApiStats(); - private final JdbcApiStats getTableStatistics = new JdbcApiStats(); - private final JdbcApiStats addColumn = new JdbcApiStats(); - private final JdbcApiStats dropColumn = new JdbcApiStats(); - private final JdbcApiStats renameColumn = new JdbcApiStats(); - private final JdbcApiStats renameTable = new JdbcApiStats(); - private final JdbcApiStats createTable = new JdbcApiStats(); - private final JdbcApiStats createSchema = new JdbcApiStats(); - private final JdbcApiStats dropSchema = new JdbcApiStats(); - - @Managed - @Nested - public JdbcApiStats getSchemaExists() - { - return schemaExists; - } - - @Managed - @Nested - public JdbcApiStats getGetSchemaNames() - { - return getSchemaNames; - } - - @Managed - @Nested - public JdbcApiStats getGetTableNames() - { - return getTableNames; - } - - @Managed - @Nested - public JdbcApiStats getGetTableHandle() - { - return getTableHandle; - } - - @Managed - @Nested - public JdbcApiStats 
getGetColumns() - { - return getColumns; - } - - @Managed - @Nested - public JdbcApiStats getToPrestoType() - { - return toPrestoType; - } - - @Managed - @Nested - public JdbcApiStats getToWriteMapping() - { - return toWriteMapping; - } - - @Managed - @Nested - public JdbcApiStats getGetSplits() - { - return getSplits; - } - - @Managed - @Nested - public JdbcApiStats getGetConnectionWithSplit() - { - return getConnectionWithSplit; - } - - @Managed - @Nested - public JdbcApiStats getGetConnectionWithHandle() - { - return getConnectionWithHandle; - } - - @Managed - @Nested - public JdbcApiStats getAbortReadConnection() - { - return abortReadConnection; - } - - @Managed - @Nested - public JdbcApiStats getBuildSql() - { - return buildSql; - } - - @Managed - @Nested - public JdbcApiStats getBeginCreateTable() - { - return beginCreateTable; - } - - @Managed - @Nested - public JdbcApiStats getCommitCreateTable() - { - return commitCreateTable; - } - - @Managed - @Nested - public JdbcApiStats getBeginInsertTable() - { - return beginInsertTable; - } - - @Managed - @Nested - public JdbcApiStats getFinishInsertTable() - { - return finishInsertTable; - } - - @Managed - @Nested - public JdbcApiStats getDropTable() - { - return dropTable; - } - - @Managed - @Nested - public JdbcApiStats getRollbackCreateTable() - { - return rollbackCreateTable; - } - - @Managed - @Nested - public JdbcApiStats getBuildInsertSql() - { - return buildInsertSql; - } - - @Managed - @Nested - public JdbcApiStats getGetPreparedStatement() - { - return getPreparedStatement; - } - - @Managed - @Nested - public JdbcApiStats getGetTableStatistics() - { - return getTableStatistics; - } - - @Managed - @Nested - public JdbcApiStats getAddColumn() - { - return addColumn; - } - - @Managed - @Nested - public JdbcApiStats getDropColumn() - { - return dropColumn; - } - - @Managed - @Nested - public JdbcApiStats getRenameColumn() - { - return renameColumn; - } - - @Managed - @Nested - public JdbcApiStats 
getRenameTable() - { - return renameTable; - } - - @Managed - @Nested - public JdbcApiStats getCreateTable() - { - return createTable; - } - - @Managed - @Nested - public JdbcApiStats getCreateSchema() - { - return createSchema; - } - - @Managed - @Nested - public JdbcApiStats getDropSchema() - { - return dropSchema; - } - } } From 5e466fb5e62b12b2c08d28b779c8b7666cc756f0 Mon Sep 17 00:00:00 2001 From: David Phillips Date: Wed, 1 Apr 2020 23:21:28 -0700 Subject: [PATCH 054/519] Ensure StatisticsAwareJdbcClient implements all methods --- .../jdbc/jmx/StatisticsAwareJdbcClient.java | 27 +++++++++++--- .../jdbc/TestStatisticsAwareJdbcClient.java | 35 +++++++++++++++++++ 2 files changed, 57 insertions(+), 5 deletions(-) create mode 100644 presto-base-jdbc/src/test/java/io/prestosql/plugin/jdbc/TestStatisticsAwareJdbcClient.java diff --git a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/jmx/StatisticsAwareJdbcClient.java b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/jmx/StatisticsAwareJdbcClient.java index a573e278220a..27bdc5203305 100644 --- a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/jmx/StatisticsAwareJdbcClient.java +++ b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/jmx/StatisticsAwareJdbcClient.java @@ -14,7 +14,6 @@ package io.prestosql.plugin.jdbc.jmx; import io.prestosql.plugin.jdbc.ColumnMapping; -import io.prestosql.plugin.jdbc.ForwardingJdbcClient; import io.prestosql.plugin.jdbc.JdbcClient; import io.prestosql.plugin.jdbc.JdbcColumnHandle; import io.prestosql.plugin.jdbc.JdbcIdentity; @@ -29,6 +28,7 @@ import io.prestosql.spi.connector.ConnectorSplitSource; import io.prestosql.spi.connector.ConnectorTableMetadata; import io.prestosql.spi.connector.SchemaTableName; +import io.prestosql.spi.connector.SystemTable; import io.prestosql.spi.predicate.TupleDomain; import io.prestosql.spi.statistics.TableStatistics; import io.prestosql.spi.type.Type; @@ -44,8 +44,8 @@ import static java.util.Objects.requireNonNull; 
-public class StatisticsAwareJdbcClient - extends ForwardingJdbcClient +public final class StatisticsAwareJdbcClient + implements JdbcClient { private final JdbcClientStats stats = new JdbcClientStats(); private final JdbcClient delegate; @@ -55,8 +55,7 @@ public StatisticsAwareJdbcClient(JdbcClient delegate) this.delegate = requireNonNull(delegate, "delegate is null"); } - @Override - protected JdbcClient delegate() + private JdbcClient delegate() { return delegate; } @@ -229,6 +228,18 @@ public TableStatistics getTableStatistics(ConnectorSession session, JdbcTableHan return stats.getGetTableStatistics().wrap(() -> delegate().getTableStatistics(session, handle, tupleDomain)); } + @Override + public boolean supportsLimit() + { + return delegate().supportsLimit(); + } + + @Override + public boolean isLimitGuaranteed() + { + return delegate().isLimitGuaranteed(); + } + @Override public void createSchema(JdbcIdentity identity, String schemaName) { @@ -240,4 +251,10 @@ public void dropSchema(JdbcIdentity identity, String schemaName) { stats.getDropSchema().wrap(() -> delegate().dropSchema(identity, schemaName)); } + + @Override + public Optional getSystemTable(ConnectorSession session, SchemaTableName tableName) + { + return delegate().getSystemTable(session, tableName); + } } diff --git a/presto-base-jdbc/src/test/java/io/prestosql/plugin/jdbc/TestStatisticsAwareJdbcClient.java b/presto-base-jdbc/src/test/java/io/prestosql/plugin/jdbc/TestStatisticsAwareJdbcClient.java new file mode 100644 index 000000000000..b024440eaab6 --- /dev/null +++ b/presto-base-jdbc/src/test/java/io/prestosql/plugin/jdbc/TestStatisticsAwareJdbcClient.java @@ -0,0 +1,35 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.plugin.jdbc; + +import io.prestosql.plugin.jdbc.jmx.StatisticsAwareJdbcClient; +import org.testng.annotations.Test; + +import static io.prestosql.spi.testing.InterfaceTestUtils.assertAllMethodsOverridden; +import static io.prestosql.spi.testing.InterfaceTestUtils.assertProperForwardingMethodsAreCalled; + +public class TestStatisticsAwareJdbcClient +{ + @Test + public void testEverythingImplemented() + { + assertAllMethodsOverridden(JdbcClient.class, StatisticsAwareJdbcClient.class); + } + + @Test + public void testProperForwardingMethodsAreCalled() + { + assertProperForwardingMethodsAreCalled(JdbcClient.class, StatisticsAwareJdbcClient::new); + } +} From bcea275371c914f9f081592dfb729577d2615865 Mon Sep 17 00:00:00 2001 From: David Phillips Date: Fri, 3 Apr 2020 20:31:52 -0700 Subject: [PATCH 055/519] Ensure CachingJdbcClient delegates all unhandled methods --- .../plugin/jdbc/CachingJdbcClient.java | 16 ++++++- .../plugin/jdbc/TestCachingJdbcClient.java | 43 +++++++++++++++++++ 2 files changed, 58 insertions(+), 1 deletion(-) create mode 100644 presto-base-jdbc/src/test/java/io/prestosql/plugin/jdbc/TestCachingJdbcClient.java diff --git a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/CachingJdbcClient.java b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/CachingJdbcClient.java index 2f3c4d5a6060..8756c3b45d69 100644 --- a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/CachingJdbcClient.java +++ b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/CachingJdbcClient.java @@ -26,6 +26,7 @@ 
import io.prestosql.spi.connector.ConnectorSplitSource; import io.prestosql.spi.connector.ConnectorTableMetadata; import io.prestosql.spi.connector.SchemaTableName; +import io.prestosql.spi.connector.SystemTable; import io.prestosql.spi.predicate.TupleDomain; import io.prestosql.spi.statistics.TableStatistics; import io.prestosql.spi.type.Type; @@ -45,7 +46,7 @@ import static com.google.common.base.Throwables.throwIfInstanceOf; import static java.util.Objects.requireNonNull; -public class CachingJdbcClient +public final class CachingJdbcClient implements JdbcClient { private final JdbcClient delegate; @@ -77,6 +78,13 @@ public CachingJdbcClient(JdbcClient delegate, Duration metadataCachingTtl, boole columnsCache = cacheBuilder.build(); } + @Override + public boolean schemaExists(JdbcIdentity identity, String schema) + { + // this method cannot be delegated as that would bypass the cache + return getSchemaNames(identity).contains(schema); + } + @Override public Set getSchemaNames(JdbcIdentity identity) { @@ -283,6 +291,12 @@ public JdbcOutputTableHandle beginCreateTable(ConnectorSession session, Connecto return delegate.beginCreateTable(session, tableMetadata); } + @Override + public Optional getSystemTable(ConnectorSession session, SchemaTableName tableName) + { + return delegate.getSystemTable(session, tableName); + } + private void invalidateSchemasCache() { schemaNamesCache.invalidateAll(); diff --git a/presto-base-jdbc/src/test/java/io/prestosql/plugin/jdbc/TestCachingJdbcClient.java b/presto-base-jdbc/src/test/java/io/prestosql/plugin/jdbc/TestCachingJdbcClient.java new file mode 100644 index 000000000000..5293ccd1a2f5 --- /dev/null +++ b/presto-base-jdbc/src/test/java/io/prestosql/plugin/jdbc/TestCachingJdbcClient.java @@ -0,0 +1,43 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.plugin.jdbc; + +import com.google.common.collect.ImmutableSet; +import org.testng.annotations.Test; + +import java.lang.reflect.Method; +import java.util.Set; + +import static io.prestosql.spi.testing.InterfaceTestUtils.assertAllMethodsOverridden; + +public class TestCachingJdbcClient +{ + @Test + public void testEverythingImplemented() + { + assertAllMethodsOverridden(JdbcClient.class, CachingJdbcClient.class, nonOverridenMethods()); + } + + private static Set nonOverridenMethods() + { + try { + return ImmutableSet.builder() + .add(JdbcClient.class.getMethod("schemaExists", JdbcIdentity.class, String.class)) + .build(); + } + catch (NoSuchMethodException e) { + throw new AssertionError(e); + } + } +} From ab595fae7d0ab3de9854c2dea10c641cf5be5a26 Mon Sep 17 00:00:00 2001 From: David Phillips Date: Sat, 4 Apr 2020 11:53:22 -0700 Subject: [PATCH 056/519] Remove unnecessary test in TestPrestoS3FileSystem --- .../plugin/hive/s3/TestPrestoS3FileSystem.java | 17 ----------------- 1 file changed, 17 deletions(-) diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/s3/TestPrestoS3FileSystem.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/s3/TestPrestoS3FileSystem.java index 402e3c640770..2c4e639c77c7 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/s3/TestPrestoS3FileSystem.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/s3/TestPrestoS3FileSystem.java @@ -67,7 +67,6 @@ import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_PATH_STYLE_ACCESS; import static 
io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_PIN_CLIENT_TO_CURRENT_REGION; import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_SECRET_KEY; -import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_SIGNER_TYPE; import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_SKIP_GLACIER_OBJECTS; import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_STAGING_DIRECTORY; import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_USER_AGENT_PREFIX; @@ -99,22 +98,6 @@ public void testStaticCredentials() } } - @Test - public void testCompatibleStaticCredentials() - throws Exception - { - Configuration config = new Configuration(false); - config.set(S3_ACCESS_KEY, "test_access_key"); - config.set(S3_SECRET_KEY, "test_secret_key"); - config.set(S3_ENDPOINT, "test.example.endpoint.com"); - config.set(S3_SIGNER_TYPE, "S3SignerType"); - // the static credentials should be preferred - - try (PrestoS3FileSystem fs = new PrestoS3FileSystem()) { - verifyStaticCredentials(config, fs, "s3a://test-bucket/", "test_access_key", "test_secret_key"); - } - } - private static void verifyStaticCredentials(Configuration config, PrestoS3FileSystem fileSystem, String uri, String expectedAccessKey, String expectedSecretKey) throws IOException, URISyntaxException { From 317eb98f8431fd6f2f9e40b68b30ff7a54ef5296 Mon Sep 17 00:00:00 2001 From: David Phillips Date: Sat, 4 Apr 2020 12:03:28 -0700 Subject: [PATCH 057/519] Add session token support to PrestoS3FileSystem --- .../plugin/hive/s3/PrestoS3FileSystem.java | 8 +++++++ .../hive/s3/TestPrestoS3FileSystem.java | 23 +++++++++++++++---- 2 files changed, 26 insertions(+), 5 deletions(-) diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/PrestoS3FileSystem.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/PrestoS3FileSystem.java index cd2e24b44cc6..dcb7e3ce97f9 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/PrestoS3FileSystem.java +++ 
b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/PrestoS3FileSystem.java @@ -22,6 +22,7 @@ import com.amazonaws.auth.AWSCredentialsProvider; import com.amazonaws.auth.AWSStaticCredentialsProvider; import com.amazonaws.auth.BasicAWSCredentials; +import com.amazonaws.auth.BasicSessionCredentials; import com.amazonaws.auth.DefaultAWSCredentialsProviderChain; import com.amazonaws.auth.STSAssumeRoleSessionCredentialsProvider; import com.amazonaws.auth.Signer; @@ -159,6 +160,7 @@ public class PrestoS3FileSystem public static final String S3_ENDPOINT = "presto.s3.endpoint"; public static final String S3_SECRET_KEY = "presto.s3.secret-key"; public static final String S3_ACCESS_KEY = "presto.s3.access-key"; + public static final String S3_SESSION_TOKEN = "presto.s3.session-token"; public static final String S3_IAM_ROLE = "presto.s3.iam-role"; public static final String S3_ACL_TYPE = "presto.s3.upload-acl-type"; public static final String S3_SKIP_GLACIER_OBJECTS = "presto.s3.skip-glacier-objects"; @@ -840,6 +842,12 @@ private static Optional getAwsCredentials(URI uri, Configuration if (isNullOrEmpty(accessKey) || isNullOrEmpty(secretKey)) { return Optional.empty(); } + + String sessionToken = conf.get(S3_SESSION_TOKEN); + if (!isNullOrEmpty(sessionToken)) { + return Optional.of(new BasicSessionCredentials(accessKey, secretKey, sessionToken)); + } + return Optional.of(new BasicAWSCredentials(accessKey, secretKey)); } diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/s3/TestPrestoS3FileSystem.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/s3/TestPrestoS3FileSystem.java index 2c4e639c77c7..6ae1f084b7aa 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/s3/TestPrestoS3FileSystem.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/s3/TestPrestoS3FileSystem.java @@ -17,6 +17,7 @@ import com.amazonaws.ClientConfiguration; import com.amazonaws.auth.AWSCredentials; import com.amazonaws.auth.AWSCredentialsProvider; +import 
com.amazonaws.auth.AWSSessionCredentials; import com.amazonaws.auth.AWSStaticCredentialsProvider; import com.amazonaws.auth.DefaultAWSCredentialsProviderChain; import com.amazonaws.auth.STSAssumeRoleSessionCredentialsProvider; @@ -67,6 +68,7 @@ import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_PATH_STYLE_ACCESS; import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_PIN_CLIENT_TO_CURRENT_REGION; import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_SECRET_KEY; +import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_SESSION_TOKEN; import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_SKIP_GLACIER_OBJECTS; import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_STAGING_DIRECTORY; import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_USER_AGENT_PREFIX; @@ -76,6 +78,7 @@ import static java.net.HttpURLConnection.HTTP_NOT_FOUND; import static java.nio.file.Files.createTempDirectory; import static java.nio.file.Files.createTempFile; +import static org.assertj.core.api.Assertions.assertThat; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertNull; import static org.testng.Assert.assertTrue; @@ -91,21 +94,31 @@ public void testStaticCredentials() Configuration config = new Configuration(false); config.set(S3_ACCESS_KEY, "test_access_key"); config.set(S3_SECRET_KEY, "test_secret_key"); - // the static credentials should be preferred try (PrestoS3FileSystem fs = new PrestoS3FileSystem()) { - verifyStaticCredentials(config, fs, "s3n://test-bucket/", "test_access_key", "test_secret_key"); + AWSCredentials credentials = getStaticCredentials(config, fs, "s3n://test-bucket/"); + assertEquals(credentials.getAWSAccessKeyId(), "test_access_key"); + assertEquals(credentials.getAWSSecretKey(), "test_secret_key"); + assertThat(credentials).isNotInstanceOf(AWSSessionCredentials.class); + } + + config.set(S3_SESSION_TOKEN, "test_token"); + try (PrestoS3FileSystem fs = new 
PrestoS3FileSystem()) { + AWSCredentials credentials = getStaticCredentials(config, fs, "s3n://test-bucket/"); + assertEquals(credentials.getAWSAccessKeyId(), "test_access_key"); + assertEquals(credentials.getAWSSecretKey(), "test_secret_key"); + assertThat(credentials).isInstanceOfSatisfying(AWSSessionCredentials.class, sessionCredentials -> + assertEquals(sessionCredentials.getSessionToken(), "test_token")); } } - private static void verifyStaticCredentials(Configuration config, PrestoS3FileSystem fileSystem, String uri, String expectedAccessKey, String expectedSecretKey) + private static AWSCredentials getStaticCredentials(Configuration config, PrestoS3FileSystem fileSystem, String uri) throws IOException, URISyntaxException { fileSystem.initialize(new URI(uri), config); AWSCredentialsProvider awsCredentialsProvider = getAwsCredentialsProvider(fileSystem); assertInstanceOf(awsCredentialsProvider, AWSStaticCredentialsProvider.class); - assertEquals(awsCredentialsProvider.getCredentials().getAWSAccessKeyId(), expectedAccessKey); - assertEquals(awsCredentialsProvider.getCredentials().getAWSSecretKey(), expectedSecretKey); + return awsCredentialsProvider.getCredentials(); } @Test(expectedExceptions = VerifyException.class, expectedExceptionsMessageRegExp = "Invalid configuration: either endpoint can be set or S3 client can be pinned to the current region") From af9f58a96c62bdfe2be756a714679bfbecbfed40 Mon Sep 17 00:00:00 2001 From: Dain Sundstrom Date: Sat, 4 Apr 2020 16:33:57 -0700 Subject: [PATCH 058/519] Fix warnings in PluginClassLoader --- .../java/io/prestosql/server/PluginClassLoader.java | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/server/PluginClassLoader.java b/presto-main/src/main/java/io/prestosql/server/PluginClassLoader.java index 7964ad7af9ee..9f6da19c01fb 100644 --- a/presto-main/src/main/java/io/prestosql/server/PluginClassLoader.java +++ 
b/presto-main/src/main/java/io/prestosql/server/PluginClassLoader.java @@ -14,7 +14,6 @@ package io.prestosql.server; import com.google.common.collect.ImmutableList; -import com.google.common.collect.Iterables; import java.io.IOException; import java.lang.reflect.InvocationTargetException; @@ -24,6 +23,7 @@ import java.util.Enumeration; import java.util.List; +import static com.google.common.collect.ImmutableList.toImmutableList; import static java.util.Objects.requireNonNull; class PluginClassLoader @@ -38,12 +38,14 @@ class PluginClassLoader public PluginClassLoader( List urls, ClassLoader spiClassLoader, - Iterable spiPackages) + List spiPackages) { this(urls, spiClassLoader, spiPackages, - Iterables.transform(spiPackages, PluginClassLoader::classNameToResource)); + spiPackages.stream() + .map(PluginClassLoader::classNameToResource) + .collect(toImmutableList())); } private PluginClassLoader( @@ -53,7 +55,7 @@ private PluginClassLoader( Iterable spiResources) { // plugins should not have access to the system (application) class loader - super(urls.toArray(new URL[urls.size()]), PLATFORM_CLASS_LOADER); + super(urls.toArray(new URL[0]), PLATFORM_CLASS_LOADER); this.spiClassLoader = requireNonNull(spiClassLoader, "spiClassLoader is null"); this.spiPackages = ImmutableList.copyOf(spiPackages); this.spiResources = ImmutableList.copyOf(spiResources); From 3195d8a3bb6b1b87b63d7df010a7312e8b960c28 Mon Sep 17 00:00:00 2001 From: Dain Sundstrom Date: Sat, 4 Apr 2020 16:39:09 -0700 Subject: [PATCH 059/519] Use platform classloader in Java 8 and 11 for plugins --- .../prestosql/server/PluginClassLoader.java | 23 +------------------ 1 file changed, 1 insertion(+), 22 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/server/PluginClassLoader.java b/presto-main/src/main/java/io/prestosql/server/PluginClassLoader.java index 9f6da19c01fb..0bca6887a277 100644 --- a/presto-main/src/main/java/io/prestosql/server/PluginClassLoader.java +++ 
b/presto-main/src/main/java/io/prestosql/server/PluginClassLoader.java @@ -16,8 +16,6 @@ import com.google.common.collect.ImmutableList; import java.io.IOException; -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; import java.net.URL; import java.net.URLClassLoader; import java.util.Enumeration; @@ -29,8 +27,6 @@ class PluginClassLoader extends URLClassLoader { - private static final ClassLoader PLATFORM_CLASS_LOADER = findPlatformClassLoader(); - private final ClassLoader spiClassLoader; private final List spiPackages; private final List spiResources; @@ -55,7 +51,7 @@ private PluginClassLoader( Iterable spiResources) { // plugins should not have access to the system (application) class loader - super(urls.toArray(new URL[0]), PLATFORM_CLASS_LOADER); + super(urls.toArray(new URL[0]), getSystemClassLoader().getParent()); this.spiClassLoader = requireNonNull(spiClassLoader, "spiClassLoader is null"); this.spiPackages = ImmutableList.copyOf(spiPackages); this.spiResources = ImmutableList.copyOf(spiResources); @@ -143,21 +139,4 @@ private static String classNameToResource(String className) { return className.replace('.', '/'); } - - @SuppressWarnings("JavaReflectionMemberAccess") - private static ClassLoader findPlatformClassLoader() - { - try { - // use platform class loader on Java 9 - Method method = ClassLoader.class.getMethod("getPlatformClassLoader"); - return (ClassLoader) method.invoke(null); - } - catch (NoSuchMethodException ignored) { - // use null class loader on Java 8 - return null; - } - catch (IllegalAccessException | InvocationTargetException e) { - throw new AssertionError(e); - } - } } From 588e502c2ab811dea1d8704556d1a4c8b4424e8a Mon Sep 17 00:00:00 2001 From: Chun Han Hsiao Date: Thu, 19 Mar 2020 09:25:05 +0800 Subject: [PATCH 060/519] Allow Hive on S3 to assume AWS role with External ID AWS provides 'external-id' option as optional information which can be used in an IAM role trust policy to designate who can 
assume the role. --- .../src/main/sphinx/connector/hive.rst | 5 +++++ .../metastore/glue/GlueHiveMetastore.java | 1 + .../glue/GlueHiveMetastoreConfig.java | 16 +++++++++++++- .../plugin/hive/s3/HiveS3Config.java | 16 +++++++++++++- .../s3/PrestoS3ConfigurationInitializer.java | 6 +++++ .../plugin/hive/s3/PrestoS3FileSystem.java | 7 +++++- .../glue/TestGlueHiveMetastoreConfig.java | 3 +++ .../plugin/hive/s3/TestHiveS3Config.java | 3 +++ .../hive/s3/TestPrestoS3FileSystem.java | 22 ++++++++++++++++++- 9 files changed, 75 insertions(+), 4 deletions(-) diff --git a/presto-docs/src/main/sphinx/connector/hive.rst b/presto-docs/src/main/sphinx/connector/hive.rst index 10aeaf58ff08..3772d85dcb81 100644 --- a/presto-docs/src/main/sphinx/connector/hive.rst +++ b/presto-docs/src/main/sphinx/connector/hive.rst @@ -316,6 +316,9 @@ Property Name Description ``hive.metastore.glue.iam-role`` ARN of an IAM role to assume when connecting to the Glue Catalog. + +``hive.metastore.glue.external-id`` External ID for the IAM role trust policy when connecting + to the Glue Catalog. ==================================================== ============================================================ .. _hive-s3: @@ -342,6 +345,8 @@ Property Name Description ``hive.s3.iam-role`` IAM role to assume. +``hive.s3.external-id`` External ID for the IAM role trust policy. + ``hive.s3.endpoint`` The S3 storage endpoint server. This can be used to connect to an S3-compatible storage system instead of AWS. 
When using v4 signatures, it is recommended to diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/glue/GlueHiveMetastore.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/glue/GlueHiveMetastore.java index 65c75f0b1f22..258f1cf4f026 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/glue/GlueHiveMetastore.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/glue/GlueHiveMetastore.java @@ -216,6 +216,7 @@ private static AWSCredentialsProvider getAwsCredentialsProvider(GlueHiveMetastor if (config.getIamRole().isPresent()) { return new STSAssumeRoleSessionCredentialsProvider .Builder(config.getIamRole().get(), "presto-session") + .withExternalId(config.getExternalId().orElse(null)) .build(); } if (config.getAwsCredentialsProvider().isPresent()) { diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/glue/GlueHiveMetastoreConfig.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/glue/GlueHiveMetastoreConfig.java index 605229f00fa1..738b4a097664 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/glue/GlueHiveMetastoreConfig.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/glue/GlueHiveMetastoreConfig.java @@ -32,6 +32,7 @@ public class GlueHiveMetastoreConfig private int maxGlueConnections = 5; private Optional defaultWarehouseDir = Optional.empty(); private Optional iamRole = Optional.empty(); + private Optional externalId = Optional.empty(); private Optional awsAccessKey = Optional.empty(); private Optional awsSecretKey = Optional.empty(); private Optional awsCredentialsProvider = Optional.empty(); @@ -111,13 +112,26 @@ public Optional getIamRole() } @Config("hive.metastore.glue.iam-role") - @ConfigDescription("ARN of an IAM role to assume when connecting to the Hive Glue metastore") + @ConfigDescription("ARN of an IAM role to assume when connecting to Glue") public GlueHiveMetastoreConfig 
setIamRole(String iamRole) { this.iamRole = Optional.ofNullable(iamRole); return this; } + public Optional getExternalId() + { + return externalId; + } + + @Config("hive.metastore.glue.external-id") + @ConfigDescription("External ID for the IAM role trust policy when connecting to Glue") + public GlueHiveMetastoreConfig setExternalId(String externalId) + { + this.externalId = Optional.ofNullable(externalId); + return this; + } + public Optional getAwsAccessKey() { return awsAccessKey; diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/HiveS3Config.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/HiveS3Config.java index c019dd387c97..c31e715fdfd9 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/HiveS3Config.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/HiveS3Config.java @@ -42,6 +42,7 @@ public class HiveS3Config private String s3SignerClass; private boolean s3PathStyleAccess; private String s3IamRole; + private String s3ExternalId; private boolean s3SslEnabled = true; private boolean s3SseEnabled; private PrestoS3SseType s3SseType = PrestoS3SseType.S3; @@ -158,13 +159,26 @@ public String getS3IamRole() } @Config("hive.s3.iam-role") - @ConfigDescription("ARN of an IAM role to assume when connecting to the S3") + @ConfigDescription("ARN of an IAM role to assume when connecting to S3") public HiveS3Config setS3IamRole(String s3IamRole) { this.s3IamRole = s3IamRole; return this; } + public String getS3ExternalId() + { + return s3ExternalId; + } + + @Config("hive.s3.external-id") + @ConfigDescription("External ID for the IAM role trust policy when connecting to S3") + public HiveS3Config setS3ExternalId(String s3ExternalId) + { + this.s3ExternalId = s3ExternalId; + return this; + } + public boolean isS3SslEnabled() { return s3SslEnabled; diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/PrestoS3ConfigurationInitializer.java 
b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/PrestoS3ConfigurationInitializer.java index 7fe4ffc6c7bf..324ce5508e36 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/PrestoS3ConfigurationInitializer.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/PrestoS3ConfigurationInitializer.java @@ -27,6 +27,7 @@ import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_CONNECT_TIMEOUT; import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_ENCRYPTION_MATERIALS_PROVIDER; import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_ENDPOINT; +import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_EXTERNAL_ID; import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_IAM_ROLE; import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_KMS_KEY_ID; import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_MAX_BACKOFF_TIME; @@ -62,6 +63,7 @@ public class PrestoS3ConfigurationInitializer private final PrestoS3SignerType signerType; private final boolean pathStyleAccess; private final String iamRole; + private final String externalId; private final boolean sslEnabled; private final boolean sseEnabled; private final PrestoS3SseType sseType; @@ -96,6 +98,7 @@ public PrestoS3ConfigurationInitializer(HiveS3Config config) this.signerClass = config.getS3SignerClass(); this.pathStyleAccess = config.isS3PathStyleAccess(); this.iamRole = config.getS3IamRole(); + this.externalId = config.getS3ExternalId(); this.sslEnabled = config.isS3SslEnabled(); this.sseEnabled = config.isS3SseEnabled(); this.sseType = config.getS3SseType(); @@ -147,6 +150,9 @@ public void initializeConfiguration(Configuration config) if (iamRole != null) { config.set(S3_IAM_ROLE, iamRole); } + if (externalId != null) { + config.set(S3_EXTERNAL_ID, externalId); + } config.setBoolean(S3_SSL_ENABLED, sslEnabled); config.setBoolean(S3_SSE_ENABLED, sseEnabled); config.set(S3_SSE_TYPE, sseType.name()); diff --git 
a/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/PrestoS3FileSystem.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/PrestoS3FileSystem.java index dcb7e3ce97f9..191530562968 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/PrestoS3FileSystem.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/PrestoS3FileSystem.java @@ -162,6 +162,7 @@ public class PrestoS3FileSystem public static final String S3_ACCESS_KEY = "presto.s3.access-key"; public static final String S3_SESSION_TOKEN = "presto.s3.session-token"; public static final String S3_IAM_ROLE = "presto.s3.iam-role"; + public static final String S3_EXTERNAL_ID = "presto.s3.external-id"; public static final String S3_ACL_TYPE = "presto.s3.upload-acl-type"; public static final String S3_SKIP_GLACIER_OBJECTS = "presto.s3.skip-glacier-objects"; public static final String S3_REQUESTER_PAYS_ENABLED = "presto.s3.requester-pays.enabled"; @@ -189,6 +190,7 @@ public class PrestoS3FileSystem private Duration maxBackoffTime; private Duration maxRetryTime; private String iamRole; + private String externalId; private boolean pinS3ClientToCurrentRegion; private boolean sseEnabled; private PrestoS3SseType sseType; @@ -232,6 +234,7 @@ public void initialize(URI uri, Configuration conf) this.multiPartUploadMinPartSize = conf.getLong(S3_MULTIPART_MIN_PART_SIZE, defaults.getS3MultipartMinPartSize().toBytes()); this.isPathStyleAccess = conf.getBoolean(S3_PATH_STYLE_ACCESS, defaults.isS3PathStyleAccess()); this.iamRole = conf.get(S3_IAM_ROLE, defaults.getS3IamRole()); + this.externalId = conf.get(S3_EXTERNAL_ID, defaults.getS3ExternalId()); this.pinS3ClientToCurrentRegion = conf.getBoolean(S3_PIN_CLIENT_TO_CURRENT_REGION, defaults.isPinS3ClientToCurrentRegion()); verify(!pinS3ClientToCurrentRegion || conf.get(S3_ENDPOINT) == null, "Invalid configuration: either endpoint can be set or S3 client can be pinned to the current region"); @@ -797,7 +800,9 @@ private AWSCredentialsProvider 
createAwsCredentialsProvider(URI uri, Configurati } if (iamRole != null) { - return new STSAssumeRoleSessionCredentialsProvider.Builder(this.iamRole, "presto-session").build(); + return new STSAssumeRoleSessionCredentialsProvider.Builder(this.iamRole, "presto-session") + .withExternalId(this.externalId) + .build(); } String providerClass = conf.get(S3_CREDENTIALS_PROVIDER); diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/metastore/glue/TestGlueHiveMetastoreConfig.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/metastore/glue/TestGlueHiveMetastoreConfig.java index 4dfb80e09241..8402e2e641e7 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/metastore/glue/TestGlueHiveMetastoreConfig.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/metastore/glue/TestGlueHiveMetastoreConfig.java @@ -34,6 +34,7 @@ public void testDefaults() .setMaxGlueConnections(5) .setDefaultWarehouseDir(null) .setIamRole(null) + .setExternalId(null) .setAwsAccessKey(null) .setAwsSecretKey(null) .setAwsCredentialsProvider(null) @@ -52,6 +53,7 @@ public void testExplicitPropertyMapping() .put("hive.metastore.glue.max-connections", "10") .put("hive.metastore.glue.default-warehouse-dir", "/location") .put("hive.metastore.glue.iam-role", "role") + .put("hive.metastore.glue.external-id", "external-id") .put("hive.metastore.glue.aws-access-key", "ABC") .put("hive.metastore.glue.aws-secret-key", "DEF") .put("hive.metastore.glue.aws-credentials-provider", "custom") @@ -67,6 +69,7 @@ public void testExplicitPropertyMapping() .setMaxGlueConnections(10) .setDefaultWarehouseDir("/location") .setIamRole("role") + .setExternalId("external-id") .setAwsAccessKey("ABC") .setAwsSecretKey("DEF") .setAwsCredentialsProvider("custom") diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/s3/TestHiveS3Config.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/s3/TestHiveS3Config.java index 5e35db3a7f1c..09cbd850e2e3 100644 --- 
a/presto-hive/src/test/java/io/prestosql/plugin/hive/s3/TestHiveS3Config.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/s3/TestHiveS3Config.java @@ -41,6 +41,7 @@ public void testDefaults() .setS3SignerClass(null) .setS3PathStyleAccess(false) .setS3IamRole(null) + .setS3ExternalId(null) .setS3StorageClass(PrestoS3StorageClass.STANDARD) .setS3SslEnabled(true) .setS3SseEnabled(false) @@ -76,6 +77,7 @@ public void testExplicitPropertyMappings() .put("hive.s3.signer-class", "com.amazonaws.services.s3.internal.AWSS3V4Signer") .put("hive.s3.path-style-access", "true") .put("hive.s3.iam-role", "roleArn") + .put("hive.s3.external-id", "externalId") .put("hive.s3.storage-class", "INTELLIGENT_TIERING") .put("hive.s3.ssl.enabled", "false") .put("hive.s3.sse.enabled", "true") @@ -108,6 +110,7 @@ public void testExplicitPropertyMappings() .setS3SignerClass("com.amazonaws.services.s3.internal.AWSS3V4Signer") .setS3PathStyleAccess(true) .setS3IamRole("roleArn") + .setS3ExternalId("externalId") .setS3StorageClass(PrestoS3StorageClass.INTELLIGENT_TIERING) .setS3SslEnabled(false) .setS3SseEnabled(true) diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/s3/TestPrestoS3FileSystem.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/s3/TestPrestoS3FileSystem.java index 6ae1f084b7aa..a52f3a2f05c4 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/s3/TestPrestoS3FileSystem.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/s3/TestPrestoS3FileSystem.java @@ -60,6 +60,7 @@ import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_DIRECTORY_OBJECT_CONTENT_TYPE; import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_ENCRYPTION_MATERIALS_PROVIDER; import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_ENDPOINT; +import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_EXTERNAL_ID; import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_IAM_ROLE; import static 
io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_KMS_KEY_ID; import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_MAX_BACKOFF_TIME; @@ -142,7 +143,26 @@ public void testAssumeRoleCredentials() try (PrestoS3FileSystem fs = new PrestoS3FileSystem()) { fs.initialize(new URI("s3n://test-bucket/"), config); - assertInstanceOf(getAwsCredentialsProvider(fs), STSAssumeRoleSessionCredentialsProvider.class); + AWSCredentialsProvider awsCredentialsProvider = getAwsCredentialsProvider(fs); + assertInstanceOf(awsCredentialsProvider, STSAssumeRoleSessionCredentialsProvider.class); + assertEquals(getFieldValue(awsCredentialsProvider, "roleArn", String.class), "role"); + } + } + + @Test + public void testAssumeRoleCredentialsWithExternalId() + throws Exception + { + Configuration config = new Configuration(false); + config.set(S3_IAM_ROLE, "role"); + config.set(S3_EXTERNAL_ID, "externalId"); + + try (PrestoS3FileSystem fs = new PrestoS3FileSystem()) { + fs.initialize(new URI("s3n://test-bucket/"), config); + AWSCredentialsProvider awsCredentialsProvider = getAwsCredentialsProvider(fs); + assertInstanceOf(awsCredentialsProvider, STSAssumeRoleSessionCredentialsProvider.class); + assertEquals(getFieldValue(awsCredentialsProvider, "roleArn", String.class), "role"); + assertEquals(getFieldValue(awsCredentialsProvider, "roleExternalId", String.class), "externalId"); } } From a2b30a92c2b501a7fe87cb68264bd7f3865953bb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Grzegorz=20Kokosi=C5=84ski?= Date: Sun, 5 Apr 2020 20:25:34 +0200 Subject: [PATCH 061/519] Use unique table names in distributed tests --- .../TestAccumuloDistributedQueries.java | 3 - .../AbstractTestDistributedQueries.java | 324 +++++++++--------- 2 files changed, 169 insertions(+), 158 deletions(-) diff --git a/presto-accumulo/src/test/java/io/prestosql/plugin/accumulo/TestAccumuloDistributedQueries.java b/presto-accumulo/src/test/java/io/prestosql/plugin/accumulo/TestAccumuloDistributedQueries.java index 
04db9ffc5c27..fd99126f88a4 100644 --- a/presto-accumulo/src/test/java/io/prestosql/plugin/accumulo/TestAccumuloDistributedQueries.java +++ b/presto-accumulo/src/test/java/io/prestosql/plugin/accumulo/TestAccumuloDistributedQueries.java @@ -93,18 +93,15 @@ public void testCreateTableAsSelect() assertFalse(getQueryRunner().tableExists(getSession(), "test_create_table_as_if_not_exists")); this.assertCreateTableAsSelect( - "test_group", "SELECT orderstatus, sum(totalprice) x FROM orders GROUP BY orderstatus", "SELECT count(DISTINCT orderstatus) FROM orders"); this.assertCreateTableAsSelect( - "test_with_data", "SELECT * FROM orders WITH DATA", "SELECT * FROM orders", "SELECT count(*) FROM orders"); this.assertCreateTableAsSelect( - "test_with_no_data", "SELECT * FROM orders WITH NO DATA", "SELECT * FROM orders LIMIT 0", "SELECT 0"); diff --git a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java index 3374e5f40435..243f8a94b994 100644 --- a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java +++ b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java @@ -195,47 +195,41 @@ public void testCreateTable() @Test public void testCreateTableAsSelect() { - assertUpdate("CREATE TABLE IF NOT EXISTS test_ctas AS SELECT name, regionkey FROM nation", "SELECT count(*) FROM nation"); - assertTableColumnNames("test_ctas", "name", "regionkey"); - assertUpdate("DROP TABLE test_ctas"); + String tableName = "test_ctas" + randomTableSuffix(); + assertUpdate("CREATE TABLE IF NOT EXISTS " + tableName + " AS SELECT name, regionkey FROM nation", "SELECT count(*) FROM nation"); + assertTableColumnNames(tableName, "name", "regionkey"); + assertUpdate("DROP TABLE " + tableName); // Some connectors support CREATE TABLE AS but not the ordinary CREATE TABLE. 
Let's test CTAS IF NOT EXISTS with a table that is guaranteed to exist. assertUpdate("CREATE TABLE IF NOT EXISTS nation AS SELECT orderkey, discount FROM lineitem", 0); assertTableColumnNames("nation", "nationkey", "name", "regionkey", "comment"); assertCreateTableAsSelect( - "test_select", "SELECT orderdate, orderkey, totalprice FROM orders", "SELECT count(*) FROM orders"); assertCreateTableAsSelect( - "test_group", "SELECT orderstatus, sum(totalprice) x FROM orders GROUP BY orderstatus", "SELECT count(DISTINCT orderstatus) FROM orders"); assertCreateTableAsSelect( - "test_join", "SELECT count(*) x FROM lineitem JOIN orders ON lineitem.orderkey = orders.orderkey", "SELECT 1"); assertCreateTableAsSelect( - "test_limit", "SELECT orderkey FROM orders ORDER BY orderkey LIMIT 10", "SELECT 10"); assertCreateTableAsSelect( - "test_unicode", "SELECT '\u2603' unicode", "SELECT 1"); assertCreateTableAsSelect( - "test_with_data", "SELECT * FROM orders WITH DATA", "SELECT * FROM orders", "SELECT count(*) FROM orders"); assertCreateTableAsSelect( - "test_with_no_data", "SELECT * FROM orders WITH NO DATA", "SELECT * FROM orders LIMIT 0", "SELECT 0"); @@ -243,7 +237,6 @@ public void testCreateTableAsSelect() // Tests for CREATE TABLE with UNION ALL: exercises PushTableWriteThroughUnion optimizer assertCreateTableAsSelect( - "test_union_all", "SELECT orderdate, orderkey, totalprice FROM orders WHERE orderkey % 2 = 0 UNION ALL " + "SELECT orderdate, orderkey, totalprice FROM orders WHERE orderkey % 2 = 1", "SELECT orderdate, orderkey, totalprice FROM orders", @@ -251,7 +244,6 @@ public void testCreateTableAsSelect() assertCreateTableAsSelect( Session.builder(getSession()).setSystemProperty("redistribute_writes", "true").build(), - "test_union_all", "SELECT CAST(orderdate AS DATE) orderdate, orderkey, totalprice FROM orders UNION ALL " + "SELECT DATE '2000-01-01', 1234567890, 1.23", "SELECT orderdate, orderkey, totalprice FROM orders UNION ALL " + @@ -260,16 +252,15 @@ public void 
testCreateTableAsSelect() assertCreateTableAsSelect( Session.builder(getSession()).setSystemProperty("redistribute_writes", "false").build(), - "test_union_all", "SELECT CAST(orderdate AS DATE) orderdate, orderkey, totalprice FROM orders UNION ALL " + "SELECT DATE '2000-01-01', 1234567890, 1.23", "SELECT orderdate, orderkey, totalprice FROM orders UNION ALL " + "SELECT DATE '2000-01-01', 1234567890, 1.23", "SELECT count(*) + 1 FROM orders"); - assertExplainAnalyze("EXPLAIN ANALYZE CREATE TABLE analyze_test AS SELECT orderstatus FROM orders"); - assertQuery("SELECT * from analyze_test", "SELECT orderstatus FROM orders"); - assertUpdate("DROP TABLE analyze_test"); + assertExplainAnalyze("EXPLAIN ANALYZE CREATE TABLE " + tableName + " AS SELECT orderstatus FROM orders"); + assertQuery("SELECT * from " + tableName, "SELECT orderstatus FROM orders"); + assertUpdate("DROP TABLE " + tableName); } @Test @@ -336,18 +327,19 @@ protected void assertExplainAnalyze(@Language("SQL") String query) // assertTrue(value.contains("Cost: "), format("Expected output to contain \"Cost: \", but it is %s", value)); } - protected void assertCreateTableAsSelect(String table, @Language("SQL") String query, @Language("SQL") String rowCountQuery) + protected void assertCreateTableAsSelect(@Language("SQL") String query, @Language("SQL") String rowCountQuery) { - assertCreateTableAsSelect(getSession(), table, query, query, rowCountQuery); + assertCreateTableAsSelect(getSession(), query, query, rowCountQuery); } - protected void assertCreateTableAsSelect(String table, @Language("SQL") String query, @Language("SQL") String expectedQuery, @Language("SQL") String rowCountQuery) + protected void assertCreateTableAsSelect(@Language("SQL") String query, @Language("SQL") String expectedQuery, @Language("SQL") String rowCountQuery) { - assertCreateTableAsSelect(getSession(), table, query, expectedQuery, rowCountQuery); + assertCreateTableAsSelect(getSession(), query, expectedQuery, rowCountQuery); } - 
protected void assertCreateTableAsSelect(Session session, String table, @Language("SQL") String query, @Language("SQL") String expectedQuery, @Language("SQL") String rowCountQuery) + protected void assertCreateTableAsSelect(Session session, @Language("SQL") String query, @Language("SQL") String expectedQuery, @Language("SQL") String rowCountQuery) { + String table = "test_table_" + randomTableSuffix(); assertUpdate(session, "CREATE TABLE " + table + " AS " + query, rowCountQuery); assertQuery(session, "SELECT * FROM " + table, expectedQuery); assertUpdate(session, "DROP TABLE " + table); @@ -509,7 +501,7 @@ public void testInsertWithCoercion() assertUpdate("INSERT INTO " + tableName + " (date_column) VALUES (TIMESTAMP '2019-11-18 22:13:40')", 1); assertQuery( - "SELECT * FROM " + tableName + "", + "SELECT * FROM " + tableName, "VALUES " + "(1, 2, 3, 4, NULL, NULL, NULL, NULL), " + "(NULL, NULL, NULL, NULL, 'aa ', 'aa ', 'aa ', NULL), " + @@ -527,31 +519,31 @@ public void testInsertWithCoercion() @Test public void testInsertUnicode() { - assertUpdate("DROP TABLE IF EXISTS test_insert_unicode"); + String tableName = "test_insert_unicode_" + randomTableSuffix(); - assertUpdate("CREATE TABLE test_insert_unicode(test varchar)"); - assertUpdate("INSERT INTO test_insert_unicode(test) VALUES 'Hello', U&'hello\\6d4B\\8Bd5\\+10FFFFworld\\7F16\\7801' ", 2); - assertThat(computeActual("SELECT test FROM test_insert_unicode").getOnlyColumnAsSet()) + assertUpdate("CREATE TABLE " + tableName + "(test varchar)"); + assertUpdate("INSERT INTO " + tableName + "(test) VALUES 'Hello', U&'hello\\6d4B\\8Bd5\\+10FFFFworld\\7F16\\7801' ", 2); + assertThat(computeActual("SELECT test FROM " + tableName).getOnlyColumnAsSet()) .containsExactlyInAnyOrder("Hello", "hello测试􏿿world编码"); - assertUpdate("DROP TABLE test_insert_unicode"); - - assertUpdate("CREATE TABLE test_insert_unicode(test varchar)"); - assertUpdate("INSERT INTO test_insert_unicode(test) VALUES 'aa', 'bé'", 2); - 
assertQuery("SELECT test FROM test_insert_unicode", "VALUES 'aa', 'bé'"); - assertQuery("SELECT test FROM test_insert_unicode WHERE test = 'aa'", "VALUES 'aa'"); - assertQuery("SELECT test FROM test_insert_unicode WHERE test > 'ba'", "VALUES 'bé'"); - assertQuery("SELECT test FROM test_insert_unicode WHERE test < 'ba'", "VALUES 'aa'"); - assertQueryReturnsEmptyResult("SELECT test FROM test_insert_unicode WHERE test = 'ba'"); - assertUpdate("DROP TABLE test_insert_unicode"); - - assertUpdate("CREATE TABLE test_insert_unicode(test varchar)"); - assertUpdate("INSERT INTO test_insert_unicode(test) VALUES 'a', 'é'", 2); - assertQuery("SELECT test FROM test_insert_unicode", "VALUES 'a', 'é'"); - assertQuery("SELECT test FROM test_insert_unicode WHERE test = 'a'", "VALUES 'a'"); - assertQuery("SELECT test FROM test_insert_unicode WHERE test > 'b'", "VALUES 'é'"); - assertQuery("SELECT test FROM test_insert_unicode WHERE test < 'b'", "VALUES 'a'"); - assertQueryReturnsEmptyResult("SELECT test FROM test_insert_unicode WHERE test = 'b'"); - assertUpdate("DROP TABLE test_insert_unicode"); + assertUpdate("DROP TABLE " + tableName); + + assertUpdate("CREATE TABLE " + tableName + "(test varchar)"); + assertUpdate("INSERT INTO " + tableName + "(test) VALUES 'aa', 'bé'", 2); + assertQuery("SELECT test FROM " + tableName, "VALUES 'aa', 'bé'"); + assertQuery("SELECT test FROM " + tableName + " WHERE test = 'aa'", "VALUES 'aa'"); + assertQuery("SELECT test FROM " + tableName + " WHERE test > 'ba'", "VALUES 'bé'"); + assertQuery("SELECT test FROM " + tableName + " WHERE test < 'ba'", "VALUES 'aa'"); + assertQueryReturnsEmptyResult("SELECT test FROM " + tableName + " WHERE test = 'ba'"); + assertUpdate("DROP TABLE " + tableName); + + assertUpdate("CREATE TABLE " + tableName + "(test varchar)"); + assertUpdate("INSERT INTO " + tableName + "(test) VALUES 'a', 'é'", 2); + assertQuery("SELECT test FROM " + tableName, "VALUES 'a', 'é'"); + assertQuery("SELECT test FROM " + tableName + " 
WHERE test = 'a'", "VALUES 'a'"); + assertQuery("SELECT test FROM " + tableName + " WHERE test > 'b'", "VALUES 'é'"); + assertQuery("SELECT test FROM " + tableName + " WHERE test < 'b'", "VALUES 'a'"); + assertQueryReturnsEmptyResult("SELECT test FROM " + tableName + " WHERE test = 'b'"); + assertUpdate("DROP TABLE " + tableName); } @Test @@ -559,139 +551,142 @@ public void testInsertArray() { skipTestUnless(supportsArrays()); - assertUpdate("CREATE TABLE test_insert_array (a ARRAY, b ARRAY)"); + String tableName = "test_insert_array_" + randomTableSuffix(); - assertUpdate("INSERT INTO test_insert_array (a) VALUES (ARRAY[null])", 1); - assertUpdate("INSERT INTO test_insert_array (a, b) VALUES (ARRAY[1.23E1], ARRAY[1.23E1])", 1); - assertQuery("SELECT a[1], b[1] FROM test_insert_array", "VALUES (null, null), (12.3, 12)"); + assertUpdate("CREATE TABLE " + tableName + " (a ARRAY, b ARRAY)"); - assertUpdate("DROP TABLE test_insert_array"); + assertUpdate("INSERT INTO " + tableName + " (a) VALUES (ARRAY[null])", 1); + assertUpdate("INSERT INTO " + tableName + " (a, b) VALUES (ARRAY[1.23E1], ARRAY[1.23E1])", 1); + assertQuery("SELECT a[1], b[1] FROM " + tableName, "VALUES (null, null), (12.3, 12)"); + + assertUpdate("DROP TABLE " + tableName); } @Test public void testDelete() { // delete half the table, then delete the rest + String tableName = "test_delete_" + randomTableSuffix(); - assertUpdate("CREATE TABLE test_delete AS SELECT * FROM orders", "SELECT count(*) FROM orders"); + assertUpdate("CREATE TABLE " + tableName + " AS SELECT * FROM orders", "SELECT count(*) FROM orders"); - assertUpdate("DELETE FROM test_delete WHERE orderkey % 2 = 0", "SELECT count(*) FROM orders WHERE orderkey % 2 = 0"); - assertQuery("SELECT * FROM test_delete", "SELECT * FROM orders WHERE orderkey % 2 <> 0"); + assertUpdate("DELETE FROM " + tableName + " WHERE orderkey % 2 = 0", "SELECT count(*) FROM orders WHERE orderkey % 2 = 0"); + assertQuery("SELECT * FROM " + tableName, "SELECT * FROM 
orders WHERE orderkey % 2 <> 0"); - assertUpdate("DELETE FROM test_delete", "SELECT count(*) FROM orders WHERE orderkey % 2 <> 0"); - assertQuery("SELECT * FROM test_delete", "SELECT * FROM orders LIMIT 0"); + assertUpdate("DELETE FROM " + tableName, "SELECT count(*) FROM orders WHERE orderkey % 2 <> 0"); + assertQuery("SELECT * FROM " + tableName, "SELECT * FROM orders LIMIT 0"); - assertUpdate("DROP TABLE test_delete"); + assertUpdate("DROP TABLE " + tableName); // delete successive parts of the table - assertUpdate("CREATE TABLE test_delete AS SELECT * FROM orders", "SELECT count(*) FROM orders"); + assertUpdate("CREATE TABLE " + tableName + " AS SELECT * FROM orders", "SELECT count(*) FROM orders"); - assertUpdate("DELETE FROM test_delete WHERE custkey <= 100", "SELECT count(*) FROM orders WHERE custkey <= 100"); - assertQuery("SELECT * FROM test_delete", "SELECT * FROM orders WHERE custkey > 100"); + assertUpdate("DELETE FROM " + tableName + " WHERE custkey <= 100", "SELECT count(*) FROM orders WHERE custkey <= 100"); + assertQuery("SELECT * FROM " + tableName, "SELECT * FROM orders WHERE custkey > 100"); - assertUpdate("DELETE FROM test_delete WHERE custkey <= 300", "SELECT count(*) FROM orders WHERE custkey > 100 AND custkey <= 300"); - assertQuery("SELECT * FROM test_delete", "SELECT * FROM orders WHERE custkey > 300"); + assertUpdate("DELETE FROM " + tableName + " WHERE custkey <= 300", "SELECT count(*) FROM orders WHERE custkey > 100 AND custkey <= 300"); + assertQuery("SELECT * FROM " + tableName, "SELECT * FROM orders WHERE custkey > 300"); - assertUpdate("DELETE FROM test_delete WHERE custkey <= 500", "SELECT count(*) FROM orders WHERE custkey > 300 AND custkey <= 500"); - assertQuery("SELECT * FROM test_delete", "SELECT * FROM orders WHERE custkey > 500"); + assertUpdate("DELETE FROM " + tableName + " WHERE custkey <= 500", "SELECT count(*) FROM orders WHERE custkey > 300 AND custkey <= 500"); + assertQuery("SELECT * FROM " + tableName, "SELECT * FROM 
orders WHERE custkey > 500"); - assertUpdate("DROP TABLE test_delete"); + assertUpdate("DROP TABLE " + tableName); // delete using a constant property - assertUpdate("CREATE TABLE test_delete AS SELECT * FROM orders", "SELECT count(*) FROM orders"); + assertUpdate("CREATE TABLE " + tableName + " AS SELECT * FROM orders", "SELECT count(*) FROM orders"); - assertUpdate("DELETE FROM test_delete WHERE orderstatus = 'O'", "SELECT count(*) FROM orders WHERE orderstatus = 'O'"); - assertQuery("SELECT * FROM test_delete", "SELECT * FROM orders WHERE orderstatus <> 'O'"); + assertUpdate("DELETE FROM " + tableName + " WHERE orderstatus = 'O'", "SELECT count(*) FROM orders WHERE orderstatus = 'O'"); + assertQuery("SELECT * FROM " + tableName, "SELECT * FROM orders WHERE orderstatus <> 'O'"); - assertUpdate("DROP TABLE test_delete"); + assertUpdate("DROP TABLE " + tableName); // delete without matching any rows - assertUpdate("CREATE TABLE test_delete AS SELECT * FROM orders", "SELECT count(*) FROM orders"); - assertUpdate("DELETE FROM test_delete WHERE rand() < 0", 0); - assertUpdate("DELETE FROM test_delete WHERE orderkey < 0", 0); - assertUpdate("DROP TABLE test_delete"); + assertUpdate("CREATE TABLE " + tableName + " AS SELECT * FROM orders", "SELECT count(*) FROM orders"); + assertUpdate("DELETE FROM " + tableName + " WHERE rand() < 0", 0); + assertUpdate("DELETE FROM " + tableName + " WHERE orderkey < 0", 0); + assertUpdate("DROP TABLE " + tableName); // delete with a predicate that optimizes to false - assertUpdate("CREATE TABLE test_delete AS SELECT * FROM orders", "SELECT count(*) FROM orders"); - assertUpdate("DELETE FROM test_delete WHERE orderkey > 5 AND orderkey < 4", 0); - assertUpdate("DROP TABLE test_delete"); + assertUpdate("CREATE TABLE " + tableName + " AS SELECT * FROM orders", "SELECT count(*) FROM orders"); + assertUpdate("DELETE FROM " + tableName + " WHERE orderkey > 5 AND orderkey < 4", 0); + assertUpdate("DROP TABLE " + tableName); // delete using a 
subquery - assertUpdate("CREATE TABLE test_delete AS SELECT * FROM lineitem", "SELECT count(*) FROM lineitem"); + assertUpdate("CREATE TABLE " + tableName + " AS SELECT * FROM lineitem", "SELECT count(*) FROM lineitem"); assertUpdate( - "DELETE FROM test_delete WHERE orderkey IN (SELECT orderkey FROM orders WHERE orderstatus = 'F')", + "DELETE FROM " + tableName + " WHERE orderkey IN (SELECT orderkey FROM orders WHERE orderstatus = 'F')", "SELECT count(*) FROM lineitem WHERE orderkey IN (SELECT orderkey FROM orders WHERE orderstatus = 'F')"); assertQuery( - "SELECT * FROM test_delete", + "SELECT * FROM " + tableName, "SELECT * FROM lineitem WHERE orderkey IN (SELECT orderkey FROM orders WHERE orderstatus <> 'F')"); - assertUpdate("DROP TABLE test_delete"); + assertUpdate("DROP TABLE " + tableName); // delete with multiple SemiJoin - assertUpdate("CREATE TABLE test_delete AS SELECT * FROM lineitem", "SELECT count(*) FROM lineitem"); + assertUpdate("CREATE TABLE " + tableName + " AS SELECT * FROM lineitem", "SELECT count(*) FROM lineitem"); assertUpdate( - "DELETE FROM test_delete\n" + + "DELETE FROM " + tableName + "\n" + "WHERE orderkey IN (SELECT orderkey FROM orders WHERE orderstatus = 'F')\n" + " AND orderkey IN (SELECT orderkey FROM orders WHERE custkey % 5 = 0)\n", "SELECT count(*) FROM lineitem\n" + "WHERE orderkey IN (SELECT orderkey FROM orders WHERE orderstatus = 'F')\n" + " AND orderkey IN (SELECT orderkey FROM orders WHERE custkey % 5 = 0)"); assertQuery( - "SELECT * FROM test_delete", + "SELECT * FROM " + tableName, "SELECT * FROM lineitem\n" + "WHERE orderkey IN (SELECT orderkey FROM orders WHERE orderstatus <> 'F')\n" + " OR orderkey IN (SELECT orderkey FROM orders WHERE custkey % 5 <> 0)"); - assertUpdate("DROP TABLE test_delete"); + assertUpdate("DROP TABLE " + tableName); // delete with SemiJoin null handling - assertUpdate("CREATE TABLE test_delete AS SELECT * FROM orders", "SELECT count(*) FROM orders"); + assertUpdate("CREATE TABLE " + tableName 
+ " AS SELECT * FROM orders", "SELECT count(*) FROM orders"); assertUpdate( - "DELETE FROM test_delete\n" + + "DELETE FROM " + tableName + "\n" + "WHERE (orderkey IN (SELECT CASE WHEN orderkey % 3 = 0 THEN NULL ELSE orderkey END FROM lineitem)) IS NULL\n", "SELECT count(*) FROM orders\n" + "WHERE (orderkey IN (SELECT CASE WHEN orderkey % 3 = 0 THEN NULL ELSE orderkey END FROM lineitem)) IS NULL\n"); assertQuery( - "SELECT * FROM test_delete", + "SELECT * FROM " + tableName, "SELECT * FROM orders\n" + "WHERE (orderkey IN (SELECT CASE WHEN orderkey % 3 = 0 THEN NULL ELSE orderkey END FROM lineitem)) IS NOT NULL\n"); - assertUpdate("DROP TABLE test_delete"); + assertUpdate("DROP TABLE " + tableName); // delete using a scalar and EXISTS subquery - assertUpdate("CREATE TABLE test_delete AS SELECT * FROM orders", "SELECT count(*) FROM orders"); - assertUpdate("DELETE FROM test_delete WHERE orderkey = (SELECT orderkey FROM orders ORDER BY orderkey LIMIT 1)", 1); - assertUpdate("DELETE FROM test_delete WHERE orderkey = (SELECT orderkey FROM orders WHERE false)", 0); - assertUpdate("DELETE FROM test_delete WHERE EXISTS(SELECT 1 WHERE false)", 0); - assertUpdate("DELETE FROM test_delete WHERE EXISTS(SELECT 1)", "SELECT count(*) - 1 FROM orders"); - assertUpdate("DROP TABLE test_delete"); + assertUpdate("CREATE TABLE " + tableName + " AS SELECT * FROM orders", "SELECT count(*) FROM orders"); + assertUpdate("DELETE FROM " + tableName + " WHERE orderkey = (SELECT orderkey FROM orders ORDER BY orderkey LIMIT 1)", 1); + assertUpdate("DELETE FROM " + tableName + " WHERE orderkey = (SELECT orderkey FROM orders WHERE false)", 0); + assertUpdate("DELETE FROM " + tableName + " WHERE EXISTS(SELECT 1 WHERE false)", 0); + assertUpdate("DELETE FROM " + tableName + " WHERE EXISTS(SELECT 1)", "SELECT count(*) - 1 FROM orders"); + assertUpdate("DROP TABLE " + tableName); // test EXPLAIN ANALYZE with CTAS - assertExplainAnalyze("EXPLAIN ANALYZE CREATE TABLE analyze_test AS SELECT 
CAST(orderstatus AS VARCHAR(15)) orderstatus FROM orders"); - assertQuery("SELECT * from analyze_test", "SELECT orderstatus FROM orders"); + assertExplainAnalyze("EXPLAIN ANALYZE CREATE TABLE " + tableName + " AS SELECT CAST(orderstatus AS VARCHAR(15)) orderstatus FROM orders"); + assertQuery("SELECT * from " + tableName, "SELECT orderstatus FROM orders"); // check that INSERT works also - assertExplainAnalyze("EXPLAIN ANALYZE INSERT INTO analyze_test SELECT clerk FROM orders"); - assertQuery("SELECT * from analyze_test", "SELECT orderstatus FROM orders UNION ALL SELECT clerk FROM orders"); + assertExplainAnalyze("EXPLAIN ANALYZE INSERT INTO " + tableName + " SELECT clerk FROM orders"); + assertQuery("SELECT * from " + tableName, "SELECT orderstatus FROM orders UNION ALL SELECT clerk FROM orders"); // check DELETE works with EXPLAIN ANALYZE - assertExplainAnalyze("EXPLAIN ANALYZE DELETE FROM analyze_test WHERE TRUE"); - assertQuery("SELECT COUNT(*) from analyze_test", "SELECT 0"); - assertUpdate("DROP TABLE analyze_test"); + assertExplainAnalyze("EXPLAIN ANALYZE DELETE FROM " + tableName + " WHERE TRUE"); + assertQuery("SELECT COUNT(*) from " + tableName, "SELECT 0"); + assertUpdate("DROP TABLE " + tableName); // Test DELETE access control - assertUpdate("CREATE TABLE test_delete AS SELECT * FROM orders", "SELECT count(*) FROM orders"); - assertAccessDenied("DELETE FROM test_delete where orderkey < 12", "Cannot select from columns \\[orderkey\\] in table or view .*.test_delete.*", privilege("orderkey", SELECT_COLUMN)); - assertAccessAllowed("DELETE FROM test_delete where orderkey < 12", privilege("orderdate", SELECT_COLUMN)); - assertAccessAllowed("DELETE FROM test_delete", privilege("orders", SELECT_COLUMN)); + assertUpdate("CREATE TABLE " + tableName + " AS SELECT * FROM orders", "SELECT count(*) FROM orders"); + assertAccessDenied("DELETE FROM " + tableName + " where orderkey < 12", "Cannot select from columns \\[orderkey\\] in table or view .*." 
+ tableName + ".*", privilege("orderkey", SELECT_COLUMN)); + assertAccessAllowed("DELETE FROM " + tableName + " where orderkey < 12", privilege("orderdate", SELECT_COLUMN)); + assertAccessAllowed("DELETE FROM " + tableName, privilege("orders", SELECT_COLUMN)); } @Test @@ -709,29 +704,31 @@ public void testView() @Language("SQL") String query = "SELECT orderkey, orderstatus, totalprice / 2 half FROM orders"; - assertUpdate("CREATE VIEW test_view AS SELECT 123 x"); - assertUpdate("CREATE OR REPLACE VIEW test_view AS " + query); + String testView = "test_view_" + randomTableSuffix(); + String testViewWithComment = "test_view_with_comment_" + randomTableSuffix(); + assertUpdate("CREATE VIEW " + testView + " AS SELECT 123 x"); + assertUpdate("CREATE OR REPLACE VIEW " + testView + " AS " + query); - assertUpdate("CREATE VIEW test_view_with_comment COMMENT 'orders' AS SELECT 123 x"); - assertUpdate("CREATE OR REPLACE VIEW test_view_with_comment COMMENT 'orders' AS " + query); + assertUpdate("CREATE VIEW " + testViewWithComment + " COMMENT 'orders' AS SELECT 123 x"); + assertUpdate("CREATE OR REPLACE VIEW " + testViewWithComment + " COMMENT 'orders' AS " + query); - MaterializedResult materializedRows = computeActual("SHOW CREATE VIEW test_view_with_comment"); + MaterializedResult materializedRows = computeActual("SHOW CREATE VIEW " + testViewWithComment); assertTrue(materializedRows.getMaterializedRows().get(0).getField(0).toString().contains("COMMENT 'orders'")); - assertQuery("SELECT * FROM test_view", query); - assertQuery("SELECT * FROM test_view_with_comment", query); + assertQuery("SELECT * FROM " + testView, query); + assertQuery("SELECT * FROM " + testViewWithComment, query); assertQuery( - "SELECT * FROM test_view a JOIN test_view b on a.orderkey = b.orderkey", + "SELECT * FROM " + testView + " a JOIN " + testView + " b on a.orderkey = b.orderkey", format("SELECT * FROM (%s) a JOIN (%s) b ON a.orderkey = b.orderkey", query, query)); - assertQuery("WITH orders AS 
(SELECT * FROM orders LIMIT 0) SELECT * FROM test_view", query); + assertQuery("WITH orders AS (SELECT * FROM orders LIMIT 0) SELECT * FROM " + testView, query); - String name = format("%s.%s.test_view", getSession().getCatalog().get(), getSession().getSchema().get()); + String name = format("%s.%s." + testView, getSession().getCatalog().get(), getSession().getSchema().get()); assertQuery("SELECT * FROM " + name, query); - assertUpdate("DROP VIEW test_view"); - assertUpdate("DROP VIEW test_view_with_comment"); + assertUpdate("DROP VIEW " + testView); + assertUpdate("DROP VIEW " + testViewWithComment); } @Test @@ -739,10 +736,16 @@ public void testViewCaseSensitivity() { skipTestUnless(supportsViews()); - computeActual("CREATE VIEW test_view_uppercase AS SELECT X FROM (SELECT 123 X)"); - computeActual("CREATE VIEW test_view_mixedcase AS SELECT XyZ FROM (SELECT 456 XyZ)"); - assertQuery("SELECT * FROM test_view_uppercase", "SELECT X FROM (SELECT 123 X)"); - assertQuery("SELECT * FROM test_view_mixedcase", "SELECT XyZ FROM (SELECT 456 XyZ)"); + String upperCaseView = "test_view_uppercase_" + randomTableSuffix(); + String mixedCaseView = "test_view_mixedcase_" + randomTableSuffix(); + + computeActual("CREATE VIEW " + upperCaseView + " AS SELECT X FROM (SELECT 123 X)"); + computeActual("CREATE VIEW " + mixedCaseView + " AS SELECT XyZ FROM (SELECT 456 XyZ)"); + assertQuery("SELECT * FROM " + upperCaseView, "SELECT X FROM (SELECT 123 X)"); + assertQuery("SELECT * FROM " + mixedCaseView, "SELECT XyZ FROM (SELECT 456 XyZ)"); + + assertUpdate("DROP VIEW " + upperCaseView); + assertUpdate("DROP VIEW " + mixedCaseView); } @Test @@ -750,19 +753,22 @@ public void testCompatibleTypeChangeForView() { skipTestUnless(supportsViews()); - assertUpdate("CREATE TABLE test_table_1 AS SELECT 'abcdefg' a", 1); - assertUpdate("CREATE VIEW test_view_1 AS SELECT a FROM test_table_1"); + String tableName = "test_table_" + randomTableSuffix(); + String viewName = "test_view_" + 
randomTableSuffix(); - assertQuery("SELECT * FROM test_view_1", "VALUES 'abcdefg'"); + assertUpdate("CREATE TABLE " + tableName + " AS SELECT 'abcdefg' a", 1); + assertUpdate("CREATE VIEW " + viewName + " AS SELECT a FROM " + tableName); + + assertQuery("SELECT * FROM " + viewName, "VALUES 'abcdefg'"); // replace table with a version that's implicitly coercible to the previous one - assertUpdate("DROP TABLE test_table_1"); - assertUpdate("CREATE TABLE test_table_1 AS SELECT 'abc' a", 1); + assertUpdate("DROP TABLE " + tableName); + assertUpdate("CREATE TABLE " + tableName + " AS SELECT 'abc' a", 1); - assertQuery("SELECT * FROM test_view_1", "VALUES 'abc'"); + assertQuery("SELECT * FROM " + viewName, "VALUES 'abc'"); - assertUpdate("DROP VIEW test_view_1"); - assertUpdate("DROP TABLE test_table_1"); + assertUpdate("DROP VIEW " + viewName); + assertUpdate("DROP TABLE " + tableName); } @Test @@ -770,19 +776,22 @@ public void testCompatibleTypeChangeForView2() { skipTestUnless(supportsViews()); - assertUpdate("CREATE TABLE test_table_2 AS SELECT BIGINT '1' v", 1); - assertUpdate("CREATE VIEW test_view_2 AS SELECT * FROM test_table_2"); + String tableName = "test_table_" + randomTableSuffix(); + String viewName = "test_view_" + randomTableSuffix(); - assertQuery("SELECT * FROM test_view_2", "VALUES 1"); + assertUpdate("CREATE TABLE " + tableName + " AS SELECT BIGINT '1' v", 1); + assertUpdate("CREATE VIEW " + viewName + " AS SELECT * FROM " + tableName); + + assertQuery("SELECT * FROM " + viewName, "VALUES 1"); // replace table with a version that's implicitly coercible to the previous one - assertUpdate("DROP TABLE test_table_2"); - assertUpdate("CREATE TABLE test_table_2 AS SELECT INTEGER '1' v", 1); + assertUpdate("DROP TABLE " + tableName); + assertUpdate("CREATE TABLE " + tableName + " AS SELECT INTEGER '1' v", 1); - assertQuery("SELECT * FROM test_view_2 WHERE v = 1", "VALUES 1"); + assertQuery("SELECT * FROM " + viewName + " WHERE v = 1", "VALUES 1"); - 
assertUpdate("DROP VIEW test_view_2"); - assertUpdate("DROP TABLE test_table_2"); + assertUpdate("DROP VIEW " + viewName); + assertUpdate("DROP TABLE " + tableName); } @Test @@ -790,8 +799,10 @@ public void testViewMetadata() { skipTestUnless(supportsViews()); + String viewName = "meta_test_view_" + randomTableSuffix(); + @Language("SQL") String query = "SELECT BIGINT '123' x, 'foo' y"; - assertUpdate("CREATE VIEW meta_test_view AS " + query); + assertUpdate("CREATE VIEW " + viewName + " AS " + query); // test INFORMATION_SCHEMA.TABLES MaterializedResult actual = computeActual(format( @@ -801,7 +812,7 @@ public void testViewMetadata() MaterializedResult expected = resultBuilder(getSession(), actual.getTypes()) .row("customer", "BASE TABLE") .row("lineitem", "BASE TABLE") - .row("meta_test_view", "VIEW") + .row(viewName, "VIEW") .row("nation", "BASE TABLE") .row("orders", "BASE TABLE") .row("part", "BASE TABLE") @@ -829,13 +840,13 @@ public void testViewMetadata() getSession().getSchema().get())); expected = resultBuilder(getSession(), actual.getTypes()) - .row("meta_test_view", formatSqlText(query)) + .row(viewName, formatSqlText(query)) .build(); assertContains(actual, expected); // test SHOW COLUMNS - actual = computeActual("SHOW COLUMNS FROM meta_test_view"); + actual = computeActual("SHOW COLUMNS FROM " + viewName); expected = resultBuilder(getSession(), VARCHAR, VARCHAR, VARCHAR, VARCHAR) .row("x", "bigint", "", "") @@ -849,18 +860,18 @@ public void testViewMetadata() "CREATE VIEW %s.%s.%s AS %s", getSession().getCatalog().get(), getSession().getSchema().get(), - "meta_test_view", + viewName, query)).trim(); - actual = computeActual("SHOW CREATE VIEW meta_test_view"); + actual = computeActual("SHOW CREATE VIEW " + viewName); assertEquals(getOnlyElement(actual.getOnlyColumnAsSet()), expectedSql); - actual = computeActual(format("SHOW CREATE VIEW %s.%s.meta_test_view", getSession().getCatalog().get(), getSession().getSchema().get())); + actual = 
computeActual(format("SHOW CREATE VIEW %s.%s." + viewName, getSession().getCatalog().get(), getSession().getSchema().get())); assertEquals(getOnlyElement(actual.getOnlyColumnAsSet()), expectedSql); - assertUpdate("DROP VIEW meta_test_view"); + assertUpdate("DROP VIEW " + viewName); } @Test @@ -870,7 +881,7 @@ public void testShowCreateView() checkState(getSession().getCatalog().isPresent(), "catalog is not set"); checkState(getSession().getSchema().isPresent(), "schema is not set"); - String viewName = "test_show_create_view"; + String viewName = "test_show_create_view" + randomTableSuffix(); assertUpdate("DROP VIEW IF EXISTS " + viewName); String ddl = format( "CREATE VIEW %s.%s.%s AS\n" + @@ -913,10 +924,11 @@ public void testQueryLoggingCount() DispatchManager dispatchManager = ((DistributedQueryRunner) getQueryRunner()).getCoordinator().getDispatchManager(); long beforeCompletedQueriesCount = waitUntilStable(() -> dispatchManager.getStats().getCompletedQueries().getTotalCount(), new Duration(5, SECONDS)); long beforeSubmittedQueriesCount = dispatchManager.getStats().getSubmittedQueries().getTotalCount(); - assertUpdate("CREATE TABLE test_query_logging_count AS SELECT 1 foo_1, 2 foo_2_4", 1); - assertQuery("SELECT foo_1, foo_2_4 FROM test_query_logging_count", "SELECT 1, 2"); - assertUpdate("DROP TABLE test_query_logging_count"); - assertQueryFails("SELECT * FROM test_query_logging_count", ".*Table .* does not exist"); + String tableName = "test_query_logging_count" + randomTableSuffix(); + assertUpdate("CREATE TABLE " + tableName + " AS SELECT 1 foo_1, 2 foo_2_4", 1); + assertQuery("SELECT foo_1, foo_2_4 FROM " + tableName, "SELECT 1, 2"); + assertUpdate("DROP TABLE " + tableName); + assertQueryFails("SELECT * FROM " + tableName, ".*Table .* does not exist"); // TODO: Figure out a better way of synchronization assertUntilTimeout( @@ -1000,9 +1012,10 @@ public void testTableSampleWithFiltering() @Test public void testSymbolAliasing() { - assertUpdate("CREATE 
TABLE test_symbol_aliasing AS SELECT 1 foo_1, 2 foo_2_4", 1); - assertQuery("SELECT foo_1, foo_2_4 FROM test_symbol_aliasing", "SELECT 1, 2"); - assertUpdate("DROP TABLE test_symbol_aliasing"); + String tableName = "test_symbol_aliasing" + randomTableSuffix(); + assertUpdate("CREATE TABLE " + tableName + " AS SELECT 1 foo_1, 2 foo_2_4", 1); + assertQuery("SELECT foo_1, foo_2_4 FROM " + tableName, "SELECT 1, 2"); + assertUpdate("DROP TABLE " + tableName); } @Test @@ -1167,7 +1180,8 @@ public void testViewFunctionAccessControl() @Test public void testWrittenStats() { - String sql = "CREATE TABLE test_written_stats AS SELECT * FROM nation"; + String tableName = "test_written_stats_" + randomTableSuffix(); + String sql = "CREATE TABLE " + tableName + " AS SELECT * FROM nation"; DistributedQueryRunner distributedQueryRunner = (DistributedQueryRunner) getQueryRunner(); ResultWithQueryId resultResultWithQueryId = distributedQueryRunner.executeWithQueryId(getSession(), sql); QueryInfo queryInfo = distributedQueryRunner.getCoordinator().getQueryManager().getFullQueryInfo(resultResultWithQueryId.getQueryId()); @@ -1176,7 +1190,7 @@ public void testWrittenStats() assertEquals(queryInfo.getQueryStats().getWrittenPositions(), 25L); assertTrue(queryInfo.getQueryStats().getLogicalWrittenDataSize().toBytes() > 0L); - sql = "INSERT INTO test_written_stats SELECT * FROM nation LIMIT 10"; + sql = "INSERT INTO " + tableName + " SELECT * FROM nation LIMIT 10"; resultResultWithQueryId = distributedQueryRunner.executeWithQueryId(getSession(), sql); queryInfo = distributedQueryRunner.getCoordinator().getQueryManager().getFullQueryInfo(resultResultWithQueryId.getQueryId()); @@ -1184,7 +1198,7 @@ public void testWrittenStats() assertEquals(queryInfo.getQueryStats().getWrittenPositions(), 10L); assertTrue(queryInfo.getQueryStats().getLogicalWrittenDataSize().toBytes() > 0L); - assertUpdate("DROP TABLE test_written_stats"); + assertUpdate("DROP TABLE " + tableName); } @Test From 
511cf18a360f4d4e0f649ee7ad4924dcf207bb9b Mon Sep 17 00:00:00 2001 From: James Petty Date: Sun, 1 Mar 2020 15:49:16 -0500 Subject: [PATCH 062/519] Reduce instance size and block copies in Page class Refactors Page class to avoid extra allocations and copies. Fields didn't require AtomicLong semantics and simple volatile fields are now used instead. Additionally, trusted methods can avoid copying Block[] in the page constructor by using a new static helper method. Finally, no valid reason should exist for Page subclassing, so the class is now final. --- .../src/main/java/io/prestosql/spi/Page.java | 88 +++++++++++-------- .../java/io/prestosql/spi/PageBuilder.java | 2 +- 2 files changed, 54 insertions(+), 36 deletions(-) diff --git a/presto-spi/src/main/java/io/prestosql/spi/Page.java b/presto-spi/src/main/java/io/prestosql/spi/Page.java index 7eec5cc19edd..f617d37b7011 100644 --- a/presto-spi/src/main/java/io/prestosql/spi/Page.java +++ b/presto-spi/src/main/java/io/prestosql/spi/Page.java @@ -23,7 +23,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.concurrent.atomic.AtomicLong; import static io.airlift.slice.SizeOf.sizeOf; import static io.prestosql.spi.block.DictionaryId.randomDictionaryId; @@ -31,27 +30,40 @@ import static java.lang.String.format; import static java.util.Objects.requireNonNull; -public class Page +public final class Page { - public static final int INSTANCE_SIZE = ClassLayout.parseClass(Page.class).instanceSize() + - (3 * ClassLayout.parseClass(AtomicLong.class).instanceSize()); + public static final int INSTANCE_SIZE = ClassLayout.parseClass(Page.class).instanceSize(); + + /** + * Visible to give trusted classes like {@link PageBuilder} access to a constructor that doesn't + * defensively copy the blocks + */ + static Page wrapBlocksWithoutCopy(int positionCount, Block[] blocks) + { + return new Page(false, positionCount, blocks); + } private final Block[] blocks; private final int positionCount; - 
private final AtomicLong sizeInBytes = new AtomicLong(-1); - private final AtomicLong retainedSizeInBytes = new AtomicLong(-1); - private final AtomicLong logicalSizeInBytes = new AtomicLong(-1); + private volatile long sizeInBytes = -1; + private volatile long retainedSizeInBytes = -1; + private volatile long logicalSizeInBytes = -1; public Page(Block... blocks) { - this(determinePositionCount(blocks), blocks); + this(true, determinePositionCount(blocks), blocks); } public Page(int positionCount, Block... blocks) + { + this(true, positionCount, blocks); + } + + private Page(boolean blocksCopyRequired, int positionCount, Block[] blocks) { requireNonNull(blocks, "blocks is null"); - this.blocks = Arrays.copyOf(blocks, blocks.length); this.positionCount = positionCount; + this.blocks = blocksCopyRequired ? blocks.clone() : blocks; } public int getChannelCount() @@ -66,36 +78,37 @@ public int getPositionCount() public long getSizeInBytes() { - long sizeInBytes = this.sizeInBytes.get(); + long sizeInBytes = this.sizeInBytes; if (sizeInBytes < 0) { sizeInBytes = 0; for (Block block : blocks) { sizeInBytes += block.getLoadedBlock().getSizeInBytes(); } - this.sizeInBytes.set(sizeInBytes); + this.sizeInBytes = sizeInBytes; } return sizeInBytes; } public long getLogicalSizeInBytes() { - long size = logicalSizeInBytes.get(); - if (size < 0) { - size = 0; + long logicalSizeInBytes = this.logicalSizeInBytes; + if (logicalSizeInBytes < 0) { + logicalSizeInBytes = 0; for (Block block : blocks) { - size += block.getLogicalSizeInBytes(); + logicalSizeInBytes += block.getLogicalSizeInBytes(); } - logicalSizeInBytes.set(size); + this.logicalSizeInBytes = logicalSizeInBytes; } - return size; + return logicalSizeInBytes; } public long getRetainedSizeInBytes() { - if (retainedSizeInBytes.get() < 0) { - updateRetainedSize(); + long retainedSizeInBytes = this.retainedSizeInBytes; + if (retainedSizeInBytes < 0) { + return updateRetainedSize(); } - return retainedSizeInBytes.get(); + 
return retainedSizeInBytes; } public Block getBlock(int channel) @@ -113,7 +126,7 @@ public Page getSingleValuePage(int position) for (int i = 0; i < this.blocks.length; i++) { singleValueBlocks[i] = this.blocks[i].getSingleValueBlock(position); } - return new Page(1, singleValueBlocks); + return wrapBlocksWithoutCopy(1, singleValueBlocks); } public Page getRegion(int positionOffset, int length) @@ -127,7 +140,7 @@ public Page getRegion(int positionOffset, int length) for (int i = 0; i < channelCount; i++) { slicedBlocks[i] = blocks[i].getRegion(positionOffset, length); } - return new Page(length, slicedBlocks); + return wrapBlocksWithoutCopy(length, slicedBlocks); } public Page appendColumn(Block block) @@ -139,7 +152,7 @@ public Page appendColumn(Block block) Block[] newBlocks = Arrays.copyOf(blocks, blocks.length + 1); newBlocks[blocks.length] = block; - return new Page(newBlocks); + return wrapBlocksWithoutCopy(positionCount, newBlocks); } public void compact() @@ -255,20 +268,22 @@ private static int[] getNewIds(int positionCount, DictionaryBlock dictionaryBloc */ public Page getLoadedPage() { - boolean allLoaded = true; - Block[] loadedBlocks = new Block[blocks.length]; + Block[] loadedBlocks = null; for (int i = 0; i < blocks.length; i++) { - loadedBlocks[i] = blocks[i].getLoadedBlock(); - if (loadedBlocks[i] != blocks[i]) { - allLoaded = false; + Block loaded = blocks[i].getLoadedBlock(); + if (loaded != blocks[i]) { + if (loadedBlocks == null) { + loadedBlocks = blocks.clone(); + } + loadedBlocks[i] = loaded; } } - if (allLoaded) { + if (loadedBlocks == null) { return this; } - return new Page(loadedBlocks); + return wrapBlocksWithoutCopy(positionCount, loadedBlocks); } @Override @@ -297,8 +312,10 @@ public Page getPositions(int[] retainedPositions, int offset, int length) requireNonNull(retainedPositions, "retainedPositions is null"); Block[] blocks = new Block[this.blocks.length]; - Arrays.setAll(blocks, i -> 
this.blocks[i].getPositions(retainedPositions, offset, length)); - return new Page(length, blocks); + for (int i = 0; i < blocks.length; i++) { + blocks[i] = this.blocks[i].getPositions(retainedPositions, offset, length); + } + return wrapBlocksWithoutCopy(length, blocks); } public Page prependColumn(Block column) @@ -311,16 +328,17 @@ public Page prependColumn(Block column) result[0] = column; System.arraycopy(blocks, 0, result, 1, blocks.length); - return new Page(positionCount, result); + return wrapBlocksWithoutCopy(positionCount, result); } - private void updateRetainedSize() + private long updateRetainedSize() { long retainedSizeInBytes = INSTANCE_SIZE + sizeOf(blocks); for (Block block : blocks) { retainedSizeInBytes += block.getRetainedSizeInBytes(); } - this.retainedSizeInBytes.set(retainedSizeInBytes); + this.retainedSizeInBytes = retainedSizeInBytes; + return retainedSizeInBytes; } private static class DictionaryBlockIndexes diff --git a/presto-spi/src/main/java/io/prestosql/spi/PageBuilder.java b/presto-spi/src/main/java/io/prestosql/spi/PageBuilder.java index d5dfda33c875..dd19b425fc4f 100644 --- a/presto-spi/src/main/java/io/prestosql/spi/PageBuilder.java +++ b/presto-spi/src/main/java/io/prestosql/spi/PageBuilder.java @@ -170,7 +170,7 @@ public Page build() } } - return new Page(blocks); + return Page.wrapBlocksWithoutCopy(declaredPositions, blocks); } private static void checkArgument(boolean expression, String errorMessage) From bcaa005e178681b7cebc23aafd01cb960538e59c Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Mon, 6 Apr 2020 07:34:14 +0200 Subject: [PATCH 063/519] Update outdated comment When the comment was added, indeed `CAST` logic was used, but it no longer is. Update comment not to imply there is SQL CAST being performed. 
--- .../java/io/prestosql/plugin/postgresql/PostgreSqlClient.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/presto-postgresql/src/main/java/io/prestosql/plugin/postgresql/PostgreSqlClient.java b/presto-postgresql/src/main/java/io/prestosql/plugin/postgresql/PostgreSqlClient.java index f2a2ce481a46..1298010dc402 100644 --- a/presto-postgresql/src/main/java/io/prestosql/plugin/postgresql/PostgreSqlClient.java +++ b/presto-postgresql/src/main/java/io/prestosql/plugin/postgresql/PostgreSqlClient.java @@ -606,7 +606,7 @@ private static SliceReadFunction arrayAsJsonReadFunction(ConnectorSession sessio return Slices.wrappedBuffer(SORTED_MAPPER.writeValueAsBytes(value)); } catch (JsonProcessingException e) { - throw new PrestoException(JDBC_ERROR, "Cast to JSON failed for " + type.getDisplayName(), e); + throw new PrestoException(JDBC_ERROR, "Conversion to JSON failed for " + type.getDisplayName(), e); } }; } From 14f3bea53b797061fc02fecaf10d3830320c9c01 Mon Sep 17 00:00:00 2001 From: Martin Traverso Date: Mon, 6 Apr 2020 15:28:45 -0700 Subject: [PATCH 064/519] Add main() to PostgreSqlQueryRunner --- presto-postgresql/pom.xml | 7 +++++ .../postgresql/PostgreSqlQueryRunner.java | 30 +++++++++++++++++-- .../TestPostgreSqlCaseInsensitiveMapping.java | 1 + .../TestPostgreSqlDistributedQueries.java | 1 + .../postgresql/TestPostgreSqlTypeMapping.java | 1 + 5 files changed, 37 insertions(+), 3 deletions(-) diff --git a/presto-postgresql/pom.xml b/presto-postgresql/pom.xml index 032816549d4e..aa6e7cfb2437 100644 --- a/presto-postgresql/pom.xml +++ b/presto-postgresql/pom.xml @@ -77,6 +77,13 @@ validation-api + + + io.airlift + log-manager + runtime + + io.prestosql diff --git a/presto-postgresql/src/test/java/io/prestosql/plugin/postgresql/PostgreSqlQueryRunner.java b/presto-postgresql/src/test/java/io/prestosql/plugin/postgresql/PostgreSqlQueryRunner.java index 7c714e017c17..0733cf60df86 100644 --- 
a/presto-postgresql/src/test/java/io/prestosql/plugin/postgresql/PostgreSqlQueryRunner.java +++ b/presto-postgresql/src/test/java/io/prestosql/plugin/postgresql/PostgreSqlQueryRunner.java @@ -15,6 +15,8 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import io.airlift.log.Logger; +import io.airlift.log.Logging; import io.prestosql.Session; import io.prestosql.plugin.tpch.TpchPlugin; import io.prestosql.testing.DistributedQueryRunner; @@ -38,15 +40,21 @@ private PostgreSqlQueryRunner() {} public static QueryRunner createPostgreSqlQueryRunner(TestingPostgreSqlServer server, TpchTable... tables) throws Exception { - return createPostgreSqlQueryRunner(server, ImmutableMap.of(), ImmutableList.copyOf(tables)); + return createPostgreSqlQueryRunner(server, ImmutableMap.of(), ImmutableMap.of(), ImmutableList.copyOf(tables)); } - public static QueryRunner createPostgreSqlQueryRunner(TestingPostgreSqlServer server, Map connectorProperties, Iterable> tables) + public static DistributedQueryRunner createPostgreSqlQueryRunner( + TestingPostgreSqlServer server, + Map extraProperties, + Map connectorProperties, + Iterable> tables) throws Exception { DistributedQueryRunner queryRunner = null; try { - queryRunner = DistributedQueryRunner.builder(createSession()).build(); + queryRunner = DistributedQueryRunner.builder(createSession()) + .setExtraProperties(extraProperties) + .build(); queryRunner.installPlugin(new TpchPlugin()); queryRunner.createCatalog("tpch", "tpch"); @@ -78,4 +86,20 @@ public static Session createSession() .setSchema(TPCH_SCHEMA) .build(); } + + public static void main(String[] args) + throws Exception + { + Logging.initialize(); + + DistributedQueryRunner queryRunner = createPostgreSqlQueryRunner( + new TestingPostgreSqlServer(), + ImmutableMap.of("http-server.http.port", "8080"), + ImmutableMap.of(), + TpchTable.getTables()); + + Logger log = Logger.get(PostgreSqlQueryRunner.class); + log.info("======== SERVER 
STARTED ========"); + log.info("\n====\n%s\n====", queryRunner.getCoordinator().getBaseUrl()); + } } diff --git a/presto-postgresql/src/test/java/io/prestosql/plugin/postgresql/TestPostgreSqlCaseInsensitiveMapping.java b/presto-postgresql/src/test/java/io/prestosql/plugin/postgresql/TestPostgreSqlCaseInsensitiveMapping.java index 71a4e00642b6..22232e2c6ef9 100644 --- a/presto-postgresql/src/test/java/io/prestosql/plugin/postgresql/TestPostgreSqlCaseInsensitiveMapping.java +++ b/presto-postgresql/src/test/java/io/prestosql/plugin/postgresql/TestPostgreSqlCaseInsensitiveMapping.java @@ -44,6 +44,7 @@ protected QueryRunner createQueryRunner() this.postgreSqlServer = new TestingPostgreSqlServer(); return PostgreSqlQueryRunner.createPostgreSqlQueryRunner( postgreSqlServer, + ImmutableMap.of(), ImmutableMap.of("case-insensitive-name-matching", "true"), ImmutableSet.of()); } diff --git a/presto-postgresql/src/test/java/io/prestosql/plugin/postgresql/TestPostgreSqlDistributedQueries.java b/presto-postgresql/src/test/java/io/prestosql/plugin/postgresql/TestPostgreSqlDistributedQueries.java index 2b7c6663aa0f..5ed73b6064c4 100644 --- a/presto-postgresql/src/test/java/io/prestosql/plugin/postgresql/TestPostgreSqlDistributedQueries.java +++ b/presto-postgresql/src/test/java/io/prestosql/plugin/postgresql/TestPostgreSqlDistributedQueries.java @@ -37,6 +37,7 @@ protected QueryRunner createQueryRunner() this.postgreSqlServer = new TestingPostgreSqlServer(); return createPostgreSqlQueryRunner( postgreSqlServer, + ImmutableMap.of(), ImmutableMap.builder() // caching here speeds up tests highly, caching is not used in smoke tests .put("metadata.cache-ttl", "10m") diff --git a/presto-postgresql/src/test/java/io/prestosql/plugin/postgresql/TestPostgreSqlTypeMapping.java b/presto-postgresql/src/test/java/io/prestosql/plugin/postgresql/TestPostgreSqlTypeMapping.java index 88d9b4ffebdf..d363c624a40f 100644 --- 
a/presto-postgresql/src/test/java/io/prestosql/plugin/postgresql/TestPostgreSqlTypeMapping.java +++ b/presto-postgresql/src/test/java/io/prestosql/plugin/postgresql/TestPostgreSqlTypeMapping.java @@ -141,6 +141,7 @@ protected QueryRunner createQueryRunner() this.postgreSqlServer = new TestingPostgreSqlServer(); return createPostgreSqlQueryRunner( postgreSqlServer, + ImmutableMap.of(), ImmutableMap.of("jdbc-types-mapped-to-varchar", "Tsrange, Inet" /* make sure that types are compared case insensitively */), ImmutableList.of()); } From efe1d4194793d72f3b2b91bf2472fab6402f6dd7 Mon Sep 17 00:00:00 2001 From: David Phillips Date: Mon, 6 Apr 2020 18:40:34 -0700 Subject: [PATCH 065/519] Cleanup toString for JdbcColumnHandle --- .../main/java/io/prestosql/plugin/jdbc/JdbcColumnHandle.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/JdbcColumnHandle.java b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/JdbcColumnHandle.java index 23615baa9ea2..9e7a253fa0c6 100644 --- a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/JdbcColumnHandle.java +++ b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/JdbcColumnHandle.java @@ -130,10 +130,10 @@ public int hashCode() @Override public String toString() { - return Joiner.on(":").join( + return Joiner.on(":").skipNulls().join( columnName, columnType.getDisplayName(), - jdbcTypeHandle.getJdbcTypeName()); + jdbcTypeHandle.getJdbcTypeName().orElse(null)); } public static Builder builder() From af42e81b6ad25fab0002ee700caeb6bff903a63e Mon Sep 17 00:00:00 2001 From: Szymon Homa Date: Tue, 7 Apr 2020 13:55:40 +0200 Subject: [PATCH 066/519] Fix flaky TestConnectorEventListner Test was expecting 2 events only, instead of 3 (create, finish, split) and it seems that from time to time these events were handled in a different order hence, later assertions were failing. 
--- .../java/io/prestosql/execution/TestConnectorEventListener.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/presto-tests/src/test/java/io/prestosql/execution/TestConnectorEventListener.java b/presto-tests/src/test/java/io/prestosql/execution/TestConnectorEventListener.java index 31435b9261f4..f4e33c435ed9 100644 --- a/presto-tests/src/test/java/io/prestosql/execution/TestConnectorEventListener.java +++ b/presto-tests/src/test/java/io/prestosql/execution/TestConnectorEventListener.java @@ -71,7 +71,7 @@ private void tearDown() public void testConnectorEventHandlerReceivingEvents() throws Exception { - queries.runQueryAndWaitForEvents("SELECT 1", 2, session); + queries.runQueryAndWaitForEvents("SELECT 1", 3, session); assertThat(generatedEvents.getQueryCreatedEvents()) .size().isEqualTo(1); From 1821902f1a05ad5d485f0f62405305941a04bd21 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=81ukasz=20Walkiewicz?= Date: Tue, 7 Apr 2020 12:37:55 +0200 Subject: [PATCH 067/519] Skip setting admin role if it's already enabled --- .../tests/hive/TestHiveStorageFormats.java | 23 ++++++++++++++----- 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/presto-product-tests/src/main/java/io/prestosql/tests/hive/TestHiveStorageFormats.java b/presto-product-tests/src/main/java/io/prestosql/tests/hive/TestHiveStorageFormats.java index 4a73030204ef..c8bd03e8a2eb 100644 --- a/presto-product-tests/src/main/java/io/prestosql/tests/hive/TestHiveStorageFormats.java +++ b/presto-product-tests/src/main/java/io/prestosql/tests/hive/TestHiveStorageFormats.java @@ -14,6 +14,7 @@ package io.prestosql.tests.hive; import com.google.common.collect.ImmutableMap; +import com.google.inject.Inject; import io.prestosql.tempto.ProductTest; import io.prestosql.tempto.assertions.QueryAssert.Row; import io.prestosql.tempto.query.QueryResult; @@ -21,6 +22,8 @@ import org.testng.annotations.DataProvider; import org.testng.annotations.Test; +import javax.inject.Named; + import 
java.sql.Connection; import java.sql.SQLException; import java.util.List; @@ -48,6 +51,10 @@ public class TestHiveStorageFormats { private static final String TPCH_SCHEMA = "tiny"; + @Inject(optional = true) + @Named("databases.presto.admin_role_enabled") + private boolean adminRoleEnabled; + @DataProvider(name = "storage_formats") public static Object[][] storageFormats() { @@ -67,7 +74,7 @@ public static Object[][] storageFormats() public void testInsertIntoTable(StorageFormat storageFormat) { // only admin user is allowed to change session properties - setRole("admin"); + setAdminRole(); setSessionProperties(storageFormat); String tableName = "storage_formats_test_insert_into_" + storageFormat.getName().toLowerCase(Locale.ENGLISH); @@ -110,7 +117,7 @@ public void testInsertIntoTable(StorageFormat storageFormat) public void testCreateTableAs(StorageFormat storageFormat) { // only admin user is allowed to change session properties - setRole("admin"); + setAdminRole(); setSessionProperties(storageFormat); String tableName = "storage_formats_test_create_table_as_select_" + storageFormat.getName().toLowerCase(Locale.ENGLISH); @@ -136,7 +143,7 @@ public void testCreateTableAs(StorageFormat storageFormat) public void testInsertIntoPartitionedTable(StorageFormat storageFormat) { // only admin user is allowed to change session properties - setRole("admin"); + setAdminRole(); setSessionProperties(storageFormat); String tableName = "storage_formats_test_insert_into_partitioned_" + storageFormat.getName().toLowerCase(Locale.ENGLISH); @@ -179,7 +186,7 @@ public void testInsertIntoPartitionedTable(StorageFormat storageFormat) public void testCreatePartitionedTableAs(StorageFormat storageFormat) { // only admin user is allowed to change session properties - setRole("admin"); + setAdminRole(); setSessionProperties(storageFormat); String tableName = "storage_formats_test_create_table_as_select_partitioned_" + storageFormat.getName().toLowerCase(Locale.ENGLISH); @@ -249,11 
+256,15 @@ private static void assertSelect(String query, String tableName) .containsExactly(expectedRows); } - private static void setRole(String role) + private void setAdminRole() { + if (adminRoleEnabled) { + return; + } + Connection connection = defaultQueryExecutor().getConnection(); try { - JdbcDriverUtils.setRole(connection, role); + JdbcDriverUtils.setRole(connection, "admin"); } catch (SQLException e) { throw new RuntimeException(e); From 8b490ec25dbf03c7a4729f9a40957f5ea02430a6 Mon Sep 17 00:00:00 2001 From: Dain Sundstrom Date: Mon, 6 Apr 2020 15:54:29 -0700 Subject: [PATCH 068/519] Bind NodeManager and VersionEmbedder in base JDBC connector --- .../java/io/prestosql/plugin/jdbc/JdbcConnectorFactory.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/JdbcConnectorFactory.java b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/JdbcConnectorFactory.java index 268c76b18df2..a783be6c07e6 100644 --- a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/JdbcConnectorFactory.java +++ b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/JdbcConnectorFactory.java @@ -17,6 +17,8 @@ import com.google.inject.Module; import io.airlift.bootstrap.Bootstrap; import io.prestosql.plugin.jdbc.credential.CredentialProviderModule; +import io.prestosql.spi.NodeManager; +import io.prestosql.spi.VersionEmbedder; import io.prestosql.spi.connector.Connector; import io.prestosql.spi.connector.ConnectorContext; import io.prestosql.spi.connector.ConnectorFactory; @@ -66,6 +68,8 @@ public Connector create(String catalogName, Map requiredConfig, Bootstrap app = new Bootstrap( binder -> binder.bind(TypeManager.class).toInstance(context.getTypeManager()), + binder -> binder.bind(NodeManager.class).toInstance(context.getNodeManager()), + binder -> binder.bind(VersionEmbedder.class).toInstance(context.getVersionEmbedder()), new JdbcModule(catalogName), new CredentialProviderModule(), 
moduleProvider.getModule(catalogName)); From c9972e69ff490f8f66c9bc61309479a385428998 Mon Sep 17 00:00:00 2001 From: Dain Sundstrom Date: Tue, 7 Apr 2020 10:24:12 -0700 Subject: [PATCH 069/519] Remove default for hive.cache.location --- .../java/io/prestosql/plugin/hive/rubix/RubixConfig.java | 5 ++++- .../io/prestosql/plugin/hive/rubix/TestRubixConfig.java | 6 +++--- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/rubix/RubixConfig.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/rubix/RubixConfig.java index abb31afab505..b46d3e7ca2eb 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/rubix/RubixConfig.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/rubix/RubixConfig.java @@ -16,10 +16,12 @@ import com.qubole.rubix.spi.CacheConfig; import io.airlift.configuration.Config; +import javax.validation.constraints.NotNull; + public class RubixConfig { private boolean parallelWarmupEnabled = true; - private String cacheLocation = "/tmp"; + private String cacheLocation; private int bookKeeperServerPort = CacheConfig.DEFAULT_BOOKKEEPER_SERVER_PORT; private int dataTransferServerPort = CacheConfig.DEFAULT_DATA_TRANSFER_SERVER_PORT; @@ -35,6 +37,7 @@ public RubixConfig setParallelWarmupEnabled(boolean value) return this; } + @NotNull public String getCacheLocation() { return cacheLocation; diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/rubix/TestRubixConfig.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/rubix/TestRubixConfig.java index cb86f06b8114..e01690e4d256 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/rubix/TestRubixConfig.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/rubix/TestRubixConfig.java @@ -31,7 +31,7 @@ public void testDefaults() assertRecordedDefaults(recordDefaults(RubixConfig.class) .setBookKeeperServerPort(CacheConfig.DEFAULT_BOOKKEEPER_SERVER_PORT) 
.setDataTransferServerPort(CacheConfig.DEFAULT_DATA_TRANSFER_SERVER_PORT) - .setCacheLocation("/tmp") + .setCacheLocation(null) .setParallelWarmupEnabled(true)); } @@ -40,14 +40,14 @@ public void testExplicitPropertyMappings() { Map properties = new ImmutableMap.Builder() .put("hive.cache.parallel-warmup-enabled", "false") - .put("hive.cache.location", "/etc") + .put("hive.cache.location", "/some-directory") .put("hive.cache.bookkeeper-port", "1234") .put("hive.cache.data-transfer-port", "1235") .build(); RubixConfig expected = new RubixConfig() .setParallelWarmupEnabled(false) - .setCacheLocation("/etc") + .setCacheLocation("/some-directory") .setBookKeeperServerPort(1234) .setDataTransferServerPort(1235); From bdf5f7b90c1d8e3019e4f9367880ea818f3ca95f Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Fri, 3 Apr 2020 21:21:42 +0200 Subject: [PATCH 070/519] Update to Airbase 99 To allow Jackson 2.10.2 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index c0c001478c5c..2608203ea32d 100644 --- a/pom.xml +++ b/pom.xml @@ -5,7 +5,7 @@ io.airlift airbase - 98 + 99 io.prestosql From 2c25a33e06c96a4e9cf43c9f3d1c91cc662e89c0 Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Fri, 3 Apr 2020 20:03:16 +0200 Subject: [PATCH 071/519] Update to Avro 1.9.2 This minor version bump fixes regression issues: https://github.com/apache/avro/releases/tag/release-1.9.2 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 2608203ea32d..7a3beca00f97 100644 --- a/pom.xml +++ b/pom.xml @@ -825,7 +825,7 @@ org.apache.avro avro - 1.9.1 + 1.9.2 From 926fcf1ed49d4b9d8d0853ea3eb31ea46eb309d0 Mon Sep 17 00:00:00 2001 From: Manfred Moser Date: Thu, 26 Mar 2020 12:51:55 -0700 Subject: [PATCH 072/519] Add tip about reverse parameters for date diff --- .../src/main/sphinx/functions/datetime.rst | 2 ++ .../src/main/sphinx/migration/from-hive.rst | 23 +++++++++++++++++++ 2 files changed, 25 
insertions(+) diff --git a/presto-docs/src/main/sphinx/functions/datetime.rst b/presto-docs/src/main/sphinx/functions/datetime.rst index 8b64d401e015..98cb75fbf7d9 100644 --- a/presto-docs/src/main/sphinx/functions/datetime.rst +++ b/presto-docs/src/main/sphinx/functions/datetime.rst @@ -150,6 +150,8 @@ The above examples use the timestamp ``2001-08-22 03:04:05.321`` as the input. Returns ``x`` truncated to ``unit``. +.. _datetime-interval-functions: + Interval Functions ------------------ diff --git a/presto-docs/src/main/sphinx/migration/from-hive.rst b/presto-docs/src/main/sphinx/migration/from-hive.rst index 0c278b0a845d..b6fd4b72346f 100644 --- a/presto-docs/src/main/sphinx/migration/from-hive.rst +++ b/presto-docs/src/main/sphinx/migration/from-hive.rst @@ -109,3 +109,26 @@ Presto query:: SELECT student, score FROM tests CROSS JOIN UNNEST(scores) AS t (score); + +Caution with datediff +--------------------- + +The Hive ``datediff`` function returns the difference between the two dates in +days and is declared as: + +.. code-block:: none + + datediff(string enddate, string startdate) -> integer + +The equivalent Presto function :ref:`date_diff` +uses a reverse order for the two date parameters and requires a unit. 
This has +to be taken into account when migrating: + +Hive query:: + + datediff(enddate, startdate) + +Presto query:: + + date_diff('day', startdate, enddate) + From 58dffcd29a4694e7fb01b6e9e68a617affed7f23 Mon Sep 17 00:00:00 2001 From: eskabetxe Date: Tue, 7 Apr 2020 20:41:05 +0200 Subject: [PATCH 073/519] Add to Elastic connector IpAddress field --- .../main/sphinx/connector/elasticsearch.rst | 1 + .../elasticsearch/ElasticsearchMetadata.java | 14 ++- .../ElasticsearchPageSource.java | 5 ++ .../decoders/IpAddressDecoder.java | 90 +++++++++++++++++++ ...TestElasticsearchIntegrationSmokeTest.java | 48 +++++++--- .../testing/TestingPrestoClient.java | 4 + 6 files changed, 148 insertions(+), 14 deletions(-) create mode 100644 presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/IpAddressDecoder.java diff --git a/presto-docs/src/main/sphinx/connector/elasticsearch.rst b/presto-docs/src/main/sphinx/connector/elasticsearch.rst index d36263a48534..07719a876d5e 100644 --- a/presto-docs/src/main/sphinx/connector/elasticsearch.rst +++ b/presto-docs/src/main/sphinx/connector/elasticsearch.rst @@ -182,6 +182,7 @@ Elasticsearch Presto ``keyword`` ``VARCHAR`` ``text`` ``VARCHAR`` ``date`` ``TIMESTAMP`` +``ip`` ``IPADDRESS`` (all others) (unsupported) ============= ============= diff --git a/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/ElasticsearchMetadata.java b/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/ElasticsearchMetadata.java index c73ccb2ab70b..c9124c1a3e5f 100644 --- a/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/ElasticsearchMetadata.java +++ b/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/ElasticsearchMetadata.java @@ -36,7 +36,10 @@ import io.prestosql.spi.predicate.TupleDomain; import io.prestosql.spi.type.ArrayType; import io.prestosql.spi.type.RowType; +import io.prestosql.spi.type.StandardTypes; import io.prestosql.spi.type.Type; +import io.prestosql.spi.type.TypeManager; 
+import io.prestosql.spi.type.TypeSignature; import javax.inject.Inject; @@ -67,15 +70,17 @@ public class ElasticsearchMetadata private static final String ORIGINAL_NAME = "original-name"; public static final String SUPPORTS_PREDICATES = "supports-predicates"; + private final Type ipAddressType; private final ElasticsearchClient client; private final String schemaName; @Inject - public ElasticsearchMetadata(ElasticsearchClient client, ElasticsearchConfig config) + public ElasticsearchMetadata(TypeManager typeManager, ElasticsearchClient client, ElasticsearchConfig config) { - requireNonNull(config, "config is null"); - + requireNonNull(typeManager, "typeManager is null"); + this.ipAddressType = typeManager.getType(new TypeSignature(StandardTypes.IPADDRESS)); this.client = requireNonNull(client, "client is null"); + requireNonNull(config, "config is null"); this.schemaName = config.getDefaultSchema(); } @@ -195,10 +200,11 @@ private Type toPrestoType(IndexMetadata.Field metaDataField, boolean isArray) return INTEGER; case "long": return BIGINT; - case "string": case "text": case "keyword": return VARCHAR; + case "ip": + return ipAddressType; case "boolean": return BOOLEAN; case "binary": diff --git a/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/ElasticsearchPageSource.java b/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/ElasticsearchPageSource.java index 164e4005daa8..195ec46bdf85 100644 --- a/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/ElasticsearchPageSource.java +++ b/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/ElasticsearchPageSource.java @@ -24,6 +24,7 @@ import io.prestosql.elasticsearch.decoders.DoubleDecoder; import io.prestosql.elasticsearch.decoders.IdColumnDecoder; import io.prestosql.elasticsearch.decoders.IntegerDecoder; +import io.prestosql.elasticsearch.decoders.IpAddressDecoder; import io.prestosql.elasticsearch.decoders.RealDecoder; import 
io.prestosql.elasticsearch.decoders.RowDecoder; import io.prestosql.elasticsearch.decoders.ScoreColumnDecoder; @@ -41,6 +42,7 @@ import io.prestosql.spi.connector.ConnectorSession; import io.prestosql.spi.type.ArrayType; import io.prestosql.spi.type.RowType; +import io.prestosql.spi.type.StandardTypes; import io.prestosql.spi.type.Type; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.search.SearchHit; @@ -316,6 +318,9 @@ private Decoder createDecoder(ConnectorSession session, String path, Type type) if (type.equals(BIGINT)) { return new BigintDecoder(path); } + if (type.getBaseName().equals(StandardTypes.IPADDRESS)) { + return new IpAddressDecoder(path, type); + } if (type instanceof RowType) { RowType rowType = (RowType) type; diff --git a/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/IpAddressDecoder.java b/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/IpAddressDecoder.java new file mode 100644 index 000000000000..e8f4d1f763e0 --- /dev/null +++ b/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/IpAddressDecoder.java @@ -0,0 +1,90 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.elasticsearch.decoders; + +import com.google.common.net.InetAddresses; +import io.airlift.slice.Slice; +import io.airlift.slice.Slices; +import io.prestosql.spi.PrestoException; +import io.prestosql.spi.block.BlockBuilder; +import io.prestosql.spi.type.Type; +import org.elasticsearch.search.SearchHit; + +import java.util.function.Supplier; + +import static io.airlift.slice.Slices.wrappedBuffer; +import static io.prestosql.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR; +import static io.prestosql.spi.StandardErrorCode.INVALID_CAST_ARGUMENT; +import static io.prestosql.spi.StandardErrorCode.TYPE_MISMATCH; +import static java.lang.String.format; +import static java.lang.System.arraycopy; +import static java.util.Objects.requireNonNull; + +public class IpAddressDecoder + implements Decoder +{ + private final String path; + private final Type ipAddressType; + + public IpAddressDecoder(String path, Type type) + { + this.path = requireNonNull(path, "path is null"); + this.ipAddressType = requireNonNull(type, "type is null"); + } + + @Override + public void decode(SearchHit hit, Supplier getter, BlockBuilder output) + { + Object value = getter.get(); + if (value == null) { + output.appendNull(); + } + else if (value instanceof String) { + String address = (String) value; + Slice slice = castToIpAddress(Slices.utf8Slice(address)); + ipAddressType.writeSlice(output, slice); + } + else { + throw new PrestoException(TYPE_MISMATCH, format("Expected a string value for field '%s' of type IP: %s [%s]", path, value, value.getClass().getSimpleName())); + } + } + + // This is a copy of IpAddressOperators.castFromVarcharToIpAddress method + private Slice castToIpAddress(Slice slice) + { + byte[] address; + try { + address = InetAddresses.forString(slice.toStringUtf8()).getAddress(); + } + catch (IllegalArgumentException e) { + throw new PrestoException(INVALID_CAST_ARGUMENT, "Cannot cast value to IPADDRESS: " + slice.toStringUtf8()); + } + + byte[] bytes; + 
if (address.length == 4) { + bytes = new byte[16]; + bytes[10] = (byte) 0xff; + bytes[11] = (byte) 0xff; + arraycopy(address, 0, bytes, 12, 4); + } + else if (address.length == 16) { + bytes = address; + } + else { + throw new PrestoException(GENERIC_INTERNAL_ERROR, "Invalid InetAddress length: " + address.length); + } + + return wrappedBuffer(bytes); + } +} diff --git a/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/TestElasticsearchIntegrationSmokeTest.java b/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/TestElasticsearchIntegrationSmokeTest.java index f9bc6776e5cf..9f8cf01183e0 100644 --- a/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/TestElasticsearchIntegrationSmokeTest.java +++ b/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/TestElasticsearchIntegrationSmokeTest.java @@ -324,7 +324,9 @@ public void testDataTypes() " \"keyword_column\": { \"type\": \"keyword\" }," + " \"text_column\": { \"type\": \"text\" }," + " \"binary_column\": { \"type\": \"binary\" }," + - " \"timestamp_column\": { \"type\": \"date\" }" + + " \"timestamp_column\": { \"type\": \"date\" }," + + " \"ipv4_column\": { \"type\": \"ip\" }," + + " \"ipv6_column\": { \"type\": \"ip\" }" + " }" + " }" + " }" + @@ -342,6 +344,8 @@ public void testDataTypes() .put("text_column", "some text") .put("binary_column", new byte[] {(byte) 0xCA, (byte) 0xFE}) .put("timestamp_column", 0) + .put("ipv4_column", "1.2.3.4") + .put("ipv6_column", "2001:db8:0:0:1:0:0:1") .build()); MaterializedResult rows = computeActual("" + @@ -354,11 +358,14 @@ public void testDataTypes() "keyword_column, " + "text_column, " + "binary_column, " + - "timestamp_column " + + "timestamp_column, " + + "ipv4_column, " + + "ipv6_column " + "FROM types"); MaterializedResult expected = resultBuilder(getSession(), rows.getTypes()) - .row(true, 1.0f, 1.0d, 1, 1L, "cool", "some text", new byte[] {(byte) 0xCA, (byte) 0xFE}, LocalDateTime.of(1970, 1, 1, 0, 0)) + .row(true, 1.0f, 
1.0d, 1, 1L, "cool", "some text", new byte[] {(byte) 0xCA, (byte) 0xFE}, + LocalDateTime.of(1970, 1, 1, 0, 0), "1.2.3.4", "2001:db8::1:0:0:1") .build(); assertEquals(rows.getMaterializedRows(), expected.getMaterializedRows()); @@ -383,7 +390,9 @@ public void testFilters() " \"keyword_column\": { \"type\": \"keyword\" }," + " \"text_column\": { \"type\": \"text\" }," + " \"binary_column\": { \"type\": \"binary\" }," + - " \"timestamp_column\": { \"type\": \"date\" }" + + " \"timestamp_column\": { \"type\": \"date\" }," + + " \"ipv4_column\": { \"type\": \"ip\" }," + + " \"ipv6_column\": { \"type\": \"ip\" }" + " }" + " }" + " }" + @@ -403,6 +412,8 @@ public void testFilters() .put("text_column", "some text") .put("binary_column", new byte[] {(byte) 0xCA, (byte) 0xFE}) .put("timestamp_column", 1569888000000L) + .put("ipv4_column", "1.2.3.4") + .put("ipv6_column", "2001:db8:0:0:1:0:0:1") .build()); // _score column @@ -477,6 +488,10 @@ public void testFilters() assertQuery("SELECT count(*) FROM filter_pushdown WHERE timestamp_column = TIMESTAMP '2019-10-02 00:00:00'", "VALUES 0"); assertQuery("SELECT count(*) FROM filter_pushdown WHERE timestamp_column > TIMESTAMP '2001-01-01 00:00:00'", "VALUES 1"); assertQuery("SELECT count(*) FROM filter_pushdown WHERE timestamp_column < TIMESTAMP '2030-01-01 00:00:00'", "VALUES 1"); + + // ipaddress + assertQuery("SELECT count(*) FROM filter_pushdown WHERE ipv4_column = IPADDRESS '1.2.3.4'", "VALUES 1"); + assertQuery("SELECT count(*) FROM filter_pushdown WHERE ipv6_column = IPADDRESS '2001:db8::1:0:0:1'", "VALUES 1"); } @Test @@ -514,7 +529,9 @@ public void testDataTypesNested() " \"keyword_column\": { \"type\": \"keyword\" }," + " \"text_column\": { \"type\": \"text\" }," + " \"binary_column\": { \"type\": \"binary\" }," + - " \"timestamp_column\": { \"type\": \"date\" }" + + " \"timestamp_column\": { \"type\": \"date\" }," + + " \"ipv4_column\": { \"type\": \"ip\" }," + + " \"ipv6_column\": { \"type\": \"ip\" }" + " }" + " }" + 
" }" + @@ -536,6 +553,8 @@ public void testDataTypesNested() .put("text_column", "some text") .put("binary_column", new byte[] {(byte) 0xCA, (byte) 0xFE}) .put("timestamp_column", 0) + .put("ipv4_column", "1.2.3.4") + .put("ipv6_column", "2001:db8:0:0:1:0:0:1") .build())); MaterializedResult rows = computeActual("" + @@ -548,11 +567,14 @@ public void testDataTypesNested() "field.keyword_column, " + "field.text_column, " + "field.binary_column, " + - "field.timestamp_column " + + "field.timestamp_column, " + + "field.ipv4_column, " + + "field.ipv6_column " + "FROM types_nested"); MaterializedResult expected = resultBuilder(getSession(), rows.getTypes()) - .row(true, 1.0f, 1.0d, 1, 1L, "cool", "some text", new byte[] {(byte) 0xCA, (byte) 0xFE}, LocalDateTime.of(1970, 1, 1, 0, 0)) + .row(true, 1.0f, 1.0d, 1, 1L, "cool", "some text", new byte[] {(byte) 0xCA, (byte) 0xFE}, + LocalDateTime.of(1970, 1, 1, 0, 0), "1.2.3.4", "2001:db8::1:0:0:1") .build(); assertEquals(rows.getMaterializedRows(), expected.getMaterializedRows()); @@ -580,7 +602,9 @@ public void testNestedTypeDataTypesNested() " \"keyword_column\": { \"type\": \"keyword\" }," + " \"text_column\": { \"type\": \"text\" }," + " \"binary_column\": { \"type\": \"binary\" }," + - " \"timestamp_column\": { \"type\": \"date\" }" + + " \"timestamp_column\": { \"type\": \"date\" }," + + " \"ipv4_column\": { \"type\": \"ip\" }," + + " \"ipv6_column\": { \"type\": \"ip\" }" + " }" + " }" + " }" + @@ -602,6 +626,8 @@ public void testNestedTypeDataTypesNested() .put("text_column", "some text") .put("binary_column", new byte[] {(byte) 0xCA, (byte) 0xFE}) .put("timestamp_column", 0) + .put("ipv4_column", "1.2.3.4") + .put("ipv6_column", "2001:db8:0:0:1:0:0:1") .build())); MaterializedResult rows = computeActual("" + @@ -614,12 +640,14 @@ public void testNestedTypeDataTypesNested() "nested_field.keyword_column, " + "nested_field.text_column, " + "nested_field.binary_column, " + - "nested_field.timestamp_column " + + 
"nested_field.timestamp_column, " + + "nested_field.ipv4_column, " + + "nested_field.ipv6_column " + "FROM nested_type_nested"); MaterializedResult expected = resultBuilder(getSession(), rows.getTypes()) .row(true, 1.0f, 1.0d, 1, 1L, "cool", "some text", new byte[] {(byte) 0xCA, (byte) 0xFE}, - LocalDateTime.of(1970, 1, 1, 0, 0)) + LocalDateTime.of(1970, 1, 1, 0, 0), "1.2.3.4", "2001:db8::1:0:0:1") .build(); assertEquals(rows.getMaterializedRows(), expected.getMaterializedRows()); diff --git a/presto-testing/src/main/java/io/prestosql/testing/TestingPrestoClient.java b/presto-testing/src/main/java/io/prestosql/testing/TestingPrestoClient.java index 0855c557e869..64c1845f86cb 100644 --- a/presto-testing/src/main/java/io/prestosql/testing/TestingPrestoClient.java +++ b/presto-testing/src/main/java/io/prestosql/testing/TestingPrestoClient.java @@ -70,6 +70,7 @@ import static io.prestosql.testing.MaterializedResult.DEFAULT_PRECISION; import static io.prestosql.type.IntervalDayTimeType.INTERVAL_DAY_TIME; import static io.prestosql.type.IntervalYearMonthType.INTERVAL_YEAR_MONTH; +import static io.prestosql.type.IpAddressType.IPADDRESS; import static io.prestosql.type.JsonType.JSON; import static io.prestosql.type.UuidType.UUID; import static io.prestosql.util.MoreLists.mappedCopy; @@ -195,6 +196,9 @@ else if (REAL.equals(type)) { else if (UUID.equals(type)) { return java.util.UUID.fromString((String) value); } + else if (IPADDRESS.equals(type)) { + return value; + } else if (type instanceof VarcharType) { return value; } From 083bd113908d1c478f4d7b4dbd0c273667be6e85 Mon Sep 17 00:00:00 2001 From: Manfred Moser Date: Thu, 26 Mar 2020 09:42:05 -0700 Subject: [PATCH 074/519] Add WebUi properties and info about login page --- .../src/main/sphinx/admin/properties.rst | 33 +++++++++++++++++++ .../src/main/sphinx/admin/web-interface.rst | 15 +++++++-- 2 files changed, 46 insertions(+), 2 deletions(-) diff --git a/presto-docs/src/main/sphinx/admin/properties.rst 
b/presto-docs/src/main/sphinx/admin/properties.rst index 3ddad6e9808b..20a40d8c4aba 100644 --- a/presto-docs/src/main/sphinx/admin/properties.rst +++ b/presto-docs/src/main/sphinx/admin/properties.rst @@ -836,4 +836,37 @@ Logging Properties The maximum file size for the log file of the HTTP server. +.. _web-ui-properties: +Web UI Properties +----------------- + +The following properties can be used to configure the :doc:`./web-interface`. + +``web-ui.enabled`` +^^^^^^^^^^^^^^^^^^ + + * **Type:** ``boolean`` + * **Default value:** ``true`` + + This property controls whether or not the Web UI is available. + +``web-ui.shared-secret`` +^^^^^^^^^^^^^^^^^^^^^^^^ + + * **Type:** ``string`` + * **Default value:** randomly generated unless set + + The shared secret is used to generate authentication cookies for users of + the Web UI. If not set to a static value, any coordinator restart generates + a new random value, which in turn invalidates the session of any currently + logged in Web UI user. + +web-ui.session-timeout +^^^^^^^^^^^^^^^^^^^^^^ + + * **Type:** ``duration`` + * **Default value:** ``1 day`` + + The duration how long a user can be logged into the Web UI, before the + session times out, which forces an automatic log-out. diff --git a/presto-docs/src/main/sphinx/admin/web-interface.rst b/presto-docs/src/main/sphinx/admin/web-interface.rst index 523405b98dc2..65aa8b738949 100644 --- a/presto-docs/src/main/sphinx/admin/web-interface.rst +++ b/presto-docs/src/main/sphinx/admin/web-interface.rst @@ -3,9 +3,20 @@ Web UI ====== Presto provides a web-based user interface (UI) for monitoring a Presto cluster -and managing queries. The web UI is accessible on the Presto coordinator via +and managing queries. The Web UI is accessible on the coordinator via HTTP/HTTPS, using the corresponding port number specified in the coordinator -:ref:`config_properties`. +:ref:`config_properties`. It can be configured with :ref:`specific related +properties `. 
+ +The Web UI requires users to log in. If Presto is not configured to require +authentication, then any username can be used, and no password is required or +allowed. Typically, users should login with the same username that they use for +running queries. + +When the server is configured to use HTTPS, a :doc:`password authenticator +` such as :doc:`LDAP ` or +:doc:`password file ` must be configured in order to +use the Web UI. The main page has a list of queries along with information like unique query ID, query text, query state, percentage completed, username and source from which this query originated. From 8b722697e79ce29b7c78b9465422e165bd3afa21 Mon Sep 17 00:00:00 2001 From: Manfred Moser Date: Mon, 16 Mar 2020 12:34:10 -0700 Subject: [PATCH 075/519] Add note about parallel reads from topics --- presto-docs/src/main/sphinx/connector/kafka.rst | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/presto-docs/src/main/sphinx/connector/kafka.rst b/presto-docs/src/main/sphinx/connector/kafka.rst index 7d8644eb301a..70d3945fb2df 100644 --- a/presto-docs/src/main/sphinx/connector/kafka.rst +++ b/presto-docs/src/main/sphinx/connector/kafka.rst @@ -11,13 +11,17 @@ Overview -------- This connector allows the use of `Apache Kafka `_ -topics as tables in Presto. -Each message is presented as a row in Presto. +topics as tables in Presto. Each message is presented as a row in Presto. Topics can be live. Rows appear as data arrives, and disappear as segments get dropped. This can result in strange behavior if accessing the same table multiple times in a single query (e.g., performing a self join). +The connector reads message data from Kafka topics in parallel across workers to +achieve a significant performance gain. The size of data sets for this +parallelization is configurable and can therefore be adapted to your specific +needs. + .. note:: The minimum supported Kafka broker version is 0.10.0. 
@@ -59,7 +63,7 @@ Property Name Description ``kafka.buffer-size`` Kafka read buffer size ``kafka.table-description-dir`` Directory containing topic description files ``kafka.hide-internal-columns`` Controls whether internal columns are part of the table schema or not -``kafka.messages-per-split`` Number of messages that will be processed by single Presto split +``kafka.messages-per-split`` Number of messages that are processed by each Presto split, defaults to 100000 =============================== ============================================================== ``kafka.table-names`` From 42789d8efef6d39fd860799182bdaedadf110d76 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Tue, 7 Apr 2020 10:09:44 +0200 Subject: [PATCH 076/519] Merge CI suites into one They were split to make rerunning easier, but rerunning does not work. Merge them back to make cancelling jobs easier. --- .github/workflows/checks.yml | 62 ---------- .../{module-tests.yml => ci-tests.yml} | 113 +++++++++++++++++- .github/workflows/product-tests.yml | 61 ---------- 3 files changed, 109 insertions(+), 127 deletions(-) delete mode 100644 .github/workflows/checks.yml rename .github/workflows/{module-tests.yml => ci-tests.yml} (53%) delete mode 100644 .github/workflows/product-tests.yml diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml deleted file mode 100644 index 6ec1772d25b3..000000000000 --- a/.github/workflows/checks.yml +++ /dev/null @@ -1,62 +0,0 @@ -name: checks - -on: [push, pull_request] - -env: - # maven.wagon.rto is in millis, defaults to 30m - MAVEN_OPTS: "-Xmx512M -XX:+ExitOnOutOfMemoryError -Dmaven.wagon.rto=60000" - MAVEN_INSTALL_OPTS: "-Xmx2G -XX:+ExitOnOutOfMemoryError -Dmaven.wagon.rto=60000" - MAVEN_FAST_INSTALL: "-B -V --quiet -T C1 -DskipTests -Dair.check.skip-all" - -jobs: - maven-checks: - runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - java-version: [ - 1.8, - 11, - 13 - ] - steps: - - uses: actions/checkout@v2 - - uses: 
actions/setup-java@v1 - with: - java-version: ${{ matrix.java-version }} - - name: Maven Checks - run: | - export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" - ./bin/retry ./mvnw install -B -V -T C1 -DskipTests -P ci -pl '!presto-server-rpm' - - name: Test Server RPM - run: | - export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" - ./bin/retry ./mvnw verify -B -P ci -pl presto-server-rpm - - name: Free Disk Space - run: ./mvnw clean -pl '!presto-server,!presto-cli' - - name: Test Docker Image - run: docker/build-local.sh - - error-prone-checks: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-java@v1 - with: - java-version: 11 - - name: Maven Install - run: | - export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" - ./bin/retry ./mvnw install ${MAVEN_FAST_INSTALL} -pl '!presto-docs,!presto-server,!presto-server-rpm' - - name: Error Prone Checks - run: | - export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" - ./mvnw -B -T C1 clean test-compile -Dair.check.skip-all -P errorprone-compiler-presto \ - -pl '!presto-docs,!presto-server,!presto-server-rpm' - - web-ui-checks: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - name: Web UI Checks - run: presto-main/bin/check_webui.sh diff --git a/.github/workflows/module-tests.yml b/.github/workflows/ci-tests.yml similarity index 53% rename from .github/workflows/module-tests.yml rename to .github/workflows/ci-tests.yml index fc03212f9c8b..86b1ea48e5e1 100644 --- a/.github/workflows/module-tests.yml +++ b/.github/workflows/ci-tests.yml @@ -1,13 +1,66 @@ -name: module-tests +name: ci-tests on: [push, pull_request] env: - MAVEN_OPTS: "-Xmx512M -XX:+ExitOnOutOfMemoryError" - MAVEN_INSTALL_OPTS: "-Xmx2G -XX:+ExitOnOutOfMemoryError" + # maven.wagon.rto is in millis, defaults to 30m + MAVEN_OPTS: "-Xmx512M -XX:+ExitOnOutOfMemoryError -Dmaven.wagon.rto=60000" + MAVEN_INSTALL_OPTS: "-Xmx2G -XX:+ExitOnOutOfMemoryError -Dmaven.wagon.rto=60000" MAVEN_FAST_INSTALL: "-B -V --quiet -T C1 -DskipTests -Dair.check.skip-all" 
jobs: + maven-checks: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + java-version: [ + 1.8, + 11, + 13 + ] + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-java@v1 + with: + java-version: ${{ matrix.java-version }} + - name: Maven Checks + run: | + export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" + ./bin/retry ./mvnw install -B -V -T C1 -DskipTests -P ci -pl '!presto-server-rpm' + - name: Test Server RPM + run: | + export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" + ./bin/retry ./mvnw verify -B -P ci -pl presto-server-rpm + - name: Free Disk Space + run: ./mvnw clean -pl '!presto-server,!presto-cli' + - name: Test Docker Image + run: docker/build-local.sh + + error-prone-checks: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-java@v1 + with: + java-version: 11 + - name: Maven Install + run: | + export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" + ./bin/retry ./mvnw install ${MAVEN_FAST_INSTALL} -pl '!presto-docs,!presto-server,!presto-server-rpm' + - name: Error Prone Checks + run: | + export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" + ./mvnw -B -T C1 clean test-compile -Dair.check.skip-all -P errorprone-compiler-presto \ + -pl '!presto-docs,!presto-server,!presto-server-rpm' + + web-ui-checks: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Web UI Checks + run: presto-main/bin/check_webui.sh + hive-tests: runs-on: ubuntu-latest strategy: @@ -83,7 +136,7 @@ jobs: !presto-docs,!presto-server,!presto-server-rpm, !presto-kudu' - x: + test: runs-on: ubuntu-latest strategy: fail-fast: false @@ -114,3 +167,55 @@ jobs: ./bin/retry ./mvnw install ${MAVEN_FAST_INSTALL} -am -pl $(echo '${{ matrix.modules }}' | cut -d' ' -f1) - name: Maven Tests run: ./mvnw test -B -Dair.check.skip-all -pl ${{ matrix.modules }} + + pt: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + config: [ + config-empty, + config-hdp3, + config-cdh5, + ] + suite: [ + suite-1, + suite-2, + suite-3, + # suite-4 does 
not exist + suite-5, + suite-6-non-generic, + suite-7-non-generic, + suite-8-non-generic, + ] + exclude: + - config: config-hdp3 + suite: suite-6-non-generic + - config: config-hdp3 + suite: suite-7-non-generic + - config: config-hdp3 + suite: suite-8-non-generic + - config: config-cdh5 + suite: suite-6-non-generic + - config: config-cdh5 + suite: suite-7-non-generic + - config: config-cdh5 + suite: suite-8-non-generic + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-java@v1 + with: + java-version: 8 + - name: Maven Install + run: | + export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" + ./bin/retry ./mvnw install ${MAVEN_FAST_INSTALL} -pl '!presto-docs,!presto-server-rpm' + - name: Free Disk Space + run: | + docker image prune -af + sudo apt-get clean + rm -rf ~/.m2/repository + - name: Product Tests + run: | + source presto-product-tests/conf/product-tests-${{ matrix.config }}.sh && + presto-product-tests/bin/product-tests-${{ matrix.suite }}.sh diff --git a/.github/workflows/product-tests.yml b/.github/workflows/product-tests.yml deleted file mode 100644 index fc5007e27551..000000000000 --- a/.github/workflows/product-tests.yml +++ /dev/null @@ -1,61 +0,0 @@ -name: product-tests - -on: [push, pull_request] - -env: - MAVEN_OPTS: "-Xmx512M -XX:+ExitOnOutOfMemoryError" - MAVEN_INSTALL_OPTS: "-Xmx2G -XX:+ExitOnOutOfMemoryError" - MAVEN_FAST_INSTALL: "-B -V --quiet -T C1 -DskipTests -Dair.check.skip-all" - -jobs: - x: - runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - config: [ - config-empty, - config-hdp3, - config-cdh5, - ] - suite: [ - suite-1, - suite-2, - suite-3, - # suite-4 does not exist - suite-5, - suite-6-non-generic, - suite-7-non-generic, - suite-8-non-generic, - ] - exclude: - - config: config-hdp3 - suite: suite-6-non-generic - - config: config-hdp3 - suite: suite-7-non-generic - - config: config-hdp3 - suite: suite-8-non-generic - - config: config-cdh5 - suite: suite-6-non-generic - - config: config-cdh5 - suite: 
suite-7-non-generic - - config: config-cdh5 - suite: suite-8-non-generic - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-java@v1 - with: - java-version: 8 - - name: Maven Install - run: | - export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" - ./bin/retry ./mvnw install ${MAVEN_FAST_INSTALL} -pl '!presto-docs,!presto-server-rpm' - - name: Free Disk Space - run: | - docker image prune -af - sudo apt-get clean - rm -rf ~/.m2/repository - - name: Product Tests - run: | - source presto-product-tests/conf/product-tests-${{ matrix.config }}.sh && - presto-product-tests/bin/product-tests-${{ matrix.suite }}.sh From 6cdba2590e9115a076ffe10cd8faae0d584414e9 Mon Sep 17 00:00:00 2001 From: Mateusz Gajewski Date: Tue, 7 Apr 2020 11:39:57 +0200 Subject: [PATCH 077/519] Disallow reading from Delta Lake tables in hive connector --- .../prestosql/plugin/hive/HiveMetadata.java | 12 ++++++ .../tests/hive/TestHiveDeltaLakeTable.java | 38 +++++++++++++++++++ 2 files changed, 50 insertions(+) create mode 100644 presto-product-tests/src/main/java/io/prestosql/tests/hive/TestHiveDeltaLakeTable.java diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveMetadata.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveMetadata.java index b7b8cd072205..95f3fa959b0a 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveMetadata.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveMetadata.java @@ -257,6 +257,8 @@ public class HiveMetadata private static final String TEXT_FIELD_SEPARATOR_ESCAPE_KEY = serdeConstants.ESCAPE_CHAR; public static final String AVRO_SCHEMA_URL_KEY = "avro.schema.url"; + public static final String SPARK_TABLE_PROVIDER_KEY = "spark.sql.sources.provider"; + public static final String DELTA_LAKE_PROVIDER = "delta"; private static final String CSV_SEPARATOR_KEY = OpenCSVSerde.SEPARATORCHAR; private static final String CSV_QUOTE_KEY = OpenCSVSerde.QUOTECHAR; @@ -341,6 +343,10 @@ public HiveTableHandle 
getTableHandle(ConnectorSession session, SchemaTableName return null; } + if (isDeltaLakeTable(table.get())) { + throw new PrestoException(HIVE_UNSUPPORTED_FORMAT, "Cannot query Delta Lake table"); + } + // we must not allow system tables due to how permissions are checked in SystemTableAwareAccessControl if (getSourceTableNameFromSystemTable(tableName).isPresent()) { throw new PrestoException(HIVE_INVALID_METADATA, "Unexpected table present in Hive metastore: " + tableName); @@ -2296,6 +2302,12 @@ private static void validateBucketColumns(ConnectorTableMetadata tableMetadata) } } + private static boolean isDeltaLakeTable(Table table) + { + return table.getParameters().containsKey(SPARK_TABLE_PROVIDER_KEY) + && table.getParameters().get(SPARK_TABLE_PROVIDER_KEY).toLowerCase(ENGLISH).equals(DELTA_LAKE_PROVIDER); + } + private static void validatePartitionColumns(ConnectorTableMetadata tableMetadata) { List partitionedBy = getPartitionedBy(tableMetadata.getProperties()); diff --git a/presto-product-tests/src/main/java/io/prestosql/tests/hive/TestHiveDeltaLakeTable.java b/presto-product-tests/src/main/java/io/prestosql/tests/hive/TestHiveDeltaLakeTable.java new file mode 100644 index 000000000000..c283fd25ed8a --- /dev/null +++ b/presto-product-tests/src/main/java/io/prestosql/tests/hive/TestHiveDeltaLakeTable.java @@ -0,0 +1,38 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.tests.hive; + +import org.testng.annotations.Test; + +import static io.prestosql.tempto.assertions.QueryAssert.assertThat; +import static io.prestosql.tests.utils.QueryExecutors.onHive; +import static io.prestosql.tests.utils.QueryExecutors.onPresto; + +public class TestHiveDeltaLakeTable + extends HiveProductTest +{ + @Test + public void testReadDeltaLakeTable() + { + onHive().executeQuery("DROP TABLE IF EXISTS test_delta_lake_table"); + + onHive().executeQuery("" + + "CREATE TABLE test_delta_lake_table (ignored int) " + + "TBLPROPERTIES ('spark.sql.sources.provider'='DELTA')"); + + assertThat(() -> onPresto().executeQuery("SELECT * FROM test_delta_lake_table")).failsWithMessage("Cannot query Delta Lake table"); + + onHive().executeQuery("DROP TABLE test_delta_lake_table"); + } +} From b9901a58dbda3a202e7ba1ca127484fa1a387101 Mon Sep 17 00:00:00 2001 From: Martin Traverso Date: Mon, 6 Apr 2020 10:13:31 -0700 Subject: [PATCH 078/519] Allow table metadata with no columns Since the introduction of the applyXXX methods, a TableScan represents a subquery, not just a raw table. It is possible that such a subquery might have no columns due to a projection being applied by the optimizer. 
--- .../src/main/java/io/prestosql/metadata/MetadataManager.java | 3 --- 1 file changed, 3 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/metadata/MetadataManager.java b/presto-main/src/main/java/io/prestosql/metadata/MetadataManager.java index f66868f167c7..49a94ae7f494 100644 --- a/presto-main/src/main/java/io/prestosql/metadata/MetadataManager.java +++ b/presto-main/src/main/java/io/prestosql/metadata/MetadataManager.java @@ -480,9 +480,6 @@ public TableMetadata getTableMetadata(Session session, TableHandle tableHandle) CatalogName catalogName = tableHandle.getCatalogName(); ConnectorMetadata metadata = getMetadata(session, catalogName); ConnectorTableMetadata tableMetadata = metadata.getTableMetadata(session.toConnectorSession(catalogName), tableHandle.getConnectorHandle()); - if (tableMetadata.getColumns().isEmpty()) { - throw new PrestoException(NOT_SUPPORTED, "Table has no columns: " + tableHandle); - } return new TableMetadata(catalogName, tableMetadata); } From ffd54c918ba7408606687eab5c313435cd98e0a1 Mon Sep 17 00:00:00 2001 From: Martin Traverso Date: Mon, 6 Apr 2020 15:29:40 -0700 Subject: [PATCH 079/519] Implement applyProjection for JDBC connectors --- .../plugin/jdbc/CachingJdbcClient.java | 33 +++++++------ .../prestosql/plugin/jdbc/JdbcMetadata.java | 46 +++++++++++++++++++ .../plugin/jdbc/JdbcRecordSetProvider.java | 8 ++++ .../plugin/jdbc/JdbcTableHandle.java | 16 ++++++- .../jdbc/TestJdbcRecordSetProvider.java | 2 + 5 files changed, 87 insertions(+), 18 deletions(-) diff --git a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/CachingJdbcClient.java b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/CachingJdbcClient.java index 8756c3b45d69..f5035504bb38 100644 --- a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/CachingJdbcClient.java +++ b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/CachingJdbcClient.java @@ -100,7 +100,11 @@ public List getTableNames(JdbcIdentity identity, Optional 
getColumns(ConnectorSession session, JdbcTableHandle tableHandle) { - ColumnsCacheKey key = new ColumnsCacheKey(JdbcIdentity.from(session), tableHandle); + if (tableHandle.getColumns().isPresent()) { + return tableHandle.getColumns().get(); + } + + ColumnsCacheKey key = new ColumnsCacheKey(JdbcIdentity.from(session), tableHandle.getSchemaTableName()); List columns = columnsCache.getIfPresent(key); if (columns != null) { return columns; @@ -254,21 +258,21 @@ public void dropSchema(JdbcIdentity identity, String schemaName) public void addColumn(ConnectorSession session, JdbcTableHandle handle, ColumnMetadata column) { delegate.addColumn(session, handle, column); - invalidateColumnsCache(JdbcIdentity.from(session), handle); + invalidateColumnsCache(JdbcIdentity.from(session), handle.getSchemaTableName()); } @Override public void dropColumn(JdbcIdentity identity, JdbcTableHandle handle, JdbcColumnHandle column) { delegate.dropColumn(identity, handle, column); - invalidateColumnsCache(identity, handle); + invalidateColumnsCache(identity, handle.getSchemaTableName()); } @Override public void renameColumn(JdbcIdentity identity, JdbcTableHandle handle, JdbcColumnHandle jdbcColumn, String newColumnName) { delegate.renameColumn(identity, handle, jdbcColumn, newColumnName); - invalidateColumnsCache(identity, handle); + invalidateColumnsCache(identity, handle.getSchemaTableName()); } @Override @@ -309,20 +313,20 @@ private void invalidateTablesCaches() tableNamesCache.invalidateAll(); } - private void invalidateColumnsCache(JdbcIdentity identity, JdbcTableHandle handle) + private void invalidateColumnsCache(JdbcIdentity identity, SchemaTableName table) { - columnsCache.invalidate(new ColumnsCacheKey(identity, handle)); + columnsCache.invalidate(new ColumnsCacheKey(identity, table)); } private static final class ColumnsCacheKey { private final JdbcIdentity identity; - private final JdbcTableHandle tableHandle; + private final SchemaTableName table; - private 
ColumnsCacheKey(JdbcIdentity identity, JdbcTableHandle tableHandle) + private ColumnsCacheKey(JdbcIdentity identity, SchemaTableName table) { this.identity = requireNonNull(identity, "identity is null"); - this.tableHandle = requireNonNull(tableHandle, "schema is null"); + this.table = requireNonNull(table, "table is null"); } public JdbcIdentity getIdentity() @@ -330,11 +334,6 @@ public JdbcIdentity getIdentity() return identity; } - public JdbcTableHandle getTableHandle() - { - return tableHandle; - } - @Override public boolean equals(Object o) { @@ -346,13 +345,13 @@ public boolean equals(Object o) } ColumnsCacheKey that = (ColumnsCacheKey) o; return Objects.equals(identity, that.identity) && - Objects.equals(tableHandle, that.tableHandle); + Objects.equals(table, that.table); } @Override public int hashCode() { - return Objects.hash(identity, tableHandle); + return Objects.hash(identity, table); } @Override @@ -360,7 +359,7 @@ public String toString() { return toStringHelper(this) .add("identity", identity) - .add("tableHandle", tableHandle) + .add("table", table) .toString(); } } diff --git a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/JdbcMetadata.java b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/JdbcMetadata.java index 20bc2bd57933..2880e53b1571 100644 --- a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/JdbcMetadata.java +++ b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/JdbcMetadata.java @@ -15,6 +15,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; import io.airlift.slice.Slice; import io.prestosql.spi.PrestoException; import io.prestosql.spi.connector.ColumnHandle; @@ -31,10 +32,13 @@ import io.prestosql.spi.connector.Constraint; import io.prestosql.spi.connector.ConstraintApplicationResult; import io.prestosql.spi.connector.LimitApplicationResult; +import io.prestosql.spi.connector.ProjectionApplicationResult; 
+import io.prestosql.spi.connector.ProjectionApplicationResult.Assignment; import io.prestosql.spi.connector.SchemaTableName; import io.prestosql.spi.connector.SchemaTablePrefix; import io.prestosql.spi.connector.SystemTable; import io.prestosql.spi.connector.TableNotFoundException; +import io.prestosql.spi.expression.ConnectorExpression; import io.prestosql.spi.predicate.TupleDomain; import io.prestosql.spi.security.PrestoPrincipal; import io.prestosql.spi.statistics.ComputedStatistics; @@ -107,12 +111,48 @@ public Optional> applyFilter(C handle.getCatalogName(), handle.getSchemaName(), handle.getTableName(), + handle.getColumns(), newDomain, handle.getLimit()); return Optional.of(new ConstraintApplicationResult<>(handle, constraint.getSummary())); } + @Override + public Optional> applyProjection( + ConnectorSession session, + ConnectorTableHandle table, + List projections, + Map assignments) + { + JdbcTableHandle handle = (JdbcTableHandle) table; + + List newColumns = assignments.values().stream() + .map(JdbcColumnHandle.class::cast) + .collect(toImmutableList()); + + if (handle.getColumns().isPresent() && containSameElements(newColumns, handle.getColumns().get())) { + return Optional.empty(); + } + + return Optional.of(new ProjectionApplicationResult<>( + new JdbcTableHandle( + handle.getSchemaTableName(), + handle.getCatalogName(), + handle.getSchemaName(), + handle.getTableName(), + Optional.of(newColumns), + handle.getConstraint(), + handle.getLimit()), + projections, + assignments.entrySet().stream() + .map(assignment -> new Assignment( + assignment.getKey(), + assignment.getValue(), + ((JdbcColumnHandle) assignment.getValue()).getColumnType())) + .collect(toImmutableList()))); + } + @Override public Optional> applyLimit(ConnectorSession session, ConnectorTableHandle table, long limit) { @@ -131,6 +171,7 @@ public Optional> applyLimit(Connect handle.getCatalogName(), handle.getSchemaName(), handle.getTableName(), + handle.getColumns(), 
handle.getConstraint(), OptionalLong.of(limit)); @@ -322,4 +363,9 @@ public void dropSchema(ConnectorSession session, String schemaName) { jdbcClient.dropSchema(JdbcIdentity.from(session), schemaName); } + + private static boolean containSameElements(Iterable first, Iterable second) + { + return ImmutableSet.copyOf(first).equals(ImmutableSet.copyOf(second)); + } } diff --git a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/JdbcRecordSetProvider.java b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/JdbcRecordSetProvider.java index 8d9d96aaadc1..d731f52390f4 100644 --- a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/JdbcRecordSetProvider.java +++ b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/JdbcRecordSetProvider.java @@ -26,6 +26,7 @@ import java.util.List; +import static com.google.common.base.Verify.verify; import static java.util.Objects.requireNonNull; public class JdbcRecordSetProvider @@ -45,6 +46,13 @@ public RecordSet getRecordSet(ConnectorTransactionHandle transaction, ConnectorS JdbcSplit jdbcSplit = (JdbcSplit) split; JdbcTableHandle jdbcTable = (JdbcTableHandle) table; + // In the current API, the columns (and order) needed by the engine are provided via an argument to this method. Make sure that + // any columns that were recorded in the table handle match the requested set. + // If no columns are recorded, it means that applyProjection never got called (e.g., in the case all columns are being used) and all + // table columns should be returned. 
TODO: this is something that should be addressed once the getRecordSet API is revamped + jdbcTable.getColumns() + .ifPresent(tableColumns -> verify(columns.equals(tableColumns))); + ImmutableList.Builder handles = ImmutableList.builder(); for (ColumnHandle handle : columns) { handles.add((JdbcColumnHandle) handle); diff --git a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/JdbcTableHandle.java b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/JdbcTableHandle.java index 533a318e4ba9..f8bb0428081f 100644 --- a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/JdbcTableHandle.java +++ b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/JdbcTableHandle.java @@ -16,6 +16,7 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Joiner; +import com.google.common.collect.ImmutableList; import io.prestosql.spi.connector.ColumnHandle; import io.prestosql.spi.connector.ConnectorTableHandle; import io.prestosql.spi.connector.SchemaTableName; @@ -23,7 +24,9 @@ import javax.annotation.Nullable; +import java.util.List; import java.util.Objects; +import java.util.Optional; import java.util.OptionalLong; import static java.util.Objects.requireNonNull; @@ -37,12 +40,13 @@ public final class JdbcTableHandle private final String catalogName; private final String schemaName; private final String tableName; + private final Optional> columns; private final TupleDomain constraint; private final OptionalLong limit; public JdbcTableHandle(SchemaTableName schemaTableName, @Nullable String catalogName, @Nullable String schemaName, String tableName) { - this(schemaTableName, catalogName, schemaName, tableName, TupleDomain.all(), OptionalLong.empty()); + this(schemaTableName, catalogName, schemaName, tableName, Optional.empty(), TupleDomain.all(), OptionalLong.empty()); } @JsonCreator @@ -51,6 +55,7 @@ public JdbcTableHandle( @JsonProperty("catalogName") @Nullable String 
catalogName, @JsonProperty("schemaName") @Nullable String schemaName, @JsonProperty("tableName") String tableName, + @JsonProperty("columns") Optional> columns, @JsonProperty("constraint") TupleDomain constraint, @JsonProperty("limit") OptionalLong limit) { @@ -58,6 +63,8 @@ public JdbcTableHandle( this.catalogName = catalogName; this.schemaName = schemaName; this.tableName = requireNonNull(tableName, "tableName is null"); + requireNonNull(columns, "columns is null"); + this.columns = columns.map(ImmutableList::copyOf); this.constraint = requireNonNull(constraint, "constraint is null"); this.limit = requireNonNull(limit, "limit is null"); } @@ -88,6 +95,12 @@ public String getTableName() return tableName; } + @JsonProperty + public Optional> getColumns() + { + return columns; + } + @JsonProperty public TupleDomain getConstraint() { @@ -125,6 +138,7 @@ public String toString() StringBuilder builder = new StringBuilder(); builder.append(schemaTableName).append(" "); Joiner.on(".").skipNulls().appendTo(builder, catalogName, schemaName, tableName); + columns.ifPresent(value -> builder.append(" columns=").append(value)); limit.ifPresent(value -> builder.append(" limit=").append(value)); return builder.toString(); } diff --git a/presto-base-jdbc/src/test/java/io/prestosql/plugin/jdbc/TestJdbcRecordSetProvider.java b/presto-base-jdbc/src/test/java/io/prestosql/plugin/jdbc/TestJdbcRecordSetProvider.java index df83fcdb69ba..b36c85bfb0b8 100644 --- a/presto-base-jdbc/src/test/java/io/prestosql/plugin/jdbc/TestJdbcRecordSetProvider.java +++ b/presto-base-jdbc/src/test/java/io/prestosql/plugin/jdbc/TestJdbcRecordSetProvider.java @@ -32,6 +32,7 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.OptionalLong; import static com.google.common.collect.Iterables.getOnlyElement; @@ -184,6 +185,7 @@ private RecordCursor getCursor(JdbcTableHandle jdbcTableHandle, List Date: Mon, 6 Apr 2020 00:21:40 -0700 Subject: 
[PATCH 080/519] Allow using configured S3 credentials with IAM role Previously, IAM roles could only be used with the default provider chain (typically, EC2 instance credentials). --- .../plugin/hive/s3/PrestoS3FileSystem.java | 53 +++++++++++------- .../hive/s3/TestPrestoS3FileSystem.java | 56 +++++++++++++++++-- 2 files changed, 83 insertions(+), 26 deletions(-) diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/PrestoS3FileSystem.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/PrestoS3FileSystem.java index 191530562968..c44982771e7a 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/PrestoS3FileSystem.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/PrestoS3FileSystem.java @@ -59,6 +59,7 @@ import com.amazonaws.services.s3.transfer.TransferManagerBuilder; import com.amazonaws.services.s3.transfer.Upload; import com.google.common.annotations.VisibleForTesting; +import com.google.common.base.Splitter; import com.google.common.collect.AbstractSequentialIterator; import com.google.common.collect.Iterators; import com.google.common.io.Closer; @@ -794,23 +795,31 @@ private static Optional createEncryptionMaterialsPr private AWSCredentialsProvider createAwsCredentialsProvider(URI uri, Configuration conf) { - Optional credentials = getAwsCredentials(uri, conf); + // credentials embedded in the URI take precedence and are used alone + Optional credentials = getEmbeddedAwsCredentials(uri); if (credentials.isPresent()) { return new AWSStaticCredentialsProvider(credentials.get()); } - if (iamRole != null) { - return new STSAssumeRoleSessionCredentialsProvider.Builder(this.iamRole, "presto-session") - .withExternalId(this.externalId) - .build(); - } - + // a custom credential provider is also used alone String providerClass = conf.get(S3_CREDENTIALS_PROVIDER); if (!isNullOrEmpty(providerClass)) { return getCustomAWSCredentialsProvider(uri, conf, providerClass); } - return 
DefaultAWSCredentialsProviderChain.getInstance(); + // use configured credentials or default chain with optional role + AWSCredentialsProvider provider = getAwsCredentials(conf) + .map(value -> (AWSCredentialsProvider) new AWSStaticCredentialsProvider(value)) + .orElseGet(DefaultAWSCredentialsProviderChain::getInstance); + + if (iamRole != null) { + provider = new STSAssumeRoleSessionCredentialsProvider.Builder(iamRole, "presto-session") + .withExternalId(externalId) + .withLongLivedCredentialsProvider(provider) + .build(); + } + + return provider; } private static AWSCredentialsProvider getCustomAWSCredentialsProvider(URI uri, Configuration conf, String providerClass) @@ -827,22 +836,24 @@ private static AWSCredentialsProvider getCustomAWSCredentialsProvider(URI uri, C } } - private static Optional getAwsCredentials(URI uri, Configuration conf) + private static Optional getEmbeddedAwsCredentials(URI uri) { - String accessKey = conf.get(S3_ACCESS_KEY); - String secretKey = conf.get(S3_SECRET_KEY); - - String userInfo = uri.getUserInfo(); - if (userInfo != null) { - int index = userInfo.indexOf(':'); - if (index < 0) { - accessKey = userInfo; - } - else { - accessKey = userInfo.substring(0, index); - secretKey = userInfo.substring(index + 1); + String userInfo = nullToEmpty(uri.getUserInfo()); + List parts = Splitter.on(':').limit(2).splitToList(userInfo); + if (parts.size() == 2) { + String accessKey = parts.get(0); + String secretKey = parts.get(1); + if (!accessKey.isEmpty() && !secretKey.isEmpty()) { + return Optional.of(new BasicAWSCredentials(accessKey, secretKey)); } } + return Optional.empty(); + } + + private static Optional getAwsCredentials(Configuration conf) + { + String accessKey = conf.get(S3_ACCESS_KEY); + String secretKey = conf.get(S3_SECRET_KEY); if (isNullOrEmpty(accessKey) || isNullOrEmpty(secretKey)) { return Optional.empty(); diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/s3/TestPrestoS3FileSystem.java 
b/presto-hive/src/test/java/io/prestosql/plugin/hive/s3/TestPrestoS3FileSystem.java index a52f3a2f05c4..a436f345cb18 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/s3/TestPrestoS3FileSystem.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/s3/TestPrestoS3FileSystem.java @@ -30,6 +30,8 @@ import com.amazonaws.services.s3.model.EncryptionMaterialsProvider; import com.amazonaws.services.s3.model.GetObjectMetadataRequest; import com.amazonaws.services.s3.model.ObjectMetadata; +import com.amazonaws.services.securitytoken.AWSSecurityTokenService; +import com.amazonaws.services.securitytoken.AWSSecurityTokenServiceClient; import com.google.common.base.VerifyException; import io.prestosql.plugin.hive.s3.PrestoS3FileSystem.UnrecoverableS3OperationException; import org.apache.hadoop.conf.Configuration; @@ -88,6 +90,19 @@ public class TestPrestoS3FileSystem { private static final int HTTP_RANGE_NOT_SATISFIABLE = 416; + @Test + public void testEmbeddedCredentials() + throws Exception + { + Configuration config = new Configuration(false); + try (PrestoS3FileSystem fs = new PrestoS3FileSystem()) { + AWSCredentials credentials = getStaticCredentials(config, fs, "s3n://testAccess:testSecret@test-bucket/"); + assertEquals(credentials.getAWSAccessKeyId(), "testAccess"); + assertEquals(credentials.getAWSSecretKey(), "testSecret"); + assertThat(credentials).isNotInstanceOf(AWSSessionCredentials.class); + } + } + @Test public void testStaticCredentials() throws Exception @@ -135,20 +150,51 @@ public void testEndpointWithPinToCurrentRegionConfiguration() } @Test - public void testAssumeRoleCredentials() + public void testAssumeRoleDefaultCredentials() throws Exception { Configuration config = new Configuration(false); - config.set(S3_IAM_ROLE, "role"); + config.set(S3_IAM_ROLE, "test_role"); try (PrestoS3FileSystem fs = new PrestoS3FileSystem()) { fs.initialize(new URI("s3n://test-bucket/"), config); - AWSCredentialsProvider awsCredentialsProvider = 
getAwsCredentialsProvider(fs); - assertInstanceOf(awsCredentialsProvider, STSAssumeRoleSessionCredentialsProvider.class); - assertEquals(getFieldValue(awsCredentialsProvider, "roleArn", String.class), "role"); + AWSCredentialsProvider tokenService = getStsCredentialsProvider(fs, "test_role"); + assertInstanceOf(tokenService, DefaultAWSCredentialsProviderChain.class); } } + @Test + public void testAssumeRoleStaticCredentials() + throws Exception + { + Configuration config = new Configuration(false); + config.set(S3_ACCESS_KEY, "test_access_key"); + config.set(S3_SECRET_KEY, "test_secret_key"); + config.set(S3_IAM_ROLE, "test_role"); + + try (PrestoS3FileSystem fs = new PrestoS3FileSystem()) { + fs.initialize(new URI("s3n://test-bucket/"), config); + AWSCredentialsProvider tokenService = getStsCredentialsProvider(fs, "test_role"); + assertInstanceOf(tokenService, AWSStaticCredentialsProvider.class); + + AWSCredentials credentials = tokenService.getCredentials(); + assertEquals(credentials.getAWSAccessKeyId(), "test_access_key"); + assertEquals(credentials.getAWSSecretKey(), "test_secret_key"); + } + } + + private static AWSCredentialsProvider getStsCredentialsProvider(PrestoS3FileSystem fs, String expectedRole) + { + AWSCredentialsProvider awsCredentialsProvider = getAwsCredentialsProvider(fs); + assertInstanceOf(awsCredentialsProvider, STSAssumeRoleSessionCredentialsProvider.class); + + assertEquals(getFieldValue(awsCredentialsProvider, "roleArn", String.class), expectedRole); + + AWSSecurityTokenService tokenService = getFieldValue(awsCredentialsProvider, "securityTokenService", AWSSecurityTokenService.class); + assertInstanceOf(tokenService, AWSSecurityTokenServiceClient.class); + return getFieldValue(tokenService, "awsCredentialsProvider", AWSCredentialsProvider.class); + } + @Test public void testAssumeRoleCredentialsWithExternalId() throws Exception From 67daaa9596c17c07fda9ea87aa84754c10228401 Mon Sep 17 00:00:00 2001 From: afinkelstein Date: Sun, 22 Mar 2020 
19:18:32 +0200 Subject: [PATCH 081/519] Allow specifying table location for Iceberg tables --- .../io/prestosql/plugin/iceberg/IcebergMetadata.java | 10 +++++++--- .../plugin/iceberg/IcebergTableProperties.java | 12 ++++++++++++ 2 files changed, 19 insertions(+), 3 deletions(-) diff --git a/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergMetadata.java b/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergMetadata.java index c7b208713ee3..d45fff194de0 100644 --- a/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergMetadata.java +++ b/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergMetadata.java @@ -21,6 +21,7 @@ import io.prestosql.plugin.base.classloader.ClassLoaderSafeSystemTable; import io.prestosql.plugin.hive.HdfsEnvironment; import io.prestosql.plugin.hive.HdfsEnvironment.HdfsContext; +import io.prestosql.plugin.hive.HiveSchemaProperties; import io.prestosql.plugin.hive.HiveWrittenPartitions; import io.prestosql.plugin.hive.TableAlreadyExistsException; import io.prestosql.plugin.hive.authentication.HiveIdentity; @@ -82,12 +83,12 @@ import static com.google.common.collect.ImmutableList.toImmutableList; import static com.google.common.collect.ImmutableMap.toImmutableMap; -import static io.prestosql.plugin.hive.HiveSchemaProperties.getLocation; import static io.prestosql.plugin.hive.util.HiveWriteUtils.getTableDefaultLocation; import static io.prestosql.plugin.iceberg.DomainConverter.convertTupleDomainTypes; import static io.prestosql.plugin.iceberg.ExpressionConverter.toIcebergExpression; import static io.prestosql.plugin.iceberg.IcebergTableProperties.FILE_FORMAT_PROPERTY; import static io.prestosql.plugin.iceberg.IcebergTableProperties.PARTITIONING_PROPERTY; +import static io.prestosql.plugin.iceberg.IcebergTableProperties.getLocation; import static io.prestosql.plugin.iceberg.IcebergTableProperties.getPartitioning; import static io.prestosql.plugin.iceberg.IcebergUtil.getColumns; import static 
io.prestosql.plugin.iceberg.IcebergUtil.getDataPath; @@ -253,7 +254,7 @@ public Map> listTableColumns(ConnectorSess @Override public void createSchema(ConnectorSession session, String schemaName, Map properties, PrestoPrincipal owner) { - Optional location = getLocation(properties).map(uri -> { + Optional location = HiveSchemaProperties.getLocation(properties).map(uri -> { try { hdfsEnvironment.getFileSystem(new HdfsContext(session, schemaName), new Path(uri)); } @@ -319,7 +320,10 @@ public ConnectorOutputTableHandle beginCreateTable(ConnectorSession session, Con HdfsContext hdfsContext = new HdfsContext(session, schemaName, tableName); HiveIdentity identity = new HiveIdentity(session); - String targetPath = getTableDefaultLocation(database, hdfsContext, hdfsEnvironment, schemaName, tableName).toString(); + String targetPath = getLocation(tableMetadata.getProperties()); + if (targetPath == null) { + targetPath = getTableDefaultLocation(database, hdfsContext, hdfsEnvironment, schemaName, tableName).toString(); + } TableOperations operations = new HiveTableOperations(metastore, hdfsEnvironment, hdfsContext, identity, schemaName, tableName, session.getUser(), targetPath); if (operations.current() != null) { diff --git a/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergTableProperties.java b/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergTableProperties.java index 97f44f261fc1..9272a6aaf30b 100644 --- a/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergTableProperties.java +++ b/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergTableProperties.java @@ -26,6 +26,7 @@ import static com.google.common.collect.ImmutableList.toImmutableList; import static io.prestosql.spi.session.PropertyMetadata.enumProperty; +import static io.prestosql.spi.session.PropertyMetadata.stringProperty; import static io.prestosql.spi.type.VarcharType.VARCHAR; import static java.util.Locale.ENGLISH; @@ -33,6 +34,7 @@ public class 
IcebergTableProperties { public static final String FILE_FORMAT_PROPERTY = "format"; public static final String PARTITIONING_PROPERTY = "partitioning"; + public static final String LOCATION_PROPERTY = "location"; private final List> tableProperties; @@ -57,6 +59,11 @@ public IcebergTableProperties(IcebergConfig icebergConfig) .map(name -> ((String) name).toLowerCase(ENGLISH)) .collect(toImmutableList()), value -> value)) + .add(stringProperty( + LOCATION_PROPERTY, + "File system location URI for the table", + null, + false)) .build(); } @@ -76,4 +83,9 @@ public static List getPartitioning(Map tableProperties) List partitioning = (List) tableProperties.get(PARTITIONING_PROPERTY); return partitioning == null ? ImmutableList.of() : ImmutableList.copyOf(partitioning); } + + public static String getLocation(Map tableProperties) + { + return (String) tableProperties.get(LOCATION_PROPERTY); + } } From 0ce280a6df44656c3b16073c87d3d6f9533179ac Mon Sep 17 00:00:00 2001 From: "wupeng@analysys.com.cn" Date: Tue, 31 Mar 2020 17:30:29 +0800 Subject: [PATCH 082/519] Fix incorrect pushdown in Kudu when value can be NULL. 
--- .../prestosql/plugin/kudu/KuduClientSession.java | 3 +++ .../plugin/kudu/TestKuduDistributedQueries.java | 16 ++++++++++++++++ 2 files changed, 19 insertions(+) diff --git a/presto-kudu/src/main/java/io/prestosql/plugin/kudu/KuduClientSession.java b/presto-kudu/src/main/java/io/prestosql/plugin/kudu/KuduClientSession.java index 5478a30635de..486b1f2e3552 100644 --- a/presto-kudu/src/main/java/io/prestosql/plugin/kudu/KuduClientSession.java +++ b/presto-kudu/src/main/java/io/prestosql/plugin/kudu/KuduClientSession.java @@ -491,6 +491,9 @@ private void addConstraintPredicates(KuduTable table, KuduScanToken.KuduScanToke else if (domain.isOnlyNull()) { builder.addPredicate(KuduPredicate.newIsNullPredicate(columnSchema)); } + else if (!domain.getValues().isNone() && domain.isNullAllowed()) { + // no restriction + } else if (domain.getValues().isAll() && !domain.isNullAllowed()) { builder.addPredicate(KuduPredicate.newIsNotNullPredicate(columnSchema)); } diff --git a/presto-kudu/src/test/java/io/prestosql/plugin/kudu/TestKuduDistributedQueries.java b/presto-kudu/src/test/java/io/prestosql/plugin/kudu/TestKuduDistributedQueries.java index d3fc45273626..58235bf5e7ea 100644 --- a/presto-kudu/src/test/java/io/prestosql/plugin/kudu/TestKuduDistributedQueries.java +++ b/presto-kudu/src/test/java/io/prestosql/plugin/kudu/TestKuduDistributedQueries.java @@ -77,6 +77,22 @@ public void testDataMappingSmokeTest(DataMappingTestSetup dataMappingTestSetup) // TODO Support these test once kudu connector can create tables with default partitions } + @Override + public void testPredicatePushdown() + { + assertUpdate("CREATE TABLE IF NOT EXISTS test_is_null (" + + "id INT WITH (primary_key=true), " + + "col_nullable bigint with (nullable=true)" + + ") WITH (" + + " partition_by_hash_columns = ARRAY['id'], " + + " partition_by_hash_buckets = 2" + + ")"); + + assertUpdate("INSERT INTO test_is_null VALUES (1, 1)", 1); + assertUpdate("INSERT INTO test_is_null(id) VALUES (2)", 1); + 
assertQuery("SELECT id FROM test_is_null WHERE col_nullable = 1 OR col_nullable IS NULL", "VALUES (1), (2)"); + } + @Override public void testAddColumn() { From 733fa05c48dc03c94ee6095cddd94bf9a8f291cd Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Wed, 8 Apr 2020 10:21:52 +0200 Subject: [PATCH 083/519] Fix formatting --- .../plugin/cassandra/TestCassandraIntegrationSmokeTest.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/presto-cassandra/src/test/java/io/prestosql/plugin/cassandra/TestCassandraIntegrationSmokeTest.java b/presto-cassandra/src/test/java/io/prestosql/plugin/cassandra/TestCassandraIntegrationSmokeTest.java index 5a9e2e082caa..40ef10e6c29f 100644 --- a/presto-cassandra/src/test/java/io/prestosql/plugin/cassandra/TestCassandraIntegrationSmokeTest.java +++ b/presto-cassandra/src/test/java/io/prestosql/plugin/cassandra/TestCassandraIntegrationSmokeTest.java @@ -481,15 +481,15 @@ public void testNestedCollectionType() session.execute("INSERT INTO keyspace_test_nested_collection.table_set (column_5, nested_collection) VALUES (1, {{1, 2, 3}})"); assertEquals(execute("SELECT nested_collection FROM cassandra.keyspace_test_nested_collection.table_set").getMaterializedRows().get(0), - new MaterializedRow(DEFAULT_PRECISION, "[[1,2,3]]")); + new MaterializedRow(DEFAULT_PRECISION, "[[1,2,3]]")); session.execute("INSERT INTO keyspace_test_nested_collection.table_list (column_5, nested_collection) VALUES (1, [[4, 5, 6]])"); assertEquals(execute("SELECT nested_collection FROM cassandra.keyspace_test_nested_collection.table_list").getMaterializedRows().get(0), - new MaterializedRow(DEFAULT_PRECISION, "[[4,5,6]]")); + new MaterializedRow(DEFAULT_PRECISION, "[[4,5,6]]")); session.execute("INSERT INTO keyspace_test_nested_collection.table_map (column_5, nested_collection) VALUES (1, {7:{8:9}})"); assertEquals(execute("SELECT nested_collection FROM cassandra.keyspace_test_nested_collection.table_map").getMaterializedRows().get(0), - new 
MaterializedRow(DEFAULT_PRECISION, "{7:{8:9}}")); + new MaterializedRow(DEFAULT_PRECISION, "{7:{8:9}}")); session.execute("DROP KEYSPACE keyspace_test_nested_collection"); } From ff6b0315afcc67a4864d45aeaa6d85e352d606cf Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Fri, 3 Apr 2020 10:40:45 +0200 Subject: [PATCH 084/519] Add SHOW CREATE TABLE test --- presto-accumulo/pom.xml | 6 +++++ .../TestAccumuloIntegrationSmokeTest.java | 18 ++++++++++++++ .../TestCassandraIntegrationSmokeTest.java | 18 ++++++++++++++ presto-elasticsearch/pom.xml | 6 +++++ ...TestElasticsearchIntegrationSmokeTest.java | 11 +++++++++ .../hive/TestHiveIntegrationSmokeTest.java | 17 +++++++++++++ .../plugin/iceberg/TestIcebergSmoke.java | 21 ++++++++++++++++ presto-kudu/pom.xml | 6 +++++ .../AbstractKuduIntegrationSmokeTest.java | 22 +++++++++++++++++ .../mysql/TestMySqlIntegrationSmokeTest.java | 18 ++++++++++++++ presto-phoenix/pom.xml | 6 +++++ .../TestPhoenixIntegrationSmokeTest.java | 22 +++++++++++++++++ .../TestRaptorIntegrationSmokeTest.java | 3 +++ ...estRaptorIntegrationSmokeTestBucketed.java | 24 +++++++++++++++++++ .../AbstractTestIntegrationSmokeTest.java | 18 ++++++++++++++ 15 files changed, 216 insertions(+) diff --git a/presto-accumulo/pom.xml b/presto-accumulo/pom.xml index 51392388398d..bed7e1bf8b0d 100644 --- a/presto-accumulo/pom.xml +++ b/presto-accumulo/pom.xml @@ -342,6 +342,12 @@ testng test + + + org.assertj + assertj-core + test + diff --git a/presto-accumulo/src/test/java/io/prestosql/plugin/accumulo/TestAccumuloIntegrationSmokeTest.java b/presto-accumulo/src/test/java/io/prestosql/plugin/accumulo/TestAccumuloIntegrationSmokeTest.java index be5624af44cd..45d8eb7dd89f 100644 --- a/presto-accumulo/src/test/java/io/prestosql/plugin/accumulo/TestAccumuloIntegrationSmokeTest.java +++ b/presto-accumulo/src/test/java/io/prestosql/plugin/accumulo/TestAccumuloIntegrationSmokeTest.java @@ -23,6 +23,7 @@ import static io.prestosql.spi.type.VarcharType.VARCHAR; import static 
io.prestosql.testing.MaterializedResult.resultBuilder; import static io.prestosql.testing.assertions.Assert.assertEquals; +import static org.assertj.core.api.Assertions.assertThat; public class TestAccumuloIntegrationSmokeTest extends AbstractTestIntegrationSmokeTest @@ -52,4 +53,21 @@ public void testDescribeTable() MaterializedResult actualColumns = computeActual("DESCRIBE orders"); assertEquals(actualColumns, expectedColumns); } + + @Override + public void testShowCreateTable() + { + assertThat(computeActual("SHOW CREATE TABLE orders").getOnlyValue()) + .isEqualTo("CREATE TABLE accumulo.tpch.orders (\n" + + " orderkey bigint COMMENT 'Accumulo row ID',\n" + + " custkey bigint COMMENT 'Accumulo column custkey:custkey. Indexed: false',\n" + + " orderstatus varchar(1) COMMENT 'Accumulo column orderstatus:orderstatus. Indexed: false',\n" + + " totalprice double COMMENT 'Accumulo column totalprice:totalprice. Indexed: false',\n" + + " orderdate date COMMENT 'Accumulo column orderdate:orderdate. Indexed: true',\n" + + " orderpriority varchar(15) COMMENT 'Accumulo column orderpriority:orderpriority. Indexed: false',\n" + + " clerk varchar(15) COMMENT 'Accumulo column clerk:clerk. Indexed: false',\n" + + " shippriority integer COMMENT 'Accumulo column shippriority:shippriority. Indexed: false',\n" + + " comment varchar(79) COMMENT 'Accumulo column comment:comment. 
Indexed: false'\n" + + ")"); + } } diff --git a/presto-cassandra/src/test/java/io/prestosql/plugin/cassandra/TestCassandraIntegrationSmokeTest.java b/presto-cassandra/src/test/java/io/prestosql/plugin/cassandra/TestCassandraIntegrationSmokeTest.java index 40ef10e6c29f..df28e8937ebd 100644 --- a/presto-cassandra/src/test/java/io/prestosql/plugin/cassandra/TestCassandraIntegrationSmokeTest.java +++ b/presto-cassandra/src/test/java/io/prestosql/plugin/cassandra/TestCassandraIntegrationSmokeTest.java @@ -61,6 +61,7 @@ import static java.lang.String.format; import static java.util.concurrent.TimeUnit.MINUTES; import static java.util.stream.Collectors.toList; +import static org.assertj.core.api.Assertions.assertThat; import static org.testng.Assert.assertEquals; public class TestCassandraIntegrationSmokeTest @@ -111,6 +112,23 @@ public void testDescribeTable() Assert.assertEquals(actualColumns, expectedColumns); } + @Override + public void testShowCreateTable() + { + assertThat(computeActual("SHOW CREATE TABLE orders").getOnlyValue()) + .isEqualTo("CREATE TABLE cassandra.tpch.orders (\n" + + " orderkey bigint,\n" + + " custkey bigint,\n" + + " orderstatus varchar,\n" + + " totalprice double,\n" + + " orderdate varchar,\n" + + " orderpriority varchar,\n" + + " clerk varchar,\n" + + " shippriority integer,\n" + + " comment varchar\n" + + ")"); + } + @Test public void testPartitionKeyPredicate() { diff --git a/presto-elasticsearch/pom.xml b/presto-elasticsearch/pom.xml index 2ece5639388a..731efbec2e78 100644 --- a/presto-elasticsearch/pom.xml +++ b/presto-elasticsearch/pom.xml @@ -367,6 +367,12 @@ test + + org.assertj + assertj-core + test + + org.jetbrains annotations diff --git a/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/TestElasticsearchIntegrationSmokeTest.java b/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/TestElasticsearchIntegrationSmokeTest.java index 9f8cf01183e0..7abc148861d1 100644 --- 
a/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/TestElasticsearchIntegrationSmokeTest.java +++ b/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/TestElasticsearchIntegrationSmokeTest.java @@ -28,6 +28,7 @@ import org.elasticsearch.client.RestClient; import org.elasticsearch.client.RestHighLevelClient; import org.intellij.lang.annotations.Language; +import org.testng.SkipException; import org.testng.annotations.AfterClass; import org.testng.annotations.Test; @@ -41,6 +42,7 @@ import static io.prestosql.testing.MaterializedResult.resultBuilder; import static io.prestosql.testing.assertions.Assert.assertEquals; import static java.lang.String.format; +import static org.assertj.core.api.Assertions.assertThatThrownBy; public class TestElasticsearchIntegrationSmokeTest extends AbstractTestIntegrationSmokeTest @@ -95,6 +97,15 @@ public void testDescribeTable() assertEquals(actualColumns, expectedColumns); } + @Override + public void testShowCreateTable() + { + // TODO (https://github.com/prestosql/presto/issues/3385) Fix SHOW CREATE TABLE + assertThatThrownBy(super::testShowCreateTable) + .hasMessage("No PropertyMetadata for property: original-name"); + throw new SkipException("Fix SHOW CREATE TABLE"); + } + @Test public void testNestedFields() throws IOException diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java index 1292e4a3a8c5..eb82cd320658 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java @@ -3037,8 +3037,25 @@ public void testTableCommentsTable() } @Test + @Override public void testShowCreateTable() { + assertThat(computeActual("SHOW CREATE TABLE orders").getOnlyValue()) + .isEqualTo("CREATE TABLE hive.tpch.orders (\n" + + " orderkey bigint,\n" + + " custkey bigint,\n" 
+ + " orderstatus varchar(1),\n" + + " totalprice double,\n" + + " orderdate date,\n" + + " orderpriority varchar(15),\n" + + " clerk varchar(15),\n" + + " shippriority integer,\n" + + " comment varchar(79)\n" + + ")\n" + + "WITH (\n" + + " format = 'ORC'\n" + + ")"); + String createTableSql = format("" + "CREATE TABLE %s.%s.%s (\n" + " c1 bigint,\n" + diff --git a/presto-iceberg/src/test/java/io/prestosql/plugin/iceberg/TestIcebergSmoke.java b/presto-iceberg/src/test/java/io/prestosql/plugin/iceberg/TestIcebergSmoke.java index 9dcf8b303d7e..14bc3cfb2da5 100644 --- a/presto-iceberg/src/test/java/io/prestosql/plugin/iceberg/TestIcebergSmoke.java +++ b/presto-iceberg/src/test/java/io/prestosql/plugin/iceberg/TestIcebergSmoke.java @@ -30,6 +30,7 @@ import static io.prestosql.spi.type.VarcharType.VARCHAR; import static io.prestosql.testing.MaterializedResult.resultBuilder; import static java.lang.String.format; +import static org.assertj.core.api.Assertions.assertThat; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertFalse; @@ -62,6 +63,26 @@ public void testDescribeTable() Assert.assertEquals(actualColumns, expectedColumns); } + @Override + public void testShowCreateTable() + { + assertThat(computeActual("SHOW CREATE TABLE orders").getOnlyValue()) + .isEqualTo("CREATE TABLE iceberg.tpch.orders (\n" + + " orderkey bigint,\n" + + " custkey bigint,\n" + + " orderstatus varchar,\n" + + " totalprice double,\n" + + " orderdate date,\n" + + " orderpriority varchar,\n" + + " clerk varchar,\n" + + " shippriority integer,\n" + + " comment varchar\n" + + ")\n" + + "WITH (\n" + + " format = 'ORC'\n" + + ")"); + } + @Test public void testDecimal() { diff --git a/presto-kudu/pom.xml b/presto-kudu/pom.xml index 2ecb76eef011..fcb690de6fc0 100644 --- a/presto-kudu/pom.xml +++ b/presto-kudu/pom.xml @@ -145,6 +145,12 @@ test + + org.assertj + assertj-core + test + + io.airlift testing diff --git 
a/presto-kudu/src/test/java/io/prestosql/plugin/kudu/AbstractKuduIntegrationSmokeTest.java b/presto-kudu/src/test/java/io/prestosql/plugin/kudu/AbstractKuduIntegrationSmokeTest.java index 571c1472dcdb..70550a05ee90 100644 --- a/presto-kudu/src/test/java/io/prestosql/plugin/kudu/AbstractKuduIntegrationSmokeTest.java +++ b/presto-kudu/src/test/java/io/prestosql/plugin/kudu/AbstractKuduIntegrationSmokeTest.java @@ -27,6 +27,7 @@ import static io.prestosql.testing.MaterializedResult.resultBuilder; import static io.prestosql.testing.assertions.Assert.assertEquals; import static io.prestosql.tpch.TpchTable.ORDERS; +import static org.assertj.core.api.Assertions.assertThat; import static org.testng.Assert.assertTrue; public abstract class AbstractKuduIntegrationSmokeTest @@ -71,8 +72,29 @@ public void testDescribeTable() } @Test + @Override public void testShowCreateTable() { + assertThat((String) computeActual("SHOW CREATE TABLE orders").getOnlyValue()) + .matches("CREATE TABLE kudu\\.\\w+\\.orders \\Q(\n" + + " orderkey bigint WITH ( nullable = true ),\n" + + " custkey bigint WITH ( nullable = true ),\n" + + " orderstatus varchar WITH ( nullable = true ),\n" + + " totalprice double WITH ( nullable = true ),\n" + + " orderdate varchar WITH ( nullable = true ),\n" + + " orderpriority varchar WITH ( nullable = true ),\n" + + " clerk varchar WITH ( nullable = true ),\n" + + " shippriority integer WITH ( nullable = true ),\n" + + " comment varchar WITH ( nullable = true )\n" + + ")\n" + + "WITH (\n" + + " number_of_replicas = 3,\n" + + " partition_by_hash_buckets = 2,\n" + + " partition_by_hash_columns = ARRAY['row_uuid'],\n" + + " partition_by_range_columns = ARRAY['row_uuid'],\n" + + " range_partitions = '[{\"lower\":null,\"upper\":null}]'\n" + + ")"); + assertUpdate("CREATE TABLE IF NOT EXISTS test_show_create_table (\n" + "id INT WITH (primary_key=true),\n" + "user_name VARCHAR\n" + diff --git 
a/presto-mysql/src/test/java/io/prestosql/plugin/mysql/TestMySqlIntegrationSmokeTest.java b/presto-mysql/src/test/java/io/prestosql/plugin/mysql/TestMySqlIntegrationSmokeTest.java index 9a2257d34442..1179e2eccb65 100644 --- a/presto-mysql/src/test/java/io/prestosql/plugin/mysql/TestMySqlIntegrationSmokeTest.java +++ b/presto-mysql/src/test/java/io/prestosql/plugin/mysql/TestMySqlIntegrationSmokeTest.java @@ -30,6 +30,7 @@ import static io.prestosql.testing.assertions.Assert.assertEquals; import static io.prestosql.tpch.TpchTable.ORDERS; import static java.lang.String.format; +import static org.assertj.core.api.Assertions.assertThat; import static org.testng.Assert.assertFalse; import static org.testng.Assert.assertTrue; @@ -71,6 +72,23 @@ public void testDescribeTable() assertEquals(actualColumns, expectedColumns); } + @Override + public void testShowCreateTable() + { + assertThat(computeActual("SHOW CREATE TABLE orders").getOnlyValue()) + .isEqualTo("CREATE TABLE mysql.tpch.orders (\n" + + " orderkey bigint,\n" + + " custkey bigint,\n" + + " orderstatus varchar(255),\n" + + " totalprice double,\n" + + " orderdate date,\n" + + " orderpriority varchar(255),\n" + + " clerk varchar(255),\n" + + " shippriority integer,\n" + + " comment varchar(255)\n" + + ")"); + } + @Test public void testDropTable() { diff --git a/presto-phoenix/pom.xml b/presto-phoenix/pom.xml index 5d6f902c6a35..2b95b8245226 100644 --- a/presto-phoenix/pom.xml +++ b/presto-phoenix/pom.xml @@ -172,6 +172,12 @@ test + + org.assertj + assertj-core + test + + io.airlift testing diff --git a/presto-phoenix/src/test/java/io/prestosql/plugin/phoenix/TestPhoenixIntegrationSmokeTest.java b/presto-phoenix/src/test/java/io/prestosql/plugin/phoenix/TestPhoenixIntegrationSmokeTest.java index 7bb57f330a57..7f24f9a8f3e6 100644 --- a/presto-phoenix/src/test/java/io/prestosql/plugin/phoenix/TestPhoenixIntegrationSmokeTest.java +++ 
b/presto-phoenix/src/test/java/io/prestosql/plugin/phoenix/TestPhoenixIntegrationSmokeTest.java @@ -28,6 +28,7 @@ import static io.prestosql.plugin.jdbc.TypeHandlingJdbcPropertiesProvider.UNSUPPORTED_TYPE_HANDLING; import static io.prestosql.plugin.jdbc.UnsupportedTypeHandling.CONVERT_TO_VARCHAR; import static io.prestosql.plugin.phoenix.PhoenixQueryRunner.createPhoenixQueryRunner; +import static org.assertj.core.api.Assertions.assertThat; import static org.testng.Assert.assertTrue; import static org.testng.Assert.fail; @@ -50,6 +51,27 @@ public void destroy() TestingPhoenixServer.shutDown(); } + @Override + public void testShowCreateTable() + { + assertThat(computeActual("SHOW CREATE TABLE orders").getOnlyValue()) + .isEqualTo("CREATE TABLE phoenix.tpch.orders (\n" + + " orderkey bigint,\n" + + " custkey bigint,\n" + + " orderstatus varchar(1),\n" + + " totalprice double,\n" + + " orderdate date,\n" + + " orderpriority varchar(15),\n" + + " clerk varchar(15),\n" + + " shippriority integer,\n" + + " comment varchar(79)\n" + + ")\n" + + "WITH (\n" + + " rowkeys = 'ROWKEY',\n" + + " salt_buckets = 10\n" + + ")"); + } + @Test public void testSchemaOperations() { diff --git a/presto-raptor-legacy/src/test/java/io/prestosql/plugin/raptor/legacy/TestRaptorIntegrationSmokeTest.java b/presto-raptor-legacy/src/test/java/io/prestosql/plugin/raptor/legacy/TestRaptorIntegrationSmokeTest.java index 93c761d15bb5..661ccac99a6f 100644 --- a/presto-raptor-legacy/src/test/java/io/prestosql/plugin/raptor/legacy/TestRaptorIntegrationSmokeTest.java +++ b/presto-raptor-legacy/src/test/java/io/prestosql/plugin/raptor/legacy/TestRaptorIntegrationSmokeTest.java @@ -492,8 +492,11 @@ public void testBucketingMixedTypes() } @Test + @Override public void testShowCreateTable() { + super.testShowCreateTable(); + String createTableSql = format("" + "CREATE TABLE %s.%s.%s (\n" + " c1 bigint,\n" + diff --git 
a/presto-raptor-legacy/src/test/java/io/prestosql/plugin/raptor/legacy/TestRaptorIntegrationSmokeTestBucketed.java b/presto-raptor-legacy/src/test/java/io/prestosql/plugin/raptor/legacy/TestRaptorIntegrationSmokeTestBucketed.java index 05df55b94e6f..67ae01694c19 100644 --- a/presto-raptor-legacy/src/test/java/io/prestosql/plugin/raptor/legacy/TestRaptorIntegrationSmokeTestBucketed.java +++ b/presto-raptor-legacy/src/test/java/io/prestosql/plugin/raptor/legacy/TestRaptorIntegrationSmokeTestBucketed.java @@ -18,6 +18,7 @@ import org.testng.annotations.Test; import static io.prestosql.plugin.raptor.legacy.RaptorQueryRunner.createRaptorQueryRunner; +import static org.assertj.core.api.Assertions.assertThat; public class TestRaptorIntegrationSmokeTestBucketed extends TestRaptorIntegrationSmokeTest @@ -29,6 +30,29 @@ protected QueryRunner createQueryRunner() return createRaptorQueryRunner(ImmutableMap.of(), true, true); } + @Test + @Override + public void testShowCreateTable() + { + assertThat(computeActual("SHOW CREATE TABLE orders").getOnlyValue()) + .isEqualTo("CREATE TABLE raptor.tpch.orders (\n" + + " orderkey bigint,\n" + + " custkey bigint,\n" + + " orderstatus varchar(1),\n" + + " totalprice double,\n" + + " orderdate date,\n" + + " orderpriority varchar(15),\n" + + " clerk varchar(15),\n" + + " shippriority integer,\n" + + " comment varchar(79)\n" + + ")\n" + + "WITH (\n" + + " bucket_count = 25,\n" + + " bucketed_on = ARRAY['orderkey'],\n" + + " distribution_name = 'order'\n" + + ")"); + } + @Test public void testShardsSystemTableBucketNumber() { diff --git a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestIntegrationSmokeTest.java b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestIntegrationSmokeTest.java index c4b428af065f..54d70ff9da02 100644 --- a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestIntegrationSmokeTest.java +++ b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestIntegrationSmokeTest.java @@ 
-153,6 +153,24 @@ public void testDescribeTable() assertEquals(actualColumns, expectedColumns); } + @Test + public void testShowCreateTable() + { + assertThat((String) computeActual("SHOW CREATE TABLE orders").getOnlyValue()) + // If the connector reports additional column properties, the expected value needs to be adjusted in the test subclass + .matches("CREATE TABLE \\w+\\.\\w+\\.orders \\Q(\n" + + " orderkey bigint,\n" + + " custkey bigint,\n" + + " orderstatus varchar(1),\n" + + " totalprice double,\n" + + " orderdate date,\n" + + " orderpriority varchar(15),\n" + + " clerk varchar(15),\n" + + " shippriority integer,\n" + + " comment varchar(79)\n" + + ")"); + } + @Test public void testSelectInformationSchemaTables() { From 3fdc8626af697d14879ff6b9660446d27bf3a906 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Grzegorz=20Kokosi=C5=84ski?= Date: Mon, 6 Apr 2020 16:07:01 +0200 Subject: [PATCH 085/519] Allow to modify Presto in product tests --- .../resources/docker/presto-product-tests/run-presto.sh | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/run-presto.sh b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/run-presto.sh index 997fccf6d128..5a5c45830656 100755 --- a/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/run-presto.sh +++ b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/run-presto.sh @@ -10,6 +10,13 @@ fi set -x tar xf /docker/presto-server.tar.gz -C /docker + +if test -d /docker/presto-init.d; then + for init_script in /docker/presto-init.d/*; do + "${init_script}" + done +fi + exec /docker/presto-server-*/bin/launcher \ -Dpresto-temporarily-allow-java8=true \ -Dnode.id="${HOSTNAME}" \ From 101d55404e4172ac72889b2d175863de5ec07546 Mon Sep 17 00:00:00 2001 From: Chun Han Hsiao Date: Wed, 8 Apr 2020 10:55:19 +0800 Subject: [PATCH 086/519] Support elasticsearch numeric keyword --- 
.../decoders/VarcharDecoder.java | 4 +-- ...TestElasticsearchIntegrationSmokeTest.java | 28 +++++++++++++++++++ 2 files changed, 30 insertions(+), 2 deletions(-) diff --git a/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/VarcharDecoder.java b/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/VarcharDecoder.java index 9eeb2af8e810..cbff4331f287 100644 --- a/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/VarcharDecoder.java +++ b/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/decoders/VarcharDecoder.java @@ -42,11 +42,11 @@ public void decode(SearchHit hit, Supplier getter, BlockBuilder output) if (value == null) { output.appendNull(); } - else if (value instanceof String) { + else if (value instanceof String || value instanceof Number) { VARCHAR.writeSlice(output, Slices.utf8Slice(value.toString())); } else { - throw new PrestoException(TYPE_MISMATCH, format("Expected a string value for field '%s' of type VARCHAR: %s [%s]", path, value, value.getClass().getSimpleName())); + throw new PrestoException(TYPE_MISMATCH, format("Expected a string or numeric value for field '%s' of type VARCHAR: %s [%s]", path, value, value.getClass().getSimpleName())); } } } diff --git a/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/TestElasticsearchIntegrationSmokeTest.java b/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/TestElasticsearchIntegrationSmokeTest.java index 7abc148861d1..88779baf8ddf 100644 --- a/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/TestElasticsearchIntegrationSmokeTest.java +++ b/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/TestElasticsearchIntegrationSmokeTest.java @@ -695,6 +695,34 @@ public void testMixedCase() "VALUES ('john', 32)"); } + @Test + public void testNumericKeyword() + throws IOException + { + String indexName = "numeric_keyword"; + @Language("JSON") String mapping = "" + + "{" + + " \"mappings\": {" + + 
" \"doc\": {" + + " \"properties\": {" + + " \"numeric_keyword\": { \"type\": \"keyword\" }" + + " }" + + " }" + + " }" + + "}"; + createIndex(indexName, mapping); + index(indexName, ImmutableMap.builder() + .put("numeric_keyword", 20) + .build()); + + assertQuery( + "SELECT numeric_keyword FROM numeric_keyword", + "VALUES 20"); + assertQuery( + "SELECT numeric_keyword FROM numeric_keyword where numeric_keyword = '20'", + "VALUES 20"); + } + @Test public void testQueryStringError() { From fc727917c4b3882dfd4a3d1ec807a1d1c0c9dce5 Mon Sep 17 00:00:00 2001 From: David Phillips Date: Wed, 1 Apr 2020 20:28:43 -0700 Subject: [PATCH 087/519] Remove launcher help output from documentation This will become out of date and is not needed in the documentation, as anyone that needs to adjust it can run help on their own. --- .../main/sphinx/installation/deployment.rst | 49 +++++-------------- 1 file changed, 11 insertions(+), 38 deletions(-) diff --git a/presto-docs/src/main/sphinx/installation/deployment.rst b/presto-docs/src/main/sphinx/installation/deployment.rst index 622b318c82fe..ae90fda0e9e8 100644 --- a/presto-docs/src/main/sphinx/installation/deployment.rst +++ b/presto-docs/src/main/sphinx/installation/deployment.rst @@ -264,44 +264,17 @@ if using a supervision system like daemontools: bin/launcher run Run the launcher with ``--help`` to see the supported commands and -command line options: - -.. 
code-block:: none - - bin/launcher --help - Usage: launcher [options] command - - Commands: run, start, stop, restart, kill, status - - Options: - -h, --help show this help message and exit - -v, --verbose Run verbosely - --etc-dir=DIR Defaults to INSTALL_PATH/etc - --launcher-config=FILE Defaults to INSTALL_PATH/bin/launcher.properties - --node-config=FILE Defaults to ETC_DIR/node.properties - --jvm-config=FILE Defaults to ETC_DIR/jvm.config - --config=FILE Defaults to ETC_DIR/config.properties - --log-levels-file=FILE Defaults to ETC_DIR/log.properties - --data-dir=DIR Defaults to INSTALL_PATH - --pid-file=FILE Defaults to DATA_DIR/var/run/launcher.pid - --launcher-log-file=FILE Defaults to DATA_DIR/var/log/launcher.log (only in - daemon mode) - --server-log-file=FILE Defaults to DATA_DIR/var/log/server.log (only in - daemon mode) - -D NAME=VALUE Set a Java system property - -In particular, the ``--verbose`` option is very useful for debugging the -installation and any problems starting Presto. - -As you can see, the launcher script configures default values for the -configuration directory ``etc``, configuration files, the data directory ``var`` -and log files in the data directory. - -You can use these options to adjust your Presto usage to any requirements, such -as using a directory outside the installation directory, specific mount points -or locations and even using other file names. For example, the Presto RPM -package adjusts the used directories to better follow the Linux Filesystem -Hierarchy Standard. +command line options. In particular, the ``--verbose`` option is +very useful for debugging the installation. + +The launcher configures default values for the configuration +directory ``etc``, configuration files, the data directory ``var``, +and log files in the data directory. 
You can change these values +to adjust your Presto usage to any requirements, such as using a +directory outside the installation directory, specific mount points +or locations, and even using other file names. For example, the Presto +RPM adjusts the used directories to better follow the Linux Filesystem +Hierarchy Standard (FHS). After starting Presto, you can find log files in the ``log`` directory inside the data directory ``var``: From 73a1108274365b9d4fcd3a4c5ba23fce6a242fe4 Mon Sep 17 00:00:00 2001 From: David Phillips Date: Fri, 3 Apr 2020 16:11:15 -0700 Subject: [PATCH 088/519] Remove unused parameter from createOrcDataSink --- .../java/io/prestosql/plugin/hive/HiveWriterFactory.java | 4 ++-- .../io/prestosql/plugin/hive/orc/OrcFileWriterFactory.java | 7 ++----- 2 files changed, 4 insertions(+), 7 deletions(-) diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveWriterFactory.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveWriterFactory.java index 6df18935c358..37b60d25951f 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveWriterFactory.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveWriterFactory.java @@ -30,6 +30,7 @@ import io.prestosql.plugin.hive.metastore.SortingColumn; import io.prestosql.plugin.hive.metastore.StorageFormat; import io.prestosql.plugin.hive.metastore.Table; +import io.prestosql.plugin.hive.orc.OrcFileWriterFactory; import io.prestosql.plugin.hive.util.HiveWriteUtils; import io.prestosql.spi.NodeManager; import io.prestosql.spi.Page; @@ -81,7 +82,6 @@ import static io.prestosql.plugin.hive.LocationHandle.WriteMode.DIRECT_TO_TARGET_EXISTING_DIRECTORY; import static io.prestosql.plugin.hive.metastore.MetastoreUtil.getHiveSchema; import static io.prestosql.plugin.hive.metastore.StorageFormat.fromHiveStorageFormat; -import static io.prestosql.plugin.hive.orc.OrcFileWriterFactory.createOrcDataSink; import static 
io.prestosql.plugin.hive.util.CompressionConfigUtil.configureCompression; import static io.prestosql.plugin.hive.util.ConfigurationUtils.toJobConf; import static io.prestosql.plugin.hive.util.HiveUtil.getColumnNames; @@ -535,7 +535,7 @@ else if (insertExistingPartitionsBehavior == InsertExistingPartitionsBehavior.ER sortFields, sortOrders, pageSorter, - (fs, p) -> createOrcDataSink(session, fs, p)); + OrcFileWriterFactory::createOrcDataSink); } return new HiveWriter( diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/orc/OrcFileWriterFactory.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/orc/OrcFileWriterFactory.java index ae02f54b7e4b..25afe83a533a 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/orc/OrcFileWriterFactory.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/orc/OrcFileWriterFactory.java @@ -154,7 +154,7 @@ public Optional createFileWriter( try { FileSystem fileSystem = hdfsEnvironment.getFileSystem(session.getUser(), path, configuration); - OrcDataSink orcDataSink = createOrcDataSink(session, fileSystem, path); + OrcDataSink orcDataSink = createOrcDataSink(fileSystem, path); Optional> validationInputFactory = Optional.empty(); if (HiveSessionProperties.isOrcOptimizedWriterValidate(session)) { @@ -207,10 +207,7 @@ public Optional createFileWriter( } } - /** - * Allow subclass to replace data sink implementation. 
- */ - public static OrcDataSink createOrcDataSink(ConnectorSession session, FileSystem fileSystem, Path path) + public static OrcDataSink createOrcDataSink(FileSystem fileSystem, Path path) throws IOException { return new OutputStreamOrcDataSink(fileSystem.create(path)); From 1a0a0e7acec9dafccc66c4558958bf2b5612d658 Mon Sep 17 00:00:00 2001 From: David Phillips Date: Fri, 3 Apr 2020 16:14:54 -0700 Subject: [PATCH 089/519] Use static imports --- .../plugin/hive/RcFileFileWriterFactory.java | 9 ++++++--- .../plugin/hive/orc/OrcFileWriterFactory.java | 15 ++++++++------- 2 files changed, 14 insertions(+), 10 deletions(-) diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/RcFileFileWriterFactory.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/RcFileFileWriterFactory.java index 8e6132812dc9..f84cbf30b503 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/RcFileFileWriterFactory.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/RcFileFileWriterFactory.java @@ -44,6 +44,9 @@ import static io.prestosql.plugin.hive.HiveErrorCode.HIVE_WRITER_OPEN_ERROR; import static io.prestosql.plugin.hive.HiveErrorCode.HIVE_WRITE_VALIDATION_FAILED; +import static io.prestosql.plugin.hive.HiveMetadata.PRESTO_QUERY_ID_NAME; +import static io.prestosql.plugin.hive.HiveMetadata.PRESTO_VERSION_NAME; +import static io.prestosql.plugin.hive.HiveSessionProperties.isRcfileOptimizedWriterValidate; import static io.prestosql.plugin.hive.rcfile.RcFilePageSourceFactory.createTextVectorEncoding; import static io.prestosql.plugin.hive.util.HiveUtil.getColumnNames; import static io.prestosql.plugin.hive.util.HiveUtil.getColumnTypes; @@ -126,7 +129,7 @@ else if (ColumnarSerDe.class.getName().equals(storageFormat.getSerDe())) { OutputStream outputStream = fileSystem.create(path); Optional> validationInputFactory = Optional.empty(); - if (HiveSessionProperties.isRcfileOptimizedWriterValidate(session)) { + if (isRcfileOptimizedWriterValidate(session)) { 
validationInputFactory = Optional.of(() -> { try { return new HdfsRcFileDataSource( @@ -154,8 +157,8 @@ else if (ColumnarSerDe.class.getName().equals(storageFormat.getSerDe())) { codecName, fileInputColumnIndexes, ImmutableMap.builder() - .put(HiveMetadata.PRESTO_VERSION_NAME, nodeVersion.toString()) - .put(HiveMetadata.PRESTO_QUERY_ID_NAME, session.getQueryId()) + .put(PRESTO_VERSION_NAME, nodeVersion.toString()) + .put(PRESTO_QUERY_ID_NAME, session.getQueryId()) .build(), validationInputFactory)); } diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/orc/OrcFileWriterFactory.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/orc/OrcFileWriterFactory.java index 25afe83a533a..987334db462b 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/orc/OrcFileWriterFactory.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/orc/OrcFileWriterFactory.java @@ -22,14 +22,11 @@ import io.prestosql.orc.OrcWriterStats; import io.prestosql.orc.OutputStreamOrcDataSink; import io.prestosql.orc.metadata.CompressionKind; -import io.prestosql.orc.metadata.OrcType; import io.prestosql.plugin.hive.FileFormatDataSourceStats; import io.prestosql.plugin.hive.FileWriter; import io.prestosql.plugin.hive.HdfsEnvironment; import io.prestosql.plugin.hive.HiveConfig; import io.prestosql.plugin.hive.HiveFileWriterFactory; -import io.prestosql.plugin.hive.HiveMetadata; -import io.prestosql.plugin.hive.HiveSessionProperties; import io.prestosql.plugin.hive.NodeVersion; import io.prestosql.plugin.hive.metastore.StorageFormat; import io.prestosql.spi.PrestoException; @@ -54,15 +51,19 @@ import java.util.concurrent.Callable; import java.util.function.Supplier; +import static io.prestosql.orc.metadata.OrcType.createRootOrcType; import static io.prestosql.plugin.hive.HiveErrorCode.HIVE_UNSUPPORTED_FORMAT; import static io.prestosql.plugin.hive.HiveErrorCode.HIVE_WRITER_OPEN_ERROR; import static io.prestosql.plugin.hive.HiveErrorCode.HIVE_WRITE_VALIDATION_FAILED; 
+import static io.prestosql.plugin.hive.HiveMetadata.PRESTO_QUERY_ID_NAME; +import static io.prestosql.plugin.hive.HiveMetadata.PRESTO_VERSION_NAME; import static io.prestosql.plugin.hive.HiveSessionProperties.getOrcOptimizedWriterMaxDictionaryMemory; import static io.prestosql.plugin.hive.HiveSessionProperties.getOrcOptimizedWriterMaxStripeRows; import static io.prestosql.plugin.hive.HiveSessionProperties.getOrcOptimizedWriterMaxStripeSize; import static io.prestosql.plugin.hive.HiveSessionProperties.getOrcOptimizedWriterMinStripeSize; import static io.prestosql.plugin.hive.HiveSessionProperties.getOrcOptimizedWriterValidateMode; import static io.prestosql.plugin.hive.HiveSessionProperties.getOrcStringStatisticsLimit; +import static io.prestosql.plugin.hive.HiveSessionProperties.isOrcOptimizedWriterValidate; import static io.prestosql.plugin.hive.util.HiveUtil.getColumnNames; import static io.prestosql.plugin.hive.util.HiveUtil.getColumnTypes; import static java.util.Locale.ENGLISH; @@ -157,7 +158,7 @@ public Optional createFileWriter( OrcDataSink orcDataSink = createOrcDataSink(fileSystem, path); Optional> validationInputFactory = Optional.empty(); - if (HiveSessionProperties.isOrcOptimizedWriterValidate(session)) { + if (isOrcOptimizedWriterValidate(session)) { validationInputFactory = Optional.of(() -> { try { return new HdfsOrcDataSource( @@ -183,7 +184,7 @@ public Optional createFileWriter( rollbackAction, fileColumnNames, fileColumnTypes, - OrcType.createRootOrcType(fileColumnNames, fileColumnTypes), + createRootOrcType(fileColumnNames, fileColumnTypes), compression, orcWriterOptions .withStripeMinSize(getOrcOptimizedWriterMinStripeSize(session)) @@ -194,8 +195,8 @@ public Optional createFileWriter( writeLegacyVersion, fileInputColumnIndexes, ImmutableMap.builder() - .put(HiveMetadata.PRESTO_VERSION_NAME, nodeVersion.toString()) - .put(HiveMetadata.PRESTO_QUERY_ID_NAME, session.getQueryId()) + .put(PRESTO_VERSION_NAME, nodeVersion.toString()) + 
.put(PRESTO_QUERY_ID_NAME, session.getQueryId()) .build(), hiveStorageTimeZone, validationInputFactory, From bdeb47f90166dd38e78c001a602a4e30cb558dc7 Mon Sep 17 00:00:00 2001 From: David Phillips Date: Tue, 7 Apr 2020 22:20:25 -0700 Subject: [PATCH 090/519] Cleanup usages of Iceberg schema and table properties --- .../io/prestosql/plugin/iceberg/IcebergMetadata.java | 11 ++++++----- .../plugin/iceberg/IcebergSchemaProperties.java | 2 +- .../plugin/iceberg/IcebergTableProperties.java | 2 +- 3 files changed, 8 insertions(+), 7 deletions(-) diff --git a/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergMetadata.java b/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergMetadata.java index d45fff194de0..c547c7d17562 100644 --- a/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergMetadata.java +++ b/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergMetadata.java @@ -21,7 +21,6 @@ import io.prestosql.plugin.base.classloader.ClassLoaderSafeSystemTable; import io.prestosql.plugin.hive.HdfsEnvironment; import io.prestosql.plugin.hive.HdfsEnvironment.HdfsContext; -import io.prestosql.plugin.hive.HiveSchemaProperties; import io.prestosql.plugin.hive.HiveWrittenPartitions; import io.prestosql.plugin.hive.TableAlreadyExistsException; import io.prestosql.plugin.hive.authentication.HiveIdentity; @@ -86,10 +85,12 @@ import static io.prestosql.plugin.hive.util.HiveWriteUtils.getTableDefaultLocation; import static io.prestosql.plugin.iceberg.DomainConverter.convertTupleDomainTypes; import static io.prestosql.plugin.iceberg.ExpressionConverter.toIcebergExpression; +import static io.prestosql.plugin.iceberg.IcebergSchemaProperties.getSchemaLocation; import static io.prestosql.plugin.iceberg.IcebergTableProperties.FILE_FORMAT_PROPERTY; import static io.prestosql.plugin.iceberg.IcebergTableProperties.PARTITIONING_PROPERTY; -import static io.prestosql.plugin.iceberg.IcebergTableProperties.getLocation; +import static 
io.prestosql.plugin.iceberg.IcebergTableProperties.getFileFormat; import static io.prestosql.plugin.iceberg.IcebergTableProperties.getPartitioning; +import static io.prestosql.plugin.iceberg.IcebergTableProperties.getTableLocation; import static io.prestosql.plugin.iceberg.IcebergUtil.getColumns; import static io.prestosql.plugin.iceberg.IcebergUtil.getDataPath; import static io.prestosql.plugin.iceberg.IcebergUtil.getFileFormat; @@ -254,7 +255,7 @@ public Map> listTableColumns(ConnectorSess @Override public void createSchema(ConnectorSession session, String schemaName, Map properties, PrestoPrincipal owner) { - Optional location = HiveSchemaProperties.getLocation(properties).map(uri -> { + Optional location = getSchemaLocation(properties).map(uri -> { try { hdfsEnvironment.getFileSystem(new HdfsContext(session, schemaName), new Path(uri)); } @@ -320,7 +321,7 @@ public ConnectorOutputTableHandle beginCreateTable(ConnectorSession session, Con HdfsContext hdfsContext = new HdfsContext(session, schemaName, tableName); HiveIdentity identity = new HiveIdentity(session); - String targetPath = getLocation(tableMetadata.getProperties()); + String targetPath = getTableLocation(tableMetadata.getProperties()); if (targetPath == null) { targetPath = getTableDefaultLocation(database, hdfsContext, hdfsEnvironment, schemaName, tableName).toString(); } @@ -330,7 +331,7 @@ public ConnectorOutputTableHandle beginCreateTable(ConnectorSession session, Con throw new TableAlreadyExistsException(schemaTableName); } - FileFormat fileFormat = (FileFormat) tableMetadata.getProperties().get(FILE_FORMAT_PROPERTY); + FileFormat fileFormat = getFileFormat(tableMetadata.getProperties()); TableMetadata metadata = newTableMetadata(operations, schema, partitionSpec, targetPath, ImmutableMap.of(DEFAULT_FILE_FORMAT, fileFormat.toString())); transaction = createTableTransaction(operations, metadata); diff --git a/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergSchemaProperties.java 
b/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergSchemaProperties.java index 889b9d2562c2..ff48b2f0cc21 100644 --- a/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergSchemaProperties.java +++ b/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergSchemaProperties.java @@ -36,7 +36,7 @@ public final class IcebergSchemaProperties private IcebergSchemaProperties() {} - public static Optional getLocation(Map schemaProperties) + public static Optional getSchemaLocation(Map schemaProperties) { return Optional.ofNullable((String) schemaProperties.get(LOCATION_PROPERTY)); } diff --git a/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergTableProperties.java b/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergTableProperties.java index 9272a6aaf30b..afb7fc324295 100644 --- a/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergTableProperties.java +++ b/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergTableProperties.java @@ -84,7 +84,7 @@ public static List getPartitioning(Map tableProperties) return partitioning == null ? ImmutableList.of() : ImmutableList.copyOf(partitioning); } - public static String getLocation(Map tableProperties) + public static String getTableLocation(Map tableProperties) { return (String) tableProperties.get(LOCATION_PROPERTY); } From 36a6abc4b63157ae399e543e45eeb98d80e7604f Mon Sep 17 00:00:00 2001 From: David Phillips Date: Tue, 7 Apr 2020 10:27:39 -0700 Subject: [PATCH 091/519] Update to Airlift 0.195 This fixes ArrayIndexOutOfBoundsException for TDigest. 
--- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 7a3beca00f97..e4ca2cd4cef8 100644 --- a/pom.xml +++ b/pom.xml @@ -44,7 +44,7 @@ 3.3.9 4.7.1 - 0.193 + 0.195 ${dep.airlift.version} 1.11.749 3.9.0 From 28e504d79596e0a47682d11f12c3e5b6cf036cfc Mon Sep 17 00:00:00 2001 From: Martin Traverso Date: Mon, 6 Apr 2020 16:44:17 -0700 Subject: [PATCH 092/519] Add 332 release notes --- .../src/main/sphinx/connector/hive.rst | 2 + presto-docs/src/main/sphinx/release.rst | 1 + .../src/main/sphinx/release/release-332.rst | 112 ++++++++++++++++++ 3 files changed, 115 insertions(+) create mode 100644 presto-docs/src/main/sphinx/release/release-332.rst diff --git a/presto-docs/src/main/sphinx/connector/hive.rst b/presto-docs/src/main/sphinx/connector/hive.rst index 3772d85dcb81..3422c9800798 100644 --- a/presto-docs/src/main/sphinx/connector/hive.rst +++ b/presto-docs/src/main/sphinx/connector/hive.rst @@ -729,6 +729,8 @@ to bypass the network (*short-circuit*). See `Performance Tuning Tips for Presto `_ for more details. +.. _alluxio_catalog_service: + Alluxio Catalog Service ^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/presto-docs/src/main/sphinx/release.rst b/presto-docs/src/main/sphinx/release.rst index 6ca36ccd1e92..11ae43be1f1b 100644 --- a/presto-docs/src/main/sphinx/release.rst +++ b/presto-docs/src/main/sphinx/release.rst @@ -5,6 +5,7 @@ Release Notes .. toctree:: :maxdepth: 1 + release/release-332 release/release-331 release/release-330 release/release-329 diff --git a/presto-docs/src/main/sphinx/release/release-332.rst b/presto-docs/src/main/sphinx/release/release-332.rst new file mode 100644 index 000000000000..806602ed4e7b --- /dev/null +++ b/presto-docs/src/main/sphinx/release/release-332.rst @@ -0,0 +1,112 @@ +========================= +Release 332 (08 Apr 2020) +========================= + +General Changes +--------------- + +* Fix query failure during planning phase for certain queries involving multiple joins. 
(:issue:`3149`) +* Fix execution failure for queries involving large ``IN`` predicates on decimal values with precision larger than 18. (:issue:`3191`) +* Fix prepared statements or view creation for queries containing certain nested aliases or ``TABLESAMPLE`` clauses. (:issue:`3250`) +* Fix rare query failure. (:issue:`2981`) +* Ignore trailing whitespace when loading configuration files such as + ``etc/event-listener.properties`` or ``etc/group-provider.properties``. + Trailing whitespace in ``etc/config.properties`` and catalog properties + files was already ignored. (:issue:`3231`) +* Reduce overhead for internal communication requests. (:issue:`3215`) +* Include filters over all table columns in output of ``EXPLAIN (TYPE IO)``. (:issue:`2743`) +* Support configuring multiple event listeners. The properties files for all the event listeners + can be specified using the ``event-listener.config-files`` configuration property. (:issue:`3128`) +* Add ``CREATE SCHEMA ... AUTHORIZATION`` syntax to create a schema with specified owner. (:issue:`3066`) +* Add ``optimizer.push-partial-aggregation-through-join`` configuration property to control + pushing partial aggregations through inner joins. Previously, this was only available + via the ``push_partial_aggregation_through_join`` session property. (:issue:`3205`) +* Rename configuration property ``optimizer.push-aggregation-through-join`` + to ``optimizer.push-aggregation-through-outer-join``. (:issue:`3205`) +* Add operator statistics for the number of splits processed with a dynamic filter applied. (:issue:`3217`) + Security Changes +---------------- + +* Fix LDAP authentication when user belongs to multiple groups. (:issue:`3206`) +* Verify access to table columns when running ``SHOW STATS``. (:issue:`2665`) +* Only return views accessible to the user from ``information_schema.views``.
(:issue:`3290`) + JDBC Driver Changes +------------------- + +* Add ``clientInfo`` property to set extra information about the client. (:issue:`3188`) +* Add ``traceToken`` property to set a trace token for correlating requests across systems. (:issue:`3188`) + +BigQuery Connector Changes +-------------------------- + +* Extract parent project ID from service account before looking at the environment. (:issue:`3131`) + +Elasticsearch Connector Changes +------------------------------- + +* Add support for ``ip`` type. (:issue:`3347`) +* Add support for ``keyword`` fields with numeric values. (:issue:`3381`) +* Remove unnecessary ``elasticsearch.aws.use-instance-credentials`` configuration property. (:issue:`3265`) + +Hive Connector Changes +---------------------- + +* Fix failure reading certain Parquet files larger than 2GB. (:issue:`2730`) +* Improve performance when reading gzip-compressed Parquet data. (:issue:`3175`) +* Explicitly disallow reading from Delta Lake tables. Previously, reading + from partitioned tables would return zero rows, and reading from + unpartitioned tables would fail with a cryptic error. (:issue:`3366`) +* Add ``hive.fs.new-directory-permissions`` configuration property for setting the permissions of new directories + created by Presto. Default value is ``0777``. (:issue:`3126`) +* Add ``hive.partition-use-column-names`` configuration property and matching ``partition_use_column_names`` catalog + session property that allows matching columns between table and partition schemas by names. By default they are mapped + by index. (:issue:`2933`) +* Add support for ``CREATE SCHEMA ... AUTHORIZATION`` to create a schema with specified owner. (:issue:`3066`) +* Allow specifying the Glue metastore endpoint URL using the + ``hive.metastore.glue.endpoint-url`` configuration property. (:issue:`3239`) +* Add experimental file system caching. This can be enabled with the ``hive.cache.enabled`` configuration property.
(:issue:`2679`) +* Support reading files compressed with newer versions of LZO. (:issue:`3209`) +* Add support for :ref:`alluxio_catalog_service`. (:issue:`2116`) +* Remove unnecessary ``hive.metastore.glue.use-instance-credentials`` configuration property. (:issue:`3265`) +* Remove unnecessary ``hive.s3.use-instance-credentials`` configuration property. (:issue:`3265`) +* Add flexible :ref:`hive-s3-security-mapping`, allowing for separate credentials + or IAM roles for specific users or buckets/paths. (:issue:`3265`) +* Add support for specifying an External ID for an IAM role trust policy using + the ``hive.metastore.glue.external-id`` configuration property (:issue:`3144`) +* Allow using configured S3 credentials with IAM role. Previously, + the configured IAM role was silently ignored. (:issue:`3351`) + +Kudu Connector Changes +---------------------- + +* Fix incorrect column mapping in Kudu connector. (:issue:`3170`, :issue:`2963`) +* Fix incorrect query result for certain queries involving ``IS NULL`` predicates with ``OR``. (:issue:`3274`) + +Memory Connector Changes +------------------------ + +* Include views in the list of tables returned to the JDBC driver. (:issue:`3208`) + +MongoDB Connector Changes +------------------------- + +* Add ``objectid_timestamp`` for extracting the timestamp from ``ObjectId``. (:issue:`3089`) +* Delete document from ``_schema`` collection when ``DROP TABLE`` + is executed for a table that exists only in ``_schema``. (:issue:`3234`) + +SQL Server Connector +-------------------- + +* Disallow renaming tables between schemas. Previously, such renames were allowed + but the schema name was ignored when performing the rename. (:issue:`3284`) + +SPI Changes +----------- + +* Expose row filters and column masks in ``QueryCompletedEvent``. (:issue:`3183`) +* Expose referenced functions and procedures in ``QueryCompletedEvent``. (:issue:`3246`) +* Allow ``Connector`` to provide ``EventListener`` instances. 
(:issue:`3166`) +* Deprecate the ``ConnectorPageSourceProvider.createPageSource()`` variant without the + ``dynamicFilter`` parameter. The method will be removed in a future release. (:issue:`3255`) From d239bb4c038124742bed07654dd73c257becc780 Mon Sep 17 00:00:00 2001 From: Martin Traverso Date: Wed, 8 Apr 2020 17:17:32 -0700 Subject: [PATCH 093/519] [maven-release-plugin] prepare release 332 --- pom.xml | 4 ++-- presto-accumulo/pom.xml | 2 +- presto-array/pom.xml | 2 +- presto-atop/pom.xml | 2 +- presto-base-jdbc/pom.xml | 2 +- presto-benchmark-driver/pom.xml | 2 +- presto-benchmark/pom.xml | 2 +- presto-benchto-benchmarks/pom.xml | 2 +- presto-bigquery/pom.xml | 2 +- presto-blackhole/pom.xml | 2 +- presto-cassandra/pom.xml | 2 +- presto-cli/pom.xml | 2 +- presto-client/pom.xml | 2 +- presto-docs/pom.xml | 2 +- presto-elasticsearch/pom.xml | 2 +- presto-example-http/pom.xml | 2 +- presto-geospatial-toolkit/pom.xml | 2 +- presto-geospatial/pom.xml | 2 +- presto-google-sheets/pom.xml | 2 +- presto-hive-hadoop2/pom.xml | 2 +- presto-hive/pom.xml | 2 +- presto-iceberg/pom.xml | 2 +- presto-jdbc/pom.xml | 2 +- presto-jmx/pom.xml | 2 +- presto-kafka/pom.xml | 2 +- presto-kinesis/pom.xml | 2 +- presto-kudu/pom.xml | 2 +- presto-local-file/pom.xml | 2 +- presto-main/pom.xml | 2 +- presto-matching/pom.xml | 2 +- presto-memory-context/pom.xml | 2 +- presto-memory/pom.xml | 2 +- presto-memsql/pom.xml | 2 +- presto-ml/pom.xml | 2 +- presto-mongodb/pom.xml | 2 +- presto-mysql/pom.xml | 2 +- presto-noop/pom.xml | 2 +- presto-orc/pom.xml | 2 +- presto-parquet/pom.xml | 2 +- presto-parser/pom.xml | 2 +- presto-password-authenticators/pom.xml | 2 +- presto-phoenix/pom.xml | 2 +- presto-plugin-toolkit/pom.xml | 2 +- presto-postgresql/pom.xml | 2 +- presto-product-tests-launcher/pom.xml | 2 +- presto-product-tests/pom.xml | 2 +- presto-proxy/pom.xml | 2 +- presto-raptor-legacy/pom.xml | 2 +- presto-rcfile/pom.xml | 2 +- presto-record-decoder/pom.xml | 2 +- presto-redis/pom.xml | 2
+- presto-redshift/pom.xml | 2 +- presto-resource-group-managers/pom.xml | 2 +- presto-server-rpm/pom.xml | 2 +- presto-server/pom.xml | 2 +- presto-session-property-managers/pom.xml | 2 +- presto-spi/pom.xml | 2 +- presto-sqlserver/pom.xml | 2 +- presto-teradata-functions/pom.xml | 2 +- presto-testing-server-launcher/pom.xml | 2 +- presto-testing/pom.xml | 2 +- presto-tests/pom.xml | 2 +- presto-thrift-api/pom.xml | 2 +- presto-thrift-testing-server/pom.xml | 2 +- presto-thrift/pom.xml | 2 +- presto-tpcds/pom.xml | 2 +- presto-tpch/pom.xml | 2 +- presto-verifier/pom.xml | 2 +- 68 files changed, 69 insertions(+), 69 deletions(-) diff --git a/pom.xml b/pom.xml index e4ca2cd4cef8..91c4c4b2795c 100644 --- a/pom.xml +++ b/pom.xml @@ -10,7 +10,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 pom presto-root @@ -30,7 +30,7 @@ scm:git:git://github.com/prestosql/presto.git https://github.com/prestosql/presto - HEAD + 332 diff --git a/presto-accumulo/pom.xml b/presto-accumulo/pom.xml index bed7e1bf8b0d..2754cbf313f4 100644 --- a/presto-accumulo/pom.xml +++ b/presto-accumulo/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-accumulo diff --git a/presto-array/pom.xml b/presto-array/pom.xml index ecd22bfff656..fad0177e2d28 100644 --- a/presto-array/pom.xml +++ b/presto-array/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-array diff --git a/presto-atop/pom.xml b/presto-atop/pom.xml index d2b1af8275a5..477fe8353a43 100644 --- a/presto-atop/pom.xml +++ b/presto-atop/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-atop diff --git a/presto-base-jdbc/pom.xml b/presto-base-jdbc/pom.xml index 00bc3e00318b..3bb1726d4992 100644 --- a/presto-base-jdbc/pom.xml +++ b/presto-base-jdbc/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-base-jdbc diff --git a/presto-benchmark-driver/pom.xml b/presto-benchmark-driver/pom.xml index 59768e4f4f23..6ba0ac0ecec9 100644 --- 
a/presto-benchmark-driver/pom.xml +++ b/presto-benchmark-driver/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-benchmark-driver diff --git a/presto-benchmark/pom.xml b/presto-benchmark/pom.xml index 68b0366d40ea..0d4f42f632ec 100644 --- a/presto-benchmark/pom.xml +++ b/presto-benchmark/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-benchmark diff --git a/presto-benchto-benchmarks/pom.xml b/presto-benchto-benchmarks/pom.xml index 439f999ada1d..94f721cc6eeb 100644 --- a/presto-benchto-benchmarks/pom.xml +++ b/presto-benchto-benchmarks/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-benchto-benchmarks diff --git a/presto-bigquery/pom.xml b/presto-bigquery/pom.xml index bb185d3b2566..71280e6ca96d 100644 --- a/presto-bigquery/pom.xml +++ b/presto-bigquery/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-bigquery diff --git a/presto-blackhole/pom.xml b/presto-blackhole/pom.xml index d4ac1678e842..eca03391fb7d 100644 --- a/presto-blackhole/pom.xml +++ b/presto-blackhole/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-blackhole diff --git a/presto-cassandra/pom.xml b/presto-cassandra/pom.xml index b027823e5181..8250618410c6 100644 --- a/presto-cassandra/pom.xml +++ b/presto-cassandra/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-cassandra diff --git a/presto-cli/pom.xml b/presto-cli/pom.xml index d20f5282d860..3940008deb4b 100644 --- a/presto-cli/pom.xml +++ b/presto-cli/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-cli diff --git a/presto-client/pom.xml b/presto-client/pom.xml index 890b3292685f..748c5fa744fa 100644 --- a/presto-client/pom.xml +++ b/presto-client/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-client diff --git a/presto-docs/pom.xml b/presto-docs/pom.xml index 6e81d90243b3..97894d27ff0f 100644 --- a/presto-docs/pom.xml +++ 
b/presto-docs/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-docs diff --git a/presto-elasticsearch/pom.xml b/presto-elasticsearch/pom.xml index 731efbec2e78..338a9dfa1937 100644 --- a/presto-elasticsearch/pom.xml +++ b/presto-elasticsearch/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-elasticsearch diff --git a/presto-example-http/pom.xml b/presto-example-http/pom.xml index 88e0da2df555..edba67d181ae 100644 --- a/presto-example-http/pom.xml +++ b/presto-example-http/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-example-http diff --git a/presto-geospatial-toolkit/pom.xml b/presto-geospatial-toolkit/pom.xml index 7e83cb895365..216149131f2a 100644 --- a/presto-geospatial-toolkit/pom.xml +++ b/presto-geospatial-toolkit/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-geospatial-toolkit diff --git a/presto-geospatial/pom.xml b/presto-geospatial/pom.xml index f199c3d64d59..250057ff0d50 100644 --- a/presto-geospatial/pom.xml +++ b/presto-geospatial/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-geospatial diff --git a/presto-google-sheets/pom.xml b/presto-google-sheets/pom.xml index 14d5c04c556e..c8f589195e46 100644 --- a/presto-google-sheets/pom.xml +++ b/presto-google-sheets/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-google-sheets diff --git a/presto-hive-hadoop2/pom.xml b/presto-hive-hadoop2/pom.xml index 8dc97fce83e3..c5a07942e670 100644 --- a/presto-hive-hadoop2/pom.xml +++ b/presto-hive-hadoop2/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-hive-hadoop2 diff --git a/presto-hive/pom.xml b/presto-hive/pom.xml index 0bb54a2e850d..8077718c37b5 100644 --- a/presto-hive/pom.xml +++ b/presto-hive/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-hive diff --git a/presto-iceberg/pom.xml b/presto-iceberg/pom.xml index 
9027c81055e9..fc7558d8c92a 100644 --- a/presto-iceberg/pom.xml +++ b/presto-iceberg/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-iceberg diff --git a/presto-jdbc/pom.xml b/presto-jdbc/pom.xml index 48356dd13b76..4446299083fb 100644 --- a/presto-jdbc/pom.xml +++ b/presto-jdbc/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-jdbc diff --git a/presto-jmx/pom.xml b/presto-jmx/pom.xml index abc934836697..07fca4cf320f 100644 --- a/presto-jmx/pom.xml +++ b/presto-jmx/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-jmx diff --git a/presto-kafka/pom.xml b/presto-kafka/pom.xml index b53b3bcb3b00..bc5902c993ce 100644 --- a/presto-kafka/pom.xml +++ b/presto-kafka/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-kafka diff --git a/presto-kinesis/pom.xml b/presto-kinesis/pom.xml index 24b8dbe1298f..393dd41e698a 100644 --- a/presto-kinesis/pom.xml +++ b/presto-kinesis/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-kinesis diff --git a/presto-kudu/pom.xml b/presto-kudu/pom.xml index fcb690de6fc0..b77caed625f5 100644 --- a/presto-kudu/pom.xml +++ b/presto-kudu/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-kudu diff --git a/presto-local-file/pom.xml b/presto-local-file/pom.xml index 7b82edf7f002..5a006a3e4067 100644 --- a/presto-local-file/pom.xml +++ b/presto-local-file/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-local-file diff --git a/presto-main/pom.xml b/presto-main/pom.xml index f39cafb2bcea..38f9c3022bd7 100644 --- a/presto-main/pom.xml +++ b/presto-main/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-main diff --git a/presto-matching/pom.xml b/presto-matching/pom.xml index c408b30b4fd4..c23e0fc73e4d 100644 --- a/presto-matching/pom.xml +++ b/presto-matching/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 
presto-matching diff --git a/presto-memory-context/pom.xml b/presto-memory-context/pom.xml index 142f9f5d5e10..4c520594a149 100644 --- a/presto-memory-context/pom.xml +++ b/presto-memory-context/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-memory-context diff --git a/presto-memory/pom.xml b/presto-memory/pom.xml index 3ed4a825a011..f0492ecbae33 100644 --- a/presto-memory/pom.xml +++ b/presto-memory/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-memory diff --git a/presto-memsql/pom.xml b/presto-memsql/pom.xml index 0b060cc456d4..874758abb1b1 100644 --- a/presto-memsql/pom.xml +++ b/presto-memsql/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-memsql diff --git a/presto-ml/pom.xml b/presto-ml/pom.xml index e8f3b1ef48c6..e3d05fdbd04a 100644 --- a/presto-ml/pom.xml +++ b/presto-ml/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-ml diff --git a/presto-mongodb/pom.xml b/presto-mongodb/pom.xml index a82d7a2af2fb..4e0a94e3ce2d 100644 --- a/presto-mongodb/pom.xml +++ b/presto-mongodb/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-mongodb diff --git a/presto-mysql/pom.xml b/presto-mysql/pom.xml index c82b8713fa5d..2e889dbfdcb3 100644 --- a/presto-mysql/pom.xml +++ b/presto-mysql/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-mysql diff --git a/presto-noop/pom.xml b/presto-noop/pom.xml index 15685c39a0f8..3d2432873d06 100644 --- a/presto-noop/pom.xml +++ b/presto-noop/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-noop diff --git a/presto-orc/pom.xml b/presto-orc/pom.xml index dee02c603bae..cd7767c6bce2 100644 --- a/presto-orc/pom.xml +++ b/presto-orc/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-orc diff --git a/presto-parquet/pom.xml b/presto-parquet/pom.xml index e61350bed258..71175b00a17c 100644 --- a/presto-parquet/pom.xml +++ 
b/presto-parquet/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-parquet diff --git a/presto-parser/pom.xml b/presto-parser/pom.xml index 854fa06dc15d..1865c7d60bf0 100644 --- a/presto-parser/pom.xml +++ b/presto-parser/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-parser diff --git a/presto-password-authenticators/pom.xml b/presto-password-authenticators/pom.xml index be11c658aaf2..1e20860872b0 100644 --- a/presto-password-authenticators/pom.xml +++ b/presto-password-authenticators/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-password-authenticators diff --git a/presto-phoenix/pom.xml b/presto-phoenix/pom.xml index 2b95b8245226..156e0e700951 100644 --- a/presto-phoenix/pom.xml +++ b/presto-phoenix/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-phoenix diff --git a/presto-plugin-toolkit/pom.xml b/presto-plugin-toolkit/pom.xml index c421585bc30e..03f4d82584b7 100644 --- a/presto-plugin-toolkit/pom.xml +++ b/presto-plugin-toolkit/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-plugin-toolkit diff --git a/presto-postgresql/pom.xml b/presto-postgresql/pom.xml index aa6e7cfb2437..5c33f5a62466 100644 --- a/presto-postgresql/pom.xml +++ b/presto-postgresql/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-postgresql diff --git a/presto-product-tests-launcher/pom.xml b/presto-product-tests-launcher/pom.xml index ab4acf9dd7f3..25e9863d8e86 100644 --- a/presto-product-tests-launcher/pom.xml +++ b/presto-product-tests-launcher/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-product-tests-launcher diff --git a/presto-product-tests/pom.xml b/presto-product-tests/pom.xml index 6eb036c69fe3..e23ea5ff97f0 100644 --- a/presto-product-tests/pom.xml +++ b/presto-product-tests/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-product-tests diff --git 
a/presto-proxy/pom.xml b/presto-proxy/pom.xml index c62c7e69d6bb..4d714657767e 100644 --- a/presto-proxy/pom.xml +++ b/presto-proxy/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-proxy diff --git a/presto-raptor-legacy/pom.xml b/presto-raptor-legacy/pom.xml index 56b33c7b3afd..cb3f2b217a57 100644 --- a/presto-raptor-legacy/pom.xml +++ b/presto-raptor-legacy/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-raptor-legacy diff --git a/presto-rcfile/pom.xml b/presto-rcfile/pom.xml index 1dd9fd3dd61d..69df964cab6c 100644 --- a/presto-rcfile/pom.xml +++ b/presto-rcfile/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-rcfile diff --git a/presto-record-decoder/pom.xml b/presto-record-decoder/pom.xml index 1ccdf5f1b296..078607e912cb 100644 --- a/presto-record-decoder/pom.xml +++ b/presto-record-decoder/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-record-decoder diff --git a/presto-redis/pom.xml b/presto-redis/pom.xml index 3df40083db9a..85411a5c7a7d 100644 --- a/presto-redis/pom.xml +++ b/presto-redis/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-redis diff --git a/presto-redshift/pom.xml b/presto-redshift/pom.xml index 1a8412cad8ab..99a60ada6148 100644 --- a/presto-redshift/pom.xml +++ b/presto-redshift/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-redshift diff --git a/presto-resource-group-managers/pom.xml b/presto-resource-group-managers/pom.xml index 5fe30c81914e..1470c2343d38 100644 --- a/presto-resource-group-managers/pom.xml +++ b/presto-resource-group-managers/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-resource-group-managers diff --git a/presto-server-rpm/pom.xml b/presto-server-rpm/pom.xml index 4eb229a86e18..26ef6f4ff251 100644 --- a/presto-server-rpm/pom.xml +++ b/presto-server-rpm/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 
presto-server-rpm diff --git a/presto-server/pom.xml b/presto-server/pom.xml index 261594b98070..5dfc2e2fc5cd 100644 --- a/presto-server/pom.xml +++ b/presto-server/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-server diff --git a/presto-session-property-managers/pom.xml b/presto-session-property-managers/pom.xml index f0e35b4ad1af..bc13fcd8542c 100644 --- a/presto-session-property-managers/pom.xml +++ b/presto-session-property-managers/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-session-property-managers diff --git a/presto-spi/pom.xml b/presto-spi/pom.xml index 61b7d2cf2571..5bce1643b747 100644 --- a/presto-spi/pom.xml +++ b/presto-spi/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-spi diff --git a/presto-sqlserver/pom.xml b/presto-sqlserver/pom.xml index 1092aea2d116..1e57d086a43c 100644 --- a/presto-sqlserver/pom.xml +++ b/presto-sqlserver/pom.xml @@ -3,7 +3,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 4.0.0 diff --git a/presto-teradata-functions/pom.xml b/presto-teradata-functions/pom.xml index 1ee5ed8d5965..f14f7d9acc18 100644 --- a/presto-teradata-functions/pom.xml +++ b/presto-teradata-functions/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-teradata-functions diff --git a/presto-testing-server-launcher/pom.xml b/presto-testing-server-launcher/pom.xml index 881fdf932cc8..3ba288b169a8 100644 --- a/presto-testing-server-launcher/pom.xml +++ b/presto-testing-server-launcher/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-testing-server-launcher diff --git a/presto-testing/pom.xml b/presto-testing/pom.xml index de92c014d3e1..8043427826fa 100644 --- a/presto-testing/pom.xml +++ b/presto-testing/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-testing diff --git a/presto-tests/pom.xml b/presto-tests/pom.xml index 7d66f1d300e0..0209b867bbda 100644 --- a/presto-tests/pom.xml +++ 
b/presto-tests/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-tests diff --git a/presto-thrift-api/pom.xml b/presto-thrift-api/pom.xml index 4f4332a65619..72ddcefde41d 100644 --- a/presto-thrift-api/pom.xml +++ b/presto-thrift-api/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-thrift-api diff --git a/presto-thrift-testing-server/pom.xml b/presto-thrift-testing-server/pom.xml index 85d0f0380913..53ce23f5c352 100644 --- a/presto-thrift-testing-server/pom.xml +++ b/presto-thrift-testing-server/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-thrift-testing-server diff --git a/presto-thrift/pom.xml b/presto-thrift/pom.xml index cbf0d152967a..00283a913419 100644 --- a/presto-thrift/pom.xml +++ b/presto-thrift/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-thrift diff --git a/presto-tpcds/pom.xml b/presto-tpcds/pom.xml index 6f17360943ab..55cc4e1fa189 100644 --- a/presto-tpcds/pom.xml +++ b/presto-tpcds/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-tpcds diff --git a/presto-tpch/pom.xml b/presto-tpch/pom.xml index dc051f7aff16..eb740476d33e 100644 --- a/presto-tpch/pom.xml +++ b/presto-tpch/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-tpch diff --git a/presto-verifier/pom.xml b/presto-verifier/pom.xml index 9b6e6d189873..c1440e28453a 100644 --- a/presto-verifier/pom.xml +++ b/presto-verifier/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332-SNAPSHOT + 332 presto-verifier From 73e0d536aec11ad003dae74efa5dc3b2712b850b Mon Sep 17 00:00:00 2001 From: Martin Traverso Date: Wed, 8 Apr 2020 17:17:33 -0700 Subject: [PATCH 094/519] [maven-release-plugin] prepare for next development iteration --- pom.xml | 4 ++-- presto-accumulo/pom.xml | 2 +- presto-array/pom.xml | 2 +- presto-atop/pom.xml | 2 +- presto-base-jdbc/pom.xml | 2 +- presto-benchmark-driver/pom.xml | 2 +- presto-benchmark/pom.xml | 2 
+- presto-benchto-benchmarks/pom.xml | 2 +- presto-bigquery/pom.xml | 2 +- presto-blackhole/pom.xml | 2 +- presto-cassandra/pom.xml | 2 +- presto-cli/pom.xml | 2 +- presto-client/pom.xml | 2 +- presto-docs/pom.xml | 2 +- presto-elasticsearch/pom.xml | 2 +- presto-example-http/pom.xml | 2 +- presto-geospatial-toolkit/pom.xml | 2 +- presto-geospatial/pom.xml | 2 +- presto-google-sheets/pom.xml | 2 +- presto-hive-hadoop2/pom.xml | 2 +- presto-hive/pom.xml | 2 +- presto-iceberg/pom.xml | 2 +- presto-jdbc/pom.xml | 2 +- presto-jmx/pom.xml | 2 +- presto-kafka/pom.xml | 2 +- presto-kinesis/pom.xml | 2 +- presto-kudu/pom.xml | 2 +- presto-local-file/pom.xml | 2 +- presto-main/pom.xml | 2 +- presto-matching/pom.xml | 2 +- presto-memory-context/pom.xml | 2 +- presto-memory/pom.xml | 2 +- presto-memsql/pom.xml | 2 +- presto-ml/pom.xml | 2 +- presto-mongodb/pom.xml | 2 +- presto-mysql/pom.xml | 2 +- presto-noop/pom.xml | 2 +- presto-orc/pom.xml | 2 +- presto-parquet/pom.xml | 2 +- presto-parser/pom.xml | 2 +- presto-password-authenticators/pom.xml | 2 +- presto-phoenix/pom.xml | 2 +- presto-plugin-toolkit/pom.xml | 2 +- presto-postgresql/pom.xml | 2 +- presto-product-tests-launcher/pom.xml | 2 +- presto-product-tests/pom.xml | 2 +- presto-proxy/pom.xml | 2 +- presto-raptor-legacy/pom.xml | 2 +- presto-rcfile/pom.xml | 2 +- presto-record-decoder/pom.xml | 2 +- presto-redis/pom.xml | 2 +- presto-redshift/pom.xml | 2 +- presto-resource-group-managers/pom.xml | 2 +- presto-server-rpm/pom.xml | 2 +- presto-server/pom.xml | 2 +- presto-session-property-managers/pom.xml | 2 +- presto-spi/pom.xml | 2 +- presto-sqlserver/pom.xml | 2 +- presto-teradata-functions/pom.xml | 2 +- presto-testing-server-launcher/pom.xml | 2 +- presto-testing/pom.xml | 2 +- presto-tests/pom.xml | 2 +- presto-thrift-api/pom.xml | 2 +- presto-thrift-testing-server/pom.xml | 2 +- presto-thrift/pom.xml | 2 +- presto-tpcds/pom.xml | 2 +- presto-tpch/pom.xml | 2 +- presto-verifier/pom.xml | 2 +- 68 files changed, 
69 insertions(+), 69 deletions(-) diff --git a/pom.xml b/pom.xml index 91c4c4b2795c..83ed82a03b15 100644 --- a/pom.xml +++ b/pom.xml @@ -10,7 +10,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT pom presto-root @@ -30,7 +30,7 @@ scm:git:git://github.com/prestosql/presto.git https://github.com/prestosql/presto - 332 + HEAD diff --git a/presto-accumulo/pom.xml b/presto-accumulo/pom.xml index 2754cbf313f4..ffc539612319 100644 --- a/presto-accumulo/pom.xml +++ b/presto-accumulo/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-accumulo diff --git a/presto-array/pom.xml b/presto-array/pom.xml index fad0177e2d28..97333c94addd 100644 --- a/presto-array/pom.xml +++ b/presto-array/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-array diff --git a/presto-atop/pom.xml b/presto-atop/pom.xml index 477fe8353a43..87b5d072886f 100644 --- a/presto-atop/pom.xml +++ b/presto-atop/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-atop diff --git a/presto-base-jdbc/pom.xml b/presto-base-jdbc/pom.xml index 3bb1726d4992..f6ade36ba376 100644 --- a/presto-base-jdbc/pom.xml +++ b/presto-base-jdbc/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-base-jdbc diff --git a/presto-benchmark-driver/pom.xml b/presto-benchmark-driver/pom.xml index 6ba0ac0ecec9..a1c87b16e427 100644 --- a/presto-benchmark-driver/pom.xml +++ b/presto-benchmark-driver/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-benchmark-driver diff --git a/presto-benchmark/pom.xml b/presto-benchmark/pom.xml index 0d4f42f632ec..3ac166609048 100644 --- a/presto-benchmark/pom.xml +++ b/presto-benchmark/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-benchmark diff --git a/presto-benchto-benchmarks/pom.xml b/presto-benchto-benchmarks/pom.xml index 94f721cc6eeb..8c987a2c8054 100644 --- a/presto-benchto-benchmarks/pom.xml +++ b/presto-benchto-benchmarks/pom.xml @@ -4,7 
+4,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-benchto-benchmarks diff --git a/presto-bigquery/pom.xml b/presto-bigquery/pom.xml index 71280e6ca96d..9ad0e21b5ca5 100644 --- a/presto-bigquery/pom.xml +++ b/presto-bigquery/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-bigquery diff --git a/presto-blackhole/pom.xml b/presto-blackhole/pom.xml index eca03391fb7d..5017d4040f0c 100644 --- a/presto-blackhole/pom.xml +++ b/presto-blackhole/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-blackhole diff --git a/presto-cassandra/pom.xml b/presto-cassandra/pom.xml index 8250618410c6..eac005ef5eb4 100644 --- a/presto-cassandra/pom.xml +++ b/presto-cassandra/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-cassandra diff --git a/presto-cli/pom.xml b/presto-cli/pom.xml index 3940008deb4b..044e50fe4295 100644 --- a/presto-cli/pom.xml +++ b/presto-cli/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-cli diff --git a/presto-client/pom.xml b/presto-client/pom.xml index 748c5fa744fa..2ba0cc7ee6dc 100644 --- a/presto-client/pom.xml +++ b/presto-client/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-client diff --git a/presto-docs/pom.xml b/presto-docs/pom.xml index 97894d27ff0f..ea4a3fa4521e 100644 --- a/presto-docs/pom.xml +++ b/presto-docs/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-docs diff --git a/presto-elasticsearch/pom.xml b/presto-elasticsearch/pom.xml index 338a9dfa1937..7a16926e6f0a 100644 --- a/presto-elasticsearch/pom.xml +++ b/presto-elasticsearch/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-elasticsearch diff --git a/presto-example-http/pom.xml b/presto-example-http/pom.xml index edba67d181ae..79115edc57ea 100644 --- a/presto-example-http/pom.xml +++ b/presto-example-http/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT 
presto-example-http diff --git a/presto-geospatial-toolkit/pom.xml b/presto-geospatial-toolkit/pom.xml index 216149131f2a..0f6e9bcd0fe5 100644 --- a/presto-geospatial-toolkit/pom.xml +++ b/presto-geospatial-toolkit/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-geospatial-toolkit diff --git a/presto-geospatial/pom.xml b/presto-geospatial/pom.xml index 250057ff0d50..197144d632aa 100644 --- a/presto-geospatial/pom.xml +++ b/presto-geospatial/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-geospatial diff --git a/presto-google-sheets/pom.xml b/presto-google-sheets/pom.xml index c8f589195e46..050e2c4b4df1 100644 --- a/presto-google-sheets/pom.xml +++ b/presto-google-sheets/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-google-sheets diff --git a/presto-hive-hadoop2/pom.xml b/presto-hive-hadoop2/pom.xml index c5a07942e670..a306ce9d3b70 100644 --- a/presto-hive-hadoop2/pom.xml +++ b/presto-hive-hadoop2/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-hive-hadoop2 diff --git a/presto-hive/pom.xml b/presto-hive/pom.xml index 8077718c37b5..d8d3bf85a614 100644 --- a/presto-hive/pom.xml +++ b/presto-hive/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-hive diff --git a/presto-iceberg/pom.xml b/presto-iceberg/pom.xml index fc7558d8c92a..3a87a043c491 100644 --- a/presto-iceberg/pom.xml +++ b/presto-iceberg/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-iceberg diff --git a/presto-jdbc/pom.xml b/presto-jdbc/pom.xml index 4446299083fb..51cbae33354a 100644 --- a/presto-jdbc/pom.xml +++ b/presto-jdbc/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-jdbc diff --git a/presto-jmx/pom.xml b/presto-jmx/pom.xml index 07fca4cf320f..630f241187dc 100644 --- a/presto-jmx/pom.xml +++ b/presto-jmx/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-jmx diff --git 
a/presto-kafka/pom.xml b/presto-kafka/pom.xml index bc5902c993ce..5741b29623ce 100644 --- a/presto-kafka/pom.xml +++ b/presto-kafka/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-kafka diff --git a/presto-kinesis/pom.xml b/presto-kinesis/pom.xml index 393dd41e698a..7da27a5da949 100644 --- a/presto-kinesis/pom.xml +++ b/presto-kinesis/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-kinesis diff --git a/presto-kudu/pom.xml b/presto-kudu/pom.xml index b77caed625f5..70e232a589bb 100644 --- a/presto-kudu/pom.xml +++ b/presto-kudu/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-kudu diff --git a/presto-local-file/pom.xml b/presto-local-file/pom.xml index 5a006a3e4067..58412e6baf55 100644 --- a/presto-local-file/pom.xml +++ b/presto-local-file/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-local-file diff --git a/presto-main/pom.xml b/presto-main/pom.xml index 38f9c3022bd7..3f5bbd89366c 100644 --- a/presto-main/pom.xml +++ b/presto-main/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-main diff --git a/presto-matching/pom.xml b/presto-matching/pom.xml index c23e0fc73e4d..feb5d552f72f 100644 --- a/presto-matching/pom.xml +++ b/presto-matching/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-matching diff --git a/presto-memory-context/pom.xml b/presto-memory-context/pom.xml index 4c520594a149..0f5dd805df5e 100644 --- a/presto-memory-context/pom.xml +++ b/presto-memory-context/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-memory-context diff --git a/presto-memory/pom.xml b/presto-memory/pom.xml index f0492ecbae33..c4161556cbf2 100644 --- a/presto-memory/pom.xml +++ b/presto-memory/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-memory diff --git a/presto-memsql/pom.xml b/presto-memsql/pom.xml index 874758abb1b1..593b15089e52 100644 --- 
a/presto-memsql/pom.xml +++ b/presto-memsql/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-memsql diff --git a/presto-ml/pom.xml b/presto-ml/pom.xml index e3d05fdbd04a..b66b8f4ddc74 100644 --- a/presto-ml/pom.xml +++ b/presto-ml/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-ml diff --git a/presto-mongodb/pom.xml b/presto-mongodb/pom.xml index 4e0a94e3ce2d..a919919038ba 100644 --- a/presto-mongodb/pom.xml +++ b/presto-mongodb/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-mongodb diff --git a/presto-mysql/pom.xml b/presto-mysql/pom.xml index 2e889dbfdcb3..30fca3f14aec 100644 --- a/presto-mysql/pom.xml +++ b/presto-mysql/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-mysql diff --git a/presto-noop/pom.xml b/presto-noop/pom.xml index 3d2432873d06..181779fd9b79 100644 --- a/presto-noop/pom.xml +++ b/presto-noop/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-noop diff --git a/presto-orc/pom.xml b/presto-orc/pom.xml index cd7767c6bce2..0736a64d2229 100644 --- a/presto-orc/pom.xml +++ b/presto-orc/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-orc diff --git a/presto-parquet/pom.xml b/presto-parquet/pom.xml index 71175b00a17c..8d6f4b2c8f15 100644 --- a/presto-parquet/pom.xml +++ b/presto-parquet/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-parquet diff --git a/presto-parser/pom.xml b/presto-parser/pom.xml index 1865c7d60bf0..1bdc59664d82 100644 --- a/presto-parser/pom.xml +++ b/presto-parser/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-parser diff --git a/presto-password-authenticators/pom.xml b/presto-password-authenticators/pom.xml index 1e20860872b0..2b1bae70d9f1 100644 --- a/presto-password-authenticators/pom.xml +++ b/presto-password-authenticators/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT 
presto-password-authenticators diff --git a/presto-phoenix/pom.xml b/presto-phoenix/pom.xml index 156e0e700951..6f3dc9229e4d 100644 --- a/presto-phoenix/pom.xml +++ b/presto-phoenix/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-phoenix diff --git a/presto-plugin-toolkit/pom.xml b/presto-plugin-toolkit/pom.xml index 03f4d82584b7..a824badef735 100644 --- a/presto-plugin-toolkit/pom.xml +++ b/presto-plugin-toolkit/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-plugin-toolkit diff --git a/presto-postgresql/pom.xml b/presto-postgresql/pom.xml index 5c33f5a62466..0fb2f70654e3 100644 --- a/presto-postgresql/pom.xml +++ b/presto-postgresql/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-postgresql diff --git a/presto-product-tests-launcher/pom.xml b/presto-product-tests-launcher/pom.xml index 25e9863d8e86..9897bf936b09 100644 --- a/presto-product-tests-launcher/pom.xml +++ b/presto-product-tests-launcher/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-product-tests-launcher diff --git a/presto-product-tests/pom.xml b/presto-product-tests/pom.xml index e23ea5ff97f0..0f44eb1b00f6 100644 --- a/presto-product-tests/pom.xml +++ b/presto-product-tests/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-product-tests diff --git a/presto-proxy/pom.xml b/presto-proxy/pom.xml index 4d714657767e..c2090e2b939e 100644 --- a/presto-proxy/pom.xml +++ b/presto-proxy/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-proxy diff --git a/presto-raptor-legacy/pom.xml b/presto-raptor-legacy/pom.xml index cb3f2b217a57..daf7976b844f 100644 --- a/presto-raptor-legacy/pom.xml +++ b/presto-raptor-legacy/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-raptor-legacy diff --git a/presto-rcfile/pom.xml b/presto-rcfile/pom.xml index 69df964cab6c..a4036438e062 100644 --- a/presto-rcfile/pom.xml +++ 
b/presto-rcfile/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-rcfile diff --git a/presto-record-decoder/pom.xml b/presto-record-decoder/pom.xml index 078607e912cb..5cce08d0a4e4 100644 --- a/presto-record-decoder/pom.xml +++ b/presto-record-decoder/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-record-decoder diff --git a/presto-redis/pom.xml b/presto-redis/pom.xml index 85411a5c7a7d..14a05f135e03 100644 --- a/presto-redis/pom.xml +++ b/presto-redis/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-redis diff --git a/presto-redshift/pom.xml b/presto-redshift/pom.xml index 99a60ada6148..71106cbb9d21 100644 --- a/presto-redshift/pom.xml +++ b/presto-redshift/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-redshift diff --git a/presto-resource-group-managers/pom.xml b/presto-resource-group-managers/pom.xml index 1470c2343d38..d1f0240178a6 100644 --- a/presto-resource-group-managers/pom.xml +++ b/presto-resource-group-managers/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-resource-group-managers diff --git a/presto-server-rpm/pom.xml b/presto-server-rpm/pom.xml index 26ef6f4ff251..aa91eaaeb75e 100644 --- a/presto-server-rpm/pom.xml +++ b/presto-server-rpm/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-server-rpm diff --git a/presto-server/pom.xml b/presto-server/pom.xml index 5dfc2e2fc5cd..7bb28d437dae 100644 --- a/presto-server/pom.xml +++ b/presto-server/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-server diff --git a/presto-session-property-managers/pom.xml b/presto-session-property-managers/pom.xml index bc13fcd8542c..2e47115baf33 100644 --- a/presto-session-property-managers/pom.xml +++ b/presto-session-property-managers/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-session-property-managers diff --git a/presto-spi/pom.xml 
b/presto-spi/pom.xml index 5bce1643b747..51cbcfb7bd88 100644 --- a/presto-spi/pom.xml +++ b/presto-spi/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-spi diff --git a/presto-sqlserver/pom.xml b/presto-sqlserver/pom.xml index 1e57d086a43c..dd2fdfce648d 100644 --- a/presto-sqlserver/pom.xml +++ b/presto-sqlserver/pom.xml @@ -3,7 +3,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT 4.0.0 diff --git a/presto-teradata-functions/pom.xml b/presto-teradata-functions/pom.xml index f14f7d9acc18..cd926748588a 100644 --- a/presto-teradata-functions/pom.xml +++ b/presto-teradata-functions/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-teradata-functions diff --git a/presto-testing-server-launcher/pom.xml b/presto-testing-server-launcher/pom.xml index 3ba288b169a8..f927f4e78bd7 100644 --- a/presto-testing-server-launcher/pom.xml +++ b/presto-testing-server-launcher/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-testing-server-launcher diff --git a/presto-testing/pom.xml b/presto-testing/pom.xml index 8043427826fa..765ed50c7cc9 100644 --- a/presto-testing/pom.xml +++ b/presto-testing/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-testing diff --git a/presto-tests/pom.xml b/presto-tests/pom.xml index 0209b867bbda..6b06053c9873 100644 --- a/presto-tests/pom.xml +++ b/presto-tests/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-tests diff --git a/presto-thrift-api/pom.xml b/presto-thrift-api/pom.xml index 72ddcefde41d..85da30b38cd3 100644 --- a/presto-thrift-api/pom.xml +++ b/presto-thrift-api/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-thrift-api diff --git a/presto-thrift-testing-server/pom.xml b/presto-thrift-testing-server/pom.xml index 53ce23f5c352..e9618bd6374e 100644 --- a/presto-thrift-testing-server/pom.xml +++ b/presto-thrift-testing-server/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 
332 + 333-SNAPSHOT presto-thrift-testing-server diff --git a/presto-thrift/pom.xml b/presto-thrift/pom.xml index 00283a913419..f28a1593afc2 100644 --- a/presto-thrift/pom.xml +++ b/presto-thrift/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-thrift diff --git a/presto-tpcds/pom.xml b/presto-tpcds/pom.xml index 55cc4e1fa189..68796cad6a46 100644 --- a/presto-tpcds/pom.xml +++ b/presto-tpcds/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-tpcds diff --git a/presto-tpch/pom.xml b/presto-tpch/pom.xml index eb740476d33e..748176a6fa7b 100644 --- a/presto-tpch/pom.xml +++ b/presto-tpch/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-tpch diff --git a/presto-verifier/pom.xml b/presto-verifier/pom.xml index c1440e28453a..cf8c3f8345eb 100644 --- a/presto-verifier/pom.xml +++ b/presto-verifier/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 332 + 333-SNAPSHOT presto-verifier From 3ab95af0cba10fe848fa96e8be5d314a0c5334b3 Mon Sep 17 00:00:00 2001 From: Szymon Homa Date: Tue, 7 Apr 2020 13:21:10 +0200 Subject: [PATCH 095/519] Add ClassLoader safe Event Listener --- .../ClassLoaderSafeEventListener.java | 59 +++++++++++++++++++ .../TestClassLoaderSafeWrappers.java | 2 + 2 files changed, 61 insertions(+) create mode 100644 presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/classloader/ClassLoaderSafeEventListener.java diff --git a/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/classloader/ClassLoaderSafeEventListener.java b/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/classloader/ClassLoaderSafeEventListener.java new file mode 100644 index 000000000000..d44f97d248e8 --- /dev/null +++ b/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/classloader/ClassLoaderSafeEventListener.java @@ -0,0 +1,59 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.plugin.base.classloader; + +import io.prestosql.spi.classloader.ThreadContextClassLoader; +import io.prestosql.spi.eventlistener.EventListener; +import io.prestosql.spi.eventlistener.QueryCompletedEvent; +import io.prestosql.spi.eventlistener.QueryCreatedEvent; +import io.prestosql.spi.eventlistener.SplitCompletedEvent; + +import static java.util.Objects.requireNonNull; + +public class ClassLoaderSafeEventListener + implements EventListener +{ + private final EventListener delegate; + private final ClassLoader classLoader; + + public ClassLoaderSafeEventListener(@ForClassLoaderSafe EventListener delegate, ClassLoader classLoader) + { + this.delegate = requireNonNull(delegate, "delegate is null"); + this.classLoader = requireNonNull(classLoader, "classLoader is null"); + } + + @Override + public void queryCreated(QueryCreatedEvent queryCreatedEvent) + { + try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(classLoader)) { + delegate.queryCreated(queryCreatedEvent); + } + } + + @Override + public void queryCompleted(QueryCompletedEvent queryCompletedEvent) + { + try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(classLoader)) { + delegate.queryCompleted(queryCompletedEvent); + } + } + + @Override + public void splitCompleted(SplitCompletedEvent splitCompletedEvent) + { + try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(classLoader)) { + delegate.splitCompleted(splitCompletedEvent); + } + } +} diff --git 
a/presto-plugin-toolkit/src/test/java/io/prestosql/plugin/base/classloader/TestClassLoaderSafeWrappers.java b/presto-plugin-toolkit/src/test/java/io/prestosql/plugin/base/classloader/TestClassLoaderSafeWrappers.java index e95c51928d6a..1cfb363e90d6 100644 --- a/presto-plugin-toolkit/src/test/java/io/prestosql/plugin/base/classloader/TestClassLoaderSafeWrappers.java +++ b/presto-plugin-toolkit/src/test/java/io/prestosql/plugin/base/classloader/TestClassLoaderSafeWrappers.java @@ -24,6 +24,7 @@ import io.prestosql.spi.connector.ConnectorSplitSource; import io.prestosql.spi.connector.RecordSet; import io.prestosql.spi.connector.SystemTable; +import io.prestosql.spi.eventlistener.EventListener; import org.testng.annotations.Test; import static io.prestosql.spi.testing.InterfaceTestUtils.assertAllMethodsOverridden; @@ -44,5 +45,6 @@ public void testAllMethodsOverridden() assertAllMethodsOverridden(SystemTable.class, ClassLoaderSafeSystemTable.class); assertAllMethodsOverridden(ConnectorRecordSetProvider.class, ClassLoaderSafeConnectorRecordSetProvider.class); assertAllMethodsOverridden(RecordSet.class, ClassLoaderSafeRecordSet.class); + assertAllMethodsOverridden(EventListener.class, ClassLoaderSafeEventListener.class); } } From b5e3aff2bb0e707823bb747e1e4fc4facb176f42 Mon Sep 17 00:00:00 2001 From: Szymon Homa Date: Tue, 7 Apr 2020 13:21:34 +0200 Subject: [PATCH 096/519] Allow HiveConnector to return EventListener --- .../java/io/prestosql/plugin/hive/HiveConnector.java | 10 ++++++++++ .../plugin/hive/InternalHiveConnectorFactory.java | 10 ++++++++++ 2 files changed, 20 insertions(+) diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveConnector.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveConnector.java index f45e75851ed3..6032add6db8d 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveConnector.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveConnector.java @@ -28,6 +28,7 @@ import 
io.prestosql.spi.connector.ConnectorSplitManager; import io.prestosql.spi.connector.ConnectorTransactionHandle; import io.prestosql.spi.connector.SystemTable; +import io.prestosql.spi.eventlistener.EventListener; import io.prestosql.spi.procedure.Procedure; import io.prestosql.spi.session.PropertyMetadata; import io.prestosql.spi.transaction.IsolationLevel; @@ -52,6 +53,7 @@ public class HiveConnector private final ConnectorNodePartitioningProvider nodePartitioningProvider; private final Set systemTables; private final Set procedures; + private final Set eventListeners; private final List> sessionProperties; private final List> schemaProperties; private final List> tableProperties; @@ -72,6 +74,7 @@ public HiveConnector( ConnectorNodePartitioningProvider nodePartitioningProvider, Set systemTables, Set procedures, + Set eventListeners, List> sessionProperties, List> schemaProperties, List> tableProperties, @@ -88,6 +91,7 @@ public HiveConnector( this.nodePartitioningProvider = requireNonNull(nodePartitioningProvider, "nodePartitioningProvider is null"); this.systemTables = ImmutableSet.copyOf(requireNonNull(systemTables, "systemTables is null")); this.procedures = ImmutableSet.copyOf(requireNonNull(procedures, "procedures is null")); + this.eventListeners = ImmutableSet.copyOf(requireNonNull(eventListeners, "eventListeners is null")); this.sessionProperties = ImmutableList.copyOf(requireNonNull(sessionProperties, "sessionProperties is null")); this.schemaProperties = ImmutableList.copyOf(requireNonNull(schemaProperties, "schemaProperties is null")); this.tableProperties = ImmutableList.copyOf(requireNonNull(tableProperties, "tableProperties is null")); @@ -170,6 +174,12 @@ public List> getTableProperties() return tableProperties; } + @Override + public Iterable getEventListeners() + { + return eventListeners; + } + @Override public ConnectorAccessControl getAccessControl() { diff --git 
a/presto-hive/src/main/java/io/prestosql/plugin/hive/InternalHiveConnectorFactory.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/InternalHiveConnectorFactory.java index 245cac99f750..493223eafc51 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/InternalHiveConnectorFactory.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/InternalHiveConnectorFactory.java @@ -26,6 +26,7 @@ import io.prestosql.plugin.base.classloader.ClassLoaderSafeConnectorPageSinkProvider; import io.prestosql.plugin.base.classloader.ClassLoaderSafeConnectorPageSourceProvider; import io.prestosql.plugin.base.classloader.ClassLoaderSafeConnectorSplitManager; +import io.prestosql.plugin.base.classloader.ClassLoaderSafeEventListener; import io.prestosql.plugin.base.classloader.ClassLoaderSafeNodePartitioningProvider; import io.prestosql.plugin.base.jmx.ConnectorObjectNameGeneratorModule; import io.prestosql.plugin.base.jmx.MBeanServerModule; @@ -54,6 +55,7 @@ import io.prestosql.spi.connector.ConnectorPageSourceProvider; import io.prestosql.spi.connector.ConnectorSplitManager; import io.prestosql.spi.connector.SystemTable; +import io.prestosql.spi.eventlistener.EventListener; import io.prestosql.spi.procedure.Procedure; import io.prestosql.spi.type.TypeManager; import org.weakref.jmx.guice.MBeanModule; @@ -62,6 +64,8 @@ import java.util.Optional; import java.util.Set; +import static com.google.common.collect.ImmutableSet.toImmutableSet; +import static com.google.inject.multibindings.Multibinder.newSetBinder; import static io.airlift.configuration.ConditionalModule.installModuleIf; import static java.util.Objects.requireNonNull; @@ -104,6 +108,7 @@ public static Connector createConnector(String catalogName, Map binder.bind(PageSorter.class).toInstance(context.getPageSorter()); binder.bind(CatalogName.class).toInstance(new CatalogName(catalogName)); }, + binder -> newSetBinder(binder, EventListener.class), module); Injector injector = app @@ -133,6 +138,10 @@ public 
static Connector createConnector(String catalogName, Map classLoader); Set procedures = injector.getInstance(Key.get(new TypeLiteral>() {})); Set systemTables = injector.getInstance(Key.get(new TypeLiteral>() {})); + Set eventListeners = injector.getInstance(Key.get(new TypeLiteral>() {})) + .stream() + .map(listener -> new ClassLoaderSafeEventListener(listener, classLoader)) + .collect(toImmutableSet()); return new HiveConnector( lifeCycleManager, @@ -144,6 +153,7 @@ public static Connector createConnector(String catalogName, Map new ClassLoaderSafeNodePartitioningProvider(connectorDistributionProvider, classLoader), systemTables, procedures, + eventListeners, hiveSessionProperties.getSessionProperties(), HiveSchemaProperties.SCHEMA_PROPERTIES, hiveTableProperties.getTableProperties(), From 6c335ad8c6f20a4e7eec3e66743da5a0d45bf87c Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Mon, 6 Apr 2020 13:02:47 +0200 Subject: [PATCH 097/519] Expose ports conditionally --- .../launcher/env/EnvironmentModule.java | 2 + .../launcher/env/EnvironmentOptions.java | 3 ++ .../product/launcher/env/common/Hadoop.java | 31 +++++++------ .../product/launcher/env/common/Kafka.java | 17 ++++++-- .../product/launcher/env/common/Kerberos.java | 9 ++-- .../product/launcher/env/common/Standard.java | 6 ++- .../environment/AbstractSinglenodeLdap.java | 10 +++-- .../env/environment/MultinodeTls.java | 7 ++- .../env/environment/SinglenodeCassandra.java | 8 ++-- .../env/environment/SinglenodeLdap.java | 5 ++- .../env/environment/SinglenodeLdapBindDn.java | 5 ++- .../environment/SinglenodeLdapReferrals.java | 5 ++- .../env/environment/SinglenodeMysql.java | 8 ++-- .../env/environment/SinglenodePostgresql.java | 8 ++-- .../env/environment/SinglenodeSqlserver.java | 8 ++-- .../launcher/testcontainers/PortBinder.java | 43 +++++++++++++++++++ 16 files changed, 130 insertions(+), 45 deletions(-) create mode 100644 
presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/testcontainers/PortBinder.java diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/EnvironmentModule.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/EnvironmentModule.java index 4f60b3a8d922..e6352e8156cf 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/EnvironmentModule.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/EnvironmentModule.java @@ -21,6 +21,7 @@ import io.prestosql.tests.product.launcher.env.common.Kerberos; import io.prestosql.tests.product.launcher.env.common.KerberosKms; import io.prestosql.tests.product.launcher.env.common.Standard; +import io.prestosql.tests.product.launcher.testcontainers.PortBinder; import static com.google.inject.multibindings.MapBinder.newMapBinder; import static java.util.Objects.requireNonNull; @@ -39,6 +40,7 @@ public EnvironmentModule(Module additionalEnvironments) @Override public void configure(Binder binder) { + binder.bind(PortBinder.class); binder.bind(EnvironmentFactory.class); binder.bind(Standard.class); binder.bind(Hadoop.class); diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/EnvironmentOptions.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/EnvironmentOptions.java index 64aff4941b3c..2997cf023ba9 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/EnvironmentOptions.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/EnvironmentOptions.java @@ -32,6 +32,9 @@ public final class EnvironmentOptions @Option(name = "--without-presto", title = "without Presto", description = "do not start presto-master") public boolean withoutPrestoMaster; + @Option(name = "--bind", description = 
"bind ports on localhost") + public boolean bindPorts = true; + @Option(name = "--debug", description = "open Java debug ports") public boolean debug; diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/common/Hadoop.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/common/Hadoop.java index 3ef5928684dc..a5f985494391 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/common/Hadoop.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/common/Hadoop.java @@ -17,6 +17,7 @@ import io.prestosql.tests.product.launcher.env.DockerContainer; import io.prestosql.tests.product.launcher.env.Environment; import io.prestosql.tests.product.launcher.env.EnvironmentOptions; +import io.prestosql.tests.product.launcher.testcontainers.PortBinder; import io.prestosql.tests.product.launcher.testcontainers.SelectedPortWaitStrategy; import org.testcontainers.containers.startupcheck.IsRunningStartupCheckStrategy; @@ -25,7 +26,6 @@ import java.time.Duration; import static io.prestosql.tests.product.launcher.env.common.Standard.CONTAINER_PRESTO_ETC; -import static io.prestosql.tests.product.launcher.testcontainers.TestcontainersUtil.exposePort; import static java.util.Objects.requireNonNull; import static org.testcontainers.containers.BindMode.READ_ONLY; @@ -36,6 +36,7 @@ public final class Hadoop public static final String CONTAINER_PRESTO_ICEBERG_PROPERTIES = CONTAINER_PRESTO_ETC + "/catalog/iceberg.properties"; private final DockerFiles dockerFiles; + private final PortBinder portBinder; private final String hadoopBaseImage; private final String imagesVersion; @@ -43,9 +44,11 @@ public final class Hadoop @Inject public Hadoop( DockerFiles dockerFiles, + PortBinder portBinder, EnvironmentOptions environmentOptions) { this.dockerFiles = requireNonNull(dockerFiles, "dockerFiles is null"); + this.portBinder = 
requireNonNull(portBinder, "portBinder is null"); requireNonNull(environmentOptions, "environmentOptions is null"); hadoopBaseImage = requireNonNull(environmentOptions.hadoopBaseImage, "environmentOptions.hadoopBaseImage is null"); imagesVersion = requireNonNull(environmentOptions.imagesVersion, "environmentOptions.imagesVersion is null"); @@ -71,19 +74,19 @@ private DockerContainer createHadoopMaster() .waitingFor(new SelectedPortWaitStrategy(10000)) // HiveServer2 .withStartupTimeout(Duration.ofMinutes(5)); - exposePort(container, 1180); // socks proxy - // TODO exposePort(container, 5006); // debug port - exposePort(container, 8020); - exposePort(container, 8042); - exposePort(container, 8088); - exposePort(container, 9000); - exposePort(container, 9083); // Metastore Thrift - exposePort(container, 9864); // DataNode Web UI since Hadoop 3 - exposePort(container, 9870); // NameNode Web UI since Hadoop 3 - exposePort(container, 10000); // HiveServer2 - exposePort(container, 19888); - exposePort(container, 50070); // NameNode Web UI prior to Hadoop 3 - exposePort(container, 50075); // DataNode Web UI prior to Hadoop 3 + portBinder.exposePort(container, 1180); // socks proxy + // TODO portBinder.exposePort(container, 5006); // debug port + portBinder.exposePort(container, 8020); + portBinder.exposePort(container, 8042); + portBinder.exposePort(container, 8088); + portBinder.exposePort(container, 9000); + portBinder.exposePort(container, 9083); // Metastore Thrift + portBinder.exposePort(container, 9864); // DataNode Web UI since Hadoop 3 + portBinder.exposePort(container, 9870); // NameNode Web UI since Hadoop 3 + portBinder.exposePort(container, 10000); // HiveServer2 + portBinder.exposePort(container, 19888); + portBinder.exposePort(container, 50070); // NameNode Web UI prior to Hadoop 3 + portBinder.exposePort(container, 50075); // DataNode Web UI prior to Hadoop 3 return container; } diff --git 
a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/common/Kafka.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/common/Kafka.java index 88fb26fb5b25..790f50056bd9 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/common/Kafka.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/common/Kafka.java @@ -16,16 +16,27 @@ import io.prestosql.tests.product.launcher.env.DockerContainer; import io.prestosql.tests.product.launcher.env.Environment; +import io.prestosql.tests.product.launcher.testcontainers.PortBinder; import io.prestosql.tests.product.launcher.testcontainers.SelectedPortWaitStrategy; import org.testcontainers.containers.startupcheck.IsRunningStartupCheckStrategy; -import static io.prestosql.tests.product.launcher.testcontainers.TestcontainersUtil.exposePort; +import javax.inject.Inject; + +import static java.util.Objects.requireNonNull; public class Kafka implements EnvironmentExtender { private static final String CONFLUENT_VERSION = "5.2.1"; + private final PortBinder portBinder; + + @Inject + public Kafka(PortBinder portBinder) + { + this.portBinder = requireNonNull(portBinder, "portBinder is null"); + } + @Override public void extendEnvironment(Environment.Builder builder) { @@ -42,7 +53,7 @@ private DockerContainer createZookeeper() .withStartupCheckStrategy(new IsRunningStartupCheckStrategy()) .waitingFor(new SelectedPortWaitStrategy(2181)); - exposePort(container, 2181); + portBinder.exposePort(container, 2181); return container; } @@ -59,7 +70,7 @@ private DockerContainer createKafka() .withStartupCheckStrategy(new IsRunningStartupCheckStrategy()) .waitingFor(new SelectedPortWaitStrategy(9092)); - exposePort(container, 9092); + portBinder.exposePort(container, 9092); return container; } diff --git 
a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/common/Kerberos.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/common/Kerberos.java index 22f4249dea27..d1b4f05c02ee 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/common/Kerberos.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/common/Kerberos.java @@ -16,12 +16,12 @@ import io.prestosql.tests.product.launcher.docker.DockerFiles; import io.prestosql.tests.product.launcher.env.Environment; import io.prestosql.tests.product.launcher.env.EnvironmentOptions; +import io.prestosql.tests.product.launcher.testcontainers.PortBinder; import javax.inject.Inject; import static io.prestosql.tests.product.launcher.env.common.Standard.CONTAINER_PRESTO_CONFIG_PROPERTIES; import static io.prestosql.tests.product.launcher.env.common.Standard.CONTAINER_TEMPTO_PROFILE_CONFIG; -import static io.prestosql.tests.product.launcher.testcontainers.TestcontainersUtil.exposePort; import static java.util.Objects.requireNonNull; import static org.testcontainers.containers.BindMode.READ_ONLY; @@ -29,6 +29,7 @@ public class Kerberos implements EnvironmentExtender { private final DockerFiles dockerFiles; + private final PortBinder portBinder; private final String hadoopBaseImage; private final String imagesVersion; @@ -36,9 +37,11 @@ public class Kerberos @Inject public Kerberos( DockerFiles dockerFiles, + PortBinder portBinder, EnvironmentOptions environmentOptions) { this.dockerFiles = requireNonNull(dockerFiles, "dockerFiles is null"); + this.portBinder = requireNonNull(portBinder, "portBinder is null"); requireNonNull(environmentOptions, "environmentOptions is null"); hadoopBaseImage = requireNonNull(environmentOptions.hadoopBaseImage, "environmentOptions.hadoopBaseImage is null"); imagesVersion = requireNonNull(environmentOptions.imagesVersion, 
"environmentOptions.imagesVersion is null"); @@ -50,11 +53,11 @@ public void extendEnvironment(Environment.Builder builder) String dockerImageName = hadoopBaseImage + "-kerberized:" + imagesVersion; builder.configureContainer("hadoop-master", container -> { container.setDockerImageName(dockerImageName); - exposePort(container, 88); + portBinder.exposePort(container, 88); }); builder.configureContainer("presto-master", container -> { container.setDockerImageName(dockerImageName); - exposePort(container, 7778); + portBinder.exposePort(container, 7778); container .withNetworkAliases("presto-master.docker.cluster") .withCreateContainerCmdModifier(createContainerCmd -> createContainerCmd.withDomainName("docker.cluster")) diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/common/Standard.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/common/Standard.java index 60afbcfb31a6..3525db9d4cc9 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/common/Standard.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/common/Standard.java @@ -21,6 +21,7 @@ import io.prestosql.tests.product.launcher.env.DockerContainer; import io.prestosql.tests.product.launcher.env.Environment; import io.prestosql.tests.product.launcher.env.EnvironmentOptions; +import io.prestosql.tests.product.launcher.testcontainers.PortBinder; import org.testcontainers.containers.startupcheck.IsRunningStartupCheckStrategy; import org.testcontainers.containers.wait.strategy.Wait; import org.testcontainers.containers.wait.strategy.WaitAllStrategy; @@ -58,6 +59,7 @@ public final class Standard private final PathResolver pathResolver; private final DockerFiles dockerFiles; + private final PortBinder portBinder; private final String imagesVersion; private final File serverPackage; @@ -67,10 +69,12 @@ public final class Standard public Standard( 
PathResolver pathResolver, DockerFiles dockerFiles, + PortBinder portBinder, EnvironmentOptions environmentOptions) { this.pathResolver = requireNonNull(pathResolver, "pathResolver is null"); this.dockerFiles = requireNonNull(dockerFiles, "dockerFiles is null"); + this.portBinder = requireNonNull(portBinder, "portBinder is null"); requireNonNull(environmentOptions, "environmentOptions is null"); imagesVersion = requireNonNull(environmentOptions.imagesVersion, "environmentOptions.imagesVersion is null"); serverPackage = requireNonNull(environmentOptions.serverPackage, "environmentOptions.serverPackage is null"); @@ -92,7 +96,7 @@ private DockerContainer createPrestoMaster() createPrestoContainer(dockerFiles, pathResolver, serverPackage, "prestodev/centos7-oj11:" + imagesVersion) .withFileSystemBind(dockerFiles.getDockerFilesHostPath("common/standard/config.properties"), CONTAINER_PRESTO_CONFIG_PROPERTIES, READ_ONLY); - exposePort(container, 8080); // Presto default port + portBinder.exposePort(container, 8080); // Presto default port if (debug) { container.withCreateContainerCmdModifier(this::enableDebuggerInJvmConfig); diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/AbstractSinglenodeLdap.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/AbstractSinglenodeLdap.java index ab2ab6a80bfc..bf850b8ce64c 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/AbstractSinglenodeLdap.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/AbstractSinglenodeLdap.java @@ -19,6 +19,7 @@ import io.prestosql.tests.product.launcher.env.EnvironmentOptions; import io.prestosql.tests.product.launcher.env.common.AbstractEnvironmentProvider; import io.prestosql.tests.product.launcher.env.common.EnvironmentExtender; +import 
io.prestosql.tests.product.launcher.testcontainers.PortBinder; import io.prestosql.tests.product.launcher.testcontainers.SelectedPortWaitStrategy; import org.testcontainers.containers.startupcheck.IsRunningStartupCheckStrategy; @@ -28,7 +29,6 @@ import static io.prestosql.tests.product.launcher.env.common.Standard.CONTAINER_PRESTO_CONFIG_PROPERTIES; import static io.prestosql.tests.product.launcher.env.common.Standard.CONTAINER_PRESTO_ETC; import static io.prestosql.tests.product.launcher.env.common.Standard.CONTAINER_TEMPTO_PROFILE_CONFIG; -import static io.prestosql.tests.product.launcher.testcontainers.TestcontainersUtil.exposePort; import static java.lang.String.format; import static java.util.Objects.requireNonNull; import static org.testcontainers.containers.BindMode.READ_ONLY; @@ -37,14 +37,16 @@ public abstract class AbstractSinglenodeLdap extends AbstractEnvironmentProvider { private final DockerFiles dockerFiles; + private final PortBinder portBinder; private final String imagesVersion; private static final int LDAP_PORT = 636; - protected AbstractSinglenodeLdap(List bases, DockerFiles dockerFiles, EnvironmentOptions environmentOptions) + protected AbstractSinglenodeLdap(List bases, DockerFiles dockerFiles, PortBinder portBinder, EnvironmentOptions environmentOptions) { super(bases); this.dockerFiles = requireNonNull(dockerFiles, "dockerFiles is null"); + this.portBinder = requireNonNull(portBinder, "portBinder is null"); this.imagesVersion = requireNonNull(environmentOptions.imagesVersion, "environmentOptions.imagesVersion is null"); } @@ -66,7 +68,7 @@ protected void extendEnvironment(Environment.Builder builder) CONTAINER_PRESTO_CONFIG_PROPERTIES, READ_ONLY); - exposePort(dockerContainer, 8443); + portBinder.exposePort(dockerContainer, 8443); }); builder.configureContainer("tests", dockerContainer -> { @@ -81,7 +83,7 @@ protected void extendEnvironment(Environment.Builder builder) .withStartupCheckStrategy(new IsRunningStartupCheckStrategy()) 
.waitingFor(new SelectedPortWaitStrategy(LDAP_PORT)) .withStartupTimeout(Duration.ofMinutes(5)); - exposePort(container, LDAP_PORT); + portBinder.exposePort(container, LDAP_PORT); builder.addContainer("ldapserver", container); } diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/MultinodeTls.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/MultinodeTls.java index 5827e3208bd9..fdd94943da39 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/MultinodeTls.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/MultinodeTls.java @@ -23,6 +23,7 @@ import io.prestosql.tests.product.launcher.env.common.Hadoop; import io.prestosql.tests.product.launcher.env.common.Standard; import io.prestosql.tests.product.launcher.env.common.TestsEnvironment; +import io.prestosql.tests.product.launcher.testcontainers.PortBinder; import javax.inject.Inject; @@ -33,7 +34,6 @@ import static io.prestosql.tests.product.launcher.env.common.Standard.CONTAINER_PRESTO_CONFIG_PROPERTIES; import static io.prestosql.tests.product.launcher.env.common.Standard.CONTAINER_TEMPTO_PROFILE_CONFIG; import static io.prestosql.tests.product.launcher.env.common.Standard.createPrestoContainer; -import static io.prestosql.tests.product.launcher.testcontainers.TestcontainersUtil.exposePort; import static java.util.Objects.requireNonNull; import static org.testcontainers.containers.BindMode.READ_ONLY; @@ -43,6 +43,7 @@ public final class MultinodeTls { private final PathResolver pathResolver; private final DockerFiles dockerFiles; + private final PortBinder portBinder; private final String imagesVersion; private final File serverPackage; @@ -51,6 +52,7 @@ public final class MultinodeTls public MultinodeTls( PathResolver pathResolver, DockerFiles dockerFiles, + PortBinder portBinder, Standard 
standard, Hadoop hadoop, EnvironmentOptions environmentOptions) @@ -58,6 +60,7 @@ public MultinodeTls( super(ImmutableList.of(standard, hadoop)); this.pathResolver = requireNonNull(pathResolver, "pathResolver is null"); this.dockerFiles = requireNonNull(dockerFiles, "dockerFiles is null"); + this.portBinder = requireNonNull(portBinder, "portBinder is null"); imagesVersion = requireNonNull(environmentOptions.imagesVersion, "environmentOptions.imagesVersion is null"); serverPackage = requireNonNull(environmentOptions.serverPackage, "environmentOptions.serverPackage is null"); } @@ -72,7 +75,7 @@ protected void extendEnvironment(Environment.Builder builder) .withNetworkAliases("presto-master.docker.cluster") .withFileSystemBind(dockerFiles.getDockerFilesHostPath("conf/environment/multinode-tls/config-master.properties"), CONTAINER_PRESTO_CONFIG_PROPERTIES, READ_ONLY); - exposePort(container, 7778); + portBinder.exposePort(container, 7778); }); addPrestoWorker(builder, "presto-worker-1"); diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeCassandra.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeCassandra.java index fb97bc329755..7f1fc38081d4 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeCassandra.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeCassandra.java @@ -20,6 +20,7 @@ import io.prestosql.tests.product.launcher.env.common.AbstractEnvironmentProvider; import io.prestosql.tests.product.launcher.env.common.Standard; import io.prestosql.tests.product.launcher.env.common.TestsEnvironment; +import io.prestosql.tests.product.launcher.testcontainers.PortBinder; import io.prestosql.tests.product.launcher.testcontainers.SelectedPortWaitStrategy; import 
org.testcontainers.containers.startupcheck.IsRunningStartupCheckStrategy; @@ -28,7 +29,6 @@ import java.time.Duration; import static io.prestosql.tests.product.launcher.env.common.Standard.CONTAINER_PRESTO_ETC; -import static io.prestosql.tests.product.launcher.testcontainers.TestcontainersUtil.exposePort; import static java.util.Objects.requireNonNull; @TestsEnvironment @@ -36,15 +36,17 @@ public final class SinglenodeCassandra extends AbstractEnvironmentProvider { private final DockerFiles dockerFiles; + private final PortBinder portBinder; public static final String CONTAINER_PRESTO_CASSANDRA_PROPERTIES = CONTAINER_PRESTO_ETC + "/catalog/cassandra.properties"; public static final int CASSANDRA_PORT = 9042; @Inject - protected SinglenodeCassandra(DockerFiles dockerFiles, Standard standard) + protected SinglenodeCassandra(DockerFiles dockerFiles, PortBinder portBinder, Standard standard) { super(ImmutableList.of(standard)); this.dockerFiles = requireNonNull(dockerFiles, "dockerFiles is null"); + this.portBinder = requireNonNull(portBinder, "portBinder is null"); } @Override @@ -69,7 +71,7 @@ private DockerContainer createCassandra() .waitingFor(new SelectedPortWaitStrategy(CASSANDRA_PORT)) .withStartupTimeout(Duration.ofMinutes(5)); - exposePort(container, CASSANDRA_PORT); + portBinder.exposePort(container, CASSANDRA_PORT); return container; } diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeLdap.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeLdap.java index ec9df3348135..dc33504f26bc 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeLdap.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeLdap.java @@ -19,6 +19,7 @@ import io.prestosql.tests.product.launcher.env.common.Hadoop; import 
io.prestosql.tests.product.launcher.env.common.Standard; import io.prestosql.tests.product.launcher.env.common.TestsEnvironment; +import io.prestosql.tests.product.launcher.testcontainers.PortBinder; import javax.inject.Inject; @@ -27,9 +28,9 @@ public class SinglenodeLdap extends AbstractSinglenodeLdap { @Inject - public SinglenodeLdap(Standard standard, Hadoop hadoop, DockerFiles dockerFiles, EnvironmentOptions environmentOptions) + public SinglenodeLdap(Standard standard, Hadoop hadoop, DockerFiles dockerFiles, PortBinder portBinder, EnvironmentOptions environmentOptions) { - super(ImmutableList.of(standard, hadoop), dockerFiles, environmentOptions); + super(ImmutableList.of(standard, hadoop), dockerFiles, portBinder, environmentOptions); } @Override diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeLdapBindDn.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeLdapBindDn.java index deda0bf8bef9..dcab536e1aa8 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeLdapBindDn.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeLdapBindDn.java @@ -19,6 +19,7 @@ import io.prestosql.tests.product.launcher.env.common.Hadoop; import io.prestosql.tests.product.launcher.env.common.Standard; import io.prestosql.tests.product.launcher.env.common.TestsEnvironment; +import io.prestosql.tests.product.launcher.testcontainers.PortBinder; import javax.inject.Inject; @@ -27,9 +28,9 @@ public class SinglenodeLdapBindDn extends AbstractSinglenodeLdap { @Inject - public SinglenodeLdapBindDn(Standard standard, Hadoop hadoop, DockerFiles dockerFiles, EnvironmentOptions environmentOptions) + public SinglenodeLdapBindDn(Standard standard, Hadoop hadoop, DockerFiles dockerFiles, PortBinder portBinder, EnvironmentOptions 
environmentOptions) { - super(ImmutableList.of(standard, hadoop), dockerFiles, environmentOptions); + super(ImmutableList.of(standard, hadoop), dockerFiles, portBinder, environmentOptions); } @Override diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeLdapReferrals.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeLdapReferrals.java index f487172acf32..2de994b61a60 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeLdapReferrals.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeLdapReferrals.java @@ -19,6 +19,7 @@ import io.prestosql.tests.product.launcher.env.common.Hadoop; import io.prestosql.tests.product.launcher.env.common.Standard; import io.prestosql.tests.product.launcher.env.common.TestsEnvironment; +import io.prestosql.tests.product.launcher.testcontainers.PortBinder; import javax.inject.Inject; @@ -27,9 +28,9 @@ public class SinglenodeLdapReferrals extends AbstractSinglenodeLdap { @Inject - public SinglenodeLdapReferrals(Standard standard, Hadoop hadoop, DockerFiles dockerFiles, EnvironmentOptions environmentOptions) + public SinglenodeLdapReferrals(Standard standard, Hadoop hadoop, DockerFiles dockerFiles, PortBinder portBinder, EnvironmentOptions environmentOptions) { - super(ImmutableList.of(standard, hadoop), dockerFiles, environmentOptions); + super(ImmutableList.of(standard, hadoop), dockerFiles, portBinder, environmentOptions); } @Override diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeMysql.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeMysql.java index 3b5840bcf4af..09ef802d2e91 100644 --- 
a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeMysql.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeMysql.java @@ -20,13 +20,13 @@ import io.prestosql.tests.product.launcher.env.common.AbstractEnvironmentProvider; import io.prestosql.tests.product.launcher.env.common.Standard; import io.prestosql.tests.product.launcher.env.common.TestsEnvironment; +import io.prestosql.tests.product.launcher.testcontainers.PortBinder; import io.prestosql.tests.product.launcher.testcontainers.SelectedPortWaitStrategy; import org.testcontainers.containers.startupcheck.IsRunningStartupCheckStrategy; import javax.inject.Inject; import static io.prestosql.tests.product.launcher.env.common.Standard.CONTAINER_PRESTO_ETC; -import static io.prestosql.tests.product.launcher.testcontainers.TestcontainersUtil.exposePort; import static java.util.Objects.requireNonNull; import static org.testcontainers.containers.BindMode.READ_ONLY; @@ -38,12 +38,14 @@ public final class SinglenodeMysql public static final int MYSQL_PORT = 13306; private final DockerFiles dockerFiles; + private final PortBinder portBinder; @Inject - public SinglenodeMysql(Standard standard, DockerFiles dockerFiles) + public SinglenodeMysql(Standard standard, DockerFiles dockerFiles, PortBinder portBinder) { super(ImmutableList.of(standard)); this.dockerFiles = requireNonNull(dockerFiles, "dockerFiles is null"); + this.portBinder = requireNonNull(portBinder, "portBinder is null"); } @Override @@ -70,7 +72,7 @@ private DockerContainer createMySql() .withStartupCheckStrategy(new IsRunningStartupCheckStrategy()) .waitingFor(new SelectedPortWaitStrategy(MYSQL_PORT)); - exposePort(container, MYSQL_PORT); + portBinder.exposePort(container, MYSQL_PORT); return container; } diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodePostgresql.java 
b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodePostgresql.java index 3686623122de..443fe9b72976 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodePostgresql.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodePostgresql.java @@ -21,13 +21,13 @@ import io.prestosql.tests.product.launcher.env.common.AbstractEnvironmentProvider; import io.prestosql.tests.product.launcher.env.common.Standard; import io.prestosql.tests.product.launcher.env.common.TestsEnvironment; +import io.prestosql.tests.product.launcher.testcontainers.PortBinder; import io.prestosql.tests.product.launcher.testcontainers.SelectedPortWaitStrategy; import org.testcontainers.containers.startupcheck.IsRunningStartupCheckStrategy; import javax.inject.Inject; import static io.prestosql.tests.product.launcher.env.common.Standard.CONTAINER_PRESTO_ETC; -import static io.prestosql.tests.product.launcher.testcontainers.TestcontainersUtil.exposePort; import static java.util.Objects.requireNonNull; import static org.testcontainers.containers.BindMode.READ_ONLY; @@ -39,12 +39,14 @@ public final class SinglenodePostgresql public static final int POSTGRESQL_PORT = 15432; private final DockerFiles dockerFiles; + private final PortBinder portBinder; @Inject - public SinglenodePostgresql(Standard standard, DockerFiles dockerFiles) + public SinglenodePostgresql(Standard standard, DockerFiles dockerFiles, PortBinder portBinder) { super(ImmutableList.of(standard)); this.dockerFiles = requireNonNull(dockerFiles, "dockerFiles is null"); + this.portBinder = requireNonNull(portBinder, "portBinder is null"); } @Override @@ -70,7 +72,7 @@ private DockerContainer createPostgreSql() .withStartupCheckStrategy(new IsRunningStartupCheckStrategy()) .waitingFor(new SelectedPortWaitStrategy(POSTGRESQL_PORT)); - exposePort(container, 
POSTGRESQL_PORT); + portBinder.exposePort(container, POSTGRESQL_PORT); return container; } diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeSqlserver.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeSqlserver.java index d3d44fe058f6..80b340fba28e 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeSqlserver.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeSqlserver.java @@ -21,13 +21,13 @@ import io.prestosql.tests.product.launcher.env.common.AbstractEnvironmentProvider; import io.prestosql.tests.product.launcher.env.common.Standard; import io.prestosql.tests.product.launcher.env.common.TestsEnvironment; +import io.prestosql.tests.product.launcher.testcontainers.PortBinder; import io.prestosql.tests.product.launcher.testcontainers.SelectedPortWaitStrategy; import org.testcontainers.containers.startupcheck.IsRunningStartupCheckStrategy; import javax.inject.Inject; import static io.prestosql.tests.product.launcher.env.common.Standard.CONTAINER_PRESTO_ETC; -import static io.prestosql.tests.product.launcher.testcontainers.TestcontainersUtil.exposePort; import static java.util.Objects.requireNonNull; import static org.testcontainers.containers.BindMode.READ_ONLY; @@ -38,12 +38,14 @@ public final class SinglenodeSqlserver public static final int SQLSERVER_PORT = 1433; private final DockerFiles dockerFiles; + private final PortBinder portBinder; @Inject - public SinglenodeSqlserver(Standard standard, DockerFiles dockerFiles) + public SinglenodeSqlserver(Standard standard, DockerFiles dockerFiles, PortBinder portBinder) { super(ImmutableList.of(standard)); this.dockerFiles = requireNonNull(dockerFiles, "dockerFiles is null"); + this.portBinder = requireNonNull(portBinder, "portBinder is null"); } @Override @@ 
-67,7 +69,7 @@ private DockerContainer createSqlServer() .withStartupCheckStrategy(new IsRunningStartupCheckStrategy()) .waitingFor(new SelectedPortWaitStrategy(SQLSERVER_PORT)); - exposePort(container, 1433); + portBinder.exposePort(container, 1433); return container; } diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/testcontainers/PortBinder.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/testcontainers/PortBinder.java new file mode 100644 index 000000000000..078a57c9447a --- /dev/null +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/testcontainers/PortBinder.java @@ -0,0 +1,43 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.tests.product.launcher.testcontainers; + +import io.prestosql.tests.product.launcher.env.DockerContainer; +import io.prestosql.tests.product.launcher.env.EnvironmentOptions; + +import javax.inject.Inject; + +import static java.util.Objects.requireNonNull; + +public class PortBinder +{ + private final boolean bindPorts; + + @Inject + public PortBinder(EnvironmentOptions environmentOptions) + { + this.bindPorts = requireNonNull(environmentOptions, "environmentOptions is null").bindPorts; + } + + public void exposePort(DockerContainer container, int port) + { + if (bindPorts) { + TestcontainersUtil.exposePort(container, port); + } + else { + // Still export port, at a random free number, as certain startup checks require this. + container.addExposedPort(port); + } + } +} From 9e73177bc77821a988b8f1c2bcfe0dcb2eedb659 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Mon, 6 Apr 2020 13:06:48 +0200 Subject: [PATCH 098/519] Do not bind ports in product tests on CI --- .github/workflows/ci-tests.yml | 1 + .../launcher/env/EnvironmentOptions.java | 18 +++++++++++++++++- 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci-tests.yml b/.github/workflows/ci-tests.yml index 86b1ea48e5e1..c707cc655309 100644 --- a/.github/workflows/ci-tests.yml +++ b/.github/workflows/ci-tests.yml @@ -217,5 +217,6 @@ jobs: rm -rf ~/.m2/repository - name: Product Tests run: | + export PTL_BIND_PORTS=false && source presto-product-tests/conf/product-tests-${{ matrix.config }}.sh && presto-product-tests/bin/product-tests-${{ matrix.suite }}.sh diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/EnvironmentOptions.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/EnvironmentOptions.java index 2997cf023ba9..b0b6720c0afd 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/EnvironmentOptions.java +++ 
b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/EnvironmentOptions.java @@ -17,6 +17,10 @@ import io.airlift.airline.Option; import java.io.File; +import java.util.Locale; + +import static com.google.common.base.MoreObjects.firstNonNull; +import static java.util.Objects.requireNonNull; public final class EnvironmentOptions { @@ -33,7 +37,7 @@ public final class EnvironmentOptions public boolean withoutPrestoMaster; @Option(name = "--bind", description = "bind ports on localhost") - public boolean bindPorts = true; + public boolean bindPorts = toBoolean(firstNonNull(System.getenv("PTL_BIND_PORTS"), "true")); @Option(name = "--debug", description = "open Java debug ports") public boolean debug; @@ -44,4 +48,16 @@ public Module toModule() binder.bind(EnvironmentOptions.class).toInstance(this); }; } + + private static boolean toBoolean(String value) + { + requireNonNull(value, "value is null"); + switch (value.toLowerCase(Locale.ENGLISH)) { + case "true": + return true; + case "false": + return false; + } + throw new IllegalArgumentException("Cannot convert to boolean: " + value); + } } From a619e47458e8c4575a95507be801e5082fde30c1 Mon Sep 17 00:00:00 2001 From: Pratham Desai Date: Mon, 27 Jan 2020 14:27:04 -0800 Subject: [PATCH 099/519] Fix spelling in PlanOptimizers class --- .../src/main/java/io/prestosql/sql/planner/PlanOptimizers.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java b/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java index 9e983b745556..994967674d0b 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java @@ -505,7 +505,7 @@ public PlanOptimizers( ImmutableSet.of(new RemoveRedundantIdentityProjections())), // Because ReorderJoins runs only once, - // PredicatePushDown, PruneUnreferenedOutputpus and 
RemoveRedundantIdentityProjections + // PredicatePushDown, PruneUnreferencedOutputs and RemoveRedundantIdentityProjections // need to run beforehand in order to produce an optimal join order // It also needs to run after EliminateCrossJoins so that its chosen order doesn't get undone. new IterativeOptimizer( From 9165b91da365573beeedb3ac5a55e57f3b03246e Mon Sep 17 00:00:00 2001 From: Pratham Desai Date: Mon, 27 Jan 2020 14:27:05 -0800 Subject: [PATCH 100/519] Encode BigDecimal to Slice in StructuralTestUtil::appendToBlockBuilder --- .../src/test/java/io/prestosql/util/StructuralTestUtil.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/presto-main/src/test/java/io/prestosql/util/StructuralTestUtil.java b/presto-main/src/test/java/io/prestosql/util/StructuralTestUtil.java index 588050fd93f3..08ea7f0fa3bd 100644 --- a/presto-main/src/test/java/io/prestosql/util/StructuralTestUtil.java +++ b/presto-main/src/test/java/io/prestosql/util/StructuralTestUtil.java @@ -28,6 +28,7 @@ import io.prestosql.spi.type.Type; import io.prestosql.spi.type.TypeSignatureParameter; +import java.math.BigDecimal; import java.util.Map; import static io.prestosql.metadata.MetadataManager.createTestMetadataManager; @@ -126,6 +127,9 @@ else if (element instanceof byte[]) { else if (element instanceof SqlDecimal) { type.writeSlice(blockBuilder, Decimals.encodeUnscaledValue(((SqlDecimal) element).getUnscaledValue())); } + else if (element instanceof BigDecimal) { + type.writeSlice(blockBuilder, Decimals.encodeScaledValue((BigDecimal) element)); + } else { type.writeSlice(blockBuilder, (Slice) element); } From 4928507ecd2a60bc12aba0dd714d26491045c85a Mon Sep 17 00:00:00 2001 From: Pratham Desai Date: Mon, 27 Jan 2020 14:27:06 -0800 Subject: [PATCH 101/519] Support projected columns in Hive --- presto-hive/pom.xml | 7 + .../hive/GenericHiveRecordCursorProvider.java | 27 +- .../plugin/hive/HiveBucketHandle.java | 3 + .../plugin/hive/HiveCoercionRecordCursor.java | 4 +- 
.../plugin/hive/HiveColumnHandle.java | 138 ++++-- .../plugin/hive/HiveColumnProjectionInfo.java | 111 +++++ .../prestosql/plugin/hive/HiveMetadata.java | 5 +- .../prestosql/plugin/hive/HivePageSource.java | 24 +- .../plugin/hive/HivePageSourceFactory.java | 39 +- .../plugin/hive/HivePageSourceProvider.java | 214 ++++++--- ...ReaderProjectionsAdaptingRecordCursor.java | 235 ++++++++++ .../plugin/hive/HiveRecordCursor.java | 4 + .../plugin/hive/HiveRecordCursorProvider.java | 34 +- .../io/prestosql/plugin/hive/HiveType.java | 36 ++ .../plugin/hive/IonSqlQueryBuilder.java | 9 +- .../plugin/hive/ReaderProjections.java | 106 +++++ .../plugin/hive/ReaderProjectionsAdapter.java | 249 ++++++++++ .../plugin/hive/orc/OrcPageSourceFactory.java | 25 +- .../parquet/ParquetPageSourceFactory.java | 25 +- .../plugin/hive/rcfile/RcFilePageSource.java | 6 +- .../hive/rcfile/RcFilePageSourceFactory.java | 17 +- .../S3SelectRecordCursorProvider.java | 19 +- .../prestosql/plugin/hive/util/HiveUtil.java | 11 +- .../plugin/hive/AbstractTestHive.java | 11 +- .../hive/AbstractTestHiveFileFormats.java | 91 +++- .../hive/TestBackgroundHiveSplitLoader.java | 3 +- .../plugin/hive/TestHiveColumnHandle.java | 50 +- .../plugin/hive/TestHiveFileFormats.java | 436 ++++++++++++++++-- .../plugin/hive/TestHiveMetadata.java | 5 +- .../plugin/hive/TestHivePageSink.java | 3 +- .../hive/TestHiveReaderProjectionsUtil.java | 92 ++++ .../prestosql/plugin/hive/TestHiveSplit.java | 3 +- .../plugin/hive/TestIonSqlQueryBuilder.java | 23 +- .../hive/TestOrcPageSourceMemoryTracking.java | 3 +- .../plugin/hive/TestReaderProjections.java | 85 ++++ .../hive/TestReaderProjectionsAdapter.java | 312 +++++++++++++ .../plugin/hive/benchmark/FileFormat.java | 30 +- .../hive/orc/TestOrcPageSourceFactory.java | 17 +- .../predicate/TestParquetPredicateUtils.java | 12 +- .../s3select/TestS3SelectRecordCursor.java | 9 +- .../TestMetastoreHiveStatisticsProvider.java | 11 +- 41 files changed, 2300 insertions(+), 244 deletions(-) 
create mode 100644 presto-hive/src/main/java/io/prestosql/plugin/hive/HiveColumnProjectionInfo.java create mode 100644 presto-hive/src/main/java/io/prestosql/plugin/hive/HiveReaderProjectionsAdaptingRecordCursor.java create mode 100644 presto-hive/src/main/java/io/prestosql/plugin/hive/ReaderProjections.java create mode 100644 presto-hive/src/main/java/io/prestosql/plugin/hive/ReaderProjectionsAdapter.java create mode 100644 presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveReaderProjectionsUtil.java create mode 100644 presto-hive/src/test/java/io/prestosql/plugin/hive/TestReaderProjections.java create mode 100644 presto-hive/src/test/java/io/prestosql/plugin/hive/TestReaderProjectionsAdapter.java diff --git a/presto-hive/pom.xml b/presto-hive/pom.xml index d8d3bf85a614..b7cd7be4a33e 100644 --- a/presto-hive/pom.xml +++ b/presto-hive/pom.xml @@ -245,6 +245,13 @@ test + + io.prestosql + presto-main + test-jar + test + + io.prestosql presto-main diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/GenericHiveRecordCursorProvider.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/GenericHiveRecordCursorProvider.java index 52baea41cfb7..87da5d6b34b5 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/GenericHiveRecordCursorProvider.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/GenericHiveRecordCursorProvider.java @@ -36,6 +36,7 @@ import static com.google.common.base.Preconditions.checkArgument; import static io.prestosql.plugin.hive.HiveErrorCode.HIVE_FILESYSTEM_ERROR; +import static io.prestosql.plugin.hive.ReaderProjections.projectBaseColumns; import static java.lang.Math.toIntExact; import static java.util.Objects.requireNonNull; @@ -59,7 +60,7 @@ public GenericHiveRecordCursorProvider(HdfsEnvironment hdfsEnvironment, DataSize } @Override - public Optional createRecordCursor( + public Optional createRecordCursor( Configuration configuration, ConnectorSession session, Path path, @@ -83,18 +84,32 @@ public 
Optional createRecordCursor( throw new PrestoException(HIVE_FILESYSTEM_ERROR, "Failed getting FileSystem: " + path, e); } - return hdfsEnvironment.doAs(session.getUser(), () -> { - RecordReader recordReader = HiveUtil.createRecordReader(configuration, path, start, length, schema, columns); + Optional projectedReaderColumns = projectBaseColumns(columns); - return Optional.of(new GenericHiveRecordCursor<>( + RecordCursor cursor = hdfsEnvironment.doAs(session.getUser(), () -> { + RecordReader recordReader = HiveUtil.createRecordReader( + configuration, + path, + start, + length, + schema, + projectedReaderColumns + .map(ReaderProjections::getReaderColumns) + .orElse(columns)); + + return new GenericHiveRecordCursor<>( configuration, path, genericRecordReader(recordReader), length, schema, - columns, - hiveStorageTimeZone)); + projectedReaderColumns + .map(ReaderProjections::getReaderColumns) + .orElse(columns), + hiveStorageTimeZone); }); + + return Optional.of(new ReaderRecordCursorWithProjections(cursor, projectedReaderColumns)); } @SuppressWarnings("unchecked") diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveBucketHandle.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveBucketHandle.java index c8cded6199b7..17a53dc36fd9 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveBucketHandle.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveBucketHandle.java @@ -20,6 +20,8 @@ import java.util.List; +import static com.google.common.base.Preconditions.checkArgument; +import static java.lang.String.format; import static java.util.Objects.requireNonNull; import static java.util.stream.Collectors.toList; @@ -41,6 +43,7 @@ public HiveBucketHandle( @JsonProperty("readBucketCount") int readBucketCount) { this.columns = requireNonNull(columns, "columns is null"); + columns.forEach(column -> checkArgument(column.isBaseColumn(), format("projected column %s is not allowed for bucketing", column))); this.bucketingVersion = 
requireNonNull(bucketingVersion, "bucketingVersion is null"); this.tableBucketCount = tableBucketCount; this.readBucketCount = readBucketCount; diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveCoercionRecordCursor.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveCoercionRecordCursor.java index d071745d4b2d..37d94e48263a 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveCoercionRecordCursor.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveCoercionRecordCursor.java @@ -74,8 +74,8 @@ public HiveCoercionRecordCursor( for (int columnIndex = 0; columnIndex < size; columnIndex++) { ColumnMapping columnMapping = columnMappings.get(columnIndex); - if (columnMapping.getCoercionFrom().isPresent()) { - coercers[columnIndex] = createCoercer(typeManager, columnMapping.getCoercionFrom().get(), columnMapping.getHiveColumnHandle().getHiveType(), bridgingRecordCursor); + if (columnMapping.getBaseTypeCoercionFrom().isPresent()) { + coercers[columnIndex] = createCoercer(typeManager, columnMapping.getBaseTypeCoercionFrom().get(), columnMapping.getHiveColumnHandle().getHiveType(), bridgingRecordCursor); } } } diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveColumnHandle.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveColumnHandle.java index 2a70868c58a2..69e8345e6de4 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveColumnHandle.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveColumnHandle.java @@ -33,6 +33,11 @@ import static io.prestosql.spi.type.VarcharType.VARCHAR; import static java.util.Objects.requireNonNull; +/** + * ColumnHandle for Hive Connector representing a full top level column or a projected column. Currently projected columns + * that represent a simple chain of dereferences are supported. e.g. for a column "A" with type struct(B struct(C bigint, ...), ....) + * there can be a projected column representing expression "A.B.C". 
+ */ public class HiveColumnHandle implements ColumnHandle { @@ -65,47 +70,102 @@ public enum ColumnType SYNTHESIZED, } + // Information about top level hive column + private final String baseColumnName; + private final int baseHiveColumnIndex; + private final HiveType baseHiveType; + private final Type baseType; + private final Optional comment; + + // Information about parts of the base column to be referenced by this column handle. + private final Optional hiveColumnProjectionInfo; + private final String name; - private final HiveType hiveType; - private final Type type; - private final int hiveColumnIndex; private final ColumnType columnType; - private final Optional comment; @JsonCreator public HiveColumnHandle( - @JsonProperty("name") String name, - @JsonProperty("hiveType") HiveType hiveType, - @JsonProperty("type") Type type, - @JsonProperty("hiveColumnIndex") int hiveColumnIndex, + @JsonProperty("baseColumnName") String baseColumnName, + @JsonProperty("baseHiveColumnIndex") int baseHiveColumnIndex, + @JsonProperty("baseHiveType") HiveType baseHiveType, + @JsonProperty("baseType") Type baseType, + @JsonProperty("hiveColumnProjectionInfo") Optional hiveColumnProjectionInfo, @JsonProperty("columnType") ColumnType columnType, @JsonProperty("comment") Optional comment) { - this.name = requireNonNull(name, "name is null"); - checkArgument(hiveColumnIndex >= 0 || columnType == PARTITION_KEY || columnType == SYNTHESIZED, "hiveColumnIndex is negative"); - this.hiveColumnIndex = hiveColumnIndex; - this.hiveType = requireNonNull(hiveType, "hiveType is null"); - this.type = requireNonNull(type, "type is null"); + this.baseColumnName = requireNonNull(baseColumnName, "baseColumnName is null"); + checkArgument(baseHiveColumnIndex >= 0 || columnType == PARTITION_KEY || columnType == SYNTHESIZED, "baseHiveColumnIndex is negative"); + this.baseHiveColumnIndex = baseHiveColumnIndex; + this.baseHiveType = requireNonNull(baseHiveType, "baseHiveType is null"); + this.baseType = 
requireNonNull(baseType, "baseType is null"); + + this.hiveColumnProjectionInfo = requireNonNull(hiveColumnProjectionInfo, "hiveColumnProjectionInfo is null"); + + this.name = this.baseColumnName + hiveColumnProjectionInfo.map(HiveColumnProjectionInfo::getPartialName).orElse(""); + this.columnType = requireNonNull(columnType, "columnType is null"); this.comment = requireNonNull(comment, "comment is null"); } - @JsonProperty + public static HiveColumnHandle createBaseColumn( + String topLevelColumnName, + int topLevelColumnIndex, + HiveType hiveType, + Type type, + ColumnType columnType, + Optional comment) + { + return new HiveColumnHandle(topLevelColumnName, topLevelColumnIndex, hiveType, type, Optional.empty(), columnType, comment); + } + + public HiveColumnHandle getBaseColumn() + { + return isBaseColumn() ? this : createBaseColumn(baseColumnName, baseHiveColumnIndex, baseHiveType, baseType, columnType, comment); + } + public String getName() { return name; } @JsonProperty - public HiveType getHiveType() + public String getBaseColumnName() + { + return baseColumnName; + } + + @JsonProperty + public HiveType getBaseHiveType() + { + return baseHiveType; + } + + @JsonProperty + public Type getBaseType() { - return hiveType; + return baseType; } @JsonProperty - public int getHiveColumnIndex() + public int getBaseHiveColumnIndex() + { + return baseHiveColumnIndex; + } + + @JsonProperty + public Optional getHiveColumnProjectionInfo() + { + return hiveColumnProjectionInfo; + } + + public HiveType getHiveType() { - return hiveColumnIndex; + return hiveColumnProjectionInfo.map(HiveColumnProjectionInfo::getHiveType).orElse(baseHiveType); + } + + public Type getType() + { + return hiveColumnProjectionInfo.map(HiveColumnProjectionInfo::getType).orElse(baseType); } public boolean isPartitionKey() @@ -122,7 +182,7 @@ public ColumnMetadata getColumnMetadata() { return ColumnMetadata.builder() .setName(name) - .setType(type) + .setType(getType()) .setHidden(isHidden()) 
.build(); } @@ -134,21 +194,20 @@ public Optional getComment() } @JsonProperty - public Type getType() + public ColumnType getColumnType() { - return type; + return columnType; } - @JsonProperty - public ColumnType getColumnType() + public boolean isBaseColumn() { - return columnType; + return !hiveColumnProjectionInfo.isPresent(); } @Override public int hashCode() { - return Objects.hash(name, hiveColumnIndex, hiveType, columnType, comment); + return Objects.hash(baseColumnName, baseHiveColumnIndex, baseHiveType, baseType, hiveColumnProjectionInfo, columnType, comment); } @Override @@ -161,9 +220,12 @@ public boolean equals(Object obj) return false; } HiveColumnHandle other = (HiveColumnHandle) obj; - return Objects.equals(this.name, other.name) && - Objects.equals(this.hiveColumnIndex, other.hiveColumnIndex) && - Objects.equals(this.hiveType, other.hiveType) && + return Objects.equals(this.baseColumnName, other.baseColumnName) && + Objects.equals(this.baseHiveColumnIndex, other.baseHiveColumnIndex) && + Objects.equals(this.baseHiveType, other.baseHiveType) && + Objects.equals(this.baseType, other.baseType) && + Objects.equals(this.hiveColumnProjectionInfo, other.hiveColumnProjectionInfo) && + Objects.equals(this.name, other.name) && this.columnType == other.columnType && Objects.equals(this.comment, other.comment); } @@ -171,7 +233,7 @@ public boolean equals(Object obj) @Override public String toString() { - return name + ":" + hiveType + ":" + hiveColumnIndex + ":" + columnType; + return name + ":" + getHiveType() + ":" + columnType; } public static HiveColumnHandle updateRowIdHandle() @@ -182,12 +244,12 @@ public static HiveColumnHandle updateRowIdHandle() // plan-time support for row-by-row delete so that planning doesn't fail. This is why we need // rowid handle. Note that in Hive connector, rowid handle is not implemented beyond plan-time. 
- return new HiveColumnHandle(UPDATE_ROW_ID_COLUMN_NAME, HIVE_LONG, BIGINT, -1, SYNTHESIZED, Optional.empty()); + return createBaseColumn(UPDATE_ROW_ID_COLUMN_NAME, -1, HIVE_LONG, BIGINT, SYNTHESIZED, Optional.empty()); } public static HiveColumnHandle pathColumnHandle() { - return new HiveColumnHandle(PATH_COLUMN_NAME, PATH_HIVE_TYPE, PATH_TYPE, PATH_COLUMN_INDEX, SYNTHESIZED, Optional.empty()); + return createBaseColumn(PATH_COLUMN_NAME, PATH_COLUMN_INDEX, PATH_HIVE_TYPE, PATH_TYPE, SYNTHESIZED, Optional.empty()); } /** @@ -197,36 +259,36 @@ public static HiveColumnHandle pathColumnHandle() */ public static HiveColumnHandle bucketColumnHandle() { - return new HiveColumnHandle(BUCKET_COLUMN_NAME, BUCKET_HIVE_TYPE, BUCKET_TYPE_SIGNATURE, BUCKET_COLUMN_INDEX, SYNTHESIZED, Optional.empty()); + return createBaseColumn(BUCKET_COLUMN_NAME, BUCKET_COLUMN_INDEX, BUCKET_HIVE_TYPE, BUCKET_TYPE_SIGNATURE, SYNTHESIZED, Optional.empty()); } public static HiveColumnHandle fileSizeColumnHandle() { - return new HiveColumnHandle(FILE_SIZE_COLUMN_NAME, FILE_SIZE_TYPE, FILE_SIZE_TYPE_SIGNATURE, FILE_SIZE_COLUMN_INDEX, SYNTHESIZED, Optional.empty()); + return createBaseColumn(FILE_SIZE_COLUMN_NAME, FILE_SIZE_COLUMN_INDEX, FILE_SIZE_TYPE, FILE_SIZE_TYPE_SIGNATURE, SYNTHESIZED, Optional.empty()); } public static HiveColumnHandle fileModifiedTimeColumnHandle() { - return new HiveColumnHandle(FILE_MODIFIED_TIME_COLUMN_NAME, FILE_MODIFIED_TIME_TYPE, FILE_MODIFIED_TIME_TYPE_SIGNATURE, FILE_MODIFIED_TIME_COLUMN_INDEX, SYNTHESIZED, Optional.empty()); + return createBaseColumn(FILE_MODIFIED_TIME_COLUMN_NAME, FILE_MODIFIED_TIME_COLUMN_INDEX, FILE_MODIFIED_TIME_TYPE, FILE_MODIFIED_TIME_TYPE_SIGNATURE, SYNTHESIZED, Optional.empty()); } public static boolean isPathColumnHandle(HiveColumnHandle column) { - return column.getHiveColumnIndex() == PATH_COLUMN_INDEX; + return column.getBaseHiveColumnIndex() == PATH_COLUMN_INDEX; } public static boolean isBucketColumnHandle(HiveColumnHandle column) { - 
return column.getHiveColumnIndex() == BUCKET_COLUMN_INDEX; + return column.getBaseHiveColumnIndex() == BUCKET_COLUMN_INDEX; } public static boolean isFileSizeColumnHandle(HiveColumnHandle column) { - return column.getHiveColumnIndex() == FILE_SIZE_COLUMN_INDEX; + return column.getBaseHiveColumnIndex() == FILE_SIZE_COLUMN_INDEX; } public static boolean isFileModifiedTimeColumnHandle(HiveColumnHandle column) { - return column.getHiveColumnIndex() == FILE_MODIFIED_TIME_COLUMN_INDEX; + return column.getBaseHiveColumnIndex() == FILE_MODIFIED_TIME_COLUMN_INDEX; } } diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveColumnProjectionInfo.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveColumnProjectionInfo.java new file mode 100644 index 000000000000..6cac57ddf80d --- /dev/null +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveColumnProjectionInfo.java @@ -0,0 +1,111 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.plugin.hive; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import io.prestosql.spi.type.Type; + +import java.util.List; +import java.util.Objects; +import java.util.stream.Collectors; + +import static com.google.common.base.Preconditions.checkArgument; +import static java.util.Objects.requireNonNull; + +public class HiveColumnProjectionInfo +{ + private final List dereferenceIndices; + private final List dereferenceNames; + private final HiveType hiveType; + private final Type type; + private final String partialName; + + @JsonCreator + public HiveColumnProjectionInfo( + @JsonProperty("dereferenceIndices") List dereferenceIndices, + @JsonProperty("dereferenceNames") List dereferenceNames, + @JsonProperty("hiveType") HiveType hiveType, + @JsonProperty("type") Type type) + { + this.dereferenceIndices = requireNonNull(dereferenceIndices, "dereferenceIndices is null"); + this.dereferenceNames = requireNonNull(dereferenceNames, "dereferenceNames is null"); + checkArgument(dereferenceIndices.size() > 0, "dereferenceIndices should not be empty"); + checkArgument(dereferenceIndices.size() == dereferenceNames.size(), "dereferenceIndices and dereferenceNames should have the same sizes"); + + this.hiveType = requireNonNull(hiveType, "hiveType is null"); + this.type = requireNonNull(type, "type is null"); + + this.partialName = generatePartialName(dereferenceNames); + } + + public String getPartialName() + { + return partialName; + } + + @JsonProperty + public List getDereferenceIndices() + { + return dereferenceIndices; + } + + @JsonProperty + public List getDereferenceNames() + { + return dereferenceNames; + } + + @JsonProperty + public HiveType getHiveType() + { + return hiveType; + } + + @JsonProperty + public Type getType() + { + return type; + } + + @Override + public int hashCode() + { + return Objects.hash(dereferenceIndices, dereferenceNames, hiveType, type); + } + + @Override 
+ public boolean equals(Object obj) + { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + HiveColumnProjectionInfo other = (HiveColumnProjectionInfo) obj; + return Objects.equals(this.dereferenceIndices, other.dereferenceIndices) && + Objects.equals(this.dereferenceNames, other.dereferenceNames) && + Objects.equals(this.hiveType, other.hiveType) && + Objects.equals(this.type, other.type); + } + + public static String generatePartialName(List dereferenceNames) + { + return dereferenceNames.stream() + .map(name -> "#" + name) + .collect(Collectors.joining()); + } +} diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveMetadata.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveMetadata.java index 95f3fa959b0a..4382e5a3a26b 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveMetadata.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveMetadata.java @@ -135,6 +135,7 @@ import static io.prestosql.plugin.hive.HiveColumnHandle.FILE_MODIFIED_TIME_COLUMN_NAME; import static io.prestosql.plugin.hive.HiveColumnHandle.FILE_SIZE_COLUMN_NAME; import static io.prestosql.plugin.hive.HiveColumnHandle.PATH_COLUMN_NAME; +import static io.prestosql.plugin.hive.HiveColumnHandle.createBaseColumn; import static io.prestosql.plugin.hive.HiveColumnHandle.updateRowIdHandle; import static io.prestosql.plugin.hive.HiveErrorCode.HIVE_COLUMN_ORDER_MISMATCH; import static io.prestosql.plugin.hive.HiveErrorCode.HIVE_CONCURRENT_MODIFICATION_DETECTED; @@ -2348,11 +2349,11 @@ else if (column.isHidden()) { else { columnType = REGULAR; } - columnHandles.add(new HiveColumnHandle( + columnHandles.add(createBaseColumn( column.getName(), + ordinal, toHiveType(typeTranslator, column.getType()), column.getType(), - ordinal, columnType, Optional.ofNullable(column.getComment()))); ordinal++; diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HivePageSource.java 
b/presto-hive/src/main/java/io/prestosql/plugin/hive/HivePageSource.java index b7aa82f3f46d..b4d8785cbb22 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/HivePageSource.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HivePageSource.java @@ -59,6 +59,7 @@ import static io.airlift.slice.Slices.utf8Slice; import static io.prestosql.plugin.hive.HiveErrorCode.HIVE_CURSOR_ERROR; import static io.prestosql.plugin.hive.HiveErrorCode.HIVE_INVALID_BUCKET_FILES; +import static io.prestosql.plugin.hive.HivePageSourceProvider.ColumnMappingKind.EMPTY; import static io.prestosql.plugin.hive.HivePageSourceProvider.ColumnMappingKind.PREFILLED; import static io.prestosql.plugin.hive.HiveType.HIVE_BYTE; import static io.prestosql.plugin.hive.HiveType.HIVE_DOUBLE; @@ -120,12 +121,14 @@ public class HivePageSource private final Object[] prefilledValues; private final Type[] types; private final List>> coercers; + private final Optional projectionsAdapter; private final ConnectorPageSource delegate; public HivePageSource( List columnMappings, Optional bucketAdaptation, + Optional projectionsAdapter, DateTimeZone hiveStorageTimeZone, TypeManager typeManager, ConnectorPageSource delegate) @@ -138,6 +141,8 @@ public HivePageSource( this.columnMappings = columnMappings; this.bucketAdapter = bucketAdaptation.map(BucketAdapter::new); + this.projectionsAdapter = requireNonNull(projectionsAdapter, "projectionsAdapter is null"); + int size = columnMappings.size(); prefilledValues = new Object[size]; @@ -152,14 +157,22 @@ public HivePageSource( Type type = column.getType(); types[columnIndex] = type; - if (columnMapping.getCoercionFrom().isPresent()) { - coercers.add(Optional.of(createCoercer(typeManager, columnMapping.getCoercionFrom().get(), columnMapping.getHiveColumnHandle().getHiveType()))); + if (columnMapping.getKind() != EMPTY && columnMapping.getBaseTypeCoercionFrom().isPresent()) { + List dereferenceIndices = column.getHiveColumnProjectionInfo() + 
.map(HiveColumnProjectionInfo::getDereferenceIndices) + .orElse(ImmutableList.of()); + HiveType fromType = columnMapping.getBaseTypeCoercionFrom().get().getHiveTypeForDereferences(dereferenceIndices).get(); + HiveType toType = columnMapping.getHiveColumnHandle().getHiveType(); + coercers.add(Optional.of(createCoercer(typeManager, fromType, toType))); } else { coercers.add(Optional.empty()); } - if (columnMapping.getKind() == PREFILLED) { + if (columnMapping.getKind() == EMPTY) { + prefilledValues[columnIndex] = null; + } + else if (columnMapping.getKind() == PREFILLED) { String columnValue = columnMapping.getPrefilledValue(); byte[] bytes = columnValue.getBytes(UTF_8); @@ -246,6 +259,10 @@ public Page getNextPage() return null; } + if (projectionsAdapter.isPresent()) { + dataPage = projectionsAdapter.get().adaptPage(dataPage); + } + if (bucketAdapter.isPresent()) { IntArrayList rowsToKeep = bucketAdapter.get().computeEligibleRowIds(dataPage); dataPage = dataPage.getPositions(rowsToKeep.elements(), 0, rowsToKeep.size()); @@ -257,6 +274,7 @@ public Page getNextPage() ColumnMapping columnMapping = columnMappings.get(fieldId); switch (columnMapping.getKind()) { case PREFILLED: + case EMPTY: blocks.add(RunLengthEncodedBlock.create(types[fieldId], prefilledValues[fieldId], batchSize)); break; case REGULAR: diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HivePageSourceFactory.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HivePageSourceFactory.java index c18b0a2689c2..aceb46c7f202 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/HivePageSourceFactory.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HivePageSourceFactory.java @@ -24,9 +24,11 @@ import java.util.Optional; import java.util.Properties; +import static java.util.Objects.requireNonNull; + public interface HivePageSourceFactory { - Optional createPageSource( + Optional createPageSource( Configuration configuration, ConnectorSession session, Path path, @@ 
-38,4 +40,39 @@ Optional createPageSource( TupleDomain effectivePredicate, DateTimeZone hiveStorageTimeZone, Optional deleteDeltaLocations); + + /** + * A wrapper class for + * - delegate reader page source and + * - projection information for columns to be returned by the delegate + * + * Empty {@param projectedReaderColumns} indicates that the delegate page source reads the exact same columns provided to + * it in {@link HivePageSourceFactory#createPageSource} + */ + class ReaderPageSourceWithProjections + { + private final ConnectorPageSource connectorPageSource; + private final Optional projectedReaderColumns; + + public ReaderPageSourceWithProjections(ConnectorPageSource connectorPageSource, Optional projectedReaderColumns) + { + this.connectorPageSource = requireNonNull(connectorPageSource, "connectorPageSource is null"); + this.projectedReaderColumns = requireNonNull(projectedReaderColumns, "projectedReaderColumns is null"); + } + + public ConnectorPageSource getConnectorPageSource() + { + return connectorPageSource; + } + + public Optional getProjectedReaderColumns() + { + return projectedReaderColumns; + } + + public static ReaderPageSourceWithProjections noProjectionAdaptation(ConnectorPageSource connectorPageSource) + { + return new ReaderPageSourceWithProjections(connectorPageSource, Optional.empty()); + } + } } diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HivePageSourceProvider.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HivePageSourceProvider.java index 57f741e5c3d6..8f4f930d3c3f 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/HivePageSourceProvider.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HivePageSourceProvider.java @@ -16,6 +16,8 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import io.prestosql.plugin.hive.HdfsEnvironment.HdfsContext; +import io.prestosql.plugin.hive.HivePageSourceFactory.ReaderPageSourceWithProjections; +import 
io.prestosql.plugin.hive.HiveRecordCursorProvider.ReaderRecordCursorWithProjections; import io.prestosql.plugin.hive.HiveSplit.BucketConversion; import io.prestosql.plugin.hive.util.HiveBucketing.BucketingVersion; import io.prestosql.spi.connector.ColumnHandle; @@ -37,6 +39,7 @@ import javax.inject.Inject; +import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -139,7 +142,7 @@ public static Optional createHivePageSource( long fileModifiedTime, Properties schema, TupleDomain effectivePredicate, - List hiveColumns, + List columns, List partitionKeys, DateTimeZone hiveStorageTimeZone, TypeManager typeManager, @@ -154,7 +157,7 @@ public static Optional createHivePageSource( List columnMappings = ColumnMapping.buildColumnMappings( partitionKeys, - hiveColumns, + columns, bucketConversion.map(BucketConversion::getBucketColumnHandles).orElse(ImmutableList.of()), tableToPartitionMapping, path, @@ -163,25 +166,12 @@ public static Optional createHivePageSource( fileModifiedTime); List regularAndInterimColumnMappings = ColumnMapping.extractRegularAndInterimColumnMappings(columnMappings); - Optional bucketAdaptation = bucketConversion.map(conversion -> { - Map hiveIndexToBlockIndex = uniqueIndex(regularAndInterimColumnMappings, columnMapping -> columnMapping.getHiveColumnHandle().getHiveColumnIndex()); - int[] bucketColumnIndices = conversion.getBucketColumnHandles().stream() - .mapToInt(columnHandle -> hiveIndexToBlockIndex.get(columnHandle.getHiveColumnIndex()).getIndex()) - .toArray(); - List bucketColumnHiveTypes = conversion.getBucketColumnHandles().stream() - .map(columnHandle -> hiveIndexToBlockIndex.get(columnHandle.getHiveColumnIndex()).getHiveColumnHandle().getHiveType()) - .collect(toImmutableList()); - return new BucketAdaptation( - bucketColumnIndices, - bucketColumnHiveTypes, - conversion.getBucketingVersion(), - conversion.getTableBucketCount(), - conversion.getPartitionBucketCount(), - bucketNumber.getAsInt()); - 
}); + Optional bucketAdaptation = createBucketAdaptation(bucketConversion, bucketNumber, regularAndInterimColumnMappings); for (HivePageSourceFactory pageSourceFactory : pageSourceFactories) { - Optional pageSource = pageSourceFactory.createPageSource( + List desiredColumns = toColumnHandles(regularAndInterimColumnMappings, true, typeManager); + + Optional readerWithProjections = pageSourceFactory.createPageSource( configuration, session, path, @@ -189,18 +179,27 @@ public static Optional createHivePageSource( length, fileSize, schema, - toColumnHandles(regularAndInterimColumnMappings, true, typeManager), + desiredColumns, effectivePredicate, hiveStorageTimeZone, deleteDeltaLocations); - if (pageSource.isPresent()) { - return Optional.of( - new HivePageSource( - columnMappings, - bucketAdaptation, - hiveStorageTimeZone, - typeManager, - pageSource.get())); + + if (readerWithProjections.isPresent()) { + ConnectorPageSource pageSource = readerWithProjections.get().getConnectorPageSource(); + + Optional readerProjections = readerWithProjections.get().getProjectedReaderColumns(); + Optional adapter = Optional.empty(); + if (readerProjections.isPresent()) { + adapter = Optional.of(new ReaderProjectionsAdapter(desiredColumns, readerProjections.get())); + } + + return Optional.of(new HivePageSource( + columnMappings, + bucketAdaptation, + adapter, + hiveStorageTimeZone, + typeManager, + pageSource)); } } @@ -208,7 +207,8 @@ public static Optional createHivePageSource( // GenericHiveRecordCursor will automatically do the coercion without HiveCoercionRecordCursor boolean doCoercion = !(provider instanceof GenericHiveRecordCursorProvider); - Optional cursor = provider.createRecordCursor( + List desiredColumns = toColumnHandles(regularAndInterimColumnMappings, doCoercion, typeManager); + Optional readerWithProjections = provider.createRecordCursor( configuration, session, path, @@ -216,14 +216,20 @@ public static Optional createHivePageSource( length, fileSize, schema, - 
toColumnHandles(regularAndInterimColumnMappings, doCoercion, typeManager), + desiredColumns, effectivePredicate, hiveStorageTimeZone, typeManager, s3SelectPushdownEnabled); - if (cursor.isPresent()) { - RecordCursor delegate = cursor.get(); + if (readerWithProjections.isPresent()) { + RecordCursor delegate = readerWithProjections.get().getRecordCursor(); + Optional projections = readerWithProjections.get().getProjectedReaderColumns(); + + if (projections.isPresent()) { + ReaderProjectionsAdapter projectionsAdapter = new ReaderProjectionsAdapter(desiredColumns, projections.get()); + delegate = new HiveReaderProjectionsAdaptingRecordCursor(delegate, projectionsAdapter); + } checkArgument(!deleteDeltaLocations.isPresent(), "Delete delta is not supported"); @@ -248,7 +254,7 @@ public static Optional createHivePageSource( columnMappings, hiveStorageTimeZone, delegate); - List columnTypes = hiveColumns.stream() + List columnTypes = columns.stream() .map(HiveColumnHandle::getType) .collect(toList()); @@ -268,33 +274,45 @@ public static class ColumnMapping * ordinal of this column in the underlying page source or record cursor */ private final OptionalInt index; - private final Optional coercionFrom; + private final Optional baseTypeCoercionFrom; - public static ColumnMapping regular(HiveColumnHandle hiveColumnHandle, int index, Optional coerceFrom) + public static ColumnMapping regular(HiveColumnHandle hiveColumnHandle, int index, Optional baseTypeCoercionFrom) { checkArgument(hiveColumnHandle.getColumnType() == REGULAR); - return new ColumnMapping(ColumnMappingKind.REGULAR, hiveColumnHandle, Optional.empty(), OptionalInt.of(index), coerceFrom); + return new ColumnMapping(ColumnMappingKind.REGULAR, hiveColumnHandle, Optional.empty(), OptionalInt.of(index), baseTypeCoercionFrom); } - public static ColumnMapping prefilled(HiveColumnHandle hiveColumnHandle, String prefilledValue, Optional coerceFrom) + public static ColumnMapping prefilled(HiveColumnHandle hiveColumnHandle, 
String prefilledValue, Optional baseTypeCoercionFrom) { checkArgument(hiveColumnHandle.getColumnType() == PARTITION_KEY || hiveColumnHandle.getColumnType() == SYNTHESIZED); - return new ColumnMapping(ColumnMappingKind.PREFILLED, hiveColumnHandle, Optional.of(prefilledValue), OptionalInt.empty(), coerceFrom); + checkArgument(hiveColumnHandle.isBaseColumn(), "prefilled values not supported for projected columns"); + return new ColumnMapping(ColumnMappingKind.PREFILLED, hiveColumnHandle, Optional.of(prefilledValue), OptionalInt.empty(), baseTypeCoercionFrom); } - public static ColumnMapping interim(HiveColumnHandle hiveColumnHandle, int index) + public static ColumnMapping interim(HiveColumnHandle hiveColumnHandle, int index, Optional baseTypeCoercionFrom) { checkArgument(hiveColumnHandle.getColumnType() == REGULAR); - return new ColumnMapping(ColumnMappingKind.INTERIM, hiveColumnHandle, Optional.empty(), OptionalInt.of(index), Optional.empty()); + return new ColumnMapping(ColumnMappingKind.INTERIM, hiveColumnHandle, Optional.empty(), OptionalInt.of(index), baseTypeCoercionFrom); } - private ColumnMapping(ColumnMappingKind kind, HiveColumnHandle hiveColumnHandle, Optional prefilledValue, OptionalInt index, Optional coerceFrom) + public static ColumnMapping empty(HiveColumnHandle hiveColumnHandle) + { + checkArgument(hiveColumnHandle.getColumnType() == REGULAR); + return new ColumnMapping(ColumnMappingKind.EMPTY, hiveColumnHandle, Optional.empty(), OptionalInt.empty(), Optional.empty()); + } + + private ColumnMapping( + ColumnMappingKind kind, + HiveColumnHandle hiveColumnHandle, + Optional prefilledValue, + OptionalInt index, + Optional baseTypeCoercionFrom) { this.kind = requireNonNull(kind, "kind is null"); this.hiveColumnHandle = requireNonNull(hiveColumnHandle, "hiveColumnHandle is null"); this.prefilledValue = requireNonNull(prefilledValue, "prefilledValue is null"); this.index = requireNonNull(index, "index is null"); - this.coercionFrom = 
requireNonNull(coerceFrom, "coerceFrom is null"); + this.baseTypeCoercionFrom = requireNonNull(baseTypeCoercionFrom, "baseTypeCoercionFrom is null"); } public ColumnMappingKind getKind() @@ -319,9 +337,9 @@ public int getIndex() return index.getAsInt(); } - public Optional getCoercionFrom() + public Optional getBaseTypeCoercionFrom() { - return coercionFrom; + return baseTypeCoercionFrom; } public static List buildColumnMappings( @@ -335,37 +353,72 @@ public static List buildColumnMappings( long fileModifiedTime) { Map partitionKeysByName = uniqueIndex(partitionKeys, HivePartitionKey::getName); - int regularIndex = 0; - Set regularColumnIndices = new HashSet<>(); + + // Maintain state about hive columns added to the mapping as we iterate (for validation) + Set baseColumnHiveIndices = new HashSet<>(); + Map>> projectionsForColumn = new HashMap<>(); + ImmutableList.Builder columnMappings = ImmutableList.builder(); + int regularIndex = 0; + for (HiveColumnHandle column : columns) { - Optional coercionFrom = tableToPartitionMapping.getCoercion(column.getHiveColumnIndex()); + Optional baseTypeCoercionFrom = tableToPartitionMapping.getCoercion(column.getBaseHiveColumnIndex()); + if (column.getColumnType() == REGULAR) { - checkArgument(regularColumnIndices.add(column.getHiveColumnIndex()), "duplicate hiveColumnIndex in columns list"); - columnMappings.add(regular(column, regularIndex, coercionFrom)); - regularIndex++; + if (column.isBaseColumn()) { + baseColumnHiveIndices.add(column.getBaseHiveColumnIndex()); + } + + checkArgument( + projectionsForColumn.computeIfAbsent(column.getBaseHiveColumnIndex(), HashSet::new).add(column.getHiveColumnProjectionInfo()), + "duplicate column in columns list"); + + // Add regular mapping if projection is valid for partition schema, otherwise add an empty mapping + if (!baseTypeCoercionFrom.isPresent() + || projectionValidForType(baseTypeCoercionFrom.get(), column.getHiveColumnProjectionInfo())) { + columnMappings.add(regular(column, 
regularIndex, baseTypeCoercionFrom)); + regularIndex++; + } + else { + columnMappings.add(empty(column)); + } } else { columnMappings.add(prefilled( column, getPrefilledColumnValue(column, partitionKeysByName.get(column.getName()), path, bucketNumber, fileSize, fileModifiedTime), - coercionFrom)); + baseTypeCoercionFrom)); } } + for (HiveColumnHandle column : requiredInterimColumns) { checkArgument(column.getColumnType() == REGULAR); - if (regularColumnIndices.contains(column.getHiveColumnIndex())) { + checkArgument(column.isBaseColumn(), "bucketed columns should be base columns"); + if (baseColumnHiveIndices.contains(column.getBaseHiveColumnIndex())) { continue; // This column exists in columns. Do not add it again. } - // If coercion does not affect bucket number calculation, coercion doesn't need to be applied here. - // Otherwise, read of this partition should not be allowed. - // (Alternatively, the partition could be read as an unbucketed partition. This is not implemented.) - columnMappings.add(interim(column, regularIndex)); + + if (projectionsForColumn.containsKey(column.getBaseHiveColumnIndex())) { + columnMappings.add(interim(column, regularIndex, tableToPartitionMapping.getCoercion(column.getBaseHiveColumnIndex()))); + } + else { + // If coercion does not affect bucket number calculation, coercion doesn't need to be applied here. + // Otherwise, read of this partition should not be allowed. + // (Alternatively, the partition could be read as an unbucketed partition. This is not implemented.) 
+ columnMappings.add(interim(column, regularIndex, Optional.empty())); + } regularIndex++; } return columnMappings.build(); } + private static boolean projectionValidForType(HiveType baseType, Optional projection) + { + List dereferences = projection.map(HiveColumnProjectionInfo::getDereferenceIndices).orElse(ImmutableList.of()); + Optional targetType = baseType.getHiveTypeForDereferences(dereferences); + return targetType.isPresent(); + } + public static List extractRegularAndInterimColumnMappings(List columnMappings) { return columnMappings.stream() @@ -378,16 +431,28 @@ public static List toColumnHandles(List regular return regularColumnMappings.stream() .map(columnMapping -> { HiveColumnHandle columnHandle = columnMapping.getHiveColumnHandle(); - if (!doCoercion || !columnMapping.getCoercionFrom().isPresent()) { + if (!doCoercion || !columnMapping.getBaseTypeCoercionFrom().isPresent()) { return columnHandle; } + HiveType fromHiveTypeBase = columnMapping.getBaseTypeCoercionFrom().get(); + + Optional newColumnProjectionInfo = columnHandle.getHiveColumnProjectionInfo().map(projectedColumn -> { + HiveType fromHiveType = fromHiveTypeBase.getHiveTypeForDereferences(projectedColumn.getDereferenceIndices()).get(); + return new HiveColumnProjectionInfo( + projectedColumn.getDereferenceIndices(), + projectedColumn.getDereferenceNames(), + fromHiveType, + fromHiveType.getType(typeManager)); + }); + return new HiveColumnHandle( - columnHandle.getName(), - columnMapping.getCoercionFrom().get(), - columnMapping.getCoercionFrom().get().getType(typeManager), - columnHandle.getHiveColumnIndex(), + columnHandle.getBaseColumnName(), + columnHandle.getBaseHiveColumnIndex(), + fromHiveTypeBase, + fromHiveTypeBase.getType(typeManager), + newColumnProjectionInfo, columnHandle.getColumnType(), - Optional.empty()); + columnHandle.getComment()); }) .collect(toList()); } @@ -398,6 +463,31 @@ public enum ColumnMappingKind REGULAR, PREFILLED, INTERIM, + EMPTY + } + + private static 
Optional createBucketAdaptation(Optional bucketConversion, OptionalInt bucketNumber, List columnMappings) + { + return bucketConversion.map(conversion -> { + List baseColumnMapping = columnMappings.stream() + .filter(mapping -> mapping.getHiveColumnHandle().isBaseColumn()) + .collect(toList()); + Map baseHiveColumnToBlockIndex = uniqueIndex(baseColumnMapping, mapping -> mapping.getHiveColumnHandle().getBaseHiveColumnIndex()); + + int[] bucketColumnIndices = conversion.getBucketColumnHandles().stream() + .mapToInt(columnHandle -> baseHiveColumnToBlockIndex.get(columnHandle.getBaseHiveColumnIndex()).getIndex()) + .toArray(); + List bucketColumnHiveTypes = conversion.getBucketColumnHandles().stream() + .map(columnHandle -> baseHiveColumnToBlockIndex.get(columnHandle.getBaseHiveColumnIndex()).getHiveColumnHandle().getHiveType()) + .collect(toImmutableList()); + return new BucketAdaptation( + bucketColumnIndices, + bucketColumnHiveTypes, + conversion.getBucketingVersion(), + conversion.getTableBucketCount(), + conversion.getPartitionBucketCount(), + bucketNumber.getAsInt()); + }); } public static class BucketAdaptation diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveReaderProjectionsAdaptingRecordCursor.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveReaderProjectionsAdaptingRecordCursor.java new file mode 100644 index 000000000000..0d6a7ae9f480 --- /dev/null +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveReaderProjectionsAdaptingRecordCursor.java @@ -0,0 +1,235 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.plugin.hive; + +import com.google.common.collect.Iterables; +import io.airlift.slice.Slice; +import io.prestosql.plugin.hive.ReaderProjectionsAdapter.ChannelMapping; +import io.prestosql.spi.block.Block; +import io.prestosql.spi.connector.RecordCursor; +import io.prestosql.spi.type.Type; + +import java.util.List; + +import static com.google.common.base.Preconditions.checkArgument; +import static java.util.Objects.requireNonNull; + +/** + * Applies projections on delegate fields provided by {@link ChannelMapping} to produce fields expected from this cursor. + */ +public class HiveReaderProjectionsAdaptingRecordCursor + implements RecordCursor +{ + private final RecordCursor delegate; + private final ChannelMapping[] channelMappings; + private final Type[] outputTypes; + private final Type[] inputTypes; + + private final Type[] baseTypes; + + public HiveReaderProjectionsAdaptingRecordCursor(RecordCursor delegate, ReaderProjectionsAdapter projectionsAdapter) + { + this.delegate = requireNonNull(delegate, "delegate is null"); + requireNonNull(projectionsAdapter, "projectionsAdapter is null"); + + this.channelMappings = new ChannelMapping[projectionsAdapter.getOutputToInputMapping().size()]; + projectionsAdapter.getOutputToInputMapping().toArray(channelMappings); + + this.outputTypes = new Type[projectionsAdapter.getOutputTypes().size()]; + projectionsAdapter.getOutputTypes().toArray(outputTypes); + + this.inputTypes = new Type[projectionsAdapter.getInputTypes().size()]; + projectionsAdapter.getInputTypes().toArray(inputTypes); + + this.baseTypes = new Type[outputTypes.length]; + for (int i = 0; i < baseTypes.length; i++) { + Type type = inputTypes[channelMappings[i].getInputChannelIndex()]; + List dereferences = channelMappings[i].getDereferenceSequence(); + for (int j = 0; j < dereferences.size(); j++) { + type = 
type.getTypeParameters().get(dereferences.get(j)); + } + baseTypes[i] = type; + } + } + + @Override + public long getCompletedBytes() + { + return delegate.getCompletedBytes(); + } + + @Override + public long getReadTimeNanos() + { + return delegate.getReadTimeNanos(); + } + + @Override + public Type getType(int field) + { + return outputTypes[field]; + } + + @Override + public boolean advanceNextPosition() + { + return delegate.advanceNextPosition(); + } + + private Block applyDereferences(Block baseObject, List dereferences, int length) + { + checkArgument(length <= dereferences.size()); + Block current = baseObject; + for (int i = 0; i < length; i++) { + current = current.getObject(dereferences.get(i), Block.class); + } + return current; + } + + @Override + public boolean getBoolean(int field) + { + int inputFieldIndex = channelMappings[field].getInputChannelIndex(); + List dereferences = channelMappings[field].getDereferenceSequence(); + + if (dereferences.isEmpty()) { + return delegate.getBoolean(inputFieldIndex); + } + + // Get SingleRowBlock corresponding to the element at current position + Block elementBlock = (Block) delegate.getObject(inputFieldIndex); + + // Apply dereferences except for the last one, which is type dependent + Block baseObject = applyDereferences(elementBlock, dereferences, dereferences.size() - 1); + + return baseTypes[field].getBoolean(baseObject, Iterables.getLast(dereferences)); + } + + @Override + public long getLong(int field) + { + int inputFieldIndex = channelMappings[field].getInputChannelIndex(); + List dereferences = channelMappings[field].getDereferenceSequence(); + + if (dereferences.isEmpty()) { + return delegate.getLong(inputFieldIndex); + } + + // Get SingleRowBlock corresponding to the element at current position + Block elementBlock = (Block) delegate.getObject(inputFieldIndex); + + // Apply dereferences except for the last one, which is type dependent + Block baseObject = applyDereferences(elementBlock, dereferences, 
dereferences.size() - 1); + + return baseTypes[field].getLong(baseObject, Iterables.getLast(dereferences)); + } + + @Override + public double getDouble(int field) + { + int inputFieldIndex = channelMappings[field].getInputChannelIndex(); + List dereferences = channelMappings[field].getDereferenceSequence(); + + if (dereferences.isEmpty()) { + return delegate.getDouble(inputFieldIndex); + } + + // Get SingleRowBlock corresponding to the element at current position + Block elementBlock = (Block) delegate.getObject(inputFieldIndex); + + // Apply dereferences except for the last one, which is type dependent + Block baseObject = applyDereferences(elementBlock, dereferences, dereferences.size() - 1); + + return baseTypes[field].getDouble(baseObject, Iterables.getLast(dereferences)); + } + + @Override + public Slice getSlice(int field) + { + int inputFieldIndex = channelMappings[field].getInputChannelIndex(); + List dereferences = channelMappings[field].getDereferenceSequence(); + + if (dereferences.isEmpty()) { + return delegate.getSlice(inputFieldIndex); + } + + // Get SingleRowBlock corresponding to the element at current position + Block elementBlock = (Block) delegate.getObject(inputFieldIndex); + + // Apply dereferences except for the last one, which is type dependent + Block baseObject = applyDereferences(elementBlock, dereferences, dereferences.size() - 1); + + return baseTypes[field].getSlice(baseObject, Iterables.getLast(dereferences)); + } + + @Override + public Object getObject(int field) + { + int inputFieldIndex = channelMappings[field].getInputChannelIndex(); + List dereferences = channelMappings[field].getDereferenceSequence(); + + if (dereferences.isEmpty()) { + return delegate.getObject(inputFieldIndex); + } + + // Get SingleRowBlock corresponding to the element at current position + Block elementBlock = (Block) delegate.getObject(inputFieldIndex); + + // Apply dereferences except for the last one, which is type dependent + Block baseObject = 
applyDereferences(elementBlock, dereferences, dereferences.size() - 1); + + return baseTypes[field].getObject(baseObject, Iterables.getLast(dereferences)); + } + + @Override + public boolean isNull(int field) + { + int inputFieldIndex = channelMappings[field].getInputChannelIndex(); + List dereferences = channelMappings[field].getDereferenceSequence(); + + if (dereferences.isEmpty()) { + return delegate.isNull(inputFieldIndex); + } + + if (delegate.isNull(inputFieldIndex)) { + return true; + } + + // Get SingleRowBlock corresponding to the element at current position + Block baseObject = (Block) delegate.getObject(inputFieldIndex); + + for (int j = 0; j < dereferences.size() - 1; j++) { + int dereferenceIndex = dereferences.get(j); + if (baseObject.isNull(dereferenceIndex)) { + return true; + } + baseObject = baseObject.getObject(dereferenceIndex, Block.class); + } + + int finalDereference = Iterables.getLast(dereferences); + return baseObject.isNull(finalDereference); + } + + @Override + public long getSystemMemoryUsage() + { + return delegate.getSystemMemoryUsage(); + } + + @Override + public void close() + { + delegate.close(); + } +} diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveRecordCursor.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveRecordCursor.java index 58aa7bbd3b1a..fe5e9dfb7a9d 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveRecordCursor.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveRecordCursor.java @@ -25,6 +25,7 @@ import java.util.List; +import static io.prestosql.plugin.hive.HivePageSourceProvider.ColumnMappingKind.EMPTY; import static io.prestosql.plugin.hive.HivePageSourceProvider.ColumnMappingKind.PREFILLED; import static io.prestosql.plugin.hive.HivePageSourceProvider.ColumnMappingKind.REGULAR; import static io.prestosql.plugin.hive.util.HiveUtil.bigintPartitionKey; @@ -99,6 +100,9 @@ public HiveRecordCursor( for (int columnIndex = 0; columnIndex < size; 
columnIndex++) { ColumnMapping columnMapping = columnMappings.get(columnIndex); + if (columnMapping.getKind() == EMPTY) { + nulls[columnIndex] = true; + } if (columnMapping.getKind() == PREFILLED) { String columnValue = columnMapping.getPrefilledValue(); byte[] bytes = columnValue.getBytes(UTF_8); diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveRecordCursorProvider.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveRecordCursorProvider.java index c84e036d63a0..f307921154e3 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveRecordCursorProvider.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveRecordCursorProvider.java @@ -25,9 +25,11 @@ import java.util.Optional; import java.util.Properties; +import static java.util.Objects.requireNonNull; + public interface HiveRecordCursorProvider { - Optional createRecordCursor( + Optional createRecordCursor( Configuration configuration, ConnectorSession session, Path path, @@ -40,4 +42,34 @@ Optional createRecordCursor( DateTimeZone hiveStorageTimeZone, TypeManager typeManager, boolean s3SelectPushdownEnabled); + + /** + * A wrapper class for + * - delegate reader record cursor and + * - projection information for columns to be returned by the delegate + * + * Empty {@param projectedReaderColumns} indicates that the delegate cursor reads the exact same columns provided to + * it in {@link HiveRecordCursorProvider#createRecordCursor} + */ + class ReaderRecordCursorWithProjections + { + private final RecordCursor recordCursor; + private final Optional projectedReaderColumns; + + public ReaderRecordCursorWithProjections(RecordCursor recordCursor, Optional projectedReaderColumns) + { + this.recordCursor = requireNonNull(recordCursor, "recordCursor is null"); + this.projectedReaderColumns = requireNonNull(projectedReaderColumns, "projectedReaderColumns is null"); + } + + public RecordCursor getRecordCursor() + { + return recordCursor; + } + + public Optional 
getProjectedReaderColumns() + { + return projectedReaderColumns; + } + } } diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveType.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveType.java index 393d5ee3348f..a7d4bb4e87a1 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveType.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveType.java @@ -37,6 +37,7 @@ import java.util.Locale; import java.util.Optional; +import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.collect.ImmutableList.toImmutableList; import static io.prestosql.spi.StandardErrorCode.NOT_SUPPORTED; import static io.prestosql.spi.type.BigintType.BIGINT; @@ -279,4 +280,39 @@ public static Type getPrimitiveType(PrimitiveTypeInfo typeInfo) return null; } } + + public Optional getHiveTypeForDereferences(List dereferences) + { + TypeInfo typeInfo = getTypeInfo(); + for (int fieldIndex : dereferences) { + checkArgument(typeInfo instanceof StructTypeInfo, "typeInfo should be struct type", typeInfo); + StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo; + try { + typeInfo = structTypeInfo.getAllStructFieldTypeInfos().get(fieldIndex); + } + catch (RuntimeException e) { + return Optional.empty(); + } + } + return Optional.of(toHiveType(typeInfo)); + } + + public List getHiveDereferenceNames(List dereferences) + { + ImmutableList.Builder dereferenceNames = ImmutableList.builder(); + TypeInfo typeInfo = getTypeInfo(); + for (int fieldIndex : dereferences) { + checkArgument(typeInfo instanceof StructTypeInfo, "typeInfo should be struct type", typeInfo); + StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo; + + checkArgument(fieldIndex >= 0, "fieldIndex cannot be negative"); + checkArgument(fieldIndex < structTypeInfo.getAllStructFieldNames().size(), + "fieldIndex should be less than the number of fields in the struct"); + String fieldName = structTypeInfo.getAllStructFieldNames().get(fieldIndex); + 
dereferenceNames.add(fieldName); + typeInfo = structTypeInfo.getAllStructFieldTypeInfos().get(fieldIndex); + } + + return dereferenceNames.build(); + } } diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/IonSqlQueryBuilder.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/IonSqlQueryBuilder.java index 5c1f2c174090..681dc84c0aef 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/IonSqlQueryBuilder.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/IonSqlQueryBuilder.java @@ -62,6 +62,11 @@ public IonSqlQueryBuilder(TypeManager typeManager) public String buildSql(List columns, TupleDomain tupleDomain) { + columns.forEach(column -> checkArgument(column.isBaseColumn(), "%s is not a base column", column)); + tupleDomain.getDomains().ifPresent(domains -> { + domains.keySet().forEach(column -> checkArgument(column.isBaseColumn(), "%s is not a base column", column)); + }); + StringBuilder sql = new StringBuilder("SELECT "); if (columns.isEmpty()) { @@ -69,7 +74,7 @@ public String buildSql(List columns, TupleDomain format("s._%d", column.getHiveColumnIndex() + 1)) + .map(column -> format("s._%d", column.getBaseHiveColumnIndex() + 1)) .collect(joining(", ")); sql.append(columnNames); } @@ -94,7 +99,7 @@ private List toConjuncts(List columns, TupleDomain readerColumns; + // indices for mapping expected hive column handles to the reader's column handles + private final List readerBlockIndices; + + private ReaderProjections(List readerColumns, List readerBlockIndices) + { + this.readerColumns = ImmutableList.copyOf(requireNonNull(readerColumns, "readerColumns is null")); + + readerBlockIndices.forEach(value -> checkArgument(value >= 0 && value < readerColumns.size(), "block index out of bounds")); + this.readerBlockIndices = ImmutableList.copyOf(requireNonNull(readerBlockIndices, "readerBlockIndices is null")); + } + + /** + * For a column required by the {@link HivePageSource}, returns the column read by the delegate page source or 
record cursor. + */ + public HiveColumnHandle readerColumnForHiveColumnAt(int index) + { + checkArgument(index >= 0 && index < readerBlockIndices.size(), "index is not valid"); + int readerIndex = readerBlockIndices.get(index); + return readerColumns.get(readerIndex); + } + + /** + * For a channel expected by {@link HivePageSource}, returns the channel index in the underlying page source or record cursor. + */ + public int readerColumnPositionForHiveColumnAt(int index) + { + checkArgument(index >= 0 && index < readerBlockIndices.size(), "index is invalid"); + return readerBlockIndices.get(index); + } + + /** + * returns the actual list of columns being read by underlying page source or record cursor in order. + */ + public List getReaderColumns() + { + return readerColumns; + } + + /** + * Creates a mapping between the input {@param columns} and base columns if required. + */ + public static Optional projectBaseColumns(List columns) + { + requireNonNull(columns, "columns is null"); + + // No projection is required if all columns are base columns + if (columns.stream().allMatch(HiveColumnHandle::isBaseColumn)) { + return Optional.empty(); + } + + ImmutableList.Builder projectedColumns = ImmutableList.builder(); + ImmutableList.Builder outputColumnMapping = ImmutableList.builder(); + Map mappedHiveColumnIndices = new HashMap<>(); + int projectedColumnCount = 0; + + for (HiveColumnHandle column : columns) { + int hiveColumnIndex = column.getBaseHiveColumnIndex(); + Integer mapped = mappedHiveColumnIndices.get(hiveColumnIndex); + + if (mapped == null) { + projectedColumns.add(column.getBaseColumn()); + mappedHiveColumnIndices.put(hiveColumnIndex, projectedColumnCount); + outputColumnMapping.add(projectedColumnCount); + projectedColumnCount++; + } + else { + outputColumnMapping.add(mapped); + } + } + + return Optional.of(new ReaderProjections(projectedColumns.build(), outputColumnMapping.build())); + } +} diff --git 
a/presto-hive/src/main/java/io/prestosql/plugin/hive/ReaderProjectionsAdapter.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/ReaderProjectionsAdapter.java new file mode 100644 index 000000000000..d7bf5e278168 --- /dev/null +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/ReaderProjectionsAdapter.java @@ -0,0 +1,249 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.plugin.hive; + +import com.google.common.annotations.VisibleForTesting; +import com.google.common.collect.ImmutableList; +import io.prestosql.spi.Page; +import io.prestosql.spi.block.Block; +import io.prestosql.spi.block.BlockBuilder; +import io.prestosql.spi.block.ColumnarRow; +import io.prestosql.spi.block.LazyBlock; +import io.prestosql.spi.block.LazyBlockLoader; +import io.prestosql.spi.type.Type; + +import java.util.List; + +import static com.google.common.base.Preconditions.checkArgument; +import static com.google.common.base.Preconditions.checkState; +import static com.google.common.collect.ImmutableList.toImmutableList; +import static io.prestosql.plugin.hive.ReaderProjectionsAdapter.ChannelMapping.createChannelMapping; +import static io.prestosql.spi.block.ColumnarRow.toColumnarRow; +import static java.util.Objects.requireNonNull; + +public class ReaderProjectionsAdapter +{ + private final List outputToInputMapping; + private final List outputTypes; + private final List inputTypes; + + public ReaderProjectionsAdapter(List expectedHiveColumns, 
ReaderProjections readerProjections) + { + requireNonNull(expectedHiveColumns, "expectedHiveColumns is null"); + requireNonNull(readerProjections, "readerProjections is null"); + + ImmutableList.Builder mappingBuilder = ImmutableList.builder(); + + for (int i = 0; i < expectedHiveColumns.size(); i++) { + HiveColumnHandle projectedColumnHandle = readerProjections.readerColumnForHiveColumnAt(i); + int inputChannel = readerProjections.readerColumnPositionForHiveColumnAt(i); + ChannelMapping mapping = createChannelMapping(expectedHiveColumns.get(i), projectedColumnHandle, inputChannel); + mappingBuilder.add(mapping); + } + + outputToInputMapping = mappingBuilder.build(); + + outputTypes = expectedHiveColumns.stream() + .map(HiveColumnHandle::getType) + .collect(toImmutableList()); + + inputTypes = readerProjections.getReaderColumns().stream() + .map(HiveColumnHandle::getType) + .collect(toImmutableList()); + } + + public Page adaptPage(Page input) + { + if (input == null) { + return null; + } + + Block[] blocks = new Block[outputToInputMapping.size()]; + + // Prepare adaptations to extract dereferences + for (int i = 0; i < outputToInputMapping.size(); i++) { + ChannelMapping mapping = outputToInputMapping.get(i); + + Block inputBlock = input.getBlock(mapping.getInputChannelIndex()); + blocks[i] = createAdaptedLazyBlock(inputBlock, mapping.getDereferenceSequence(), outputTypes.get(i)); + } + + return new Page(input.getPositionCount(), blocks); + } + + private static Block createAdaptedLazyBlock(Block inputBlock, List dereferenceSequence, Type type) + { + if (dereferenceSequence.size() == 0) { + return inputBlock; + } + + if (inputBlock == null) { + return null; + } + + return new LazyBlock(inputBlock.getPositionCount(), new DereferenceBlockLoader(inputBlock, dereferenceSequence, type)); + } + + private static class DereferenceBlockLoader + implements LazyBlockLoader + { + private final List dereferenceSequence; + private final Type type; + private boolean loaded; + 
private Block inputBlock; + + DereferenceBlockLoader(Block inputBlock, List dereferenceSequence, Type type) + { + this.inputBlock = requireNonNull(inputBlock, "inputBlock is null"); + this.dereferenceSequence = requireNonNull(dereferenceSequence, "dereferenceSequence is null"); + this.type = type; + } + + @Override + public Block load() + { + checkState(!loaded, "Already loaded"); + Block loadedBlock = loadInternalBlock(dereferenceSequence, inputBlock); + inputBlock = null; + loaded = true; + return loadedBlock; + } + + /** + * Applies dereference operations on the input block to extract the required internal block. If the input block is lazy + * in a nested manner, this implementation avoids loading the entire input block. + */ + private Block loadInternalBlock(List dereferences, Block parentBlock) + { + if (dereferences.size() == 0) { + return parentBlock.getLoadedBlock(); + } + + ColumnarRow columnarRow = toColumnarRow(parentBlock); + + int dereferenceIndex = dereferences.get(0); + List remainingDereferences = dereferences.subList(1, dereferences.size()); + + Block fieldBlock = columnarRow.getField(dereferenceIndex); + Block loadedInternalBlock = loadInternalBlock(remainingDereferences, fieldBlock); + + // Field blocks provided by ColumnarRow can have a smaller position count, because they do not store nulls. + // The following step adds null elements (when required) to the loaded block. + return adaptNulls(columnarRow, loadedInternalBlock); + } + + private Block adaptNulls(ColumnarRow columnarRow, Block loadedInternalBlock) + { + // TODO: The current implementation copies over data to a new block builder when a null row element is found. + // We can optimize this by using a Block implementation that uses a null vector of the parent row block and + // the block for the field. 
+ + BlockBuilder newlyCreatedBlock = null; + int fieldBlockPosition = 0; + + for (int i = 0; i < columnarRow.getPositionCount(); i++) { + boolean isRowNull = columnarRow.isNull(i); + + if (isRowNull) { + // A new block is only created when a null is encountered for the first time. + if (newlyCreatedBlock == null) { + newlyCreatedBlock = type.createBlockBuilder(null, columnarRow.getPositionCount()); + + // Copy over all elements encountered so far to the new block + for (int j = 0; j < i; j++) { + type.appendTo(loadedInternalBlock, j, newlyCreatedBlock); + } + } + newlyCreatedBlock.appendNull(); + } + else { + if (newlyCreatedBlock != null) { + type.appendTo(loadedInternalBlock, fieldBlockPosition, newlyCreatedBlock); + } + fieldBlockPosition++; + } + } + + if (newlyCreatedBlock == null) { + // If there was no need to create a null, return the original block + return loadedInternalBlock; + } + + return newlyCreatedBlock.build(); + } + } + + List getOutputToInputMapping() + { + return outputToInputMapping; + } + + List getOutputTypes() + { + return outputTypes; + } + + List getInputTypes() + { + return inputTypes; + } + + @VisibleForTesting + static class ChannelMapping + { + private final int inputChannelIndex; + private final List dereferenceSequence; + + private ChannelMapping(int inputBlockIndex, List dereferenceSequence) + { + checkArgument(inputBlockIndex >= 0, "inputBlockIndex cannot be negative"); + this.inputChannelIndex = inputBlockIndex; + this.dereferenceSequence = ImmutableList.copyOf(requireNonNull(dereferenceSequence, "dereferences is null")); + } + + public int getInputChannelIndex() + { + return inputChannelIndex; + } + + public List getDereferenceSequence() + { + return dereferenceSequence; + } + + static ChannelMapping createChannelMapping(HiveColumnHandle expected, HiveColumnHandle delegate, int inputBlockIndex) + { + List dereferences = validateProjectionAndExtractDereferences(expected, delegate); + return new ChannelMapping(inputBlockIndex, 
dereferences); + } + + private static List validateProjectionAndExtractDereferences(HiveColumnHandle expectedColumn, HiveColumnHandle readerColumn) + { + checkArgument(expectedColumn.getBaseColumn().equals(readerColumn.getBaseColumn()), "reader column is not valid for expected column"); + + List expectedDereferences = expectedColumn.getHiveColumnProjectionInfo() + .map(HiveColumnProjectionInfo::getDereferenceIndices) + .orElse(ImmutableList.of()); + + List readerDereferences = readerColumn.getHiveColumnProjectionInfo() + .map(HiveColumnProjectionInfo::getDereferenceIndices) + .orElse(ImmutableList.of()); + + checkArgument(readerDereferences.size() <= expectedDereferences.size(), "Field returned by the reader should include expected field"); + checkArgument(expectedDereferences.subList(0, readerDereferences.size()).equals(readerDereferences), "Field returned by the reader should be a prefix of expected field"); + + return expectedDereferences.subList(readerDereferences.size(), expectedDereferences.size()); + } + } +} diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/orc/OrcPageSourceFactory.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/orc/OrcPageSourceFactory.java index 5dc45a9fb4bb..601f54bee51e 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/orc/OrcPageSourceFactory.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/orc/OrcPageSourceFactory.java @@ -31,6 +31,7 @@ import io.prestosql.plugin.hive.HdfsEnvironment; import io.prestosql.plugin.hive.HiveColumnHandle; import io.prestosql.plugin.hive.HivePageSourceFactory; +import io.prestosql.plugin.hive.ReaderProjections; import io.prestosql.plugin.hive.orc.OrcPageSource.ColumnAdaptation; import io.prestosql.spi.PrestoException; import io.prestosql.spi.connector.ConnectorPageSource; @@ -71,6 +72,7 @@ import static io.prestosql.plugin.hive.HiveErrorCode.HIVE_CANNOT_OPEN_SPLIT; import static io.prestosql.plugin.hive.HiveErrorCode.HIVE_FILE_MISSING_COLUMN_NAMES; import 
static io.prestosql.plugin.hive.HiveErrorCode.HIVE_MISSING_DATA; +import static io.prestosql.plugin.hive.HivePageSourceFactory.ReaderPageSourceWithProjections.noProjectionAdaptation; import static io.prestosql.plugin.hive.HiveSessionProperties.getOrcLazyReadSmallRanges; import static io.prestosql.plugin.hive.HiveSessionProperties.getOrcMaxBufferSize; import static io.prestosql.plugin.hive.HiveSessionProperties.getOrcMaxMergeDistance; @@ -80,6 +82,7 @@ import static io.prestosql.plugin.hive.HiveSessionProperties.isOrcBloomFiltersEnabled; import static io.prestosql.plugin.hive.HiveSessionProperties.isOrcNestedLazy; import static io.prestosql.plugin.hive.HiveSessionProperties.isUseOrcColumnNames; +import static io.prestosql.plugin.hive.ReaderProjections.projectBaseColumns; import static io.prestosql.plugin.hive.orc.OrcPageSource.handleException; import static io.prestosql.plugin.hive.util.HiveUtil.isDeserializerClass; import static io.prestosql.spi.type.BigintType.BIGINT; @@ -122,7 +125,7 @@ public OrcPageSourceFactory( } @Override - public Optional createPageSource( + public Optional createPageSource( Configuration configuration, ConnectorSession session, Path path, @@ -141,10 +144,14 @@ public Optional createPageSource( // per HIVE-13040 and ORC-162, empty files are allowed if (fileSize == 0) { - return Optional.of(new FixedPageSource(ImmutableList.of())); + ReaderPageSourceWithProjections context = noProjectionAdaptation(new FixedPageSource(ImmutableList.of())); + return Optional.of(context); } - return Optional.of(createOrcPageSource( + Optional projectedReaderColumns = projectBaseColumns(columns); + effectivePredicate = effectivePredicate.transform(column -> column.isBaseColumn() ? 
column : null); + + ConnectorPageSource orcPageSource = createOrcPageSource( hdfsEnvironment, session.getUser(), configuration, @@ -152,7 +159,9 @@ public Optional createPageSource( start, length, fileSize, - columns, + projectedReaderColumns + .map(ReaderProjections::getReaderColumns) + .orElse(columns), isUseOrcColumnNames(session), isFullAcidTable(Maps.fromProperties(schema)), effectivePredicate, @@ -167,7 +176,9 @@ public Optional createPageSource( .withNestedLazy(isOrcNestedLazy(session)) .withBloomFiltersEnabled(isOrcBloomFiltersEnabled(session)), deleteDeltaLocations, - stats)); + stats); + + return Optional.of(new ReaderPageSourceWithProjections(orcPageSource, projectedReaderColumns)); } private static OrcPageSource createOrcPageSource( @@ -249,8 +260,8 @@ private static OrcPageSource createOrcPageSource( if (useOrcColumnNames || isFullAcid) { orcColumn = fileColumnsByName.get(column.getName().toLowerCase(ENGLISH)); } - else if (column.getHiveColumnIndex() < fileColumns.size()) { - orcColumn = fileColumns.get(column.getHiveColumnIndex()); + else if (column.getBaseHiveColumnIndex() < fileColumns.size()) { + orcColumn = fileColumns.get(column.getBaseHiveColumnIndex()); } Type readType = column.getType(); diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/parquet/ParquetPageSourceFactory.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/parquet/ParquetPageSourceFactory.java index 028349c4eabc..a43a2d8a2aa6 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/parquet/ParquetPageSourceFactory.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/parquet/ParquetPageSourceFactory.java @@ -31,6 +31,7 @@ import io.prestosql.plugin.hive.HdfsEnvironment; import io.prestosql.plugin.hive.HiveColumnHandle; import io.prestosql.plugin.hive.HivePageSourceFactory; +import io.prestosql.plugin.hive.ReaderProjections; import io.prestosql.spi.PrestoException; import io.prestosql.spi.connector.ConnectorPageSource; import 
io.prestosql.spi.connector.ConnectorSession; @@ -78,6 +79,7 @@ import static io.prestosql.plugin.hive.HiveSessionProperties.getParquetMaxReadBlockSize; import static io.prestosql.plugin.hive.HiveSessionProperties.isFailOnCorruptedParquetStatistics; import static io.prestosql.plugin.hive.HiveSessionProperties.isUseParquetColumnNames; +import static io.prestosql.plugin.hive.ReaderProjections.projectBaseColumns; import static io.prestosql.plugin.hive.parquet.HdfsParquetDataSource.buildHdfsParquetDataSource; import static io.prestosql.plugin.hive.parquet.ParquetColumnIOConverter.constructField; import static io.prestosql.plugin.hive.util.HiveUtil.getDeserializerClassName; @@ -108,7 +110,7 @@ public ParquetPageSourceFactory(HdfsEnvironment hdfsEnvironment, FileFormatDataS } @Override - public Optional createPageSource( + public Optional createPageSource( Configuration configuration, ConnectorSession session, Path path, @@ -127,7 +129,12 @@ public Optional createPageSource( checkArgument(!deleteDeltaLocations.isPresent(), "Delete delta is not supported"); - return Optional.of(createParquetPageSource( + // Ignore predicates on partial columns for now. + effectivePredicate = effectivePredicate.transform(column -> column.isBaseColumn() ? 
column : null); + + Optional projectedReaderColumns = projectBaseColumns(columns); + + ConnectorPageSource parquetPageSource = createParquetPageSource( hdfsEnvironment, session.getUser(), configuration, @@ -135,13 +142,17 @@ public Optional createPageSource( start, length, fileSize, - columns, + projectedReaderColumns + .map(ReaderProjections::getReaderColumns) + .orElse(columns), isUseParquetColumnNames(session), options .withFailOnCorruptedStatistics(isFailOnCorruptedParquetStatistics(session)) .withMaxReadBlockSize(getParquetMaxReadBlockSize(session)), effectivePredicate, - stats)); + stats); + + return Optional.of(new ReaderPageSourceWithProjections(parquetPageSource, projectedReaderColumns)); } public static ParquetPageSource createParquetPageSource( @@ -220,7 +231,7 @@ public static ParquetPageSource createParquetPageSource( prestoTypes.add(column.getType()); internalFields.add(parquetField.flatMap(field -> { - String columnName = useParquetColumnNames ? column.getName() : fileSchema.getFields().get(column.getHiveColumnIndex()).getName(); + String columnName = useParquetColumnNames ? 
column.getName() : fileSchema.getFields().get(column.getBaseHiveColumnIndex()).getName(); return constructField(column.getType(), lookupColumnByName(messageColumnIO, columnName)); })); } @@ -281,8 +292,8 @@ private static org.apache.parquet.schema.Type getParquetType(HiveColumnHandle co return getParquetTypeByName(column.getName(), messageType); } - if (column.getHiveColumnIndex() < messageType.getFieldCount()) { - return messageType.getType(column.getHiveColumnIndex()); + if (column.getBaseHiveColumnIndex() < messageType.getFieldCount()) { + return messageType.getType(column.getBaseHiveColumnIndex()); } return null; } diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/rcfile/RcFilePageSource.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/rcfile/RcFilePageSource.java index 039c05aab737..6d4e295b109d 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/rcfile/RcFilePageSource.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/rcfile/RcFilePageSource.java @@ -79,7 +79,7 @@ public RcFilePageSource(RcFileReader rcFileReader, List column typesBuilder.add(column.getType()); hiveTypesBuilder.add(column.getHiveType()); - hiveColumnIndexes[columnIndex] = column.getHiveColumnIndex(); + hiveColumnIndexes[columnIndex] = column.getBaseHiveColumnIndex(); if (hiveColumnIndexes[columnIndex] >= rcFileReader.getColumnCount()) { // this file may contain fewer fields than what's declared in the schema @@ -135,9 +135,7 @@ public Page getNextPage() } } - Page page = new Page(currentPageSize, blocks); - - return page; + return new Page(currentPageSize, blocks); } catch (PrestoException e) { closeWithSuppression(e); diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/rcfile/RcFilePageSourceFactory.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/rcfile/RcFilePageSourceFactory.java index 760db67ab05e..1d6d4378ef14 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/rcfile/RcFilePageSourceFactory.java +++ 
b/presto-hive/src/main/java/io/prestosql/plugin/hive/rcfile/RcFilePageSourceFactory.java @@ -23,6 +23,7 @@ import io.prestosql.plugin.hive.HdfsEnvironment; import io.prestosql.plugin.hive.HiveColumnHandle; import io.prestosql.plugin.hive.HivePageSourceFactory; +import io.prestosql.plugin.hive.ReaderProjections; import io.prestosql.rcfile.AircompressorCodecFactory; import io.prestosql.rcfile.HadoopCodecFactory; import io.prestosql.rcfile.RcFileCorruptionException; @@ -59,6 +60,7 @@ import static io.prestosql.plugin.hive.HiveErrorCode.HIVE_BAD_DATA; import static io.prestosql.plugin.hive.HiveErrorCode.HIVE_CANNOT_OPEN_SPLIT; import static io.prestosql.plugin.hive.HiveErrorCode.HIVE_MISSING_DATA; +import static io.prestosql.plugin.hive.ReaderProjections.projectBaseColumns; import static io.prestosql.plugin.hive.util.HiveUtil.getDeserializerClassName; import static io.prestosql.rcfile.text.TextRcFileEncoding.DEFAULT_NULL_SEQUENCE; import static io.prestosql.rcfile.text.TextRcFileEncoding.DEFAULT_SEPARATORS; @@ -93,7 +95,7 @@ public RcFilePageSourceFactory(TypeManager typeManager, HdfsEnvironment hdfsEnvi } @Override - public Optional createPageSource( + public Optional createPageSource( Configuration configuration, ConnectorSession session, Path path, @@ -124,6 +126,12 @@ else if (deserializerClassName.equals(ColumnarSerDe.class.getName())) { throw new PrestoException(HIVE_BAD_DATA, "RCFile is empty: " + path); } + Optional readerProjections = projectBaseColumns(columns); + + List projectedReaderColumns = readerProjections + .map(ReaderProjections::getReaderColumns) + .orElse(columns); + FSDataInputStream inputStream; try { FileSystem fileSystem = hdfsEnvironment.getFileSystem(session.getUser(), path, configuration); @@ -139,8 +147,8 @@ else if (deserializerClassName.equals(ColumnarSerDe.class.getName())) { try { ImmutableMap.Builder readColumns = ImmutableMap.builder(); - for (HiveColumnHandle column : columns) { - readColumns.put(column.getHiveColumnIndex(), 
column.getHiveType().getType(typeManager)); + for (HiveColumnHandle column : projectedReaderColumns) { + readColumns.put(column.getBaseHiveColumnIndex(), column.getHiveType().getType(typeManager)); } RcFileReader rcFileReader = new RcFileReader( @@ -152,7 +160,8 @@ else if (deserializerClassName.equals(ColumnarSerDe.class.getName())) { length, DataSize.of(8, Unit.MEGABYTE)); - return Optional.of(new RcFilePageSource(rcFileReader, columns)); + ConnectorPageSource pageSource = new RcFilePageSource(rcFileReader, projectedReaderColumns); + return Optional.of(new ReaderPageSourceWithProjections(pageSource, readerProjections)); } catch (Throwable e) { try { diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/s3select/S3SelectRecordCursorProvider.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3select/S3SelectRecordCursorProvider.java index d310524fbdc7..31b25a34a92f 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/s3select/S3SelectRecordCursorProvider.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3select/S3SelectRecordCursorProvider.java @@ -13,11 +13,13 @@ */ package io.prestosql.plugin.hive.s3select; +import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import io.prestosql.plugin.hive.HdfsEnvironment; import io.prestosql.plugin.hive.HiveColumnHandle; import io.prestosql.plugin.hive.HiveRecordCursorProvider; import io.prestosql.plugin.hive.IonSqlQueryBuilder; +import io.prestosql.plugin.hive.ReaderProjections; import io.prestosql.spi.PrestoException; import io.prestosql.spi.connector.ConnectorSession; import io.prestosql.spi.connector.RecordCursor; @@ -37,6 +39,7 @@ import java.util.Set; import static io.prestosql.plugin.hive.HiveErrorCode.HIVE_FILESYSTEM_ERROR; +import static io.prestosql.plugin.hive.ReaderProjections.projectBaseColumns; import static io.prestosql.plugin.hive.util.HiveUtil.getDeserializerClassName; import static java.util.Objects.requireNonNull; @@ -55,7 
+58,7 @@ public S3SelectRecordCursorProvider(HdfsEnvironment hdfsEnvironment, PrestoS3Cli } @Override - public Optional createRecordCursor( + public Optional createRecordCursor( Configuration configuration, ConnectorSession session, Path path, @@ -80,12 +83,22 @@ public Optional createRecordCursor( throw new PrestoException(HIVE_FILESYSTEM_ERROR, "Failed getting FileSystem: " + path, e); } + Optional projectedReaderColumns = projectBaseColumns(columns); + // Ignore predicates on partial columns for now. + effectivePredicate = effectivePredicate.transform(column -> column.isBaseColumn() ? column : null); + String serdeName = getDeserializerClassName(schema); if (CSV_SERDES.contains(serdeName)) { + List readerColumns = projectedReaderColumns + .map(ReaderProjections::getReaderColumns) + .orElse(ImmutableList.of()); + IonSqlQueryBuilder queryBuilder = new IonSqlQueryBuilder(typeManager); - String ionSqlQuery = queryBuilder.buildSql(columns, effectivePredicate); + String ionSqlQuery = queryBuilder.buildSql(readerColumns, effectivePredicate); S3SelectLineRecordReader recordReader = new S3SelectCsvRecordReader(configuration, path, start, length, schema, ionSqlQuery, s3ClientFactory); - return Optional.of(new S3SelectRecordCursor<>(configuration, path, recordReader, length, schema, columns, hiveStorageTimeZone)); + + RecordCursor cursor = new S3SelectRecordCursor<>(configuration, path, recordReader, length, schema, readerColumns, hiveStorageTimeZone); + return Optional.of(new ReaderRecordCursorWithProjections(cursor, projectedReaderColumns)); } // unsupported serdes diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/util/HiveUtil.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/util/HiveUtil.java index 83bbe99d1d19..157cc037dfe1 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/util/HiveUtil.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/util/HiveUtil.java @@ -111,6 +111,7 @@ import static 
io.prestosql.plugin.hive.HiveColumnHandle.ColumnType.PARTITION_KEY; import static io.prestosql.plugin.hive.HiveColumnHandle.ColumnType.REGULAR; import static io.prestosql.plugin.hive.HiveColumnHandle.bucketColumnHandle; +import static io.prestosql.plugin.hive.HiveColumnHandle.createBaseColumn; import static io.prestosql.plugin.hive.HiveColumnHandle.fileModifiedTimeColumnHandle; import static io.prestosql.plugin.hive.HiveColumnHandle.fileSizeColumnHandle; import static io.prestosql.plugin.hive.HiveColumnHandle.isBucketColumnHandle; @@ -222,8 +223,12 @@ private HiveUtil() List readColumns = columns.stream() .filter(column -> column.getColumnType() == REGULAR) .collect(toImmutableList()); + + // Projected columns are not supported here + readColumns.forEach(readColumn -> checkArgument(readColumn.isBaseColumn(), "column %s is not a base column", readColumn.getName())); + List readHiveColumnIndexes = readColumns.stream() - .map(HiveColumnHandle::getHiveColumnIndex) + .map(HiveColumnHandle::getBaseHiveColumnIndex) .collect(toImmutableList()); // Tell hive the columns we would like to read, this lets hive optimize reading column oriented files @@ -908,7 +913,7 @@ public static List getRegularColumnHandles(Table table, TypeMa // ignore unsupported types rather than failing HiveType hiveType = field.getType(); if (hiveType.isSupportedType()) { - columns.add(new HiveColumnHandle(field.getName(), hiveType, hiveType.getType(typeManager), hiveColumnIndex, REGULAR, field.getComment())); + columns.add(createBaseColumn(field.getName(), hiveColumnIndex, hiveType, hiveType.getType(typeManager), REGULAR, field.getComment())); } hiveColumnIndex++; } @@ -926,7 +931,7 @@ public static List getPartitionKeyColumnHandles(Table table, T if (!hiveType.isSupportedType()) { throw new PrestoException(NOT_SUPPORTED, format("Unsupported Hive type %s found in partition keys of table %s.%s", hiveType, table.getDatabaseName(), table.getTableName())); } - columns.add(new 
HiveColumnHandle(field.getName(), hiveType, hiveType.getType(typeManager), -1, PARTITION_KEY, field.getComment())); + columns.add(createBaseColumn(field.getName(), -1, hiveType, hiveType.getType(typeManager), PARTITION_KEY, field.getComment())); } return columns.build(); diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/AbstractTestHive.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/AbstractTestHive.java index 3a789e963c55..b73ba6e572cc 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/AbstractTestHive.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/AbstractTestHive.java @@ -182,6 +182,7 @@ import static io.prestosql.plugin.hive.HiveColumnHandle.ColumnType.PARTITION_KEY; import static io.prestosql.plugin.hive.HiveColumnHandle.ColumnType.REGULAR; import static io.prestosql.plugin.hive.HiveColumnHandle.bucketColumnHandle; +import static io.prestosql.plugin.hive.HiveColumnHandle.createBaseColumn; import static io.prestosql.plugin.hive.HiveErrorCode.HIVE_INVALID_PARTITION_VALUE; import static io.prestosql.plugin.hive.HiveErrorCode.HIVE_PARTITION_SCHEMA_MISMATCH; import static io.prestosql.plugin.hive.HiveMetadata.PRESTO_QUERY_ID_NAME; @@ -640,11 +641,11 @@ protected void setupHive(String databaseName, String timeZoneId) invalidTableHandle = new HiveTableHandle(database, INVALID_TABLE, ImmutableMap.of(), ImmutableList.of(), Optional.empty()); - dsColumn = new HiveColumnHandle("ds", HIVE_STRING, VARCHAR, -1, PARTITION_KEY, Optional.empty()); - fileFormatColumn = new HiveColumnHandle("file_format", HIVE_STRING, VARCHAR, -1, PARTITION_KEY, Optional.empty()); - dummyColumn = new HiveColumnHandle("dummy", HIVE_INT, INTEGER, -1, PARTITION_KEY, Optional.empty()); - intColumn = new HiveColumnHandle("t_int", HIVE_INT, INTEGER, -1, PARTITION_KEY, Optional.empty()); - invalidColumnHandle = new HiveColumnHandle(INVALID_COLUMN, HIVE_STRING, VARCHAR, 0, REGULAR, Optional.empty()); + dsColumn = createBaseColumn("ds", -1, 
HIVE_STRING, VARCHAR, PARTITION_KEY, Optional.empty()); + fileFormatColumn = createBaseColumn("file_format", -1, HIVE_STRING, VARCHAR, PARTITION_KEY, Optional.empty()); + dummyColumn = createBaseColumn("dummy", -1, HIVE_INT, INTEGER, PARTITION_KEY, Optional.empty()); + intColumn = createBaseColumn("t_int", -1, HIVE_INT, INTEGER, PARTITION_KEY, Optional.empty()); + invalidColumnHandle = createBaseColumn(INVALID_COLUMN, 0, HIVE_STRING, VARCHAR, REGULAR, Optional.empty()); List partitionColumns = ImmutableList.of(dsColumn, fileFormatColumn, dummyColumn); tablePartitionFormatPartitions = ImmutableList.builder() diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/AbstractTestHiveFileFormats.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/AbstractTestHiveFileFormats.java index d559cad4b5e6..cd773f3b29a1 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/AbstractTestHiveFileFormats.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/AbstractTestHiveFileFormats.java @@ -85,6 +85,8 @@ import static com.google.common.collect.ImmutableMap.toImmutableMap; import static io.prestosql.plugin.hive.HiveColumnHandle.ColumnType.PARTITION_KEY; import static io.prestosql.plugin.hive.HiveColumnHandle.ColumnType.REGULAR; +import static io.prestosql.plugin.hive.HiveColumnHandle.createBaseColumn; +import static io.prestosql.plugin.hive.HiveColumnProjectionInfo.generatePartialName; import static io.prestosql.plugin.hive.HivePartitionKey.HIVE_DEFAULT_DYNAMIC_PARTITION; import static io.prestosql.plugin.hive.HiveTestUtils.SESSION; import static io.prestosql.plugin.hive.HiveTestUtils.TYPE_MANAGER; @@ -473,13 +475,47 @@ private static Map asMap(K[] keys, V[] values) protected List getColumnHandles(List testColumns) { List columns = new ArrayList<>(); + Map hiveColumnIndexes = new HashMap<>(); + int nextHiveColumnIndex = 0; for (int i = 0; i < testColumns.size(); i++) { TestColumn testColumn = testColumns.get(i); - int columnIndex = 
testColumn.isPartitionKey() ? -1 : nextHiveColumnIndex++; - HiveType hiveType = HiveType.valueOf(testColumn.getObjectInspector().getTypeName()); - columns.add(new HiveColumnHandle(testColumn.getName(), hiveType, hiveType.getType(TYPE_MANAGER), columnIndex, testColumn.isPartitionKey() ? PARTITION_KEY : REGULAR, Optional.empty())); + int columnIndex; + if (testColumn.isPartitionKey()) { + columnIndex = -1; + } + else { + if (hiveColumnIndexes.get(testColumn.getBaseName()) != null) { + columnIndex = hiveColumnIndexes.get(testColumn.getBaseName()); + } + else { + columnIndex = nextHiveColumnIndex++; + hiveColumnIndexes.put(testColumn.getBaseName(), columnIndex); + } + } + + if (testColumn.getDereferenceNames().size() == 0) { + HiveType hiveType = HiveType.valueOf(testColumn.getObjectInspector().getTypeName()); + columns.add(createBaseColumn(testColumn.getName(), columnIndex, hiveType, hiveType.getType(TYPE_MANAGER), testColumn.isPartitionKey() ? PARTITION_KEY : REGULAR, Optional.empty())); + } + else { + HiveType baseHiveType = HiveType.valueOf(testColumn.getBaseObjectInspector().getTypeName()); + HiveType partialHiveType = baseHiveType.getHiveTypeForDereferences(testColumn.getDereferenceIndices()).get(); + HiveColumnHandle hiveColumnHandle = new HiveColumnHandle( + testColumn.getBaseName(), + columnIndex, + baseHiveType, + baseHiveType.getType(TYPE_MANAGER), + Optional.of(new HiveColumnProjectionInfo( + testColumn.getDereferenceIndices(), + testColumn.getDereferenceNames(), + partialHiveType, + partialHiveType.getType(TYPE_MANAGER))), + testColumn.isPartitionKey() ? 
PARTITION_KEY : REGULAR, + Optional.empty()); + columns.add(hiveColumnHandle); + } } return columns; } @@ -817,6 +853,10 @@ else if (testColumn.getObjectInspector().getCategory() == Category.PRIMITIVE) { public static final class TestColumn { + private final String baseName; + private final ObjectInspector baseObjectInspector; + private final List dereferenceNames; + private final List dereferenceIndices; private final String name; private final ObjectInspector objectInspector; private final Object writeValue; @@ -830,11 +870,30 @@ public TestColumn(String name, ObjectInspector objectInspector, Object writeValu public TestColumn(String name, ObjectInspector objectInspector, Object writeValue, Object expectedValue, boolean partitionKey) { - this.name = requireNonNull(name, "name is null"); + this(name, objectInspector, ImmutableList.of(), ImmutableList.of(), objectInspector, writeValue, expectedValue, partitionKey); + } + + public TestColumn( + String baseName, + ObjectInspector baseObjectInspector, + List dereferenceNames, + List dereferenceIndices, + ObjectInspector objectInspector, + Object writeValue, + Object expectedValue, + boolean partitionKey) + { + this.baseName = requireNonNull(baseName, "baseName is null"); + this.baseObjectInspector = requireNonNull(baseObjectInspector, "baseObjectInspector is null"); + this.dereferenceNames = requireNonNull(dereferenceNames, "dereferenceNames is null"); + this.dereferenceIndices = requireNonNull(dereferenceIndices, "dereferenceIndices is null"); + checkArgument(dereferenceIndices.size() == dereferenceNames.size(), "dereferenceIndices and dereferenceNames should have the same size"); + this.name = baseName + generatePartialName(dereferenceNames); this.objectInspector = requireNonNull(objectInspector, "objectInspector is null"); this.writeValue = writeValue; this.expectedValue = expectedValue; this.partitionKey = partitionKey; + checkArgument(dereferenceNames.size() == 0 || partitionKey == false, "partial column cannot 
be a partition key"); } public String getName() @@ -842,11 +901,31 @@ public String getName() return name; } + public String getBaseName() + { + return baseName; + } + + public List getDereferenceNames() + { + return dereferenceNames; + } + + public List getDereferenceIndices() + { + return dereferenceIndices; + } + public String getType() { return objectInspector.getTypeName(); } + public ObjectInspector getBaseObjectInspector() + { + return baseObjectInspector; + } + public ObjectInspector getObjectInspector() { return objectInspector; @@ -871,7 +950,9 @@ public boolean isPartitionKey() public String toString() { StringBuilder sb = new StringBuilder("TestColumn{"); - sb.append("name='").append(name).append('\''); + sb.append("baseName='").append(baseName).append("'"); + sb.append("dereferenceNames=").append("[").append(dereferenceNames.stream().collect(Collectors.joining(","))).append("]"); + sb.append("name=").append(name); sb.append(", objectInspector=").append(objectInspector); sb.append(", writeValue=").append(writeValue); sb.append(", expectedValue=").append(expectedValue); diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestBackgroundHiveSplitLoader.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestBackgroundHiveSplitLoader.java index 5c213466a828..89bdde3e4c9a 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestBackgroundHiveSplitLoader.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestBackgroundHiveSplitLoader.java @@ -76,6 +76,7 @@ import static io.airlift.units.DataSize.Unit.MEGABYTE; import static io.prestosql.plugin.hive.BackgroundHiveSplitLoader.BucketSplitInfo.createBucketSplitInfo; import static io.prestosql.plugin.hive.BackgroundHiveSplitLoader.getBucketNumber; +import static io.prestosql.plugin.hive.HiveColumnHandle.createBaseColumn; import static io.prestosql.plugin.hive.HiveColumnHandle.pathColumnHandle; import static io.prestosql.plugin.hive.HiveStorageFormat.CSV; import static 
io.prestosql.plugin.hive.HiveTestUtils.HDFS_ENVIRONMENT; @@ -124,7 +125,7 @@ public class TestBackgroundHiveSplitLoader private static final List PARTITION_COLUMNS = ImmutableList.of( new Column("partitionColumn", HIVE_INT, Optional.empty())); private static final List BUCKET_COLUMN_HANDLES = ImmutableList.of( - new HiveColumnHandle("col1", HIVE_INT, INTEGER, 0, ColumnType.REGULAR, Optional.empty())); + createBaseColumn("col1", 0, HIVE_INT, INTEGER, ColumnType.REGULAR, Optional.empty())); private static final Optional BUCKET_PROPERTY = Optional.of( new HiveBucketProperty(ImmutableList.of("col1"), BUCKETING_V1, BUCKET_COUNT, ImmutableList.of())); diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveColumnHandle.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveColumnHandle.java index 5ca36851644f..0084575d88f1 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveColumnHandle.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveColumnHandle.java @@ -13,19 +13,28 @@ */ package io.prestosql.plugin.hive; +import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.airlift.json.JsonCodec; import io.airlift.json.JsonCodecFactory; import io.airlift.json.ObjectMapperProvider; -import io.prestosql.spi.type.TestingTypeManager; +import io.prestosql.spi.type.RowType; import io.prestosql.spi.type.Type; +import io.prestosql.type.InternalTypeManager; import org.testng.annotations.Test; import java.util.Optional; +import static io.prestosql.metadata.MetadataManager.createTestMetadataManager; import static io.prestosql.plugin.hive.HiveColumnHandle.ColumnType.PARTITION_KEY; import static io.prestosql.plugin.hive.HiveColumnHandle.ColumnType.REGULAR; +import static io.prestosql.plugin.hive.HiveColumnHandle.createBaseColumn; +import static io.prestosql.plugin.hive.HiveType.toHiveType; +import static io.prestosql.spi.type.BigintType.BIGINT; import static 
io.prestosql.spi.type.DoubleType.DOUBLE; +import static io.prestosql.spi.type.RowType.field; +import static io.prestosql.spi.type.VarcharType.VARCHAR; +import static java.util.Arrays.asList; import static org.testng.Assert.assertEquals; public class TestHiveColumnHandle @@ -40,29 +49,60 @@ public void testHiddenColumn() @Test public void testRegularColumn() { - HiveColumnHandle expectedPartitionColumn = new HiveColumnHandle("name", HiveType.HIVE_FLOAT, DOUBLE, 88, PARTITION_KEY, Optional.empty()); + HiveColumnHandle expectedPartitionColumn = createBaseColumn("name", 88, HiveType.HIVE_FLOAT, DOUBLE, PARTITION_KEY, Optional.empty()); testRoundTrip(expectedPartitionColumn); } @Test public void testPartitionKeyColumn() { - HiveColumnHandle expectedRegularColumn = new HiveColumnHandle("name", HiveType.HIVE_FLOAT, DOUBLE, 88, REGULAR, Optional.empty()); + HiveColumnHandle expectedRegularColumn = createBaseColumn("name", 88, HiveType.HIVE_FLOAT, DOUBLE, REGULAR, Optional.empty()); testRoundTrip(expectedRegularColumn); } + @Test + public void testProjectedColumn() + { + Type baseType = RowType.from(asList(field("a", VARCHAR), field("b", BIGINT))); + HiveType baseHiveType = toHiveType(new HiveTypeTranslator(), baseType); + + HiveColumnProjectionInfo columnProjectionInfo = new HiveColumnProjectionInfo( + ImmutableList.of(1), + ImmutableList.of("b"), + HiveType.HIVE_LONG, + BIGINT); + + HiveColumnHandle projectedColumn = new HiveColumnHandle( + "struct_col", + 88, + baseHiveType, + baseType, + Optional.of(columnProjectionInfo), + REGULAR, + Optional.empty()); + + testRoundTrip(projectedColumn); + } + private void testRoundTrip(HiveColumnHandle expected) { ObjectMapperProvider objectMapperProvider = new ObjectMapperProvider(); - objectMapperProvider.setJsonDeserializers(ImmutableMap.of(Type.class, new HiveModule.TypeDeserializer(new TestingTypeManager()))); + objectMapperProvider.setJsonDeserializers(ImmutableMap.of(Type.class, new HiveModule.TypeDeserializer(new 
InternalTypeManager(createTestMetadataManager())))); JsonCodec codec = new JsonCodecFactory(objectMapperProvider).jsonCodec(HiveColumnHandle.class); String json = codec.toJson(expected); HiveColumnHandle actual = codec.fromJson(json); + assertEquals(actual.getBaseColumnName(), expected.getBaseColumnName()); + assertEquals(actual.getBaseHiveColumnIndex(), expected.getBaseHiveColumnIndex()); + assertEquals(actual.getBaseType(), expected.getBaseType()); + assertEquals(actual.getBaseHiveType(), expected.getBaseHiveType()); + assertEquals(actual.getName(), expected.getName()); + assertEquals(actual.getType(), expected.getType()); assertEquals(actual.getHiveType(), expected.getHiveType()); - assertEquals(actual.getHiveColumnIndex(), expected.getHiveColumnIndex()); + + assertEquals(actual.getHiveColumnProjectionInfo(), expected.getHiveColumnProjectionInfo()); assertEquals(actual.isPartitionKey(), expected.isPartitionKey()); } } diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveFileFormats.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveFileFormats.java index f2892c16fb84..0582869ad504 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveFileFormats.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveFileFormats.java @@ -34,6 +34,7 @@ import io.prestosql.spi.connector.RecordCursor; import io.prestosql.spi.connector.RecordPageSource; import io.prestosql.spi.predicate.TupleDomain; +import io.prestosql.spi.type.Type; import io.prestosql.testing.TestingConnectorSession; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.type.HiveVarchar; @@ -55,14 +56,18 @@ import java.io.File; import java.io.IOException; import java.time.Instant; +import java.util.ArrayList; +import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Optional; import java.util.OptionalInt; import java.util.Properties; +import java.util.Set; import java.util.TimeZone; 
import java.util.stream.Collectors; +import static com.google.common.base.Preconditions.checkState; import static com.google.common.collect.ImmutableList.toImmutableList; import static io.airlift.slice.Slices.utf8Slice; import static io.prestosql.plugin.hive.HiveStorageFormat.AVRO; @@ -80,10 +85,12 @@ import static io.prestosql.plugin.hive.HiveTestUtils.createGenericHiveRecordCursorProvider; import static io.prestosql.plugin.hive.HiveTestUtils.getHiveSession; import static io.prestosql.plugin.hive.HiveTestUtils.getTypes; +import static io.prestosql.testing.StructuralTestUtil.rowBlockOf; import static java.util.Objects.requireNonNull; import static java.util.stream.Collectors.toList; import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.FILE_INPUT_FORMAT; import static org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_LIB; +import static org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardStructObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaStringObjectInspector; import static org.testng.Assert.assertEquals; @@ -523,6 +530,254 @@ public void testTruncateVarcharColumn() .isReadableByRecordCursor(createGenericHiveRecordCursorProvider(HDFS_ENVIRONMENT)); } + @Test(dataProvider = "rowCount") + public void testAvroProjectedColumns(int rowCount) + throws Exception + { + List supportedColumns = getTestColumnsSupportedByAvro(); + List regularColumns = getRegularColumns(supportedColumns); + List partitionColumns = getPartitionColumns(supportedColumns); + + // Created projected columns for all regular supported columns + ImmutableList.Builder writeColumnsBuilder = ImmutableList.builder(); + ImmutableList.Builder readeColumnsBuilder = ImmutableList.builder(); + generateProjectedColumns(regularColumns, 
writeColumnsBuilder, readeColumnsBuilder); + + List writeColumns = writeColumnsBuilder.addAll(partitionColumns).build(); + List readColumns = readeColumnsBuilder.addAll(partitionColumns).build(); + + assertThatFileFormat(AVRO) + .withWriteColumns(writeColumns) + .withReadColumns(readColumns) + .withRowsCount(rowCount) + .isReadableByRecordCursorPageSource(createGenericHiveRecordCursorProvider(HDFS_ENVIRONMENT)); + } + + @Test(dataProvider = "rowCount") + public void testParquetProjectedColumns(int rowCount) + throws Exception + { + List supportedColumns = getTestColumnsSupportedByParquet(); + List regularColumns = getRegularColumns(supportedColumns); + List partitionColumns = getPartitionColumns(supportedColumns); + + // Created projected columns for all regular supported columns + ImmutableList.Builder writeColumnsBuilder = ImmutableList.builder(); + ImmutableList.Builder readeColumnsBuilder = ImmutableList.builder(); + generateProjectedColumns(regularColumns, writeColumnsBuilder, readeColumnsBuilder); + + List writeColumns = writeColumnsBuilder.addAll(partitionColumns).build(); + List readColumns = readeColumnsBuilder.addAll(partitionColumns).build(); + + assertThatFileFormat(PARQUET) + .withWriteColumns(writeColumns) + .withReadColumns(readColumns) + .withRowsCount(rowCount) + .withSession(PARQUET_SESSION) + .isReadableByPageSource(new ParquetPageSourceFactory(HDFS_ENVIRONMENT, STATS, new ParquetReaderConfig())); + + assertThatFileFormat(PARQUET) + .withWriteColumns(writeColumns) + .withReadColumns(readColumns) + .withRowsCount(rowCount) + .withSession(PARQUET_SESSION_USE_NAME) + .isReadableByPageSource(new ParquetPageSourceFactory(HDFS_ENVIRONMENT, STATS, new ParquetReaderConfig())); + } + + @Test(dataProvider = "rowCount") + public void testORCProjectedColumns(int rowCount) + throws Exception + { + List supportedColumns = TEST_COLUMNS; + List regularColumns = getRegularColumns(supportedColumns); + List partitionColumns = getPartitionColumns(supportedColumns); 
+ + // Created projected columns for all regular supported columns + ImmutableList.Builder writeColumnsBuilder = ImmutableList.builder(); + ImmutableList.Builder readeColumnsBuilder = ImmutableList.builder(); + generateProjectedColumns(regularColumns, writeColumnsBuilder, readeColumnsBuilder); + + List writeColumns = writeColumnsBuilder.addAll(partitionColumns).build(); + List readColumns = readeColumnsBuilder.addAll(partitionColumns).build(); + + ConnectorSession session = getHiveSession(new HiveConfig(), new OrcReaderConfig().setUseColumnNames(true)); + assertThatFileFormat(ORC) + .withWriteColumns(writeColumns) + .withReadColumns(readColumns) + .withRowsCount(rowCount) + .withSession(session) + .isReadableByPageSource(new OrcPageSourceFactory(new OrcReaderOptions(), HDFS_ENVIRONMENT, STATS)); + + assertThatFileFormat(ORC) + .withWriteColumns(writeColumns) + .withReadColumns(readColumns) + .withRowsCount(rowCount) + .isReadableByPageSource(new OrcPageSourceFactory(new OrcReaderOptions(), HDFS_ENVIRONMENT, STATS)); + } + + @Test(dataProvider = "rowCount") + public void testSequenceFileProjectedColumns(int rowCount) + throws Exception + { + List supportedColumns = TEST_COLUMNS.stream() + .filter(column -> !column.getName().equals("t_map_null_key_complex_key_value")) + .collect(toList()); + + List regularColumns = getRegularColumns(supportedColumns); + List partitionColumns = getPartitionColumns(supportedColumns); + + // Created projected columns for all regular supported columns + ImmutableList.Builder writeColumnsBuilder = ImmutableList.builder(); + ImmutableList.Builder readeColumnsBuilder = ImmutableList.builder(); + generateProjectedColumns(regularColumns, writeColumnsBuilder, readeColumnsBuilder); + + List writeColumns = writeColumnsBuilder.addAll(partitionColumns).build(); + List readColumns = readeColumnsBuilder.addAll(partitionColumns).build(); + + assertThatFileFormat(SEQUENCEFILE) + .withWriteColumns(writeColumns) + .withReadColumns(readColumns) + 
.withRowsCount(rowCount) + .isReadableByRecordCursorPageSource(createGenericHiveRecordCursorProvider(HDFS_ENVIRONMENT)); + } + + @Test(dataProvider = "rowCount") + public void testTextFileProjectedColumns(int rowCount) + throws Exception + { + List supportedColumns = TEST_COLUMNS.stream() + .filter(column -> !column.getName().equals("t_map_null_key_complex_key_value")) + .collect(toList()); + + List regularColumns = getRegularColumns(supportedColumns); + List partitionColumns = getPartitionColumns(supportedColumns); + + // Created projected columns for all regular supported columns + ImmutableList.Builder writeColumnsBuilder = ImmutableList.builder(); + ImmutableList.Builder readeColumnsBuilder = ImmutableList.builder(); + generateProjectedColumns(regularColumns, writeColumnsBuilder, readeColumnsBuilder); + + List writeColumns = writeColumnsBuilder.addAll(partitionColumns).build(); + List readColumns = readeColumnsBuilder.addAll(partitionColumns).build(); + + assertThatFileFormat(TEXTFILE) + .withWriteColumns(writeColumns) + .withReadColumns(readColumns) + .withRowsCount(rowCount) + .isReadableByRecordCursorPageSource(createGenericHiveRecordCursorProvider(HDFS_ENVIRONMENT)); + } + + @Test(dataProvider = "rowCount") + public void testRCTextProjectedColumns(int rowCount) + throws Exception + { + List supportedColumns = TEST_COLUMNS.stream() + .filter(testColumn -> { + // TODO: This is a bug in the RC text reader + // RC file does not support complex type as key of a map + return !testColumn.getName().equals("t_struct_null") + && !testColumn.getName().equals("t_map_null_key_complex_key_value"); + }) + .collect(toImmutableList()); + + List regularColumns = getRegularColumns(supportedColumns); + List partitionColumns = getPartitionColumns(supportedColumns); + + // Created projected columns for all regular supported columns + ImmutableList.Builder writeColumnsBuilder = ImmutableList.builder(); + ImmutableList.Builder readeColumnsBuilder = ImmutableList.builder(); + 
generateProjectedColumns(regularColumns, writeColumnsBuilder, readeColumnsBuilder); + + List writeColumns = writeColumnsBuilder.addAll(partitionColumns).build(); + List readColumns = readeColumnsBuilder.addAll(partitionColumns).build(); + + assertThatFileFormat(RCTEXT) + .withWriteColumns(writeColumns) + .withReadColumns(readColumns) + .withRowsCount(rowCount) + .isReadableByRecordCursorPageSource(createGenericHiveRecordCursorProvider(HDFS_ENVIRONMENT)); + } + + @Test(dataProvider = "rowCount") + public void testRCTextProjectedColumnsPageSource(int rowCount) + throws Exception + { + List supportedColumns = TEST_COLUMNS; + List regularColumns = getRegularColumns(supportedColumns); + List partitionColumns = getPartitionColumns(supportedColumns); + + // Created projected columns for all regular supported columns + ImmutableList.Builder writeColumnsBuilder = ImmutableList.builder(); + ImmutableList.Builder readeColumnsBuilder = ImmutableList.builder(); + generateProjectedColumns(regularColumns, writeColumnsBuilder, readeColumnsBuilder); + + List writeColumns = writeColumnsBuilder.addAll(partitionColumns).build(); + List readColumns = readeColumnsBuilder.addAll(partitionColumns).build(); + + assertThatFileFormat(RCTEXT) + .withWriteColumns(writeColumns) + .withReadColumns(readColumns) + .withRowsCount(rowCount) + .isReadableByPageSource(new RcFilePageSourceFactory(TYPE_MANAGER, HDFS_ENVIRONMENT, STATS)); + } + + @Test(dataProvider = "rowCount") + public void testRCBinaryProjectedColumns(int rowCount) + throws Exception + { + // RCBinary does not support complex type as key of a map and interprets empty VARCHAR as nulls + List supportedColumns = TEST_COLUMNS.stream() + .filter(testColumn -> { + String name = testColumn.getName(); + return !name.equals("t_map_null_key_complex_key_value") && !name.equals("t_empty_varchar"); + }) + .collect(toList()); + + List regularColumns = getRegularColumns(supportedColumns); + List partitionColumns = 
getPartitionColumns(supportedColumns); + + // Created projected columns for all regular supported columns + ImmutableList.Builder writeColumnsBuilder = ImmutableList.builder(); + ImmutableList.Builder readeColumnsBuilder = ImmutableList.builder(); + generateProjectedColumns(regularColumns, writeColumnsBuilder, readeColumnsBuilder); + + List writeColumns = writeColumnsBuilder.addAll(partitionColumns).build(); + List readColumns = readeColumnsBuilder.addAll(partitionColumns).build(); + + assertThatFileFormat(RCBINARY) + .withWriteColumns(writeColumns) + .withReadColumns(readColumns) + .withRowsCount(rowCount) + .isReadableByRecordCursor(createGenericHiveRecordCursorProvider(HDFS_ENVIRONMENT)); + } + + @Test(dataProvider = "rowCount") + public void testRCBinaryProjectedColumnsPageSource(int rowCount) + throws Exception + { + // RCBinary does not support complex type as key of a map and interprets empty VARCHAR as nulls + List supportedColumns = TEST_COLUMNS.stream() + .filter(testColumn -> !testColumn.getName().equals("t_empty_varchar")) + .collect(toList()); + + List regularColumns = getRegularColumns(supportedColumns); + List partitionColumns = getPartitionColumns(supportedColumns); + + // Created projected columns for all regular supported columns + ImmutableList.Builder writeColumnsBuilder = ImmutableList.builder(); + ImmutableList.Builder readeColumnsBuilder = ImmutableList.builder(); + generateProjectedColumns(regularColumns, writeColumnsBuilder, readeColumnsBuilder); + + List writeColumns = writeColumnsBuilder.addAll(partitionColumns).build(); + List readColumns = readeColumnsBuilder.addAll(partitionColumns).build(); + + assertThatFileFormat(RCBINARY) + .withWriteColumns(writeColumns) + .withReadColumns(readColumns) + .withRowsCount(rowCount) + .isReadableByPageSource(new RcFilePageSourceFactory(TYPE_MANAGER, HDFS_ENVIRONMENT, STATS)); + } + @Test public void testFailForLongVarcharPartitionColumn() throws Exception @@ -563,42 +818,81 @@ public void 
testFailForLongVarcharPartitionColumn() .isFailingForRecordCursor(createGenericHiveRecordCursorProvider(HDFS_ENVIRONMENT), expectedErrorCode, expectedMessage); } + private void testRecordPageSource( + HiveRecordCursorProvider cursorProvider, + FileSplit split, + HiveStorageFormat storageFormat, + List testReadColumns, + ConnectorSession session, + int rowCount) + throws Exception + { + Properties splitProperties = new Properties(); + splitProperties.setProperty(FILE_INPUT_FORMAT, storageFormat.getInputFormat()); + splitProperties.setProperty(SERIALIZATION_LIB, storageFormat.getSerDe()); + ConnectorPageSource pageSource = createPageSourceFromCursorProvider(cursorProvider, split, splitProperties, testReadColumns, session); + checkPageSource(pageSource, testReadColumns, getTypes(getColumnHandles(testReadColumns)), rowCount); + } + private void testCursorProvider( HiveRecordCursorProvider cursorProvider, FileSplit split, HiveStorageFormat storageFormat, - List testColumns, + List testReadColumns, ConnectorSession session, int rowCount) { Properties splitProperties = new Properties(); splitProperties.setProperty(FILE_INPUT_FORMAT, storageFormat.getInputFormat()); splitProperties.setProperty(SERIALIZATION_LIB, storageFormat.getSerDe()); - testCursorProvider(cursorProvider, split, splitProperties, testColumns, session, rowCount); + testCursorProvider(cursorProvider, split, splitProperties, testReadColumns, session, rowCount); } private void testCursorProvider( HiveRecordCursorProvider cursorProvider, FileSplit split, Properties splitProperties, - List testColumns, + List testReadColumns, ConnectorSession session, int rowCount) { + ConnectorPageSource pageSource = createPageSourceFromCursorProvider(cursorProvider, split, splitProperties, testReadColumns, session); + RecordCursor cursor = ((RecordPageSource) pageSource).getCursor(); + checkCursor(cursor, testReadColumns, rowCount); + } + + private ConnectorPageSource createPageSourceFromCursorProvider( + 
HiveRecordCursorProvider cursorProvider, + FileSplit split, + Properties splitProperties, + List testReadColumns, + ConnectorSession session) + { + // Use full columns in split properties + ImmutableList.Builder splitPropertiesColumnNames = ImmutableList.builder(); + ImmutableList.Builder splitPropertiesColumnTypes = ImmutableList.builder(); + Set baseColumnNames = new HashSet<>(); + + for (TestColumn testReadColumn : testReadColumns) { + String name = testReadColumn.getBaseName(); + if (!baseColumnNames.contains(name) && !testReadColumn.isPartitionKey()) { + baseColumnNames.add(name); + splitPropertiesColumnNames.add(name); + splitPropertiesColumnTypes.add(testReadColumn.getBaseObjectInspector().getTypeName()); + } + } + splitProperties.setProperty( "columns", - testColumns.stream() - .filter(column -> !column.isPartitionKey()) - .map(TestColumn::getName) - .collect(Collectors.joining(","))); + splitPropertiesColumnNames.build().stream() + .collect(Collectors.joining(","))); + splitProperties.setProperty( "columns.types", - testColumns.stream() - .filter(column -> !column.isPartitionKey()) - .map(TestColumn::getType) - .collect(Collectors.joining(","))); + splitPropertiesColumnTypes.build().stream() + .collect(Collectors.joining(","))); - List partitionKeys = testColumns.stream() + List partitionKeys = testReadColumns.stream() .filter(TestColumn::isPartitionKey) .map(input -> new HivePartitionKey(input.getName(), (String) input.getWriteValue())) .collect(toList()); @@ -618,7 +912,7 @@ private void testCursorProvider( Instant.now().toEpochMilli(), splitProperties, TupleDomain.all(), - getColumnHandles(testColumns), + getColumnHandles(testReadColumns), partitionKeys, DateTimeZone.getDefault(), TYPE_MANAGER, @@ -627,16 +921,14 @@ private void testCursorProvider( false, Optional.empty()); - RecordCursor cursor = ((RecordPageSource) pageSource.get()).getCursor(); - - checkCursor(cursor, testColumns, rowCount); + return pageSource.get(); } private void 
testPageSourceFactory( HivePageSourceFactory sourceFactory, FileSplit split, HiveStorageFormat storageFormat, - List testColumns, + List testReadColumns, ConnectorSession session, int rowCount) throws IOException @@ -644,25 +936,30 @@ private void testPageSourceFactory( Properties splitProperties = new Properties(); splitProperties.setProperty(FILE_INPUT_FORMAT, storageFormat.getInputFormat()); splitProperties.setProperty(SERIALIZATION_LIB, storageFormat.getSerDe()); - splitProperties.setProperty( - "columns", - testColumns.stream() - .filter(column -> !column.isPartitionKey()) - .map(TestColumn::getName) - .collect(Collectors.joining(","))); - splitProperties.setProperty( - "columns.types", - testColumns.stream() - .filter(column -> !column.isPartitionKey()) - .map(TestColumn::getType) - .collect(Collectors.joining(","))); - List partitionKeys = testColumns.stream() + // Use full columns in split properties + ImmutableList.Builder splitPropertiesColumnNames = ImmutableList.builder(); + ImmutableList.Builder splitPropertiesColumnTypes = ImmutableList.builder(); + Set baseColumnNames = new HashSet<>(); + + for (TestColumn testReadColumn : testReadColumns) { + String name = testReadColumn.getBaseName(); + if (!baseColumnNames.contains(name) && !testReadColumn.isPartitionKey()) { + baseColumnNames.add(name); + splitPropertiesColumnNames.add(name); + splitPropertiesColumnTypes.add(testReadColumn.getBaseObjectInspector().getTypeName()); + } + } + + splitProperties.setProperty("columns", splitPropertiesColumnNames.build().stream().collect(Collectors.joining(","))); + splitProperties.setProperty("columns.types", splitPropertiesColumnTypes.build().stream().collect(Collectors.joining(","))); + + List partitionKeys = testReadColumns.stream() .filter(TestColumn::isPartitionKey) .map(input -> new HivePartitionKey(input.getName(), (String) input.getWriteValue())) .collect(toList()); - List columnHandles = getColumnHandles(testColumns); + List columnHandles = 
getColumnHandles(testReadColumns); Optional pageSource = HivePageSourceProvider.createHivePageSource( ImmutableSet.of(sourceFactory), @@ -688,7 +985,7 @@ private void testPageSourceFactory( assertTrue(pageSource.isPresent()); - checkPageSource(pageSource.get(), testColumns, getTypes(columnHandles), rowCount); + checkPageSource(pageSource.get(), testReadColumns, getTypes(columnHandles), rowCount); } public static boolean hasType(ObjectInspector objectInspector, PrimitiveCategory... types) @@ -743,6 +1040,51 @@ private static HiveConfig createParquetHiveConfig(boolean useParquetColumnNames) .setUseParquetColumnNames(useParquetColumnNames); } + private void generateProjectedColumns(List childColumns, ImmutableList.Builder testFullColumnsBuilder, ImmutableList.Builder testDereferencedColumnsBuilder) + { + for (int i = 0; i < childColumns.size(); i++) { + TestColumn childColumn = childColumns.get(i); + checkState(childColumn.getDereferenceIndices().size() == 0); + ObjectInspector newObjectInspector = getStandardStructObjectInspector( + ImmutableList.of("field0"), + ImmutableList.of(childColumn.getObjectInspector())); + + HiveType hiveType = (HiveType.valueOf(childColumn.getObjectInspector().getTypeName())); + Type prestoType = hiveType.getType(TYPE_MANAGER); + + List list = new ArrayList<>(); + list.add(childColumn.getWriteValue()); + + TestColumn newProjectedColumn = new TestColumn( + "new_col" + i, newObjectInspector, + ImmutableList.of("field0"), + ImmutableList.of(0), + childColumn.getObjectInspector(), + childColumn.getWriteValue(), + childColumn.getExpectedValue(), + false); + + TestColumn newFullColumn = new TestColumn("new_col" + i, newObjectInspector, list, rowBlockOf(ImmutableList.of(prestoType), childColumn.getExpectedValue())); + + testFullColumnsBuilder.add(newFullColumn); + testDereferencedColumnsBuilder.add(newProjectedColumn); + } + } + + private final List getRegularColumns(List columns) + { + return columns.stream() + .filter(column -> 
!column.isPartitionKey()) + .collect(toImmutableList()); + } + + private final List getPartitionColumns(List columns) + { + return columns.stream() + .filter(column -> column.isPartitionKey()) + .collect(toImmutableList()); + } + private class FileFormatAssertion { private final String formatName; @@ -811,32 +1153,39 @@ public FileFormatAssertion withSession(ConnectorSession session) public FileFormatAssertion isReadableByPageSource(HivePageSourceFactory pageSourceFactory) throws Exception { - assertRead(Optional.of(pageSourceFactory), Optional.empty()); + assertRead(Optional.of(pageSourceFactory), Optional.empty(), false); + return this; + } + + public FileFormatAssertion isReadableByRecordCursorPageSource(HiveRecordCursorProvider cursorProvider) + throws Exception + { + assertRead(Optional.empty(), Optional.of(cursorProvider), true); return this; } public FileFormatAssertion isReadableByRecordCursor(HiveRecordCursorProvider cursorProvider) throws Exception { - assertRead(Optional.empty(), Optional.of(cursorProvider)); + assertRead(Optional.empty(), Optional.of(cursorProvider), false); return this; } public FileFormatAssertion isFailingForPageSource(HivePageSourceFactory pageSourceFactory, HiveErrorCode expectedErrorCode, String expectedMessage) throws Exception { - assertFailure(Optional.of(pageSourceFactory), Optional.empty(), expectedErrorCode, expectedMessage); + assertFailure(Optional.of(pageSourceFactory), Optional.empty(), expectedErrorCode, expectedMessage, false); return this; } public FileFormatAssertion isFailingForRecordCursor(HiveRecordCursorProvider cursorProvider, HiveErrorCode expectedErrorCode, String expectedMessage) throws Exception { - assertFailure(Optional.empty(), Optional.of(cursorProvider), expectedErrorCode, expectedMessage); + assertFailure(Optional.empty(), Optional.of(cursorProvider), expectedErrorCode, expectedMessage, false); return this; } - private void assertRead(Optional pageSourceFactory, Optional cursorProvider) + private void 
assertRead(Optional pageSourceFactory, Optional cursorProvider, boolean withRecordPageSource) throws Exception { assertNotNull(storageFormat, "storageFormat must be specified"); @@ -866,11 +1215,17 @@ private void assertRead(Optional pageSourceFactory, Optio else { split = createTestFile(file.getAbsolutePath(), storageFormat, compressionCodec, writeColumns, rowsCount); } + if (pageSourceFactory.isPresent()) { testPageSourceFactory(pageSourceFactory.get(), split, storageFormat, readColumns, session, rowsCount); } if (cursorProvider.isPresent()) { - testCursorProvider(cursorProvider.get(), split, storageFormat, readColumns, session, rowsCount); + if (withRecordPageSource) { + testRecordPageSource(cursorProvider.get(), split, storageFormat, readColumns, session, rowsCount); + } + else { + testCursorProvider(cursorProvider.get(), split, storageFormat, readColumns, session, rowsCount); + } } } finally { @@ -883,11 +1238,12 @@ private void assertFailure( Optional pageSourceFactory, Optional cursorProvider, HiveErrorCode expectedErrorCode, - String expectedMessage) + String expectedMessage, + boolean withRecordPageSource) throws Exception { try { - assertRead(pageSourceFactory, cursorProvider); + assertRead(pageSourceFactory, cursorProvider, withRecordPageSource); fail("failure is expected"); } catch (PrestoException prestoException) { diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveMetadata.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveMetadata.java index 2a15f05dfb1d..b9fea88293c1 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveMetadata.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveMetadata.java @@ -22,16 +22,17 @@ import java.util.Optional; +import static io.prestosql.plugin.hive.HiveColumnHandle.createBaseColumn; import static io.prestosql.plugin.hive.HiveMetadata.createPredicate; import static io.prestosql.spi.type.VarcharType.VARCHAR; public class TestHiveMetadata { - private 
static final HiveColumnHandle TEST_COLUMN_HANDLE = new HiveColumnHandle( + private static final HiveColumnHandle TEST_COLUMN_HANDLE = createBaseColumn( "test", + 0, HiveType.HIVE_STRING, VARCHAR, - 0, HiveColumnHandle.ColumnType.PARTITION_KEY, Optional.empty()); diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHivePageSink.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHivePageSink.java index 5f9ff82dea67..585a47b49722 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHivePageSink.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHivePageSink.java @@ -60,6 +60,7 @@ import static io.airlift.testing.Assertions.assertGreaterThan; import static io.prestosql.metadata.MetadataManager.createTestMetadataManager; import static io.prestosql.plugin.hive.HiveColumnHandle.ColumnType.REGULAR; +import static io.prestosql.plugin.hive.HiveColumnHandle.createBaseColumn; import static io.prestosql.plugin.hive.HiveCompressionCodec.NONE; import static io.prestosql.plugin.hive.HiveTestUtils.HDFS_ENVIRONMENT; import static io.prestosql.plugin.hive.HiveTestUtils.PAGE_SORTER; @@ -291,7 +292,7 @@ private static List getColumnHandles() for (int i = 0; i < columns.size(); i++) { LineItemColumn column = columns.get(i); HiveType hiveType = getHiveType(column.getType()); - handles.add(new HiveColumnHandle(column.getColumnName(), hiveType, hiveType.getType(TYPE_MANAGER), i, REGULAR, Optional.empty())); + handles.add(createBaseColumn(column.getColumnName(), i, hiveType, hiveType.getType(TYPE_MANAGER), REGULAR, Optional.empty())); } return handles.build(); } diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveReaderProjectionsUtil.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveReaderProjectionsUtil.java new file mode 100644 index 000000000000..3e20795b28ac --- /dev/null +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveReaderProjectionsUtil.java @@ -0,0 +1,92 @@ +/* + * Licensed 
under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.plugin.hive; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import io.prestosql.spi.type.NamedTypeSignature; +import io.prestosql.spi.type.RowFieldName; +import io.prestosql.spi.type.RowType; +import io.prestosql.spi.type.Type; +import io.prestosql.spi.type.TypeManager; +import io.prestosql.type.InternalTypeManager; + +import java.util.List; +import java.util.Map; +import java.util.Optional; + +import static com.google.common.base.Preconditions.checkArgument; +import static io.prestosql.metadata.MetadataManager.createTestMetadataManager; +import static io.prestosql.plugin.hive.HiveColumnHandle.ColumnType.REGULAR; +import static io.prestosql.plugin.hive.HiveColumnHandle.createBaseColumn; +import static io.prestosql.plugin.hive.HiveTestUtils.rowType; +import static io.prestosql.plugin.hive.HiveType.toHiveType; +import static io.prestosql.spi.type.BigintType.BIGINT; + +public class TestHiveReaderProjectionsUtil +{ + private TestHiveReaderProjectionsUtil() {} + + public static final RowType ROWTYPE_OF_PRIMITIVES = rowType(ImmutableList.of( + new NamedTypeSignature(Optional.of(new RowFieldName("f_bigint_0")), BIGINT.getTypeSignature()), + new NamedTypeSignature(Optional.of(new RowFieldName("f_bigint_1")), BIGINT.getTypeSignature()))); + + public static final RowType ROWTYPE_OF_ROW_AND_PRIMITIVES = rowType(ImmutableList.of( + new 
NamedTypeSignature(Optional.of(new RowFieldName("f_row_0")), ROWTYPE_OF_PRIMITIVES.getTypeSignature()), + new NamedTypeSignature(Optional.of(new RowFieldName("f_bigint_0")), BIGINT.getTypeSignature()))); + + public static final TypeManager TYPE_MANAGER = new InternalTypeManager(createTestMetadataManager()); + + public static final HiveTypeTranslator HIVE_TYPE_TRANSLATOR = new HiveTypeTranslator(); + + public static Map createTestFullColumns(List names, Map types) + { + checkArgument(names.size() == types.size()); + + ImmutableMap.Builder hiveColumns = ImmutableMap.builder(); + + int regularColumnHiveIndex = 0; + for (String name : names) { + HiveType hiveType = toHiveType(HIVE_TYPE_TRANSLATOR, types.get(name)); + hiveColumns.put(name, createBaseColumn(name, regularColumnHiveIndex, hiveType, types.get(name), REGULAR, Optional.empty())); + regularColumnHiveIndex++; + } + + return hiveColumns.build(); + } + + static HiveColumnHandle createProjectedColumnHandle(HiveColumnHandle column, List indices) + { + checkArgument(column.isBaseColumn(), "base column is expected here"); + + if (indices.size() == 0) { + return column; + } + + HiveType baseHiveType = column.getHiveType(); + List names = baseHiveType.getHiveDereferenceNames(indices); + HiveType hiveType = baseHiveType.getHiveTypeForDereferences(indices).get(); + + HiveColumnProjectionInfo columnProjection = new HiveColumnProjectionInfo(indices, names, hiveType, hiveType.getType(TYPE_MANAGER)); + + return new HiveColumnHandle( + column.getBaseColumnName(), + column.getBaseHiveColumnIndex(), + column.getBaseHiveType(), + column.getBaseType(), + Optional.of(columnProjection), + column.getColumnType(), + column.getComment()); + } +} diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveSplit.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveSplit.java index cae0f85838b3..c81255592a60 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveSplit.java +++ 
b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveSplit.java @@ -30,6 +30,7 @@ import java.util.OptionalInt; import java.util.Properties; +import static io.prestosql.plugin.hive.HiveColumnHandle.createBaseColumn; import static io.prestosql.plugin.hive.HiveType.HIVE_LONG; import static io.prestosql.plugin.hive.util.HiveBucketing.BucketingVersion.BUCKETING_V1; import static io.prestosql.spi.type.BigintType.BIGINT; @@ -75,7 +76,7 @@ public void testJsonRoundTrip() BUCKETING_V1, 32, 16, - ImmutableList.of(new HiveColumnHandle("col", HIVE_LONG, BIGINT, 5, ColumnType.REGULAR, Optional.of("comment"))))), + ImmutableList.of(createBaseColumn("col", 5, HIVE_LONG, BIGINT, ColumnType.REGULAR, Optional.of("comment"))))), false, Optional.of(deleteDeltaLocations)); diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestIonSqlQueryBuilder.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestIonSqlQueryBuilder.java index 0f713bfaf8e4..365146473a73 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestIonSqlQueryBuilder.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestIonSqlQueryBuilder.java @@ -30,6 +30,7 @@ import static io.prestosql.metadata.MetadataManager.createTestMetadataManager; import static io.prestosql.plugin.hive.HiveColumnHandle.ColumnType.REGULAR; +import static io.prestosql.plugin.hive.HiveColumnHandle.createBaseColumn; import static io.prestosql.plugin.hive.HiveTestUtils.longDecimal; import static io.prestosql.plugin.hive.HiveTestUtils.shortDecimal; import static io.prestosql.plugin.hive.HiveType.HIVE_DATE; @@ -56,9 +57,9 @@ public void testBuildSQL() { IonSqlQueryBuilder queryBuilder = new IonSqlQueryBuilder(typeManager); List columns = ImmutableList.of( - new HiveColumnHandle("n_nationkey", HIVE_INT, INTEGER, 0, REGULAR, Optional.empty()), - new HiveColumnHandle("n_name", HIVE_STRING, VARCHAR, 1, REGULAR, Optional.empty()), - new HiveColumnHandle("n_regionkey", HIVE_INT, INTEGER, 2, REGULAR, 
Optional.empty())); + createBaseColumn("n_nationkey", 0, HIVE_INT, INTEGER, REGULAR, Optional.empty()), + createBaseColumn("n_name", 1, HIVE_STRING, VARCHAR, REGULAR, Optional.empty()), + createBaseColumn("n_regionkey", 2, HIVE_INT, INTEGER, REGULAR, Optional.empty())); assertEquals("SELECT s._1, s._2, s._3 FROM S3Object s", queryBuilder.buildSql(columns, TupleDomain.all())); @@ -81,9 +82,9 @@ public void testDecimalColumns() TypeManager typeManager = this.typeManager; IonSqlQueryBuilder queryBuilder = new IonSqlQueryBuilder(typeManager); List columns = ImmutableList.of( - new HiveColumnHandle("quantity", HiveType.valueOf("decimal(20,0)"), DecimalType.createDecimalType(), 0, REGULAR, Optional.empty()), - new HiveColumnHandle("extendedprice", HiveType.valueOf("decimal(20,2)"), DecimalType.createDecimalType(), 1, REGULAR, Optional.empty()), - new HiveColumnHandle("discount", HiveType.valueOf("decimal(10,2)"), DecimalType.createDecimalType(), 2, REGULAR, Optional.empty())); + createBaseColumn("quantity", 0, HiveType.valueOf("decimal(20,0)"), DecimalType.createDecimalType(), REGULAR, Optional.empty()), + createBaseColumn("extendedprice", 1, HiveType.valueOf("decimal(20,2)"), DecimalType.createDecimalType(), REGULAR, Optional.empty()), + createBaseColumn("discount", 2, HiveType.valueOf("decimal(10,2)"), DecimalType.createDecimalType(), REGULAR, Optional.empty())); DecimalType decimalType = DecimalType.createDecimalType(10, 2); TupleDomain tupleDomain = withColumnDomains( ImmutableMap.of( @@ -101,8 +102,8 @@ public void testDateColumn() { IonSqlQueryBuilder queryBuilder = new IonSqlQueryBuilder(typeManager); List columns = ImmutableList.of( - new HiveColumnHandle("t1", HIVE_TIMESTAMP, TIMESTAMP, 0, REGULAR, Optional.empty()), - new HiveColumnHandle("t2", HIVE_DATE, DATE, 1, REGULAR, Optional.empty())); + createBaseColumn("t1", 0, HIVE_TIMESTAMP, TIMESTAMP, REGULAR, Optional.empty()), + createBaseColumn("t2", 1, HIVE_DATE, DATE, REGULAR, Optional.empty())); TupleDomain 
tupleDomain = withColumnDomains(ImmutableMap.of( columns.get(1), Domain.create(SortedRangeSet.copyOf(DATE, ImmutableList.of(Range.equal(DATE, (long) DateTimeUtils.parseDate("2001-08-22")))), false))); @@ -114,9 +115,9 @@ public void testNotPushDoublePredicates() { IonSqlQueryBuilder queryBuilder = new IonSqlQueryBuilder(typeManager); List columns = ImmutableList.of( - new HiveColumnHandle("quantity", HIVE_INT, INTEGER, 0, REGULAR, Optional.empty()), - new HiveColumnHandle("extendedprice", HIVE_DOUBLE, DOUBLE, 1, REGULAR, Optional.empty()), - new HiveColumnHandle("discount", HIVE_DOUBLE, DOUBLE, 2, REGULAR, Optional.empty())); + createBaseColumn("quantity", 0, HIVE_INT, INTEGER, REGULAR, Optional.empty()), + createBaseColumn("extendedprice", 1, HIVE_DOUBLE, DOUBLE, REGULAR, Optional.empty()), + createBaseColumn("discount", 2, HIVE_DOUBLE, DOUBLE, REGULAR, Optional.empty())); TupleDomain tupleDomain = withColumnDomains( ImmutableMap.of( columns.get(0), Domain.create(ofRanges(Range.lessThan(BIGINT, 50L)), false), diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestOrcPageSourceMemoryTracking.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestOrcPageSourceMemoryTracking.java index c2a309b6a9c0..a2901ffd854a 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestOrcPageSourceMemoryTracking.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestOrcPageSourceMemoryTracking.java @@ -101,6 +101,7 @@ import static io.prestosql.orc.OrcReader.MAX_BATCH_SIZE; import static io.prestosql.plugin.hive.HiveColumnHandle.ColumnType.PARTITION_KEY; import static io.prestosql.plugin.hive.HiveColumnHandle.ColumnType.REGULAR; +import static io.prestosql.plugin.hive.HiveColumnHandle.createBaseColumn; import static io.prestosql.plugin.hive.HiveTestUtils.HDFS_ENVIRONMENT; import static io.prestosql.plugin.hive.HiveTestUtils.SESSION; import static io.prestosql.plugin.hive.HiveTestUtils.TYPE_MANAGER; @@ -454,7 +455,7 @@ public 
TestPreparer(String tempFilePath, List testColumns, int numRo HiveType hiveType = HiveType.valueOf(inspector.getTypeName()); Type type = hiveType.getType(TYPE_MANAGER); - columnsBuilder.add(new HiveColumnHandle(testColumn.getName(), hiveType, type, columnIndex, testColumn.isPartitionKey() ? PARTITION_KEY : REGULAR, Optional.empty())); + columnsBuilder.add(createBaseColumn(testColumn.getName(), columnIndex, hiveType, type, testColumn.isPartitionKey() ? PARTITION_KEY : REGULAR, Optional.empty())); typesBuilder.add(type); } columns = columnsBuilder.build(); diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestReaderProjections.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestReaderProjections.java new file mode 100644 index 000000000000..72f187c33274 --- /dev/null +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestReaderProjections.java @@ -0,0 +1,85 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.plugin.hive; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import io.prestosql.spi.type.Type; +import org.testng.annotations.Test; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +import static io.prestosql.plugin.hive.ReaderProjections.projectBaseColumns; +import static io.prestosql.plugin.hive.TestHiveReaderProjectionsUtil.ROWTYPE_OF_PRIMITIVES; +import static io.prestosql.plugin.hive.TestHiveReaderProjectionsUtil.ROWTYPE_OF_ROW_AND_PRIMITIVES; +import static io.prestosql.plugin.hive.TestHiveReaderProjectionsUtil.createProjectedColumnHandle; +import static io.prestosql.plugin.hive.TestHiveReaderProjectionsUtil.createTestFullColumns; +import static io.prestosql.spi.type.BigintType.BIGINT; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertTrue; + +public class TestReaderProjections +{ + private static final List TEST_COLUMN_NAMES = ImmutableList.of( + "col_bigint", + "col_struct_of_primitives", + "col_struct_of_non_primitives", + "col_partition_key_1", + "col_partition_key_2"); + + private static final Map TEST_COLUMN_TYPES = ImmutableMap.builder() + .put("col_bigint", BIGINT) + .put("col_struct_of_primitives", ROWTYPE_OF_PRIMITIVES) + .put("col_struct_of_non_primitives", ROWTYPE_OF_ROW_AND_PRIMITIVES) + .put("col_partition_key_1", BIGINT) + .put("col_partition_key_2", BIGINT) + .build(); + + private static final Map TEST_FULL_COLUMNS = createTestFullColumns(TEST_COLUMN_NAMES, TEST_COLUMN_TYPES); + + @Test + public void testNoProjections() + { + List columns = new ArrayList<>(TEST_FULL_COLUMNS.values()); + Optional mapping = projectBaseColumns(columns); + assertTrue(!mapping.isPresent(), "Full columns should not require any adaptation"); + } + + @Test + public void testBaseColumnsProjection() + { + List columns = ImmutableList.of( + 
createProjectedColumnHandle(TEST_FULL_COLUMNS.get("col_struct_of_primitives"), ImmutableList.of(0)), + createProjectedColumnHandle(TEST_FULL_COLUMNS.get("col_struct_of_primitives"), ImmutableList.of(1)), + createProjectedColumnHandle(TEST_FULL_COLUMNS.get("col_bigint"), ImmutableList.of()), + createProjectedColumnHandle(TEST_FULL_COLUMNS.get("col_struct_of_non_primitives"), ImmutableList.of(0, 1)), + createProjectedColumnHandle(TEST_FULL_COLUMNS.get("col_struct_of_non_primitives"), ImmutableList.of(0))); + + Optional mapping = projectBaseColumns(columns); + assertTrue(mapping.isPresent(), "Full columns should be created for corresponding projected columns"); + + List readerColumns = mapping.get().getReaderColumns(); + + for (int i = 0; i < columns.size(); i++) { + HiveColumnHandle column = columns.get(i); + int readerIndex = mapping.get().readerColumnPositionForHiveColumnAt(i); + HiveColumnHandle readerColumn = mapping.get().readerColumnForHiveColumnAt(i); + assertEquals(column.getBaseColumn(), readerColumn); + assertEquals(readerColumns.get(readerIndex), readerColumn); + } + } +} diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestReaderProjectionsAdapter.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestReaderProjectionsAdapter.java new file mode 100644 index 000000000000..2e5815eb42e2 --- /dev/null +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestReaderProjectionsAdapter.java @@ -0,0 +1,312 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.plugin.hive; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import io.prestosql.spi.Page; +import io.prestosql.spi.block.Block; +import io.prestosql.spi.block.BlockBuilder; +import io.prestosql.spi.block.ColumnarRow; +import io.prestosql.spi.block.LazyBlock; +import io.prestosql.spi.block.RowBlock; +import io.prestosql.spi.type.RowType; +import io.prestosql.spi.type.Type; +import org.testng.annotations.Test; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +import static com.google.common.base.Preconditions.checkArgument; +import static io.prestosql.block.BlockAssertions.assertBlockEquals; +import static io.prestosql.plugin.hive.ReaderProjections.projectBaseColumns; +import static io.prestosql.plugin.hive.TestHiveReaderProjectionsUtil.ROWTYPE_OF_ROW_AND_PRIMITIVES; +import static io.prestosql.plugin.hive.TestHiveReaderProjectionsUtil.createProjectedColumnHandle; +import static io.prestosql.plugin.hive.TestHiveReaderProjectionsUtil.createTestFullColumns; +import static io.prestosql.plugin.hive.TestReaderProjectionsAdapter.RowData.rowData; +import static io.prestosql.spi.block.ColumnarRow.toColumnarRow; +import static io.prestosql.spi.block.RowBlock.fromFieldBlocks; +import static io.prestosql.spi.type.BigintType.BIGINT; +import static java.util.Objects.requireNonNull; +import static org.testng.Assert.assertFalse; +import static org.testng.Assert.assertTrue; + +public class TestReaderProjectionsAdapter +{ + private static final String TEST_COLUMN_NAME = "col"; + private static final Type TEST_COLUMN_TYPE = ROWTYPE_OF_ROW_AND_PRIMITIVES; + + private static final Map TEST_FULL_COLUMNS = createTestFullColumns( + ImmutableList.of(TEST_COLUMN_NAME), + ImmutableMap.of(TEST_COLUMN_NAME, TEST_COLUMN_TYPE)); 
+ + @Test + public void testAdaptPage() + { + List columns = ImmutableList.of( + createProjectedColumnHandle(TEST_FULL_COLUMNS.get("col"), ImmutableList.of(0, 0)), + createProjectedColumnHandle(TEST_FULL_COLUMNS.get("col"), ImmutableList.of(0))); + + Optional readerProjections = projectBaseColumns(columns); + + List inputBlockData = new ArrayList<>(); + inputBlockData.add(rowData(rowData(11L, 12L, 13L), 1L)); + inputBlockData.add(rowData(null, 2L)); + inputBlockData.add(null); + inputBlockData.add(rowData(rowData(31L, 32L, 33L), 3L)); + + ReaderProjectionsAdapter adapter = new ReaderProjectionsAdapter(columns, readerProjections.get()); + verifyPageAdaptation(adapter, ImmutableList.of(inputBlockData)); + } + + @Test + public void testLazyDereferenceProjectionLoading() + { + List columns = ImmutableList.of(createProjectedColumnHandle(TEST_FULL_COLUMNS.get("col"), ImmutableList.of(0, 0))); + + List inputBlockData = new ArrayList<>(); + inputBlockData.add(rowData(rowData(11L, 12L, 13L), 1L)); + inputBlockData.add(rowData(null, 2L)); + inputBlockData.add(null); + inputBlockData.add(rowData(rowData(31L, 32L, 33L), 3L)); + + // Produce an output page by applying adaptation + Optional readerProjections = projectBaseColumns(columns); + ReaderProjectionsAdapter adapter = new ReaderProjectionsAdapter(columns, readerProjections.get()); + Page inputPage = createPage(ImmutableList.of(inputBlockData), adapter.getInputTypes()); + Page outputPage = adapter.adaptPage(inputPage).getLoadedPage(); + + // Verify that only the block corresponding to subfield "col.f_row_0.f_bigint_0" should be completely loaded, others are not. 
+ + // Assertion for "col" + Block lazyBlockLevel1 = inputPage.getBlock(0); + assertTrue(lazyBlockLevel1 instanceof LazyBlock); + assertFalse(lazyBlockLevel1.isLoaded()); + RowBlock rowBlockLevel1 = ((RowBlock) (((LazyBlock) lazyBlockLevel1).getBlock())); + assertFalse(rowBlockLevel1.isLoaded()); + + // Assertion for "col.f_row_0" and col.f_bigint_0" + ColumnarRow columnarRowLevel1 = toColumnarRow(rowBlockLevel1); + assertFalse(columnarRowLevel1.getField(0).isLoaded()); + assertFalse(columnarRowLevel1.getField(1).isLoaded()); + + Block lazyBlockLevel2 = columnarRowLevel1.getField(0); + assertTrue(lazyBlockLevel2 instanceof LazyBlock); + RowBlock rowBlockLevel2 = ((RowBlock) (((LazyBlock) lazyBlockLevel2).getBlock())); + assertFalse(rowBlockLevel2.isLoaded()); + ColumnarRow columnarRowLevel2 = toColumnarRow(rowBlockLevel2); + // Assertion for "col.f_row_0.f_bigint_0" and "col.f_row_0.f_bigint_1" + assertTrue(columnarRowLevel2.getField(0).isLoaded()); + assertFalse(columnarRowLevel2.getField(1).isLoaded()); + } + + private void verifyPageAdaptation(ReaderProjectionsAdapter adapter, List> inputPageData) + { + List columnMapping = adapter.getOutputToInputMapping(); + List outputTypes = adapter.getOutputTypes(); + List inputTypes = adapter.getInputTypes(); + + Page inputPage = createPage(inputPageData, inputTypes); + Page outputPage = adapter.adaptPage(inputPage).getLoadedPage(); + + // Verify output block values + for (int i = 0; i < columnMapping.size(); i++) { + ReaderProjectionsAdapter.ChannelMapping mapping = columnMapping.get(i); + int inputBlockIndex = mapping.getInputChannelIndex(); + verifyBlock( + outputPage.getBlock(i), + outputTypes.get(i), + inputPage.getBlock(inputBlockIndex), + inputTypes.get(inputBlockIndex), + mapping.getDereferenceSequence()); + } + } + + private static Page createPage(List> pageData, List types) + { + Block[] inputPageBlocks = new Block[pageData.size()]; + for (int i = 0; i < inputPageBlocks.length; i++) { + inputPageBlocks[i] = 
createInputBlock(pageData.get(i), types.get(i)); + } + + return new Page(inputPageBlocks); + } + + private static Block createInputBlock(List data, Type type) + { + int positionCount = data.size(); + + if (type instanceof RowType) { + return new LazyBlock(data.size(), () -> createRowBlockWithLazyNestedBlocks(data, (RowType) type)); + } + else if (BIGINT.equals(type)) { + return new LazyBlock(positionCount, () -> createLongArrayBlock(data)); + } + else { + throw new UnsupportedOperationException(); + } + } + + private static Block createRowBlockWithLazyNestedBlocks(List data, RowType rowType) + { + int positionCount = data.size(); + + boolean[] isNull = new boolean[positionCount]; + int fieldCount = rowType.getFields().size(); + + List> fieldsData = new ArrayList<>(); + for (int i = 0; i < fieldCount; i++) { + fieldsData.add(new ArrayList<>()); + } + + // Extract data to generate fieldBlocks + for (int position = 0; position < data.size(); position++) { + RowData row = (RowData) data.get(position); + if (row == null) { + isNull[position] = true; + } + else { + for (int field = 0; field < fieldCount; field++) { + fieldsData.get(field).add(row.getField(field)); + } + } + } + + Block[] fieldBlocks = new Block[fieldCount]; + for (int field = 0; field < fieldCount; field++) { + fieldBlocks[field] = createInputBlock(fieldsData.get(field), rowType.getFields().get(field).getType()); + } + + return fromFieldBlocks(positionCount, Optional.of(isNull), fieldBlocks); + } + + private static Block createLongArrayBlock(List data) + { + BlockBuilder builder = BIGINT.createBlockBuilder(null, data.size()); + for (int i = 0; i < data.size(); i++) { + Long value = (Long) data.get(i); + if (value == null) { + builder.appendNull(); + } + else { + builder.writeLong(value); + } + } + return builder.build(); + } + + private static void verifyBlock(Block actualBlock, Type outputType, Block input, Type inputType, List dereferences) + { + Block expectedOutputBlock = 
createProjectedColumnBlock(input, outputType, inputType, dereferences); + assertBlockEquals(outputType, actualBlock, expectedOutputBlock); + } + + private static Block createProjectedColumnBlock(Block data, Type finalType, Type blockType, List dereferences) + { + if (dereferences.size() == 0) { + return data; + } + + BlockBuilder builder = finalType.createBlockBuilder(null, data.getPositionCount()); + + for (int i = 0; i < data.getPositionCount(); i++) { + Type sourceType = blockType; + + Block currentData = null; + boolean isNull = data.isNull(i); + + if (!isNull) { + // Get SingleRowBlock corresponding to element at position i + currentData = data.getObject(i, Block.class); + } + + // Apply all dereferences except for the last one, because the type can be different + for (int j = 0; j < dereferences.size() - 1; j++) { + if (isNull) { + // If null element is discovered at any dereferencing step, break + break; + } + + checkArgument(sourceType instanceof RowType); + if (currentData.isNull(dereferences.get(j))) { + currentData = null; + } + else { + sourceType = ((RowType) sourceType).getFields().get(dereferences.get(j)).getType(); + currentData = currentData.getObject(dereferences.get(j), Block.class); + } + + isNull = isNull || (currentData == null); + } + + if (isNull) { + // Append null if any of the elements in the dereference chain were null + builder.appendNull(); + } + else { + int lastDereference = dereferences.get(dereferences.size() - 1); + + if (currentData.isNull(lastDereference)) { + // Append null if the last dereference is null + builder.appendNull(); + } + else { + // Append actual values otherwise + if (finalType.equals(BIGINT)) { + Long value = currentData.getLong(lastDereference, 0); + builder.writeLong(value); + } + else if (finalType instanceof RowType) { + Block block = currentData.getObject(lastDereference, Block.class); + builder.appendStructure(block); + } + else { + throw new UnsupportedOperationException(); + } + } + } + } + + return 
builder.build(); + } + + static class RowData + { + private final List data; + + private RowData(Object... data) + { + this.data = requireNonNull(Arrays.asList(data), "data is null"); + } + + static RowData rowData(Object... data) + { + return new RowData(data); + } + + List getData() + { + return data; + } + + Object getField(int field) + { + checkArgument(field >= 0 && field < data.size()); + return data.get(field); + } + } +} diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/benchmark/FileFormat.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/benchmark/FileFormat.java index 2eddbc0524bf..0c4320b35e1f 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/benchmark/FileFormat.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/benchmark/FileFormat.java @@ -28,7 +28,9 @@ import io.prestosql.plugin.hive.HiveColumnHandle; import io.prestosql.plugin.hive.HiveCompressionCodec; import io.prestosql.plugin.hive.HivePageSourceFactory; +import io.prestosql.plugin.hive.HivePageSourceFactory.ReaderPageSourceWithProjections; import io.prestosql.plugin.hive.HiveRecordCursorProvider; +import io.prestosql.plugin.hive.HiveRecordCursorProvider.ReaderRecordCursorWithProjections; import io.prestosql.plugin.hive.HiveStorageFormat; import io.prestosql.plugin.hive.HiveType; import io.prestosql.plugin.hive.HiveTypeName; @@ -49,7 +51,6 @@ import io.prestosql.spi.Page; import io.prestosql.spi.connector.ConnectorPageSource; import io.prestosql.spi.connector.ConnectorSession; -import io.prestosql.spi.connector.RecordCursor; import io.prestosql.spi.connector.RecordPageSource; import io.prestosql.spi.predicate.TupleDomain; import io.prestosql.spi.type.Type; @@ -66,8 +67,10 @@ import java.util.Optional; import java.util.Properties; +import static com.google.common.base.Preconditions.checkState; import static io.prestosql.orc.OrcWriteValidation.OrcWriteValidationMode.BOTH; import static io.prestosql.plugin.hive.HiveColumnHandle.ColumnType.REGULAR; 
+import static io.prestosql.plugin.hive.HiveColumnHandle.createBaseColumn; import static io.prestosql.plugin.hive.HiveTestUtils.TYPE_MANAGER; import static io.prestosql.plugin.hive.HiveTestUtils.createGenericHiveRecordCursorProvider; import static io.prestosql.plugin.hive.HiveType.toHiveType; @@ -299,10 +302,10 @@ private static ConnectorPageSource createPageSource( for (int i = 0; i < columnNames.size(); i++) { String columnName = columnNames.get(i); Type columnType = columnTypes.get(i); - columnHandles.add(new HiveColumnHandle(columnName, toHiveType(typeTranslator, columnType), columnType, i, REGULAR, Optional.empty())); + columnHandles.add(createBaseColumn(columnName, i, toHiveType(typeTranslator, columnType), columnType, REGULAR, Optional.empty())); } - RecordCursor recordCursor = cursorProvider + Optional recordCursorWithProjections = cursorProvider .createRecordCursor( conf, session, @@ -315,9 +318,12 @@ private static ConnectorPageSource createPageSource( TupleDomain.all(), DateTimeZone.forID(session.getTimeZoneKey().getId()), TYPE_MANAGER, - false) - .get(); - return new RecordPageSource(columnTypes, recordCursor); + false); + + checkState(recordCursorWithProjections.isPresent(), "recordCursorWithProjections is not present"); + checkState(!recordCursorWithProjections.get().getProjectedReaderColumns().isPresent(), "projections should not be required"); + + return new RecordPageSource(columnTypes, recordCursorWithProjections.get().getRecordCursor()); } private static ConnectorPageSource createPageSource( @@ -333,10 +339,10 @@ private static ConnectorPageSource createPageSource( for (int i = 0; i < columnNames.size(); i++) { String columnName = columnNames.get(i); Type columnType = columnTypes.get(i); - columnHandles.add(new HiveColumnHandle(columnName, toHiveType(typeTranslator, columnType), columnType, i, REGULAR, Optional.empty())); + columnHandles.add(createBaseColumn(columnName, i, toHiveType(typeTranslator, columnType), columnType, REGULAR, 
Optional.empty())); } - return pageSourceFactory + Optional readerPageSourceWithProjections = pageSourceFactory .createPageSource( conf, session, @@ -348,8 +354,12 @@ private static ConnectorPageSource createPageSource( columnHandles, TupleDomain.all(), DateTimeZone.forID(session.getTimeZoneKey().getId()), - Optional.empty()) - .get(); + Optional.empty()); + + checkState(readerPageSourceWithProjections.isPresent(), "readerPageSourceWithProjections is not present"); + checkState(!readerPageSourceWithProjections.get().getProjectedReaderColumns().isPresent(), "projection should not be required"); + + return readerPageSourceWithProjections.get().getConnectorPageSource(); } private static class RecordFormatWriter diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/orc/TestOrcPageSourceFactory.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/orc/TestOrcPageSourceFactory.java index a13794080653..9040c271f00e 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/orc/TestOrcPageSourceFactory.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/orc/TestOrcPageSourceFactory.java @@ -20,6 +20,7 @@ import io.prestosql.plugin.hive.FileFormatDataSourceStats; import io.prestosql.plugin.hive.HiveColumnHandle; import io.prestosql.plugin.hive.HivePageSourceFactory; +import io.prestosql.plugin.hive.HivePageSourceFactory.ReaderPageSourceWithProjections; import io.prestosql.plugin.hive.HiveTypeTranslator; import io.prestosql.spi.Page; import io.prestosql.spi.connector.ConnectorPageSource; @@ -44,9 +45,11 @@ import java.util.Set; import java.util.function.LongPredicate; +import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.collect.ImmutableList.toImmutableList; import static com.google.common.io.Resources.getResource; import static io.prestosql.plugin.hive.HiveColumnHandle.ColumnType.REGULAR; +import static io.prestosql.plugin.hive.HiveColumnHandle.createBaseColumn; import static 
io.prestosql.plugin.hive.HiveStorageFormat.ORC; import static io.prestosql.plugin.hive.HiveTestUtils.HDFS_ENVIRONMENT; import static io.prestosql.plugin.hive.HiveTestUtils.SESSION; @@ -156,7 +159,7 @@ private static List readFile(Set columns, TupleDomain pageSourceWithProjections = PAGE_SOURCE_FACTORY.createPageSource( new JobConf(new Configuration(false)), SESSION, new Path(nationFileWithReplicatedRows.getAbsoluteFile().toURI()), @@ -167,7 +170,13 @@ private static List readFile(Set columns, TupleDomain"), new ArrayType(INTEGER), 0, REGULAR, Optional.empty()); + HiveColumnHandle columnHandle = createBaseColumn("my_array", 0, HiveType.valueOf("array"), new ArrayType(INTEGER), REGULAR, Optional.empty()); TupleDomain domain = withColumnDomains(ImmutableMap.of(columnHandle, Domain.notNull(new ArrayType(INTEGER)))); MessageType fileSchema = new MessageType("hive_schema", @@ -73,7 +74,8 @@ public void testParquetTupleDomainStructArray() RowType.Field rowField = new RowType.Field(Optional.of("a"), INTEGER); RowType rowType = RowType.from(ImmutableList.of(rowField)); - HiveColumnHandle columnHandle = new HiveColumnHandle("my_array_struct", HiveType.valueOf("array>"), rowType, 0, REGULAR, Optional.empty()); + HiveColumnHandle columnHandle = createBaseColumn("my_array_struct", 0, HiveType.valueOf("array>"), rowType, REGULAR, Optional.empty()); + TupleDomain domain = withColumnDomains(ImmutableMap.of(columnHandle, Domain.notNull(new ArrayType(rowType)))); MessageType fileSchema = new MessageType("hive_schema", @@ -89,7 +91,7 @@ public void testParquetTupleDomainStructArray() @Test public void testParquetTupleDomainPrimitive() { - HiveColumnHandle columnHandle = new HiveColumnHandle("my_primitive", HiveType.valueOf("bigint"), BIGINT, 0, REGULAR, Optional.empty()); + HiveColumnHandle columnHandle = createBaseColumn("my_primitive", 0, HiveType.valueOf("bigint"), BIGINT, REGULAR, Optional.empty()); Domain singleValueDomain = Domain.singleValue(BIGINT, 123L); TupleDomain domain = 
withColumnDomains(ImmutableMap.of(columnHandle, singleValueDomain)); @@ -114,7 +116,7 @@ public void testParquetTupleDomainStruct() RowType.field("a", INTEGER), RowType.field("b", INTEGER)); - HiveColumnHandle columnHandle = new HiveColumnHandle("my_struct", HiveType.valueOf("struct"), rowType, 0, REGULAR, Optional.empty()); + HiveColumnHandle columnHandle = createBaseColumn("my_struct", 0, HiveType.valueOf("struct"), rowType, REGULAR, Optional.empty()); TupleDomain domain = withColumnDomains(ImmutableMap.of(columnHandle, Domain.notNull(rowType))); MessageType fileSchema = new MessageType("hive_schema", @@ -137,7 +139,7 @@ public void testParquetTupleDomainMap() methodHandle(TestParquetPredicateUtils.class, "throwUnsupportedOperationException"), methodHandle(TestParquetPredicateUtils.class, "throwUnsupportedOperationException")); - HiveColumnHandle columnHandle = new HiveColumnHandle("my_map", HiveType.valueOf("map"), mapType, 0, REGULAR, Optional.empty()); + HiveColumnHandle columnHandle = createBaseColumn("my_map", 0, HiveType.valueOf("map"), mapType, REGULAR, Optional.empty()); TupleDomain domain = withColumnDomains(ImmutableMap.of(columnHandle, Domain.notNull(mapType))); diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/s3select/TestS3SelectRecordCursor.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/s3select/TestS3SelectRecordCursor.java index 6d70c80c2b64..a81b646bd2f2 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/s3select/TestS3SelectRecordCursor.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/s3select/TestS3SelectRecordCursor.java @@ -25,6 +25,7 @@ import java.util.stream.Stream; import static io.prestosql.plugin.hive.HiveColumnHandle.ColumnType.REGULAR; +import static io.prestosql.plugin.hive.HiveColumnHandle.createBaseColumn; import static io.prestosql.plugin.hive.HiveType.HIVE_INT; import static io.prestosql.plugin.hive.HiveType.HIVE_STRING; import static 
io.prestosql.plugin.hive.s3select.S3SelectRecordCursor.updateSplitSchema; @@ -43,10 +44,10 @@ public class TestS3SelectRecordCursor { private static final String LAZY_SERDE_CLASS_NAME = LazySimpleSerDe.class.getName(); - private static final HiveColumnHandle ARTICLE_COLUMN = new HiveColumnHandle("article", HIVE_STRING, VARCHAR, 1, REGULAR, Optional.empty()); - private static final HiveColumnHandle AUTHOR_COLUMN = new HiveColumnHandle("author", HIVE_STRING, VARCHAR, 1, REGULAR, Optional.empty()); - private static final HiveColumnHandle DATE_ARTICLE_COLUMN = new HiveColumnHandle("date_pub", HIVE_INT, DATE, 1, REGULAR, Optional.empty()); - private static final HiveColumnHandle QUANTITY_COLUMN = new HiveColumnHandle("quantity", HIVE_INT, INTEGER, 1, REGULAR, Optional.empty()); + private static final HiveColumnHandle ARTICLE_COLUMN = createBaseColumn("article", 1, HIVE_STRING, VARCHAR, REGULAR, Optional.empty()); + private static final HiveColumnHandle AUTHOR_COLUMN = createBaseColumn("author", 1, HIVE_STRING, VARCHAR, REGULAR, Optional.empty()); + private static final HiveColumnHandle DATE_ARTICLE_COLUMN = createBaseColumn("date_pub", 1, HIVE_INT, DATE, REGULAR, Optional.empty()); + private static final HiveColumnHandle QUANTITY_COLUMN = createBaseColumn("quantity", 1, HIVE_INT, INTEGER, REGULAR, Optional.empty()); private static final HiveColumnHandle[] DEFAULT_TEST_COLUMNS = {ARTICLE_COLUMN, AUTHOR_COLUMN, DATE_ARTICLE_COLUMN, QUANTITY_COLUMN}; @Test(expectedExceptions = IllegalArgumentException.class, expectedExceptionsMessageRegExp = "Invalid Thrift DDL struct article \\{ \\}") diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/statistics/TestMetastoreHiveStatisticsProvider.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/statistics/TestMetastoreHiveStatisticsProvider.java index 318f3fd619ea..cea0060675ce 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/statistics/TestMetastoreHiveStatisticsProvider.java +++ 
b/presto-hive/src/test/java/io/prestosql/plugin/hive/statistics/TestMetastoreHiveStatisticsProvider.java @@ -45,6 +45,7 @@ import static io.prestosql.plugin.hive.HiveColumnHandle.ColumnType.PARTITION_KEY; import static io.prestosql.plugin.hive.HiveColumnHandle.ColumnType.REGULAR; +import static io.prestosql.plugin.hive.HiveColumnHandle.createBaseColumn; import static io.prestosql.plugin.hive.HiveErrorCode.HIVE_CORRUPTED_COLUMN_STATISTICS; import static io.prestosql.plugin.hive.HivePartition.UNPARTITIONED_ID; import static io.prestosql.plugin.hive.HivePartitionManager.parsePartition; @@ -93,8 +94,8 @@ public class TestMetastoreHiveStatisticsProvider private static final String COLUMN = "column"; private static final DecimalType DECIMAL = createDecimalType(5, 3); - private static final HiveColumnHandle PARTITION_COLUMN_1 = new HiveColumnHandle("p1", HIVE_STRING, VARCHAR, 0, PARTITION_KEY, Optional.empty()); - private static final HiveColumnHandle PARTITION_COLUMN_2 = new HiveColumnHandle("p2", HIVE_LONG, BIGINT, 1, PARTITION_KEY, Optional.empty()); + private static final HiveColumnHandle PARTITION_COLUMN_1 = createBaseColumn("p1", 0, HIVE_STRING, VARCHAR, PARTITION_KEY, Optional.empty()); + private static final HiveColumnHandle PARTITION_COLUMN_2 = createBaseColumn("p2", 1, HIVE_LONG, BIGINT, PARTITION_KEY, Optional.empty()); @Test public void testGetPartitionsSample() @@ -609,7 +610,7 @@ public void testGetTableStatistics() .setColumnStatistics(ImmutableMap.of(COLUMN, createIntegerColumnStatistics(OptionalLong.of(-100), OptionalLong.of(100), OptionalLong.of(500), OptionalLong.of(300)))) .build(); MetastoreHiveStatisticsProvider statisticsProvider = new MetastoreHiveStatisticsProvider((session, table, hivePartitions) -> ImmutableMap.of(partitionName, statistics)); - HiveColumnHandle columnHandle = new HiveColumnHandle(COLUMN, HIVE_LONG, BIGINT, 2, REGULAR, Optional.empty()); + HiveColumnHandle columnHandle = createBaseColumn(COLUMN, 2, HIVE_LONG, BIGINT, REGULAR, 
Optional.empty()); TableStatistics expected = TableStatistics.builder() .setRowCount(Estimate.of(1000)) .setColumnStatistics( @@ -658,7 +659,9 @@ public void testGetTableStatisticsUnpartitioned() .setColumnStatistics(ImmutableMap.of(COLUMN, createIntegerColumnStatistics(OptionalLong.of(-100), OptionalLong.of(100), OptionalLong.of(500), OptionalLong.of(300)))) .build(); MetastoreHiveStatisticsProvider statisticsProvider = new MetastoreHiveStatisticsProvider((session, table, hivePartitions) -> ImmutableMap.of(UNPARTITIONED_ID, statistics)); - HiveColumnHandle columnHandle = new HiveColumnHandle(COLUMN, HIVE_LONG, BIGINT, 2, REGULAR, Optional.empty()); + + HiveColumnHandle columnHandle = createBaseColumn(COLUMN, 2, HIVE_LONG, BIGINT, REGULAR, Optional.empty()); + TableStatistics expected = TableStatistics.builder() .setRowCount(Estimate.of(1000)) .setColumnStatistics( From 9aacfde3d12e2d6fa8e145df80d3b57e3bc9d716 Mon Sep 17 00:00:00 2001 From: Pratham Desai Date: Tue, 24 Mar 2020 23:12:54 -0700 Subject: [PATCH 102/519] Implement pushdown of dereference projections into hive connector --- .../plugin/hive/TestHiveAlluxioMetastore.java | 6 + .../plugin/hive/HiveApplyProjectionUtil.java | 189 ++++++++++++++++++ .../prestosql/plugin/hive/HiveMetadata.java | 103 ++++++++++ .../plugin/hive/AbstractTestHive.java | 147 ++++++++++++++ .../hive/TestHiveApplyProjectionUtil.java | 66 ++++++ 5 files changed, 511 insertions(+) create mode 100644 presto-hive/src/main/java/io/prestosql/plugin/hive/HiveApplyProjectionUtil.java create mode 100644 presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveApplyProjectionUtil.java diff --git a/presto-hive-hadoop2/src/test/java/io/prestosql/plugin/hive/TestHiveAlluxioMetastore.java b/presto-hive-hadoop2/src/test/java/io/prestosql/plugin/hive/TestHiveAlluxioMetastore.java index ee7441d95d59..c179fde223d3 100644 --- a/presto-hive-hadoop2/src/test/java/io/prestosql/plugin/hive/TestHiveAlluxioMetastore.java +++ 
b/presto-hive-hadoop2/src/test/java/io/prestosql/plugin/hive/TestHiveAlluxioMetastore.java @@ -210,6 +210,12 @@ public void testPartitionStatisticsSampling() // Alluxio metastore does not support create operations } + @Override + public void testApplyProjection() + { + // Alluxio metastore does not support create/delete operations + } + @Override public void testPreferredInsertLayout() { diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveApplyProjectionUtil.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveApplyProjectionUtil.java new file mode 100644 index 000000000000..13c7413eada9 --- /dev/null +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveApplyProjectionUtil.java @@ -0,0 +1,189 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.plugin.hive; + +import com.google.common.annotations.VisibleForTesting; +import com.google.common.collect.ImmutableList; +import io.prestosql.spi.connector.ColumnHandle; +import io.prestosql.spi.expression.ConnectorExpression; +import io.prestosql.spi.expression.FieldDereference; +import io.prestosql.spi.expression.Variable; + +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; + +import static java.util.Objects.requireNonNull; + +final class HiveApplyProjectionUtil +{ + private HiveApplyProjectionUtil() {} + + public static List extractSupportedProjectedColumns(ConnectorExpression expression) + { + requireNonNull(expression, "expression is null"); + ImmutableList.Builder supportedSubExpressions = ImmutableList.builder(); + fillSupportedProjectedColumns(expression, supportedSubExpressions); + return supportedSubExpressions.build(); + } + + private static void fillSupportedProjectedColumns(ConnectorExpression expression, ImmutableList.Builder supportedSubExpressions) + { + if (isPushDownSupported(expression)) { + supportedSubExpressions.add(expression); + return; + } + + // If the whole expression is not supported, look for a partially supported projection + if (expression instanceof FieldDereference) { + fillSupportedProjectedColumns(((FieldDereference) expression).getTarget(), supportedSubExpressions); + } + } + + @VisibleForTesting + static boolean isPushDownSupported(ConnectorExpression expression) + { + return expression instanceof Variable || + (expression instanceof FieldDereference && isPushDownSupported(((FieldDereference) expression).getTarget())); + } + + public static ProjectedColumnRepresentation createProjectedColumnRepresentation(ConnectorExpression expression) + { + ImmutableList.Builder ordinals = ImmutableList.builder(); + + Variable target; + while (true) { + if (expression instanceof Variable) { + target = (Variable) expression; + break; + } + else if (expression 
instanceof FieldDereference) { + FieldDereference dereference = (FieldDereference) expression; + ordinals.add(dereference.getField()); + expression = dereference.getTarget(); + } + else { + throw new IllegalArgumentException("expression is not a valid dereference chain"); + } + } + + return new ProjectedColumnRepresentation(target, ordinals.build().reverse()); + } + + /** + * Replace all connector expressions with variables as given by {@param expressionToVariableMappings} in a top down manner. + * i.e. if the replacement occurs for the parent, the children will not be visited. + */ + public static ConnectorExpression replaceWithNewVariables(ConnectorExpression expression, Map expressionToVariableMappings) + { + if (expressionToVariableMappings.containsKey(expression)) { + return expressionToVariableMappings.get(expression); + } + + if (expression instanceof FieldDereference) { + ConnectorExpression newTarget = replaceWithNewVariables(((FieldDereference) expression).getTarget(), expressionToVariableMappings); + return new FieldDereference(expression.getType(), newTarget, ((FieldDereference) expression).getField()); + } + + return expression; + } + + /** + * Returns the assignment key corresponding to the column represented by {@param projectedColumn} in the {@param assignments}, if one exists. + * The variable in the {@param projectedColumn} can itself be a representation of another projected column. For example, + * say a projected column representation has variable "x" and a dereferenceIndices=[0]. "x" can in-turn map to a projected + * column handle with base="a" and [1, 2] as dereference indices. Then the method searches for a column handle in + * {@param assignments} with base="a" and dereferenceIndices=[1, 2, 0]. 
+ */ + public static Optional find(Map assignments, ProjectedColumnRepresentation projectedColumn) + { + HiveColumnHandle variableColumn = (HiveColumnHandle) assignments.get(projectedColumn.getVariable().getName()); + + if (variableColumn == null) { + return Optional.empty(); + } + + String baseColumnName = variableColumn.getBaseColumnName(); + + List variableColumnIndices = variableColumn.getHiveColumnProjectionInfo() + .map(HiveColumnProjectionInfo::getDereferenceIndices) + .orElse(ImmutableList.of()); + + List projectionIndices = ImmutableList.builder() + .addAll(variableColumnIndices) + .addAll(projectedColumn.getDereferenceIndices()) + .build(); + + for (Map.Entry entry : assignments.entrySet()) { + HiveColumnHandle column = (HiveColumnHandle) entry.getValue(); + if (column.getBaseColumnName().equals(baseColumnName) && + column.getHiveColumnProjectionInfo() + .map(HiveColumnProjectionInfo::getDereferenceIndices) + .orElse(ImmutableList.of()) + .equals(projectionIndices)) { + return Optional.of(entry.getKey()); + } + } + + return Optional.empty(); + } + + public static class ProjectedColumnRepresentation + { + private final Variable variable; + private final List dereferenceIndices; + + public ProjectedColumnRepresentation(Variable variable, List dereferenceIndices) + { + this.variable = requireNonNull(variable, "variable is null"); + this.dereferenceIndices = ImmutableList.copyOf(requireNonNull(dereferenceIndices, "dereferenceIndices is null")); + } + + public Variable getVariable() + { + return variable; + } + + public List getDereferenceIndices() + { + return dereferenceIndices; + } + + public boolean isVariable() + { + return dereferenceIndices.isEmpty(); + } + + @Override + public boolean equals(Object obj) + { + if (this == obj) { + return true; + } + if ((obj == null) || (getClass() != obj.getClass())) { + return false; + } + ProjectedColumnRepresentation that = (ProjectedColumnRepresentation) obj; + return Objects.equals(variable, that.variable) && + 
Objects.equals(dereferenceIndices, that.dereferenceIndices); + } + + @Override + public int hashCode() + { + return Objects.hash(variable, dereferenceIndices); + } + } +} diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveMetadata.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveMetadata.java index 4382e5a3a26b..e3774b232b9c 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveMetadata.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveMetadata.java @@ -29,6 +29,7 @@ import io.airlift.slice.Slice; import io.prestosql.plugin.base.CatalogName; import io.prestosql.plugin.hive.HdfsEnvironment.HdfsContext; +import io.prestosql.plugin.hive.HiveApplyProjectionUtil.ProjectedColumnRepresentation; import io.prestosql.plugin.hive.LocationService.WriteInfo; import io.prestosql.plugin.hive.authentication.HiveIdentity; import io.prestosql.plugin.hive.metastore.Column; @@ -65,11 +66,15 @@ import io.prestosql.spi.connector.ConstraintApplicationResult; import io.prestosql.spi.connector.DiscretePredicates; import io.prestosql.spi.connector.InMemoryRecordSet; +import io.prestosql.spi.connector.ProjectionApplicationResult; +import io.prestosql.spi.connector.ProjectionApplicationResult.Assignment; import io.prestosql.spi.connector.SchemaTableName; import io.prestosql.spi.connector.SchemaTablePrefix; import io.prestosql.spi.connector.SystemTable; import io.prestosql.spi.connector.TableNotFoundException; import io.prestosql.spi.connector.ViewNotFoundException; +import io.prestosql.spi.expression.ConnectorExpression; +import io.prestosql.spi.expression.Variable; import io.prestosql.spi.predicate.Domain; import io.prestosql.spi.predicate.NullableValue; import io.prestosql.spi.predicate.TupleDomain; @@ -101,6 +106,7 @@ import java.util.ArrayList; import java.util.Collection; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.NoSuchElementException; @@ -126,6 +132,9 
@@ import static com.google.common.collect.Streams.stream; import static io.prestosql.plugin.hive.HiveAnalyzeProperties.getColumnNames; import static io.prestosql.plugin.hive.HiveAnalyzeProperties.getPartitionList; +import static io.prestosql.plugin.hive.HiveApplyProjectionUtil.extractSupportedProjectedColumns; +import static io.prestosql.plugin.hive.HiveApplyProjectionUtil.find; +import static io.prestosql.plugin.hive.HiveApplyProjectionUtil.replaceWithNewVariables; import static io.prestosql.plugin.hive.HiveBasicStatistics.createEmptyStatistics; import static io.prestosql.plugin.hive.HiveBasicStatistics.createZeroStatistics; import static io.prestosql.plugin.hive.HiveColumnHandle.BUCKET_COLUMN_NAME; @@ -1923,6 +1932,100 @@ public void validateScan(ConnectorSession session, ConnectorTableHandle tableHan } } + @Override + public Optional> applyProjection( + ConnectorSession session, + ConnectorTableHandle handle, + List projections, + Map assignments) + { + // Create projected column representations for supported sub expressions. Simple column references and chain of + // dereferences on a variable are supported right now. 
+ Set projectedExpressions = projections.stream() + .flatMap(expression -> extractSupportedProjectedColumns(expression).stream()) + .collect(toImmutableSet()); + + Map columnProjections = projectedExpressions.stream() + .collect(toImmutableMap(Function.identity(), HiveApplyProjectionUtil::createProjectedColumnRepresentation)); + + // No pushdown required if all references are simple variables + if (columnProjections.values().stream().allMatch(ProjectedColumnRepresentation::isVariable)) { + return Optional.empty(); + } + + Map newAssignments = new HashMap<>(); + ImmutableMap.Builder expressionToVariableMappings = ImmutableMap.builder(); + + for (Map.Entry entry : columnProjections.entrySet()) { + ConnectorExpression expression = entry.getKey(); + ProjectedColumnRepresentation projectedColumn = entry.getValue(); + + ColumnHandle projectedColumnHandle; + String projectedColumnName; + + // See if input already contains a columnhandle for this projected column, avoid creating duplicates. + Optional existingColumn = find(assignments, projectedColumn); + + if (existingColumn.isPresent()) { + projectedColumnName = existingColumn.get(); + projectedColumnHandle = assignments.get(projectedColumnName); + } + else { + // Create a new column handle + HiveColumnHandle oldColumnHandle = (HiveColumnHandle) assignments.get(projectedColumn.getVariable().getName()); + projectedColumnHandle = createProjectedColumnHandle(oldColumnHandle, projectedColumn.getDereferenceIndices()); + projectedColumnName = ((HiveColumnHandle) projectedColumnHandle).getName(); + } + + Variable projectedColumnVariable = new Variable(projectedColumnName, expression.getType()); + Assignment newAssignment = new Assignment(projectedColumnName, projectedColumnHandle, expression.getType()); + newAssignments.put(projectedColumnName, newAssignment); + + expressionToVariableMappings.put(expression, projectedColumnVariable); + } + + // Modify projections to refer to new variables + List newProjections = 
projections.stream() + .map(expression -> replaceWithNewVariables(expression, expressionToVariableMappings.build())) + .collect(toImmutableList()); + + List outputAssignments = newAssignments.values().stream().collect(toImmutableList()); + return Optional.of(new ProjectionApplicationResult<>(handle, newProjections, outputAssignments)); + } + + private HiveColumnHandle createProjectedColumnHandle(HiveColumnHandle column, List indices) + { + HiveType oldHiveType = column.getHiveType(); + HiveType newHiveType = oldHiveType.getHiveTypeForDereferences(indices).get(); + + HiveColumnProjectionInfo columnProjectionInfo = new HiveColumnProjectionInfo( + // Merge indices + ImmutableList.builder() + .addAll(column.getHiveColumnProjectionInfo() + .map(HiveColumnProjectionInfo::getDereferenceIndices) + .orElse(ImmutableList.of())) + .addAll(indices) + .build(), + // Merge names + ImmutableList.builder() + .addAll(column.getHiveColumnProjectionInfo() + .map(HiveColumnProjectionInfo::getDereferenceNames) + .orElse(ImmutableList.of())) + .addAll(oldHiveType.getHiveDereferenceNames(indices)) + .build(), + newHiveType, + newHiveType.getType(typeManager)); + + return new HiveColumnHandle( + column.getBaseColumnName(), + column.getBaseHiveColumnIndex(), + column.getBaseHiveType(), + column.getBaseType(), + Optional.of(columnProjectionInfo), + column.getColumnType(), + column.getComment()); + } + @Override public Optional getCommonPartitioningHandle(ConnectorSession session, ConnectorPartitioningHandle left, ConnectorPartitioningHandle right) { diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/AbstractTestHive.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/AbstractTestHive.java index b73ba6e572cc..93806c6ef484 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/AbstractTestHive.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/AbstractTestHive.java @@ -84,12 +84,17 @@ import io.prestosql.spi.connector.Constraint; import 
io.prestosql.spi.connector.ConstraintApplicationResult; import io.prestosql.spi.connector.DiscretePredicates; +import io.prestosql.spi.connector.ProjectionApplicationResult; +import io.prestosql.spi.connector.ProjectionApplicationResult.Assignment; import io.prestosql.spi.connector.RecordCursor; import io.prestosql.spi.connector.RecordPageSource; import io.prestosql.spi.connector.SchemaTableName; import io.prestosql.spi.connector.SchemaTablePrefix; import io.prestosql.spi.connector.TableNotFoundException; import io.prestosql.spi.connector.ViewNotFoundException; +import io.prestosql.spi.expression.ConnectorExpression; +import io.prestosql.spi.expression.FieldDereference; +import io.prestosql.spi.expression.Variable; import io.prestosql.spi.predicate.Domain; import io.prestosql.spi.predicate.NullableValue; import io.prestosql.spi.predicate.Range; @@ -141,6 +146,7 @@ import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Function; import java.util.stream.IntStream; import java.util.stream.LongStream; @@ -2962,6 +2968,147 @@ protected void testPartitionStatisticsSampling(List columns, Par } } + @Test + public void testApplyProjection() + throws Exception + { + ColumnMetadata bigIntColumn0 = new ColumnMetadata("int0", BIGINT); + ColumnMetadata bigIntColumn1 = new ColumnMetadata("int1", BIGINT); + + RowType oneLevelRowType = toRowType(ImmutableList.of(bigIntColumn0, bigIntColumn1)); + ColumnMetadata oneLevelRow0 = new ColumnMetadata("onelevelrow0", oneLevelRowType); + + RowType twoLevelRowType = toRowType(ImmutableList.of(oneLevelRow0, bigIntColumn0, bigIntColumn1)); + ColumnMetadata twoLevelRow0 = new ColumnMetadata("twolevelrow0", twoLevelRowType); + + List columnsForApplyProjectionTest = ImmutableList.of(bigIntColumn0, bigIntColumn1, oneLevelRow0, twoLevelRow0); + + SchemaTableName tableName = temporaryTable("apply_projection_tester"); + 
doCreateEmptyTable(tableName, ORC, columnsForApplyProjectionTest); + + try (Transaction transaction = newTransaction()) { + ConnectorSession session = newSession(); + ConnectorMetadata metadata = transaction.getMetadata(); + ConnectorTableHandle tableHandle = getTableHandle(metadata, tableName); + + List columnHandles = metadata.getColumnHandles(session, tableHandle).values().stream() + .filter(columnHandle -> !((HiveColumnHandle) columnHandle).isHidden()) + .collect(toList()); + assertEquals(columnHandles.size(), columnsForApplyProjectionTest.size()); + + Map columnHandleMap = columnHandles.stream() + .collect(toImmutableMap(handle -> ((HiveColumnHandle) handle).getBaseColumnName(), Function.identity())); + + // Emulate symbols coming from the query plan and map them to column handles + Map columnHandlesWithSymbols = ImmutableMap.of( + "symbol_0", columnHandleMap.get("int0"), + "symbol_1", columnHandleMap.get("int1"), + "symbol_2", columnHandleMap.get("onelevelrow0"), + "symbol_3", columnHandleMap.get("twolevelrow0")); + + // Create variables for the emulated symbols + Map symbolVariableMapping = columnHandlesWithSymbols.entrySet().stream() + .collect(toImmutableMap( + e -> e.getKey(), + e -> new Variable( + e.getKey(), + ((HiveColumnHandle) e.getValue()).getBaseType()))); + + // Create dereference expressions for testing + FieldDereference symbol2Field0 = new FieldDereference(BIGINT, symbolVariableMapping.get("symbol_2"), 0); + FieldDereference symbol3Field0 = new FieldDereference(oneLevelRowType, symbolVariableMapping.get("symbol_3"), 0); + FieldDereference symbol3Field0Field0 = new FieldDereference(BIGINT, symbol3Field0, 0); + FieldDereference symbol3Field1 = new FieldDereference(BIGINT, symbolVariableMapping.get("symbol_3"), 1); + + Map inputAssignments; + List inputProjections; + Optional> projectionResult; + List expectedProjections; + Map expectedAssignments; + + // Test no projection pushdown in case of all variable references + inputAssignments = 
getColumnHandlesFor(columnHandlesWithSymbols, ImmutableList.of("symbol_0", "symbol_1")); + inputProjections = ImmutableList.of(symbolVariableMapping.get("symbol_0"), symbolVariableMapping.get("symbol_1")); + projectionResult = metadata.applyProjection(session, tableHandle, inputProjections, inputAssignments); + assertProjectionResult(projectionResult, true, ImmutableList.of(), ImmutableMap.of()); + + // Test projection pushdown for dereferences + inputAssignments = getColumnHandlesFor(columnHandlesWithSymbols, ImmutableList.of("symbol_2", "symbol_3")); + inputProjections = ImmutableList.of(symbol2Field0, symbol3Field0Field0, symbol3Field1); + expectedAssignments = ImmutableMap.of( + "onelevelrow0#f_int0", BIGINT, + "twolevelrow0#f_onelevelrow0#f_int0", BIGINT, + "twolevelrow0#f_int0", BIGINT); + expectedProjections = ImmutableList.of( + new Variable("onelevelrow0#f_int0", BIGINT), + new Variable("twolevelrow0#f_onelevelrow0#f_int0", BIGINT), + new Variable("twolevelrow0#f_int0", BIGINT)); + projectionResult = metadata.applyProjection(session, tableHandle, inputProjections, inputAssignments); + assertProjectionResult(projectionResult, false, expectedProjections, expectedAssignments); + + // Test reuse of virtual column handles + // Round-1: input projections [symbol_2, symbol_2.int0]. virtual handle is created for symbol_2.int0. 
+ inputAssignments = getColumnHandlesFor(columnHandlesWithSymbols, ImmutableList.of("symbol_2")); + inputProjections = ImmutableList.of(symbol2Field0, symbolVariableMapping.get("symbol_2")); + projectionResult = metadata.applyProjection(session, tableHandle, inputProjections, inputAssignments); + expectedProjections = ImmutableList.of(new Variable("onelevelrow0#f_int0", BIGINT), symbolVariableMapping.get("symbol_2")); + expectedAssignments = ImmutableMap.of("onelevelrow0#f_int0", BIGINT, "symbol_2", oneLevelRowType); + assertProjectionResult(projectionResult, false, expectedProjections, expectedAssignments); + + // Round-2: input projections [symbol_2.int0 and onelevelrow0#f_int0]. Virtual handle is reused. + ProjectionApplicationResult.Assignment newlyCreatedColumn = getOnlyElement(projectionResult.get().getAssignments().stream() + .filter(handle -> handle.getVariable().equals("onelevelrow0#f_int0")) + .collect(toList())); + inputAssignments = ImmutableMap.builder() + .putAll(getColumnHandlesFor(columnHandlesWithSymbols, ImmutableList.of("symbol_2"))) + .put(newlyCreatedColumn.getVariable(), newlyCreatedColumn.getColumn()) + .build(); + inputProjections = ImmutableList.of(symbol2Field0, new Variable("onelevelrow0#f_int0", BIGINT)); + projectionResult = metadata.applyProjection(session, tableHandle, inputProjections, inputAssignments); + expectedProjections = ImmutableList.of(new Variable("onelevelrow0#f_int0", BIGINT), new Variable("onelevelrow0#f_int0", BIGINT)); + expectedAssignments = ImmutableMap.of("onelevelrow0#f_int0", BIGINT); + assertProjectionResult(projectionResult, false, expectedProjections, expectedAssignments); + } + finally { + dropTable(tableName); + } + } + + private static Map getColumnHandlesFor(Map columnHandles, List symbols) + { + return columnHandles.entrySet().stream() + .filter(e -> symbols.contains(e.getKey())) + .collect(toImmutableMap(Map.Entry::getKey, Map.Entry::getValue)); + } + + private static void assertProjectionResult(Optional> 
projectionResult, boolean shouldBeEmpty, List expectedProjections, Map expectedAssignments) + { + if (shouldBeEmpty) { + assertTrue(!projectionResult.isPresent(), "expected projectionResult to be empty"); + return; + } + + assertTrue(projectionResult.isPresent(), "expected non-empty projection result"); + + ProjectionApplicationResult result = projectionResult.get(); + + // Verify projections + assertEquals(expectedProjections, result.getProjections()); + + // Verify assignments + List assignments = result.getAssignments(); + Map actualAssignments = uniqueIndex(assignments, Assignment::getVariable); + + for (String variable : expectedAssignments.keySet()) { + Type expectedType = expectedAssignments.get(variable); + assertTrue(actualAssignments.containsKey(variable)); + assertEquals(actualAssignments.get(variable).getType(), expectedType); + assertEquals(((HiveColumnHandle) actualAssignments.get(variable).getColumn()).getType(), expectedType); + } + + assertEquals(actualAssignments.size(), expectedAssignments.size()); + } + private ConnectorSession sampleSize(int sampleSize) { return getHiveSession(getHiveConfig() diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveApplyProjectionUtil.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveApplyProjectionUtil.java new file mode 100644 index 000000000000..55fe7dfb83d1 --- /dev/null +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveApplyProjectionUtil.java @@ -0,0 +1,66 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.plugin.hive; + +import com.google.common.collect.ImmutableList; +import io.prestosql.spi.expression.ConnectorExpression; +import io.prestosql.spi.expression.Constant; +import io.prestosql.spi.expression.FieldDereference; +import io.prestosql.spi.expression.Variable; +import org.testng.annotations.Test; + +import static io.prestosql.plugin.hive.HiveApplyProjectionUtil.extractSupportedProjectedColumns; +import static io.prestosql.plugin.hive.HiveApplyProjectionUtil.isPushDownSupported; +import static io.prestosql.spi.type.IntegerType.INTEGER; +import static io.prestosql.spi.type.RowType.field; +import static io.prestosql.spi.type.RowType.rowType; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertFalse; +import static org.testng.Assert.assertTrue; + +public class TestHiveApplyProjectionUtil +{ + private static final ConnectorExpression ROW_OF_ROW_VARIABLE = new Variable("a", rowType(field("b", rowType(field("c", INTEGER))))); + + private static final ConnectorExpression ONE_LEVEL_DEREFERENCE = new FieldDereference( + rowType(field("c", INTEGER)), + ROW_OF_ROW_VARIABLE, + 0); + + private static final ConnectorExpression TWO_LEVEL_DEREFERENCE = new FieldDereference( + INTEGER, + ONE_LEVEL_DEREFERENCE, + 0); + + private static final ConnectorExpression INT_VARIABLE = new Variable("a", INTEGER); + private static final ConnectorExpression CONSTANT = new Constant(5, INTEGER); + + @Test + public void testIsProjectionSupported() + { + assertTrue(isPushDownSupported(ONE_LEVEL_DEREFERENCE)); + assertTrue(isPushDownSupported(TWO_LEVEL_DEREFERENCE)); + assertTrue(isPushDownSupported(INT_VARIABLE)); + assertFalse(isPushDownSupported(CONSTANT)); + } + + @Test + public void testExtractSupportedProjectionColumns() + { + assertEquals(extractSupportedProjectedColumns(ONE_LEVEL_DEREFERENCE), 
ImmutableList.of(ONE_LEVEL_DEREFERENCE)); + assertEquals(extractSupportedProjectedColumns(TWO_LEVEL_DEREFERENCE), ImmutableList.of(TWO_LEVEL_DEREFERENCE)); + assertEquals(extractSupportedProjectedColumns(INT_VARIABLE), ImmutableList.of(INT_VARIABLE)); + assertEquals(extractSupportedProjectedColumns(CONSTANT), ImmutableList.of()); + } +} From 2b10b5915169028c765601bb09a43db724659a6b Mon Sep 17 00:00:00 2001 From: Pratham Desai Date: Mon, 27 Jan 2020 14:27:09 -0800 Subject: [PATCH 103/519] Add dereference pushdown integration tests for all hive formats --- .../hive/TestHiveIntegrationSmokeTest.java | 28 +++++++++++++++---- 1 file changed, 23 insertions(+), 5 deletions(-) diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java index eb82cd320658..3bd296c184a5 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java @@ -2916,12 +2916,30 @@ public void testMaps() } @Test - public void testRows() + public void testRowsWithAllFormats() { - assertUpdate("CREATE TABLE tmp_row1 AS SELECT cast(row(CAST(1 as BIGINT), CAST(NULL as BIGINT)) AS row(col0 bigint, col1 bigint)) AS a", 1); - assertQuery( - "SELECT a.col0, a.col1 FROM tmp_row1", - "SELECT 1, cast(null as bigint)"); + testWithAllStorageFormats(this::testRows); + } + + private void testRows(Session session, HiveStorageFormat format) + { + String tableName = "test_dereferences"; + @Language("SQL") String createTable = "" + + "CREATE TABLE " + tableName + + " WITH (" + + "format = '" + format + "'" + + ") " + + "AS SELECT " + + "CAST(row(CAST(1 as BIGINT), CAST(NULL as BIGINT)) AS row(col0 bigint, col1 bigint)) AS a, " + + "CAST(row(row(CAST('abc' as VARCHAR), CAST(5 as BIGINT)), CAST(3.0 AS DOUBLE)) AS row(field0 row(col0 varchar, col1 bigint), field1 double)) AS 
b"; + + assertUpdate(session, createTable, 1); + + assertQuery(session, + "SELECT a.col0, a.col1, b.field0.col0, b.field0.col1, b.field1 FROM " + tableName, + "SELECT 1, cast(null as bigint), CAST('abc' as VARCHAR), CAST(5 as BIGINT), CAST(3.0 AS DOUBLE)"); + + assertUpdate(session, "DROP TABLE " + tableName); } @Test From 6b30ac2e2b8ec0937f3ecff188f86b9b49e86318 Mon Sep 17 00:00:00 2001 From: Pratham Desai Date: Mon, 27 Jan 2020 14:27:10 -0800 Subject: [PATCH 104/519] Add projectSufficientColumns method to ReaderProjections This method enables creating only the minimally required columns for reading a set of column handles". e.g. for hive columns ["a.b", "a", "c"], this method will create projections using columns "a" and "c", since "a.b" can be projected from "a". --- .../plugin/hive/ReaderProjections.java | 116 ++++++++++++++++++ .../plugin/hive/TestReaderProjections.java | 40 +++++- 2 files changed, 155 insertions(+), 1 deletion(-) diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/ReaderProjections.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/ReaderProjections.java index 3c2bb6b36dd4..a910602f3037 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/ReaderProjections.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/ReaderProjections.java @@ -13,14 +13,19 @@ */ package io.prestosql.plugin.hive; +import com.google.common.collect.BiMap; +import com.google.common.collect.ImmutableBiMap; import com.google.common.collect.ImmutableList; +import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Optional; import static com.google.common.base.Preconditions.checkArgument; +import static com.google.common.base.Preconditions.checkState; import static java.util.Objects.requireNonNull; /** @@ -103,4 +108,115 @@ public static Optional projectBaseColumns(List projectSufficientColumns(List columns) + { + requireNonNull(columns, "columns is 
null"); + + if (columns.stream().allMatch(HiveColumnHandle::isBaseColumn)) { + return Optional.empty(); + } + + ImmutableBiMap.Builder dereferenceChainsBuilder = ImmutableBiMap.builder(); + + for (HiveColumnHandle column : columns) { + List indices = column.getHiveColumnProjectionInfo() + .map(HiveColumnProjectionInfo::getDereferenceIndices) + .orElse(ImmutableList.of()); + + DereferenceChain dereferenceChain = new DereferenceChain(column.getBaseColumnName(), indices); + dereferenceChainsBuilder.put(dereferenceChain, column); + } + + BiMap dereferenceChains = dereferenceChainsBuilder.build(); + + List sufficientColumns = new ArrayList<>(); + ImmutableList.Builder outputColumnMapping = ImmutableList.builder(); + + Map pickedColumns = new HashMap<>(); + + // Pick a covering column for every column + for (HiveColumnHandle columnHandle : columns) { + DereferenceChain column = dereferenceChains.inverse().get(columnHandle); + List orderedPrefixes = column.getOrderedPrefixes(); + DereferenceChain chosenColumn = null; + + // Shortest existing prefix is chosen as the input. 
+ for (DereferenceChain prefix : orderedPrefixes) { + if (dereferenceChains.containsKey(prefix)) { + chosenColumn = prefix; + break; + } + } + + checkState(chosenColumn != null, "chosenColumn is null"); + int inputBlockIndex; + + if (pickedColumns.containsKey(chosenColumn)) { + // Use already picked column + inputBlockIndex = pickedColumns.get(chosenColumn); + } + else { + // Add a new column for the reader + sufficientColumns.add(dereferenceChains.get(chosenColumn)); + pickedColumns.put(chosenColumn, sufficientColumns.size() - 1); + inputBlockIndex = sufficientColumns.size() - 1; + } + + outputColumnMapping.add(inputBlockIndex); + } + + return Optional.of(new ReaderProjections(sufficientColumns, outputColumnMapping.build())); + } + + private static class DereferenceChain + { + private final String name; + private final List indices; + + public DereferenceChain(String name, List indices) + { + this.name = requireNonNull(name, "name is null"); + this.indices = ImmutableList.copyOf(requireNonNull(indices, "indices is null")); + } + + @Override + public boolean equals(Object o) + { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + DereferenceChain that = (DereferenceChain) o; + return Objects.equals(name, that.name) && + Objects.equals(indices, that.indices); + } + + @Override + public int hashCode() + { + return Objects.hash(name, indices); + } + + /** + * Get Prefixes of this Dereference chain in increasing order of lengths + */ + public List getOrderedPrefixes() + { + ImmutableList.Builder prefixes = ImmutableList.builder(); + + for (int prefixLen = 0; prefixLen <= indices.size(); prefixLen++) { + prefixes.add(new DereferenceChain(name, indices.subList(0, prefixLen))); + } + + return prefixes.build(); + } + } } diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestReaderProjections.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestReaderProjections.java index 
72f187c33274..69cd8876f7c3 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestReaderProjections.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestReaderProjections.java @@ -24,6 +24,7 @@ import java.util.Optional; import static io.prestosql.plugin.hive.ReaderProjections.projectBaseColumns; +import static io.prestosql.plugin.hive.ReaderProjections.projectSufficientColumns; import static io.prestosql.plugin.hive.TestHiveReaderProjectionsUtil.ROWTYPE_OF_PRIMITIVES; import static io.prestosql.plugin.hive.TestHiveReaderProjectionsUtil.ROWTYPE_OF_ROW_AND_PRIMITIVES; import static io.prestosql.plugin.hive.TestHiveReaderProjectionsUtil.createProjectedColumnHandle; @@ -55,7 +56,12 @@ public class TestReaderProjections public void testNoProjections() { List columns = new ArrayList<>(TEST_FULL_COLUMNS.values()); - Optional mapping = projectBaseColumns(columns); + Optional mapping; + + mapping = projectBaseColumns(columns); + assertTrue(!mapping.isPresent(), "Full columns should not require any adaptation"); + + mapping = projectSufficientColumns(columns); assertTrue(!mapping.isPresent(), "Full columns should not require any adaptation"); } @@ -82,4 +88,36 @@ public void testBaseColumnsProjection() assertEquals(readerColumns.get(readerIndex), readerColumn); } } + + @Test + public void testProjectSufficientColumns() + { + List columns = ImmutableList.of( + createProjectedColumnHandle(TEST_FULL_COLUMNS.get("col_struct_of_primitives"), ImmutableList.of(0)), + createProjectedColumnHandle(TEST_FULL_COLUMNS.get("col_struct_of_primitives"), ImmutableList.of(1)), + createProjectedColumnHandle(TEST_FULL_COLUMNS.get("col_bigint"), ImmutableList.of()), + createProjectedColumnHandle(TEST_FULL_COLUMNS.get("col_struct_of_non_primitives"), ImmutableList.of(0, 1)), + createProjectedColumnHandle(TEST_FULL_COLUMNS.get("col_struct_of_non_primitives"), ImmutableList.of(0))); + + Optional readerProjections = projectSufficientColumns(columns); + 
assertTrue(readerProjections.isPresent(), "expected readerProjections to be present"); + + assertEquals(readerProjections.get().readerColumnForHiveColumnAt(0), columns.get(0)); + assertEquals(readerProjections.get().readerColumnForHiveColumnAt(1), columns.get(1)); + assertEquals(readerProjections.get().readerColumnForHiveColumnAt(2), columns.get(2)); + assertEquals(readerProjections.get().readerColumnForHiveColumnAt(3), columns.get(4)); + assertEquals(readerProjections.get().readerColumnForHiveColumnAt(4), columns.get(4)); + + assertEquals(readerProjections.get().readerColumnPositionForHiveColumnAt(0), 0); + assertEquals(readerProjections.get().readerColumnPositionForHiveColumnAt(1), 1); + assertEquals(readerProjections.get().readerColumnPositionForHiveColumnAt(2), 2); + assertEquals(readerProjections.get().readerColumnPositionForHiveColumnAt(3), 3); + assertEquals(readerProjections.get().readerColumnPositionForHiveColumnAt(4), 3); + + List readerColumns = readerProjections.get().getReaderColumns(); + assertEquals(readerColumns.get(0), columns.get(0)); + assertEquals(readerColumns.get(1), columns.get(1)); + assertEquals(readerColumns.get(2), columns.get(2)); + assertEquals(readerColumns.get(3), columns.get(4)); + } } From 265d225dcafc9e1b4fd273d0ff84999811501af6 Mon Sep 17 00:00:00 2001 From: Pratham Desai Date: Mon, 27 Jan 2020 14:27:11 -0800 Subject: [PATCH 105/519] Create uniquely named test columns in TestOrcPageSourceMemoryTracking --- .../prestosql/plugin/hive/TestOrcPageSourceMemoryTracking.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestOrcPageSourceMemoryTracking.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestOrcPageSourceMemoryTracking.java index a2901ffd854a..49221816fdb7 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestOrcPageSourceMemoryTracking.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestOrcPageSourceMemoryTracking.java 
@@ -274,7 +274,7 @@ public void testMaxReadBytes(int rowCount) .add(new TestColumn("p_empty_string", javaStringObjectInspector, () -> "", true)); GrowingTestColumn[] dataColumns = new GrowingTestColumn[numColumns]; for (int i = 0; i < numColumns; i++) { - dataColumns[i] = new GrowingTestColumn("p_string", javaStringObjectInspector, () -> Long.toHexString(random.nextLong()), false, step * (i + 1)); + dataColumns[i] = new GrowingTestColumn("p_string" + "_" + i, javaStringObjectInspector, () -> Long.toHexString(random.nextLong()), false, step * (i + 1)); columnBuilder.add(dataColumns[i]); } List testColumns = columnBuilder.build(); From 6a112e221575532c2ecea9a01e31bf63ea9c0745 Mon Sep 17 00:00:00 2001 From: Pratham Desai Date: Mon, 27 Jan 2020 14:27:14 -0800 Subject: [PATCH 106/519] Add plan and rule test for Hive Projection Pushdown --- .../hive/TestHiveReaderProjectionsUtil.java | 2 +- ...stHiveProjectionPushdownIntoTableScan.java | 130 +++++++++++++ .../TestPushProjectionRuleWithHive.java | 176 ++++++++++++++++++ 3 files changed, 307 insertions(+), 1 deletion(-) create mode 100644 presto-hive/src/test/java/io/prestosql/plugin/hive/optimizer/TestHiveProjectionPushdownIntoTableScan.java create mode 100644 presto-hive/src/test/java/io/prestosql/plugin/hive/optimizer/TestPushProjectionRuleWithHive.java diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveReaderProjectionsUtil.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveReaderProjectionsUtil.java index 3e20795b28ac..254bd0772db8 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveReaderProjectionsUtil.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveReaderProjectionsUtil.java @@ -66,7 +66,7 @@ public static Map createTestFullColumns(List n return hiveColumns.build(); } - static HiveColumnHandle createProjectedColumnHandle(HiveColumnHandle column, List indices) + public static HiveColumnHandle createProjectedColumnHandle(HiveColumnHandle 
column, List indices) { checkArgument(column.isBaseColumn(), "base column is expected here"); diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/optimizer/TestHiveProjectionPushdownIntoTableScan.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/optimizer/TestHiveProjectionPushdownIntoTableScan.java new file mode 100644 index 000000000000..b5554e85d372 --- /dev/null +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/optimizer/TestHiveProjectionPushdownIntoTableScan.java @@ -0,0 +1,130 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.plugin.hive.optimizer; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.google.common.io.Files; +import io.prestosql.Session; +import io.prestosql.metadata.QualifiedObjectName; +import io.prestosql.metadata.TableHandle; +import io.prestosql.plugin.hive.HdfsConfig; +import io.prestosql.plugin.hive.HdfsConfiguration; +import io.prestosql.plugin.hive.HdfsConfigurationInitializer; +import io.prestosql.plugin.hive.HdfsEnvironment; +import io.prestosql.plugin.hive.HiveColumnHandle; +import io.prestosql.plugin.hive.HiveHdfsConfiguration; +import io.prestosql.plugin.hive.authentication.HiveIdentity; +import io.prestosql.plugin.hive.authentication.NoHdfsAuthentication; +import io.prestosql.plugin.hive.metastore.Database; +import io.prestosql.plugin.hive.metastore.HiveMetastore; +import io.prestosql.plugin.hive.metastore.file.FileHiveMetastore; +import io.prestosql.plugin.hive.testing.TestingHiveConnectorFactory; +import io.prestosql.spi.connector.ColumnHandle; +import io.prestosql.spi.predicate.TupleDomain; +import io.prestosql.spi.security.PrincipalType; +import io.prestosql.sql.planner.assertions.BasePushdownPlanTest; +import io.prestosql.testing.LocalQueryRunner; +import org.testng.annotations.AfterClass; +import org.testng.annotations.Test; + +import java.io.File; +import java.util.Map; +import java.util.Optional; + +import static com.google.common.base.Predicates.equalTo; +import static com.google.common.io.MoreFiles.deleteRecursively; +import static com.google.common.io.RecursiveDeleteOption.ALLOW_INSECURE; +import static io.prestosql.plugin.hive.TestHiveReaderProjectionsUtil.createProjectedColumnHandle; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.any; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.tableScan; +import static io.prestosql.testing.TestingSession.testSessionBuilder; +import 
static org.testng.Assert.assertTrue; + +public class TestHiveProjectionPushdownIntoTableScan + extends BasePushdownPlanTest +{ + private static final String HIVE_CATALOG_NAME = "hive"; + private static final String SCHEMA_NAME = "test_schema"; + + private static final Session HIVE_SESSION = testSessionBuilder() + .setCatalog(HIVE_CATALOG_NAME) + .setSchema(SCHEMA_NAME) + .build(); + + private File baseDir; + + @Override + protected LocalQueryRunner createLocalQueryRunner() + { + baseDir = Files.createTempDir(); + HdfsConfig config = new HdfsConfig(); + HdfsConfiguration configuration = new HiveHdfsConfiguration(new HdfsConfigurationInitializer(config), ImmutableSet.of()); + HdfsEnvironment environment = new HdfsEnvironment(configuration, config, new NoHdfsAuthentication()); + + HiveMetastore metastore = new FileHiveMetastore(environment, baseDir.toURI().toString(), "test"); + Database database = Database.builder() + .setDatabaseName(SCHEMA_NAME) + .setOwnerName("public") + .setOwnerType(PrincipalType.ROLE) + .build(); + + metastore.createDatabase(new HiveIdentity(HIVE_SESSION.toConnectorSession()), database); + + LocalQueryRunner queryRunner = LocalQueryRunner.create(HIVE_SESSION); + queryRunner.createCatalog(HIVE_CATALOG_NAME, new TestingHiveConnectorFactory(metastore), ImmutableMap.of()); + + return queryRunner; + } + + @Test + public void testProjectionPushdown() + { + String testTable = "test_simple_projection_pushdown"; + QualifiedObjectName completeTableName = new QualifiedObjectName(HIVE_CATALOG_NAME, SCHEMA_NAME, testTable); + + String tableName = HIVE_CATALOG_NAME + "." + SCHEMA_NAME + "." 
+ testTable; + getQueryRunner().execute("CREATE TABLE " + tableName + " " + "(col0) AS" + + " SELECT cast(row(5, 6) as row(a bigint, b bigint)) as col0 where false"); + + Session session = getQueryRunner().getDefaultSession(); + + Optional tableHandle = getTableHandle(session, completeTableName); + assertTrue(tableHandle.isPresent(), "expected the table handle to be present"); + + Map columns = getColumnHandles(session, completeTableName); + assertTrue(columns.containsKey("col0"), "expected column not found"); + + HiveColumnHandle baseColumnHandle = (HiveColumnHandle) columns.get("col0"); + + assertPlan( + "SELECT col0.a expr_a, col0.b expr_b FROM " + tableName, + any(tableScan( + equalTo(tableHandle.get().getConnectorHandle()), + TupleDomain.all(), + ImmutableMap.of( + "col0#a", equalTo(createProjectedColumnHandle(baseColumnHandle, ImmutableList.of(0))), + "col0#b", equalTo(createProjectedColumnHandle(baseColumnHandle, ImmutableList.of(1))))))); + } + + @AfterClass(alwaysRun = true) + public void cleanup() + throws Exception + { + if (baseDir != null) { + deleteRecursively(baseDir.toPath(), ALLOW_INSECURE); + } + } +} diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/optimizer/TestPushProjectionRuleWithHive.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/optimizer/TestPushProjectionRuleWithHive.java new file mode 100644 index 000000000000..3287f442817a --- /dev/null +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/optimizer/TestPushProjectionRuleWithHive.java @@ -0,0 +1,176 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.plugin.hive.optimizer; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.google.common.io.Files; +import io.prestosql.Session; +import io.prestosql.connector.CatalogName; +import io.prestosql.metadata.TableHandle; +import io.prestosql.plugin.hive.HdfsConfig; +import io.prestosql.plugin.hive.HdfsConfiguration; +import io.prestosql.plugin.hive.HdfsConfigurationInitializer; +import io.prestosql.plugin.hive.HdfsEnvironment; +import io.prestosql.plugin.hive.HiveColumnHandle; +import io.prestosql.plugin.hive.HiveColumnProjectionInfo; +import io.prestosql.plugin.hive.HiveHdfsConfiguration; +import io.prestosql.plugin.hive.HiveTableHandle; +import io.prestosql.plugin.hive.HiveTransactionHandle; +import io.prestosql.plugin.hive.HiveTypeTranslator; +import io.prestosql.plugin.hive.authentication.HiveIdentity; +import io.prestosql.plugin.hive.authentication.NoHdfsAuthentication; +import io.prestosql.plugin.hive.metastore.Database; +import io.prestosql.plugin.hive.metastore.HiveMetastore; +import io.prestosql.plugin.hive.metastore.file.FileHiveMetastore; +import io.prestosql.plugin.hive.testing.TestingHiveConnectorFactory; +import io.prestosql.spi.predicate.TupleDomain; +import io.prestosql.spi.security.PrincipalType; +import io.prestosql.spi.type.RowType; +import io.prestosql.spi.type.Type; +import io.prestosql.sql.planner.iterative.rule.PushProjectionIntoTableScan; +import io.prestosql.sql.planner.iterative.rule.test.BaseRuleTest; +import io.prestosql.sql.planner.plan.Assignments; +import io.prestosql.sql.tree.DereferenceExpression; +import io.prestosql.sql.tree.Identifier; +import io.prestosql.sql.tree.SymbolReference; +import io.prestosql.testing.LocalQueryRunner; +import org.testng.annotations.AfterClass; +import 
org.testng.annotations.Test; + +import java.io.File; +import java.io.IOException; +import java.util.Optional; + +import static com.google.common.base.Predicates.equalTo; +import static com.google.common.io.MoreFiles.deleteRecursively; +import static com.google.common.io.RecursiveDeleteOption.ALLOW_INSECURE; +import static io.prestosql.plugin.hive.HiveType.toHiveType; +import static io.prestosql.spi.type.BigintType.BIGINT; +import static io.prestosql.spi.type.RowType.field; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.expression; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.project; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.tableScan; +import static io.prestosql.testing.TestingConnectorSession.SESSION; +import static io.prestosql.testing.TestingSession.testSessionBuilder; +import static java.util.Arrays.asList; + +public class TestPushProjectionRuleWithHive + extends BaseRuleTest +{ + private static final String HIVE_CATALOG_NAME = "hive"; + private static final String SCHEMA_NAME = "test_schema"; + private static final String TABLE_NAME = "test_table"; + + private static final Type ROW_TYPE = RowType.from(asList(field("a", BIGINT), field("b", BIGINT))); + + private File baseDir; + + private static final Session HIVE_SESSION = testSessionBuilder() + .setCatalog(HIVE_CATALOG_NAME) + .setSchema(SCHEMA_NAME) + .build(); + + @Override + protected Optional createLocalQueryRunner() + { + baseDir = Files.createTempDir(); + HdfsConfig config = new HdfsConfig(); + HdfsConfiguration configuration = new HiveHdfsConfiguration(new HdfsConfigurationInitializer(config), ImmutableSet.of()); + HdfsEnvironment environment = new HdfsEnvironment(configuration, config, new NoHdfsAuthentication()); + + HiveMetastore metastore = new FileHiveMetastore(environment, baseDir.toURI().toString(), "test"); + Database database = Database.builder() + .setDatabaseName(SCHEMA_NAME) + .setOwnerName("public") + 
.setOwnerType(PrincipalType.ROLE) + .build(); + + metastore.createDatabase(new HiveIdentity(SESSION), database); + + LocalQueryRunner queryRunner = LocalQueryRunner.create(HIVE_SESSION); + queryRunner.createCatalog(HIVE_CATALOG_NAME, new TestingHiveConnectorFactory(metastore), ImmutableMap.of()); + + return Optional.of(queryRunner); + } + + @Test + public void testProjectionPushdown() + { + PushProjectionIntoTableScan pushProjectionIntoTableScan = new PushProjectionIntoTableScan(tester().getMetadata(), tester().getTypeAnalyzer()); + + tester().getQueryRunner().execute("CREATE TABLE " + TABLE_NAME + "(struct_of_int) AS " + + "SELECT cast(row(5, 6) as row(a bigint, b bigint)) as struct_of_int where false"); + + Type baseType = ROW_TYPE; + + HiveColumnHandle partialColumn = new HiveColumnHandle( + "struct_of_int", + 0, + toHiveType(new HiveTypeTranslator(), baseType), + baseType, + Optional.of(new HiveColumnProjectionInfo( + ImmutableList.of(0), + ImmutableList.of("a"), + toHiveType(new HiveTypeTranslator(), BIGINT), + BIGINT)), + HiveColumnHandle.ColumnType.REGULAR, + Optional.empty()); + + HiveTableHandle hiveTable = new HiveTableHandle(SCHEMA_NAME, TABLE_NAME, ImmutableMap.of(), ImmutableList.of(), Optional.empty()); + TableHandle table = new TableHandle(new CatalogName(HIVE_CATALOG_NAME), hiveTable, new HiveTransactionHandle(), Optional.empty()); + + HiveColumnHandle fullColumn = partialColumn.getBaseColumn(); + + // Test No pushdown in case of full column references + tester().assertThat(pushProjectionIntoTableScan) + .on(p -> + p.project( + Assignments.of(p.symbol("struct_of_int", baseType), p.symbol("struct_of_int", baseType).toSymbolReference()), + p.tableScan( + table, + ImmutableList.of(p.symbol("struct_of_int", baseType)), + ImmutableMap.of(p.symbol("struct_of_int", baseType), fullColumn)))) + .withSession(HIVE_SESSION) + .doesNotFire(); + + // Test Dereference pushdown + tester().assertThat(pushProjectionIntoTableScan) + .on(p -> + p.project( + 
Assignments.of( + p.symbol("expr_deref", BIGINT), new DereferenceExpression(p.symbol("struct_of_int", baseType).toSymbolReference(), new Identifier("a"))), + p.tableScan( + table, + ImmutableList.of(p.symbol("struct_of_int", baseType)), + ImmutableMap.of(p.symbol("struct_of_int", baseType), fullColumn)))) + .withSession(HIVE_SESSION) + .matches(project( + ImmutableMap.of("expr_deref", expression(new SymbolReference("struct_of_int#a"))), + tableScan( + equalTo(table.getConnectorHandle()), + TupleDomain.all(), + ImmutableMap.of("struct_of_int#a", equalTo(partialColumn))))); + } + + @AfterClass(alwaysRun = true) + protected void cleanup() + throws IOException + { + if (baseDir != null) { + deleteRecursively(baseDir.toPath(), ALLOW_INSECURE); + } + } +} From 954aef392cc4317c9689b71e614daab2820e6602 Mon Sep 17 00:00:00 2001 From: Pratham Desai Date: Mon, 27 Jan 2020 14:27:15 -0800 Subject: [PATCH 107/519] Add tests for schema mismatch with dereference projections --- .../hive/TestHiveIntegrationSmokeTest.java | 75 +++++++++++++++++++ 1 file changed, 75 insertions(+) diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java index 3bd296c184a5..d04f3d8d8e25 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java @@ -4087,6 +4087,81 @@ public void testBucketFilteringByInPredicate() } } + @Test + public void schemaMismatchesWithDereferenceProjections() + { + for (TestingHiveStorageFormat format : getAllTestingHiveStorageFormat()) { + schemaMismatchesWithDereferenceProjections(format.getFormat()); + } + } + + private void schemaMismatchesWithDereferenceProjections(HiveStorageFormat format) + { + // Verify reordering of subfields between a partition column and a table column is not supported + // eg. 
table column: a row(c varchar, b bigint), partition column: a row(b bigint, c varchar) + try { + assertUpdate("CREATE TABLE evolve_test (dummy bigint, a row(b bigint, c varchar), d bigint) with (format = '" + format + "', partitioned_by=array['d'])"); + assertUpdate("INSERT INTO evolve_test values (1, row(1, 'abc'), 1)", 1); + assertUpdate("ALTER TABLE evolve_test DROP COLUMN a"); + assertUpdate("ALTER TABLE evolve_test ADD COLUMN a row(c varchar, b bigint)"); + assertUpdate("INSERT INTO evolve_test values (2, row('def', 2), 2)", 1); + assertQueryFails("SELECT a.b FROM evolve_test where d = 1", ".*There is a mismatch between the table and partition schemas.*"); + } + finally { + assertUpdate("DROP TABLE IF EXISTS evolve_test"); + } + + // Subfield absent in partition schema is reported as null + // i.e. "a.c" produces null for rows that were inserted before type of "a" was changed + try { + assertUpdate("CREATE TABLE evolve_test (dummy bigint, a row(b bigint), d bigint) with (format = '" + format + "', partitioned_by=array['d'])"); + assertUpdate("INSERT INTO evolve_test values (1, row(1), 1)", 1); + assertUpdate("ALTER TABLE evolve_test DROP COLUMN a"); + assertUpdate("ALTER TABLE evolve_test ADD COLUMN a row(b bigint, c varchar)"); + assertUpdate("INSERT INTO evolve_test values (2, row(2, 'def'), 2)", 1); + assertQuery("SELECT a.c FROM evolve_test", "SELECT 'def' UNION SELECT null"); + } + finally { + assertUpdate("DROP TABLE IF EXISTS evolve_test"); + } + } + + @Test + public void testSubfieldReordering() + { + // Validate for formats for which subfield access is name based + List formats = ImmutableList.of(HiveStorageFormat.ORC, HiveStorageFormat.PARQUET); + + for (HiveStorageFormat format : formats) { + // Subfields reordered in the file are read correctly. e.g. if partition column type is row(b bigint, c varchar) but the file + // column type is row(c varchar, b bigint), "a.b" should read the correct field from the file. 
+ try { + assertUpdate("CREATE TABLE evolve_test (dummy bigint, a row(b bigint, c varchar)) with (format = '" + format + "')"); + assertUpdate("INSERT INTO evolve_test values (1, row(1, 'abc'))", 1); + assertUpdate("ALTER TABLE evolve_test DROP COLUMN a"); + assertUpdate("ALTER TABLE evolve_test ADD COLUMN a row(c varchar, b bigint)"); + assertQuery("SELECT a.b FROM evolve_test", "VALUES 1"); + } + finally { + assertUpdate("DROP TABLE IF EXISTS evolve_test"); + } + + // Assert that reordered subfields are read correctly for a two-level nesting. This is useful for asserting correct adaptation + // of residue projections in HivePageSourceProvider + try { + assertUpdate("CREATE TABLE evolve_test (dummy bigint, a row(b bigint, c row(x bigint, y varchar))) with (format = '" + format + "')"); + assertUpdate("INSERT INTO evolve_test values (1, row(1, row(3, 'abc')))", 1); + assertUpdate("ALTER TABLE evolve_test DROP COLUMN a"); + assertUpdate("ALTER TABLE evolve_test ADD COLUMN a row(c row(y varchar, x bigint), b bigint)"); + // TODO: replace the following assertion with assertQuery once h2QueryRunner starts supporting row types + assertQuerySucceeds("SELECT a.c.y, a.c FROM evolve_test"); + } + finally { + assertUpdate("DROP TABLE IF EXISTS evolve_test"); + } + } + } + @Test public void testMismatchedBucketing() { From 83bc2a618e546032c3538705ddf8265d1cc7a986 Mon Sep 17 00:00:00 2001 From: Pratham Desai Date: Sat, 8 Feb 2020 02:33:35 -0800 Subject: [PATCH 108/519] Remove unused parameter in createTestFile method --- .../plugin/hive/TestOrcPageSourceMemoryTracking.java | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestOrcPageSourceMemoryTracking.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestOrcPageSourceMemoryTracking.java index 49221816fdb7..bb8d2a36ceb2 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestOrcPageSourceMemoryTracking.java +++ 
b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestOrcPageSourceMemoryTracking.java @@ -53,7 +53,6 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter; -import org.apache.hadoop.hive.ql.io.HiveOutputFormat; import org.apache.hadoop.hive.ql.io.orc.OrcFile; import org.apache.hadoop.hive.ql.io.orc.OrcFile.WriterOptions; import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat; @@ -461,7 +460,7 @@ public TestPreparer(String tempFilePath, List testColumns, int numRo columns = columnsBuilder.build(); types = typesBuilder.build(); - fileSplit = createTestFile(tempFilePath, new OrcOutputFormat(), serde, null, testColumns, numRows, stripeRows); + fileSplit = createTestFile(tempFilePath, serde, null, testColumns, numRows, stripeRows); } public ConnectorPageSource newPageSource() @@ -554,7 +553,6 @@ private DriverContext newDriverContext() public static FileSplit createTestFile( String filePath, - HiveOutputFormat outputFormat, Serializer serializer, String compressionCodec, List testColumns, From dc59397753dc569b49ee8940824929502b189821 Mon Sep 17 00:00:00 2001 From: Pratham Desai Date: Mon, 27 Jan 2020 14:27:03 -0800 Subject: [PATCH 109/519] Fix inaccurate test assertion message --- .../test/java/io/prestosql/plugin/hive/TestHiveFileFormats.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveFileFormats.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveFileFormats.java index 0582869ad504..38fd8e801aa2 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveFileFormats.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveFileFormats.java @@ -1192,7 +1192,7 @@ private void assertRead(Optional pageSourceFactory, Optio assertNotNull(writeColumns, "writeColumns must be specified"); assertNotNull(readColumns, "readColumns must be specified"); 
assertNotNull(session, "session must be specified"); - assertTrue(rowsCount >= 0, "rowsCount must be greater than zero"); + assertTrue(rowsCount >= 0, "rowsCount must be non-negative"); String compressionSuffix = compressionCodec.getCodec() .map(codec -> { From 1ab307ca57e173b5e77cfe28b4d4fb85497f3f8d Mon Sep 17 00:00:00 2001 From: David Phillips Date: Wed, 8 Apr 2020 19:10:25 -0700 Subject: [PATCH 110/519] Ensure OrcDataSink is closed when flush fails --- presto-orc/src/main/java/io/prestosql/orc/OrcDataSink.java | 3 +++ presto-orc/src/main/java/io/prestosql/orc/OrcWriter.java | 6 +++--- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/presto-orc/src/main/java/io/prestosql/orc/OrcDataSink.java b/presto-orc/src/main/java/io/prestosql/orc/OrcDataSink.java index c4689f3a0f30..5a7480453dbd 100644 --- a/presto-orc/src/main/java/io/prestosql/orc/OrcDataSink.java +++ b/presto-orc/src/main/java/io/prestosql/orc/OrcDataSink.java @@ -15,10 +15,12 @@ import io.prestosql.orc.stream.OrcDataOutput; +import java.io.Closeable; import java.io.IOException; import java.util.List; public interface OrcDataSink + extends Closeable { /** * Number of bytes written to this sink so far. 
@@ -39,6 +41,7 @@ void write(List outputData) /** * ORC file is complete */ + @Override void close() throws IOException; } diff --git a/presto-orc/src/main/java/io/prestosql/orc/OrcWriter.java b/presto-orc/src/main/java/io/prestosql/orc/OrcWriter.java index 4dfedc6fbccb..5315a8d4ec8f 100644 --- a/presto-orc/src/main/java/io/prestosql/orc/OrcWriter.java +++ b/presto-orc/src/main/java/io/prestosql/orc/OrcWriter.java @@ -445,9 +445,9 @@ public void close() stats.updateSizeInBytes(-previouslyRecordedSizeInBytes); previouslyRecordedSizeInBytes = 0; - flushStripe(CLOSED); - - orcDataSink.close(); + try (Closeable ignored = orcDataSink) { + flushStripe(CLOSED); + } } /** From 29f0abcad6f93820ae3102c949654aea4458f377 Mon Sep 17 00:00:00 2001 From: Martin Traverso Date: Fri, 27 Dec 2019 15:30:37 -0800 Subject: [PATCH 111/519] Fix planning failure when lambda expressions are repeated The planner was allocating different symbols for arguments of syntactically similar lambda expressions. During planning, it was rewriting the expressions based on those mappings, which resulted in expressions not being found in TranslationMap. This fix ensures that all lambda arguments with the same name and type share the same symbol so the rewrites of sematically equivalent lambda expressions (syntax + types) are equivalent to each other. 
--- .../sql/planner/ExpressionSymbolInliner.java | 15 ++-- .../prestosql/sql/planner/LogicalPlanner.java | 58 ++++++++++++-- .../sql/query/TestLambdaExpressions.java | 79 +++++++++++++++++++ 3 files changed, 137 insertions(+), 15 deletions(-) create mode 100644 presto-main/src/test/java/io/prestosql/sql/query/TestLambdaExpressions.java diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/ExpressionSymbolInliner.java b/presto-main/src/main/java/io/prestosql/sql/planner/ExpressionSymbolInliner.java index 2ad376cc744b..bc17b8f26d3d 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/ExpressionSymbolInliner.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/ExpressionSymbolInliner.java @@ -13,6 +13,8 @@ */ package io.prestosql.sql.planner; +import com.google.common.collect.HashMultiset; +import com.google.common.collect.Multiset; import io.prestosql.sql.tree.Expression; import io.prestosql.sql.tree.ExpressionRewriter; import io.prestosql.sql.tree.ExpressionTreeRewriter; @@ -20,13 +22,11 @@ import io.prestosql.sql.tree.LambdaExpression; import io.prestosql.sql.tree.SymbolReference; -import java.util.HashSet; import java.util.Map; -import java.util.Set; import java.util.function.Function; -import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; +import static com.google.common.base.Verify.verify; public final class ExpressionSymbolInliner { @@ -55,7 +55,7 @@ private Expression rewrite(Expression expression) private class Visitor extends ExpressionRewriter { - private final Set excludedNames = new HashSet<>(); + private final Multiset excludedNames = HashMultiset.create(); @Override public Expression rewriteSymbolReference(SymbolReference node, Void context, ExpressionTreeRewriter treeRewriter) @@ -73,14 +73,11 @@ public Expression rewriteSymbolReference(SymbolReference node, Void context, Exp public Expression rewriteLambdaExpression(LambdaExpression node, Void 
context, ExpressionTreeRewriter treeRewriter) { for (LambdaArgumentDeclaration argument : node.getArguments()) { - String argumentName = argument.getName().getValue(); - // Symbol names are unique. As a result, a symbol should never be excluded multiple times. - checkArgument(!excludedNames.contains(argumentName)); - excludedNames.add(argumentName); + excludedNames.add(argument.getName().getValue()); } Expression result = treeRewriter.defaultRewrite(node, context); for (LambdaArgumentDeclaration argument : node.getArguments()) { - excludedNames.remove(argument.getName().getValue()); + verify(excludedNames.remove(argument.getName().getValue())); } return result; } diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/LogicalPlanner.java b/presto-main/src/main/java/io/prestosql/sql/planner/LogicalPlanner.java index 0c328f3e1eb4..7c6cec7fa373 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/LogicalPlanner.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/LogicalPlanner.java @@ -85,10 +85,12 @@ import java.util.AbstractMap.SimpleImmutableEntry; import java.util.ArrayList; +import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; +import java.util.Objects; import java.util.Optional; import static com.google.common.base.Preconditions.checkState; @@ -596,17 +598,61 @@ private RelationPlan createRelationPlan(Analysis analysis, Query query) private static Map, Symbol> buildLambdaDeclarationToSymbolMap(Analysis analysis, SymbolAllocator symbolAllocator) { - Map, Symbol> resultMap = new LinkedHashMap<>(); + Map allocations = new HashMap<>(); + Map, Symbol> result = new LinkedHashMap<>(); + for (Entry, Type> entry : analysis.getTypes().entrySet()) { if (!(entry.getKey().getNode() instanceof LambdaArgumentDeclaration)) { continue; } - NodeRef lambdaArgumentDeclaration = NodeRef.of((LambdaArgumentDeclaration) entry.getKey().getNode()); - if 
(resultMap.containsKey(lambdaArgumentDeclaration)) { - continue; + + LambdaArgumentDeclaration argument = (LambdaArgumentDeclaration) entry.getKey().getNode(); + Key key = new Key(argument, entry.getValue()); + + // Allocate the same symbol for all lambda argument names with a given type. This is needed to be able to + // properly identify multiple instances of syntactically equal lambda expressions during planning as expressions + // get rewritten via TranslationMap + Symbol symbol = allocations.get(key); + if (symbol == null) { + symbol = symbolAllocator.newSymbol(argument, entry.getValue()); + allocations.put(key, symbol); } - resultMap.put(lambdaArgumentDeclaration, symbolAllocator.newSymbol(lambdaArgumentDeclaration.getNode(), entry.getValue())); + + result.put(NodeRef.of(argument), symbol); + } + + return result; + } + + private static class Key + { + private final LambdaArgumentDeclaration argument; + private final Type type; + + public Key(LambdaArgumentDeclaration argument, Type type) + { + this.argument = requireNonNull(argument, "argument is null"); + this.type = requireNonNull(type, "type is null"); + } + + @Override + public boolean equals(Object o) + { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Key key = (Key) o; + return Objects.equals(argument, key.argument) && + Objects.equals(type, key.type); + } + + @Override + public int hashCode() + { + return Objects.hash(argument, type); } - return resultMap; } } diff --git a/presto-main/src/test/java/io/prestosql/sql/query/TestLambdaExpressions.java b/presto-main/src/test/java/io/prestosql/sql/query/TestLambdaExpressions.java new file mode 100644 index 000000000000..d88feb2327d3 --- /dev/null +++ b/presto-main/src/test/java/io/prestosql/sql/query/TestLambdaExpressions.java @@ -0,0 +1,79 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.sql.query; + +import org.testng.annotations.AfterClass; +import org.testng.annotations.BeforeClass; +import org.testng.annotations.Test; + +public class TestLambdaExpressions +{ + private QueryAssertions assertions; + + @BeforeClass + public void init() + { + assertions = new QueryAssertions(); + } + + @AfterClass(alwaysRun = true) + public void teardown() + { + assertions.close(); + assertions = null; + } + + @Test + public void testDuplicateLambdaExpressions() + { + assertions.assertQuery("" + + "SELECT cardinality(filter(a, x -> x > 0)) " + + "FROM (VALUES " + + " ARRAY[1,2,3], " + + " ARRAY[0,1,2]," + + " ARRAY[0,0,0]" + + ") AS t(a) " + + "GROUP BY cardinality(filter(a, x -> x > 0))" + + "ORDER BY cardinality(filter(a, x -> x > 0))", + "VALUES BIGINT '0', BIGINT '2', BIGINT '3'"); + + // same type + assertions.assertQuery("" + + "SELECT transform(a, x -> x + 1), transform(b, x -> x + 1) " + + "FROM (VALUES ROW(ARRAY[1, 2, 3], ARRAY[10, 20, 30])) t(a, b)", + "VALUES ROW(ARRAY[2, 3, 4], ARRAY[11, 21, 31])"); + + // different type + assertions.assertQuery("" + + "SELECT transform(a, x -> x + 1), transform(b, x -> x + 1) " + + "FROM (VALUES ROW(ARRAY[1, 2, 3], ARRAY[10e0, 20e0, 30e0])) t(a, b)", + "VALUES ROW(ARRAY[2, 3, 4], ARRAY[11e0, 21e0, 31e0])"); + } + + @Test + public void testNestedLambda() + { + // same argument name + assertions.assertQuery("" + + "SELECT transform(a, x -> transform(ARRAY[x], x -> x + 1)) " + + "FROM (VALUES ARRAY[1, 2, 3]) t(a)", + "VALUES ARRAY[ARRAY[2], ARRAY[3], ARRAY[4]]"); + + // 
different argument name + assertions.assertQuery("" + + "SELECT transform(a, x -> transform(ARRAY[x], y -> y + 1)) " + + "FROM (VALUES ARRAY[1, 2, 3]) t(a)", + "VALUES ARRAY[ARRAY[2], ARRAY[3], ARRAY[4]]"); + } +} From 34cb1d0a7281aa60d706290791a2fec908122afd Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Fri, 10 Apr 2020 08:05:22 +0200 Subject: [PATCH 112/519] Fix formatting in release notes --- presto-docs/src/main/sphinx/release/release-332.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/presto-docs/src/main/sphinx/release/release-332.rst b/presto-docs/src/main/sphinx/release/release-332.rst index 806602ed4e7b..aac0fbac9cda 100644 --- a/presto-docs/src/main/sphinx/release/release-332.rst +++ b/presto-docs/src/main/sphinx/release/release-332.rst @@ -62,7 +62,7 @@ Hive Connector Changes created by Presto. Default value is ``0777``. (:issue:`3126`) * Add ``hive.partition-use-column-names`` configuration property and matching ``partition_use_column_names`` catalog session property that allows to match columns between table and partition schemas by names. By default they are mapped - by index. (:issue:2933`) + by index. (:issue:`2933`) * Add support for ``CREATE SCHEMA ... AUTHORIZATION`` to create a schema with specified owner. (:issue:`3066`). * Allow specifying the Glue metastore endpoint URL using the ``hive.metastore.glue.endpoint-url`` configuration property. (:issue:`3239`) @@ -108,5 +108,5 @@ SPI Changes * Expose row filters and column masks in ``QueryCompletedEvent``. (:issue:`3183`) * Expose referenced functions and procedures in ``QueryCompletedEvent``. (:issue:`3246`) * Allow ``Connector`` to provide ``EventListener`` instances. (:issue:`3166`) -* Deprecate the ``ConnectorPageSourceProvider.createPageSource()` variant without the +* Deprecate the ``ConnectorPageSourceProvider.createPageSource()`` variant without the ``dynamicFilter`` parameter. The method will be removed in a future release. 
(:issue:`3255`) From 80c08799cdecb59c9c445cf8a5298e54c7592c80 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Fri, 10 Apr 2020 08:05:31 +0200 Subject: [PATCH 113/519] Improve wording in release notes --- presto-docs/src/main/sphinx/release/release-332.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/presto-docs/src/main/sphinx/release/release-332.rst b/presto-docs/src/main/sphinx/release/release-332.rst index aac0fbac9cda..d671116909f6 100644 --- a/presto-docs/src/main/sphinx/release/release-332.rst +++ b/presto-docs/src/main/sphinx/release/release-332.rst @@ -59,7 +59,7 @@ Hive Connector Changes from partitioned tables would return zero rows, and reading from unpartitioned tables would fail with a cryptic error. (:issue:`3366`) * Add ``hive.fs.new-directory-permissions`` configuration property for setting the permissions of new directories - created by Presto. Default value is ``0777``. (:issue:`3126`) + created by Presto. Default value is ``0777``, which corresponds to previous behavior. (:issue:`3126`) * Add ``hive.partition-use-column-names`` configuration property and matching ``partition_use_column_names`` catalog session property that allows to match columns between table and partition schemas by names. By default they are mapped by index. 
(:issue:`2933`) From 8685427909a42f38ac7f9075ce0ee15e67ab0cba Mon Sep 17 00:00:00 2001 From: praveenkrishna Date: Fri, 20 Mar 2020 23:00:45 +0530 Subject: [PATCH 114/519] Add sample node output symbols pruning rule --- .../prestosql/sql/planner/PlanOptimizers.java | 2 + .../iterative/rule/PruneSampleColumns.java | 39 ++++++++++++ .../rule/TestPruneSampleColumns.java | 61 +++++++++++++++++++ 3 files changed, 102 insertions(+) create mode 100644 presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneSampleColumns.java create mode 100644 presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneSampleColumns.java diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java b/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java index 994967674d0b..739f5f27665b 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java @@ -80,6 +80,7 @@ import io.prestosql.sql.planner.iterative.rule.PruneOrderByInAggregation; import io.prestosql.sql.planner.iterative.rule.PruneOutputColumns; import io.prestosql.sql.planner.iterative.rule.PruneProjectColumns; +import io.prestosql.sql.planner.iterative.rule.PruneSampleColumns; import io.prestosql.sql.planner.iterative.rule.PruneSemiJoinColumns; import io.prestosql.sql.planner.iterative.rule.PruneSemiJoinFilteringSourceColumns; import io.prestosql.sql.planner.iterative.rule.PruneTableScanColumns; @@ -257,6 +258,7 @@ public PlanOptimizers( new PruneMarkDistinctColumns(), new PruneOutputColumns(), new PruneProjectColumns(), + new PruneSampleColumns(), new PruneSemiJoinColumns(), new PruneSemiJoinFilteringSourceColumns(), new PruneTopNColumns(), diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneSampleColumns.java b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneSampleColumns.java new file mode 100644 index 
000000000000..0c3007f08abc --- /dev/null +++ b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneSampleColumns.java @@ -0,0 +1,39 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.sql.planner.iterative.rule; + +import io.prestosql.sql.planner.Symbol; +import io.prestosql.sql.planner.plan.PlanNode; +import io.prestosql.sql.planner.plan.SampleNode; + +import java.util.Optional; +import java.util.Set; + +import static io.prestosql.sql.planner.iterative.rule.Util.restrictChildOutputs; +import static io.prestosql.sql.planner.plan.Patterns.sample; + +public class PruneSampleColumns + extends ProjectOffPushDownRule +{ + public PruneSampleColumns() + { + super(sample()); + } + + @Override + protected Optional pushDownProjectOff(Context context, SampleNode sampleNode, Set referencedOutputs) + { + return restrictChildOutputs(context.getIdAllocator(), sampleNode, referencedOutputs); + } +} diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneSampleColumns.java b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneSampleColumns.java new file mode 100644 index 000000000000..7b0118d2759c --- /dev/null +++ b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneSampleColumns.java @@ -0,0 +1,61 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.sql.planner.iterative.rule; + +import com.google.common.collect.ImmutableMap; +import io.prestosql.sql.planner.iterative.rule.test.BaseRuleTest; +import io.prestosql.sql.planner.plan.Assignments; +import io.prestosql.sql.planner.plan.SampleNode; +import org.testng.annotations.Test; + +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.expression; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.node; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.strictProject; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.values; +import static io.prestosql.sql.planner.plan.SampleNode.Type.SYSTEM; + +public class TestPruneSampleColumns + extends BaseRuleTest +{ + @Test + public void testNotAllInputsReferenced() + { + tester().assertThat(new PruneSampleColumns()) + .on(p -> p.project( + Assignments.identity(p.symbol("b")), + p.sample( + 0.5, + SYSTEM, + p.values(p.symbol("a"), p.symbol("b"))))) + .matches( + strictProject( + ImmutableMap.of("b", expression("b")), + node(SampleNode.class, + strictProject( + ImmutableMap.of("b", expression("b")), + values("a", "b"))))); + } + + @Test + public void testAllOutputsReferenced() + { + tester().assertThat(new PruneSampleColumns()) + .on(p -> p.project( + Assignments.identity(p.symbol("a"), p.symbol("b")), + p.sample( + 0.5, + SYSTEM, + p.values(p.symbol("a"), p.symbol("b"))))).doesNotFire(); + } +} From e17615b67a1c39f7945f5d99e6b735972b4e229b Mon Sep 17 00:00:00 2001 From: Karol Sobczak Date: Thu, 9 Apr 2020 15:04:42 
+0200 Subject: [PATCH 115/519] Fix formatting --- .../io/prestosql/spiller/FileSingleStreamSpillerFactory.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/spiller/FileSingleStreamSpillerFactory.java b/presto-main/src/main/java/io/prestosql/spiller/FileSingleStreamSpillerFactory.java index e4fe1c12e6dd..8c433adf72ec 100644 --- a/presto-main/src/main/java/io/prestosql/spiller/FileSingleStreamSpillerFactory.java +++ b/presto-main/src/main/java/io/prestosql/spiller/FileSingleStreamSpillerFactory.java @@ -126,8 +126,8 @@ public FileSingleStreamSpillerFactory( this.roundRobinIndex = 0; this.spillPathHealthCache = CacheBuilder.newBuilder() - .expireAfterWrite(SPILL_PATH_HEALTH_EXPIRY_INTERVAL) - .build(CacheLoader.from(path -> isAccessible(path) && isSeeminglyHealthy(path))); + .expireAfterWrite(SPILL_PATH_HEALTH_EXPIRY_INTERVAL) + .build(CacheLoader.from(path -> isAccessible(path) && isSeeminglyHealthy(path))); } @PostConstruct From 3f81c82a0e1725335e7a94adb397fe43c7f64b96 Mon Sep 17 00:00:00 2001 From: Karol Sobczak Date: Fri, 10 Apr 2020 13:06:23 +0200 Subject: [PATCH 116/519] Remove problematic spill checks Coordinators do not require spill paths to be setup. Therefore spill check is problematic as it doesn't distinguish between coordinator and workers. 
--- .../io/prestosql/SystemSessionProperties.java | 17 ++--------------- .../prestosql/sql/analyzer/FeaturesConfig.java | 7 ------- .../sql/analyzer/TestFeaturesConfig.java | 10 ---------- 3 files changed, 2 insertions(+), 32 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/SystemSessionProperties.java b/presto-main/src/main/java/io/prestosql/SystemSessionProperties.java index f860357de4a3..8faa9bb293a7 100644 --- a/presto-main/src/main/java/io/prestosql/SystemSessionProperties.java +++ b/presto-main/src/main/java/io/prestosql/SystemSessionProperties.java @@ -41,7 +41,6 @@ import static io.prestosql.spi.session.PropertyMetadata.integerProperty; import static io.prestosql.spi.session.PropertyMetadata.stringProperty; import static io.prestosql.spi.type.BigintType.BIGINT; -import static io.prestosql.spi.type.BooleanType.BOOLEAN; import static io.prestosql.spi.type.IntegerType.INTEGER; import static io.prestosql.sql.analyzer.FeaturesConfig.JoinReorderingStrategy.ELIMINATE_CROSS_JOINS; import static io.prestosql.sql.analyzer.FeaturesConfig.JoinReorderingStrategy.NONE; @@ -356,23 +355,11 @@ public SystemSessionProperties( "Experimental: Run a fixed number of groups concurrently for eligible JOINs", featuresConfig.getConcurrentLifespansPerTask(), false), - new PropertyMetadata<>( + booleanProperty( SPILL_ENABLED, "Enable spilling", - BOOLEAN, - Boolean.class, featuresConfig.isSpillEnabled(), - false, - value -> { - boolean spillEnabled = (Boolean) value; - if (spillEnabled && featuresConfig.getSpillerSpillPaths().isEmpty()) { - throw new PrestoException( - INVALID_SESSION_PROPERTY, - format("%s cannot be set to true; no spill paths configured", SPILL_ENABLED)); - } - return spillEnabled; - }, - value -> value), + false), booleanProperty( SPILL_ORDER_BY, "Spill in OrderBy if spill_enabled is also set", diff --git a/presto-main/src/main/java/io/prestosql/sql/analyzer/FeaturesConfig.java 
b/presto-main/src/main/java/io/prestosql/sql/analyzer/FeaturesConfig.java index 3df19c539b80..72e04e97b4c2 100644 --- a/presto-main/src/main/java/io/prestosql/sql/analyzer/FeaturesConfig.java +++ b/presto-main/src/main/java/io/prestosql/sql/analyzer/FeaturesConfig.java @@ -27,7 +27,6 @@ import io.prestosql.operator.aggregation.histogram.HistogramGroupImplementation; import io.prestosql.operator.aggregation.multimapagg.MultimapAggGroupImplementation; -import javax.validation.constraints.AssertTrue; import javax.validation.constraints.DecimalMax; import javax.validation.constraints.DecimalMin; import javax.validation.constraints.Min; @@ -680,12 +679,6 @@ public FeaturesConfig setSpillerSpillPaths(String spillPaths) return this; } - @AssertTrue(message = SPILLER_SPILL_PATH + " must be configured when " + SPILL_ENABLED + " is set to true") - public boolean isSpillerSpillPathsConfiguredIfSpillEnabled() - { - return !isSpillEnabled() || !spillerSpillPaths.isEmpty(); - } - @Min(1) public int getSpillerThreads() { diff --git a/presto-main/src/test/java/io/prestosql/sql/analyzer/TestFeaturesConfig.java b/presto-main/src/test/java/io/prestosql/sql/analyzer/TestFeaturesConfig.java index c19f0b06638f..b76cc844719b 100644 --- a/presto-main/src/test/java/io/prestosql/sql/analyzer/TestFeaturesConfig.java +++ b/presto-main/src/test/java/io/prestosql/sql/analyzer/TestFeaturesConfig.java @@ -14,7 +14,6 @@ package io.prestosql.sql.analyzer; import com.google.common.collect.ImmutableMap; -import io.airlift.configuration.ConfigurationFactory; import io.airlift.units.DataSize; import io.airlift.units.Duration; import io.prestosql.operator.aggregation.arrayagg.ArrayAggGroupImplementation; @@ -34,8 +33,6 @@ import static io.airlift.units.DataSize.Unit.MEGABYTE; import static io.prestosql.sql.analyzer.FeaturesConfig.JoinDistributionType.BROADCAST; import static io.prestosql.sql.analyzer.FeaturesConfig.JoinReorderingStrategy.NONE; -import static 
io.prestosql.sql.analyzer.FeaturesConfig.SPILLER_SPILL_PATH; -import static io.prestosql.sql.analyzer.FeaturesConfig.SPILL_ENABLED; import static io.prestosql.sql.analyzer.RegexLibrary.JONI; import static io.prestosql.sql.analyzer.RegexLibrary.RE2J; import static java.util.concurrent.TimeUnit.MINUTES; @@ -262,11 +259,4 @@ public void testExplicitPropertyMappings() .setIgnoreDownstreamPreferences(true); assertFullMapping(properties, expected); } - - @Test(expectedExceptions = RuntimeException.class, expectedExceptionsMessageRegExp = ".*\\Q" + SPILLER_SPILL_PATH + " must be configured when " + SPILL_ENABLED + " is set to true\\E.*") - public void testValidateSpillConfiguredIfEnabled() - { - new ConfigurationFactory(ImmutableMap.of(SPILL_ENABLED, "true")) - .build(FeaturesConfig.class); - } } From a2267f5dcea3c7505ca646c08f3ebef2ddf297a2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Grzegorz=20Kokosi=C5=84ski?= Date: Fri, 10 Apr 2020 14:05:45 +0200 Subject: [PATCH 117/519] Depend connectionUrl on JdbcIdentity In some connectors connenction url changes depending on JdbcIdentity. 
--- .../plugin/jdbc/DriverConnectionFactory.java | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/DriverConnectionFactory.java b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/DriverConnectionFactory.java index d6788725be09..e5e41ce3b1a2 100644 --- a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/DriverConnectionFactory.java +++ b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/DriverConnectionFactory.java @@ -21,6 +21,7 @@ import java.sql.Driver; import java.sql.SQLException; import java.util.Properties; +import java.util.function.Function; import static com.google.common.base.Preconditions.checkState; import static java.util.Objects.requireNonNull; @@ -29,7 +30,7 @@ public class DriverConnectionFactory implements ConnectionFactory { private final Driver driver; - private final String connectionUrl; + private final Function connectionUrl; private final Properties connectionProperties; private final CredentialPropertiesProvider credentialPropertiesProvider; @@ -47,6 +48,15 @@ public DriverConnectionFactory(Driver driver, String connectionUrl, Properties c } public DriverConnectionFactory(Driver driver, String connectionUrl, Properties connectionProperties, CredentialPropertiesProvider credentialPropertiesProvider) + { + this(driver, jdbcIdentity -> connectionUrl, connectionProperties, credentialPropertiesProvider); + } + + public DriverConnectionFactory( + Driver driver, + Function connectionUrl, + Properties connectionProperties, + CredentialPropertiesProvider credentialPropertiesProvider) { this.driver = requireNonNull(driver, "driver is null"); this.connectionUrl = requireNonNull(connectionUrl, "connectionUrl is null"); @@ -60,7 +70,7 @@ public Connection openConnection(JdbcIdentity identity) throws SQLException { Properties properties = getCredentialProperties(identity); - Connection connection = driver.connect(connectionUrl, properties); + 
Connection connection = driver.connect(connectionUrl.apply(identity), properties); checkState(connection != null, "Driver returned null connection"); return connection; } From ece91e5265894e9b2c29ff3419362989b8921021 Mon Sep 17 00:00:00 2001 From: afinkelstein Date: Fri, 10 Apr 2020 22:44:05 +0300 Subject: [PATCH 118/519] Fix NPE when inlining try() to NULL value --- .../io/prestosql/sql/planner/ExpressionInterpreter.java | 5 +++-- .../test/java/io/prestosql/sql/query/TestExpressions.java | 7 +++++++ 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/ExpressionInterpreter.java b/presto-main/src/main/java/io/prestosql/sql/planner/ExpressionInterpreter.java index db2dda8dbf8c..bd51c7e70a2f 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/ExpressionInterpreter.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/ExpressionInterpreter.java @@ -141,6 +141,7 @@ import static io.prestosql.util.Failures.checkCondition; import static java.lang.Math.toIntExact; import static java.util.Objects.requireNonNull; +import static java.util.stream.Collectors.toList; public class ExpressionInterpreter { @@ -519,7 +520,7 @@ protected Object visitCoalesceExpression(CoalesceExpression node, Object context } return Stream.of(expression); }) - .collect(Collectors.toList()); + .collect(toList()); if ((!values.isEmpty() && !(values.get(0) instanceof Expression)) || values.size() == 1) { return values.get(0); @@ -974,7 +975,7 @@ protected Object visitBindExpression(BindExpression node, Object context) { List values = node.getValues().stream() .map(value -> process(value, context)) - .collect(toImmutableList()); + .collect(toList()); // values are nullable Object function = process(node.getFunction(), context); if (hasUnresolvedValue(values) || hasUnresolvedValue(function)) { diff --git a/presto-main/src/test/java/io/prestosql/sql/query/TestExpressions.java 
b/presto-main/src/test/java/io/prestosql/sql/query/TestExpressions.java index b95eb35ee1f1..8405af1d762a 100644 --- a/presto-main/src/test/java/io/prestosql/sql/query/TestExpressions.java +++ b/presto-main/src/test/java/io/prestosql/sql/query/TestExpressions.java @@ -48,4 +48,11 @@ public void testBooleanExpressionInCase() assertions.assertQuery("VALUES CASE 1 < 2 WHEN true THEN 10 ELSE 20 END", "VALUES 10"); assertions.assertQuery("VALUES CASE 1 > 2 WHEN true THEN 10 ELSE 20 END", "VALUES 20"); } + + @Test + public void testInlineNullBind() + { + // https://github.com/prestosql/presto/issues/3411 + assertions.assertQuery("SELECT try(k) FROM (SELECT null) t(k)", "VALUES null"); + } } From 5f31a013f5808ad38eff504450d82474c70b9bc7 Mon Sep 17 00:00:00 2001 From: Mateusz Gajewski Date: Mon, 6 Apr 2020 11:47:42 +0200 Subject: [PATCH 119/519] Use testcontainers:elasticsearch version from bom --- presto-elasticsearch/pom.xml | 1 - 1 file changed, 1 deletion(-) diff --git a/presto-elasticsearch/pom.xml b/presto-elasticsearch/pom.xml index 7a16926e6f0a..b592a90937c3 100644 --- a/presto-elasticsearch/pom.xml +++ b/presto-elasticsearch/pom.xml @@ -303,7 +303,6 @@ org.testcontainers elasticsearch - 1.12.3 test From 840d589dbb3c389abfea8d08bdb0d496431cf97f Mon Sep 17 00:00:00 2001 From: Mateusz Gajewski Date: Mon, 6 Apr 2020 11:57:41 +0200 Subject: [PATCH 120/519] Update testcontainers to 1.13.0 --- pom.xml | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 83ed82a03b15..037d23970a69 100644 --- a/pom.xml +++ b/pom.xml @@ -834,6 +834,12 @@ 2.3 + + org.apache.commons + commons-compress + 1.20 + + org.apache.commons commons-math3 @@ -1194,7 +1200,7 @@ org.testcontainers testcontainers-bom - 1.12.4 + 1.13.0 pom import From 1c71e2ebacb9180de2c4615296e934f6c86e5558 Mon Sep 17 00:00:00 2001 From: eskabetxe Date: Sat, 11 Apr 2020 10:45:36 +0200 Subject: [PATCH 121/519] Add Elastic 7 tests --- ...t.java => BaseElasticsearchSmokeTest.java} | 
185 +++++++++--------- .../elasticsearch/ElasticsearchServer.java | 4 +- ...estElasticsearch6IntegrationSmokeTest.java | 39 ++++ ...estElasticsearch7IntegrationSmokeTest.java | 37 ++++ 4 files changed, 167 insertions(+), 98 deletions(-) rename presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/{TestElasticsearchIntegrationSmokeTest.java => BaseElasticsearchSmokeTest.java} (87%) create mode 100644 presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/TestElasticsearch6IntegrationSmokeTest.java create mode 100644 presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/TestElasticsearch7IntegrationSmokeTest.java diff --git a/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/TestElasticsearchIntegrationSmokeTest.java b/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/BaseElasticsearchSmokeTest.java similarity index 87% rename from presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/TestElasticsearchIntegrationSmokeTest.java rename to presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/BaseElasticsearchSmokeTest.java index 88779baf8ddf..450f89f89f98 100644 --- a/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/TestElasticsearchIntegrationSmokeTest.java +++ b/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/BaseElasticsearchSmokeTest.java @@ -13,6 +13,7 @@ */ package io.prestosql.elasticsearch; +import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.net.HostAndPort; @@ -23,8 +24,6 @@ import org.apache.http.HttpHost; import org.apache.http.entity.ContentType; import org.apache.http.nio.entity.NStringEntity; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.RestClient; import org.elasticsearch.client.RestHighLevelClient; import org.intellij.lang.annotations.Language; @@ 
-44,17 +43,23 @@ import static java.lang.String.format; import static org.assertj.core.api.Assertions.assertThatThrownBy; -public class TestElasticsearchIntegrationSmokeTest +public abstract class BaseElasticsearchSmokeTest extends AbstractTestIntegrationSmokeTest { + private final String elasticVersion; private ElasticsearchServer elasticsearch; private RestHighLevelClient client; + BaseElasticsearchSmokeTest(String elasticVersion) + { + this.elasticVersion = elasticVersion; + } + @Override protected QueryRunner createQueryRunner() throws Exception { - elasticsearch = new ElasticsearchServer(); + elasticsearch = new ElasticsearchServer(elasticVersion); HostAndPort address = elasticsearch.getAddress(); client = new RestHighLevelClient(RestClient.builder(new HttpHost(address.getHost(), address.getPort()))); @@ -144,10 +149,9 @@ public void testArrayFields() { String indexName = "test_arrays"; + @Language("JSON") String mapping = "" + "{" + - " \"mappings\": {" + - " \"doc\": {" + " \"_meta\": {" + " \"presto\": {" + " \"a\": {" + @@ -222,8 +226,6 @@ public void testArrayFields() " \"type\": \"long\"" + " }" + " }" + - " }" + - " }" + "}"; createIndex(indexName, mapping); @@ -322,28 +324,25 @@ public void testDataTypes() { String indexName = "types"; - String mapping = "" + + @Language("JSON") + String mappings = "" + "{" + - " \"mappings\": {" + - " \"doc\": {" + - " \"properties\": {" + - " \"boolean_column\": { \"type\": \"boolean\" }," + - " \"float_column\": { \"type\": \"float\" }," + - " \"double_column\": { \"type\": \"double\" }," + - " \"integer_column\": { \"type\": \"integer\" }," + - " \"long_column\": { \"type\": \"long\" }," + - " \"keyword_column\": { \"type\": \"keyword\" }," + - " \"text_column\": { \"type\": \"text\" }," + - " \"binary_column\": { \"type\": \"binary\" }," + - " \"timestamp_column\": { \"type\": \"date\" }," + - " \"ipv4_column\": { \"type\": \"ip\" }," + - " \"ipv6_column\": { \"type\": \"ip\" }" + - " }" + - " }" + + " 
\"properties\": { " + + " \"boolean_column\": { \"type\": \"boolean\" }," + + " \"float_column\": { \"type\": \"float\" }," + + " \"double_column\": { \"type\": \"double\" }," + + " \"integer_column\": { \"type\": \"integer\" }," + + " \"long_column\": { \"type\": \"long\" }," + + " \"keyword_column\": { \"type\": \"keyword\" }," + + " \"text_column\": { \"type\": \"text\" }," + + " \"binary_column\": { \"type\": \"binary\" }," + + " \"timestamp_column\": { \"type\": \"date\" }," + + " \"ipv4_column\": { \"type\": \"ip\" }," + + " \"ipv6_column\": { \"type\": \"ip\" }" + " }" + "}"; - createIndex(indexName, mapping); + createIndex(indexName, mappings); index(indexName, ImmutableMap.builder() .put("boolean_column", true) @@ -388,28 +387,25 @@ public void testFilters() { String indexName = "filter_pushdown"; - String mapping = "" + + @Language("JSON") + String mappings = "" + "{" + - " \"mappings\": {" + - " \"doc\": {" + - " \"properties\": {" + - " \"boolean_column\": { \"type\": \"boolean\" }," + - " \"float_column\": { \"type\": \"float\" }," + - " \"double_column\": { \"type\": \"double\" }," + - " \"integer_column\": { \"type\": \"integer\" }," + - " \"long_column\": { \"type\": \"long\" }," + - " \"keyword_column\": { \"type\": \"keyword\" }," + - " \"text_column\": { \"type\": \"text\" }," + - " \"binary_column\": { \"type\": \"binary\" }," + - " \"timestamp_column\": { \"type\": \"date\" }," + - " \"ipv4_column\": { \"type\": \"ip\" }," + - " \"ipv6_column\": { \"type\": \"ip\" }" + - " }" + - " }" + + " \"properties\": { " + + " \"boolean_column\": { \"type\": \"boolean\" }," + + " \"float_column\": { \"type\": \"float\" }," + + " \"double_column\": { \"type\": \"double\" }," + + " \"integer_column\": { \"type\": \"integer\" }," + + " \"long_column\": { \"type\": \"long\" }," + + " \"keyword_column\": { \"type\": \"keyword\" }," + + " \"text_column\": { \"type\": \"text\" }," + + " \"binary_column\": { \"type\": \"binary\" }," + + " \"timestamp_column\": { 
\"type\": \"date\" }," + + " \"ipv4_column\": { \"type\": \"ip\" }," + + " \"ipv6_column\": { \"type\": \"ip\" }" + " }" + "}"; - createIndex(indexName, mapping); + createIndex(indexName, mappings); index(indexName, ImmutableMap.builder() .put("boolean_column", true) @@ -525,32 +521,29 @@ public void testDataTypesNested() { String indexName = "types_nested"; - String mapping = "" + + @Language("JSON") + String properties = "" + "{" + - " \"mappings\": {" + - " \"doc\": {" + + " \"properties\":{" + + " \"field\": {" + " \"properties\": {" + - " \"field\": {" + - " \"properties\": {" + - " \"boolean_column\": { \"type\": \"boolean\" }," + - " \"float_column\": { \"type\": \"float\" }," + - " \"double_column\": { \"type\": \"double\" }," + - " \"integer_column\": { \"type\": \"integer\" }," + - " \"long_column\": { \"type\": \"long\" }," + - " \"keyword_column\": { \"type\": \"keyword\" }," + - " \"text_column\": { \"type\": \"text\" }," + - " \"binary_column\": { \"type\": \"binary\" }," + - " \"timestamp_column\": { \"type\": \"date\" }," + - " \"ipv4_column\": { \"type\": \"ip\" }," + - " \"ipv6_column\": { \"type\": \"ip\" }" + - " }" + - " }" + + " \"boolean_column\": { \"type\": \"boolean\" }," + + " \"float_column\": { \"type\": \"float\" }," + + " \"double_column\": { \"type\": \"double\" }," + + " \"integer_column\": { \"type\": \"integer\" }," + + " \"long_column\": { \"type\": \"long\" }," + + " \"keyword_column\": { \"type\": \"keyword\" }," + + " \"text_column\": { \"type\": \"text\" }," + + " \"binary_column\": { \"type\": \"binary\" }," + + " \"timestamp_column\": { \"type\": \"date\" }," + + " \"ipv4_column\": { \"type\": \"ip\" }," + + " \"ipv6_column\": { \"type\": \"ip\" }" + " }" + " }" + " }" + "}"; - createIndex(indexName, mapping); + createIndex(indexName, properties); index(indexName, ImmutableMap.of( "field", @@ -597,33 +590,30 @@ public void testNestedTypeDataTypesNested() { String indexName = "nested_type_nested"; - String mapping = "" + + 
@Language("JSON") + String mappings = "" + "{" + - " \"mappings\": {" + - " \"doc\": {" + + " \"properties\":{" + + " \"nested_field\": {" + + " \"type\":\"nested\"," + " \"properties\": {" + - " \"nested_field\": {" + - " \"type\":\"nested\"," + - " \"properties\": {" + - " \"boolean_column\": { \"type\": \"boolean\" }," + - " \"float_column\": { \"type\": \"float\" }," + - " \"double_column\": { \"type\": \"double\" }," + - " \"integer_column\": { \"type\": \"integer\" }," + - " \"long_column\": { \"type\": \"long\" }," + - " \"keyword_column\": { \"type\": \"keyword\" }," + - " \"text_column\": { \"type\": \"text\" }," + - " \"binary_column\": { \"type\": \"binary\" }," + - " \"timestamp_column\": { \"type\": \"date\" }," + - " \"ipv4_column\": { \"type\": \"ip\" }," + - " \"ipv6_column\": { \"type\": \"ip\" }" + - " }" + - " }" + + " \"boolean_column\": { \"type\": \"boolean\" }," + + " \"float_column\": { \"type\": \"float\" }," + + " \"double_column\": { \"type\": \"double\" }," + + " \"integer_column\": { \"type\": \"integer\" }," + + " \"long_column\": { \"type\": \"long\" }," + + " \"keyword_column\": { \"type\": \"keyword\" }," + + " \"text_column\": { \"type\": \"text\" }," + + " \"binary_column\": { \"type\": \"binary\" }," + + " \"timestamp_column\": { \"type\": \"date\" }," + + " \"ipv4_column\": { \"type\": \"ip\" }," + + " \"ipv6_column\": { \"type\": \"ip\" }" + " }" + " }" + " }" + "}"; - createIndex(indexName, mapping); + createIndex(indexName, mappings); index(indexName, ImmutableMap.of( "nested_field", @@ -700,17 +690,14 @@ public void testNumericKeyword() throws IOException { String indexName = "numeric_keyword"; - @Language("JSON") String mapping = "" + + @Language("JSON") + String properties = "" + "{" + - " \"mappings\": {" + - " \"doc\": {" + - " \"properties\": {" + - " \"numeric_keyword\": { \"type\": \"keyword\" }" + - " }" + - " }" + + " \"properties\":{" + + " \"numeric_keyword\": { \"type\": \"keyword\" }" + " }" + "}"; - 
createIndex(indexName, mapping); + createIndex(indexName, properties); index(indexName, ImmutableMap.builder() .put("numeric_keyword", 20) .build()); @@ -752,12 +739,15 @@ public void testMultiIndexAlias() "SELECT (SELECT count(*) FROM region) + (SELECT count(*) FROM nation)"); } + protected abstract String indexEndpoint(String index, String docId); + private void index(String index, Map document) throws IOException { - client.index(new IndexRequest(index, "doc") - .source(document) - .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)); + String json = new ObjectMapper().writeValueAsString(document); + String endpoint = format("%s?refresh", indexEndpoint(index, String.valueOf(System.nanoTime()))); + client.getLowLevelClient() + .performRequest("PUT", endpoint, ImmutableMap.of(), new NStringEntity(json, ContentType.APPLICATION_JSON)); } private void addAlias(String index, String alias) @@ -769,11 +759,14 @@ private void addAlias(String index, String alias) refreshIndex(alias); } - private void createIndex(String indexName, @Language("JSON") String mapping) + protected abstract String indexMapping(@Language("JSON") String properties); + + private void createIndex(String indexName, @Language("JSON") String properties) throws IOException { + String mappings = indexMapping(properties); client.getLowLevelClient() - .performRequest("PUT", "/" + indexName, ImmutableMap.of(), new NStringEntity(mapping, ContentType.APPLICATION_JSON)); + .performRequest("PUT", "/" + indexName, ImmutableMap.of(), new NStringEntity(mappings, ContentType.APPLICATION_JSON)); } private void refreshIndex(String index) diff --git a/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/ElasticsearchServer.java b/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/ElasticsearchServer.java index 9d2e5001bc6f..3e7b44a8e8f3 100644 --- a/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/ElasticsearchServer.java +++ 
b/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/ElasticsearchServer.java @@ -20,9 +20,9 @@ public class ElasticsearchServer { private final ElasticsearchContainer container; - public ElasticsearchServer() + public ElasticsearchServer(String version) { - container = new ElasticsearchContainer("docker.elastic.co/elasticsearch/elasticsearch-oss:6.0.0"); + container = new ElasticsearchContainer("docker.elastic.co/elasticsearch/elasticsearch-oss:" + version); container.start(); } diff --git a/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/TestElasticsearch6IntegrationSmokeTest.java b/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/TestElasticsearch6IntegrationSmokeTest.java new file mode 100644 index 000000000000..e6663fae674c --- /dev/null +++ b/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/TestElasticsearch6IntegrationSmokeTest.java @@ -0,0 +1,39 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.elasticsearch; + +import static java.lang.String.format; + +public class TestElasticsearch6IntegrationSmokeTest + extends BaseElasticsearchSmokeTest +{ + public TestElasticsearch6IntegrationSmokeTest() + { + super("6.0.0"); + } + + @Override + protected String indexEndpoint(String index, String docId) + { + return format("/%s/doc/%s", index, docId); + } + + @Override + protected String indexMapping(String properties) + { + return "{\"mappings\": " + + " {\"doc\": " + properties + "}" + + "}"; + } +} diff --git a/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/TestElasticsearch7IntegrationSmokeTest.java b/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/TestElasticsearch7IntegrationSmokeTest.java new file mode 100644 index 000000000000..41fc786c393b --- /dev/null +++ b/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/TestElasticsearch7IntegrationSmokeTest.java @@ -0,0 +1,37 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.elasticsearch; + +import static java.lang.String.format; + +public class TestElasticsearch7IntegrationSmokeTest + extends BaseElasticsearchSmokeTest +{ + public TestElasticsearch7IntegrationSmokeTest() + { + super("7.0.0"); + } + + @Override + protected String indexEndpoint(String index, String docId) + { + return format("/%s/_doc/%s", index, docId); + } + + @Override + protected String indexMapping(String properties) + { + return "{\"mappings\": " + properties + "}"; + } +} From 7e38595d54aac8fc36ecabb36f7fbc8976686f42 Mon Sep 17 00:00:00 2001 From: David Rabinowitz Date: Sun, 12 Apr 2020 17:25:45 -0700 Subject: [PATCH 122/519] Upgrade BigQuery dependencies --- presto-bigquery/pom.xml | 52 ++++++++++++++++++++++------------------- 1 file changed, 28 insertions(+), 24 deletions(-) diff --git a/presto-bigquery/pom.xml b/presto-bigquery/pom.xml index 9ad0e21b5ca5..38c67ff8f1e4 100644 --- a/presto-bigquery/pom.xml +++ b/presto-bigquery/pom.xml @@ -14,25 +14,40 @@ ${project.parent.basedir} - 1.49.1 - 0.18.0 - 1.91.3 - com.google.api.grpc - proto-google-common-protos - 1.17.0 + com.google.cloud + libraries-bom + 4.4.1 + pom + import - io.grpc - grpc-bom - 1.24.1 - pom - import + com.google.errorprone + error_prone_annotations + 2.3.4 + + + + com.google.guava + guava + 28.2-jre + + + + org.checkerframework + checker-qual + 2.10.0 + + + + org.threeten + threetenbp + 1.4.2 @@ -67,13 +82,11 @@ com.google.api gax - ${dep.gax.version} com.google.api gax-grpc - ${dep.gax.version} @@ -84,31 +97,26 @@ com.google.auth google-auth-library-credentials - ${dep.google-auth-library.version} com.google.auth google-auth-library-oauth2-http - ${dep.google-auth-library.version} com.google.cloud google-cloud-core - ${dep.google-cloud-core.version} com.google.cloud google-cloud-core-http - ${dep.google-cloud-core.version} com.google.http-client google-http-client - 1.32.1 commons-logging @@ -120,13 +128,11 @@ com.google.api.grpc 
proto-google-cloud-bigquerystorage-v1beta1 - 0.84.0 com.google.cloud google-cloud-bigquery - 1.101.0 com.google.guava @@ -146,7 +152,6 @@ com.google.cloud google-cloud-bigquerystorage - 0.119.0-beta com.google.guava @@ -163,7 +168,6 @@ com.google.protobuf protobuf-java - 3.10.0 @@ -242,13 +246,13 @@ test - + io.prestosql presto-tpch test - + io.prestosql presto-testing test From 5344920384d78d816d3cefe42489c22b626ad71d Mon Sep 17 00:00:00 2001 From: Yukihiro Okada Date: Thu, 9 Apr 2020 20:43:39 +0900 Subject: [PATCH 123/519] Add starts_with function --- presto-docs/src/main/sphinx/functions/list.rst | 1 + presto-docs/src/main/sphinx/functions/string.rst | 4 ++++ .../prestosql/operator/scalar/StringFunctions.java | 12 ++++++++++++ .../operator/scalar/TestStringFunctions.java | 11 +++++++++++ 4 files changed, 28 insertions(+) diff --git a/presto-docs/src/main/sphinx/functions/list.rst b/presto-docs/src/main/sphinx/functions/list.rst index 3a5760b3f4d3..bf48a25742de 100644 --- a/presto-docs/src/main/sphinx/functions/list.rst +++ b/presto-docs/src/main/sphinx/functions/list.rst @@ -390,6 +390,7 @@ S - :func:`ST_Y` - :func:`ST_YMax` - :func:`ST_YMin` +- :func:`starts_with` - :func:`stddev` - :func:`stddev_pop` - :func:`stddev_samp` diff --git a/presto-docs/src/main/sphinx/functions/string.rst b/presto-docs/src/main/sphinx/functions/string.rst index 0f823e1438d0..c7460e73fcbf 100644 --- a/presto-docs/src/main/sphinx/functions/string.rst +++ b/presto-docs/src/main/sphinx/functions/string.rst @@ -144,6 +144,10 @@ String Functions Returns the starting position of the first instance of ``substring`` in ``string``. Positions start with ``1``. If not found, ``0`` is returned. +.. function:: starts_with(string, substring) -> boolean + + Tests whether ``substring`` is a prefix of ``string``. + .. function:: substr(string, start) -> varchar Returns the rest of ``string`` from the starting position ``start``. 
diff --git a/presto-main/src/main/java/io/prestosql/operator/scalar/StringFunctions.java b/presto-main/src/main/java/io/prestosql/operator/scalar/StringFunctions.java index eb1297c29b25..2fb86d047f8a 100644 --- a/presto-main/src/main/java/io/prestosql/operator/scalar/StringFunctions.java +++ b/presto-main/src/main/java/io/prestosql/operator/scalar/StringFunctions.java @@ -899,4 +899,16 @@ public static Slice concat(@LiteralParameter("x") Long x, @SqlType("char(x)") Sl return result; } + + @Description("Determine whether source starts with prefix or not") + @ScalarFunction + @LiteralParameters({"x", "y"}) + @SqlType(StandardTypes.BOOLEAN) + public static boolean startsWith(@SqlType("varchar(x)") Slice source, @SqlType("varchar(y)") Slice prefix) + { + if (source.length() < prefix.length()) { + return false; + } + return source.compareTo(0, prefix.length(), prefix, 0, prefix.length()) == 0; + } } diff --git a/presto-main/src/test/java/io/prestosql/operator/scalar/TestStringFunctions.java b/presto-main/src/test/java/io/prestosql/operator/scalar/TestStringFunctions.java index 49b65627096f..da44808cdd97 100644 --- a/presto-main/src/test/java/io/prestosql/operator/scalar/TestStringFunctions.java +++ b/presto-main/src/test/java/io/prestosql/operator/scalar/TestStringFunctions.java @@ -284,6 +284,17 @@ public void testStringPosition() testStrPosAndPosition("", null, null); testStrPosAndPosition(null, null, null); + assertFunction("STARTS_WITH('foo', 'foo')", BOOLEAN, true); + assertFunction("STARTS_WITH('foo', 'bar')", BOOLEAN, false); + assertFunction("STARTS_WITH('foo', '')", BOOLEAN, true); + assertFunction("STARTS_WITH('', 'foo')", BOOLEAN, false); + assertFunction("STARTS_WITH('', '')", BOOLEAN, true); + assertFunction("STARTS_WITH('foo_bar_baz', 'foo')", BOOLEAN, true); + assertFunction("STARTS_WITH('foo_bar_baz', 'bar')", BOOLEAN, false); + assertFunction("STARTS_WITH('foo', 'foo_bar_baz')", BOOLEAN, false); + assertFunction("STARTS_WITH('信念 爱 希望', '信念')", BOOLEAN, 
true); + assertFunction("STARTS_WITH('信念 爱 希望', '爱')", BOOLEAN, false); + assertFunction("STRPOS(NULL, '')", BIGINT, null); assertFunction("STRPOS('', NULL)", BIGINT, null); assertFunction("STRPOS(NULL, NULL)", BIGINT, null); From 065338a4cccd6040ab70b7fe187f0c89fc7ff9c8 Mon Sep 17 00:00:00 2001 From: Dain Sundstrom Date: Mon, 13 Apr 2020 14:44:27 -0700 Subject: [PATCH 124/519] Fix missing cast in generated serialize of StateCompiler --- .../sql/gen/SqlTypeBytecodeExpression.java | 2 +- .../minmaxby/TestMinMaxByAggregation.java | 17 +++++++++++++++++ 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/presto-main/src/main/java/io/prestosql/sql/gen/SqlTypeBytecodeExpression.java b/presto-main/src/main/java/io/prestosql/sql/gen/SqlTypeBytecodeExpression.java index f79e58ad3ffc..71ad4c2dc23c 100644 --- a/presto-main/src/main/java/io/prestosql/sql/gen/SqlTypeBytecodeExpression.java +++ b/presto-main/src/main/java/io/prestosql/sql/gen/SqlTypeBytecodeExpression.java @@ -113,7 +113,7 @@ public BytecodeExpression writeValue(BytecodeExpression blockBuilder, BytecodeEx return invoke("writeDouble", void.class, blockBuilder, value); } if (fromJavaElementType == Slice.class) { - return invoke("writeSlice", void.class, blockBuilder, value); + return invoke("writeSlice", void.class, blockBuilder, value.cast(Slice.class)); } return invoke("writeObject", void.class, blockBuilder, value.cast(Object.class)); } diff --git a/presto-main/src/test/java/io/prestosql/operator/aggregation/minmaxby/TestMinMaxByAggregation.java b/presto-main/src/test/java/io/prestosql/operator/aggregation/minmaxby/TestMinMaxByAggregation.java index 7c2ee1bb71c8..409fe47e11ce 100644 --- a/presto-main/src/test/java/io/prestosql/operator/aggregation/minmaxby/TestMinMaxByAggregation.java +++ b/presto-main/src/test/java/io/prestosql/operator/aggregation/minmaxby/TestMinMaxByAggregation.java @@ -177,6 +177,23 @@ public void testMaxDoubleDouble() createDoublesBlock(1.0, 1.5, null)); } + @Test + public void 
testMinVarcharDouble() + { + InternalAggregationFunction function = METADATA.getAggregateFunctionImplementation(METADATA.resolveFunction(QualifiedName.of("min_by"), fromTypes(DOUBLE, VARCHAR))); + assertAggregation( + function, + 100.0, + createDoublesBlock(100.0, 1.0, 50.0, 2.0), + createStringsBlock("a", "b", "c", "d")); + + assertAggregation( + function, + -1.0, + createDoublesBlock(100.0, 50.0, 2.0, -1.0), + createStringsBlock("x", "y", "z", "a")); + } + @Test public void testMinDoubleVarchar() { From 5a93fff593e74f58a48300db8b4573d16cde9f46 Mon Sep 17 00:00:00 2001 From: Alex Albu Date: Mon, 13 Apr 2020 07:21:16 -0400 Subject: [PATCH 125/519] Enable tests to construct new LDAP object definitions --- .../tests/ImmutableLdapObjectDefinitions.java | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/presto-product-tests/src/main/java/io/prestosql/tests/ImmutableLdapObjectDefinitions.java b/presto-product-tests/src/main/java/io/prestosql/tests/ImmutableLdapObjectDefinitions.java index aedf4f3c087c..30e30c996550 100644 --- a/presto-product-tests/src/main/java/io/prestosql/tests/ImmutableLdapObjectDefinitions.java +++ b/presto-product-tests/src/main/java/io/prestosql/tests/ImmutableLdapObjectDefinitions.java @@ -57,7 +57,7 @@ private ImmutableLdapObjectDefinitions() public static final LdapObjectDefinition USER_IN_MULTIPLE_GROUPS = buildLdapUserObject("UserInMultipleGroups", Optional.of(Arrays.asList("DefaultGroup", "ParentGroup")), LDAP_PASSWORD); - private static LdapObjectDefinition buildLdapOrganizationObject(String id, String distinguishedName, String unit) + public static LdapObjectDefinition buildLdapOrganizationObject(String id, String distinguishedName, String unit) { return LdapObjectDefinition.builder(id) .setDistinguishedName(distinguishedName) @@ -66,7 +66,7 @@ private static LdapObjectDefinition buildLdapOrganizationObject(String id, Strin .build(); } - private static LdapObjectDefinition buildLdapGroupObject(String groupName, 
String userName, Optional> childGroupNames) + public static LdapObjectDefinition buildLdapGroupObject(String groupName, String userName, Optional> childGroupNames) { if (childGroupNames.isPresent()) { return buildLdapGroupObject(groupName, AMERICA_DISTINGUISHED_NAME, userName, ASIA_DISTINGUISHED_NAME, childGroupNames, Optional.of(AMERICA_DISTINGUISHED_NAME)); @@ -77,7 +77,7 @@ private static LdapObjectDefinition buildLdapGroupObject(String groupName, Strin } } - private static LdapObjectDefinition buildLdapGroupObject(String groupName, String groupOrganizationName, + public static LdapObjectDefinition buildLdapGroupObject(String groupName, String groupOrganizationName, String userName, String userOrganizationName, Optional> childGroupNames, Optional childGroupOrganizationName) { if (childGroupNames.isPresent() && childGroupOrganizationName.isPresent()) { @@ -101,7 +101,7 @@ private static LdapObjectDefinition buildLdapGroupObject(String groupName, Strin } } - private static LdapObjectDefinition buildLdapUserObject(String userName, Optional> groupNames, String password) + public static LdapObjectDefinition buildLdapUserObject(String userName, Optional> groupNames, String password) { if (groupNames.isPresent()) { return buildLdapUserObject(userName, ASIA_DISTINGUISHED_NAME, @@ -113,7 +113,7 @@ private static LdapObjectDefinition buildLdapUserObject(String userName, Optiona } } - private static LdapObjectDefinition buildLdapUserObject(String userName, String userOrganizationName, + public static LdapObjectDefinition buildLdapUserObject(String userName, String userOrganizationName, Optional> groupNames, Optional groupOrganizationName, String password) { if (groupNames.isPresent() && groupOrganizationName.isPresent()) { From 893abbe7d43f7668bafb2391b4eff32d7d9c3550 Mon Sep 17 00:00:00 2001 From: Joao Boto Date: Fri, 10 Apr 2020 18:58:46 +0200 Subject: [PATCH 126/519] Remove unused classes --- .../plugin/kafka/util/JsonEncoder.java | 44 ------------------- 
.../plugin/kafka/util/NumberEncoder.java | 37 ---------------- 2 files changed, 81 deletions(-) delete mode 100644 presto-kafka/src/test/java/io/prestosql/plugin/kafka/util/JsonEncoder.java delete mode 100644 presto-kafka/src/test/java/io/prestosql/plugin/kafka/util/NumberEncoder.java diff --git a/presto-kafka/src/test/java/io/prestosql/plugin/kafka/util/JsonEncoder.java b/presto-kafka/src/test/java/io/prestosql/plugin/kafka/util/JsonEncoder.java deleted file mode 100644 index 70f88453df52..000000000000 --- a/presto-kafka/src/test/java/io/prestosql/plugin/kafka/util/JsonEncoder.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package io.prestosql.plugin.kafka.util; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; -import kafka.serializer.Encoder; -import kafka.utils.VerifiableProperties; - -import java.io.UncheckedIOException; - -public class JsonEncoder - implements Encoder -{ - private final ObjectMapper objectMapper = new ObjectMapper(); - - @SuppressWarnings("UnusedParameters") - public JsonEncoder(VerifiableProperties properties) - { - // constructor required by Kafka - } - - @Override - public byte[] toBytes(Object o) - { - try { - return objectMapper.writeValueAsBytes(o); - } - catch (JsonProcessingException e) { - throw new UncheckedIOException(e); - } - } -} diff --git a/presto-kafka/src/test/java/io/prestosql/plugin/kafka/util/NumberEncoder.java b/presto-kafka/src/test/java/io/prestosql/plugin/kafka/util/NumberEncoder.java deleted file mode 100644 index c7086a26434d..000000000000 --- a/presto-kafka/src/test/java/io/prestosql/plugin/kafka/util/NumberEncoder.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package io.prestosql.plugin.kafka.util; - -import kafka.serializer.Encoder; -import kafka.utils.VerifiableProperties; - -import java.nio.ByteBuffer; - -public class NumberEncoder - implements Encoder -{ - @SuppressWarnings("UnusedParameters") - public NumberEncoder(VerifiableProperties properties) - { - // constructor required by Kafka - } - - @Override - public byte[] toBytes(Number value) - { - ByteBuffer buf = ByteBuffer.allocate(8); - buf.putLong(value == null ? 0L : value.longValue()); - return buf.array(); - } -} From 8a0cc3664c123b1565f7f229f9b3ae54b1e6e4f0 Mon Sep 17 00:00:00 2001 From: Joao Boto Date: Fri, 10 Apr 2020 18:59:22 +0200 Subject: [PATCH 127/519] Upgrade kafka version --- presto-kafka/pom.xml | 35 +------------------ .../plugin/kafka/util/TestingKafka.java | 2 +- 2 files changed, 2 insertions(+), 35 deletions(-) diff --git a/presto-kafka/pom.xml b/presto-kafka/pom.xml index 5741b29623ce..d3766a79de48 100644 --- a/presto-kafka/pom.xml +++ b/presto-kafka/pom.xml @@ -67,26 +67,10 @@ validation-api - - org.apache.kafka - kafka_2.12 - 1.1.1 - - - log4j - log4j - - - org.slf4j - slf4j-log4j12 - - - - org.apache.kafka kafka-clients - 1.1.1 + 2.4.1 @@ -147,23 +131,6 @@ runtime - - com.101tec - zkclient - 0.10 - runtime - - - log4j - log4j - - - org.slf4j - slf4j-log4j12 - - - - org.testng diff --git a/presto-kafka/src/test/java/io/prestosql/plugin/kafka/util/TestingKafka.java b/presto-kafka/src/test/java/io/prestosql/plugin/kafka/util/TestingKafka.java index a7a0674bfcbb..8c441e361c66 100644 --- a/presto-kafka/src/test/java/io/prestosql/plugin/kafka/util/TestingKafka.java +++ b/presto-kafka/src/test/java/io/prestosql/plugin/kafka/util/TestingKafka.java @@ -34,7 +34,7 @@ public class TestingKafka public TestingKafka() { - container = new KafkaContainer("5.2.1"); + container = new KafkaContainer("5.4.1"); } public void start() From ccd0ae1950f5117209a12710f83037d733726fda Mon Sep 17 00:00:00 2001 From: kasiafi 
<30203062+kasiafi@users.noreply.github.com> Date: Sun, 5 Apr 2020 13:01:34 +0200 Subject: [PATCH 128/519] Minor refactor in PushProjectionThroughExchange rule --- .../rule/PushProjectionThroughExchange.java | 32 +++++++++---------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PushProjectionThroughExchange.java b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PushProjectionThroughExchange.java index b5b255c4e387..2a958976a5f7 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PushProjectionThroughExchange.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PushProjectionThroughExchange.java @@ -14,6 +14,7 @@ package io.prestosql.sql.planner.iterative.rule; import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import io.prestosql.matching.Capture; import io.prestosql.matching.Captures; @@ -29,7 +30,6 @@ import io.prestosql.sql.tree.Expression; import io.prestosql.sql.tree.SymbolReference; -import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -87,7 +87,7 @@ public Result apply(ProjectNode project, Captures captures, Context context) ImmutableList.Builder newSourceBuilder = ImmutableList.builder(); ImmutableList.Builder> inputsBuilder = ImmutableList.builder(); for (int i = 0; i < exchange.getSources().size(); i++) { - Map outputToInputMap = extractExchangeOutputToInput(exchange, i); + Map outputToInputMap = mapExchangeOutputToInput(exchange, i); Assignments.Builder projections = Assignments.builder(); ImmutableList.Builder inputs = ImmutableList.builder(); @@ -95,10 +95,9 @@ public Result apply(ProjectNode project, Captures captures, Context context) // Need to retain the partition keys for the exchange partitioningColumns.stream() .map(outputToInputMap::get) - .forEach(nameReference 
-> { - Symbol symbol = Symbol.from(nameReference); - projections.put(symbol, nameReference); - inputs.add(symbol); + .forEach(inputSymbol -> { + projections.put(inputSymbol, inputSymbol.toSymbolReference()); + inputs.add(inputSymbol); }); if (exchange.getPartitioningScheme().getHashColumn().isPresent()) { @@ -113,15 +112,16 @@ public Result apply(ProjectNode project, Captures captures, Context context) // do not project the same symbol twice as ExchangeNode verifies that source input symbols match partitioning scheme outputLayout .filter(symbol -> !partitioningColumns.contains(symbol)) .map(outputToInputMap::get) - .forEach(nameReference -> { - Symbol symbol = Symbol.from(nameReference); - projections.put(symbol, nameReference); - inputs.add(symbol); + .forEach(inputSymbol -> { + projections.put(inputSymbol, inputSymbol.toSymbolReference()); + inputs.add(inputSymbol); }); } for (Map.Entry projection : project.getAssignments().entrySet()) { - Expression translatedExpression = inlineSymbols(outputToInputMap, projection.getValue()); + ImmutableMap.Builder translationMap = ImmutableMap.builder(); + outputToInputMap.forEach((key, value) -> translationMap.put(key, value.toSymbolReference())); + Expression translatedExpression = inlineSymbols(translationMap.build(), projection.getValue()); Type type = context.getSymbolAllocator().getTypes().get(projection.getKey()); Symbol symbol = context.getSymbolAllocator().newSymbol(translatedExpression, type); projections.put(symbol, translatedExpression); @@ -167,15 +167,15 @@ public Result apply(ProjectNode project, Captures captures, Context context) private static boolean isSymbolToSymbolProjection(ProjectNode project) { - return project.getAssignments().getExpressions().stream().allMatch(e -> e instanceof SymbolReference); + return project.getAssignments().getExpressions().stream().allMatch(SymbolReference.class::isInstance); } - private static Map extractExchangeOutputToInput(ExchangeNode exchange, int sourceIndex) + private 
static Map mapExchangeOutputToInput(ExchangeNode exchange, int sourceIndex) { - Map outputToInputMap = new HashMap<>(); + ImmutableMap.Builder outputToInputMap = ImmutableMap.builder(); for (int i = 0; i < exchange.getOutputSymbols().size(); i++) { - outputToInputMap.put(exchange.getOutputSymbols().get(i), exchange.getInputs().get(sourceIndex).get(i).toSymbolReference()); + outputToInputMap.put(exchange.getOutputSymbols().get(i), exchange.getInputs().get(sourceIndex).get(i)); } - return outputToInputMap; + return outputToInputMap.build(); } } From 2a681eebd7f1fcb301aa496cdd37b6587dd02456 Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Sun, 5 Apr 2020 16:42:45 +0200 Subject: [PATCH 129/519] Fix comments --- .../iterative/rule/PushProjectionThroughExchange.java | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PushProjectionThroughExchange.java b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PushProjectionThroughExchange.java index 2a958976a5f7..5c984d31f217 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PushProjectionThroughExchange.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PushProjectionThroughExchange.java @@ -54,7 +54,7 @@ * Project(x = e1, y = e2) * Source(a, b, c) * - * Or if Exchange needs symbols from Source for partitioning or as hash symbol to: + * Or if Exchange needs symbols from Source for partitioning, ordering or as hash symbol to: *
  *  Project(x, y)
  *    Exchange()
@@ -107,9 +107,9 @@ public Result apply(ProjectNode project, Captures captures, Context context)
             }
 
             if (exchange.getOrderingScheme().isPresent()) {
-                // need to retain ordering columns for the exchange
+                // Need to retain ordering columns for the exchange
                 exchange.getOrderingScheme().get().getOrderBy().stream()
-                        // do not project the same symbol twice as ExchangeNode verifies that source input symbols match partitioning scheme outputLayout
+                        // Do not duplicate symbols in inputs list
                         .filter(symbol -> !partitioningColumns.contains(symbol))
                         .map(outputToInputMap::get)
                         .forEach(inputSymbol -> {
@@ -137,6 +137,7 @@ public Result apply(ProjectNode project, Captures captures, Context context)
         exchange.getPartitioningScheme().getHashColumn().ifPresent(outputBuilder::add);
         if (exchange.getOrderingScheme().isPresent()) {
             exchange.getOrderingScheme().get().getOrderBy().stream()
+                    // Do not duplicate symbols in outputs list (for consistency with inputs lists)
                     .filter(symbol -> !partitioningColumns.contains(symbol))
                     .forEach(outputBuilder::add);
         }

From 9c4b68b3df5f3575092405fdcb74b86665f86dea Mon Sep 17 00:00:00 2001
From: kasiafi <30203062+kasiafi@users.noreply.github.com>
Date: Sun, 5 Apr 2020 18:29:33 +0200
Subject: [PATCH 130/519] Use source's symbol for projection instead of output
 symbol

In PushProjectionThroughExchange rule, there is a projection
inserted into the plan between ExchangeNode and each of its
sources. The projection is supposed to retain source's hash symbol.
Before this change, an identity assignment was added for the
hash symbol taken from the ExchangeNode's output. This was wrong
when the source used a different symbol (with a different name)
than the ExchangeNode.
It is fixed by using the source's symbol instead.
---
 .../rule/PushProjectionThroughExchange.java   | 12 ++++---
 .../TestPushProjectionThroughExchange.java    | 31 +++++++++++++++++++
 2 files changed, 38 insertions(+), 5 deletions(-)

diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PushProjectionThroughExchange.java b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PushProjectionThroughExchange.java
index 5c984d31f217..c7b1a15e27b3 100644
--- a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PushProjectionThroughExchange.java
+++ b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PushProjectionThroughExchange.java
@@ -100,11 +100,13 @@ public Result apply(ProjectNode project, Captures captures, Context context)
                         inputs.add(inputSymbol);
                     });
 
-            if (exchange.getPartitioningScheme().getHashColumn().isPresent()) {
-                // Need to retain the hash symbol for the exchange
-                projections.put(exchange.getPartitioningScheme().getHashColumn().get(), exchange.getPartitioningScheme().getHashColumn().get().toSymbolReference());
-                inputs.add(exchange.getPartitioningScheme().getHashColumn().get());
-            }
+            // Need to retain the hash symbol for the exchange
+            exchange.getPartitioningScheme().getHashColumn()
+                    .map(outputToInputMap::get)
+                    .ifPresent(inputSymbol -> {
+                        projections.put(inputSymbol, inputSymbol.toSymbolReference());
+                        inputs.add(inputSymbol);
+                    });
 
             if (exchange.getOrderingScheme().isPresent()) {
                 // Need to retain ordering columns for the exchange
diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPushProjectionThroughExchange.java b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPushProjectionThroughExchange.java
index f8d016fef9c0..4882531171f9 100644
--- a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPushProjectionThroughExchange.java
+++ b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPushProjectionThroughExchange.java
@@ -29,6 +29,7 @@
 import static io.prestosql.sql.planner.assertions.PlanMatchPattern.expression;
 import static io.prestosql.sql.planner.assertions.PlanMatchPattern.project;
 import static io.prestosql.sql.planner.assertions.PlanMatchPattern.sort;
+import static io.prestosql.sql.planner.assertions.PlanMatchPattern.strictProject;
 import static io.prestosql.sql.planner.assertions.PlanMatchPattern.values;
 import static io.prestosql.sql.planner.plan.ExchangeNode.Scope.REMOTE;
 import static io.prestosql.sql.planner.plan.ExchangeNode.Type.GATHER;
@@ -107,6 +108,36 @@ c2, new SymbolReference("c")),
                                 .withAlias("x2"));
     }
 
+    @Test
+    public void testHashMapping()
+    {
+        tester().assertThat(new PushProjectionThroughExchange())
+                .on(p -> {
+                    Symbol a = p.symbol("a");
+                    Symbol h1 = p.symbol("h_1");
+                    Symbol c = p.symbol("c");
+                    Symbol h = p.symbol("h");
+                    Symbol cTimes5 = p.symbol("c_times_5");
+                    return p.project(
+                            Assignments.of(
+                                    cTimes5, new ArithmeticBinaryExpression(ArithmeticBinaryExpression.Operator.MULTIPLY, new SymbolReference("c"), new LongLiteral("5"))),
+                            p.exchange(e -> e
+                                    .addSource(
+                                            p.values(a, h1))
+                                    .addInputsSet(a, h1)
+                                    .fixedHashDistributionParitioningScheme(
+                                            ImmutableList.of(c, h),
+                                            ImmutableList.of(c),
+                                            h)));
+                })
+                .matches(
+                        project(
+                                exchange(
+                                        strictProject(
+                                                ImmutableMap.of("a", expression("a"), "h_1", expression("h_1"), "a_times_5", expression("a * 5")),
+                                                values(ImmutableList.of("a", "h_1"))))));
+    }
+
     @Test
     public void testPartitioningColumnAndHashWithoutIdentityMappingInProjection()
     {

From af036d22c9d58c8d0d6011790dd53795fe5decbf Mon Sep 17 00:00:00 2001
From: kasiafi <30203062+kasiafi@users.noreply.github.com>
Date: Sun, 5 Apr 2020 23:30:56 +0200
Subject: [PATCH 131/519] Fix duplicate symbols in Exchange outputs

In PushProjectionThroughExchange rule, when the pushed-down
Projection has an identity assignment on a symbol which is used
for partitioning, ordering or as a hash symbol by the Exchange,
the symbol is added to Exchange's outputs twice:
- once as a symbol required by Exchange
- once as required by the parent Projection
This is fixed by skipping the duplicate occurrence.
---
 .../rule/PushProjectionThroughExchange.java   |  25 +++-
 .../TestPushProjectionThroughExchange.java    | 110 ++++++++++++++++++
 2 files changed, 132 insertions(+), 3 deletions(-)

diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PushProjectionThroughExchange.java b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PushProjectionThroughExchange.java
index c7b1a15e27b3..d2b240c583b6 100644
--- a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PushProjectionThroughExchange.java
+++ b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PushProjectionThroughExchange.java
@@ -33,6 +33,7 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.stream.Collectors;
 
 import static io.prestosql.matching.Capture.newCapture;
 import static io.prestosql.sql.planner.ExpressionSymbolInliner.inlineSymbols;
@@ -120,10 +121,21 @@ public Result apply(ProjectNode project, Captures captures, Context context)
                         });
             }
 
+            ImmutableSet.Builder outputBuilder = ImmutableSet.builder();
+            partitioningColumns.forEach(outputBuilder::add);
+            exchange.getPartitioningScheme().getHashColumn().ifPresent(outputBuilder::add);
+            exchange.getOrderingScheme().ifPresent(orderingScheme -> outputBuilder.addAll(orderingScheme.getOrderBy()));
+            Set partitioningHashAndOrderingOutputs = outputBuilder.build();
+
+            Map translationMap = outputToInputMap.entrySet().stream()
+                    .collect(Collectors.toMap(Map.Entry::getKey, entry -> entry.getValue().toSymbolReference()));
+
             for (Map.Entry projection : project.getAssignments().entrySet()) {
-                ImmutableMap.Builder translationMap = ImmutableMap.builder();
-                outputToInputMap.forEach((key, value) -> translationMap.put(key, value.toSymbolReference()));
-                Expression translatedExpression = inlineSymbols(translationMap.build(), projection.getValue());
+                // Skip identity projection if symbol is in outputs already
+                if (partitioningHashAndOrderingOutputs.contains(projection.getKey())) {
+                    continue;
+                }
+                Expression translatedExpression = inlineSymbols(translationMap, projection.getValue());
                 Type type = context.getSymbolAllocator().getTypes().get(projection.getKey());
                 Symbol symbol = context.getSymbolAllocator().newSymbol(translatedExpression, type);
                 projections.put(symbol, translatedExpression);
@@ -143,7 +155,14 @@ public Result apply(ProjectNode project, Captures captures, Context context)
                     .filter(symbol -> !partitioningColumns.contains(symbol))
                     .forEach(outputBuilder::add);
         }
+
+        Set partitioningHashAndOrderingOutputs = ImmutableSet.copyOf(outputBuilder.build());
+
         for (Map.Entry projection : project.getAssignments().entrySet()) {
+            // Do not add output for identity projection if symbol is in outputs already
+            if (partitioningHashAndOrderingOutputs.contains(projection.getKey())) {
+                continue;
+            }
             outputBuilder.add(projection.getKey());
         }
 
diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPushProjectionThroughExchange.java b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPushProjectionThroughExchange.java
index 4882531171f9..7636e8adc1f1 100644
--- a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPushProjectionThroughExchange.java
+++ b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPushProjectionThroughExchange.java
@@ -138,6 +138,116 @@ cTimes5, new ArithmeticBinaryExpression(ArithmeticBinaryExpression.Operator.MULT
                                                 values(ImmutableList.of("a", "h_1"))))));
     }
 
+    @Test
+    public void testSkipIdentityProjectionIfOutputPresent()
+    {
+        // In the following example, the Projection over Exchange has got an identity assignment (a -> a).
+        // The Projection is pushed down to Exchange's source, and the identity assignment is translated into
+        // a0 -> a. The assignment is added to the pushed-down Projection because the input symbol 'a' is
+        // required by the Exchange as a partitioning symbol.
+        // When all the assignments from the parent Projection are added to the pushed-down Projection,
+        // this assignment is omitted. Otherwise the doubled assignment would cause an error.
+        tester().assertThat(new PushProjectionThroughExchange())
+                .on(p -> {
+                    Symbol a = p.symbol("a");
+                    Symbol aTimes5 = p.symbol("a_times_5");
+                    return p.project(
+                            Assignments.of(
+                                    aTimes5, new ArithmeticBinaryExpression(ArithmeticBinaryExpression.Operator.MULTIPLY, new SymbolReference("a"), new LongLiteral("5")),
+                                    a, a.toSymbolReference()),
+                            p.exchange(e -> e
+                                    .addSource(p.values(a))
+                                    .addInputsSet(a)
+                                    .fixedHashDistributionParitioningScheme(ImmutableList.of(a), ImmutableList.of(a))));
+                })
+                .matches(
+                        exchange(
+                                strictProject(
+                                        ImmutableMap.of("a_0", expression("a"), "a_times_5", expression("a * 5")),
+                                        values(ImmutableList.of("a")))));
+
+        // In the following example, the Projection over Exchange has got an identity assignment (b -> b).
+        // The Projection is pushed down to Exchange's source, and the identity assignment is translated into
+        // a0 -> a. The assignment is added to the pushed-down Projection because the input symbol 'a' is
+        // required by the Exchange as a partitioning symbol.
+        // When all the assignments from the parent Projection are added to the pushed-down Projection,
+        // this assignment is omitted. Otherwise the doubled assignment would cause an error.
+        tester().assertThat(new PushProjectionThroughExchange())
+                .on(p -> {
+                    Symbol a = p.symbol("a");
+                    Symbol bTimes5 = p.symbol("b_times_5");
+                    Symbol b = p.symbol("b");
+                    return p.project(
+                            Assignments.of(
+                                    bTimes5, new ArithmeticBinaryExpression(ArithmeticBinaryExpression.Operator.MULTIPLY, new SymbolReference("b"), new LongLiteral("5")),
+                                    b, b.toSymbolReference()),
+                            p.exchange(e -> e
+                                    .addSource(p.values(a))
+                                    .addInputsSet(a)
+                                    .fixedHashDistributionParitioningScheme(ImmutableList.of(b), ImmutableList.of(b))));
+                })
+                .matches(
+                        exchange(
+                                strictProject(
+                                        ImmutableMap.of("a_0", expression("a"), "a_times_5", expression("a * 5")),
+                                        values(ImmutableList.of("a")))));
+    }
+
+    @Test
+    public void testDoNotSkipIdentityProjectionIfOutputAbsent()
+    {
+        // In the following example, the Projection over Exchange has got an identity assignment (a -> a).
+        // The Projection is pushed down to Exchange's source, and the identity assignment is translated into
+        // a0 -> a. Input symbol 'a' is not used in the Exchange for partitioning, ordering or as a hash symbol.
+        // It is just passed to output.
+        // When all the assignments from the parent Projection are added to the pushed-down Projection,
+        // the translated assignment is added too, so that the input symbol 'a' can be passed to the Exchange's output.
+        tester().assertThat(new PushProjectionThroughExchange())
+                .on(p -> {
+                    Symbol a = p.symbol("a");
+                    Symbol aTimes5 = p.symbol("a_times_5");
+                    return p.project(
+                            Assignments.of(
+                                    aTimes5, new ArithmeticBinaryExpression(ArithmeticBinaryExpression.Operator.MULTIPLY, new SymbolReference("a"), new LongLiteral("5")),
+                                    a, a.toSymbolReference()),
+                            p.exchange(e -> e
+                                    .addSource(p.values(a))
+                                    .addInputsSet(a)
+                                    .singleDistributionPartitioningScheme(a)));
+                })
+                .matches(
+                        exchange(
+                                strictProject(
+                                        ImmutableMap.of("a_0", expression("a"), "a_times_5", expression("a * 5")),
+                                        values(ImmutableList.of("a")))));
+
+        // In the following example, the Projection over Exchange has got an identity assignment (b -> b).
+        // The Projection is pushed down to Exchange's source, and the identity assignment is translated into
+        // a0 -> a. Input symbol 'a' is not used in the Exchange for partitioning, ordering or as a hash symbol.
+        // It is just passed to output.
+        // When all the assignments from the parent Projection are added to the pushed-down Projection,
+        // the translated assignment is added too, so that the input symbol 'a' can be passed to the Exchange's output.
+        tester().assertThat(new PushProjectionThroughExchange())
+                .on(p -> {
+                    Symbol a = p.symbol("a");
+                    Symbol bTimes5 = p.symbol("b_times_5");
+                    Symbol b = p.symbol("b");
+                    return p.project(
+                            Assignments.of(
+                                    bTimes5, new ArithmeticBinaryExpression(ArithmeticBinaryExpression.Operator.MULTIPLY, new SymbolReference("b"), new LongLiteral("5")),
+                                    b, b.toSymbolReference()),
+                            p.exchange(e -> e
+                                    .addSource(p.values(a))
+                                    .addInputsSet(a)
+                                    .singleDistributionPartitioningScheme(b)));
+                })
+                .matches(
+                        exchange(
+                                strictProject(
+                                        ImmutableMap.of("a_0", expression("a"), "a_times_5", expression("a * 5")),
+                                        values(ImmutableList.of("a")))));
+    }
+
     @Test
     public void testPartitioningColumnAndHashWithoutIdentityMappingInProjection()
     {

From e087f783948507843303338dc82649d11094c37e Mon Sep 17 00:00:00 2001
From: Roman Zeyde 
Date: Sun, 15 Dec 2019 08:38:50 +0200
Subject: [PATCH 132/519] Optimize TupleDomain#columnWiseUnion during dynamic
 filtering collection

---
 .../sql/planner/LocalDynamicFilter.java       | 27 ++++++++++---------
 1 file changed, 14 insertions(+), 13 deletions(-)

diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/LocalDynamicFilter.java b/presto-main/src/main/java/io/prestosql/sql/planner/LocalDynamicFilter.java
index 180300325a42..c8515c63e845 100644
--- a/presto-main/src/main/java/io/prestosql/sql/planner/LocalDynamicFilter.java
+++ b/presto-main/src/main/java/io/prestosql/sql/planner/LocalDynamicFilter.java
@@ -29,6 +29,7 @@
 import io.prestosql.sql.planner.plan.TableScanNode;
 import io.prestosql.sql.tree.SymbolReference;
 
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
@@ -57,11 +58,11 @@ public class LocalDynamicFilter
 
     private final SettableFuture> resultFuture;
 
-    // The resulting predicate for local dynamic filtering.
-    private TupleDomain result;
+    // Number of build-side partitions to be collected.
+    private final int partitionCount;
 
-    // Number of partitions left to be processed.
-    private int partitionsLeft;
+    // The resulting predicates from each build-side partition.
+    private final List> partitions;
 
     public LocalDynamicFilter(Multimap probeSymbols, Map buildChannels, TypeProvider types, int partitionCount)
     {
@@ -72,21 +73,21 @@ public LocalDynamicFilter(Multimap probeSymbols, Map(partitionCount);
     }
 
     private synchronized void addPartition(TupleDomain tupleDomain)
     {
         // Called concurrently by each DynamicFilterSourceOperator instance (when collection is over).
-        partitionsLeft -= 1;
-        verify(partitionsLeft >= 0);
+        verify(partitions.size() < partitionCount);
         // NOTE: may result in a bit more relaxed constraint if there are multiple columns and multiple rows.
         // See the comment at TupleDomain::columnWiseUnion() for more details.
-        result = TupleDomain.columnWiseUnion(result, tupleDomain);
-        if (partitionsLeft == 0) {
+        partitions.add(tupleDomain);
+        if (partitions.size() == partitionCount) {
+            Map result = convertTupleDomain(TupleDomain.columnWiseUnion(partitions));
             // No more partitions are left to be processed.
-            verify(resultFuture.set(convertTupleDomain(result)), "dynamic filter result is provided more than once");
+            resultFuture.set(result);
         }
     }
 
@@ -188,8 +189,8 @@ public String toString()
         return toStringHelper(this)
                 .add("probeSymbols", probeSymbols)
                 .add("buildChannels", buildChannels)
-                .add("result", result)
-                .add("partitionsLeft", partitionsLeft)
+                .add("partitionCount", partitionCount)
+                .add("partitions", partitions)
                 .toString();
     }
 }

From fe7e78ec2b9dc4e6eae61a50837c28ee33e891cf Mon Sep 17 00:00:00 2001
From: kasiafi <30203062+kasiafi@users.noreply.github.com>
Date: Tue, 7 Apr 2020 21:26:59 +0200
Subject: [PATCH 133/519] Add column-pruning rule for ExchangeNode

---
 .../prestosql/sql/planner/PlanOptimizers.java |  2 +
 .../rule/PruneExchangeSourceColumns.java      | 49 ++++++++++
 .../rule/TestPruneExchangeSourceColumns.java  | 98 +++++++++++++++++++
 3 files changed, 149 insertions(+)
 create mode 100644 presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneExchangeSourceColumns.java
 create mode 100644 presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneExchangeSourceColumns.java

diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java b/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java
index 739f5f27665b..718436814dfa 100644
--- a/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java
+++ b/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java
@@ -69,6 +69,7 @@
 import io.prestosql.sql.planner.iterative.rule.PruneDistinctLimitSourceColumns;
 import io.prestosql.sql.planner.iterative.rule.PruneEnforceSingleRowColumns;
 import io.prestosql.sql.planner.iterative.rule.PruneExceptSourceColumns;
+import io.prestosql.sql.planner.iterative.rule.PruneExchangeSourceColumns;
 import io.prestosql.sql.planner.iterative.rule.PruneFilterColumns;
 import io.prestosql.sql.planner.iterative.rule.PruneIndexSourceColumns;
 import io.prestosql.sql.planner.iterative.rule.PruneIntersectSourceColumns;
@@ -250,6 +251,7 @@ public PlanOptimizers(
                 new PruneDistinctLimitSourceColumns(),
                 new PruneEnforceSingleRowColumns(),
                 new PruneExceptSourceColumns(),
+                new PruneExchangeSourceColumns(),
                 new PruneFilterColumns(),
                 new PruneIndexSourceColumns(),
                 new PruneIntersectSourceColumns(),
diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneExchangeSourceColumns.java b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneExchangeSourceColumns.java
new file mode 100644
index 000000000000..7b534a592ef3
--- /dev/null
+++ b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneExchangeSourceColumns.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.prestosql.sql.planner.iterative.rule;
+
+import com.google.common.collect.ImmutableSet;
+import io.prestosql.matching.Captures;
+import io.prestosql.matching.Pattern;
+import io.prestosql.sql.planner.Symbol;
+import io.prestosql.sql.planner.iterative.Rule;
+import io.prestosql.sql.planner.plan.ExchangeNode;
+
+import java.util.Set;
+
+import static io.prestosql.sql.planner.iterative.rule.Util.restrictChildOutputs;
+import static io.prestosql.sql.planner.plan.Patterns.exchange;
+
+public class PruneExchangeSourceColumns
+        implements Rule<ExchangeNode>
+{
+    @Override
+    public Pattern<ExchangeNode> getPattern()
+    {
+        return exchange();
+    }
+
+    @Override
+    public Result apply(ExchangeNode node, Captures captures, Context context)
+    {
+        @SuppressWarnings("unchecked")
+        Set<Symbol>[] referencedInputs = new Set[node.getSources().size()];
+        for (int i = 0; i < node.getSources().size(); i++) {
+            referencedInputs[i] = ImmutableSet.copyOf(node.getInputs().get(i));
+        }
+        return restrictChildOutputs(context.getIdAllocator(), node, referencedInputs)
+                .map(Rule.Result::ofPlanNode)
+                .orElse(Rule.Result.empty());
+    }
+}
diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneExchangeSourceColumns.java b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneExchangeSourceColumns.java
new file mode 100644
index 000000000000..dd8df9da957e
--- /dev/null
+++ b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneExchangeSourceColumns.java
@@ -0,0 +1,98 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.prestosql.sql.planner.iterative.rule;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import io.prestosql.sql.planner.Symbol;
+import io.prestosql.sql.planner.iterative.rule.test.BaseRuleTest;
+import org.testng.annotations.Test;
+
+import static io.prestosql.sql.planner.assertions.PlanMatchPattern.exchange;
+import static io.prestosql.sql.planner.assertions.PlanMatchPattern.expression;
+import static io.prestosql.sql.planner.assertions.PlanMatchPattern.strictProject;
+import static io.prestosql.sql.planner.assertions.PlanMatchPattern.values;
+
+public class TestPruneExchangeSourceColumns
+        extends BaseRuleTest
+{
+    @Test
+    public void testPruneOneChild()
+    {
+        tester().assertThat(new PruneExchangeSourceColumns())
+                .on(p -> {
+                    Symbol a = p.symbol("a");
+                    Symbol b = p.symbol("b");
+                    Symbol c1 = p.symbol("c_1");
+                    Symbol c2 = p.symbol("c_2");
+                    return p.exchange(e -> e
+                            .addSource(p.values(b))
+                            .addInputsSet(b)
+                            .addSource(p.values(c1, c2))
+                            .addInputsSet(c1)
+                            .singleDistributionPartitioningScheme(a));
+                })
+                .matches(
+                        exchange(
+                                values(ImmutableList.of("b")),
+                                strictProject(
+                                        ImmutableMap.of("c_1", expression("c_1")),
+                                        values(ImmutableList.of("c_1", "c_2")))));
+    }
+
+    @Test
+    public void testPruneAllChildren()
+    {
+        tester().assertThat(new PruneExchangeSourceColumns())
+                .on(p -> {
+                    Symbol a = p.symbol("a");
+                    Symbol b1 = p.symbol("b_1");
+                    Symbol b2 = p.symbol("b_2");
+                    Symbol c1 = p.symbol("c_1");
+                    Symbol c2 = p.symbol("c_2");
+                    return p.exchange(e -> e
+                            .addSource(p.values(b1, b2))
+                            .addInputsSet(b1)
+                            .addSource(p.values(c1, c2))
+                            .addInputsSet(c1)
+                            .singleDistributionPartitioningScheme(a));
+                })
+                .matches(
+                        exchange(
+                                strictProject(
+                                        ImmutableMap.of("b_1", expression("b_1")),
+                                        values(ImmutableList.of("b_1", "b_2"))),
+                                strictProject(
+                                        ImmutableMap.of("c_1", expression("c_1")),
+                                        values(ImmutableList.of("c_1", "c_2")))));
+    }
+
+    @Test
+    public void testAllInputsReferenced()
+    {
+        tester().assertThat(new PruneExchangeSourceColumns())
+                .on(p -> {
+                    Symbol a = p.symbol("a");
+                    Symbol b = p.symbol("b");
+                    Symbol c = p.symbol("c");
+                    return p.exchange(e -> e
+                            .addSource(p.values(b))
+                            .addInputsSet(b)
+                            .addSource(p.values(c))
+                            .addInputsSet(c)
+                            .singleDistributionPartitioningScheme(a));
+                })
+                .doesNotFire();
+    }
+}

From 3607363a3026c1dd323e4af472b25251470dc4aa Mon Sep 17 00:00:00 2001
From: kasiafi <30203062+kasiafi@users.noreply.github.com>
Date: Tue, 14 Apr 2020 19:01:16 +0200
Subject: [PATCH 134/519] Add project-off rule for ExchangeNode

---
 .../prestosql/sql/planner/PlanOptimizers.java |   2 +
 .../iterative/rule/PruneExchangeColumns.java  | 119 ++++++++++++
 .../planner/assertions/ExchangeMatcher.java   |  29 ++-
 .../planner/assertions/PlanMatchPattern.java  |  13 +-
 .../rule/TestPruneExchangeColumns.java        | 173 ++++++++++++++++++
 5 files changed, 331 insertions(+), 5 deletions(-)
 create mode 100644 presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneExchangeColumns.java
 create mode 100644 presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneExchangeColumns.java

diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java b/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java
index 718436814dfa..fe4b84d5f36e 100644
--- a/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java
+++ b/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java
@@ -69,6 +69,7 @@
 import io.prestosql.sql.planner.iterative.rule.PruneDistinctLimitSourceColumns;
 import io.prestosql.sql.planner.iterative.rule.PruneEnforceSingleRowColumns;
 import io.prestosql.sql.planner.iterative.rule.PruneExceptSourceColumns;
+import io.prestosql.sql.planner.iterative.rule.PruneExchangeColumns;
 import io.prestosql.sql.planner.iterative.rule.PruneExchangeSourceColumns;
 import io.prestosql.sql.planner.iterative.rule.PruneFilterColumns;
 import io.prestosql.sql.planner.iterative.rule.PruneIndexSourceColumns;
@@ -251,6 +252,7 @@ public PlanOptimizers(
                 new PruneDistinctLimitSourceColumns(),
                 new PruneEnforceSingleRowColumns(),
                 new PruneExceptSourceColumns(),
+                new PruneExchangeColumns(),
                 new PruneExchangeSourceColumns(),
                 new PruneFilterColumns(),
                 new PruneIndexSourceColumns(),
diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneExchangeColumns.java b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneExchangeColumns.java
new file mode 100644
index 000000000000..6cfc8d726fb9
--- /dev/null
+++ b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneExchangeColumns.java
@@ -0,0 +1,119 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.prestosql.sql.planner.iterative.rule;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSet;
+import io.prestosql.sql.planner.PartitioningScheme;
+import io.prestosql.sql.planner.Symbol;
+import io.prestosql.sql.planner.plan.ExchangeNode;
+import io.prestosql.sql.planner.plan.PlanNode;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Optional;
+import java.util.Set;
+
+import static io.prestosql.sql.planner.plan.Patterns.exchange;
+
+/**
+ * This rule restricts the outputs of ExchangeNode based on which
+ * ExchangeNode's output symbols are either referenced by the
+ * parent node or used for partitioning, ordering or as a hash
+ * symbol by the ExchangeNode.
+ * <p>
+ * For each symbol removed from the output symbols list, the corresponding
+ * input symbols are removed from ExchangeNode's inputs lists.
+ * <p>
+ * Transforms:
+ * <pre>
+ * - Project (o1)
+ *      - Exchange:
+ *        outputs [o1, o2, o3, h]
+ *        partitioning by (o2)
+ *        hash h
+ *        inputs [[a1, a2, a3, h1], [b1, b2, b3, h2]]
+ *          - source [a1, a2, a3, h1]
+ *          - source [b1, b2, b3, h2]
+ * </pre>
+ * Into:
+ * <pre>
+ * - Project (o1)
+ *      - Exchange:
+ *        outputs [o1, o2, h]
+ *        partitioning by (o2)
+ *        hash h
+ *        inputs [[a1, a2, h1], [b1, b2, h2]]
+ *          - source [a1, a2, a3, h1]
+ *          - source [b1, b2, b3, h2]
+ * </pre>
+ */
+public class PruneExchangeColumns
+        extends ProjectOffPushDownRule<ExchangeNode>
+{
+    public PruneExchangeColumns()
+    {
+        super(exchange());
+    }
+
+    @Override
+    protected Optional<PlanNode> pushDownProjectOff(Context context, ExchangeNode exchangeNode, Set<Symbol> referencedOutputs)
+    {
+        // Extract output symbols referenced by parent node or used for partitioning, ordering or as a hash symbol of the Exchange
+        ImmutableSet.Builder<Symbol> builder = ImmutableSet.builder();
+        builder.addAll(referencedOutputs);
+        builder.addAll(exchangeNode.getPartitioningScheme().getPartitioning().getColumns());
+        exchangeNode.getPartitioningScheme().getHashColumn().ifPresent(builder::add);
+        exchangeNode.getOrderingScheme().ifPresent(orderingScheme -> builder.addAll(orderingScheme.getOrderBy()));
+        Set<Symbol> outputsToRetain = builder.build();
+
+        if (outputsToRetain.size() == exchangeNode.getOutputSymbols().size()) {
+            return Optional.empty();
+        }
+
+        ImmutableList.Builder<Symbol> newOutputs = ImmutableList.builder();
+        List<List<Symbol>> newInputs = new ArrayList<>(exchangeNode.getInputs().size());
+        for (int i = 0; i < exchangeNode.getInputs().size(); i++) {
+            newInputs.add(new ArrayList<>());
+        }
+
+        // Retain used symbols from output list and corresponding symbols from all input lists
+        for (int i = 0; i < exchangeNode.getOutputSymbols().size(); i++) {
+            Symbol output = exchangeNode.getOutputSymbols().get(i);
+            if (outputsToRetain.contains(output)) {
+                newOutputs.add(output);
+                for (int source = 0; source < exchangeNode.getInputs().size(); source++) {
+                    newInputs.get(source).add(exchangeNode.getInputs().get(source).get(i));
+                }
+            }
+        }
+
+        // newOutputs contains all partition, sort and hash symbols so simply swap the output layout
+        PartitioningScheme newPartitioningScheme = new PartitioningScheme(
+                exchangeNode.getPartitioningScheme().getPartitioning(),
+                newOutputs.build(),
+                exchangeNode.getPartitioningScheme().getHashColumn(),
+                exchangeNode.getPartitioningScheme().isReplicateNullsAndAny(),
exchangeNode.getPartitioningScheme().getBucketToPartition()); + + return Optional.of(new ExchangeNode( + exchangeNode.getId(), + exchangeNode.getType(), + exchangeNode.getScope(), + newPartitioningScheme, + exchangeNode.getSources(), + newInputs, + exchangeNode.getOrderingScheme())); + } +} diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/assertions/ExchangeMatcher.java b/presto-main/src/test/java/io/prestosql/sql/planner/assertions/ExchangeMatcher.java index d0e668dee837..17251332fd0b 100644 --- a/presto-main/src/test/java/io/prestosql/sql/planner/assertions/ExchangeMatcher.java +++ b/presto-main/src/test/java/io/prestosql/sql/planner/assertions/ExchangeMatcher.java @@ -13,6 +13,7 @@ */ package io.prestosql.sql.planner.assertions; +import com.google.common.base.MoreObjects.ToStringHelper; import io.prestosql.Session; import io.prestosql.cost.StatsProvider; import io.prestosql.metadata.Metadata; @@ -22,10 +23,12 @@ import io.prestosql.sql.planner.plan.PlanNode; import java.util.List; +import java.util.Optional; import java.util.Set; import static com.google.common.base.MoreObjects.toStringHelper; import static com.google.common.base.Preconditions.checkState; +import static com.google.common.collect.ImmutableList.toImmutableList; import static io.prestosql.sql.planner.assertions.MatchResult.NO_MATCH; import static io.prestosql.sql.planner.assertions.Util.orderingSchemeMatches; import static java.util.Objects.requireNonNull; @@ -37,13 +40,15 @@ final class ExchangeMatcher private final ExchangeNode.Type type; private final List orderBy; private final Set partitionedBy; + private final Optional>> inputs; - public ExchangeMatcher(ExchangeNode.Scope scope, ExchangeNode.Type type, List orderBy, Set partitionedBy) + public ExchangeMatcher(ExchangeNode.Scope scope, ExchangeNode.Type type, List orderBy, Set partitionedBy, Optional>> inputs) { this.scope = scope; this.type = type; this.orderBy = requireNonNull(orderBy, "orderBy is null"); this.partitionedBy = 
requireNonNull(partitionedBy, "partitionedBy is null"); + this.inputs = requireNonNull(inputs, "inputs is null"); } @Override @@ -83,17 +88,33 @@ public MatchResult detailMatches(PlanNode node, StatsProvider stats, Session ses } } + if (inputs.isPresent()) { + if (inputs.get().size() != exchangeNode.getInputs().size()) { + return NO_MATCH; + } + for (int i = 0; i < exchangeNode.getInputs().size(); i++) { + if (!inputs.get().get(i).stream() + .map(symbolAliases::get) + .map(Symbol::from) + .collect(toImmutableList()) + .equals(exchangeNode.getInputs().get(i))) { + return NO_MATCH; + } + } + } + return MatchResult.match(); } @Override public String toString() { - return toStringHelper(this) + ToStringHelper string = toStringHelper(this) .add("scope", scope) .add("type", type) .add("orderBy", orderBy) - .add("partitionedBy", partitionedBy) - .toString(); + .add("partitionedBy", partitionedBy); + inputs.ifPresent(inputs -> string.add("inputs", inputs)); + return string.toString(); } } diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/assertions/PlanMatchPattern.java b/presto-main/src/test/java/io/prestosql/sql/planner/assertions/PlanMatchPattern.java index 43b23cb7e5bf..ff99d5b80f47 100644 --- a/presto-main/src/test/java/io/prestosql/sql/planner/assertions/PlanMatchPattern.java +++ b/presto-main/src/test/java/io/prestosql/sql/planner/assertions/PlanMatchPattern.java @@ -495,9 +495,20 @@ public static PlanMatchPattern exchange(ExchangeNode.Scope scope, ExchangeNode.T } public static PlanMatchPattern exchange(ExchangeNode.Scope scope, ExchangeNode.Type type, List orderBy, Set partitionedBy, PlanMatchPattern... sources) + { + return exchange(scope, type, orderBy, partitionedBy, Optional.empty(), sources); + } + + public static PlanMatchPattern exchange( + ExchangeNode.Scope scope, + ExchangeNode.Type type, + List orderBy, + Set partitionedBy, + Optional>> inputs, + PlanMatchPattern... 
sources) { return node(ExchangeNode.class, sources) - .with(new ExchangeMatcher(scope, type, orderBy, partitionedBy)); + .with(new ExchangeMatcher(scope, type, orderBy, partitionedBy, inputs)); } public static PlanMatchPattern union(PlanMatchPattern... sources) diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneExchangeColumns.java b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneExchangeColumns.java new file mode 100644 index 000000000000..ec8b11c6d20b --- /dev/null +++ b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneExchangeColumns.java @@ -0,0 +1,173 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.sql.planner.iterative.rule; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import io.prestosql.spi.block.SortOrder; +import io.prestosql.sql.planner.OrderingScheme; +import io.prestosql.sql.planner.Symbol; +import io.prestosql.sql.planner.iterative.rule.test.BaseRuleTest; +import io.prestosql.sql.planner.plan.Assignments; +import org.testng.annotations.Test; + +import java.util.Optional; + +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.exchange; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.expression; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.project; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.values; +import static io.prestosql.sql.planner.plan.ExchangeNode.Scope.REMOTE; +import static io.prestosql.sql.planner.plan.ExchangeNode.Type.GATHER; + +public class TestPruneExchangeColumns + extends BaseRuleTest +{ + @Test + public void testDoNotPruneReferencedOutputSymbol() + { + tester().assertThat(new PruneExchangeColumns()) + .on(p -> { + Symbol a = p.symbol("a"); + Symbol b = p.symbol("b"); + return p.project( + Assignments.identity(a), + p.exchange(e -> e + .addSource(p.values(b)) + .addInputsSet(b) + .singleDistributionPartitioningScheme(a))); + }) + .doesNotFire(); + } + + @Test + public void testDoNotPrunePartitioningSymbol() + { + tester().assertThat(new PruneExchangeColumns()) + .on(p -> { + Symbol a = p.symbol("a"); + Symbol b = p.symbol("b"); + return p.project( + Assignments.of(), + p.exchange(e -> e + .addSource(p.values(b)) + .addInputsSet(b) + .fixedHashDistributionParitioningScheme( + ImmutableList.of(a), + ImmutableList.of(a)))); + }) + .doesNotFire(); + } + + @Test + public void testDoNotPruneHashSymbol() + { + tester().assertThat(new PruneExchangeColumns()) + .on(p -> { + Symbol a = p.symbol("a"); + Symbol h = p.symbol("h"); + Symbol 
b = p.symbol("b"); + Symbol h1 = p.symbol("h_1"); + return p.project( + Assignments.identity(a), + p.exchange(e -> e + .addSource(p.values(b, h1)) + .addInputsSet(b, h1) + .fixedHashDistributionParitioningScheme( + ImmutableList.of(a, h), + ImmutableList.of(a), + h))); + }) + .doesNotFire(); + } + + @Test + public void testDoNotPruneOrderingSymbol() + { + tester().assertThat(new PruneExchangeColumns()) + .on(p -> { + Symbol a = p.symbol("a"); + Symbol b = p.symbol("b"); + return p.project( + Assignments.of(), + p.exchange(e -> e + .addSource(p.values(b)) + .addInputsSet(b) + .singleDistributionPartitioningScheme(a) + .orderingScheme(new OrderingScheme(ImmutableList.of(a), ImmutableMap.of(a, SortOrder.ASC_NULLS_FIRST))))); + }) + .doesNotFire(); + } + + @Test + public void testPruneUnreferencedSymbol() + { + tester().assertThat(new PruneExchangeColumns()) + .on(p -> { + Symbol a = p.symbol("a"); + Symbol b = p.symbol("b"); + return p.project( + Assignments.identity(a), + p.exchange(e -> e + .addSource(p.values(a, b)) + .addInputsSet(a, b) + .singleDistributionPartitioningScheme(a, b))); + }) + .matches( + project( + ImmutableMap.of("a", expression("a")), + exchange( + REMOTE, + GATHER, + ImmutableList.of(), + ImmutableSet.of(), + Optional.of(ImmutableList.of(ImmutableList.of("a"))), + values(ImmutableList.of("a", "b"))) + .withExactOutputs("a"))); + } + + @Test + public void testPruneUnreferencedSymbolMultipleSources() + { + tester().assertThat(new PruneExchangeColumns()) + .on(p -> { + Symbol a1 = p.symbol("a_1"); + Symbol a2 = p.symbol("a_2"); + Symbol b1 = p.symbol("b_1"); + Symbol b2 = p.symbol("b_2"); + Symbol c1 = p.symbol("c_1"); + Symbol c2 = p.symbol("c_2"); + return p.project( + Assignments.identity(a1), + p.exchange(e -> e + .addSource(p.values(b1, b2)) + .addInputsSet(b1, b2) + .addSource(p.values(c1, c2)) + .addInputsSet(c1, c2) + .singleDistributionPartitioningScheme(a1, a2))); + }) + .matches( + project( + exchange( + REMOTE, + GATHER, + 
ImmutableList.of(), + ImmutableSet.of(), + Optional.of(ImmutableList.of(ImmutableList.of("b_1"), ImmutableList.of("c_1"))), + values(ImmutableList.of("b_1", "b_2")), + values(ImmutableList.of("c_1", "c_2"))) + .withNumberOfOutputColumns(1))); + } +} From e6ce079f0bf666a789a26f44ae2102b97fcf5f8a Mon Sep 17 00:00:00 2001 From: Rohan Garg Date: Sat, 1 Feb 2020 20:22:06 +0530 Subject: [PATCH 135/519] Session property for setting worker count to be used while planning --- .../java/io/prestosql/SystemSessionProperties.java | 13 ++++++++++++- .../cost/CostCalculatorUsingExchanges.java | 10 ++++++---- .../cost/CostCalculatorWithEstimatedExchanges.java | 12 +++++++----- .../java/io/prestosql/cost/TaskCountEstimator.java | 9 +++++++-- .../rule/DetermineJoinDistributionType.java | 2 +- .../rule/DetermineSemiJoinDistributionType.java | 2 +- 6 files changed, 34 insertions(+), 14 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/SystemSessionProperties.java b/presto-main/src/main/java/io/prestosql/SystemSessionProperties.java index 8faa9bb293a7..eb07b56c5e94 100644 --- a/presto-main/src/main/java/io/prestosql/SystemSessionProperties.java +++ b/presto-main/src/main/java/io/prestosql/SystemSessionProperties.java @@ -128,6 +128,7 @@ public final class SystemSessionProperties public static final String IGNORE_DOWNSTREAM_PREFERENCES = "ignore_downstream_preferences"; public static final String REQUIRED_WORKERS_COUNT = "required_workers_count"; public static final String REQUIRED_WORKERS_MAX_WAIT_TIME = "required_workers_max_wait_time"; + public static final String COST_ESTIMATION_WORKER_COUNT = "cost_estimation_worker_count"; private final List> sessionProperties; @@ -558,7 +559,12 @@ public SystemSessionProperties( REQUIRED_WORKERS_MAX_WAIT_TIME, "Maximum time to wait for minimum number of workers before the query is failed", queryManagerConfig.getRequiredWorkersMaxWait(), - false)); + false), + integerProperty( + COST_ESTIMATION_WORKER_COUNT, + "Set the estimate 
count of workers while planning", + null, + true)); } public List> getSessionProperties() @@ -1003,4 +1009,9 @@ public static Duration getRequiredWorkersMaxWait(Session session) { return session.getSystemProperty(REQUIRED_WORKERS_MAX_WAIT_TIME, Duration.class); } + + public static Integer getCostEstimationWorkerCount(Session session) + { + return session.getSystemProperty(COST_ESTIMATION_WORKER_COUNT, Integer.class); + } } diff --git a/presto-main/src/main/java/io/prestosql/cost/CostCalculatorUsingExchanges.java b/presto-main/src/main/java/io/prestosql/cost/CostCalculatorUsingExchanges.java index ff15f3548113..f308e2d37e62 100644 --- a/presto-main/src/main/java/io/prestosql/cost/CostCalculatorUsingExchanges.java +++ b/presto-main/src/main/java/io/prestosql/cost/CostCalculatorUsingExchanges.java @@ -76,7 +76,7 @@ public CostCalculatorUsingExchanges(TaskCountEstimator taskCountEstimator) @Override public PlanCostEstimate calculateCost(PlanNode node, StatsProvider stats, CostProvider sourcesCosts, Session session, TypeProvider types) { - CostEstimator costEstimator = new CostEstimator(stats, sourcesCosts, types, taskCountEstimator); + CostEstimator costEstimator = new CostEstimator(stats, sourcesCosts, types, taskCountEstimator, session); return node.accept(costEstimator, null); } @@ -87,13 +87,15 @@ private static class CostEstimator private final CostProvider sourcesCosts; private final TypeProvider types; private final TaskCountEstimator taskCountEstimator; + private final Session session; - CostEstimator(StatsProvider stats, CostProvider sourcesCosts, TypeProvider types, TaskCountEstimator taskCountEstimator) + CostEstimator(StatsProvider stats, CostProvider sourcesCosts, TypeProvider types, TaskCountEstimator taskCountEstimator, Session session) { this.stats = requireNonNull(stats, "stats is null"); this.sourcesCosts = requireNonNull(sourcesCosts, "sourcesCosts is null"); this.types = requireNonNull(types, "types is null"); this.taskCountEstimator = 
requireNonNull(taskCountEstimator, "taskCountEstimator is null"); + this.session = requireNonNull(session, "session is null"); } @Override @@ -196,7 +198,7 @@ private LocalCostEstimate calculateJoinCost(PlanNode join, PlanNode probe, PlanN stats, types, replicated, - taskCountEstimator.estimateSourceDistributedTaskCount()); + taskCountEstimator.estimateSourceDistributedTaskCount(session)); LocalCostEstimate joinOutputCost = calculateJoinOutputCost(join); return addPartialComponents(joinInputCost, joinOutputCost); } @@ -239,7 +241,7 @@ private LocalCostEstimate calculateExchangeCost(ExchangeNode node) // assuming that destination is always source distributed // it is true as now replicated exchange is used for joins only // for replicated join probe side is usually source distributed - return calculateRemoteReplicateCost(inputSizeInBytes, taskCountEstimator.estimateSourceDistributedTaskCount()); + return calculateRemoteReplicateCost(inputSizeInBytes, taskCountEstimator.estimateSourceDistributedTaskCount(session)); default: throw new IllegalArgumentException("Unexpected type: " + node.getType()); } diff --git a/presto-main/src/main/java/io/prestosql/cost/CostCalculatorWithEstimatedExchanges.java b/presto-main/src/main/java/io/prestosql/cost/CostCalculatorWithEstimatedExchanges.java index abc0ebf8e627..b73e6501deea 100644 --- a/presto-main/src/main/java/io/prestosql/cost/CostCalculatorWithEstimatedExchanges.java +++ b/presto-main/src/main/java/io/prestosql/cost/CostCalculatorWithEstimatedExchanges.java @@ -60,7 +60,7 @@ public CostCalculatorWithEstimatedExchanges(CostCalculator costCalculator, TaskC @Override public PlanCostEstimate calculateCost(PlanNode node, StatsProvider stats, CostProvider sourcesCosts, Session session, TypeProvider types) { - ExchangeCostEstimator exchangeCostEstimator = new ExchangeCostEstimator(stats, types, taskCountEstimator); + ExchangeCostEstimator exchangeCostEstimator = new ExchangeCostEstimator(stats, types, taskCountEstimator, 
session); PlanCostEstimate costEstimate = costCalculator.calculateCost(node, stats, sourcesCosts, session, types); LocalCostEstimate estimatedExchangeCost = node.accept(exchangeCostEstimator, null); return addExchangeCost(costEstimate, estimatedExchangeCost); @@ -90,12 +90,14 @@ private static class ExchangeCostEstimator private final StatsProvider stats; private final TypeProvider types; private final TaskCountEstimator taskCountEstimator; + private final Session session; - ExchangeCostEstimator(StatsProvider stats, TypeProvider types, TaskCountEstimator taskCountEstimator) + ExchangeCostEstimator(StatsProvider stats, TypeProvider types, TaskCountEstimator taskCountEstimator, Session session) { this.stats = requireNonNull(stats, "stats is null"); this.types = requireNonNull(types, "types is null"); this.taskCountEstimator = requireNonNull(taskCountEstimator, "taskCountEstimator is null"); + this.session = requireNonNull(session, "session is null"); } @Override @@ -133,7 +135,7 @@ public LocalCostEstimate visitJoin(JoinNode node, Void context) stats, types, Objects.equals(node.getDistributionType(), Optional.of(JoinNode.DistributionType.REPLICATED)), - taskCountEstimator.estimateSourceDistributedTaskCount()); + taskCountEstimator.estimateSourceDistributedTaskCount(session)); } @Override @@ -145,7 +147,7 @@ public LocalCostEstimate visitSemiJoin(SemiJoinNode node, Void context) stats, types, Objects.equals(node.getDistributionType(), Optional.of(SemiJoinNode.DistributionType.REPLICATED)), - taskCountEstimator.estimateSourceDistributedTaskCount()); + taskCountEstimator.estimateSourceDistributedTaskCount(session)); } @Override @@ -157,7 +159,7 @@ public LocalCostEstimate visitSpatialJoin(SpatialJoinNode node, Void context) stats, types, node.getDistributionType() == SpatialJoinNode.DistributionType.REPLICATED, - taskCountEstimator.estimateSourceDistributedTaskCount()); + taskCountEstimator.estimateSourceDistributedTaskCount(session)); } @Override diff --git 
a/presto-main/src/main/java/io/prestosql/cost/TaskCountEstimator.java b/presto-main/src/main/java/io/prestosql/cost/TaskCountEstimator.java index d0196c2d675d..0b5c009f4239 100644 --- a/presto-main/src/main/java/io/prestosql/cost/TaskCountEstimator.java +++ b/presto-main/src/main/java/io/prestosql/cost/TaskCountEstimator.java @@ -23,6 +23,7 @@ import java.util.Set; import java.util.function.IntSupplier; +import static io.prestosql.SystemSessionProperties.getCostEstimationWorkerCount; import static io.prestosql.SystemSessionProperties.getHashPartitionCount; import static java.lang.Math.min; import static java.lang.Math.toIntExact; @@ -53,13 +54,17 @@ public TaskCountEstimator(IntSupplier numberOfNodes) this.numberOfNodes = requireNonNull(numberOfNodes, "numberOfNodes is null"); } - public int estimateSourceDistributedTaskCount() + public int estimateSourceDistributedTaskCount(Session session) { + Integer costEstimationWorkerCount = getCostEstimationWorkerCount(session); + if (costEstimationWorkerCount != null) { + return costEstimationWorkerCount; + } return numberOfNodes.getAsInt(); } public int estimateHashedTaskCount(Session session) { - return min(numberOfNodes.getAsInt(), getHashPartitionCount(session)); + return min(estimateSourceDistributedTaskCount(session), getHashPartitionCount(session)); } } diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/DetermineJoinDistributionType.java b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/DetermineJoinDistributionType.java index a47bb827e02a..a405fd31a85c 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/DetermineJoinDistributionType.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/DetermineJoinDistributionType.java @@ -176,7 +176,7 @@ private PlanNodeWithCost getJoinNodeWithCost(Context context, JoinNode possibleJ * * TODO Decision about the distribution should be based on LocalCostEstimate only when PlanCostEstimate 
cannot be calculated. Otherwise cost comparator cannot take query.max-memory into account. */ - int estimatedSourceDistributedTaskCount = taskCountEstimator.estimateSourceDistributedTaskCount(); + int estimatedSourceDistributedTaskCount = taskCountEstimator.estimateSourceDistributedTaskCount(context.getSession()); LocalCostEstimate cost = calculateJoinCostWithoutOutput( possibleJoinNode.getLeft(), possibleJoinNode.getRight(), diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/DetermineSemiJoinDistributionType.java b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/DetermineSemiJoinDistributionType.java index 1845485acd12..4e19718b4391 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/DetermineSemiJoinDistributionType.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/DetermineSemiJoinDistributionType.java @@ -147,7 +147,7 @@ private PlanNodeWithCost getSemiJoinNodeWithCost(SemiJoinNode possibleJoinNode, * TODO Decision about the distribution should be based on LocalCostEstimate only when PlanCostEstimate cannot be calculated. Otherwise cost comparator cannot take query.max-memory into account. 
*/ - int estimatedSourceDistributedTaskCount = taskCountEstimator.estimateSourceDistributedTaskCount(); + int estimatedSourceDistributedTaskCount = taskCountEstimator.estimateSourceDistributedTaskCount(context.getSession()); LocalCostEstimate cost = calculateJoinCostWithoutOutput( possibleJoinNode.getSource(), possibleJoinNode.getFilteringSource(), From 282cc073a9a64368c93b3d829d1503c35fac1e52 Mon Sep 17 00:00:00 2001 From: msosnicki Date: Tue, 25 Feb 2020 10:06:11 +0100 Subject: [PATCH 136/519] Skip unknown types in nested bson in MongoDB --- .../io/prestosql/plugin/mongodb/MongoSession.java | 10 +++++----- .../mongodb/TestMongoIntegrationSmokeTest.java | 13 +++++++++++++ 2 files changed, 18 insertions(+), 5 deletions(-) diff --git a/presto-mongodb/src/main/java/io/prestosql/plugin/mongodb/MongoSession.java b/presto-mongodb/src/main/java/io/prestosql/plugin/mongodb/MongoSession.java index 7a62844818f3..8dc0a9172c1c 100644 --- a/presto-mongodb/src/main/java/io/prestosql/plugin/mongodb/MongoSession.java +++ b/presto-mongodb/src/main/java/io/prestosql/plugin/mongodb/MongoSession.java @@ -593,13 +593,13 @@ else if (value instanceof Document) { for (String key : ((Document) value).keySet()) { Optional fieldType = guessFieldType(((Document) value).get(key)); - if (!fieldType.isPresent()) { - return Optional.empty(); + if (fieldType.isPresent()) { + parameters.add(TypeSignatureParameter.namedTypeParameter(new NamedTypeSignature(Optional.of(new RowFieldName(key)), fieldType.get()))); } - - parameters.add(TypeSignatureParameter.namedTypeParameter(new NamedTypeSignature(Optional.of(new RowFieldName(key)), fieldType.get()))); } - typeSignature = new TypeSignature(StandardTypes.ROW, parameters); + if (!parameters.isEmpty()) { + typeSignature = new TypeSignature(StandardTypes.ROW, parameters); + } } return Optional.ofNullable(typeSignature); diff --git a/presto-mongodb/src/test/java/io/prestosql/plugin/mongodb/TestMongoIntegrationSmokeTest.java 
b/presto-mongodb/src/test/java/io/prestosql/plugin/mongodb/TestMongoIntegrationSmokeTest.java index 806f13f4d6ef..996bd7071e31 100644 --- a/presto-mongodb/src/test/java/io/prestosql/plugin/mongodb/TestMongoIntegrationSmokeTest.java +++ b/presto-mongodb/src/test/java/io/prestosql/plugin/mongodb/TestMongoIntegrationSmokeTest.java @@ -172,6 +172,19 @@ public void testTemporalArrays() assertOneNotNullResult("SELECT col[1] FROM tmp_array8"); } + @Test + public void testSkipUnknownTypes() + { + Document document1 = new Document("col", Document.parse("{\"key1\": \"value1\", \"key2\": null}")); + client.getDatabase("test").getCollection("tmp_guess_schema1").insertOne(document1); + assertQuery("SHOW COLUMNS FROM test.tmp_guess_schema1", "SELECT 'col', 'row(key1 varchar)', '', ''"); + assertQuery("SELECT col.key1 FROM test.tmp_guess_schema1", "SELECT 'value1'"); + + Document document2 = new Document("col", new Document("key1", null)); + client.getDatabase("test").getCollection("tmp_guess_schema2").insertOne(document2); + assertQueryReturnsEmptyResult("SHOW COLUMNS FROM test.tmp_guess_schema2"); + } + @Test public void testMaps() { From fc57389e40afd6e583001533e17b0e3a20fbbafa Mon Sep 17 00:00:00 2001 From: Yuya Ebihara Date: Mon, 6 Apr 2020 22:27:59 +0900 Subject: [PATCH 137/519] Use listCollections command to find MongoDB view MongoDB connector throws "not authorized on db to execute command" error if user cannot access system.views collection. 
--- .../prestosql/plugin/mongodb/MongoSession.java | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/presto-mongodb/src/main/java/io/prestosql/plugin/mongodb/MongoSession.java b/presto-mongodb/src/main/java/io/prestosql/plugin/mongodb/MongoSession.java index 8dc0a9172c1c..9eb0b1f237fc 100644 --- a/presto-mongodb/src/main/java/io/prestosql/plugin/mongodb/MongoSession.java +++ b/presto-mongodb/src/main/java/io/prestosql/plugin/mongodb/MongoSession.java @@ -18,6 +18,7 @@ import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.primitives.Primitives; import com.google.common.primitives.Shorts; @@ -607,8 +608,17 @@ else if (value instanceof Document) { private boolean isView(SchemaTableName tableName) { - MongoCollection views = client.getDatabase(tableName.getSchemaName()).getCollection("system.views"); - Object view = views.find(new Document("_id", tableName.toString())).first(); - return view != null; + Document listCollectionsCommand = new Document(new ImmutableMap.Builder() + .put("listCollections", 1.0) + .put("filter", documentOf("name", tableName.getTableName())) + .put("nameOnly", true) + .build()); + Document cursor = client.getDatabase(tableName.getSchemaName()).runCommand(listCollectionsCommand).get("cursor", Document.class); + List firstBatch = cursor.get("firstBatch", List.class); + if (firstBatch.isEmpty()) { + return false; + } + String type = firstBatch.get(0).getString("type"); + return "view".equals(type); } } From b942223f9765f2a0c1980eeff0d8fb258f6c5d84 Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Tue, 4 Feb 2020 13:50:06 +0100 Subject: [PATCH 138/519] Fix CorrelatedJoin naming in PlanPrinter --- .../java/io/prestosql/sql/planner/planprinter/PlanPrinter.java | 2 +- 1 file 
changed, 1 insertion(+), 1 deletion(-) diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/planprinter/PlanPrinter.java b/presto-main/src/main/java/io/prestosql/sql/planner/planprinter/PlanPrinter.java index 4e973741a7be..8c01610ac044 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/planprinter/PlanPrinter.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/planprinter/PlanPrinter.java @@ -1133,7 +1133,7 @@ public Void visitApply(ApplyNode node, Void context) public Void visitCorrelatedJoin(CorrelatedJoinNode node, Void context) { addNode(node, - "Lateral", + "CorrelatedJoin", format("[%s%s]", node.getCorrelation(), node.getFilter().equals(TRUE_LITERAL) ? "" : " " + node.getFilter())); From 82706b8dc3e2929c500331717a2a760a1278bcea Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Tue, 14 Apr 2020 17:18:46 +0200 Subject: [PATCH 139/519] Minor refactor in correlated Aggregation rewriter --- .../ScalarAggregationToJoinRewriter.java | 24 ++++++++----------- 1 file changed, 10 insertions(+), 14 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/ScalarAggregationToJoinRewriter.java b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/ScalarAggregationToJoinRewriter.java index b029b36e0f8b..250a50bf5512 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/ScalarAggregationToJoinRewriter.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/ScalarAggregationToJoinRewriter.java @@ -71,7 +71,7 @@ public ScalarAggregationToJoinRewriter(Metadata metadata, SymbolAllocator symbol public PlanNode rewriteScalarAggregation(CorrelatedJoinNode correlatedJoinNode, AggregationNode aggregation) { List correlation = correlatedJoinNode.getCorrelation(); - Optional source = planNodeDecorrelator.decorrelateFilters(lookup.resolve(aggregation.getSource()), correlation); + Optional source = 
planNodeDecorrelator.decorrelateFilters(aggregation.getSource(), correlation); if (!source.isPresent()) { return correlatedJoinNode; } @@ -107,7 +107,7 @@ private PlanNode rewriteScalarAggregation( symbolAllocator.newSymbol("unique", BigintType.BIGINT)); JoinNode leftOuterJoin = new JoinNode( - idAllocator.getNextId(), + correlatedJoinNode.getId(), JoinNode.Type.LEFT, inputWithUniqueColumns, scalarAggregationSource, @@ -122,21 +122,17 @@ private PlanNode rewriteScalarAggregation( ImmutableMap.of(), Optional.empty()); - Optional aggregationNode = createAggregationNode( + AggregationNode aggregationNode = createAggregationNode( scalarAggregation, leftOuterJoin, nonNull); - if (!aggregationNode.isPresent()) { - return correlatedJoinNode; - } - Optional subqueryProjection = searchFrom(correlatedJoinNode.getSubquery(), lookup) .where(ProjectNode.class::isInstance) .recurseOnlyWhen(EnforceSingleRowNode.class::isInstance) .findFirst(); - List aggregationOutputSymbols = getTruncatedAggregationSymbols(correlatedJoinNode, aggregationNode.get()); + List aggregationOutputSymbols = getTruncatedAggregationSymbols(correlatedJoinNode, aggregationNode); if (subqueryProjection.isPresent()) { Assignments assignments = Assignments.builder() @@ -146,13 +142,13 @@ private PlanNode rewriteScalarAggregation( return new ProjectNode( idAllocator.getNextId(), - aggregationNode.get(), + aggregationNode, assignments); } else { return new ProjectNode( idAllocator.getNextId(), - aggregationNode.get(), + aggregationNode, Assignments.identity(aggregationOutputSymbols)); } } @@ -165,7 +161,7 @@ private static List getTruncatedAggregationSymbols(CorrelatedJoinNode co .collect(toImmutableList()); } - private Optional createAggregationNode( + private AggregationNode createAggregationNode( AggregationNode scalarAggregation, JoinNode leftOuterJoin, Symbol nonNullableAggregationSourceSymbol) @@ -192,14 +188,14 @@ private Optional createAggregationNode( } } - return Optional.of(new AggregationNode( - 
idAllocator.getNextId(), + return new AggregationNode( + scalarAggregation.getId(), leftOuterJoin, aggregations.build(), singleGroupingSet(leftOuterJoin.getLeft().getOutputSymbols()), ImmutableList.of(), scalarAggregation.getStep(), scalarAggregation.getHashSymbol(), - Optional.empty())); + Optional.empty()); } } From e2e5af6cd8891943c7c98a3e78785d937542eac3 Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Tue, 14 Apr 2020 19:19:01 +0200 Subject: [PATCH 140/519] Fix source property in CorrelatedJoin pattern resolve the child node to skip GroupReference --- .../src/main/java/io/prestosql/sql/planner/plan/Patterns.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/plan/Patterns.java b/presto-main/src/main/java/io/prestosql/sql/planner/plan/Patterns.java index aeaec22d5cbf..10adba2d7fab 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/plan/Patterns.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/plan/Patterns.java @@ -250,7 +250,7 @@ public static Property> correlation() public static Property subquery() { - return property("subquery", CorrelatedJoinNode::getSubquery); + return property("subquery", (node, context) -> context.resolve(node.getSubquery())); } public static Property filter() From f61987cc839d907939179c8e2efc207627b08566 Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Tue, 14 Apr 2020 22:34:22 +0200 Subject: [PATCH 141/519] Add rule to remove redundant EnforceSingleRowNodes --- .../prestosql/sql/planner/PlanOptimizers.java | 2 + .../RemoveRedundantEnforceSingleRowNode.java | 43 ++++++++++++++++ .../sql/planner/TestLogicalPlanner.java | 10 ++-- ...stRemoveRedundantEnforceSingleRowNode.java | 51 +++++++++++++++++++ 4 files changed, 100 insertions(+), 6 deletions(-) create mode 100644 
presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/RemoveRedundantEnforceSingleRowNode.java create mode 100644 presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestRemoveRedundantEnforceSingleRowNode.java diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java b/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java index fe4b84d5f36e..d8724db9d514 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java @@ -119,6 +119,7 @@ import io.prestosql.sql.planner.iterative.rule.RemoveFullSample; import io.prestosql.sql.planner.iterative.rule.RemoveRedundantCrossJoin; import io.prestosql.sql.planner.iterative.rule.RemoveRedundantDistinctLimit; +import io.prestosql.sql.planner.iterative.rule.RemoveRedundantEnforceSingleRowNode; import io.prestosql.sql.planner.iterative.rule.RemoveRedundantIdentityProjections; import io.prestosql.sql.planner.iterative.rule.RemoveRedundantJoin; import io.prestosql.sql.planner.iterative.rule.RemoveRedundantLimit; @@ -354,6 +355,7 @@ public PlanOptimizers( new RemoveRedundantDistinctLimit(), new RemoveRedundantCrossJoin(), new RemoveRedundantJoin(), + new RemoveRedundantEnforceSingleRowNode(), new ImplementFilteredAggregations(metadata), new SingleDistinctAggregationToGroupBy(), new MultipleDistinctAggregationToMarkDistinct(), diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/RemoveRedundantEnforceSingleRowNode.java b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/RemoveRedundantEnforceSingleRowNode.java new file mode 100644 index 000000000000..06692f002b3f --- /dev/null +++ b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/RemoveRedundantEnforceSingleRowNode.java @@ -0,0 +1,43 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance 
with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.sql.planner.iterative.rule; + +import io.prestosql.matching.Captures; +import io.prestosql.matching.Pattern; +import io.prestosql.sql.planner.iterative.Rule; +import io.prestosql.sql.planner.plan.EnforceSingleRowNode; + +import static io.prestosql.sql.planner.optimizations.QueryCardinalityUtil.isScalar; +import static io.prestosql.sql.planner.plan.Patterns.enforceSingleRow; + +public class RemoveRedundantEnforceSingleRowNode + implements Rule +{ + private static final Pattern PATTERN = enforceSingleRow(); + + @Override + public Pattern getPattern() + { + return PATTERN; + } + + @Override + public Result apply(EnforceSingleRowNode node, Captures captures, Context context) + { + if (isScalar(node.getSource(), context.getLookup())) { + return Result.ofPlanNode(node.getSource()); + } + return Result.empty(); + } +} diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/TestLogicalPlanner.java b/presto-main/src/test/java/io/prestosql/sql/planner/TestLogicalPlanner.java index c535aa2a2b30..f4e11a5f7474 100644 --- a/presto-main/src/test/java/io/prestosql/sql/planner/TestLogicalPlanner.java +++ b/presto-main/src/test/java/io/prestosql/sql/planner/TestLogicalPlanner.java @@ -74,7 +74,6 @@ import static io.prestosql.sql.planner.assertions.PlanMatchPattern.assignUniqueId; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.constrainedTableScan; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.constrainedTableScanWithTableLayout; -import static 
io.prestosql.sql.planner.assertions.PlanMatchPattern.enforceSingleRow; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.equiJoinClause; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.exchange; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.expression; @@ -178,7 +177,6 @@ public void testAllFieldsDereferenceOnSubquery() ImmutableMap.of( "output_1", expression("CAST(\"row\" AS ROW(f0 bigint,f1 varchar(25))).f0"), "output_2", expression("CAST(\"row\" AS ROW(f0 bigint,f1 varchar(25))).f1")), - enforceSingleRow( project( ImmutableMap.of("row", expression("ROW(min, max)")), aggregation( @@ -192,7 +190,7 @@ public void testAllFieldsDereferenceOnSubquery() "min_regionkey", functionCall("min", ImmutableList.of("REGIONKEY")), "max_name", functionCall("max", ImmutableList.of("NAME"))), PARTIAL, - tableScan("nation", ImmutableMap.of("NAME", "name", "REGIONKEY", "regionkey")))))))))); + tableScan("nation", ImmutableMap.of("NAME", "name", "REGIONKEY", "regionkey"))))))))); } @Test @@ -406,13 +404,13 @@ public void testSameScalarSubqueryIsAppliedOnlyOnce() assertEquals( countOfMatchingNodes( plan("SELECT * FROM orders WHERE CAST(orderkey AS INTEGER) = (SELECT 1) AND custkey = (SELECT 2) AND CAST(custkey as REAL) != (SELECT 1)"), - EnforceSingleRowNode.class::isInstance), + ValuesNode.class::isInstance), 2); // same query used for left, right and complex join condition assertEquals( countOfMatchingNodes( - plan("SELECT * FROM orders o1 JOIN orders o2 ON o1.orderkey = (SELECT 1) AND o2.orderkey = (SELECT 1) AND o1.orderkey + o2.orderkey = (SELECT 1)"), - EnforceSingleRowNode.class::isInstance), + plan("SELECT * FROM orders o1 JOIN orders o2 ON o1.orderkey = (SELECT 1) AND o2.orderkey = (SELECT 1) AND o1.orderkey + o2.orderkey > (SELECT 1)"), + ValuesNode.class::isInstance), 1); } diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestRemoveRedundantEnforceSingleRowNode.java 
b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestRemoveRedundantEnforceSingleRowNode.java new file mode 100644 index 000000000000..e3f34840789b --- /dev/null +++ b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestRemoveRedundantEnforceSingleRowNode.java @@ -0,0 +1,51 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.sql.planner.iterative.rule; + +import com.google.common.collect.ImmutableList; +import io.prestosql.sql.planner.iterative.rule.test.BaseRuleTest; +import io.prestosql.sql.planner.plan.AggregationNode; +import org.testng.annotations.Test; + +import static io.prestosql.spi.type.BigintType.BIGINT; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.node; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.values; +import static io.prestosql.sql.planner.iterative.rule.test.PlanBuilder.expression; + +public class TestRemoveRedundantEnforceSingleRowNode + extends BaseRuleTest +{ + @Test + public void testRemoveEnforceWhenSourceScalar() + { + tester().assertThat(new RemoveRedundantEnforceSingleRowNode()) + .on(p -> p.enforceSingleRow(p.aggregation(builder -> builder + .addAggregation(p.symbol("c"), expression("count(a)"), ImmutableList.of(BIGINT)) + .globalGrouping() + .source(p.values(p.symbol("a")))))) + .matches(node(AggregationNode.class, values("a"))); + } + + @Test + public void testDoNotFireWhenSourceNotScalar() + { + tester().assertThat(new 
RemoveRedundantEnforceSingleRowNode()) + .on(p -> p.enforceSingleRow(p.values(10, p.symbol("a")))) + .doesNotFire(); + + tester().assertThat(new RemoveRedundantEnforceSingleRowNode()) + .on(p -> p.enforceSingleRow(p.values(p.symbol("a")))) + .doesNotFire(); + } +} From 3be0c5f07ca09c1586a392b86ee87a3cc55ea3e3 Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Wed, 15 Apr 2020 18:40:59 +0200 Subject: [PATCH 142/519] Use fixed pattern in a rule Splits the TransformCorrelatedScalarAggregationToJoin Optimizer rule into a set of two rules with fixed patterns instead of using PlanNodeSearcher inside the rule. After this change, the only patterns supported by the rules are: - CorrelatedJoin - Input - global Aggregation and - CorrelatedJoin - Input - Project - global Aggregation Previously, the rule supported also cases were 1. there were multiple Projections above the Aggregation. This case was handled incorrectly. 2. there were EnforceSingleRowNodes in the subquery above the Aggregation. This functionality is restored by adding a rule to remove redundant EnforceSingleRowNodes. 
--- .../prestosql/sql/planner/PlanOptimizers.java | 15 +- ...formCorrelatedScalarAggregationToJoin.java | 155 ++++++++++++++---- .../ScalarAggregationToJoinRewriter.java | 40 +---- ...elatedScalarAggregationWithProjection.java | 135 +++++++++++++++ ...edScalarAggregationWithoutProjection.java} | 47 ++++-- 5 files changed, 293 insertions(+), 99 deletions(-) create mode 100644 presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestTransformCorrelatedScalarAggregationWithProjection.java rename presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/{TestTransformCorrelatedScalarAggregationToJoin.java => TestTransformCorrelatedScalarAggregationWithoutProjection.java} (80%) diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java b/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java index d8724db9d514..c0ec22e5f7be 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java @@ -405,12 +405,15 @@ public PlanOptimizers( ruleStats, statsCalculator, estimatedExchangesCostCalculator, - ImmutableSet.of( - new RemoveUnreferencedScalarSubqueries(), - new TransformUncorrelatedSubqueryToJoin(), - new TransformUncorrelatedInPredicateSubqueryToSemiJoin(), - new TransformCorrelatedScalarAggregationToJoin(metadata), - new TransformCorrelatedJoinToJoin(metadata))), + ImmutableSet.>builder() + .add( + new RemoveRedundantEnforceSingleRowNode(), + new RemoveUnreferencedScalarSubqueries(), + new TransformUncorrelatedSubqueryToJoin(), + new TransformUncorrelatedInPredicateSubqueryToSemiJoin(), + new TransformCorrelatedJoinToJoin(metadata)) + .addAll(new TransformCorrelatedScalarAggregationToJoin(metadata).rules()) + .build()), new IterativeOptimizer( ruleStats, statsCalculator, diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/TransformCorrelatedScalarAggregationToJoin.java 
b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/TransformCorrelatedScalarAggregationToJoin.java index f63667914085..01668c092834 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/TransformCorrelatedScalarAggregationToJoin.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/TransformCorrelatedScalarAggregationToJoin.java @@ -13,28 +13,38 @@ */ package io.prestosql.sql.planner.iterative.rule; +import com.google.common.annotations.VisibleForTesting; +import com.google.common.collect.ImmutableSet; +import io.prestosql.matching.Capture; import io.prestosql.matching.Captures; import io.prestosql.matching.Pattern; import io.prestosql.metadata.Metadata; -import io.prestosql.sql.planner.iterative.Lookup; +import io.prestosql.sql.planner.Symbol; import io.prestosql.sql.planner.iterative.Rule; import io.prestosql.sql.planner.optimizations.ScalarAggregationToJoinRewriter; import io.prestosql.sql.planner.plan.AggregationNode; +import io.prestosql.sql.planner.plan.Assignments; import io.prestosql.sql.planner.plan.CorrelatedJoinNode; -import io.prestosql.sql.planner.plan.EnforceSingleRowNode; import io.prestosql.sql.planner.plan.PlanNode; import io.prestosql.sql.planner.plan.ProjectNode; -import java.util.Optional; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import static com.google.common.collect.ImmutableList.toImmutableList; +import static io.prestosql.matching.Capture.newCapture; +import static io.prestosql.matching.Pattern.empty; import static io.prestosql.matching.Pattern.nonEmpty; -import static io.prestosql.sql.planner.optimizations.PlanNodeSearcher.searchFrom; -import static io.prestosql.sql.planner.optimizations.QueryCardinalityUtil.isScalar; +import static io.prestosql.sql.planner.plan.Patterns.Aggregation.groupingColumns; import static io.prestosql.sql.planner.plan.Patterns.CorrelatedJoin.correlation; import static 
io.prestosql.sql.planner.plan.Patterns.CorrelatedJoin.filter; +import static io.prestosql.sql.planner.plan.Patterns.CorrelatedJoin.subquery; +import static io.prestosql.sql.planner.plan.Patterns.aggregation; import static io.prestosql.sql.planner.plan.Patterns.correlatedJoin; +import static io.prestosql.sql.planner.plan.Patterns.project; +import static io.prestosql.sql.planner.plan.Patterns.source; import static io.prestosql.sql.tree.BooleanLiteral.TRUE_LITERAL; -import static io.prestosql.util.MorePredicates.isInstanceOfAny; import static java.util.Objects.requireNonNull; /** @@ -66,18 +76,7 @@ * Note that only conjunction predicates in FilterNode are supported */ public class TransformCorrelatedScalarAggregationToJoin - implements Rule { - private static final Pattern PATTERN = correlatedJoin() - .with(nonEmpty(correlation())) - .with(filter().equalTo(TRUE_LITERAL)); // todo non-trivial join filter: adding filter/project on top of aggregation - - @Override - public Pattern getPattern() - { - return PATTERN; - } - private final Metadata metadata; public TransformCorrelatedScalarAggregationToJoin(Metadata metadata) @@ -85,36 +84,120 @@ public TransformCorrelatedScalarAggregationToJoin(Metadata metadata) this.metadata = requireNonNull(metadata, "metadata is null"); } - @Override - public Result apply(CorrelatedJoinNode correlatedJoinNode, Captures captures, Context context) + public Set> rules() + { + return ImmutableSet.of( + new TransformCorrelatedScalarAggregationToJoin.TransformCorrelatedScalarAggregationWithProjection(metadata), + new TransformCorrelatedScalarAggregationToJoin.TransformCorrelatedScalarAggregationWithoutProjection(metadata)); + } + + @VisibleForTesting + static final class TransformCorrelatedScalarAggregationWithProjection + implements Rule { - PlanNode subquery = correlatedJoinNode.getSubquery(); + private static final Capture PROJECTION = newCapture(); + private static final Capture AGGREGATION = newCapture(); + + private static final Pattern 
PATTERN = correlatedJoin() + .with(nonEmpty(correlation())) + .with(filter().equalTo(TRUE_LITERAL)) + .with(subquery().matching(project() + .capturedAs(PROJECTION) + .with(source().matching(aggregation() + .with(empty(groupingColumns())) + .capturedAs(AGGREGATION))))); + + private final Metadata metadata; - if (!isScalar(subquery, context.getLookup())) { - return Result.empty(); + @VisibleForTesting + TransformCorrelatedScalarAggregationWithProjection(Metadata metadata) + { + this.metadata = requireNonNull(metadata, "metadata is null"); } - Optional aggregation = findAggregation(subquery, context.getLookup()); - if (!(aggregation.isPresent() && aggregation.get().getGroupingKeys().isEmpty())) { - return Result.empty(); + @Override + public Pattern getPattern() + { + return PATTERN; } - ScalarAggregationToJoinRewriter rewriter = new ScalarAggregationToJoinRewriter(metadata, context.getSymbolAllocator(), context.getIdAllocator(), context.getLookup()); + @Override + public Result apply(CorrelatedJoinNode correlatedJoinNode, Captures captures, Context context) + { + PlanNode rewrittenNode = new ScalarAggregationToJoinRewriter(metadata, context.getSymbolAllocator(), context.getIdAllocator(), context.getLookup()) + .rewriteScalarAggregation(correlatedJoinNode, captures.get(AGGREGATION)); - PlanNode rewrittenNode = rewriter.rewriteScalarAggregation(correlatedJoinNode, aggregation.get()); + if (rewrittenNode instanceof CorrelatedJoinNode) { + // Failed to decorrelate subquery + return Result.empty(); + } - if (rewrittenNode instanceof CorrelatedJoinNode) { - return Result.empty(); - } + // Restrict outputs and apply projection + Set outputSymbols = new HashSet<>(correlatedJoinNode.getOutputSymbols()); + List expectedAggregationOutputs = rewrittenNode.getOutputSymbols().stream() + .filter(outputSymbols::contains) + .collect(toImmutableList()); + + Assignments assignments = Assignments.builder() + .putIdentities(expectedAggregationOutputs) + 
.putAll(captures.get(PROJECTION).getAssignments()) + .build(); - return Result.ofPlanNode(rewrittenNode); + return Result.ofPlanNode(new ProjectNode( + context.getIdAllocator().getNextId(), + rewrittenNode, + assignments)); + } } - private static Optional findAggregation(PlanNode rootNode, Lookup lookup) + @VisibleForTesting + static final class TransformCorrelatedScalarAggregationWithoutProjection + implements Rule { - return searchFrom(rootNode, lookup) - .where(AggregationNode.class::isInstance) - .recurseOnlyWhen(isInstanceOfAny(ProjectNode.class, EnforceSingleRowNode.class)) - .findFirst(); + private static final Capture AGGREGATION = newCapture(); + + private static final Pattern PATTERN = correlatedJoin() + .with(nonEmpty(correlation())) + .with(filter().equalTo(TRUE_LITERAL)) // todo non-trivial join filter: adding filter/project on top of aggregation + .with(subquery().matching(aggregation() + .with(empty(groupingColumns())) + .capturedAs(AGGREGATION))); + + private final Metadata metadata; + + @VisibleForTesting + TransformCorrelatedScalarAggregationWithoutProjection(Metadata metadata) + { + this.metadata = requireNonNull(metadata, "metadata is null"); + } + + @Override + public Pattern getPattern() + { + return PATTERN; + } + + @Override + public Result apply(CorrelatedJoinNode correlatedJoinNode, Captures captures, Context context) + { + PlanNode rewrittenNode = new ScalarAggregationToJoinRewriter(metadata, context.getSymbolAllocator(), context.getIdAllocator(), context.getLookup()) + .rewriteScalarAggregation(correlatedJoinNode, captures.get(AGGREGATION)); + + if (rewrittenNode instanceof CorrelatedJoinNode) { + // Failed to decorrelate subquery + return Result.empty(); + } + + // Restrict outputs + Set outputSymbols = new HashSet<>(correlatedJoinNode.getOutputSymbols()); + List expectedAggregationOutputs = rewrittenNode.getOutputSymbols().stream() + .filter(outputSymbols::contains) + .collect(toImmutableList()); + + return Result.ofPlanNode(new 
ProjectNode( + context.getIdAllocator().getNextId(), + rewrittenNode, + Assignments.identity(expectedAggregationOutputs))); + } } } diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/ScalarAggregationToJoinRewriter.java b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/ScalarAggregationToJoinRewriter.java index 250a50bf5512..ba64b2593981 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/ScalarAggregationToJoinRewriter.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/ScalarAggregationToJoinRewriter.java @@ -30,22 +30,17 @@ import io.prestosql.sql.planner.plan.AssignUniqueId; import io.prestosql.sql.planner.plan.Assignments; import io.prestosql.sql.planner.plan.CorrelatedJoinNode; -import io.prestosql.sql.planner.plan.EnforceSingleRowNode; import io.prestosql.sql.planner.plan.JoinNode; import io.prestosql.sql.planner.plan.PlanNode; import io.prestosql.sql.planner.plan.ProjectNode; import io.prestosql.sql.tree.Expression; import io.prestosql.sql.tree.QualifiedName; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; -import java.util.Set; -import static com.google.common.collect.ImmutableList.toImmutableList; import static io.prestosql.sql.analyzer.TypeSignatureProvider.fromTypes; -import static io.prestosql.sql.planner.optimizations.PlanNodeSearcher.searchFrom; import static io.prestosql.sql.planner.plan.AggregationNode.singleGroupingSet; import static io.prestosql.sql.tree.BooleanLiteral.TRUE_LITERAL; import static java.util.Objects.requireNonNull; @@ -122,43 +117,10 @@ private PlanNode rewriteScalarAggregation( ImmutableMap.of(), Optional.empty()); - AggregationNode aggregationNode = createAggregationNode( + return createAggregationNode( scalarAggregation, leftOuterJoin, nonNull); - - Optional subqueryProjection = searchFrom(correlatedJoinNode.getSubquery(), lookup) - .where(ProjectNode.class::isInstance) - 
.recurseOnlyWhen(EnforceSingleRowNode.class::isInstance) - .findFirst(); - - List aggregationOutputSymbols = getTruncatedAggregationSymbols(correlatedJoinNode, aggregationNode); - - if (subqueryProjection.isPresent()) { - Assignments assignments = Assignments.builder() - .putIdentities(aggregationOutputSymbols) - .putAll(subqueryProjection.get().getAssignments()) - .build(); - - return new ProjectNode( - idAllocator.getNextId(), - aggregationNode, - assignments); - } - else { - return new ProjectNode( - idAllocator.getNextId(), - aggregationNode, - Assignments.identity(aggregationOutputSymbols)); - } - } - - private static List getTruncatedAggregationSymbols(CorrelatedJoinNode correlatedJoinNode, AggregationNode aggregationNode) - { - Set applySymbols = new HashSet<>(correlatedJoinNode.getOutputSymbols()); - return aggregationNode.getOutputSymbols().stream() - .filter(applySymbols::contains) - .collect(toImmutableList()); } private AggregationNode createAggregationNode( diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestTransformCorrelatedScalarAggregationWithProjection.java b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestTransformCorrelatedScalarAggregationWithProjection.java new file mode 100644 index 000000000000..6368b6f06383 --- /dev/null +++ b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestTransformCorrelatedScalarAggregationWithProjection.java @@ -0,0 +1,135 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.sql.planner.iterative.rule; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import io.prestosql.sql.planner.iterative.rule.TransformCorrelatedScalarAggregationToJoin.TransformCorrelatedScalarAggregationWithProjection; +import io.prestosql.sql.planner.iterative.rule.test.BaseRuleTest; +import io.prestosql.sql.planner.iterative.rule.test.PlanBuilder; +import io.prestosql.sql.planner.plan.Assignments; +import io.prestosql.sql.planner.plan.JoinNode; +import org.testng.annotations.Test; + +import static io.prestosql.spi.type.BigintType.BIGINT; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.aggregation; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.assignUniqueId; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.expression; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.functionCall; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.join; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.project; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.values; + +public class TestTransformCorrelatedScalarAggregationWithProjection + extends BaseRuleTest +{ + @Test + public void doesNotFireOnPlanWithoutCorrelatedJoinNode() + { + tester().assertThat(new TransformCorrelatedScalarAggregationWithProjection(tester().getMetadata())) + .on(p -> p.values(p.symbol("a"))) + .doesNotFire(); + } + + @Test + public void doesNotFireOnCorrelatedWithoutAggregation() + { + tester().assertThat(new TransformCorrelatedScalarAggregationWithProjection(tester().getMetadata())) + .on(p -> p.correlatedJoin( + ImmutableList.of(p.symbol("corr")), + p.values(p.symbol("corr")), + p.values(p.symbol("a")))) + .doesNotFire(); + } + + @Test + public void doesNotFireOnUncorrelated() + { + 
tester().assertThat(new TransformCorrelatedScalarAggregationWithProjection(tester().getMetadata())) + .on(p -> p.correlatedJoin( + ImmutableList.of(), + p.values(p.symbol("a")), + p.values(p.symbol("b")))) + .doesNotFire(); + } + + @Test + public void doesNotFireOnCorrelatedWithNonScalarAggregation() + { + tester().assertThat(new TransformCorrelatedScalarAggregationWithProjection(tester().getMetadata())) + .on(p -> p.correlatedJoin( + ImmutableList.of(p.symbol("corr")), + p.values(p.symbol("corr")), + p.aggregation(ab -> ab + .source(p.values(p.symbol("a"), p.symbol("b"))) + .addAggregation(p.symbol("sum"), PlanBuilder.expression("sum(a)"), ImmutableList.of(BIGINT)) + .singleGroupingSet(p.symbol("b"))))) + .doesNotFire(); + } + + @Test + public void doesNotFireOnMultipleProjections() + { + tester().assertThat(new TransformCorrelatedScalarAggregationWithProjection(tester().getMetadata())) + .on(p -> p.correlatedJoin( + ImmutableList.of(p.symbol("corr")), + p.values(p.symbol("corr")), + p.project( + Assignments.of(p.symbol("expr_2"), p.expression("expr - 1")), + p.project( + Assignments.of(p.symbol("expr"), p.expression("sum + 1")), + p.aggregation(ab -> ab + .source(p.values(p.symbol("a"), p.symbol("b"))) + .addAggregation(p.symbol("sum"), PlanBuilder.expression("sum(a)"), ImmutableList.of(BIGINT)) + .globalGrouping()))))) + .doesNotFire(); + } + + @Test + public void doesNotFireOnSubqueryWithoutProjection() + { + tester().assertThat(new TransformCorrelatedScalarAggregationWithProjection(tester().getMetadata())) + .on(p -> p.correlatedJoin( + ImmutableList.of(p.symbol("corr")), + p.values(p.symbol("corr")), + p.aggregation(ab -> ab + .source(p.values(p.symbol("a"), p.symbol("b"))) + .addAggregation(p.symbol("sum"), PlanBuilder.expression("sum(a)"), ImmutableList.of(BIGINT)) + .globalGrouping()))) + .doesNotFire(); + } + + @Test + public void rewritesOnSubqueryWithProjection() + { + tester().assertThat(new 
TransformCorrelatedScalarAggregationWithProjection(tester().getMetadata())) + .on(p -> p.correlatedJoin( + ImmutableList.of(p.symbol("corr")), + p.values(p.symbol("corr")), + p.project(Assignments.of(p.symbol("expr"), p.expression("sum + 1")), + p.aggregation(ab -> ab + .source(p.values(p.symbol("a"), p.symbol("b"))) + .addAggregation(p.symbol("sum"), PlanBuilder.expression("sum(a)"), ImmutableList.of(BIGINT)) + .globalGrouping())))) + .matches( + project(ImmutableMap.of("corr", expression("corr"), "expr", expression("(\"sum_1\" + 1)")), + aggregation(ImmutableMap.of("sum_1", functionCall("sum", ImmutableList.of("a"))), + join(JoinNode.Type.LEFT, + ImmutableList.of(), + assignUniqueId("unique", + values(ImmutableMap.of("corr", 0))), + project(ImmutableMap.of("non_null", expression("true")), + values(ImmutableMap.of("a", 0, "b", 1))))))); + } +} diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestTransformCorrelatedScalarAggregationToJoin.java b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestTransformCorrelatedScalarAggregationWithoutProjection.java similarity index 80% rename from presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestTransformCorrelatedScalarAggregationToJoin.java rename to presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestTransformCorrelatedScalarAggregationWithoutProjection.java index 257d798598f5..6f319a2f0f2c 100644 --- a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestTransformCorrelatedScalarAggregationToJoin.java +++ b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestTransformCorrelatedScalarAggregationWithoutProjection.java @@ -15,6 +15,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import io.prestosql.sql.planner.iterative.rule.TransformCorrelatedScalarAggregationToJoin.TransformCorrelatedScalarAggregationWithoutProjection; import 
io.prestosql.sql.planner.iterative.rule.test.BaseRuleTest; import io.prestosql.sql.planner.iterative.rule.test.PlanBuilder; import io.prestosql.sql.planner.plan.Assignments; @@ -30,13 +31,13 @@ import static io.prestosql.sql.planner.assertions.PlanMatchPattern.project; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.values; -public class TestTransformCorrelatedScalarAggregationToJoin +public class TestTransformCorrelatedScalarAggregationWithoutProjection extends BaseRuleTest { @Test - public void doesNotFireOnPlanWithoutApplyNode() + public void doesNotFireOnPlanWithoutCorrelatedJoinNode() { - tester().assertThat(new TransformCorrelatedScalarAggregationToJoin(tester().getMetadata())) + tester().assertThat(new TransformCorrelatedScalarAggregationWithoutProjection(tester().getMetadata())) .on(p -> p.values(p.symbol("a"))) .doesNotFire(); } @@ -44,7 +45,7 @@ public void doesNotFireOnPlanWithoutApplyNode() @Test public void doesNotFireOnCorrelatedWithoutAggregation() { - tester().assertThat(new TransformCorrelatedScalarAggregationToJoin(tester().getMetadata())) + tester().assertThat(new TransformCorrelatedScalarAggregationWithoutProjection(tester().getMetadata())) .on(p -> p.correlatedJoin( ImmutableList.of(p.symbol("corr")), p.values(p.symbol("corr")), @@ -55,7 +56,7 @@ public void doesNotFireOnCorrelatedWithoutAggregation() @Test public void doesNotFireOnUncorrelated() { - tester().assertThat(new TransformCorrelatedScalarAggregationToJoin(tester().getMetadata())) + tester().assertThat(new TransformCorrelatedScalarAggregationWithoutProjection(tester().getMetadata())) .on(p -> p.correlatedJoin( ImmutableList.of(), p.values(p.symbol("a")), @@ -66,7 +67,7 @@ public void doesNotFireOnUncorrelated() @Test public void doesNotFireOnCorrelatedWithNonScalarAggregation() { - tester().assertThat(new TransformCorrelatedScalarAggregationToJoin(tester().getMetadata())) + tester().assertThat(new 
TransformCorrelatedScalarAggregationWithoutProjection(tester().getMetadata())) .on(p -> p.correlatedJoin( ImmutableList.of(p.symbol("corr")), p.values(p.symbol("corr")), @@ -77,10 +78,28 @@ public void doesNotFireOnCorrelatedWithNonScalarAggregation() .doesNotFire(); } + @Test + public void doesNotFireOnMultipleProjections() + { + tester().assertThat(new TransformCorrelatedScalarAggregationWithoutProjection(tester().getMetadata())) + .on(p -> p.correlatedJoin( + ImmutableList.of(p.symbol("corr")), + p.values(p.symbol("corr")), + p.project( + Assignments.of(p.symbol("expr_2"), p.expression("expr - 1")), + p.project( + Assignments.of(p.symbol("expr"), p.expression("sum + 1")), + p.aggregation(ab -> ab + .source(p.values(p.symbol("a"), p.symbol("b"))) + .addAggregation(p.symbol("sum"), PlanBuilder.expression("sum(a)"), ImmutableList.of(BIGINT)) + .globalGrouping()))))) + .doesNotFire(); + } + @Test public void rewritesOnSubqueryWithoutProjection() { - tester().assertThat(new TransformCorrelatedScalarAggregationToJoin(tester().getMetadata())) + tester().assertThat(new TransformCorrelatedScalarAggregationWithoutProjection(tester().getMetadata())) .on(p -> p.correlatedJoin( ImmutableList.of(p.symbol("corr")), p.values(p.symbol("corr")), @@ -102,7 +121,7 @@ public void rewritesOnSubqueryWithoutProjection() @Test public void rewritesOnSubqueryWithProjection() { - tester().assertThat(new TransformCorrelatedScalarAggregationToJoin(tester().getMetadata())) + tester().assertThat(new TransformCorrelatedScalarAggregationWithoutProjection(tester().getMetadata())) .on(p -> p.correlatedJoin( ImmutableList.of(p.symbol("corr")), p.values(p.symbol("corr")), @@ -111,21 +130,13 @@ public void rewritesOnSubqueryWithProjection() .source(p.values(p.symbol("a"), p.symbol("b"))) .addAggregation(p.symbol("sum"), PlanBuilder.expression("sum(a)"), ImmutableList.of(BIGINT)) .globalGrouping())))) - .matches( - project(ImmutableMap.of("corr", expression("corr"), "expr", expression("(\"sum_1\" + 
1)")), - aggregation(ImmutableMap.of("sum_1", functionCall("sum", ImmutableList.of("a"))), - join(JoinNode.Type.LEFT, - ImmutableList.of(), - assignUniqueId("unique", - values(ImmutableMap.of("corr", 0))), - project(ImmutableMap.of("non_null", expression("true")), - values(ImmutableMap.of("a", 0, "b", 1))))))); + .doesNotFire(); } @Test public void testSubqueryWithCount() { - tester().assertThat(new TransformCorrelatedScalarAggregationToJoin(tester().getMetadata())) + tester().assertThat(new TransformCorrelatedScalarAggregationWithoutProjection(tester().getMetadata())) .on(p -> p.correlatedJoin( ImmutableList.of(p.symbol("corr")), p.values(p.symbol("corr")), From e66b8022c6cc25a55daac5f5a7661cb1b314564d Mon Sep 17 00:00:00 2001 From: James Petty Date: Tue, 14 Apr 2020 08:09:34 -0400 Subject: [PATCH 143/519] Refactor AWS SDK Client Metrics Collection Adds a parent abstract class to PrestoS3FileSystemMetricCollector so that other SDK clients can share the metrics collector support. Adds reporting for client retry pause time indicating how long the thread was asleep between request retries in the client itself. Fixes the reporting of client timings. Previously, when the client retried a request, only the first request timings would be recorded in the stats. Now, all request timings are reported individually.
--- .../hive/aws/AbstractSdkMetricsCollector.java | 86 +++++++++++++++++++ .../s3/PrestoS3FileSystemMetricCollector.java | 65 +++++++------- .../hive/s3/PrestoS3FileSystemStats.java | 13 +++ 3 files changed, 128 insertions(+), 36 deletions(-) create mode 100644 presto-hive/src/main/java/io/prestosql/plugin/hive/aws/AbstractSdkMetricsCollector.java diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/aws/AbstractSdkMetricsCollector.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/aws/AbstractSdkMetricsCollector.java new file mode 100644 index 000000000000..1188bb6198d4 --- /dev/null +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/aws/AbstractSdkMetricsCollector.java @@ -0,0 +1,86 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.plugin.hive.aws; + +import com.amazonaws.Request; +import com.amazonaws.Response; +import com.amazonaws.metrics.RequestMetricCollector; +import com.amazonaws.util.AWSRequestMetrics; +import com.amazonaws.util.TimingInfo; +import io.airlift.units.Duration; + +import java.util.List; +import java.util.function.Consumer; + +import static com.amazonaws.util.AWSRequestMetrics.Field.ClientExecuteTime; +import static com.amazonaws.util.AWSRequestMetrics.Field.HttpClientRetryCount; +import static com.amazonaws.util.AWSRequestMetrics.Field.HttpRequestTime; +import static com.amazonaws.util.AWSRequestMetrics.Field.RequestCount; +import static com.amazonaws.util.AWSRequestMetrics.Field.RetryPauseTime; +import static com.amazonaws.util.AWSRequestMetrics.Field.ThrottleException; +import static java.util.concurrent.TimeUnit.MILLISECONDS; + +public abstract class AbstractSdkMetricsCollector + extends RequestMetricCollector +{ + @Override + public final void collectMetrics(Request request, Response response) + { + TimingInfo timingInfo = request.getAWSRequestMetrics().getTimingInfo(); + + Number requestCounts = timingInfo.getCounter(RequestCount.name()); + if (requestCounts != null) { + recordRequestCount(requestCounts.longValue()); + } + + Number retryCounts = timingInfo.getCounter(HttpClientRetryCount.name()); + if (retryCounts != null) { + recordRetryCount(retryCounts.longValue()); + } + + Number throttleExceptions = timingInfo.getCounter(ThrottleException.name()); + if (throttleExceptions != null) { + recordThrottleExceptionCount(throttleExceptions.longValue()); + } + + recordSubTimingDurations(timingInfo, HttpRequestTime, this::recordHttpRequestTime); + recordSubTimingDurations(timingInfo, ClientExecuteTime, this::recordClientExecutionTime); + recordSubTimingDurations(timingInfo, RetryPauseTime, this::recordRetryPauseTime); + } + + protected abstract void recordRequestCount(long count); + + protected abstract void recordRetryCount(long count); + + 
protected abstract void recordThrottleExceptionCount(long count); + + protected abstract void recordHttpRequestTime(Duration duration); + + protected abstract void recordClientExecutionTime(Duration duration); + + protected abstract void recordRetryPauseTime(Duration duration); + + private static void recordSubTimingDurations(TimingInfo timingInfo, AWSRequestMetrics.Field field, Consumer consumer) + { + List subTimings = timingInfo.getAllSubMeasurements(field.name()); + if (subTimings != null) { + for (TimingInfo subTiming : subTimings) { + Double millis = subTiming.getTimeTakenMillisIfKnown(); + if (millis != null) { + consumer.accept(new Duration(millis, MILLISECONDS)); + } + } + } + } +} diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/PrestoS3FileSystemMetricCollector.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/PrestoS3FileSystemMetricCollector.java index 43294a670b44..3c05883c09f9 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/PrestoS3FileSystemMetricCollector.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/PrestoS3FileSystemMetricCollector.java @@ -13,23 +13,13 @@ */ package io.prestosql.plugin.hive.s3; -import com.amazonaws.Request; -import com.amazonaws.Response; -import com.amazonaws.metrics.RequestMetricCollector; -import com.amazonaws.util.AWSRequestMetrics; -import com.amazonaws.util.TimingInfo; import io.airlift.units.Duration; +import io.prestosql.plugin.hive.aws.AbstractSdkMetricsCollector; -import static com.amazonaws.util.AWSRequestMetrics.Field.ClientExecuteTime; -import static com.amazonaws.util.AWSRequestMetrics.Field.HttpClientRetryCount; -import static com.amazonaws.util.AWSRequestMetrics.Field.HttpRequestTime; -import static com.amazonaws.util.AWSRequestMetrics.Field.RequestCount; -import static com.amazonaws.util.AWSRequestMetrics.Field.ThrottleException; import static java.util.Objects.requireNonNull; -import static java.util.concurrent.TimeUnit.MILLISECONDS; public 
class PrestoS3FileSystemMetricCollector - extends RequestMetricCollector + extends AbstractSdkMetricsCollector { private final PrestoS3FileSystemStats stats; @@ -39,35 +29,38 @@ public PrestoS3FileSystemMetricCollector(PrestoS3FileSystemStats stats) } @Override - public void collectMetrics(Request request, Response response) + protected void recordRequestCount(long count) { - AWSRequestMetrics metrics = request.getAWSRequestMetrics(); - - TimingInfo timingInfo = metrics.getTimingInfo(); - Number requestCounts = timingInfo.getCounter(RequestCount.name()); - Number retryCounts = timingInfo.getCounter(HttpClientRetryCount.name()); - Number throttleExceptions = timingInfo.getCounter(ThrottleException.name()); - TimingInfo requestTime = timingInfo.getSubMeasurement(HttpRequestTime.name()); - TimingInfo clientExecuteTime = timingInfo.getSubMeasurement(ClientExecuteTime.name()); + stats.updateAwsRequestCount(count); + } - if (requestCounts != null) { - stats.updateAwsRequestCount(requestCounts.longValue()); - } + @Override + protected void recordRetryCount(long count) + { + stats.updateAwsRetryCount(count); + } - if (retryCounts != null) { - stats.updateAwsRetryCount(retryCounts.longValue()); - } + @Override + protected void recordThrottleExceptionCount(long count) + { + stats.updateAwsThrottleExceptionsCount(count); + } - if (throttleExceptions != null) { - stats.updateAwsThrottleExceptionsCount(throttleExceptions.longValue()); - } + @Override + protected void recordHttpRequestTime(Duration duration) + { + stats.addAwsRequestTime(duration); + } - if (requestTime != null && requestTime.getTimeTakenMillisIfKnown() != null) { - stats.addAwsRequestTime(new Duration(requestTime.getTimeTakenMillisIfKnown(), MILLISECONDS)); - } + @Override + protected void recordClientExecutionTime(Duration duration) + { + stats.addAwsClientExecuteTime(duration); + } - if (clientExecuteTime != null && clientExecuteTime.getTimeTakenMillisIfKnown() != null) { - stats.addAwsClientExecuteTime(new 
Duration(clientExecuteTime.getTimeTakenMillisIfKnown(), MILLISECONDS)); - } + @Override + protected void recordRetryPauseTime(Duration duration) + { + stats.addAwsClientRetryPauseTime(duration); } } diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/PrestoS3FileSystemStats.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/PrestoS3FileSystemStats.java index e82825f48f33..16886ed62221 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/PrestoS3FileSystemStats.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/PrestoS3FileSystemStats.java @@ -51,6 +51,7 @@ public class PrestoS3FileSystemStats private final CounterStat awsThrottleExceptions = new CounterStat(); private final TimeStat awsRequestTime = new TimeStat(MILLISECONDS); private final TimeStat awsClientExecuteTime = new TimeStat(MILLISECONDS); + private final TimeStat awsClientRetryPauseTime = new TimeStat(MILLISECONDS); @Managed @Nested @@ -185,6 +186,13 @@ public TimeStat getAwsClientExecuteTime() return awsClientExecuteTime; } + @Managed + @Nested + public TimeStat getAwsClientRetryPauseTime() + { + return awsClientRetryPauseTime; + } + @Managed @Nested public CounterStat getGetObjectRetries() @@ -302,6 +310,11 @@ public void addAwsClientExecuteTime(Duration duration) awsClientExecuteTime.add(duration); } + public void addAwsClientRetryPauseTime(Duration duration) + { + awsClientRetryPauseTime.add(duration); + } + public void newGetObjectRetry() { getObjectRetries.update(1); From 0e24cb7f4ff191d2e125b920cf46ca1fcaa70f7e Mon Sep 17 00:00:00 2001 From: James Petty Date: Tue, 14 Apr 2020 09:18:00 -0400 Subject: [PATCH 144/519] Use shared metrics collector in PrestoS3ClientFactory Previously, an instance of PrestoS3FileSystemStats was created in PrestoS3ClientFactory which means it would not report S3 client stats to the instance registered with JMX. This would only have affected PrestoS3Select clients.
Now the same metric instance is shared with PrestoS3FileSystem --- .../plugin/hive/s3select/PrestoS3ClientFactory.java | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/s3select/PrestoS3ClientFactory.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3select/PrestoS3ClientFactory.java index 12d0a25d12ee..25b1bb57a745 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/s3select/PrestoS3ClientFactory.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3select/PrestoS3ClientFactory.java @@ -20,15 +20,14 @@ import com.amazonaws.auth.AWSStaticCredentialsProvider; import com.amazonaws.auth.BasicAWSCredentials; import com.amazonaws.auth.DefaultAWSCredentialsProviderChain; -import com.amazonaws.metrics.RequestMetricCollector; import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.AmazonS3Builder; import com.amazonaws.services.s3.AmazonS3Client; import io.airlift.units.Duration; import io.prestosql.plugin.hive.HiveConfig; import io.prestosql.plugin.hive.s3.HiveS3Config; +import io.prestosql.plugin.hive.s3.PrestoS3FileSystem; import io.prestosql.plugin.hive.s3.PrestoS3FileSystemMetricCollector; -import io.prestosql.plugin.hive.s3.PrestoS3FileSystemStats; import org.apache.hadoop.conf.Configuration; import javax.annotation.concurrent.GuardedBy; @@ -106,13 +105,11 @@ private AmazonS3 createS3Client(Configuration config) .withUserAgentPrefix(userAgentPrefix) .withUserAgentSuffix(enabled ? "presto-select" : "presto"); - PrestoS3FileSystemStats stats = new PrestoS3FileSystemStats(); - RequestMetricCollector metricCollector = new PrestoS3FileSystemMetricCollector(stats); AWSCredentialsProvider awsCredentialsProvider = getAwsCredentialsProvider(config, defaults); AmazonS3Builder, ? 
extends AmazonS3> clientBuilder = AmazonS3Client.builder() .withCredentials(awsCredentialsProvider) .withClientConfiguration(clientConfiguration) - .withMetricsCollector(metricCollector) + .withMetricsCollector(new PrestoS3FileSystemMetricCollector(PrestoS3FileSystem.getFileSystemStats())) .enablePathStyleAccess(); boolean regionOrEndpointSet = false; From d5730ae94889fe3436c4ccc432f2aaf077626759 Mon Sep 17 00:00:00 2001 From: David Phillips Date: Tue, 14 Apr 2020 17:39:47 -0700 Subject: [PATCH 145/519] Extract top level ObjectNameGeneratorConfig class --- .../ConnectorObjectNameGeneratorModule.java | 22 ++----------- .../base/jmx/ObjectNameGeneratorConfig.java | 33 +++++++++++++++++++ .../db/ObjectNameGeneratorConfig.java | 33 +++++++++++++++++++ .../db/PrefixObjectNameGeneratorModule.java | 22 ++----------- 4 files changed, 70 insertions(+), 40 deletions(-) create mode 100644 presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/jmx/ObjectNameGeneratorConfig.java create mode 100644 presto-resource-group-managers/src/main/java/io/prestosql/plugin/resourcegroups/db/ObjectNameGeneratorConfig.java diff --git a/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/jmx/ConnectorObjectNameGeneratorModule.java b/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/jmx/ConnectorObjectNameGeneratorModule.java index 9f7e6a2b903c..49ebc670f0d7 100644 --- a/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/jmx/ConnectorObjectNameGeneratorModule.java +++ b/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/jmx/ConnectorObjectNameGeneratorModule.java @@ -17,7 +17,6 @@ import com.google.inject.Binder; import com.google.inject.Module; import com.google.inject.Provides; -import io.airlift.configuration.Config; import org.weakref.jmx.ObjectNameBuilder; import org.weakref.jmx.ObjectNameGenerator; @@ -44,33 +43,16 @@ public ConnectorObjectNameGeneratorModule(String catalogName, String packageName @Override public void configure(Binder 
binder) { - configBinder(binder).bindConfig(ConnectorObjectNameGeneratorConfig.class); + configBinder(binder).bindConfig(ObjectNameGeneratorConfig.class); } @Provides - ObjectNameGenerator createPrefixObjectNameGenerator(ConnectorObjectNameGeneratorConfig config) + ObjectNameGenerator createPrefixObjectNameGenerator(ObjectNameGeneratorConfig config) { String domainBase = firstNonNull(config.getDomainBase(), defaultDomainBase); return new ConnectorObjectNameGenerator(packageName, domainBase, catalogName); } - public static class ConnectorObjectNameGeneratorConfig - { - private String domainBase; - - public String getDomainBase() - { - return domainBase; - } - - @Config("jmx.base-name") - public ConnectorObjectNameGeneratorConfig setDomainBase(String domainBase) - { - this.domainBase = domainBase; - return this; - } - } - public static final class ConnectorObjectNameGenerator implements ObjectNameGenerator { diff --git a/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/jmx/ObjectNameGeneratorConfig.java b/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/jmx/ObjectNameGeneratorConfig.java new file mode 100644 index 000000000000..1b6ccd204032 --- /dev/null +++ b/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/jmx/ObjectNameGeneratorConfig.java @@ -0,0 +1,33 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.plugin.base.jmx; + +import io.airlift.configuration.Config; + +public class ObjectNameGeneratorConfig +{ + private String domainBase; + + public String getDomainBase() + { + return domainBase; + } + + @Config("jmx.base-name") + public ObjectNameGeneratorConfig setDomainBase(String domainBase) + { + this.domainBase = domainBase; + return this; + } +} diff --git a/presto-resource-group-managers/src/main/java/io/prestosql/plugin/resourcegroups/db/ObjectNameGeneratorConfig.java b/presto-resource-group-managers/src/main/java/io/prestosql/plugin/resourcegroups/db/ObjectNameGeneratorConfig.java new file mode 100644 index 000000000000..e165c5e0cec9 --- /dev/null +++ b/presto-resource-group-managers/src/main/java/io/prestosql/plugin/resourcegroups/db/ObjectNameGeneratorConfig.java @@ -0,0 +1,33 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.plugin.resourcegroups.db; + +import io.airlift.configuration.Config; + +public class ObjectNameGeneratorConfig +{ + private String domainBase; + + public String getDomainBase() + { + return domainBase; + } + + @Config("jmx.base-name") + public ObjectNameGeneratorConfig setDomainBase(String domainBase) + { + this.domainBase = domainBase; + return this; + } +} diff --git a/presto-resource-group-managers/src/main/java/io/prestosql/plugin/resourcegroups/db/PrefixObjectNameGeneratorModule.java b/presto-resource-group-managers/src/main/java/io/prestosql/plugin/resourcegroups/db/PrefixObjectNameGeneratorModule.java index 7ec4971f0c62..1b2faa967702 100644 --- a/presto-resource-group-managers/src/main/java/io/prestosql/plugin/resourcegroups/db/PrefixObjectNameGeneratorModule.java +++ b/presto-resource-group-managers/src/main/java/io/prestosql/plugin/resourcegroups/db/PrefixObjectNameGeneratorModule.java @@ -16,7 +16,6 @@ import com.google.inject.Binder; import com.google.inject.Module; import com.google.inject.Provides; -import io.airlift.configuration.Config; import org.weakref.jmx.ObjectNameBuilder; import org.weakref.jmx.ObjectNameGenerator; @@ -33,11 +32,11 @@ public class PrefixObjectNameGeneratorModule @Override public void configure(Binder binder) { - configBinder(binder).bindConfig(PrefixObjectNameGeneratorConfig.class); + configBinder(binder).bindConfig(ObjectNameGeneratorConfig.class); } @Provides - ObjectNameGenerator createPrefixObjectNameGenerator(PrefixObjectNameGeneratorConfig config) + ObjectNameGenerator createPrefixObjectNameGenerator(ObjectNameGeneratorConfig config) { String domainBase = DEFAULT_DOMAIN_BASE; if (config.getDomainBase() != null) { @@ -46,23 +45,6 @@ ObjectNameGenerator createPrefixObjectNameGenerator(PrefixObjectNameGeneratorCon return new PrefixObjectNameGenerator(domainBase); } - public static class PrefixObjectNameGeneratorConfig - { - private String domainBase; - - public String getDomainBase() - { - return 
domainBase; - } - - @Config("jmx.base-name") - public PrefixObjectNameGeneratorConfig setDomainBase(String domainBase) - { - this.domainBase = domainBase; - return this; - } - } - public static final class PrefixObjectNameGenerator implements ObjectNameGenerator { From 15f1e318232c7ea93c79de984d60e8aec2937f03 Mon Sep 17 00:00:00 2001 From: David Phillips Date: Tue, 14 Apr 2020 17:52:24 -0700 Subject: [PATCH 146/519] Move PrefixObjectNameGenerator to plugin toolkit --- .../jmx}/PrefixObjectNameGeneratorModule.java | 31 ++++++++++++------- ...ourceGroupConfigurationManagerFactory.java | 3 +- 2 files changed, 21 insertions(+), 13 deletions(-) rename {presto-resource-group-managers/src/main/java/io/prestosql/plugin/resourcegroups/db => presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/jmx}/PrefixObjectNameGeneratorModule.java (62%) diff --git a/presto-resource-group-managers/src/main/java/io/prestosql/plugin/resourcegroups/db/PrefixObjectNameGeneratorModule.java b/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/jmx/PrefixObjectNameGeneratorModule.java similarity index 62% rename from presto-resource-group-managers/src/main/java/io/prestosql/plugin/resourcegroups/db/PrefixObjectNameGeneratorModule.java rename to presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/jmx/PrefixObjectNameGeneratorModule.java index 1b2faa967702..0b38928cd5f3 100644 --- a/presto-resource-group-managers/src/main/java/io/prestosql/plugin/resourcegroups/db/PrefixObjectNameGeneratorModule.java +++ b/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/jmx/PrefixObjectNameGeneratorModule.java @@ -11,7 +11,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package io.prestosql.plugin.resourcegroups.db; +package io.prestosql.plugin.base.jmx; import com.google.inject.Binder; import com.google.inject.Module; @@ -21,13 +21,21 @@ import java.util.Map; +import static com.google.common.base.MoreObjects.firstNonNull; import static io.airlift.configuration.ConfigBinder.configBinder; +import static java.util.Objects.requireNonNull; public class PrefixObjectNameGeneratorModule implements Module { - private static final String CONNECTOR_PACKAGE_NAME = "io.prestosql.plugin.resourcegroups.db"; - private static final String DEFAULT_DOMAIN_BASE = "presto.plugin.resourcegroups.db"; + private final String packageName; + private final String defaultDomainBase; + + public PrefixObjectNameGeneratorModule(String packageName, String defaultDomainBase) + { + this.packageName = requireNonNull(packageName, "packageName is null"); + this.defaultDomainBase = requireNonNull(defaultDomainBase, "defaultDomainBase is null"); + } @Override public void configure(Binder binder) @@ -38,21 +46,20 @@ public void configure(Binder binder) @Provides ObjectNameGenerator createPrefixObjectNameGenerator(ObjectNameGeneratorConfig config) { - String domainBase = DEFAULT_DOMAIN_BASE; - if (config.getDomainBase() != null) { - domainBase = config.getDomainBase(); - } - return new PrefixObjectNameGenerator(domainBase); + String domainBase = firstNonNull(config.getDomainBase(), defaultDomainBase); + return new PrefixObjectNameGenerator(packageName, domainBase); } public static final class PrefixObjectNameGenerator implements ObjectNameGenerator { + private final String packageName; private final String domainBase; - public PrefixObjectNameGenerator(String domainBase) + public PrefixObjectNameGenerator(String packageName, String domainBase) { - this.domainBase = domainBase; + this.packageName = requireNonNull(packageName, "packageName is null"); + this.domainBase = requireNonNull(domainBase, "domainBase is null"); } @Override @@ -66,8 +73,8 @@ public String 
generatedNameOf(Class type, Map properties) private String toDomain(Class type) { String domain = type.getPackage().getName(); - if (domain.startsWith(CONNECTOR_PACKAGE_NAME)) { - domain = domainBase + domain.substring(CONNECTOR_PACKAGE_NAME.length()); + if (domain.startsWith(packageName)) { + domain = domainBase + domain.substring(packageName.length()); } return domain; } diff --git a/presto-resource-group-managers/src/main/java/io/prestosql/plugin/resourcegroups/db/DbResourceGroupConfigurationManagerFactory.java b/presto-resource-group-managers/src/main/java/io/prestosql/plugin/resourcegroups/db/DbResourceGroupConfigurationManagerFactory.java index 5b01c63deecf..197431f9cb34 100644 --- a/presto-resource-group-managers/src/main/java/io/prestosql/plugin/resourcegroups/db/DbResourceGroupConfigurationManagerFactory.java +++ b/presto-resource-group-managers/src/main/java/io/prestosql/plugin/resourcegroups/db/DbResourceGroupConfigurationManagerFactory.java @@ -17,6 +17,7 @@ import io.airlift.bootstrap.Bootstrap; import io.airlift.json.JsonModule; import io.prestosql.plugin.base.jmx.MBeanServerModule; +import io.prestosql.plugin.base.jmx.PrefixObjectNameGeneratorModule; import io.prestosql.spi.memory.ClusterMemoryPoolManager; import io.prestosql.spi.resourcegroups.ResourceGroupConfigurationManager; import io.prestosql.spi.resourcegroups.ResourceGroupConfigurationManagerContext; @@ -42,7 +43,7 @@ public ResourceGroupConfigurationManager create(Map config, R new MBeanServerModule(), new JsonModule(), new DbResourceGroupsModule(), - new PrefixObjectNameGeneratorModule(), + new PrefixObjectNameGeneratorModule("io.prestosql.plugin.resourcegroups.db", "presto.plugin.resourcegroups.db"), binder -> binder.bind(String.class).annotatedWith(ForEnvironment.class).toInstance(context.getEnvironment()), binder -> binder.bind(ClusterMemoryPoolManager.class).toInstance(context.getMemoryPoolManager())); From 21decc43aeb6ca1017dd4f5a39ffc7c86c9a1588 Mon Sep 17 00:00:00 2001 From: David 
Phillips Date: Fri, 10 Apr 2020 13:59:24 -0700 Subject: [PATCH 147/519] Restore development log level for com.ning.http.client This logger is used by the Maven plugin loading code. --- presto-main/etc/log.properties | 3 +++ 1 file changed, 3 insertions(+) diff --git a/presto-main/etc/log.properties b/presto-main/etc/log.properties index 97f82d5268ba..0c3621683344 100644 --- a/presto-main/etc/log.properties +++ b/presto-main/etc/log.properties @@ -8,3 +8,6 @@ io.prestosql=INFO com.sun.jersey.guice.spi.container.GuiceComponentProviderFactory=WARN io.prestosql.server.PluginManager=DEBUG + +# Maven plugin loading code +com.ning.http.client=WARN From 9c030c66c46ba0be492e7419c5030e2341c6a9fc Mon Sep 17 00:00:00 2001 From: David Phillips Date: Wed, 15 Apr 2020 17:19:07 -0700 Subject: [PATCH 148/519] Disable HTTP/2 for CLI and JDBC on pre JDK 11 The combination of SocketChannelSocketFactory and HTTP/2 over TLS causes hangs on JDK 8u252. Previously, connections used HTTP/1.1, but now use HTTP/2 over TLS due to the new ALPN support in JDK 8u252. 
--- .../src/main/java/io/prestosql/client/OkHttpUtil.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/presto-client/src/main/java/io/prestosql/client/OkHttpUtil.java b/presto-client/src/main/java/io/prestosql/client/OkHttpUtil.java index 132f5fc94ff9..fa15ebe878e2 100644 --- a/presto-client/src/main/java/io/prestosql/client/OkHttpUtil.java +++ b/presto-client/src/main/java/io/prestosql/client/OkHttpUtil.java @@ -16,12 +16,14 @@ import com.google.common.base.CharMatcher; import com.google.common.base.Splitter; import com.google.common.base.StandardSystemProperty; +import com.google.common.collect.ImmutableList; import com.google.common.net.HostAndPort; import io.airlift.security.pem.PemReader; import okhttp3.Credentials; import okhttp3.Interceptor; import okhttp3.JavaNetCookieJar; import okhttp3.OkHttpClient; +import okhttp3.Protocol; import okhttp3.Request; import javax.net.ssl.KeyManager; @@ -245,6 +247,7 @@ public static void setupChannelSocket(OkHttpClient.Builder clientBuilder) // Enable socket factory only for pre JDK 11 if (!isAtLeastJava11()) { clientBuilder.socketFactory(new SocketChannelSocketFactory()); + clientBuilder.protocols(ImmutableList.of(Protocol.HTTP_1_1)); } } From 0451d8906e2ec20c30596a279958f7226ca20207 Mon Sep 17 00:00:00 2001 From: Yuya Ebihara Date: Thu, 16 Apr 2020 19:02:20 +0900 Subject: [PATCH 149/519] Remove unused methods from MongoDB TypeUtils --- .../prestosql/plugin/mongodb/TypeUtils.java | 27 ------------------- 1 file changed, 27 deletions(-) diff --git a/presto-mongodb/src/main/java/io/prestosql/plugin/mongodb/TypeUtils.java b/presto-mongodb/src/main/java/io/prestosql/plugin/mongodb/TypeUtils.java index c47d92473190..be643552c181 100644 --- a/presto-mongodb/src/main/java/io/prestosql/plugin/mongodb/TypeUtils.java +++ b/presto-mongodb/src/main/java/io/prestosql/plugin/mongodb/TypeUtils.java @@ -18,13 +18,6 @@ import io.prestosql.spi.type.RowType; import io.prestosql.spi.type.Type; -import java.util.function.Predicate; - 
-import static io.prestosql.spi.type.DateType.DATE; -import static io.prestosql.spi.type.TimeType.TIME; -import static io.prestosql.spi.type.TimestampType.TIMESTAMP; -import static io.prestosql.spi.type.TimestampWithTimeZoneType.TIMESTAMP_WITH_TIME_ZONE; - public final class TypeUtils { private TypeUtils() {} @@ -43,24 +36,4 @@ public static boolean isRowType(Type type) { return type instanceof RowType; } - - public static boolean isDateType(Type type) - { - return type.equals(DATE) || - type.equals(TIME) || - type.equals(TIMESTAMP) || - type.equals(TIMESTAMP_WITH_TIME_ZONE); - } - - public static boolean containsType(Type type, Predicate predicate, Predicate... orPredicates) - { - for (Predicate orPredicate : orPredicates) { - predicate = predicate.or(orPredicate); - } - if (predicate.test(type)) { - return true; - } - - return type.getTypeParameters().stream().anyMatch(predicate); - } } From d14e7b736fd782cb0d43efc4a1b474c70d7afe04 Mon Sep 17 00:00:00 2001 From: praveenkrishna Date: Thu, 16 Apr 2020 17:02:48 +0530 Subject: [PATCH 150/519] Fix comment --- .../java/io/prestosql/sql/planner/LocalExecutionPlanner.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/LocalExecutionPlanner.java b/presto-main/src/main/java/io/prestosql/sql/planner/LocalExecutionPlanner.java index 8a38520efdb3..cec258243491 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/LocalExecutionPlanner.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/LocalExecutionPlanner.java @@ -1798,7 +1798,7 @@ private PhysicalOperation createNestedLoopJoin(JoinNode node, LocalExecutionPlan checkState( buildSource.getPipelineExecutionStrategy() == UNGROUPED_EXECUTION, - "Build source of a nested loop join is expected to be GROUPED_EXECUTION."); + "Build source of a nested loop join is expected to be UNGROUPED_EXECUTION."); checkArgument(node.getType() == INNER, "NestedLoopJoin is only used for inner join"); 
JoinBridgeManager nestedLoopJoinBridgeManager = new JoinBridgeManager<>( From b934d9ca9cffe9122a3c18b09c91199469f4c569 Mon Sep 17 00:00:00 2001 From: Pratham Desai Date: Wed, 15 Apr 2020 18:34:51 -0700 Subject: [PATCH 151/519] Fix incorrect computation of reader columns for S3SelectRecordCursor --- .../plugin/hive/s3select/S3SelectRecordCursorProvider.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/s3select/S3SelectRecordCursorProvider.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3select/S3SelectRecordCursorProvider.java index 31b25a34a92f..948f2f1c5ecd 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/s3select/S3SelectRecordCursorProvider.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3select/S3SelectRecordCursorProvider.java @@ -13,7 +13,6 @@ */ package io.prestosql.plugin.hive.s3select; -import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import io.prestosql.plugin.hive.HdfsEnvironment; import io.prestosql.plugin.hive.HiveColumnHandle; @@ -91,7 +90,7 @@ public Optional createRecordCursor( if (CSV_SERDES.contains(serdeName)) { List readerColumns = projectedReaderColumns .map(ReaderProjections::getReaderColumns) - .orElse(ImmutableList.of()); + .orElse(columns); IonSqlQueryBuilder queryBuilder = new IonSqlQueryBuilder(typeManager); String ionSqlQuery = queryBuilder.buildSql(readerColumns, effectivePredicate); From 07abff3652cf5a6b9d1bc5a4746c95768d3d1928 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Grzegorz=20Kokosi=C5=84ski?= Date: Tue, 14 Apr 2020 13:45:08 +0200 Subject: [PATCH 152/519] Remove unused field --- .../io/prestosql/security/TestFileBasedSystemAccessControl.java | 1 - 1 file changed, 1 deletion(-) diff --git a/presto-main/src/test/java/io/prestosql/security/TestFileBasedSystemAccessControl.java b/presto-main/src/test/java/io/prestosql/security/TestFileBasedSystemAccessControl.java index 
3188455fab82..fa64f8530168 100644 --- a/presto-main/src/test/java/io/prestosql/security/TestFileBasedSystemAccessControl.java +++ b/presto-main/src/test/java/io/prestosql/security/TestFileBasedSystemAccessControl.java @@ -64,7 +64,6 @@ public class TestFileBasedSystemAccessControl private static final CatalogSchemaName aliceSchema = new CatalogSchemaName("alice-catalog", "schema"); private static final QualifiedObjectName staffTable = new QualifiedObjectName("staff-catalog", "schema2", "table"); private static final QualifiedObjectName staffView = new QualifiedObjectName("staff-catalog", "schema2", "view"); - private static final CatalogSchemaName staffSchema = new CatalogSchemaName("staff-catalog", "schema2"); @Test public void testCanImpersonateUserOperations() From 44cdf5b478303dd4f8494481086f9955586058f0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Grzegorz=20Kokosi=C5=84ski?= Date: Tue, 14 Apr 2020 13:45:09 +0200 Subject: [PATCH 153/519] Inline FullConnectorSecurityContext --- .../FullConnectorSecurityContext.java | 45 ------------------- .../security/AccessControlManager.java | 3 +- .../security/TestFileBasedAccessControl.java | 17 ++----- .../connector/ConnectorSecurityContext.java | 23 ++++++++-- 4 files changed, 24 insertions(+), 64 deletions(-) delete mode 100644 presto-main/src/main/java/io/prestosql/FullConnectorSecurityContext.java diff --git a/presto-main/src/main/java/io/prestosql/FullConnectorSecurityContext.java b/presto-main/src/main/java/io/prestosql/FullConnectorSecurityContext.java deleted file mode 100644 index 28c68f7ae708..000000000000 --- a/presto-main/src/main/java/io/prestosql/FullConnectorSecurityContext.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package io.prestosql; - -import io.prestosql.spi.connector.ConnectorSecurityContext; -import io.prestosql.spi.connector.ConnectorTransactionHandle; -import io.prestosql.spi.security.ConnectorIdentity; - -import static java.util.Objects.requireNonNull; - -public class FullConnectorSecurityContext - implements ConnectorSecurityContext -{ - private final ConnectorTransactionHandle transactionHandle; - private final ConnectorIdentity identity; - - public FullConnectorSecurityContext(ConnectorTransactionHandle transactionHandle, ConnectorIdentity identity) - { - this.transactionHandle = requireNonNull(transactionHandle, "transactionHandle is null"); - this.identity = requireNonNull(identity, "identity is null"); - } - - @Override - public ConnectorTransactionHandle getTransactionHandle() - { - return transactionHandle; - } - - @Override - public ConnectorIdentity getIdentity() - { - return identity; - } -} diff --git a/presto-main/src/main/java/io/prestosql/security/AccessControlManager.java b/presto-main/src/main/java/io/prestosql/security/AccessControlManager.java index f9db7e7d71c0..1f6339f2735d 100644 --- a/presto-main/src/main/java/io/prestosql/security/AccessControlManager.java +++ b/presto-main/src/main/java/io/prestosql/security/AccessControlManager.java @@ -19,7 +19,6 @@ import com.google.common.collect.ImmutableSet; import io.airlift.log.Logger; import io.airlift.stats.CounterStat; -import io.prestosql.FullConnectorSecurityContext; import io.prestosql.connector.CatalogName; import io.prestosql.metadata.QualifiedObjectName; import 
io.prestosql.plugin.base.security.AllowAllSystemAccessControl; @@ -933,7 +932,7 @@ public ConnectorSecurityContext toConnectorSecurityContext(SecurityContext secur public ConnectorSecurityContext toConnectorSecurityContext(TransactionId requiredTransactionId, Identity identity) { - return new FullConnectorSecurityContext( + return new ConnectorSecurityContext( transactionManager.getConnectorTransaction(requiredTransactionId, catalogName), identity.toConnectorIdentity(catalogName.getCatalogName())); } diff --git a/presto-plugin-toolkit/src/test/java/io/prestosql/plugin/base/security/TestFileBasedAccessControl.java b/presto-plugin-toolkit/src/test/java/io/prestosql/plugin/base/security/TestFileBasedAccessControl.java index fc751c174444..09cec9865308 100644 --- a/presto-plugin-toolkit/src/test/java/io/prestosql/plugin/base/security/TestFileBasedAccessControl.java +++ b/presto-plugin-toolkit/src/test/java/io/prestosql/plugin/base/security/TestFileBasedAccessControl.java @@ -200,20 +200,9 @@ public void testEverythingImplemented() private static ConnectorSecurityContext user(String name, Set groups) { - return new ConnectorSecurityContext() - { - @Override - public ConnectorTransactionHandle getTransactionHandle() - { - return new ConnectorTransactionHandle() {}; - } - - @Override - public ConnectorIdentity getIdentity() - { - return ConnectorIdentity.forUser(name).withGroups(groups).build(); - } - }; + return new ConnectorSecurityContext( + new ConnectorTransactionHandle() {}, + ConnectorIdentity.forUser(name).withGroups(groups).build()); } private ConnectorAccessControl createAccessControl(String fileName) diff --git a/presto-spi/src/main/java/io/prestosql/spi/connector/ConnectorSecurityContext.java b/presto-spi/src/main/java/io/prestosql/spi/connector/ConnectorSecurityContext.java index 205bae550b51..dae09401a1e2 100644 --- a/presto-spi/src/main/java/io/prestosql/spi/connector/ConnectorSecurityContext.java +++ 
b/presto-spi/src/main/java/io/prestosql/spi/connector/ConnectorSecurityContext.java @@ -15,9 +15,26 @@ import io.prestosql.spi.security.ConnectorIdentity; -public interface ConnectorSecurityContext +import static java.util.Objects.requireNonNull; + +public class ConnectorSecurityContext { - ConnectorTransactionHandle getTransactionHandle(); + private final ConnectorTransactionHandle transactionHandle; + private final ConnectorIdentity identity; + + public ConnectorSecurityContext(ConnectorTransactionHandle transactionHandle, ConnectorIdentity identity) + { + this.transactionHandle = requireNonNull(transactionHandle, "transactionHandle is null"); + this.identity = requireNonNull(identity, "identity is null"); + } + + public ConnectorTransactionHandle getTransactionHandle() + { + return transactionHandle; + } - ConnectorIdentity getIdentity(); + public ConnectorIdentity getIdentity() + { + return identity; + } } From 01aac4cb1371556a49969fb5bdd334b0f4cee3ca Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Grzegorz=20Kokosi=C5=84ski?= Date: Tue, 14 Apr 2020 13:45:10 +0200 Subject: [PATCH 154/519] Add queryId to security contexts --- .../src/main/java/io/prestosql/Session.java | 6 +- .../security/AccessControlManager.java | 22 +++-- .../prestosql/security/SecurityContext.java | 20 +++- .../io/prestosql/sql/analyzer/Analysis.java | 5 +- .../io/prestosql/sql/analyzer/Analyzer.java | 2 +- .../security/TestAccessControlManager.java | 8 +- .../TestFileBasedSystemAccessControl.java | 90 ++++++++--------- .../security/TestFileBasedAccessControl.java | 4 +- .../TestFileBasedSystemAccessControl.java | 98 ++++++++++--------- .../connector/ConnectorSecurityContext.java | 10 +- .../spi/security/SystemSecurityContext.java | 13 ++- 11 files changed, 159 insertions(+), 119 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/Session.java b/presto-main/src/main/java/io/prestosql/Session.java index 4566c8152e3b..4eba00609783 100644 --- 
a/presto-main/src/main/java/io/prestosql/Session.java +++ b/presto-main/src/main/java/io/prestosql/Session.java @@ -316,7 +316,7 @@ public Session beginTransactionId(TransactionId transactionId, TransactionManage for (Entry property : catalogProperties.entrySet()) { // verify permissions - accessControl.checkCanSetCatalogSessionProperty(new SecurityContext(transactionId, identity), catalogName, property.getKey()); + accessControl.checkCanSetCatalogSessionProperty(new SecurityContext(transactionId, identity, queryId), catalogName, property.getKey()); // validate session property value sessionPropertyManager.validateCatalogSessionProperty(catalog, catalogName, property.getKey(), property.getValue()); @@ -332,7 +332,7 @@ public Session beginTransactionId(TransactionId transactionId, TransactionManage .orElseThrow(() -> new PrestoException(NOT_FOUND, "Catalog for role does not exist: " + catalogName)) .getCatalogName(); if (role.getType() == SelectedRole.Type.ROLE) { - accessControl.checkCanSetRole(new SecurityContext(transactionId, identity), role.getRole().get(), catalogName); + accessControl.checkCanSetRole(new SecurityContext(transactionId, identity, queryId), role.getRole().get(), catalogName); } roles.put(catalog.getCatalogName(), role); @@ -518,7 +518,7 @@ public static SessionBuilder builder(Session session) public SecurityContext toSecurityContext() { - return new SecurityContext(getRequiredTransactionId(), getIdentity()); + return new SecurityContext(getRequiredTransactionId(), getIdentity(), queryId); } public static class SessionBuilder diff --git a/presto-main/src/main/java/io/prestosql/security/AccessControlManager.java b/presto-main/src/main/java/io/prestosql/security/AccessControlManager.java index 1f6339f2735d..307d3353490f 100644 --- a/presto-main/src/main/java/io/prestosql/security/AccessControlManager.java +++ b/presto-main/src/main/java/io/prestosql/security/AccessControlManager.java @@ -26,6 +26,7 @@ import 
io.prestosql.plugin.base.security.ForwardingSystemAccessControl; import io.prestosql.plugin.base.security.ReadOnlySystemAccessControl; import io.prestosql.spi.PrestoException; +import io.prestosql.spi.QueryId; import io.prestosql.spi.connector.CatalogSchemaName; import io.prestosql.spi.connector.CatalogSchemaTableName; import io.prestosql.spi.connector.ColumnMetadata; @@ -188,7 +189,7 @@ public void checkCanImpersonateUser(Identity identity, String userName) requireNonNull(identity, "identity is null"); requireNonNull(userName, "userName is null"); - systemAuthorizationCheck(control -> control.checkCanImpersonateUser(new SystemSecurityContext(identity), userName)); + systemAuthorizationCheck(control -> control.checkCanImpersonateUser(new SystemSecurityContext(identity, Optional.empty()), userName)); } @Override @@ -206,7 +207,7 @@ public void checkCanExecuteQuery(Identity identity) { requireNonNull(identity, "identity is null"); - systemAuthorizationCheck(control -> control.checkCanExecuteQuery(new SystemSecurityContext(identity))); + systemAuthorizationCheck(control -> control.checkCanExecuteQuery(new SystemSecurityContext(identity, Optional.empty()))); } @Override @@ -214,14 +215,14 @@ public void checkCanViewQueryOwnedBy(Identity identity, String queryOwner) { requireNonNull(identity, "identity is null"); - systemAuthorizationCheck(control -> control.checkCanViewQueryOwnedBy(new SystemSecurityContext(identity), queryOwner)); + systemAuthorizationCheck(control -> control.checkCanViewQueryOwnedBy(new SystemSecurityContext(identity, Optional.empty()), queryOwner)); } @Override public Set filterQueriesOwnedBy(Identity identity, Set queryOwners) { for (SystemAccessControl systemAccessControl : systemAccessControls.get()) { - queryOwners = systemAccessControl.filterViewQueryOwnedBy(new SystemSecurityContext(identity), queryOwners); + queryOwners = systemAccessControl.filterViewQueryOwnedBy(new SystemSecurityContext(identity, Optional.empty()), queryOwners); } return 
queryOwners; } @@ -232,7 +233,7 @@ public void checkCanKillQueryOwnedBy(Identity identity, String queryOwner) requireNonNull(identity, "identity is null"); requireNonNull(queryOwner, "queryOwner is null"); - systemAuthorizationCheck(control -> control.checkCanKillQueryOwnedBy(new SystemSecurityContext(identity), queryOwner)); + systemAuthorizationCheck(control -> control.checkCanKillQueryOwnedBy(new SystemSecurityContext(identity, Optional.empty()), queryOwner)); } @Override @@ -242,7 +243,7 @@ public Set filterCatalogs(Identity identity, Set catalogs) requireNonNull(catalogs, "catalogs is null"); for (SystemAccessControl systemAccessControl : systemAccessControls.get()) { - catalogs = systemAccessControl.filterCatalogs(new SystemSecurityContext(identity), catalogs); + catalogs = systemAccessControl.filterCatalogs(new SystemSecurityContext(identity, Optional.empty()), catalogs); } return catalogs; } @@ -634,7 +635,7 @@ public void checkCanSetSystemSessionProperty(Identity identity, String propertyN requireNonNull(identity, "identity is null"); requireNonNull(propertyName, "propertyName is null"); - systemAuthorizationCheck(control -> control.checkCanSetSystemSessionProperty(new SystemSecurityContext(identity), propertyName)); + systemAuthorizationCheck(control -> control.checkCanSetSystemSessionProperty(new SystemSecurityContext(identity, Optional.empty()), propertyName)); } @Override @@ -927,14 +928,15 @@ public ConnectorTransactionHandle getTransactionHandle(TransactionId transaction public ConnectorSecurityContext toConnectorSecurityContext(SecurityContext securityContext) { - return toConnectorSecurityContext(securityContext.getTransactionId(), securityContext.getIdentity()); + return toConnectorSecurityContext(securityContext.getTransactionId(), securityContext.getIdentity(), securityContext.getQueryId()); } - public ConnectorSecurityContext toConnectorSecurityContext(TransactionId requiredTransactionId, Identity identity) + public ConnectorSecurityContext 
toConnectorSecurityContext(TransactionId requiredTransactionId, Identity identity, QueryId queryId) { return new ConnectorSecurityContext( transactionManager.getConnectorTransaction(requiredTransactionId, catalogName), - identity.toConnectorIdentity(catalogName.getCatalogName())); + identity.toConnectorIdentity(catalogName.getCatalogName()), + queryId); } } diff --git a/presto-main/src/main/java/io/prestosql/security/SecurityContext.java b/presto-main/src/main/java/io/prestosql/security/SecurityContext.java index 52ba4b9bd9da..a83ac89a63e8 100644 --- a/presto-main/src/main/java/io/prestosql/security/SecurityContext.java +++ b/presto-main/src/main/java/io/prestosql/security/SecurityContext.java @@ -14,11 +14,13 @@ package io.prestosql.security; import io.prestosql.Session; +import io.prestosql.spi.QueryId; import io.prestosql.spi.security.Identity; import io.prestosql.spi.security.SystemSecurityContext; import io.prestosql.transaction.TransactionId; import java.util.Objects; +import java.util.Optional; import static java.util.Objects.requireNonNull; @@ -27,16 +29,18 @@ public class SecurityContext public static SecurityContext of(Session session) { requireNonNull(session, "session is null"); - return new SecurityContext(session.getRequiredTransactionId(), session.getIdentity()); + return new SecurityContext(session.getRequiredTransactionId(), session.getIdentity(), session.getQueryId()); } private final TransactionId transactionId; private final Identity identity; + private final QueryId queryId; - public SecurityContext(TransactionId transactionId, Identity identity) + public SecurityContext(TransactionId transactionId, Identity identity, QueryId queryId) { this.transactionId = requireNonNull(transactionId, "transactionId is null"); this.identity = requireNonNull(identity, "identity is null"); + this.queryId = requireNonNull(queryId, "queryId is null"); } public TransactionId getTransactionId() @@ -49,9 +53,14 @@ public Identity getIdentity() return identity; } + 
public QueryId getQueryId() + { + return queryId; + } + public SystemSecurityContext toSystemSecurityContext() { - return new SystemSecurityContext(identity); + return new SystemSecurityContext(identity, Optional.of(queryId)); } @Override @@ -66,13 +75,14 @@ public boolean equals(Object o) } SecurityContext that = (SecurityContext) o; return Objects.equals(transactionId, that.transactionId) && - Objects.equals(identity, that.identity); + Objects.equals(identity, that.identity) && + Objects.equals(queryId, that.queryId); } @Override public int hashCode() { // this is needed by io.prestosql.sql.analyzer.Analysis.AccessControlInfo - return Objects.hash(transactionId, identity); + return Objects.hash(transactionId, identity, queryId); } } diff --git a/presto-main/src/main/java/io/prestosql/sql/analyzer/Analysis.java b/presto-main/src/main/java/io/prestosql/sql/analyzer/Analysis.java index 627bb2845e75..cddf666cdbee 100644 --- a/presto-main/src/main/java/io/prestosql/sql/analyzer/Analysis.java +++ b/presto-main/src/main/java/io/prestosql/sql/analyzer/Analysis.java @@ -27,6 +27,7 @@ import io.prestosql.metadata.TableHandle; import io.prestosql.security.AccessControl; import io.prestosql.security.SecurityContext; +import io.prestosql.spi.QueryId; import io.prestosql.spi.connector.ColumnHandle; import io.prestosql.spi.connector.ConnectorTableMetadata; import io.prestosql.spi.eventlistener.ColumnInfo; @@ -1002,9 +1003,9 @@ public AccessControl getAccessControl() return accessControl; } - public SecurityContext getSecurityContext(TransactionId transactionId) + public SecurityContext getSecurityContext(TransactionId transactionId, QueryId queryId) { - return new SecurityContext(transactionId, identity); + return new SecurityContext(transactionId, identity, queryId); } @Override diff --git a/presto-main/src/main/java/io/prestosql/sql/analyzer/Analyzer.java b/presto-main/src/main/java/io/prestosql/sql/analyzer/Analyzer.java index 07df5a6161fd..cf0e04d98587 100644 --- 
a/presto-main/src/main/java/io/prestosql/sql/analyzer/Analyzer.java +++ b/presto-main/src/main/java/io/prestosql/sql/analyzer/Analyzer.java @@ -86,7 +86,7 @@ public Analysis analyze(Statement statement, boolean isDescribe) analysis.getTableColumnReferences().forEach((accessControlInfo, tableColumnReferences) -> tableColumnReferences.forEach((tableName, columns) -> accessControlInfo.getAccessControl().checkCanSelectFromColumns( - accessControlInfo.getSecurityContext(session.getRequiredTransactionId()), + accessControlInfo.getSecurityContext(session.getRequiredTransactionId(), session.getQueryId()), tableName, columns))); return analysis; diff --git a/presto-main/src/test/java/io/prestosql/security/TestAccessControlManager.java b/presto-main/src/test/java/io/prestosql/security/TestAccessControlManager.java index b79293c39660..3c968b2063f0 100644 --- a/presto-main/src/test/java/io/prestosql/security/TestAccessControlManager.java +++ b/presto-main/src/test/java/io/prestosql/security/TestAccessControlManager.java @@ -28,6 +28,7 @@ import io.prestosql.plugin.base.security.ReadOnlySystemAccessControl; import io.prestosql.plugin.tpch.TpchConnectorFactory; import io.prestosql.spi.PrestoException; +import io.prestosql.spi.QueryId; import io.prestosql.spi.connector.CatalogSchemaName; import io.prestosql.spi.connector.CatalogSchemaTableName; import io.prestosql.spi.connector.Connector; @@ -72,6 +73,7 @@ public class TestAccessControlManager { private static final Principal PRINCIPAL = new BasicPrincipal("principal"); private static final String USER_NAME = "user_name"; + private static final QueryId queryId = new QueryId("query_id"); @Test(expectedExceptions = PrestoException.class, expectedExceptionsMessageRegExp = "Presto server is still initializing") public void testInitializing() @@ -102,7 +104,7 @@ public void testReadOnlySystemAccessControl() transaction(transactionManager, accessControlManager) .execute(transactionId -> { - SecurityContext context = new 
SecurityContext(transactionId, identity); + SecurityContext context = new SecurityContext(transactionId, identity, queryId); accessControlManager.checkCanSetCatalogSessionProperty(context, "catalog", "property"); accessControlManager.checkCanShowSchemas(context, "catalog"); accessControlManager.checkCanShowTables(context, new CatalogSchemaName("catalog", "schema")); @@ -121,7 +123,7 @@ public void testReadOnlySystemAccessControl() try { transaction(transactionManager, accessControlManager) .execute(transactionId -> { - accessControlManager.checkCanInsertIntoTable(new SecurityContext(transactionId, identity), tableName); + accessControlManager.checkCanInsertIntoTable(new SecurityContext(transactionId, identity, queryId), tableName); }); fail(); } @@ -240,7 +242,7 @@ public void checkCanShowCreateTable(ConnectorSecurityContext context, SchemaTabl private static SecurityContext context(TransactionId transactionId) { Identity identity = Identity.forUser(USER_NAME).withPrincipal(PRINCIPAL).build(); - return new SecurityContext(transactionId, identity); + return new SecurityContext(transactionId, identity, queryId); } @Test(expectedExceptions = PrestoException.class, expectedExceptionsMessageRegExp = "Access Denied: Cannot select from table secured_catalog.schema.table") diff --git a/presto-main/src/test/java/io/prestosql/security/TestFileBasedSystemAccessControl.java b/presto-main/src/test/java/io/prestosql/security/TestFileBasedSystemAccessControl.java index fa64f8530168..ba760907e738 100644 --- a/presto-main/src/test/java/io/prestosql/security/TestFileBasedSystemAccessControl.java +++ b/presto-main/src/test/java/io/prestosql/security/TestFileBasedSystemAccessControl.java @@ -17,6 +17,7 @@ import com.google.common.collect.ImmutableSet; import io.prestosql.metadata.QualifiedObjectName; import io.prestosql.plugin.base.security.FileBasedSystemAccessControl; +import io.prestosql.spi.QueryId; import io.prestosql.spi.connector.CatalogSchemaName; import 
io.prestosql.spi.connector.SchemaTableName; import io.prestosql.spi.security.AccessDeniedException; @@ -64,6 +65,7 @@ public class TestFileBasedSystemAccessControl private static final CatalogSchemaName aliceSchema = new CatalogSchemaName("alice-catalog", "schema"); private static final QualifiedObjectName staffTable = new QualifiedObjectName("staff-catalog", "schema2", "table"); private static final QualifiedObjectName staffView = new QualifiedObjectName("staff-catalog", "schema2", "view"); + private static final QueryId queryId = new QueryId("query_id"); @Test public void testCanImpersonateUserOperations() @@ -216,16 +218,16 @@ public void testSchemaOperations() transaction(transactionManager, accessControlManager) .execute(transactionId -> { Set aliceSchemas = ImmutableSet.of("schema"); - assertEquals(accessControlManager.filterSchemas(new SecurityContext(transactionId, alice), "alice-catalog", aliceSchemas), aliceSchemas); - assertEquals(accessControlManager.filterSchemas(new SecurityContext(transactionId, bob), "alice-catalog", aliceSchemas), ImmutableSet.of()); + assertEquals(accessControlManager.filterSchemas(new SecurityContext(transactionId, alice, queryId), "alice-catalog", aliceSchemas), aliceSchemas); + assertEquals(accessControlManager.filterSchemas(new SecurityContext(transactionId, bob, queryId), "alice-catalog", aliceSchemas), ImmutableSet.of()); - accessControlManager.checkCanCreateSchema(new SecurityContext(transactionId, alice), aliceSchema); - accessControlManager.checkCanDropSchema(new SecurityContext(transactionId, alice), aliceSchema); - accessControlManager.checkCanRenameSchema(new SecurityContext(transactionId, alice), aliceSchema, "new-schema"); - accessControlManager.checkCanShowSchemas(new SecurityContext(transactionId, alice), "alice-catalog"); + accessControlManager.checkCanCreateSchema(new SecurityContext(transactionId, alice, queryId), aliceSchema); + accessControlManager.checkCanDropSchema(new SecurityContext(transactionId, alice, 
queryId), aliceSchema); + accessControlManager.checkCanRenameSchema(new SecurityContext(transactionId, alice, queryId), aliceSchema, "new-schema"); + accessControlManager.checkCanShowSchemas(new SecurityContext(transactionId, alice, queryId), "alice-catalog"); }); assertThrows(AccessDeniedException.class, () -> transaction(transactionManager, accessControlManager).execute(transactionId -> { - accessControlManager.checkCanCreateSchema(new SecurityContext(transactionId, bob), aliceSchema); + accessControlManager.checkCanCreateSchema(new SecurityContext(transactionId, bob, queryId), aliceSchema); })); } @@ -238,26 +240,26 @@ public void testSchemaOperationsReadOnly() transaction(transactionManager, accessControlManager) .execute(transactionId -> { Set aliceSchemas = ImmutableSet.of("schema"); - assertEquals(accessControlManager.filterSchemas(new SecurityContext(transactionId, alice), "alice-catalog", aliceSchemas), aliceSchemas); - assertEquals(accessControlManager.filterSchemas(new SecurityContext(transactionId, bob), "alice-catalog", aliceSchemas), ImmutableSet.of()); + assertEquals(accessControlManager.filterSchemas(new SecurityContext(transactionId, alice, queryId), "alice-catalog", aliceSchemas), aliceSchemas); + assertEquals(accessControlManager.filterSchemas(new SecurityContext(transactionId, bob, queryId), "alice-catalog", aliceSchemas), ImmutableSet.of()); - accessControlManager.checkCanShowSchemas(new SecurityContext(transactionId, alice), "alice-catalog"); + accessControlManager.checkCanShowSchemas(new SecurityContext(transactionId, alice, queryId), "alice-catalog"); }); assertThrows(AccessDeniedException.class, () -> transaction(transactionManager, accessControlManager).execute(transactionId -> { - accessControlManager.checkCanCreateSchema(new SecurityContext(transactionId, alice), aliceSchema); + accessControlManager.checkCanCreateSchema(new SecurityContext(transactionId, alice, queryId), aliceSchema); })); assertThrows(AccessDeniedException.class, () -> 
transaction(transactionManager, accessControlManager).execute(transactionId -> { - accessControlManager.checkCanDropSchema(new SecurityContext(transactionId, alice), aliceSchema); + accessControlManager.checkCanDropSchema(new SecurityContext(transactionId, alice, queryId), aliceSchema); })); assertThrows(AccessDeniedException.class, () -> transaction(transactionManager, accessControlManager).execute(transactionId -> { - accessControlManager.checkCanRenameSchema(new SecurityContext(transactionId, alice), aliceSchema, "new-schema"); + accessControlManager.checkCanRenameSchema(new SecurityContext(transactionId, alice, queryId), aliceSchema, "new-schema"); })); assertThrows(AccessDeniedException.class, () -> transaction(transactionManager, accessControlManager).execute(transactionId -> { - accessControlManager.checkCanCreateSchema(new SecurityContext(transactionId, bob), aliceSchema); + accessControlManager.checkCanCreateSchema(new SecurityContext(transactionId, bob, queryId), aliceSchema); })); } @@ -270,9 +272,9 @@ public void testTableOperations() transaction(transactionManager, accessControlManager) .execute(transactionId -> { Set aliceTables = ImmutableSet.of(new SchemaTableName("schema", "table")); - SecurityContext aliceContext = new SecurityContext(transactionId, alice); - SecurityContext bobContext = new SecurityContext(transactionId, bob); - SecurityContext nonAsciiContext = new SecurityContext(transactionId, nonAsciiUser); + SecurityContext aliceContext = new SecurityContext(transactionId, alice, queryId); + SecurityContext bobContext = new SecurityContext(transactionId, bob, queryId); + SecurityContext nonAsciiContext = new SecurityContext(transactionId, nonAsciiUser, queryId); assertEquals(accessControlManager.filterTables(aliceContext, "alice-catalog", aliceTables), aliceTables); assertEquals(accessControlManager.filterTables(aliceContext, "staff-catalog", aliceTables), aliceTables); @@ -340,38 +342,38 @@ public void testTableOperationsReadOnly() 
transaction(transactionManager, accessControlManager) .execute(transactionId -> { Set aliceTables = ImmutableSet.of(new SchemaTableName("schema", "table")); - assertEquals(accessControlManager.filterTables(new SecurityContext(transactionId, alice), "alice-catalog", aliceTables), aliceTables); - assertEquals(accessControlManager.filterTables(new SecurityContext(transactionId, bob), "alice-catalog", aliceTables), ImmutableSet.of()); + assertEquals(accessControlManager.filterTables(new SecurityContext(transactionId, alice, queryId), "alice-catalog", aliceTables), aliceTables); + assertEquals(accessControlManager.filterTables(new SecurityContext(transactionId, bob, queryId), "alice-catalog", aliceTables), ImmutableSet.of()); - accessControlManager.checkCanSelectFromColumns(new SecurityContext(transactionId, alice), aliceTable, ImmutableSet.of()); + accessControlManager.checkCanSelectFromColumns(new SecurityContext(transactionId, alice, queryId), aliceTable, ImmutableSet.of()); }); assertThrows(AccessDeniedException.class, () -> transaction(transactionManager, accessControlManager).execute(transactionId -> { - accessControlManager.checkCanCreateTable(new SecurityContext(transactionId, alice), aliceTable); + accessControlManager.checkCanCreateTable(new SecurityContext(transactionId, alice, queryId), aliceTable); })); assertThrows(AccessDeniedException.class, () -> transaction(transactionManager, accessControlManager).execute(transactionId -> { - accessControlManager.checkCanDropTable(new SecurityContext(transactionId, alice), aliceTable); + accessControlManager.checkCanDropTable(new SecurityContext(transactionId, alice, queryId), aliceTable); })); assertThrows(AccessDeniedException.class, () -> transaction(transactionManager, accessControlManager).execute(transactionId -> { - accessControlManager.checkCanInsertIntoTable(new SecurityContext(transactionId, alice), aliceTable); + accessControlManager.checkCanInsertIntoTable(new SecurityContext(transactionId, alice, 
queryId), aliceTable); })); assertThrows(AccessDeniedException.class, () -> transaction(transactionManager, accessControlManager).execute(transactionId -> { - accessControlManager.checkCanDeleteFromTable(new SecurityContext(transactionId, alice), aliceTable); + accessControlManager.checkCanDeleteFromTable(new SecurityContext(transactionId, alice, queryId), aliceTable); })); assertThrows(AccessDeniedException.class, () -> transaction(transactionManager, accessControlManager).execute(transactionId -> { - accessControlManager.checkCanAddColumns(new SecurityContext(transactionId, alice), aliceTable); + accessControlManager.checkCanAddColumns(new SecurityContext(transactionId, alice, queryId), aliceTable); })); assertThrows(AccessDeniedException.class, () -> transaction(transactionManager, accessControlManager).execute(transactionId -> { - accessControlManager.checkCanRenameColumn(new SecurityContext(transactionId, alice), aliceTable); + accessControlManager.checkCanRenameColumn(new SecurityContext(transactionId, alice, queryId), aliceTable); })); assertThrows(AccessDeniedException.class, () -> transaction(transactionManager, accessControlManager).execute(transactionId -> { - accessControlManager.checkCanCreateTable(new SecurityContext(transactionId, bob), aliceTable); + accessControlManager.checkCanCreateTable(new SecurityContext(transactionId, bob, queryId), aliceTable); })); } @@ -383,9 +385,9 @@ public void testViewOperations() transaction(transactionManager, accessControlManager) .execute(transactionId -> { - SecurityContext aliceContext = new SecurityContext(transactionId, alice); - SecurityContext bobContext = new SecurityContext(transactionId, bob); - SecurityContext nonAsciiContext = new SecurityContext(transactionId, nonAsciiUser); + SecurityContext aliceContext = new SecurityContext(transactionId, alice, queryId); + SecurityContext bobContext = new SecurityContext(transactionId, bob, queryId); + SecurityContext nonAsciiContext = new 
SecurityContext(transactionId, nonAsciiUser, queryId); accessControlManager.checkCanCreateView(aliceContext, aliceView); accessControlManager.checkCanDropView(aliceContext, aliceView); @@ -451,37 +453,37 @@ public void testViewOperationsReadOnly() transaction(transactionManager, accessControlManager) .execute(transactionId -> { - SecurityContext context = new SecurityContext(transactionId, alice); + SecurityContext context = new SecurityContext(transactionId, alice, queryId); accessControlManager.checkCanSelectFromColumns(context, aliceView, ImmutableSet.of()); accessControlManager.checkCanSetCatalogSessionProperty(context, "alice-catalog", "property"); }); assertThrows(AccessDeniedException.class, () -> transaction(transactionManager, accessControlManager).execute(transactionId -> { - accessControlManager.checkCanCreateView(new SecurityContext(transactionId, alice), aliceView); + accessControlManager.checkCanCreateView(new SecurityContext(transactionId, alice, queryId), aliceView); })); assertThrows(AccessDeniedException.class, () -> transaction(transactionManager, accessControlManager).execute(transactionId -> { - accessControlManager.checkCanDropView(new SecurityContext(transactionId, alice), aliceView); + accessControlManager.checkCanDropView(new SecurityContext(transactionId, alice, queryId), aliceView); })); assertThrows(AccessDeniedException.class, () -> transaction(transactionManager, accessControlManager).execute(transactionId -> { - accessControlManager.checkCanCreateViewWithSelectFromColumns(new SecurityContext(transactionId, alice), aliceTable, ImmutableSet.of()); + accessControlManager.checkCanCreateViewWithSelectFromColumns(new SecurityContext(transactionId, alice, queryId), aliceTable, ImmutableSet.of()); })); assertThrows(AccessDeniedException.class, () -> transaction(transactionManager, accessControlManager).execute(transactionId -> { - accessControlManager.checkCanCreateViewWithSelectFromColumns(new SecurityContext(transactionId, alice), 
aliceView, ImmutableSet.of()); + accessControlManager.checkCanCreateViewWithSelectFromColumns(new SecurityContext(transactionId, alice, queryId), aliceView, ImmutableSet.of()); })); assertThrows(AccessDeniedException.class, () -> transaction(transactionManager, accessControlManager).execute(transactionId -> { - accessControlManager.checkCanGrantTablePrivilege(new SecurityContext(transactionId, alice), SELECT, aliceTable, new PrestoPrincipal(USER, "grantee"), true); + accessControlManager.checkCanGrantTablePrivilege(new SecurityContext(transactionId, alice, queryId), SELECT, aliceTable, new PrestoPrincipal(USER, "grantee"), true); })); assertThrows(AccessDeniedException.class, () -> transaction(transactionManager, accessControlManager).execute(transactionId -> { - accessControlManager.checkCanRevokeTablePrivilege(new SecurityContext(transactionId, alice), SELECT, aliceTable, new PrestoPrincipal(USER, "revokee"), true); + accessControlManager.checkCanRevokeTablePrivilege(new SecurityContext(transactionId, alice, queryId), SELECT, aliceTable, new PrestoPrincipal(USER, "revokee"), true); })); assertThrows(AccessDeniedException.class, () -> transaction(transactionManager, accessControlManager).execute(transactionId -> { - accessControlManager.checkCanCreateView(new SecurityContext(transactionId, bob), aliceView); + accessControlManager.checkCanCreateView(new SecurityContext(transactionId, bob, queryId), aliceView); })); } @@ -501,9 +503,9 @@ public void testRefreshing() transaction(transactionManager, accessControlManager) .execute(transactionId -> { - accessControlManager.checkCanCreateView(new SecurityContext(transactionId, alice), aliceView); - accessControlManager.checkCanCreateView(new SecurityContext(transactionId, alice), aliceView); - accessControlManager.checkCanCreateView(new SecurityContext(transactionId, alice), aliceView); + accessControlManager.checkCanCreateView(new SecurityContext(transactionId, alice, queryId), aliceView); + 
accessControlManager.checkCanCreateView(new SecurityContext(transactionId, alice, queryId), aliceView); + accessControlManager.checkCanCreateView(new SecurityContext(transactionId, alice, queryId), aliceView); }); copy(new File(getResourcePath("security-config-file-with-unknown-rules.json")), configFile); @@ -511,14 +513,14 @@ public void testRefreshing() assertThatThrownBy(() -> transaction(transactionManager, accessControlManager) .execute(transactionId -> { - accessControlManager.checkCanCreateView(new SecurityContext(transactionId, alice), aliceView); + accessControlManager.checkCanCreateView(new SecurityContext(transactionId, alice, queryId), aliceView); })) .isInstanceOf(IllegalArgumentException.class) .hasMessageStartingWith("Invalid JSON file"); // test if file based cached control was not cached somewhere assertThatThrownBy(() -> transaction(transactionManager, accessControlManager) .execute(transactionId -> { - accessControlManager.checkCanCreateView(new SecurityContext(transactionId, alice), aliceView); + accessControlManager.checkCanCreateView(new SecurityContext(transactionId, alice, queryId), aliceView); })) .isInstanceOf(IllegalArgumentException.class) .hasMessageStartingWith("Invalid JSON file"); @@ -528,7 +530,7 @@ public void testRefreshing() transaction(transactionManager, accessControlManager) .execute(transactionId -> { - accessControlManager.checkCanCreateView(new SecurityContext(transactionId, alice), aliceView); + accessControlManager.checkCanCreateView(new SecurityContext(transactionId, alice, queryId), aliceView); }); } diff --git a/presto-plugin-toolkit/src/test/java/io/prestosql/plugin/base/security/TestFileBasedAccessControl.java b/presto-plugin-toolkit/src/test/java/io/prestosql/plugin/base/security/TestFileBasedAccessControl.java index 09cec9865308..1eb5735b3e78 100644 --- a/presto-plugin-toolkit/src/test/java/io/prestosql/plugin/base/security/TestFileBasedAccessControl.java +++ 
b/presto-plugin-toolkit/src/test/java/io/prestosql/plugin/base/security/TestFileBasedAccessControl.java @@ -15,6 +15,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; +import io.prestosql.spi.QueryId; import io.prestosql.spi.connector.ColumnMetadata; import io.prestosql.spi.connector.ConnectorAccessControl; import io.prestosql.spi.connector.ConnectorSecurityContext; @@ -202,7 +203,8 @@ private static ConnectorSecurityContext user(String name, Set groups) { return new ConnectorSecurityContext( new ConnectorTransactionHandle() {}, - ConnectorIdentity.forUser(name).withGroups(groups).build()); + ConnectorIdentity.forUser(name).withGroups(groups).build(), + new QueryId("query_id")); } private ConnectorAccessControl createAccessControl(String fileName) diff --git a/presto-plugin-toolkit/src/test/java/io/prestosql/plugin/base/security/TestFileBasedSystemAccessControl.java b/presto-plugin-toolkit/src/test/java/io/prestosql/plugin/base/security/TestFileBasedSystemAccessControl.java index d0e91bee9075..7379f3a28a95 100644 --- a/presto-plugin-toolkit/src/test/java/io/prestosql/plugin/base/security/TestFileBasedSystemAccessControl.java +++ b/presto-plugin-toolkit/src/test/java/io/prestosql/plugin/base/security/TestFileBasedSystemAccessControl.java @@ -15,6 +15,7 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; +import io.prestosql.spi.QueryId; import io.prestosql.spi.connector.CatalogSchemaName; import io.prestosql.spi.connector.CatalogSchemaTableName; import io.prestosql.spi.security.AccessDeniedException; @@ -57,6 +58,7 @@ public class TestFileBasedSystemAccessControl private static final Identity nonAsciiUser = Identity.ofUser("\u0194\u0194\u0194"); private static final Set allCatalogs = ImmutableSet.of("secret", "open-to-all", "all-allowed", "alice-catalog", "allowed-absent", "\u0200\u0200\u0200"); private static final CatalogSchemaTableName aliceView = new 
CatalogSchemaTableName("alice-catalog", "schema", "view"); + private static final Optional queryId = Optional.empty(); @Test public void testCanSetUserOperations() @@ -105,25 +107,25 @@ public void testQuery() { SystemAccessControl accessControlManager = newFileBasedSystemAccessControl("query.json"); - accessControlManager.checkCanExecuteQuery(new SystemSecurityContext(admin)); - accessControlManager.checkCanViewQueryOwnedBy(new SystemSecurityContext(admin), "any"); - assertEquals(accessControlManager.filterViewQueryOwnedBy(new SystemSecurityContext(admin), ImmutableSet.of("a", "b")), ImmutableSet.of("a", "b")); - accessControlManager.checkCanKillQueryOwnedBy(new SystemSecurityContext(admin), "any"); - - accessControlManager.checkCanExecuteQuery(new SystemSecurityContext(alice)); - accessControlManager.checkCanViewQueryOwnedBy(new SystemSecurityContext(alice), "any"); - assertEquals(accessControlManager.filterViewQueryOwnedBy(new SystemSecurityContext(alice), ImmutableSet.of("a", "b")), ImmutableSet.of("a", "b")); - assertThrows(AccessDeniedException.class, () -> accessControlManager.checkCanKillQueryOwnedBy(new SystemSecurityContext(alice), "any")); - - assertThrows(AccessDeniedException.class, () -> accessControlManager.checkCanExecuteQuery(new SystemSecurityContext(bob))); - assertThrows(AccessDeniedException.class, () -> accessControlManager.checkCanViewQueryOwnedBy(new SystemSecurityContext(bob), "any")); - assertEquals(accessControlManager.filterViewQueryOwnedBy(new SystemSecurityContext(bob), ImmutableSet.of("a", "b")), ImmutableSet.of()); - accessControlManager.checkCanKillQueryOwnedBy(new SystemSecurityContext(bob), "any"); - - accessControlManager.checkCanExecuteQuery(new SystemSecurityContext(nonAsciiUser)); - accessControlManager.checkCanViewQueryOwnedBy(new SystemSecurityContext(nonAsciiUser), "any"); - assertEquals(accessControlManager.filterViewQueryOwnedBy(new SystemSecurityContext(nonAsciiUser), ImmutableSet.of("a", "b")), ImmutableSet.of("a", 
"b")); - accessControlManager.checkCanKillQueryOwnedBy(new SystemSecurityContext(nonAsciiUser), "any"); + accessControlManager.checkCanExecuteQuery(new SystemSecurityContext(admin, queryId)); + accessControlManager.checkCanViewQueryOwnedBy(new SystemSecurityContext(admin, queryId), "any"); + assertEquals(accessControlManager.filterViewQueryOwnedBy(new SystemSecurityContext(admin, queryId), ImmutableSet.of("a", "b")), ImmutableSet.of("a", "b")); + accessControlManager.checkCanKillQueryOwnedBy(new SystemSecurityContext(admin, queryId), "any"); + + accessControlManager.checkCanExecuteQuery(new SystemSecurityContext(alice, queryId)); + accessControlManager.checkCanViewQueryOwnedBy(new SystemSecurityContext(alice, queryId), "any"); + assertEquals(accessControlManager.filterViewQueryOwnedBy(new SystemSecurityContext(alice, queryId), ImmutableSet.of("a", "b")), ImmutableSet.of("a", "b")); + assertThrows(AccessDeniedException.class, () -> accessControlManager.checkCanKillQueryOwnedBy(new SystemSecurityContext(alice, queryId), "any")); + + assertThrows(AccessDeniedException.class, () -> accessControlManager.checkCanExecuteQuery(new SystemSecurityContext(bob, queryId))); + assertThrows(AccessDeniedException.class, () -> accessControlManager.checkCanViewQueryOwnedBy(new SystemSecurityContext(bob, queryId), "any")); + assertEquals(accessControlManager.filterViewQueryOwnedBy(new SystemSecurityContext(bob, queryId), ImmutableSet.of("a", "b")), ImmutableSet.of()); + accessControlManager.checkCanKillQueryOwnedBy(new SystemSecurityContext(bob, queryId), "any"); + + accessControlManager.checkCanExecuteQuery(new SystemSecurityContext(nonAsciiUser, queryId)); + accessControlManager.checkCanViewQueryOwnedBy(new SystemSecurityContext(nonAsciiUser, queryId), "any"); + assertEquals(accessControlManager.filterViewQueryOwnedBy(new SystemSecurityContext(nonAsciiUser, queryId), ImmutableSet.of("a", "b")), ImmutableSet.of("a", "b")); + accessControlManager.checkCanKillQueryOwnedBy(new 
SystemSecurityContext(nonAsciiUser, queryId), "any"); } @Test @@ -131,10 +133,10 @@ public void testQueryNotSet() { SystemAccessControl accessControlManager = newFileBasedSystemAccessControl("catalog.json"); - accessControlManager.checkCanExecuteQuery(new SystemSecurityContext(bob)); - accessControlManager.checkCanViewQueryOwnedBy(new SystemSecurityContext(bob), "any"); - assertEquals(accessControlManager.filterViewQueryOwnedBy(new SystemSecurityContext(bob), ImmutableSet.of("a", "b")), ImmutableSet.of("a", "b")); - accessControlManager.checkCanKillQueryOwnedBy(new SystemSecurityContext(bob), "any"); + accessControlManager.checkCanExecuteQuery(new SystemSecurityContext(bob, queryId)); + accessControlManager.checkCanViewQueryOwnedBy(new SystemSecurityContext(bob, queryId), "any"); + assertEquals(accessControlManager.filterViewQueryOwnedBy(new SystemSecurityContext(bob, queryId), ImmutableSet.of("a", "b")), ImmutableSet.of("a", "b")); + accessControlManager.checkCanKillQueryOwnedBy(new SystemSecurityContext(bob, queryId), "any"); } @Test @@ -143,20 +145,20 @@ public void testDocsExample() String rulesFile = new File("../presto-docs/src/main/sphinx/security/query-access.json").getAbsolutePath(); SystemAccessControl accessControlManager = newFileBasedSystemAccessControl(ImmutableMap.of("security.config-file", rulesFile)); - accessControlManager.checkCanExecuteQuery(new SystemSecurityContext(admin)); - accessControlManager.checkCanViewQueryOwnedBy(new SystemSecurityContext(admin), "any"); - assertEquals(accessControlManager.filterViewQueryOwnedBy(new SystemSecurityContext(admin), ImmutableSet.of("a", "b")), ImmutableSet.of("a", "b")); - accessControlManager.checkCanKillQueryOwnedBy(new SystemSecurityContext(admin), "any"); + accessControlManager.checkCanExecuteQuery(new SystemSecurityContext(admin, queryId)); + accessControlManager.checkCanViewQueryOwnedBy(new SystemSecurityContext(admin, queryId), "any"); + assertEquals(accessControlManager.filterViewQueryOwnedBy(new 
SystemSecurityContext(admin, queryId), ImmutableSet.of("a", "b")), ImmutableSet.of("a", "b")); + accessControlManager.checkCanKillQueryOwnedBy(new SystemSecurityContext(admin, queryId), "any"); - accessControlManager.checkCanExecuteQuery(new SystemSecurityContext(alice)); - assertThrows(AccessDeniedException.class, () -> accessControlManager.checkCanViewQueryOwnedBy(new SystemSecurityContext(alice), "any")); - assertEquals(accessControlManager.filterViewQueryOwnedBy(new SystemSecurityContext(alice), ImmutableSet.of("a", "b")), ImmutableSet.of()); - accessControlManager.checkCanKillQueryOwnedBy(new SystemSecurityContext(alice), "any"); + accessControlManager.checkCanExecuteQuery(new SystemSecurityContext(alice, queryId)); + assertThrows(AccessDeniedException.class, () -> accessControlManager.checkCanViewQueryOwnedBy(new SystemSecurityContext(alice, queryId), "any")); + assertEquals(accessControlManager.filterViewQueryOwnedBy(new SystemSecurityContext(alice, queryId), ImmutableSet.of("a", "b")), ImmutableSet.of()); + accessControlManager.checkCanKillQueryOwnedBy(new SystemSecurityContext(alice, queryId), "any"); - accessControlManager.checkCanExecuteQuery(new SystemSecurityContext(bob)); - assertThrows(AccessDeniedException.class, () -> accessControlManager.checkCanViewQueryOwnedBy(new SystemSecurityContext(bob), "any")); - assertEquals(accessControlManager.filterViewQueryOwnedBy(new SystemSecurityContext(bob), ImmutableSet.of("a", "b")), ImmutableSet.of()); - assertThrows(AccessDeniedException.class, () -> accessControlManager.checkCanKillQueryOwnedBy(new SystemSecurityContext(bob), "any")); + accessControlManager.checkCanExecuteQuery(new SystemSecurityContext(bob, queryId)); + assertThrows(AccessDeniedException.class, () -> accessControlManager.checkCanViewQueryOwnedBy(new SystemSecurityContext(bob, queryId), "any")); + assertEquals(accessControlManager.filterViewQueryOwnedBy(new SystemSecurityContext(bob, queryId), ImmutableSet.of("a", "b")), ImmutableSet.of()); + 
assertThrows(AccessDeniedException.class, () -> accessControlManager.checkCanKillQueryOwnedBy(new SystemSecurityContext(bob, queryId), "any")); } @Test @@ -167,25 +169,25 @@ public void testSchemaOperations() PrestoPrincipal user = new PrestoPrincipal(PrincipalType.USER, "some_user"); PrestoPrincipal role = new PrestoPrincipal(PrincipalType.ROLE, "some_user"); - accessControl.checkCanSetSchemaAuthorization(new SystemSecurityContext(admin), new CatalogSchemaName("alice-catalog", "some_schema"), user); - accessControl.checkCanSetSchemaAuthorization(new SystemSecurityContext(admin), new CatalogSchemaName("alice-catalog", "some_schema"), role); + accessControl.checkCanSetSchemaAuthorization(new SystemSecurityContext(admin, queryId), new CatalogSchemaName("alice-catalog", "some_schema"), user); + accessControl.checkCanSetSchemaAuthorization(new SystemSecurityContext(admin, queryId), new CatalogSchemaName("alice-catalog", "some_schema"), role); - accessControl.checkCanSetSchemaAuthorization(new SystemSecurityContext(alice), new CatalogSchemaName("alice-catalog", "some_schema"), user); - accessControl.checkCanSetSchemaAuthorization(new SystemSecurityContext(alice), new CatalogSchemaName("alice-catalog", "some_schema"), role); + accessControl.checkCanSetSchemaAuthorization(new SystemSecurityContext(alice, queryId), new CatalogSchemaName("alice-catalog", "some_schema"), user); + accessControl.checkCanSetSchemaAuthorization(new SystemSecurityContext(alice, queryId), new CatalogSchemaName("alice-catalog", "some_schema"), role); - assertThatThrownBy(() -> accessControl.checkCanSetSchemaAuthorization(new SystemSecurityContext(bob), new CatalogSchemaName("alice-catalog", "some_schema"), user)) + assertThatThrownBy(() -> accessControl.checkCanSetSchemaAuthorization(new SystemSecurityContext(bob, queryId), new CatalogSchemaName("alice-catalog", "some_schema"), user)) .isInstanceOf(AccessDeniedException.class) .hasMessageStartingWith("Access Denied: Cannot set authorization for 
schema alice-catalog.some_schema"); - assertThatThrownBy(() -> accessControl.checkCanSetSchemaAuthorization(new SystemSecurityContext(bob), new CatalogSchemaName("alice-catalog", "some_schema"), role)) + assertThatThrownBy(() -> accessControl.checkCanSetSchemaAuthorization(new SystemSecurityContext(bob, queryId), new CatalogSchemaName("alice-catalog", "some_schema"), role)) .isInstanceOf(AccessDeniedException.class) .hasMessageStartingWith("Access Denied: Cannot set authorization for schema alice-catalog.some_schema"); - assertThatThrownBy(() -> accessControl.checkCanSetSchemaAuthorization(new SystemSecurityContext(alice), new CatalogSchemaName("secret", "some_schema"), user)) + assertThatThrownBy(() -> accessControl.checkCanSetSchemaAuthorization(new SystemSecurityContext(alice, queryId), new CatalogSchemaName("secret", "some_schema"), user)) .isInstanceOf(AccessDeniedException.class) .hasMessageStartingWith("Access Denied: Cannot set authorization for schema secret.some_schema"); - assertThatThrownBy(() -> accessControl.checkCanSetSchemaAuthorization(new SystemSecurityContext(alice), new CatalogSchemaName("secret", "some_schema"), role)) + assertThatThrownBy(() -> accessControl.checkCanSetSchemaAuthorization(new SystemSecurityContext(alice, queryId), new CatalogSchemaName("secret", "some_schema"), role)) .isInstanceOf(AccessDeniedException.class) .hasMessageStartingWith("Access Denied: Cannot set authorization for schema secret.some_schema"); } @@ -195,13 +197,13 @@ public void testCatalogOperations() { SystemAccessControl accessControl = newFileBasedSystemAccessControl("catalog.json"); - assertEquals(accessControl.filterCatalogs(new SystemSecurityContext(admin), allCatalogs), allCatalogs); + assertEquals(accessControl.filterCatalogs(new SystemSecurityContext(admin, queryId), allCatalogs), allCatalogs); Set aliceCatalogs = ImmutableSet.of("open-to-all", "alice-catalog", "all-allowed"); - assertEquals(accessControl.filterCatalogs(new SystemSecurityContext(alice), 
allCatalogs), aliceCatalogs); + assertEquals(accessControl.filterCatalogs(new SystemSecurityContext(alice, queryId), allCatalogs), aliceCatalogs); Set bobCatalogs = ImmutableSet.of("open-to-all", "all-allowed"); - assertEquals(accessControl.filterCatalogs(new SystemSecurityContext(bob), allCatalogs), bobCatalogs); + assertEquals(accessControl.filterCatalogs(new SystemSecurityContext(bob, queryId), allCatalogs), bobCatalogs); Set nonAsciiUserCatalogs = ImmutableSet.of("open-to-all", "all-allowed", "\u0200\u0200\u0200"); - assertEquals(accessControl.filterCatalogs(new SystemSecurityContext(nonAsciiUser), allCatalogs), nonAsciiUserCatalogs); + assertEquals(accessControl.filterCatalogs(new SystemSecurityContext(nonAsciiUser, queryId), allCatalogs), nonAsciiUserCatalogs); } @Test @@ -222,7 +224,7 @@ public void testRefreshing() SECURITY_CONFIG_FILE, configFile.getAbsolutePath(), SECURITY_REFRESH_PERIOD, "1ms")); - SystemSecurityContext alice = new SystemSecurityContext(TestFileBasedSystemAccessControl.alice); + SystemSecurityContext alice = new SystemSecurityContext(TestFileBasedSystemAccessControl.alice, queryId); accessControl.checkCanCreateView(alice, aliceView); accessControl.checkCanCreateView(alice, aliceView); accessControl.checkCanCreateView(alice, aliceView); diff --git a/presto-spi/src/main/java/io/prestosql/spi/connector/ConnectorSecurityContext.java b/presto-spi/src/main/java/io/prestosql/spi/connector/ConnectorSecurityContext.java index dae09401a1e2..6976ad82010e 100644 --- a/presto-spi/src/main/java/io/prestosql/spi/connector/ConnectorSecurityContext.java +++ b/presto-spi/src/main/java/io/prestosql/spi/connector/ConnectorSecurityContext.java @@ -13,6 +13,7 @@ */ package io.prestosql.spi.connector; +import io.prestosql.spi.QueryId; import io.prestosql.spi.security.ConnectorIdentity; import static java.util.Objects.requireNonNull; @@ -21,11 +22,13 @@ public class ConnectorSecurityContext { private final ConnectorTransactionHandle transactionHandle; private 
final ConnectorIdentity identity; + private final QueryId queryId; - public ConnectorSecurityContext(ConnectorTransactionHandle transactionHandle, ConnectorIdentity identity) + public ConnectorSecurityContext(ConnectorTransactionHandle transactionHandle, ConnectorIdentity identity, QueryId queryId) { this.transactionHandle = requireNonNull(transactionHandle, "transactionHandle is null"); this.identity = requireNonNull(identity, "identity is null"); + this.queryId = requireNonNull(queryId, "queryId is null"); } public ConnectorTransactionHandle getTransactionHandle() @@ -37,4 +40,9 @@ public ConnectorIdentity getIdentity() { return identity; } + + public QueryId getQueryId() + { + return queryId; + } } diff --git a/presto-spi/src/main/java/io/prestosql/spi/security/SystemSecurityContext.java b/presto-spi/src/main/java/io/prestosql/spi/security/SystemSecurityContext.java index f17a059dfe2b..5b2229bcb7f9 100644 --- a/presto-spi/src/main/java/io/prestosql/spi/security/SystemSecurityContext.java +++ b/presto-spi/src/main/java/io/prestosql/spi/security/SystemSecurityContext.java @@ -13,19 +13,30 @@ */ package io.prestosql.spi.security; +import io.prestosql.spi.QueryId; + +import java.util.Optional; + import static java.util.Objects.requireNonNull; public class SystemSecurityContext { private final Identity identity; + private final Optional queryId; - public SystemSecurityContext(Identity identity) + public SystemSecurityContext(Identity identity, Optional queryId) { this.identity = requireNonNull(identity, "identity is null"); + this.queryId = requireNonNull(queryId, "queryId is null"); } public Identity getIdentity() { return identity; } + + public Optional getQueryId() + { + return queryId; + } } From f66f62e2cdc752cd30d0acb2040a9a7ec1bde3c0 Mon Sep 17 00:00:00 2001 From: James Petty Date: Tue, 14 Apr 2020 07:58:45 -0400 Subject: [PATCH 155/519] Add status endpoint for HTTP HEAD requests Previously, node pings in HeartbeatFailureDetector would go to the root path and 
return 404 from workers because no endpoint was mapped to handle them. Now the failure detector points to /v1/status and receives empty HTTP OK responses instead. --- .../failuredetector/HeartbeatFailureDetector.java | 3 ++- .../java/io/prestosql/server/StatusResource.java | 9 +++++++++ .../test/java/io/prestosql/server/TestServer.java | 15 +++++++++++++++ 3 files changed, 26 insertions(+), 1 deletion(-) diff --git a/presto-main/src/main/java/io/prestosql/failuredetector/HeartbeatFailureDetector.java b/presto-main/src/main/java/io/prestosql/failuredetector/HeartbeatFailureDetector.java index b69a19bda100..4254fde61bab 100644 --- a/presto-main/src/main/java/io/prestosql/failuredetector/HeartbeatFailureDetector.java +++ b/presto-main/src/main/java/io/prestosql/failuredetector/HeartbeatFailureDetector.java @@ -66,6 +66,7 @@ import static com.google.common.collect.ImmutableList.toImmutableList; import static com.google.common.collect.ImmutableSet.toImmutableSet; import static io.airlift.concurrent.Threads.daemonThreadsNamed; +import static io.airlift.http.client.HttpUriBuilder.uriBuilderFrom; import static io.airlift.http.client.Request.Builder.prepareHead; import static io.prestosql.failuredetector.FailureDetector.State.ALIVE; import static io.prestosql.failuredetector.FailureDetector.State.GONE; @@ -257,7 +258,7 @@ void updateMonitoredServices() URI uri = getHttpUri(service); if (uri != null) { - tasks.put(service.getId(), new MonitoringTask(service, uri)); + tasks.put(service.getId(), new MonitoringTask(service, uriBuilderFrom(uri).appendPath("/v1/status").build())); } } diff --git a/presto-main/src/main/java/io/prestosql/server/StatusResource.java b/presto-main/src/main/java/io/prestosql/server/StatusResource.java index f1f0bdb59533..b3f946e4b098 100644 --- a/presto-main/src/main/java/io/prestosql/server/StatusResource.java +++ b/presto-main/src/main/java/io/prestosql/server/StatusResource.java @@ -20,8 +20,10 @@ import javax.inject.Inject; import javax.ws.rs.GET; 
+import javax.ws.rs.HEAD; import javax.ws.rs.Path; import javax.ws.rs.Produces; +import javax.ws.rs.core.Response; import java.lang.management.ManagementFactory; import java.lang.management.MemoryMXBean; @@ -61,6 +63,13 @@ public StatusResource(NodeVersion nodeVersion, NodeInfo nodeInfo, ServerConfig s } } + @HEAD + @Produces(APPLICATION_JSON) // to match the GET route + public Response statusPing() + { + return Response.ok().build(); + } + @GET @Produces(APPLICATION_JSON) public NodeStatus getStatus() diff --git a/presto-main/src/test/java/io/prestosql/server/TestServer.java b/presto-main/src/test/java/io/prestosql/server/TestServer.java index 0a186c41972d..717bdfe7bc83 100644 --- a/presto-main/src/test/java/io/prestosql/server/TestServer.java +++ b/presto-main/src/test/java/io/prestosql/server/TestServer.java @@ -45,11 +45,13 @@ import static com.google.common.base.Preconditions.checkState; import static com.google.common.base.Throwables.getStackTraceAsString; +import static com.google.common.net.HttpHeaders.CONTENT_TYPE; import static com.google.common.net.HttpHeaders.X_FORWARDED_HOST; import static com.google.common.net.HttpHeaders.X_FORWARDED_PORT; import static com.google.common.net.HttpHeaders.X_FORWARDED_PROTO; import static io.airlift.http.client.FullJsonResponseHandler.createFullJsonResponseHandler; import static io.airlift.http.client.Request.Builder.prepareGet; +import static io.airlift.http.client.Request.Builder.prepareHead; import static io.airlift.http.client.Request.Builder.preparePost; import static io.airlift.http.client.StaticBodyGenerator.createStaticBodyGenerator; import static io.airlift.http.client.StatusResponseHandler.createStatusResponseHandler; @@ -72,6 +74,7 @@ import static io.prestosql.spi.StandardErrorCode.INCOMPATIBLE_CLIENT; import static java.nio.charset.StandardCharsets.UTF_8; import static java.util.Objects.requireNonNull; +import static javax.ws.rs.core.MediaType.APPLICATION_JSON; import static 
javax.ws.rs.core.Response.Status.OK; import static javax.ws.rs.core.Response.Status.SEE_OTHER; import static org.testng.Assert.assertEquals; @@ -229,6 +232,18 @@ public Object[][] testVersionOnErrorDataProvider() }; } + @Test + public void testStatusPing() + { + Request request = prepareHead() + .setUri(uriFor("/v1/status")) + .setFollowRedirects(false) + .build(); + StatusResponse response = client.execute(request, createStatusResponseHandler()); + assertEquals(response.getStatusCode(), OK.getStatusCode(), "Status code"); + assertEquals(response.getHeader(CONTENT_TYPE), APPLICATION_JSON, "Content Type"); + } + @Test public void testRedirectToUi() { From 487938a272beae9536547e6aa979a11249b400e5 Mon Sep 17 00:00:00 2001 From: Mateusz Gajewski Date: Thu, 16 Apr 2020 10:08:55 +0200 Subject: [PATCH 156/519] Pass ConnectorSession to JdbcClient.isLimitGuaranteed --- .../main/java/io/prestosql/plugin/jdbc/BaseJdbcClient.java | 2 +- .../main/java/io/prestosql/plugin/jdbc/CachingJdbcClient.java | 4 ++-- .../java/io/prestosql/plugin/jdbc/ForwardingJdbcClient.java | 4 ++-- .../src/main/java/io/prestosql/plugin/jdbc/JdbcClient.java | 2 +- .../src/main/java/io/prestosql/plugin/jdbc/JdbcMetadata.java | 2 +- .../prestosql/plugin/jdbc/jmx/StatisticsAwareJdbcClient.java | 4 ++-- .../main/java/io/prestosql/plugin/memsql/MemSqlClient.java | 3 ++- .../src/main/java/io/prestosql/plugin/mysql/MySqlClient.java | 2 +- .../java/io/prestosql/plugin/postgresql/PostgreSqlClient.java | 2 +- .../java/io/prestosql/plugin/redshift/RedshiftClient.java | 3 ++- .../java/io/prestosql/plugin/sqlserver/SqlServerClient.java | 2 +- 11 files changed, 16 insertions(+), 14 deletions(-) diff --git a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/BaseJdbcClient.java b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/BaseJdbcClient.java index f1921a8411a1..32677101553e 100644 --- a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/BaseJdbcClient.java +++ 
b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/BaseJdbcClient.java @@ -885,7 +885,7 @@ protected Optional> limitFunction() } @Override - public boolean isLimitGuaranteed() + public boolean isLimitGuaranteed(ConnectorSession session) { throw new PrestoException(JDBC_ERROR, "limitFunction() is implemented without isLimitGuaranteed()"); } diff --git a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/CachingJdbcClient.java b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/CachingJdbcClient.java index f5035504bb38..78ee5632eb17 100644 --- a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/CachingJdbcClient.java +++ b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/CachingJdbcClient.java @@ -160,9 +160,9 @@ public boolean supportsLimit() } @Override - public boolean isLimitGuaranteed() + public boolean isLimitGuaranteed(ConnectorSession session) { - return delegate.isLimitGuaranteed(); + return delegate.isLimitGuaranteed(session); } @Override diff --git a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/ForwardingJdbcClient.java b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/ForwardingJdbcClient.java index 0a5c1ba8cf75..1e7f7086ed05 100644 --- a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/ForwardingJdbcClient.java +++ b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/ForwardingJdbcClient.java @@ -190,9 +190,9 @@ public boolean supportsLimit() } @Override - public boolean isLimitGuaranteed() + public boolean isLimitGuaranteed(ConnectorSession session) { - return delegate().isLimitGuaranteed(); + return delegate().isLimitGuaranteed(session); } @Override diff --git a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/JdbcClient.java b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/JdbcClient.java index e558e9337c36..f9a155859b84 100644 --- a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/JdbcClient.java +++ 
b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/JdbcClient.java @@ -66,7 +66,7 @@ PreparedStatement buildSql(ConnectorSession session, Connection connection, Jdbc boolean supportsLimit(); - boolean isLimitGuaranteed(); + boolean isLimitGuaranteed(ConnectorSession session); void addColumn(ConnectorSession session, JdbcTableHandle handle, ColumnMetadata column); diff --git a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/JdbcMetadata.java b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/JdbcMetadata.java index 2880e53b1571..bc96bac745de 100644 --- a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/JdbcMetadata.java +++ b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/JdbcMetadata.java @@ -175,7 +175,7 @@ public Optional> applyLimit(Connect handle.getConstraint(), OptionalLong.of(limit)); - return Optional.of(new LimitApplicationResult<>(handle, jdbcClient.isLimitGuaranteed())); + return Optional.of(new LimitApplicationResult<>(handle, jdbcClient.isLimitGuaranteed(session))); } @Override diff --git a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/jmx/StatisticsAwareJdbcClient.java b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/jmx/StatisticsAwareJdbcClient.java index 27bdc5203305..4efb023664fd 100644 --- a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/jmx/StatisticsAwareJdbcClient.java +++ b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/jmx/StatisticsAwareJdbcClient.java @@ -235,9 +235,9 @@ public boolean supportsLimit() } @Override - public boolean isLimitGuaranteed() + public boolean isLimitGuaranteed(ConnectorSession session) { - return delegate().isLimitGuaranteed(); + return delegate().isLimitGuaranteed(session); } @Override diff --git a/presto-memsql/src/main/java/io/prestosql/plugin/memsql/MemSqlClient.java b/presto-memsql/src/main/java/io/prestosql/plugin/memsql/MemSqlClient.java index d42cab28d420..97cb5a85f978 100644 --- 
a/presto-memsql/src/main/java/io/prestosql/plugin/memsql/MemSqlClient.java +++ b/presto-memsql/src/main/java/io/prestosql/plugin/memsql/MemSqlClient.java @@ -17,6 +17,7 @@ import io.prestosql.plugin.jdbc.BaseJdbcClient; import io.prestosql.plugin.jdbc.BaseJdbcConfig; import io.prestosql.plugin.jdbc.ConnectionFactory; +import io.prestosql.spi.connector.ConnectorSession; import javax.inject.Inject; @@ -85,7 +86,7 @@ protected Optional> limitFunction() } @Override - public boolean isLimitGuaranteed() + public boolean isLimitGuaranteed(ConnectorSession session) { return true; } diff --git a/presto-mysql/src/main/java/io/prestosql/plugin/mysql/MySqlClient.java b/presto-mysql/src/main/java/io/prestosql/plugin/mysql/MySqlClient.java index 9f5058895878..ad19e3f46112 100644 --- a/presto-mysql/src/main/java/io/prestosql/plugin/mysql/MySqlClient.java +++ b/presto-mysql/src/main/java/io/prestosql/plugin/mysql/MySqlClient.java @@ -292,7 +292,7 @@ protected Optional> limitFunction() } @Override - public boolean isLimitGuaranteed() + public boolean isLimitGuaranteed(ConnectorSession session) { return true; } diff --git a/presto-postgresql/src/main/java/io/prestosql/plugin/postgresql/PostgreSqlClient.java b/presto-postgresql/src/main/java/io/prestosql/plugin/postgresql/PostgreSqlClient.java index 1298010dc402..cca87d4fb63b 100644 --- a/presto-postgresql/src/main/java/io/prestosql/plugin/postgresql/PostgreSqlClient.java +++ b/presto-postgresql/src/main/java/io/prestosql/plugin/postgresql/PostgreSqlClient.java @@ -434,7 +434,7 @@ protected Optional> limitFunction() } @Override - public boolean isLimitGuaranteed() + public boolean isLimitGuaranteed(ConnectorSession session) { return true; } diff --git a/presto-redshift/src/main/java/io/prestosql/plugin/redshift/RedshiftClient.java b/presto-redshift/src/main/java/io/prestosql/plugin/redshift/RedshiftClient.java index 5d36879abcf5..5c5cca3fb36b 100644 --- a/presto-redshift/src/main/java/io/prestosql/plugin/redshift/RedshiftClient.java 
+++ b/presto-redshift/src/main/java/io/prestosql/plugin/redshift/RedshiftClient.java @@ -18,6 +18,7 @@ import io.prestosql.plugin.jdbc.ConnectionFactory; import io.prestosql.plugin.jdbc.JdbcIdentity; import io.prestosql.spi.PrestoException; +import io.prestosql.spi.connector.ConnectorSession; import io.prestosql.spi.connector.SchemaTableName; import javax.inject.Inject; @@ -71,7 +72,7 @@ protected Optional> limitFunction() } @Override - public boolean isLimitGuaranteed() + public boolean isLimitGuaranteed(ConnectorSession session) { return true; } diff --git a/presto-sqlserver/src/main/java/io/prestosql/plugin/sqlserver/SqlServerClient.java b/presto-sqlserver/src/main/java/io/prestosql/plugin/sqlserver/SqlServerClient.java index b6052ce2a89c..b834cfc44919 100644 --- a/presto-sqlserver/src/main/java/io/prestosql/plugin/sqlserver/SqlServerClient.java +++ b/presto-sqlserver/src/main/java/io/prestosql/plugin/sqlserver/SqlServerClient.java @@ -170,7 +170,7 @@ protected Optional> limitFunction() } @Override - public boolean isLimitGuaranteed() + public boolean isLimitGuaranteed(ConnectorSession session) { return true; } From 4000f9270df4bf29eddcaa96a63d130a60aad9e9 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Fri, 17 Apr 2020 10:18:51 +0200 Subject: [PATCH 157/519] Enforce ClusterMemoryManager is bound on coordinator only --- .../java/io/prestosql/memory/ClusterMemoryManager.java | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/memory/ClusterMemoryManager.java b/presto-main/src/main/java/io/prestosql/memory/ClusterMemoryManager.java index e3d487d9e111..d0dfd6e91360 100644 --- a/presto-main/src/main/java/io/prestosql/memory/ClusterMemoryManager.java +++ b/presto-main/src/main/java/io/prestosql/memory/ClusterMemoryManager.java @@ -62,6 +62,7 @@ import java.util.function.Consumer; import java.util.function.Supplier; +import static com.google.common.base.Preconditions.checkState; import static 
com.google.common.base.Verify.verify; import static com.google.common.base.Verify.verifyNotNull; import static com.google.common.collect.ImmutableList.toImmutableList; @@ -101,7 +102,6 @@ public class ClusterMemoryManager private final JsonCodec assignmentsRequestJsonCodec; private final DataSize maxQueryMemory; private final DataSize maxQueryTotalMemory; - private final boolean enabled; private final LowMemoryKiller lowMemoryKiller; private final Duration killOnOutOfMemoryDelay; private final String coordinatorId; @@ -147,6 +147,8 @@ public ClusterMemoryManager( requireNonNull(nodeMemoryConfig, "nodeMemoryConfig is null"); requireNonNull(serverConfig, "serverConfig is null"); requireNonNull(schedulerConfig, "schedulerConfig is null"); + checkState(serverConfig.isCoordinator(), "ClusterMemoryManager must not be bound on worker"); + this.nodeManager = requireNonNull(nodeManager, "nodeManager is null"); this.locationFactory = requireNonNull(locationFactory, "locationFactory is null"); this.httpClient = requireNonNull(httpClient, "httpClient is null"); @@ -157,7 +159,6 @@ public ClusterMemoryManager( this.maxQueryMemory = config.getMaxQueryMemory(); this.maxQueryTotalMemory = config.getMaxQueryTotalMemory(); this.coordinatorId = queryIdGenerator.getCoordinatorId(); - this.enabled = serverConfig.isCoordinator(); this.killOnOutOfMemoryDelay = config.getKillOnOutOfMemoryDelay(); this.isWorkScheduledOnCoordinator = schedulerConfig.isIncludeCoordinator(); @@ -203,10 +204,6 @@ public synchronized boolean memoryPoolExists(MemoryPoolId poolId) public synchronized void process(Iterable runningQueries, Supplier> allQueryInfoSupplier) { - if (!enabled) { - return; - } - // TODO revocable memory reservations can also leak and may need to be detected in the future // We are only concerned about the leaks in general pool. 
memoryLeakDetector.checkForMemoryLeaks(allQueryInfoSupplier, pools.get(GENERAL_POOL).getQueryMemoryReservations()); From dbb0305f22761ea33a19c93c00b6bade4fbdb24f Mon Sep 17 00:00:00 2001 From: "Mateusz \"Serafin\" Gajewski" Date: Thu, 16 Apr 2020 10:19:08 +0200 Subject: [PATCH 158/519] Update running instructions --- README.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index eeee7094ba03..79466753ce44 100644 --- a/README.md +++ b/README.md @@ -9,7 +9,7 @@ See the [User Manual](https://prestosql.io/docs/current/) for deployment instruc ## Requirements * Mac OS X or Linux -* Java 8 Update 161 or higher (8u161+), 64-bit. Both Oracle JDK and OpenJDK are supported. +* Java 8 Update 161 or higher (8u161+), 64-bit is the minimum supported version. Java 11 is recommended and will soon become required. Both Oracle JDK and OpenJDK are supported. * Python 2.6+ (for running with the launcher script) ## Building Presto @@ -49,6 +49,8 @@ Additionally, the Hive plugin must be configured with the location of your Hive -Dhive.metastore.uri=thrift://localhost:9083 +When running on Java 8, the VM Options must be supplemented with `-Dpresto-temporarily-allow-java8=true` in order for Presto to start. + ### Using SOCKS for Hive or HDFS If your Hive metastore or HDFS cluster is not directly accessible to your local machine, you can use SSH port forwarding to access it. 
Setup a dynamic SOCKS proxy with SSH listening on local port 1080: From 6541b8fd0bcb05b86755274b495673d67b4dea37 Mon Sep 17 00:00:00 2001 From: Mateusz Gajewski Date: Fri, 17 Apr 2020 15:12:19 +0200 Subject: [PATCH 159/519] Add book badge --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 79466753ce44..1d2d4f74b1ed 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,7 @@ # Presto [![Maven Central](https://img.shields.io/maven-central/v/io.prestosql/presto-server.svg?label=Download)](https://prestosql.io/download.html) [![Presto Slack](https://img.shields.io/static/v1?logo=slack&logoColor=959DA5&label=Slack&labelColor=333a41&message=join%20conversation&color=3AC358)](https://prestosql.io/slack.html) +[![Presto: The Definitive Guide book download](https://img.shields.io/badge/Presto%3A%20The%20Definitive%20Guide-download-brightgreen)](https://www.starburstdata.com/oreilly-presto-guide-download/) Presto is a distributed SQL query engine for big data. From c9b55bb54f46919ebd1c798ae1a9b727c24b2e9e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Grzegorz=20Kokosi=C5=84ski?= Date: Fri, 17 Apr 2020 11:46:36 +0200 Subject: [PATCH 160/519] Use ConnectorFactory class to setup ThreadContextClassLoader Previously InternalConnectorFactory was used that belongs to presto-main and so system classloader was set in the thread context classloader while we should use plugin classloader. 
--- .../src/main/java/io/prestosql/connector/ConnectorManager.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/presto-main/src/main/java/io/prestosql/connector/ConnectorManager.java b/presto-main/src/main/java/io/prestosql/connector/ConnectorManager.java index a6e65da8583f..495d0b112149 100644 --- a/presto-main/src/main/java/io/prestosql/connector/ConnectorManager.java +++ b/presto-main/src/main/java/io/prestosql/connector/ConnectorManager.java @@ -345,7 +345,7 @@ private Connector createConnector(CatalogName catalogName, InternalConnectorFact pageIndexerFactory, factory.getDuplicatePluginClassLoaderFactory()); - try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(factory.getClass().getClassLoader())) { + try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(factory.getConnectorFactory().getClass().getClassLoader())) { return factory.getConnectorFactory().create(catalogName.getCatalogName(), properties, context); } } From b1d1e9c8a6bf9d1476522a293003542990c57b08 Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Sat, 18 Apr 2020 21:23:13 +0200 Subject: [PATCH 161/519] Remove unused field --- .../rule/PruneIndexSourceColumns.java | 16 ++---------- .../optimizations/IndexJoinOptimizer.java | 3 +-- .../PruneUnreferencedOutputs.java | 2 +- .../UnaliasSymbolReferences.java | 2 +- .../sql/planner/plan/IndexSourceNode.java | 12 +-------- .../assertions/IndexSourceMatcher.java | 25 ------------------- .../planner/assertions/PlanMatchPattern.java | 4 +-- .../rule/TestPruneIndexSourceColumns.java | 11 ++------ .../iterative/rule/test/PlanBuilder.java | 6 ++--- 9 files changed, 12 insertions(+), 69 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneIndexSourceColumns.java b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneIndexSourceColumns.java index df91c6c0855a..c20f1c828eb6 100644 --- 
a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneIndexSourceColumns.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneIndexSourceColumns.java @@ -15,7 +15,6 @@ import com.google.common.collect.Maps; import io.prestosql.spi.connector.ColumnHandle; -import io.prestosql.spi.predicate.TupleDomain; import io.prestosql.sql.planner.Symbol; import io.prestosql.sql.planner.plan.IndexSourceNode; import io.prestosql.sql.planner.plan.PlanNode; @@ -46,8 +45,7 @@ protected Optional pushDownProjectOff(Context context, IndexSourceNode Map prunedAssignments = Maps.filterEntries( indexSourceNode.getAssignments(), - entry -> referencedOutputs.contains(entry.getKey()) || - tupleDomainReferencesColumnHandle(indexSourceNode.getCurrentConstraint(), entry.getValue())); + entry -> referencedOutputs.contains(entry.getKey())); List prunedOutputList = indexSourceNode.getOutputSymbols().stream() @@ -61,16 +59,6 @@ protected Optional pushDownProjectOff(Context context, IndexSourceNode indexSourceNode.getTableHandle(), prunedLookupSymbols, prunedOutputList, - prunedAssignments, - indexSourceNode.getCurrentConstraint())); - } - - private static boolean tupleDomainReferencesColumnHandle( - TupleDomain tupleDomain, - ColumnHandle columnHandle) - { - return tupleDomain.getDomains() - .map(domains -> domains.containsKey(columnHandle)) - .orElse(false); + prunedAssignments)); } } diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/IndexJoinOptimizer.java b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/IndexJoinOptimizer.java index de4da0aa35ac..5444b025a4a5 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/IndexJoinOptimizer.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/IndexJoinOptimizer.java @@ -320,8 +320,7 @@ private PlanNode planTableScan(TableScanNode node, Expression predicate, Context node.getTable(), context.getLookupSymbols(), 
node.getOutputSymbols(), - node.getAssignments(), - simplifiedConstraint); + node.getAssignments()); Expression resultingPredicate = combineConjuncts( metadata, diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PruneUnreferencedOutputs.java b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PruneUnreferencedOutputs.java index da2ba562bcb9..5aac524bbb1b 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PruneUnreferencedOutputs.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PruneUnreferencedOutputs.java @@ -360,7 +360,7 @@ public PlanNode visitIndexSource(IndexSourceNode node, RewriteContext newAssignments = newOutputSymbols.stream() .collect(Collectors.toMap(Function.identity(), node.getAssignments()::get)); - return new IndexSourceNode(node.getId(), node.getIndexHandle(), node.getTableHandle(), newLookupSymbols, newOutputSymbols, newAssignments, node.getCurrentConstraint()); + return new IndexSourceNode(node.getId(), node.getIndexHandle(), node.getTableHandle(), newLookupSymbols, newOutputSymbols, newAssignments); } @Override diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/UnaliasSymbolReferences.java b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/UnaliasSymbolReferences.java index 19b9b0b8671b..c7f0109379fd 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/UnaliasSymbolReferences.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/UnaliasSymbolReferences.java @@ -582,7 +582,7 @@ public PlanNode visitSpatialJoin(SpatialJoinNode node, RewriteContext cont @Override public PlanNode visitIndexSource(IndexSourceNode node, RewriteContext context) { - return new IndexSourceNode(node.getId(), node.getIndexHandle(), node.getTableHandle(), canonicalize(node.getLookupSymbols()), node.getOutputSymbols(), node.getAssignments(), node.getCurrentConstraint()); + return new 
IndexSourceNode(node.getId(), node.getIndexHandle(), node.getTableHandle(), canonicalize(node.getLookupSymbols()), node.getOutputSymbols(), node.getAssignments()); } @Override diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/plan/IndexSourceNode.java b/presto-main/src/main/java/io/prestosql/sql/planner/plan/IndexSourceNode.java index 877e18ab0871..6125f2abcb8a 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/plan/IndexSourceNode.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/plan/IndexSourceNode.java @@ -21,7 +21,6 @@ import io.prestosql.metadata.IndexHandle; import io.prestosql.metadata.TableHandle; import io.prestosql.spi.connector.ColumnHandle; -import io.prestosql.spi.predicate.TupleDomain; import io.prestosql.sql.planner.Symbol; import java.util.List; @@ -39,7 +38,6 @@ public class IndexSourceNode private final Set lookupSymbols; private final List outputSymbols; private final Map assignments; // symbol -> column - private final TupleDomain currentConstraint; // constraint over the input data the operator will guarantee @JsonCreator public IndexSourceNode( @@ -48,8 +46,7 @@ public IndexSourceNode( @JsonProperty("tableHandle") TableHandle tableHandle, @JsonProperty("lookupSymbols") Set lookupSymbols, @JsonProperty("outputSymbols") List outputSymbols, - @JsonProperty("assignments") Map assignments, - @JsonProperty("currentConstraint") TupleDomain currentConstraint) + @JsonProperty("assignments") Map assignments) { super(id); this.indexHandle = requireNonNull(indexHandle, "indexHandle is null"); @@ -57,7 +54,6 @@ public IndexSourceNode( this.lookupSymbols = ImmutableSet.copyOf(requireNonNull(lookupSymbols, "lookupSymbols is null")); this.outputSymbols = ImmutableList.copyOf(requireNonNull(outputSymbols, "outputSymbols is null")); this.assignments = ImmutableMap.copyOf(requireNonNull(assignments, "assignments is null")); - this.currentConstraint = requireNonNull(currentConstraint, "effectiveTupleDomain is null"); 
checkArgument(!lookupSymbols.isEmpty(), "lookupSymbols is empty"); checkArgument(!outputSymbols.isEmpty(), "outputSymbols is empty"); checkArgument(assignments.keySet().containsAll(lookupSymbols), "Assignments do not include all lookup symbols"); @@ -95,12 +91,6 @@ public Map getAssignments() return assignments; } - @JsonProperty - public TupleDomain getCurrentConstraint() - { - return currentConstraint; - } - @Override public List getSources() { diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/assertions/IndexSourceMatcher.java b/presto-main/src/test/java/io/prestosql/sql/planner/assertions/IndexSourceMatcher.java index dc1051612b95..b613cbcde9db 100644 --- a/presto-main/src/test/java/io/prestosql/sql/planner/assertions/IndexSourceMatcher.java +++ b/presto-main/src/test/java/io/prestosql/sql/planner/assertions/IndexSourceMatcher.java @@ -13,41 +13,27 @@ */ package io.prestosql.sql.planner.assertions; -import com.google.common.collect.ImmutableMap; import io.prestosql.Session; import io.prestosql.cost.StatsProvider; import io.prestosql.metadata.Metadata; import io.prestosql.metadata.TableMetadata; -import io.prestosql.spi.predicate.Domain; import io.prestosql.sql.planner.plan.IndexSourceNode; import io.prestosql.sql.planner.plan.PlanNode; -import java.util.Map; -import java.util.Optional; - import static com.google.common.base.MoreObjects.toStringHelper; import static com.google.common.base.Preconditions.checkState; import static io.prestosql.sql.planner.assertions.MatchResult.NO_MATCH; import static io.prestosql.sql.planner.assertions.MatchResult.match; -import static io.prestosql.sql.planner.assertions.Util.domainsMatch; import static java.util.Objects.requireNonNull; final class IndexSourceMatcher implements Matcher { private final String expectedTableName; - private final Optional> expectedConstraint; public IndexSourceMatcher(String expectedTableName) { this.expectedTableName = requireNonNull(expectedTableName, "expectedTableName is null"); - 
expectedConstraint = Optional.empty(); - } - - public IndexSourceMatcher(String expectedTableName, Map expectedConstraint) - { - this.expectedTableName = requireNonNull(expectedTableName, "expectedTableName is null"); - this.expectedConstraint = Optional.of(ImmutableMap.copyOf(expectedConstraint)); } @Override @@ -69,16 +55,6 @@ public MatchResult detailMatches(PlanNode node, StatsProvider stats, Session ses return NO_MATCH; } - if (expectedConstraint.isPresent() && - !domainsMatch( - expectedConstraint, - indexSourceNode.getCurrentConstraint(), - indexSourceNode.getTableHandle(), - session, - metadata)) { - return NO_MATCH; - } - return match(); } @@ -88,7 +64,6 @@ public String toString() return toStringHelper(this) .omitNullValues() .add("expectedTableName", expectedTableName) - .add("expectedConstraint", expectedConstraint.orElse(null)) .toString(); } } diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/assertions/PlanMatchPattern.java b/presto-main/src/test/java/io/prestosql/sql/planner/assertions/PlanMatchPattern.java index ff99d5b80f47..63b38dbf4049 100644 --- a/presto-main/src/test/java/io/prestosql/sql/planner/assertions/PlanMatchPattern.java +++ b/presto-main/src/test/java/io/prestosql/sql/planner/assertions/PlanMatchPattern.java @@ -183,10 +183,10 @@ public static PlanMatchPattern constrainedTableScanWithTableLayout(String expect return result.addColumnReferences(expectedTableName, columnReferences); } - public static PlanMatchPattern constrainedIndexSource(String expectedTableName, Map constraint, Map columnReferences) + public static PlanMatchPattern constrainedIndexSource(String expectedTableName, Map columnReferences) { return node(IndexSourceNode.class) - .with(new IndexSourceMatcher(expectedTableName, constraint)) + .with(new IndexSourceMatcher(expectedTableName)) .addColumnReferences(expectedTableName, columnReferences); } diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneIndexSourceColumns.java 
b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneIndexSourceColumns.java index c9a7a6bf44e0..e773f4d0d290 100644 --- a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneIndexSourceColumns.java +++ b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneIndexSourceColumns.java @@ -23,8 +23,6 @@ import io.prestosql.plugin.tpch.TpchTableHandle; import io.prestosql.plugin.tpch.TpchTransactionHandle; import io.prestosql.spi.connector.ColumnHandle; -import io.prestosql.spi.predicate.Domain; -import io.prestosql.spi.predicate.TupleDomain; import io.prestosql.sql.planner.Symbol; import io.prestosql.sql.planner.iterative.rule.test.BaseRuleTest; import io.prestosql.sql.planner.iterative.rule.test.PlanBuilder; @@ -37,7 +35,6 @@ import static com.google.common.collect.ImmutableList.toImmutableList; import static io.prestosql.plugin.tpch.TpchMetadata.TINY_SCALE_FACTOR; -import static io.prestosql.spi.predicate.NullableValue.asNull; import static io.prestosql.spi.type.DoubleType.DOUBLE; import static io.prestosql.spi.type.IntegerType.INTEGER; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.constrainedIndexSource; @@ -57,10 +54,7 @@ public void testNotAllOutputsReferenced() ImmutableMap.of("x", expression("orderkey")), constrainedIndexSource( "orders", - ImmutableMap.of("totalprice", Domain.onlyNull(DOUBLE)), - ImmutableMap.of( - "orderkey", "orderkey", - "totalprice", "totalprice")))); + ImmutableMap.of("orderkey", "orderkey")))); } @Test @@ -96,7 +90,6 @@ private static PlanNode buildProjectedIndexSource(PlanBuilder p, Predicate lookupSymbols, List outputSymbols, - Map assignments, - TupleDomain effectiveTupleDomain) + Map assignments) { return new IndexSourceNode( idAllocator.getNextId(), @@ -573,8 +572,7 @@ public IndexSourceNode indexSource( tableHandle, lookupSymbols, outputSymbols, - assignments, - effectiveTupleDomain); + assignments); } public ExchangeNode exchange(Consumer 
exchangeBuilderConsumer) From 3758c974ca04d9d9310c7929833fd8869c32f346 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Sat, 18 Apr 2020 22:55:56 +0200 Subject: [PATCH 162/519] Push down even for large IN --- .../io/prestosql/plugin/sqlserver/SqlServerClient.java | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/presto-sqlserver/src/main/java/io/prestosql/plugin/sqlserver/SqlServerClient.java b/presto-sqlserver/src/main/java/io/prestosql/plugin/sqlserver/SqlServerClient.java index b834cfc44919..764ae4fddf99 100644 --- a/presto-sqlserver/src/main/java/io/prestosql/plugin/sqlserver/SqlServerClient.java +++ b/presto-sqlserver/src/main/java/io/prestosql/plugin/sqlserver/SqlServerClient.java @@ -57,12 +57,11 @@ public class SqlServerClient // SqlServer supports 2100 parameters in prepared statement, let's create a space for about 4 big IN predicates private static final int SQL_SERVER_MAX_LIST_EXPRESSIONS = 500; - // TODO improve this by calling Domain#simplify - private static final UnaryOperator DISABLE_UNSUPPORTED_PUSHDOWN = domain -> { + private static final UnaryOperator SIMPLIFY_UNSUPPORTED_PUSHDOWN = domain -> { if (domain.getValues().getRanges().getRangeCount() <= SQL_SERVER_MAX_LIST_EXPRESSIONS) { return domain; } - return Domain.all(domain.getType()); + return domain.simplify(); }; @Inject @@ -121,7 +120,7 @@ public Optional toPrestoType(ConnectorSession session, Connection columnMapping.getType(), columnMapping.getReadFunction(), columnMapping.getWriteFunction(), - DISABLE_UNSUPPORTED_PUSHDOWN)); + SIMPLIFY_UNSUPPORTED_PUSHDOWN)); } @Override From b8f38e2a4aaa8899374621ec9447359395da7452 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Mon, 13 Apr 2020 00:42:51 +0200 Subject: [PATCH 163/519] Fix and improve exception message Previously message was misleading as to what was actually expected. 
--- .../src/main/java/io/prestosql/testing/QueryAssertions.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/presto-testing/src/main/java/io/prestosql/testing/QueryAssertions.java b/presto-testing/src/main/java/io/prestosql/testing/QueryAssertions.java index 234d1c7eaa6d..019eed9d7cb8 100644 --- a/presto-testing/src/main/java/io/prestosql/testing/QueryAssertions.java +++ b/presto-testing/src/main/java/io/prestosql/testing/QueryAssertions.java @@ -81,7 +81,7 @@ public static void assertUpdate(QueryRunner queryRunner, Session session, @Langu if (results.getUpdateCount().isPresent()) { if (!count.isPresent()) { - fail("update count should be present"); + fail("expected no update count, but got " + results.getUpdateCount().getAsLong()); } assertEquals(results.getUpdateCount().getAsLong(), count.getAsLong(), "update count"); } From 7ef756b995c7604057e76c0d9456d3d9c0822830 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Mon, 13 Apr 2020 00:42:52 +0200 Subject: [PATCH 164/519] Fix formatting --- .../prestosql/plugin/hive/HiveMetadata.java | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveMetadata.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveMetadata.java index e3774b232b9c..57b5ea611689 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveMetadata.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveMetadata.java @@ -2001,18 +2001,18 @@ private HiveColumnHandle createProjectedColumnHandle(HiveColumnHandle column, Li HiveColumnProjectionInfo columnProjectionInfo = new HiveColumnProjectionInfo( // Merge indices ImmutableList.builder() - .addAll(column.getHiveColumnProjectionInfo() - .map(HiveColumnProjectionInfo::getDereferenceIndices) - .orElse(ImmutableList.of())) - .addAll(indices) - .build(), + .addAll(column.getHiveColumnProjectionInfo() + .map(HiveColumnProjectionInfo::getDereferenceIndices) + 
.orElse(ImmutableList.of())) + .addAll(indices) + .build(), // Merge names ImmutableList.builder() - .addAll(column.getHiveColumnProjectionInfo() - .map(HiveColumnProjectionInfo::getDereferenceNames) - .orElse(ImmutableList.of())) - .addAll(oldHiveType.getHiveDereferenceNames(indices)) - .build(), + .addAll(column.getHiveColumnProjectionInfo() + .map(HiveColumnProjectionInfo::getDereferenceNames) + .orElse(ImmutableList.of())) + .addAll(oldHiveType.getHiveDereferenceNames(indices)) + .build(), newHiveType, newHiveType.getType(typeManager)); From f08a7675f8378e957b9256dd5046a07e7cc0408a Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Mon, 13 Apr 2020 00:42:54 +0200 Subject: [PATCH 165/519] Remove unintentional override --- .../io/prestosql/plugin/kudu/TestKuduDistributedQueries.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/presto-kudu/src/test/java/io/prestosql/plugin/kudu/TestKuduDistributedQueries.java b/presto-kudu/src/test/java/io/prestosql/plugin/kudu/TestKuduDistributedQueries.java index 58235bf5e7ea..8836d54a99eb 100644 --- a/presto-kudu/src/test/java/io/prestosql/plugin/kudu/TestKuduDistributedQueries.java +++ b/presto-kudu/src/test/java/io/prestosql/plugin/kudu/TestKuduDistributedQueries.java @@ -20,6 +20,7 @@ import io.prestosql.tpch.TpchTable; import org.testng.SkipException; import org.testng.annotations.AfterClass; +import org.testng.annotations.Test; import java.util.Optional; @@ -77,8 +78,8 @@ public void testDataMappingSmokeTest(DataMappingTestSetup dataMappingTestSetup) // TODO Support these test once kudu connector can create tables with default partitions } - @Override - public void testPredicatePushdown() + @Test + public void testKuduPredicatePushdown() { assertUpdate("CREATE TABLE IF NOT EXISTS test_is_null (" + "id INT WITH (primary_key=true), " + From 44ce907d9fda6726c3cbc37457ea8b42ce36a6bf Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Mon, 13 Apr 2020 00:42:55 +0200 Subject: [PATCH 166/519] Add 
generic test for pushdown with nullable domain --- .../prestosql/plugin/kudu/TestKuduDistributedQueries.java | 1 + .../prestosql/testing/AbstractTestDistributedQueries.java | 6 ++++++ 2 files changed, 7 insertions(+) diff --git a/presto-kudu/src/test/java/io/prestosql/plugin/kudu/TestKuduDistributedQueries.java b/presto-kudu/src/test/java/io/prestosql/plugin/kudu/TestKuduDistributedQueries.java index 8836d54a99eb..76565be2044b 100644 --- a/presto-kudu/src/test/java/io/prestosql/plugin/kudu/TestKuduDistributedQueries.java +++ b/presto-kudu/src/test/java/io/prestosql/plugin/kudu/TestKuduDistributedQueries.java @@ -79,6 +79,7 @@ public void testDataMappingSmokeTest(DataMappingTestSetup dataMappingTestSetup) } @Test + @Deprecated // TODO remove when testDataMappingSmokeTest is enabled for Kudu public void testKuduPredicatePushdown() { assertUpdate("CREATE TABLE IF NOT EXISTS test_is_null (" + diff --git a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java index 243f8a94b994..4316d56a26cd 100644 --- a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java +++ b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java @@ -1279,6 +1279,12 @@ public void testDataMappingSmokeTest(DataMappingTestSetup dataMappingTestSetup) assertQuery("SELECT id FROM " + tableName + " WHERE value > " + sampleValueLiteral, "VALUES 'high value'"); assertQuery("SELECT id FROM " + tableName + " WHERE value <= " + highValueLiteral, "VALUES ('sample value'), ('high value')"); + assertQuery("SELECT id FROM " + tableName + " WHERE value IS NULL OR value = " + sampleValueLiteral, "VALUES ('null value'), ('sample value')"); + assertQuery("SELECT id FROM " + tableName + " WHERE value IS NULL OR value != " + sampleValueLiteral, "VALUES ('null value'), ('high value')"); + assertQuery("SELECT id FROM " + tableName + " WHERE 
value IS NULL OR value <= " + sampleValueLiteral, "VALUES ('null value'), ('sample value')"); + assertQuery("SELECT id FROM " + tableName + " WHERE value IS NULL OR value > " + sampleValueLiteral, "VALUES ('null value'), ('high value')"); + assertQuery("SELECT id FROM " + tableName + " WHERE value IS NULL OR value <= " + highValueLiteral, "VALUES ('null value'), ('sample value'), ('high value')"); + assertUpdate("DROP TABLE " + tableName); } From 6f0d0f01f8defc0130f651a55aad39b69b903d09 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Mon, 13 Apr 2020 00:42:56 +0200 Subject: [PATCH 167/519] Throw PrestoException when unsupported type in CTAS in Hive Previously a CREATE TABLE AS .. with an unsupported type would fail with a non-PrestoException: java.lang.IllegalArgumentException: Unsupported type: time at io.prestosql.plugin.hive.metastore.thrift.ThriftMetastoreUtil.getSupportedColumnStatistics(ThriftMetastoreUtil.java:974) at io.prestosql.plugin.hive.metastore.file.FileHiveMetastore.getSupportedColumnStatistics(FileHiveMetastore.java:320) at io.prestosql.plugin.hive.metastore.cache.CachingHiveMetastore.getSupportedColumnStatistics(CachingHiveMetastore.java:302) at io.prestosql.plugin.hive.HiveMetastoreClosure.getSupportedColumnStatistics(HiveMetastoreClosure.java:76) at io.prestosql.plugin.hive.metastore.SemiTransactionalHiveMetastore.getSupportedColumnStatistics(SemiTransactionalHiveMetastore.java:203) at io.prestosql.plugin.hive.HiveMetadata.getColumnStatisticMetadata(HiveMetadata.java:2287) at java.util.stream.ReferencePipeline$3$1.accept(ReferencePipeline.java:193) at java.util.stream.ReferencePipeline$2$1.accept(ReferencePipeline.java:175) at java.util.stream.ReferencePipeline$2$1.accept(ReferencePipeline.java:175) at java.util.stream.ReferencePipeline$2$1.accept(ReferencePipeline.java:175) at java.util.ArrayList$ArrayListSpliterator.forEachRemaining(ArrayList.java:1382) at java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:482) at 
java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:472) at java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:708) at java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234) at java.util.stream.ReferencePipeline.collect(ReferencePipeline.java:566) at io.prestosql.plugin.hive.HiveMetadata.getStatisticsCollectionMetadata(HiveMetadata.java:2279) at io.prestosql.plugin.hive.HiveMetadata.getStatisticsCollectionMetadataForWrite(HiveMetadata.java:2261) at io.prestosql.plugin.base.classloader.ClassLoaderSafeConnectorMetadata.getStatisticsCollectionMetadataForWrite(ClassLoaderSafeConnectorMetadata.java:141) at io.prestosql.metadata.MetadataManager.getStatisticsCollectionMetadataForWrite(MetadataManager.java:706) at io.prestosql.sql.planner.LogicalPlanner.createTableCreationPlan(LogicalPlanner.java:330) at io.prestosql.sql.planner.LogicalPlanner.planStatementWithoutOutput(LogicalPlanner.java:235) at io.prestosql.sql.planner.LogicalPlanner.planStatement(LogicalPlanner.java:226) at io.prestosql.sql.planner.LogicalPlanner.plan(LogicalPlanner.java:192) at io.prestosql.sql.planner.LogicalPlanner.plan(LogicalPlanner.java:187) at io.prestosql.sql.planner.LogicalPlanner.plan(LogicalPlanner.java:182) at io.prestosql.execution.SqlQueryExecution.doPlanQuery(SqlQueryExecution.java:393) at io.prestosql.execution.SqlQueryExecution.planQuery(SqlQueryExecution.java:381) at io.prestosql.execution.SqlQueryExecution.start(SqlQueryExecution.java:336) at io.prestosql.execution.SqlQueryManager.createQuery(SqlQueryManager.java:237) at io.prestosql.dispatcher.LocalDispatchQuery.lambda$startExecution$7(LocalDispatchQuery.java:132) at io.prestosql.$gen.Presto_2496c42__testversion____20200412_214808_2015.run(Unknown Source) ... 
3 more --- .../io/prestosql/plugin/hive/HiveMetadata.java | 17 +++++++++++------ .../hive/TestHiveIntegrationSmokeTest.java | 7 +++++++ 2 files changed, 18 insertions(+), 6 deletions(-) diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveMetadata.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveMetadata.java index 57b5ea611689..df31614d75a7 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveMetadata.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveMetadata.java @@ -810,7 +810,7 @@ public void createTable(ConnectorSession session, ConnectorTableMetadata tableMe throw new PrestoException(NOT_SUPPORTED, "Bucketing/Partitioning columns not supported when Avro schema url is set"); } - List columnHandles = getColumnHandles(tableMetadata, ImmutableSet.copyOf(partitionedBy), typeTranslator); + List columnHandles = getColumnHandles(tableMetadata, ImmutableSet.copyOf(partitionedBy)); HiveStorageFormat hiveStorageFormat = getHiveStorageFormat(tableMetadata.getProperties()); Map tableProperties = getEmptyTableProperties(tableMetadata, bucketProperty, new HdfsContext(session, schemaName, tableName)); @@ -1232,7 +1232,7 @@ public HiveOutputTableHandle beginCreateTable(ConnectorSession session, Connecto String tableName = schemaTableName.getTableName(); Map tableProperties = getEmptyTableProperties(tableMetadata, bucketProperty, new HdfsContext(session, schemaName, tableName)); - List columnHandles = getColumnHandles(tableMetadata, ImmutableSet.copyOf(partitionedBy), typeTranslator); + List columnHandles = getColumnHandles(tableMetadata, ImmutableSet.copyOf(partitionedBy)); HiveStorageFormat partitionStorageFormat = isRespectTableFormat(session) ? 
tableStorageFormat : getHiveStorageFormat(session); // unpartitioned tables ignore the partition storage format @@ -2222,7 +2222,7 @@ public Optional getNewTableLayout(ConnectorSession sess { validatePartitionColumns(tableMetadata); validateBucketColumns(tableMetadata); - validateCsvColumns(tableMetadata); + validateColumns(tableMetadata); Optional bucketProperty = getBucketProperty(tableMetadata.getProperties()); if (!bucketProperty.isPresent()) { // return preferred layout which is partitioned by partition columns @@ -2433,11 +2433,11 @@ private static void validatePartitionColumns(ConnectorTableMetadata tableMetadat } } - private static List getColumnHandles(ConnectorTableMetadata tableMetadata, Set partitionColumnNames, TypeTranslator typeTranslator) + private List getColumnHandles(ConnectorTableMetadata tableMetadata, Set partitionColumnNames) { validatePartitionColumns(tableMetadata); validateBucketColumns(tableMetadata); - validateCsvColumns(tableMetadata); + validateColumns(tableMetadata); ImmutableList.Builder columnHandles = ImmutableList.builder(); int ordinal = 0; @@ -2465,8 +2465,13 @@ else if (column.isHidden()) { return columnHandles.build(); } - private static void validateCsvColumns(ConnectorTableMetadata tableMetadata) + private void validateColumns(ConnectorTableMetadata tableMetadata) { + // Validate types are supported + for (ColumnMetadata column : tableMetadata.getColumns()) { + toHiveType(typeTranslator, column.getType()); + } + if (getHiveStorageFormat(tableMetadata.getProperties()) != HiveStorageFormat.CSV) { return; } diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java index d04f3d8d8e25..05f5859dc28d 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java @@ -1493,6 +1493,13 
@@ private void testCreatePartitionedTableAs(Session session, HiveStorageFormat sto assertFalse(getQueryRunner().tableExists(session, "test_create_partitioned_table_as")); } + @Test + public void testCreateTableWithUnsupportedType() + { + assertQueryFails("CREATE TABLE test_create_table_with_unsupported_type(x time)", "Unsupported Hive type: time"); + assertQueryFails("CREATE TABLE test_create_table_with_unsupported_type AS SELECT TIME '00:00:00' x", "Unsupported Hive type: time"); + } + @Test public void testPropertiesTable() { From 0b0ef1b9dba72d9c63fe9e37709c3e3f02d338be Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Mon, 13 Apr 2020 00:42:57 +0200 Subject: [PATCH 168/519] Use CTAS in testDataMappingSmokeTest Some connectors, e.g. Kudu, allow CREATE TABLE AS SELECT but do not allow CREATE TABLE. --- .../plugin/iceberg/TestIcebergDistributed.java | 1 - .../testing/AbstractTestDistributedQueries.java | 15 ++++++++++----- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/presto-iceberg/src/test/java/io/prestosql/plugin/iceberg/TestIcebergDistributed.java b/presto-iceberg/src/test/java/io/prestosql/plugin/iceberg/TestIcebergDistributed.java index b752ee9fc0d0..346f01973a0a 100644 --- a/presto-iceberg/src/test/java/io/prestosql/plugin/iceberg/TestIcebergDistributed.java +++ b/presto-iceberg/src/test/java/io/prestosql/plugin/iceberg/TestIcebergDistributed.java @@ -76,7 +76,6 @@ protected Optional filterDataMappingSmokeTestData(DataMapp String typeName = dataMappingTestSetup.getPrestoTypeName(); if (typeName.equals("tinyint") || typeName.equals("smallint") - || typeName.equals("timestamp") || typeName.startsWith("char(")) { return Optional.of(dataMappingTestSetup.asUnsupported()); } diff --git a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java index 4316d56a26cd..3d4e130df97c 100644 --- 
a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java +++ b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java @@ -1249,11 +1249,16 @@ public void testDataMappingSmokeTest(DataMappingTestSetup dataMappingTestSetup) String tableName = "test_data_mapping_smoke_" + prestoTypeName.replaceAll("[^a-zA-Z0-9]", "_") + "_" + randomTableSuffix(); Runnable setup = () -> { - String createTable = format("CREATE TABLE %s(id varchar, value %s)", tableName, prestoTypeName); - assertUpdate(createTable); - assertUpdate( - format("INSERT INTO %s VALUES ('null value', NULL), ('sample value', %s), ('high value', %s)", tableName, sampleValueLiteral, highValueLiteral), - 3); + // TODO test with both CTAS *and* CREATE TABLE + INSERT, since they use different connector API methods. + String createTable = "" + + "CREATE TABLE " + tableName + " AS " + + "SELECT CAST(id AS varchar) id, CAST(value AS " + prestoTypeName + ") value " + + "FROM (VALUES " + + " ('null value', NULL), " + + " ('sample value', " + sampleValueLiteral + "), " + + " ('high value', " + highValueLiteral + ")) " + + " t(id, value)"; + assertUpdate(createTable, 3); }; if (dataMappingTestSetup.isUnsupportedType()) { String typeNameBase = prestoTypeName.replaceFirst("\\(.*", ""); From ec549ac8a1192b18c3667203974f453d5fe5a9fa Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Mon, 13 Apr 2020 00:42:58 +0200 Subject: [PATCH 169/519] Enable testDataMappingSmokeTest for Kudu --- .../io/prestosql/plugin/kudu/TypeHelper.java | 4 +- .../kudu/TestKuduDistributedQueries.java | 42 ++++++++----------- 2 files changed, 21 insertions(+), 25 deletions(-) diff --git a/presto-kudu/src/main/java/io/prestosql/plugin/kudu/TypeHelper.java b/presto-kudu/src/main/java/io/prestosql/plugin/kudu/TypeHelper.java index ccd9f0766d1f..85e7da4b8afb 100644 --- a/presto-kudu/src/main/java/io/prestosql/plugin/kudu/TypeHelper.java +++ 
b/presto-kudu/src/main/java/io/prestosql/plugin/kudu/TypeHelper.java @@ -15,6 +15,7 @@ import io.airlift.slice.Slice; import io.airlift.slice.Slices; +import io.prestosql.spi.PrestoException; import io.prestosql.spi.type.BigintType; import io.prestosql.spi.type.BooleanType; import io.prestosql.spi.type.CharType; @@ -37,6 +38,7 @@ import java.math.BigDecimal; import java.math.BigInteger; +import static io.prestosql.spi.StandardErrorCode.NOT_SUPPORTED; import static io.prestosql.spi.type.Decimals.decodeUnscaledValue; import static java.lang.Float.floatToRawIntBits; import static java.lang.Float.intBitsToFloat; @@ -86,7 +88,7 @@ public static org.apache.kudu.Type toKuduClientType(Type type) if (type instanceof CharType) { return org.apache.kudu.Type.STRING; } - throw new IllegalStateException("Type mapping implemented for Presto type: " + type); + throw new PrestoException(NOT_SUPPORTED, "Unsupported type: " + type); } public static Type fromKuduColumn(ColumnSchema column) diff --git a/presto-kudu/src/test/java/io/prestosql/plugin/kudu/TestKuduDistributedQueries.java b/presto-kudu/src/test/java/io/prestosql/plugin/kudu/TestKuduDistributedQueries.java index 76565be2044b..55198d8539a4 100644 --- a/presto-kudu/src/test/java/io/prestosql/plugin/kudu/TestKuduDistributedQueries.java +++ b/presto-kudu/src/test/java/io/prestosql/plugin/kudu/TestKuduDistributedQueries.java @@ -20,7 +20,6 @@ import io.prestosql.tpch.TpchTable; import org.testng.SkipException; import org.testng.annotations.AfterClass; -import org.testng.annotations.Test; import java.util.Optional; @@ -72,29 +71,6 @@ public void testInsert() // TODO Support these test once kudu connector can create tables with default partitions } - @Override - public void testDataMappingSmokeTest(DataMappingTestSetup dataMappingTestSetup) - { - // TODO Support these test once kudu connector can create tables with default partitions - } - - @Test - @Deprecated // TODO remove when testDataMappingSmokeTest is enabled for Kudu - 
public void testKuduPredicatePushdown() - { - assertUpdate("CREATE TABLE IF NOT EXISTS test_is_null (" + - "id INT WITH (primary_key=true), " + - "col_nullable bigint with (nullable=true)" + - ") WITH (" + - " partition_by_hash_columns = ARRAY['id'], " + - " partition_by_hash_buckets = 2" + - ")"); - - assertUpdate("INSERT INTO test_is_null VALUES (1, 1)", 1); - assertUpdate("INSERT INTO test_is_null(id) VALUES (2)", 1); - assertQuery("SELECT id FROM test_is_null WHERE col_nullable = 1 OR col_nullable IS NULL", "VALUES (1), (2)"); - } - @Override public void testAddColumn() { @@ -156,4 +132,22 @@ public void testWrittenStats() { // TODO Kudu connector supports CTAS and inserts, but the test would fail } + + @Override + protected Optional filterDataMappingSmokeTestData(DataMappingTestSetup dataMappingTestSetup) + { + String typeName = dataMappingTestSetup.getPrestoTypeName(); + if (typeName.equals("time") + || typeName.equals("timestamp with time zone")) { + return Optional.of(dataMappingTestSetup.asUnsupported()); + } + + if (typeName.equals("date") // date gets stored as varchar + || typeName.equals("varbinary")) { // TODO (https://github.com/prestosql/presto/issues/3416) + // TODO this should either work or fail cleanly + return Optional.empty(); + } + + return Optional.of(dataMappingTestSetup); + } } From 329a48592ecb6c628712ef3648f2e8da7eb33d38 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Mon, 20 Apr 2020 10:43:32 +0200 Subject: [PATCH 170/519] Add generic column names test --- .../TestCassandraDistributedQueries.java | 6 ++ .../hive/TestHiveDistributedQueries.java | 14 ++++ .../kudu/TestKuduDistributedQueries.java | 7 ++ presto-mongodb/pom.xml | 6 ++ .../mongodb/TestMongoDistributedQueries.java | 16 ++++ .../mysql/TestMySqlDistributedQueries.java | 7 ++ .../TestPhoenixDistributedQueries.java | 7 ++ .../AbstractTestDistributedQueries.java | 79 +++++++++++++++++++ 8 files changed, 142 insertions(+) diff --git 
a/presto-cassandra/src/test/java/io/prestosql/plugin/cassandra/TestCassandraDistributedQueries.java b/presto-cassandra/src/test/java/io/prestosql/plugin/cassandra/TestCassandraDistributedQueries.java index 6c2737846af6..32edb5ff8f1f 100644 --- a/presto-cassandra/src/test/java/io/prestosql/plugin/cassandra/TestCassandraDistributedQueries.java +++ b/presto-cassandra/src/test/java/io/prestosql/plugin/cassandra/TestCassandraDistributedQueries.java @@ -160,6 +160,12 @@ protected TestTable createTableWithDefaultColumns() throw new SkipException("Cassandra connector does not support column default values"); } + @Override + public void testColumnName(String columnName) + { + // Cassandra connector currently does not support create table + } + @Override public void testDataMappingSmokeTest(DataMappingTestSetup dataMappingTestSetup) { diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveDistributedQueries.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveDistributedQueries.java index 92ae861499c7..806f50a5bfca 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveDistributedQueries.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveDistributedQueries.java @@ -25,6 +25,7 @@ import static com.google.common.collect.Iterables.getOnlyElement; import static io.prestosql.sql.tree.ExplainType.Type.LOGICAL; import static io.prestosql.tpch.TpchTable.getTables; +import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.testng.Assert.assertEquals; public class TestHiveDistributedQueries @@ -59,6 +60,19 @@ public void testExplainOfCreateTableAs() assertEquals(getOnlyElement(result.getOnlyColumnAsSet()), getExplainPlan(query, LOGICAL)); } + @Override + public void testColumnName(String columnName) + { + if (columnName.equals("atrailingspace ")) { + // TODO (https://github.com/prestosql/presto/issues/3461) + assertThatThrownBy(() -> super.testColumnName(columnName)) + 
.hasMessageMatching("Table '.*' does not have columns \\[atrailingspace ]"); + throw new SkipException("works incorrectly, column name is trimmed"); + } + + super.testColumnName(columnName); + } + @Override protected Optional filterDataMappingSmokeTestData(DataMappingTestSetup dataMappingTestSetup) { diff --git a/presto-kudu/src/test/java/io/prestosql/plugin/kudu/TestKuduDistributedQueries.java b/presto-kudu/src/test/java/io/prestosql/plugin/kudu/TestKuduDistributedQueries.java index 55198d8539a4..6ac9776f1a5d 100644 --- a/presto-kudu/src/test/java/io/prestosql/plugin/kudu/TestKuduDistributedQueries.java +++ b/presto-kudu/src/test/java/io/prestosql/plugin/kudu/TestKuduDistributedQueries.java @@ -133,6 +133,13 @@ public void testWrittenStats() // TODO Kudu connector supports CTAS and inserts, but the test would fail } + @Override + public void testColumnName(String columnName) + { + // TODO (https://github.com/prestosql/presto/issues/3477) enable the test + throw new SkipException("TODO"); + } + @Override protected Optional filterDataMappingSmokeTestData(DataMappingTestSetup dataMappingTestSetup) { diff --git a/presto-mongodb/pom.xml b/presto-mongodb/pom.xml index a919919038ba..f4ecf7d60ce2 100644 --- a/presto-mongodb/pom.xml +++ b/presto-mongodb/pom.xml @@ -160,6 +160,12 @@ test + + org.assertj + assertj-core + test + + io.netty netty-transport diff --git a/presto-mongodb/src/test/java/io/prestosql/plugin/mongodb/TestMongoDistributedQueries.java b/presto-mongodb/src/test/java/io/prestosql/plugin/mongodb/TestMongoDistributedQueries.java index 498c0df7dca7..7947fbb05b7a 100644 --- a/presto-mongodb/src/test/java/io/prestosql/plugin/mongodb/TestMongoDistributedQueries.java +++ b/presto-mongodb/src/test/java/io/prestosql/plugin/mongodb/TestMongoDistributedQueries.java @@ -25,6 +25,7 @@ import java.util.Optional; import static io.prestosql.plugin.mongodb.MongoQueryRunner.createMongoQueryRunner; +import static org.assertj.core.api.Assertions.assertThatThrownBy; @Test 
public class TestMongoDistributedQueries @@ -115,6 +116,21 @@ protected TestTable createTableWithDefaultColumns() throw new SkipException("test disabled for Mongo"); } + @Override + @Test(dataProvider = "testColumnNameDataProvider") + public void testColumnName(String columnName) + { + if (columnName.equals("a.dot")) { + // TODO (https://github.com/prestosql/presto/issues/3460) + assertThatThrownBy(() -> super.testColumnName(columnName)) + .hasStackTraceContaining("TableWriterOperator") // during INSERT + .hasMessage("Invalid BSON field name a.dot"); + throw new SkipException("Insert would fail"); + } + + super.testColumnName(columnName); + } + @Override protected Optional filterDataMappingSmokeTestData(DataMappingTestSetup dataMappingTestSetup) { diff --git a/presto-mysql/src/test/java/io/prestosql/plugin/mysql/TestMySqlDistributedQueries.java b/presto-mysql/src/test/java/io/prestosql/plugin/mysql/TestMySqlDistributedQueries.java index 96a6e38ef21e..42cd8af8a349 100644 --- a/presto-mysql/src/test/java/io/prestosql/plugin/mysql/TestMySqlDistributedQueries.java +++ b/presto-mysql/src/test/java/io/prestosql/plugin/mysql/TestMySqlDistributedQueries.java @@ -23,6 +23,7 @@ import java.util.Optional; +import static com.google.common.base.Strings.nullToEmpty; import static io.prestosql.plugin.mysql.MySqlQueryRunner.createMySqlQueryRunner; import static io.prestosql.spi.type.VarcharType.VARCHAR; import static io.prestosql.testing.MaterializedResult.resultBuilder; @@ -119,6 +120,12 @@ public void testDelete() // delete is not supported } + @Override + protected boolean isColumnNameRejected(Exception exception, String columnName, boolean delimited) + { + return nullToEmpty(exception.getMessage()).matches(".*(Incorrect column name).*"); + } + @Override protected Optional filterDataMappingSmokeTestData(DataMappingTestSetup dataMappingTestSetup) { diff --git a/presto-phoenix/src/test/java/io/prestosql/plugin/phoenix/TestPhoenixDistributedQueries.java 
b/presto-phoenix/src/test/java/io/prestosql/plugin/phoenix/TestPhoenixDistributedQueries.java index af9147499bf3..0030b75e076a 100644 --- a/presto-phoenix/src/test/java/io/prestosql/plugin/phoenix/TestPhoenixDistributedQueries.java +++ b/presto-phoenix/src/test/java/io/prestosql/plugin/phoenix/TestPhoenixDistributedQueries.java @@ -133,6 +133,13 @@ public void testCreateSchema() throw new SkipException("test disabled until issue fixed"); // TODO https://github.com/prestosql/presto/issues/2348 } + @Override + public void testColumnName(String columnName) + { + // TODO (https://github.com/prestosql/presto/issues/3466) Phoenix generally lacks quoting in underlying queries + throw new SkipException("TODO"); + } + @Override public void testDataMappingSmokeTest(DataMappingTestSetup dataMappingTestSetup) { diff --git a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java index 3d4e130df97c..e3641527de30 100644 --- a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java +++ b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java @@ -70,6 +70,7 @@ import static java.lang.String.format; import static java.lang.Thread.currentThread; import static java.util.Collections.nCopies; +import static java.util.Locale.ENGLISH; import static java.util.Objects.requireNonNull; import static java.util.concurrent.TimeUnit.MILLISECONDS; import static java.util.concurrent.TimeUnit.MINUTES; @@ -1239,6 +1240,84 @@ public void testInsertForDefaultColumn() protected abstract TestTable createTableWithDefaultColumns(); + @Test(dataProvider = "testColumnNameDataProvider") + public void testColumnName(String columnName) + { + if (!requiresDelimiting(columnName)) { + testColumnName(columnName, false); + } + testColumnName(columnName, true); + } + + private void testColumnName(String columnName, boolean delimited) + { + 
String nameInSql = columnName; + if (delimited) { + nameInSql = "\"" + columnName.replace("\"", "\"\"") + "\""; + } + String tableName = "test_column_names_" + nameInSql.toLowerCase(ENGLISH).replaceAll("[^a-z0-9]", "_") + "_" + randomTableSuffix(); + + try { + // TODO test with both CTAS *and* CREATE TABLE + INSERT, since they use different connector API methods. + assertUpdate("CREATE TABLE " + tableName + "(id varchar, " + nameInSql + " varchar)"); + } + catch (RuntimeException e) { + if (isColumnNameRejected(e, columnName, delimited)) { + // It is OK if give column name is not allowed and is clearly rejected by the connector. + return; + } + throw e; + } + assertUpdate("INSERT INTO " + tableName + " VALUES ('null value', NULL), ('sample value', 'abc'), ('other value', 'xyz')", 3); + + // SELECT * + assertQuery("SELECT * FROM " + tableName, "VALUES ('null value', NULL), ('sample value', 'abc'), ('other value', 'xyz')"); + + // projection + assertQuery("SELECT " + nameInSql + " FROM " + tableName, "VALUES (NULL), ('abc'), ('xyz')"); + + // predicate + assertQuery("SELECT id FROM " + tableName + " WHERE " + nameInSql + " IS NULL", "VALUES ('null value')"); + assertQuery("SELECT id FROM " + tableName + " WHERE " + nameInSql + " = 'abc'", "VALUES ('sample value')"); + + assertUpdate("DROP TABLE " + tableName); + } + + protected boolean isColumnNameRejected(Exception exception, String columnName, boolean delimited) + { + return false; + } + + private static boolean requiresDelimiting(String identifierName) + { + return !identifierName.matches("[a-zA-Z][a-zA-Z0-9_]*"); + } + + @DataProvider + public Object[][] testColumnNameDataProvider() + { + return new Object[][] { + {"lowercase"}, + {"UPPERCASE"}, + {"MixedCase"}, + {"an_underscore"}, + {"a-hyphen-minus"}, // ASCII '-' is HYPHEN-MINUS in Unicode + {"a space"}, + {"atrailingspace "}, + {"a.dot"}, + {"a:colon"}, + {"a;semicolon"}, + {"an@at"}, + {"a\"quote"}, + {"an'apostrophe"}, + {"a`backtick`"}, + {"a/slash`"}, + 
{"a\\backslash`"}, + {"adigit0"}, + {"0startingwithdigit"}, + }; + } + @Test(dataProvider = "testDataMappingSmokeTestDataProvider") public void testDataMappingSmokeTest(DataMappingTestSetup dataMappingTestSetup) { From ef0abff6854d2867c63ecbf9e57aeccee82795c6 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Mon, 20 Apr 2020 11:18:01 +0200 Subject: [PATCH 171/519] Ignore asList(expression) warning `asList` is usually used to construct nullable test data (we prefer `ImmutableList.of` in general). As such, it's common that `asList()` or `asList(expression)` call is next to `asList(expr1, expr2, ...)`. --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 1d2d4f74b1ed..6678a10c5d66 100644 --- a/README.md +++ b/README.md @@ -106,6 +106,7 @@ Enable the following inspections: Disable the following inspections: +- ``Java | Performance | Call to 'Arrays.asList()' with too few arguments``, - ``Java | Abstraction issues | 'Optional' used as field or parameter type``. ### Building the Web UI From a2b032e33806e21049f0b01c53cc0eccd1833270 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Mon, 13 Apr 2020 21:19:45 +0200 Subject: [PATCH 172/519] Move parser test to appropriate test class --- .../io/prestosql/sql/parser/TestSqlParserErrorHandling.java | 2 ++ .../src/main/java/io/prestosql/testing/AbstractTestQueries.java | 1 - 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/presto-parser/src/test/java/io/prestosql/sql/parser/TestSqlParserErrorHandling.java b/presto-parser/src/test/java/io/prestosql/sql/parser/TestSqlParserErrorHandling.java index 2068515239e0..668411f452e8 100644 --- a/presto-parser/src/test/java/io/prestosql/sql/parser/TestSqlParserErrorHandling.java +++ b/presto-parser/src/test/java/io/prestosql/sql/parser/TestSqlParserErrorHandling.java @@ -139,6 +139,8 @@ public Object[][] getStatements() ", "}, {"SHOW CATALOGS LIKE '%$_%' ESCAPE", "line 1:33: mismatched input ''. 
Expecting: "}, + {"SHOW SCHEMAS IN foo LIKE '%$_%' ESCAPE", + "line 1:39: mismatched input ''. Expecting: "}, {"SHOW FUNCTIONS LIKE '%$_%' ESCAPE", "line 1:34: mismatched input ''. Expecting: "}, {"SHOW SESSION LIKE '%$_%' ESCAPE", diff --git a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestQueries.java b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestQueries.java index 507d1cd76530..e628b140af24 100644 --- a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestQueries.java +++ b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestQueries.java @@ -697,7 +697,6 @@ public void testShowSchemasLike() @Test public void testShowSchemasLikeWithEscape() { - assertQueryFails("SHOW SCHEMAS IN foo LIKE '%$_%' ESCAPE", "line 1:39: mismatched input ''. Expecting: "); assertQueryFails("SHOW SCHEMAS LIKE 't$_%' ESCAPE ''", "Escape string must be a single character"); assertQueryFails("SHOW SCHEMAS LIKE 't$_%' ESCAPE '$$'", "Escape string must be a single character"); From 7fa92370bed82e05914f746325be41cce5decfd0 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Mon, 13 Apr 2020 00:49:55 +0200 Subject: [PATCH 173/519] Require PrestoException in assertQueryFails --- .../prestosql/sql/query/QueryAssertions.java | 10 ++-- .../AbstractTestDistributedQueries.java | 13 +---- .../io/prestosql/testing/QueryAssertions.java | 52 +++++++++++++++---- 3 files changed, 49 insertions(+), 26 deletions(-) diff --git a/presto-main/src/test/java/io/prestosql/sql/query/QueryAssertions.java b/presto-main/src/test/java/io/prestosql/sql/query/QueryAssertions.java index 3ab818dfb052..fe21b47da14b 100644 --- a/presto-main/src/test/java/io/prestosql/sql/query/QueryAssertions.java +++ b/presto-main/src/test/java/io/prestosql/sql/query/QueryAssertions.java @@ -17,6 +17,7 @@ import com.google.common.collect.Iterables; import io.prestosql.Session; import io.prestosql.execution.warnings.WarningCollector; +import io.prestosql.spi.PrestoException; import 
io.prestosql.sql.planner.Plan; import io.prestosql.sql.planner.assertions.PlanAssert; import io.prestosql.sql.planner.assertions.PlanMatchPattern; @@ -30,11 +31,11 @@ import java.util.List; import java.util.function.Consumer; -import static com.google.common.base.Strings.nullToEmpty; import static io.airlift.testing.Assertions.assertEqualsIgnoreOrder; import static io.prestosql.testing.TestingSession.testSessionBuilder; import static java.lang.String.format; import static java.util.Objects.requireNonNull; +import static org.assertj.core.api.Assertions.assertThat; import static org.testng.Assert.assertEquals; import static org.testng.Assert.fail; @@ -68,9 +69,10 @@ public void assertFails(@Language("SQL") String sql, @Language("RegExp") String fail(format("Expected query to fail: %s", sql)); } catch (RuntimeException exception) { - if (!nullToEmpty(exception.getMessage()).matches(expectedMessageRegExp)) { - fail(format("Expected exception message '%s' to match '%s' for query: %s", exception.getMessage(), expectedMessageRegExp, sql), exception); - } + exception.addSuppressed(new Exception("Query: " + sql)); + assertThat(exception) + .isInstanceOf(PrestoException.class) + .hasMessageMatching(expectedMessageRegExp); } } diff --git a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java index e3641527de30..7961b79a4dde 100644 --- a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java +++ b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java @@ -14,7 +14,6 @@ package io.prestosql.testing; import com.google.common.base.Joiner; -import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -27,7 +26,6 @@ import io.prestosql.execution.QueryInfo; import 
io.prestosql.execution.QueryManager; import io.prestosql.server.BasicQueryInfo; -import io.prestosql.spi.PrestoException; import io.prestosql.spi.security.Identity; import io.prestosql.testing.sql.TestTable; import org.intellij.lang.annotations.Language; @@ -41,7 +39,6 @@ import static com.google.common.base.Preconditions.checkState; import static com.google.common.collect.Iterables.getOnlyElement; -import static com.google.common.collect.MoreCollectors.toOptional; import static com.google.common.util.concurrent.Uninterruptibles.sleepUninterruptibly; import static io.airlift.units.Duration.nanosSince; import static io.prestosql.SystemSessionProperties.QUERY_MAX_MEMORY; @@ -49,6 +46,7 @@ import static io.prestosql.spi.type.VarcharType.VARCHAR; import static io.prestosql.testing.MaterializedResult.resultBuilder; import static io.prestosql.testing.QueryAssertions.assertContains; +import static io.prestosql.testing.QueryAssertions.getPrestoExceptionCause; import static io.prestosql.testing.TestingAccessControlManager.TestingPrivilegeType.ADD_COLUMN; import static io.prestosql.testing.TestingAccessControlManager.TestingPrivilegeType.CREATE_TABLE; import static io.prestosql.testing.TestingAccessControlManager.TestingPrivilegeType.CREATE_VIEW; @@ -1410,15 +1408,6 @@ protected Optional filterDataMappingSmokeTestData(DataMapp return Optional.of(dataMappingTestSetup); } - private static RuntimeException getPrestoExceptionCause(Throwable e) - { - return Throwables.getCausalChain(e).stream() - .filter(cause -> cause.toString().startsWith(PrestoException.class.getName() + ": ")) // e.g. 
io.prestosql.client.FailureInfo - .map(RuntimeException.class::cast) - .collect(toOptional()) - .orElseThrow(() -> new IllegalArgumentException("Exception does not have PrestoException cause", e)); - } - protected static final class DataMappingTestSetup { private final String prestoTypeName; diff --git a/presto-testing/src/main/java/io/prestosql/testing/QueryAssertions.java b/presto-testing/src/main/java/io/prestosql/testing/QueryAssertions.java index 019eed9d7cb8..e9bec7da7eb9 100644 --- a/presto-testing/src/main/java/io/prestosql/testing/QueryAssertions.java +++ b/presto-testing/src/main/java/io/prestosql/testing/QueryAssertions.java @@ -14,6 +14,7 @@ package io.prestosql.testing; import com.google.common.base.Joiner; +import com.google.common.base.Throwables; import com.google.common.collect.ImmutableMultiset; import com.google.common.collect.Iterables; import com.google.common.collect.Multiset; @@ -23,6 +24,8 @@ import io.prestosql.Session; import io.prestosql.execution.warnings.WarningCollector; import io.prestosql.metadata.QualifiedObjectName; +import io.prestosql.spi.PrestoException; +import io.prestosql.sql.parser.ParsingException; import io.prestosql.sql.planner.Plan; import io.prestosql.testing.QueryRunner.MaterializedResultWithPlan; import io.prestosql.tpch.TpchTable; @@ -34,12 +37,13 @@ import java.util.function.Consumer; import java.util.function.Supplier; -import static com.google.common.base.Strings.nullToEmpty; import static io.airlift.units.Duration.nanosSince; import static io.prestosql.testing.assertions.Assert.assertEventually; import static java.lang.String.format; import static java.util.Locale.ENGLISH; +import static java.util.Objects.requireNonNull; import static java.util.concurrent.TimeUnit.SECONDS; +import static org.assertj.core.api.Assertions.assertThat; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertNotNull; import static org.testng.Assert.fail; @@ -286,8 +290,11 @@ protected static void 
assertQueryFails(QueryRunner queryRunner, Session session, queryRunner.execute(session, sql); fail(format("Expected query to fail: %s", sql)); } - catch (RuntimeException ex) { - assertExceptionMessage(sql, ex, expectedMessageRegExp); + catch (RuntimeException exception) { + exception.addSuppressed(new Exception("Query: " + sql)); + assertThat(exception) + .hasMessageMatching(expectedMessageRegExp) + .satisfies(e -> assertThat(getPrestoExceptionCause(e)).hasMessageMatching(expectedMessageRegExp)); } } @@ -303,13 +310,6 @@ protected static void assertQueryReturnsEmptyResult(QueryRunner queryRunner, Ses } } - private static void assertExceptionMessage(String sql, Exception exception, @Language("RegExp") String regex) - { - if (!nullToEmpty(exception.getMessage()).matches(regex)) { - fail(format("Expected exception message '%s' to match '%s' for query: %s", exception.getMessage(), regex, sql), exception); - } - } - public static void copyTpchTables( QueryRunner queryRunner, String sourceCatalog, @@ -339,4 +339,36 @@ public static void copyTable(QueryRunner queryRunner, QualifiedObjectName table, long rows = (Long) queryRunner.execute(session, sql).getMaterializedRows().get(0).getField(0); log.info("Imported %s rows for %s in %s", rows, table.getObjectName(), nanosSince(start).convertToMostSuccinctTimeUnit()); } + + static RuntimeException getPrestoExceptionCause(Throwable e) + { + return Throwables.getCausalChain(e).stream() + .filter(QueryAssertions::isPrestoException) + .findFirst() // TODO .collect(toOptional()) -- should be exactly one in the causal chain + .map(RuntimeException.class::cast) + .orElseThrow(() -> new IllegalArgumentException("Exception does not have PrestoException cause", e)); + } + + private static boolean isPrestoException(Throwable exception) + { + requireNonNull(exception, "exception is null"); + + if (exception instanceof PrestoException || exception instanceof ParsingException) { + return true; + } + + if 
(exception.getClass().getName().equals("io.prestosql.client.FailureInfo$FailureException")) { + try { + String originalClassName = exception.toString().split(":", 2)[0]; + Class originalClass = Class.forName(originalClassName).asSubclass(Throwable.class); + return PrestoException.class.isAssignableFrom(originalClass) || + ParsingException.class.isAssignableFrom(originalClass); + } + catch (ClassNotFoundException e) { + return false; + } + } + + return false; + } } From 3459b9a7e19996b54fc79407e85a12e3040b8db6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Grzegorz=20Kokosi=C5=84ski?= Date: Mon, 20 Apr 2020 10:14:47 +0200 Subject: [PATCH 174/519] Provide default for can show create table check Deny show create table by default --- .../io/prestosql/spi/connector/ConnectorAccessControl.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/presto-spi/src/main/java/io/prestosql/spi/connector/ConnectorAccessControl.java b/presto-spi/src/main/java/io/prestosql/spi/connector/ConnectorAccessControl.java index ae054ba47d7e..5414ef8cb971 100644 --- a/presto-spi/src/main/java/io/prestosql/spi/connector/ConnectorAccessControl.java +++ b/presto-spi/src/main/java/io/prestosql/spi/connector/ConnectorAccessControl.java @@ -51,6 +51,7 @@ import static io.prestosql.spi.security.AccessDeniedException.denySetRole; import static io.prestosql.spi.security.AccessDeniedException.denySetSchemaAuthorization; import static io.prestosql.spi.security.AccessDeniedException.denyShowColumns; +import static io.prestosql.spi.security.AccessDeniedException.denyShowCreateTable; import static io.prestosql.spi.security.AccessDeniedException.denyShowCurrentRoles; import static io.prestosql.spi.security.AccessDeniedException.denyShowRoleGrants; import static io.prestosql.spi.security.AccessDeniedException.denyShowRoles; @@ -127,7 +128,10 @@ default Set filterSchemas(ConnectorSecurityContext context, Set * * @throws io.prestosql.spi.security.AccessDeniedException if not allowed */ - void 
checkCanShowCreateTable(ConnectorSecurityContext context, SchemaTableName tableName); + default void checkCanShowCreateTable(ConnectorSecurityContext context, SchemaTableName tableName) + { + denyShowCreateTable(tableName.toString(), null); + } /** * Check if identity is allowed to create the specified table in this catalog. From 583c1b9d745e90c93ba941f5e7885e3ce6220121 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Grzegorz=20Kokosi=C5=84ski?= Date: Mon, 20 Apr 2020 10:14:49 +0200 Subject: [PATCH 175/519] Simplify DenyConnectorAccessControl --- .../security/TestAccessControlManager.java | 128 ------------------ 1 file changed, 128 deletions(-) diff --git a/presto-main/src/test/java/io/prestosql/security/TestAccessControlManager.java b/presto-main/src/test/java/io/prestosql/security/TestAccessControlManager.java index 3c968b2063f0..bb63b76558f4 100644 --- a/presto-main/src/test/java/io/prestosql/security/TestAccessControlManager.java +++ b/presto-main/src/test/java/io/prestosql/security/TestAccessControlManager.java @@ -38,8 +38,6 @@ import io.prestosql.spi.security.AccessDeniedException; import io.prestosql.spi.security.BasicPrincipal; import io.prestosql.spi.security.Identity; -import io.prestosql.spi.security.PrestoPrincipal; -import io.prestosql.spi.security.Privilege; import io.prestosql.spi.security.SystemAccessControl; import io.prestosql.spi.security.SystemAccessControlFactory; import io.prestosql.spi.security.SystemSecurityContext; @@ -59,7 +57,6 @@ import static io.prestosql.connector.CatalogName.createInformationSchemaCatalogName; import static io.prestosql.connector.CatalogName.createSystemTablesCatalogName; import static io.prestosql.metadata.MetadataManager.createTestMetadataManager; -import static io.prestosql.spi.security.AccessDeniedException.denySelectColumns; import static io.prestosql.spi.security.AccessDeniedException.denySelectTable; import static io.prestosql.spi.type.BigintType.BIGINT; import static 
io.prestosql.transaction.InMemoryTransactionManager.createTestTransactionManager; @@ -460,130 +457,5 @@ public Set filterCatalogs(SystemSecurityContext context, Set cat private static class DenyConnectorAccessControl implements ConnectorAccessControl { - @Override - public void checkCanSelectFromColumns(ConnectorSecurityContext context, SchemaTableName tableName, Set columnNames) - { - denySelectColumns(tableName.toString(), columnNames); - } - - @Override - public void checkCanCreateSchema(ConnectorSecurityContext context, String schemaName) - { - throw new UnsupportedOperationException(); - } - - @Override - public void checkCanDropSchema(ConnectorSecurityContext context, String schemaName) - { - throw new UnsupportedOperationException(); - } - - @Override - public void checkCanRenameSchema(ConnectorSecurityContext context, String schemaName, String newSchemaName) - { - throw new UnsupportedOperationException(); - } - - @Override - public void checkCanShowCreateTable(ConnectorSecurityContext context, SchemaTableName tableName) - { - throw new UnsupportedOperationException(); - } - - @Override - public void checkCanCreateTable(ConnectorSecurityContext context, SchemaTableName tableName) - { - throw new UnsupportedOperationException(); - } - - @Override - public void checkCanDropTable(ConnectorSecurityContext context, SchemaTableName tableName) - { - throw new UnsupportedOperationException(); - } - - @Override - public void checkCanRenameTable(ConnectorSecurityContext context, SchemaTableName tableName, SchemaTableName newTableName) - { - throw new UnsupportedOperationException(); - } - - @Override - public void checkCanSetTableComment(ConnectorSecurityContext context, SchemaTableName tableName) - { - throw new UnsupportedOperationException(); - } - - @Override - public void checkCanAddColumn(ConnectorSecurityContext context, SchemaTableName tableName) - { - throw new UnsupportedOperationException(); - } - - @Override - public void 
checkCanDropColumn(ConnectorSecurityContext context, SchemaTableName tableName) - { - throw new UnsupportedOperationException(); - } - - @Override - public void checkCanRenameColumn(ConnectorSecurityContext context, SchemaTableName tableName) - { - throw new UnsupportedOperationException(); - } - - @Override - public void checkCanInsertIntoTable(ConnectorSecurityContext context, SchemaTableName tableName) - { - throw new UnsupportedOperationException(); - } - - @Override - public void checkCanDeleteFromTable(ConnectorSecurityContext context, SchemaTableName tableName) - { - throw new UnsupportedOperationException(); - } - - @Override - public void checkCanCreateView(ConnectorSecurityContext context, SchemaTableName viewName) - { - throw new UnsupportedOperationException(); - } - - @Override - public void checkCanRenameView(ConnectorSecurityContext context, SchemaTableName viewName, SchemaTableName newViewName) - { - throw new UnsupportedOperationException(); - } - - @Override - public void checkCanDropView(ConnectorSecurityContext context, SchemaTableName viewName) - { - throw new UnsupportedOperationException(); - } - - @Override - public void checkCanCreateViewWithSelectFromColumns(ConnectorSecurityContext context, SchemaTableName tableName, Set columnNames) - { - throw new UnsupportedOperationException(); - } - - @Override - public void checkCanSetCatalogSessionProperty(ConnectorSecurityContext context, String propertyName) - { - throw new UnsupportedOperationException(); - } - - @Override - public void checkCanGrantTablePrivilege(ConnectorSecurityContext context, Privilege privilege, SchemaTableName tableName, PrestoPrincipal grantee, boolean grantOption) - { - throw new UnsupportedOperationException(); - } - - @Override - public void checkCanRevokeTablePrivilege(ConnectorSecurityContext context, Privilege privilege, SchemaTableName tableName, PrestoPrincipal revokee, boolean grantOption) - { - throw new UnsupportedOperationException(); - } } } From 
26c384abc861911b42fa8eb23c6c8ff503a54e95 Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Sun, 19 Apr 2020 22:51:53 +0200 Subject: [PATCH 176/519] Refactor visitJoin in PruneUnreferencedOutputs Avoid adding context symbols to expected inputs twice. Simplify code style. --- .../PruneUnreferencedOutputs.java | 37 +++++++------------ 1 file changed, 13 insertions(+), 24 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PruneUnreferencedOutputs.java b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PruneUnreferencedOutputs.java index 5aac524bbb1b..a1426ab93c01 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PruneUnreferencedOutputs.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PruneUnreferencedOutputs.java @@ -78,7 +78,6 @@ import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; @@ -212,32 +211,22 @@ public PlanNode visitExchange(ExchangeNode node, RewriteContext> con @Override public PlanNode visitJoin(JoinNode node, RewriteContext> context) { - Set expectedFilterInputs = new HashSet<>(); - if (node.getFilter().isPresent()) { - expectedFilterInputs = ImmutableSet.builder() - .addAll(SymbolsExtractor.extractUnique(node.getFilter().get())) - .addAll(context.get()) - .build(); - } + Set expectedFilterInputs = node.getFilter().map(SymbolsExtractor::extractUnique).orElse(ImmutableSet.of()); - ImmutableSet.Builder leftInputsBuilder = ImmutableSet.builder(); - leftInputsBuilder.addAll(context.get()).addAll(node.getCriteria().stream().map(JoinNode.EquiJoinClause::getLeft).iterator()); - if (node.getLeftHashSymbol().isPresent()) { - leftInputsBuilder.add(node.getLeftHashSymbol().get()); - } - leftInputsBuilder.addAll(expectedFilterInputs); - Set leftInputs = leftInputsBuilder.build(); + 
ImmutableSet.Builder leftInputs = ImmutableSet.builder() + .addAll(context.get()) + .addAll(expectedFilterInputs) + .addAll(Iterables.transform(node.getCriteria(), JoinNode.EquiJoinClause::getLeft)); + node.getLeftHashSymbol().ifPresent(leftInputs::add); - ImmutableSet.Builder rightInputsBuilder = ImmutableSet.builder(); - rightInputsBuilder.addAll(context.get()).addAll(Iterables.transform(node.getCriteria(), JoinNode.EquiJoinClause::getRight)); - if (node.getRightHashSymbol().isPresent()) { - rightInputsBuilder.add(node.getRightHashSymbol().get()); - } - rightInputsBuilder.addAll(expectedFilterInputs); - Set rightInputs = rightInputsBuilder.build(); + ImmutableSet.Builder rightInputs = ImmutableSet.builder() + .addAll(context.get()) + .addAll(expectedFilterInputs) + .addAll(Iterables.transform(node.getCriteria(), JoinNode.EquiJoinClause::getRight)); + node.getRightHashSymbol().ifPresent(rightInputs::add); - PlanNode left = context.rewrite(node.getLeft(), leftInputs); - PlanNode right = context.rewrite(node.getRight(), rightInputs); + PlanNode left = context.rewrite(node.getLeft(), leftInputs.build()); + PlanNode right = context.rewrite(node.getRight(), rightInputs.build()); List leftOutputSymbols = node.getLeftOutputSymbols().stream() .filter(context.get()::contains) From f08bd47171555c18759931f5bdda8e28808feccc Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Fri, 17 Apr 2020 22:15:06 +0200 Subject: [PATCH 177/519] Refactor GroupIdMatcher --- .../planner/assertions/GroupIdMatcher.java | 31 ++++++++++--------- .../planner/assertions/PlanMatchPattern.java | 5 ++- ...TestOptimizeMixedDistinctAggregations.java | 4 +-- 3 files changed, 20 insertions(+), 20 deletions(-) diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/assertions/GroupIdMatcher.java b/presto-main/src/test/java/io/prestosql/sql/planner/assertions/GroupIdMatcher.java index c0b2b7a0cc47..884b186c16e2 100644 --- 
a/presto-main/src/test/java/io/prestosql/sql/planner/assertions/GroupIdMatcher.java +++ b/presto-main/src/test/java/io/prestosql/sql/planner/assertions/GroupIdMatcher.java @@ -21,7 +21,6 @@ import io.prestosql.sql.planner.plan.PlanNode; import java.util.List; -import java.util.Map; import static com.google.common.base.MoreObjects.toStringHelper; import static com.google.common.base.Preconditions.checkState; @@ -31,15 +30,15 @@ public class GroupIdMatcher implements Matcher { - private final List> groups; - private final Map identityMappings; - private final String groupIdAlias; + private final List> groupingSets; + private final List aggregationArguments; + private final String groupIdSymbol; - public GroupIdMatcher(List> groups, Map identityMappings, String groupIdAlias) + public GroupIdMatcher(List> groupingSets, List aggregationArguments, String groupIdSymbol) { - this.groups = groups; - this.identityMappings = identityMappings; - this.groupIdAlias = groupIdAlias; + this.groupingSets = groupingSets; + this.aggregationArguments = aggregationArguments; + this.groupIdSymbol = groupIdSymbol; } @Override @@ -54,31 +53,33 @@ public MatchResult detailMatches(PlanNode node, StatsProvider stats, Session ses checkState(shapeMatches(node), "Plan testing framework error: shapeMatches returned false in detailMatches in %s", this.getClass().getName()); GroupIdNode groudIdNode = (GroupIdNode) node; - List> actualGroups = groudIdNode.getGroupingSets(); + List> actualGroupingSets = groudIdNode.getGroupingSets(); List actualAggregationArguments = groudIdNode.getAggregationArguments(); - if (actualGroups.size() != groups.size()) { + if (actualGroupingSets.size() != groupingSets.size()) { return NO_MATCH; } - for (int i = 0; i < actualGroups.size(); i++) { - if (!AggregationMatcher.matches(groups.get(i), actualGroups.get(i), symbolAliases)) { + for (int i = 0; i < actualGroupingSets.size(); i++) { + if (!AggregationMatcher.matches(groupingSets.get(i), actualGroupingSets.get(i), 
symbolAliases)) { return NO_MATCH; } } - if (!AggregationMatcher.matches(identityMappings.keySet(), actualAggregationArguments, symbolAliases)) { + if (!AggregationMatcher.matches(aggregationArguments, actualAggregationArguments, symbolAliases)) { return NO_MATCH; } - return match(groupIdAlias, groudIdNode.getGroupIdSymbol().toSymbolReference()); + return match(groupIdSymbol, groudIdNode.getGroupIdSymbol().toSymbolReference()); } @Override public String toString() { return toStringHelper(this) - .add("groups", groups) + .add("groupingSets", groupingSets) + .add("aggregationArguments", aggregationArguments) + .add("groupIdSymbol", groupIdSymbol) .toString(); } } diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/assertions/PlanMatchPattern.java b/presto-main/src/test/java/io/prestosql/sql/planner/assertions/PlanMatchPattern.java index 63b38dbf4049..945079a52b13 100644 --- a/presto-main/src/test/java/io/prestosql/sql/planner/assertions/PlanMatchPattern.java +++ b/presto-main/src/test/java/io/prestosql/sql/planner/assertions/PlanMatchPattern.java @@ -16,7 +16,6 @@ import com.google.common.base.Predicate; import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Maps; import io.prestosql.Session; @@ -567,9 +566,9 @@ public static PlanMatchPattern correlatedJoin(List correlationSymbolAlia .with(new CorrelationMatcher(correlationSymbolAliases)); } - public static PlanMatchPattern groupingSet(List> groups, String groupIdAlias, PlanMatchPattern source) + public static PlanMatchPattern groupId(List> groupingSets, String groupIdSymbol, PlanMatchPattern source) { - return node(GroupIdNode.class, source).with(new GroupIdMatcher(groups, ImmutableMap.of(), groupIdAlias)); + return node(GroupIdNode.class, source).with(new GroupIdMatcher(groupingSets, ImmutableList.of(), groupIdSymbol)); } private static 
PlanMatchPattern values( diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/optimizations/TestOptimizeMixedDistinctAggregations.java b/presto-main/src/test/java/io/prestosql/sql/planner/optimizations/TestOptimizeMixedDistinctAggregations.java index 96cac6e6f3c6..56eca5f1b35c 100644 --- a/presto-main/src/test/java/io/prestosql/sql/planner/optimizations/TestOptimizeMixedDistinctAggregations.java +++ b/presto-main/src/test/java/io/prestosql/sql/planner/optimizations/TestOptimizeMixedDistinctAggregations.java @@ -38,7 +38,7 @@ import static io.prestosql.sql.planner.assertions.PlanMatchPattern.anySymbol; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.anyTree; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.functionCall; -import static io.prestosql.sql.planner.assertions.PlanMatchPattern.groupingSet; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.groupId; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.project; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.singleGroupingSet; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.tableScan; @@ -86,7 +86,7 @@ public void testMixedDistinctAggregationOptimizer() aggregation(singleGroupingSet(groupByKeysSecond), aggregationsSecond, ImmutableMap.of(), Optional.empty(), SINGLE, project( aggregation(singleGroupingSet(groupByKeysFirst), aggregationsFirst, ImmutableMap.of(), Optional.empty(), SINGLE, - groupingSet(groups.build(), group, + groupId(groups.build(), group, anyTree(tableScan)))))); assertUnitPlan(sql, expectedPlanPattern); From 897e875d9ad38dc40dc74ffb27f6d09240513342 Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Fri, 17 Apr 2020 22:34:56 +0200 Subject: [PATCH 178/519] Add project-off rule for GroupIdNode --- .../prestosql/sql/planner/PlanOptimizers.java | 2 + .../iterative/rule/PruneGroupIdColumns.java | 88 ++++++++++++++ 
.../prestosql/sql/planner/plan/Patterns.java | 5 + .../planner/assertions/PlanMatchPattern.java | 14 ++- .../rule/TestPruneGroupIdColumns.java | 111 ++++++++++++++++++ .../iterative/rule/test/PlanBuilder.java | 19 +++ 6 files changed, 238 insertions(+), 1 deletion(-) create mode 100644 presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneGroupIdColumns.java create mode 100644 presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneGroupIdColumns.java diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java b/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java index c0ec22e5f7be..4094c9a7b400 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java @@ -72,6 +72,7 @@ import io.prestosql.sql.planner.iterative.rule.PruneExchangeColumns; import io.prestosql.sql.planner.iterative.rule.PruneExchangeSourceColumns; import io.prestosql.sql.planner.iterative.rule.PruneFilterColumns; +import io.prestosql.sql.planner.iterative.rule.PruneGroupIdColumns; import io.prestosql.sql.planner.iterative.rule.PruneIndexSourceColumns; import io.prestosql.sql.planner.iterative.rule.PruneIntersectSourceColumns; import io.prestosql.sql.planner.iterative.rule.PruneJoinChildrenColumns; @@ -256,6 +257,7 @@ public PlanOptimizers( new PruneExchangeColumns(), new PruneExchangeSourceColumns(), new PruneFilterColumns(), + new PruneGroupIdColumns(), new PruneIndexSourceColumns(), new PruneIntersectSourceColumns(), new PruneJoinChildrenColumns(), diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneGroupIdColumns.java b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneGroupIdColumns.java new file mode 100644 index 000000000000..ef420b3d1279 --- /dev/null +++ b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneGroupIdColumns.java @@ -0,0 +1,88 @@ 
+/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.sql.planner.iterative.rule; + +import io.prestosql.sql.planner.Symbol; +import io.prestosql.sql.planner.plan.GroupIdNode; +import io.prestosql.sql.planner.plan.PlanNode; + +import java.util.List; +import java.util.Optional; +import java.util.Set; + +import static com.google.common.collect.ImmutableList.toImmutableList; +import static io.prestosql.sql.planner.plan.Patterns.groupId; + +/** + * This rule prunes GroupIdNode's aggregationArguments. + *

+ * Transforms: + *

+ * - Project (a, key_1, key_2, group_id)
+ *      - GroupId
+ *          grouping sets: ((key_1), (key_2))
+ *          aggregation arguments: (a, b)
+ *          group id symbol: group_id
+ *           - Source (a, b, key_1, key_2)
+ * 
+ * Into: + *
+ * - Project (a, key_1, key_2, group_id)
+ *      - GroupId
+ *          grouping sets: ((key_1), (key_2))
+ *          aggregation arguments: (a)
+ *          group id symbol: group_id
+ *           - Source (a, b, key_1, key_2)
+ * 
+ * Note: this rule does not prune any grouping symbols. + * Currently, GroupIdNode is only used in regard to AggregationNode. + * The presence of an AggregationNode in the plan ensures that + * the grouping symbols are referenced. + * This rule could be extended to prune grouping symbols. + *

+ * Note: after pruning an aggregation argument, the child node + * of the GroupIdNode becomes eligible for symbol pruning. + * That is performed by the rule PruneGroupIdSourceColumns. + *

+ */ +public class PruneGroupIdColumns + extends ProjectOffPushDownRule +{ + public PruneGroupIdColumns() + { + super(groupId()); + } + + @Override + protected Optional pushDownProjectOff( + Context context, + GroupIdNode groupIdNode, + Set referencedOutputs) + { + List prunedAggregationArguments = groupIdNode.getAggregationArguments().stream() + .filter(referencedOutputs::contains) + .collect(toImmutableList()); + if (prunedAggregationArguments.size() == groupIdNode.getAggregationArguments().size()) { + return Optional.empty(); + } + + return Optional.of(new GroupIdNode( + groupIdNode.getId(), + groupIdNode.getSource(), + groupIdNode.getGroupingSets(), + groupIdNode.getGroupingColumns(), + prunedAggregationArguments, + groupIdNode.getGroupIdSymbol())); + } +} diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/plan/Patterns.java b/presto-main/src/main/java/io/prestosql/sql/planner/plan/Patterns.java index 10adba2d7fab..018a9c2b5fdd 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/plan/Patterns.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/plan/Patterns.java @@ -42,6 +42,11 @@ public static Pattern aggregation() return typeOf(AggregationNode.class); } + public static Pattern groupId() + { + return typeOf(GroupIdNode.class); + } + public static Pattern applyNode() { return typeOf(ApplyNode.class); diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/assertions/PlanMatchPattern.java b/presto-main/src/test/java/io/prestosql/sql/planner/assertions/PlanMatchPattern.java index 945079a52b13..35808ed45d39 100644 --- a/presto-main/src/test/java/io/prestosql/sql/planner/assertions/PlanMatchPattern.java +++ b/presto-main/src/test/java/io/prestosql/sql/planner/assertions/PlanMatchPattern.java @@ -568,7 +568,19 @@ public static PlanMatchPattern correlatedJoin(List correlationSymbolAlia public static PlanMatchPattern groupId(List> groupingSets, String groupIdSymbol, PlanMatchPattern source) { - return node(GroupIdNode.class, 
source).with(new GroupIdMatcher(groupingSets, ImmutableList.of(), groupIdSymbol)); + return groupId(groupingSets, ImmutableList.of(), groupIdSymbol, source); + } + + public static PlanMatchPattern groupId( + List> groupingSets, + List aggregationArguments, + String groupIdSymbol, + PlanMatchPattern source) + { + return node(GroupIdNode.class, source).with(new GroupIdMatcher( + groupingSets, + aggregationArguments, + groupIdSymbol)); } private static PlanMatchPattern values( diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneGroupIdColumns.java b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneGroupIdColumns.java new file mode 100644 index 000000000000..982b10200420 --- /dev/null +++ b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneGroupIdColumns.java @@ -0,0 +1,111 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.sql.planner.iterative.rule; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import io.prestosql.sql.planner.Symbol; +import io.prestosql.sql.planner.iterative.rule.test.BaseRuleTest; +import io.prestosql.sql.planner.plan.Assignments; +import org.testng.annotations.Test; + +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.expression; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.groupId; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.strictProject; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.values; + +public class TestPruneGroupIdColumns + extends BaseRuleTest +{ + @Test + public void testPruneAggregationArgument() + { + tester().assertThat(new PruneGroupIdColumns()) + .on(p -> { + Symbol a = p.symbol("a"); + Symbol b = p.symbol("b"); + Symbol k = p.symbol("k"); + Symbol groupId = p.symbol("group_id"); + return p.project( + Assignments.identity(a, k, groupId), + p.groupId( + ImmutableList.of(ImmutableList.of(k), ImmutableList.of(k)), + ImmutableList.of(a, b), + groupId, + p.values(a, b, k))); + }) + .matches( + strictProject( + ImmutableMap.of("a", expression("a"), "k", expression("k"), "group_id", expression("group_id")), + groupId( + ImmutableList.of(ImmutableList.of("k"), ImmutableList.of("k")), + ImmutableList.of("a"), + "group_id", + values("a", "b", "k")))); + } + + @Test + public void testAllOutputsReferenced() + { + tester().assertThat(new PruneGroupIdColumns()) + .on(p -> { + Symbol a = p.symbol("a"); + Symbol k = p.symbol("k"); + Symbol groupId = p.symbol("group_id"); + return p.project( + Assignments.identity(a, k, groupId), + p.groupId( + ImmutableList.of(ImmutableList.of(k), ImmutableList.of(k)), + ImmutableList.of(a), + groupId, + p.values(a, k))); + }).doesNotFire(); + } + + @Test + public void doNotPruneGroupingSymbols() + { + tester().assertThat(new PruneGroupIdColumns()) + .on(p -> { + 
Symbol a = p.symbol("a"); + Symbol k = p.symbol("k"); + Symbol groupId = p.symbol("group_id"); + return p.project( + Assignments.identity(a, groupId), + p.groupId( + ImmutableList.of(ImmutableList.of(k), ImmutableList.of(k)), + ImmutableList.of(a), + groupId, + p.values(a, k))); + }).doesNotFire(); + } + + @Test + public void testGroupIdSymbolUnreferenced() + { + tester().assertThat(new PruneGroupIdColumns()) + .on(p -> { + Symbol a = p.symbol("a"); + Symbol k = p.symbol("k"); + Symbol groupId = p.symbol("group_id"); + return p.project( + Assignments.identity(a, k), + p.groupId( + ImmutableList.of(ImmutableList.of(k), ImmutableList.of(k)), + ImmutableList.of(a), + groupId, + p.values(a, k))); + }).doesNotFire(); + } +} diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/test/PlanBuilder.java b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/test/PlanBuilder.java index 7223a0972bf6..5e9c33eadf8e 100644 --- a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/test/PlanBuilder.java +++ b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/test/PlanBuilder.java @@ -55,6 +55,7 @@ import io.prestosql.sql.planner.plan.ExceptNode; import io.prestosql.sql.planner.plan.ExchangeNode; import io.prestosql.sql.planner.plan.FilterNode; +import io.prestosql.sql.planner.plan.GroupIdNode; import io.prestosql.sql.planner.plan.IndexJoinNode; import io.prestosql.sql.planner.plan.IndexSourceNode; import io.prestosql.sql.planner.plan.IntersectNode; @@ -89,6 +90,7 @@ import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -108,6 +110,7 @@ import static io.prestosql.util.MoreLists.nElements; import static java.lang.String.format; import static java.util.Collections.emptyList; +import static java.util.function.Function.identity; public class PlanBuilder { @@ -292,6 +295,22 @@ public AggregationNode aggregation(Consumer 
aggregationBuild return aggregationBuilder.build(); } + public GroupIdNode groupId(List> groupingSets, List aggregationArguments, Symbol groupIdSymbol, PlanNode source) + { + Map groupingColumns = groupingSets.stream() + .flatMap(Collection::stream) + .distinct() + .collect(ImmutableMap.toImmutableMap(identity(), identity())); + + return new GroupIdNode( + idAllocator.getNextId(), + source, + groupingSets, + groupingColumns, + aggregationArguments, + groupIdSymbol); + } + public DistinctLimitNode distinctLimit(long count, List distinctSymbols, PlanNode source) { return distinctLimit(count, distinctSymbols, Optional.empty(), source); From fb8c5c44a672af7eaa4eb0666b3016d70d3b9612 Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Sat, 18 Apr 2020 00:06:45 +0200 Subject: [PATCH 179/519] Add column-pruning rule for GroupIdNode --- .../prestosql/sql/planner/PlanOptimizers.java | 2 + .../rule/PruneGroupIdSourceColumns.java | 42 +++++++++++ .../rule/TestPruneGroupIdSourceColumns.java | 71 +++++++++++++++++++ 3 files changed, 115 insertions(+) create mode 100644 presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneGroupIdSourceColumns.java create mode 100644 presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneGroupIdSourceColumns.java diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java b/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java index 4094c9a7b400..d052f9935157 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java @@ -73,6 +73,7 @@ import io.prestosql.sql.planner.iterative.rule.PruneExchangeSourceColumns; import io.prestosql.sql.planner.iterative.rule.PruneFilterColumns; import io.prestosql.sql.planner.iterative.rule.PruneGroupIdColumns; +import io.prestosql.sql.planner.iterative.rule.PruneGroupIdSourceColumns; import 
io.prestosql.sql.planner.iterative.rule.PruneIndexSourceColumns; import io.prestosql.sql.planner.iterative.rule.PruneIntersectSourceColumns; import io.prestosql.sql.planner.iterative.rule.PruneJoinChildrenColumns; @@ -258,6 +259,7 @@ public PlanOptimizers( new PruneExchangeSourceColumns(), new PruneFilterColumns(), new PruneGroupIdColumns(), + new PruneGroupIdSourceColumns(), new PruneIndexSourceColumns(), new PruneIntersectSourceColumns(), new PruneJoinChildrenColumns(), diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneGroupIdSourceColumns.java b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneGroupIdSourceColumns.java new file mode 100644 index 000000000000..c94cb9de892e --- /dev/null +++ b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneGroupIdSourceColumns.java @@ -0,0 +1,42 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.sql.planner.iterative.rule; + +import io.prestosql.matching.Captures; +import io.prestosql.matching.Pattern; +import io.prestosql.sql.planner.iterative.Rule; +import io.prestosql.sql.planner.plan.GroupIdNode; + +import static io.prestosql.sql.planner.iterative.rule.Util.restrictChildOutputs; +import static io.prestosql.sql.planner.plan.Patterns.groupId; + +public class PruneGroupIdSourceColumns + implements Rule +{ + private static final Pattern PATTERN = groupId(); + + @Override + public Pattern getPattern() + { + return PATTERN; + } + + @Override + public Result apply(GroupIdNode groupId, Captures captures, Context context) + { + return restrictChildOutputs(context.getIdAllocator(), groupId, groupId.getInputSymbols()) + .map(Result::ofPlanNode) + .orElse(Result.empty()); + } +} diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneGroupIdSourceColumns.java b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneGroupIdSourceColumns.java new file mode 100644 index 000000000000..1d470ea4d189 --- /dev/null +++ b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneGroupIdSourceColumns.java @@ -0,0 +1,71 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.sql.planner.iterative.rule; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import io.prestosql.sql.planner.Symbol; +import io.prestosql.sql.planner.iterative.rule.test.BaseRuleTest; +import org.testng.annotations.Test; + +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.expression; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.groupId; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.strictProject; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.values; + +public class TestPruneGroupIdSourceColumns + extends BaseRuleTest +{ + @Test + public void testPruneInputColumn() + { + tester().assertThat(new PruneGroupIdSourceColumns()) + .on(p -> { + Symbol a = p.symbol("a"); + Symbol b = p.symbol("b"); + Symbol k = p.symbol("k"); + Symbol groupId = p.symbol("group_id"); + return p.groupId( + ImmutableList.of(ImmutableList.of(k), ImmutableList.of(k)), + ImmutableList.of(a), + groupId, + p.values(a, b, k)); + }) + .matches( + groupId( + ImmutableList.of(ImmutableList.of("k"), ImmutableList.of("k")), + ImmutableList.of("a"), + "group_id", + strictProject( + ImmutableMap.of("a", expression("a"), "k", expression("k")), + values("a", "b", "k")))); + } + + @Test + public void allInputsReferenced() + { + tester().assertThat(new PruneGroupIdSourceColumns()) + .on(p -> { + Symbol a = p.symbol("a"); + Symbol k = p.symbol("k"); + Symbol groupId = p.symbol("group_id"); + return p.groupId( + ImmutableList.of(ImmutableList.of(k), ImmutableList.of(k)), + ImmutableList.of(a), + groupId, + p.values(a, k)); + }) + .doesNotFire(); + } +} From 68c939223f65d4b50fcae332611688b4f3802954 Mon Sep 17 00:00:00 2001 From: Martin Traverso Date: Wed, 15 Apr 2020 23:53:37 -0700 Subject: [PATCH 180/519] Support composite publish_address in Elasticsearch Starting with version 7.x, the publish_address field can contain addresses of the following form: 
cname/ip:port ip:port If the CNAME is present, the connector will now use the CNAME and port. Otherwise, it will use the IP and port. This change also validates that the address matches one of the above formats. --- .../client/ElasticsearchClient.java | 30 +++++++++++++- .../client/TestExtractAddress.java | 39 +++++++++++++++++++ 2 files changed, 68 insertions(+), 1 deletion(-) create mode 100644 presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/client/TestExtractAddress.java diff --git a/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/client/ElasticsearchClient.java b/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/client/ElasticsearchClient.java index 24b5c0a6d327..e5f1ea03ed22 100644 --- a/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/client/ElasticsearchClient.java +++ b/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/client/ElasticsearchClient.java @@ -20,6 +20,7 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.NullNode; +import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import io.airlift.json.JsonCodec; @@ -83,6 +84,8 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Function; +import java.util.regex.Matcher; +import java.util.regex.Pattern; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.collect.ImmutableMap.toImmutableMap; @@ -108,6 +111,8 @@ public class ElasticsearchClient private static final JsonCodec NODES_RESPONSE_CODEC = jsonCodec(NodesResponse.class); private static final ObjectMapper OBJECT_MAPPER = new ObjectMapperProvider().get(); + private static final Pattern ADDRESS_PATTERN = Pattern.compile("((?[^/]+)/)?(?.+):(?\\d+)"); + private final 
RestHighLevelClient client; private final int scrollSize; private final Duration scrollTimeout; @@ -343,7 +348,10 @@ private Set fetchNodes() NodesResponse.Node node = entry.getValue(); if (node.getRoles().contains("data")) { - result.add(new ElasticsearchNode(nodeId, node.getAddress())); + Optional address = node.getAddress() + .flatMap(ElasticsearchClient::extractAddress); + + result.add(new ElasticsearchNode(nodeId, address)); } } @@ -657,6 +665,26 @@ private T doRequest(String path, ResponseHandler handler) return handler.process(body); } + @VisibleForTesting + static Optional extractAddress(String address) + { + Matcher matcher = ADDRESS_PATTERN.matcher(address); + + if (!matcher.matches()) { + return Optional.empty(); + } + + String cname = matcher.group("cname"); + String ip = matcher.group("ip"); + String port = matcher.group("port"); + + if (cname != null) { + return Optional.of(cname + ":" + port); + } + + return Optional.of(ip + ":" + port); + } + private interface ResponseHandler { T process(String body); diff --git a/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/client/TestExtractAddress.java b/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/client/TestExtractAddress.java new file mode 100644 index 000000000000..7853b26e6e24 --- /dev/null +++ b/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/client/TestExtractAddress.java @@ -0,0 +1,39 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.elasticsearch.client; + +import org.testng.annotations.Test; + +import java.util.Optional; + +import static io.prestosql.elasticsearch.client.ElasticsearchClient.extractAddress; +import static org.testng.Assert.assertEquals; + +public class TestExtractAddress +{ + @Test + public void test() + { + assertEquals(extractAddress("node/1.2.3.4:9200"), Optional.of("node:9200")); + assertEquals(extractAddress("1.2.3.4:9200"), Optional.of("1.2.3.4:9200")); + assertEquals(extractAddress("node/1.2.3.4:9200"), Optional.of("node:9200")); + assertEquals(extractAddress("node/[fe80::1]:9200"), Optional.of("node:9200")); + assertEquals(extractAddress("[fe80::1]:9200"), Optional.of("[fe80::1]:9200")); + + assertEquals(extractAddress(""), Optional.empty()); + assertEquals(extractAddress("node/1.2.3.4"), Optional.empty()); + assertEquals(extractAddress("node/1.2.3.4:xxxx"), Optional.empty()); + assertEquals(extractAddress("1.2.3.4:xxxx"), Optional.empty()); + } +} From 967429323fc3fc639e7a4eb394160cd27014ead3 Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Mon, 20 Apr 2020 19:56:40 +0200 Subject: [PATCH 181/519] Add rule to remove redundant OffsetNode --- .../prestosql/sql/planner/PlanOptimizers.java | 2 + .../iterative/rule/RemoveRedundantOffset.java | 53 +++++++++++++++ .../rule/TestRemoveRedundantOffset.java | 68 +++++++++++++++++++ 3 files changed, 123 insertions(+) create mode 100644 presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/RemoveRedundantOffset.java create mode 100644 presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestRemoveRedundantOffset.java diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java b/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java index d052f9935157..dda9e2995f67 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java +++ 
b/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java @@ -125,6 +125,7 @@ import io.prestosql.sql.planner.iterative.rule.RemoveRedundantIdentityProjections; import io.prestosql.sql.planner.iterative.rule.RemoveRedundantJoin; import io.prestosql.sql.planner.iterative.rule.RemoveRedundantLimit; +import io.prestosql.sql.planner.iterative.rule.RemoveRedundantOffset; import io.prestosql.sql.planner.iterative.rule.RemoveRedundantSort; import io.prestosql.sql.planner.iterative.rule.RemoveRedundantTableScanPredicate; import io.prestosql.sql.planner.iterative.rule.RemoveRedundantTopN; @@ -354,6 +355,7 @@ public PlanOptimizers( new PushLimitThroughUnion(), new RemoveTrivialFilters(), new RemoveRedundantLimit(), + new RemoveRedundantOffset(), new RemoveRedundantSort(), new RemoveRedundantTopN(), new RemoveRedundantDistinctLimit(), diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/RemoveRedundantOffset.java b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/RemoveRedundantOffset.java new file mode 100644 index 000000000000..99d07954e65f --- /dev/null +++ b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/RemoveRedundantOffset.java @@ -0,0 +1,53 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.sql.planner.iterative.rule; + +import com.google.common.collect.ImmutableList; +import io.prestosql.matching.Captures; +import io.prestosql.matching.Pattern; +import io.prestosql.sql.planner.iterative.Rule; +import io.prestosql.sql.planner.plan.OffsetNode; +import io.prestosql.sql.planner.plan.ValuesNode; + +import static io.prestosql.sql.planner.optimizations.QueryCardinalityUtil.isAtMost; +import static io.prestosql.sql.planner.plan.Patterns.offset; + +/** + * Remove Offset node and its subplan when the subplan is guaranteed to produce no more rows than the offset + * and replace the plan with empty values. + * Remove Offset node from the plan if the offset is 0. + */ +public class RemoveRedundantOffset + implements Rule +{ + private static final Pattern PATTERN = offset(); + + @Override + public Pattern getPattern() + { + return PATTERN; + } + + @Override + public Result apply(OffsetNode offset, Captures captures, Context context) + { + if (isAtMost(offset.getSource(), context.getLookup(), offset.getCount())) { + return Result.ofPlanNode(new ValuesNode(offset.getId(), offset.getOutputSymbols(), ImmutableList.of())); + } + if (offset.getCount() == 0) { + return Result.ofPlanNode(offset.getSource()); + } + return Result.empty(); + } +} diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestRemoveRedundantOffset.java b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestRemoveRedundantOffset.java new file mode 100644 index 000000000000..f45a011d98d2 --- /dev/null +++ b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestRemoveRedundantOffset.java @@ -0,0 +1,68 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.sql.planner.iterative.rule; + +import com.google.common.collect.ImmutableList; +import io.prestosql.sql.planner.iterative.rule.test.BaseRuleTest; +import org.testng.annotations.Test; + +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.values; +import static io.prestosql.sql.planner.iterative.rule.test.PlanBuilder.expression; + +public class TestRemoveRedundantOffset + extends BaseRuleTest +{ + @Test + public void testOffsetEqualToSubplanCardinality() + { + tester().assertThat(new RemoveRedundantOffset()) + .on(p -> p.offset(10, p.values(10))) + .matches(values(ImmutableList.of(), ImmutableList.of())); + } + + @Test + public void testOffsetExceedsSubplanCardinality() + { + tester().assertThat(new RemoveRedundantOffset()) + .on(p -> p.offset(10, p.values(5))) + .matches(values(ImmutableList.of(), ImmutableList.of())); + } + + @Test + public void testOffsetEqualToZero() + { + tester().assertThat(new RemoveRedundantOffset()) + .on(p -> p.offset( + 0, + p.values( + ImmutableList.of(p.symbol("a")), + ImmutableList.of( + ImmutableList.of(expression("1")), + ImmutableList.of(expression("2")))))) + .matches( + values( + ImmutableList.of("a"), + ImmutableList.of( + ImmutableList.of(expression("1")), + ImmutableList.of(expression("2"))))); + } + + @Test + public void testDoNotFireWhenOffsetLowerThanSubplanCardinality() + { + tester().assertThat(new RemoveRedundantOffset()) + .on(p -> p.offset(5, p.values(10))) + .doesNotFire(); + } +} From 03f438465aec42e527cc44b5d75dbedae0e2e896 Mon Sep 17 00:00:00 2001 From: 
kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Mon, 20 Apr 2020 18:29:55 +0200 Subject: [PATCH 182/519] Fix visitLimit in PruneUnreferencedOutputs When rewriting LimitNode, its tiesResolvingScheme was skipped. --- .../sql/planner/optimizations/PruneUnreferencedOutputs.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PruneUnreferencedOutputs.java b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PruneUnreferencedOutputs.java index a1426ab93c01..5d4d56fbcf9e 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PruneUnreferencedOutputs.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PruneUnreferencedOutputs.java @@ -576,7 +576,7 @@ public PlanNode visitLimit(LimitNode node, RewriteContext> context) .addAll(context.get()) .addAll(node.getTiesResolvingScheme().map(OrderingScheme::getOrderBy).orElse(ImmutableList.of())); PlanNode source = context.rewrite(node.getSource(), expectedInputs.build()); - return new LimitNode(node.getId(), source, node.getCount(), node.isPartial()); + return new LimitNode(node.getId(), source, node.getCount(), node.getTiesResolvingScheme(), node.isPartial()); } @Override From 4e8b25dfca33d9264c0c7ac0ace4e965f3fdbc27 Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Mon, 20 Apr 2020 18:33:21 +0200 Subject: [PATCH 183/519] Improve test for PruneLimitColumns rule Change test to show that pruning doesn't affect LimitNode's ties resolving scheme. 
--- .../iterative/rule/TestPruneLimitColumns.java | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneLimitColumns.java b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneLimitColumns.java index e63499326080..4b6ab59cd55c 100644 --- a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneLimitColumns.java +++ b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneLimitColumns.java @@ -29,8 +29,11 @@ import static com.google.common.collect.ImmutableSet.toImmutableSet; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.expression; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.limit; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.sort; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.strictProject; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.values; +import static io.prestosql.sql.tree.SortItem.NullOrdering.FIRST; +import static io.prestosql.sql.tree.SortItem.Ordering.ASCENDING; public class TestPruneLimitColumns extends BaseRuleTest @@ -59,17 +62,25 @@ public void testAllOutputsReferenced() } @Test - public void doNotPruneLimitWithTies() + public void testDoNotPruneTiesResolvingSymbols() { tester().assertThat(new PruneLimitColumns()) .on(p -> { Symbol a = p.symbol("a"); Symbol b = p.symbol("b"); return p.project( - Assignments.identity(ImmutableList.of(b)), + Assignments.of(), p.limit(1, ImmutableList.of(a), p.values(a, b))); }) - .doesNotFire(); + .matches( + strictProject( + ImmutableMap.of(), + limit( + 1, + ImmutableList.of(sort("a", ASCENDING, FIRST)), + strictProject( + ImmutableMap.of("a", expression("a")), + values("a", "b"))))); } private ProjectNode buildProjectedLimit(PlanBuilder planBuilder, Predicate projectionFilter) From bfcccfa9a1580d1671d1d58998e76df0f4bbd800 Mon Sep 17 00:00:00 
2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Mon, 20 Apr 2020 20:50:59 +0200 Subject: [PATCH 184/519] Order column pruning rules alphabetically --- .../java/io/prestosql/sql/planner/PlanOptimizers.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java b/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java index dda9e2995f67..438f1795586a 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java @@ -265,20 +265,20 @@ public PlanOptimizers( new PruneIntersectSourceColumns(), new PruneJoinChildrenColumns(), new PruneJoinColumns(), + new PruneLimitColumns(), new PruneMarkDistinctColumns(), + new PruneOffsetColumns(), new PruneOutputColumns(), new PruneProjectColumns(), new PruneSampleColumns(), new PruneSemiJoinColumns(), new PruneSemiJoinFilteringSourceColumns(), + new PruneTableScanColumns(metadata, typeAnalyzer), new PruneTopNColumns(), new PruneUnionColumns(), new PruneUnionSourceColumns(), new PruneValuesColumns(), - new PruneWindowColumns(), - new PruneOffsetColumns(), - new PruneLimitColumns(), - new PruneTableScanColumns(metadata, typeAnalyzer)); + new PruneWindowColumns()); Set> projectionPushdownRules = ImmutableSet.of( new PushProjectionIntoTableScan(metadata, typeAnalyzer), From bfd8fbe82a1e936587606ae2492a1c91075abe64 Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Thu, 16 Apr 2020 22:41:25 +0200 Subject: [PATCH 185/519] Remove redundant check --- .../sql/planner/optimizations/IndexJoinOptimizer.java | 4 ++-- .../sql/planner/sanity/ValidateDependenciesChecker.java | 4 +--- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/IndexJoinOptimizer.java 
b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/IndexJoinOptimizer.java index 5444b025a4a5..fbfb821cbe0c 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/IndexJoinOptimizer.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/IndexJoinOptimizer.java @@ -126,7 +126,7 @@ public PlanNode visitJoin(JoinNode node, RewriteContext context) if (leftIndexCandidate.isPresent()) { // Sanity check that we can trace the path for the index lookup key Map trace = IndexKeyTracer.trace(leftIndexCandidate.get(), ImmutableSet.copyOf(leftJoinSymbols)); - checkState(!trace.isEmpty() && leftJoinSymbols.containsAll(trace.keySet())); + checkState(!trace.isEmpty()); } Optional rightIndexCandidate = IndexSourceRewriter.rewriteWithIndex( @@ -139,7 +139,7 @@ public PlanNode visitJoin(JoinNode node, RewriteContext context) if (rightIndexCandidate.isPresent()) { // Sanity check that we can trace the path for the index lookup key Map trace = IndexKeyTracer.trace(rightIndexCandidate.get(), ImmutableSet.copyOf(rightJoinSymbols)); - checkState(!trace.isEmpty() && rightJoinSymbols.containsAll(trace.keySet())); + checkState(!trace.isEmpty()); } switch (node.getType()) { diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/sanity/ValidateDependenciesChecker.java b/presto-main/src/main/java/io/prestosql/sql/planner/sanity/ValidateDependenciesChecker.java index 2259174f0921..f401d34a2a48 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/sanity/ValidateDependenciesChecker.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/sanity/ValidateDependenciesChecker.java @@ -448,9 +448,7 @@ public Void visitIndexJoin(IndexJoinNode node, Set boundSymbols) .map(IndexJoinNode.EquiJoinClause::getIndex) .collect(toImmutableSet()); Map trace = IndexKeyTracer.trace(node.getIndexSource(), lookupSymbols); - checkArgument(!trace.isEmpty() && lookupSymbols.containsAll(trace.keySet()), - "Index lookup symbols are not 
traceable to index source: %s", - lookupSymbols); + checkArgument(!trace.isEmpty(), "Index lookup symbols are not traceable to index source: %s", lookupSymbols); return null; } From b80a49f2f7e546c6b5ed271d9eb0d8a802ce1c7a Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Sat, 18 Apr 2020 23:43:28 +0200 Subject: [PATCH 186/519] Add project-off rule for IndexJoinNode --- .../prestosql/sql/planner/PlanOptimizers.java | 2 + .../iterative/rule/PruneIndexJoinColumns.java | 55 ++++++ .../sql/planner/plan/IndexJoinNode.java | 31 ++++ .../prestosql/sql/planner/plan/Patterns.java | 5 + .../IndexJoinEquiClauseProvider.java | 43 +++++ .../planner/assertions/IndexJoinMatcher.java | 103 +++++++++++ .../planner/assertions/PlanMatchPattern.java | 18 ++ .../rule/TestPruneIndexJoinColumns.java | 160 ++++++++++++++++++ .../iterative/rule/test/PlanBuilder.java | 19 ++- 9 files changed, 432 insertions(+), 4 deletions(-) create mode 100644 presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneIndexJoinColumns.java create mode 100644 presto-main/src/test/java/io/prestosql/sql/planner/assertions/IndexJoinEquiClauseProvider.java create mode 100644 presto-main/src/test/java/io/prestosql/sql/planner/assertions/IndexJoinMatcher.java create mode 100644 presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneIndexJoinColumns.java diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java b/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java index 438f1795586a..2e157ce5e492 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java @@ -74,6 +74,7 @@ import io.prestosql.sql.planner.iterative.rule.PruneFilterColumns; import io.prestosql.sql.planner.iterative.rule.PruneGroupIdColumns; import io.prestosql.sql.planner.iterative.rule.PruneGroupIdSourceColumns; +import 
io.prestosql.sql.planner.iterative.rule.PruneIndexJoinColumns; import io.prestosql.sql.planner.iterative.rule.PruneIndexSourceColumns; import io.prestosql.sql.planner.iterative.rule.PruneIntersectSourceColumns; import io.prestosql.sql.planner.iterative.rule.PruneJoinChildrenColumns; @@ -261,6 +262,7 @@ public PlanOptimizers( new PruneFilterColumns(), new PruneGroupIdColumns(), new PruneGroupIdSourceColumns(), + new PruneIndexJoinColumns(), new PruneIndexSourceColumns(), new PruneIntersectSourceColumns(), new PruneJoinChildrenColumns(), diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneIndexJoinColumns.java b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneIndexJoinColumns.java new file mode 100644 index 000000000000..c2b1ebd44d5e --- /dev/null +++ b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneIndexJoinColumns.java @@ -0,0 +1,55 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.sql.planner.iterative.rule; + +import com.google.common.collect.ImmutableSet; +import io.prestosql.sql.planner.Symbol; +import io.prestosql.sql.planner.plan.IndexJoinNode; +import io.prestosql.sql.planner.plan.PlanNode; + +import java.util.Optional; +import java.util.Set; + +import static com.google.common.collect.ImmutableList.toImmutableList; +import static io.prestosql.sql.planner.iterative.rule.Util.restrictChildOutputs; +import static io.prestosql.sql.planner.plan.Patterns.indexJoin; + +public class PruneIndexJoinColumns + extends ProjectOffPushDownRule +{ + public PruneIndexJoinColumns() + { + super(indexJoin()); + } + + @Override + protected Optional pushDownProjectOff(Context context, IndexJoinNode indexJoinNode, Set referencedOutputs) + { + ImmutableSet.Builder probeInputs = ImmutableSet.builder() + .addAll(referencedOutputs) + .addAll(indexJoinNode.getCriteria().stream() + .map(IndexJoinNode.EquiJoinClause::getProbe) + .collect(toImmutableList())); + indexJoinNode.getProbeHashSymbol().ifPresent(probeInputs::add); + + ImmutableSet.Builder indexInputs = ImmutableSet.builder() + .addAll(referencedOutputs) + .addAll(indexJoinNode.getCriteria().stream() + .map(IndexJoinNode.EquiJoinClause::getIndex) + .collect(toImmutableList())); + indexJoinNode.getIndexHashSymbol().ifPresent(indexInputs::add); + + return restrictChildOutputs(context.getIdAllocator(), indexJoinNode, probeInputs.build(), indexInputs.build()); + } +} diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/plan/IndexJoinNode.java b/presto-main/src/main/java/io/prestosql/sql/planner/plan/IndexJoinNode.java index e657b3e1629f..0d6b8d67244e 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/plan/IndexJoinNode.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/plan/IndexJoinNode.java @@ -21,9 +21,11 @@ import javax.annotation.concurrent.Immutable; import java.util.List; +import java.util.Objects; import java.util.Optional; import static 
com.google.common.base.Preconditions.checkArgument; +import static java.lang.String.format; import static java.util.Objects.requireNonNull; @Immutable @@ -161,5 +163,34 @@ public Symbol getIndex() { return index; } + + @Override + public boolean equals(Object obj) + { + if (this == obj) { + return true; + } + + if (obj == null || !this.getClass().equals(obj.getClass())) { + return false; + } + + IndexJoinNode.EquiJoinClause other = (IndexJoinNode.EquiJoinClause) obj; + + return Objects.equals(this.probe, other.probe) && + Objects.equals(this.index, other.index); + } + + @Override + public int hashCode() + { + return Objects.hash(probe, index); + } + + @Override + public String toString() + { + return format("%s = %s", probe, index); + } } } diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/plan/Patterns.java b/presto-main/src/main/java/io/prestosql/sql/planner/plan/Patterns.java index 018a9c2b5fdd..7189306f7ec4 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/plan/Patterns.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/plan/Patterns.java @@ -72,6 +72,11 @@ public static Pattern filter() return typeOf(FilterNode.class); } + public static Pattern indexJoin() + { + return typeOf(IndexJoinNode.class); + } + public static Pattern indexSource() { return typeOf(IndexSourceNode.class); diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/assertions/IndexJoinEquiClauseProvider.java b/presto-main/src/test/java/io/prestosql/sql/planner/assertions/IndexJoinEquiClauseProvider.java new file mode 100644 index 000000000000..723d2de42d0c --- /dev/null +++ b/presto-main/src/test/java/io/prestosql/sql/planner/assertions/IndexJoinEquiClauseProvider.java @@ -0,0 +1,43 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.sql.planner.assertions; + +import io.prestosql.sql.planner.plan.IndexJoinNode; + +import static java.util.Objects.requireNonNull; + +class IndexJoinEquiClauseProvider + implements ExpectedValueProvider +{ + private final SymbolAlias probe; + private final SymbolAlias index; + + IndexJoinEquiClauseProvider(SymbolAlias probe, SymbolAlias index) + { + this.probe = requireNonNull(probe, "probe is null"); + this.index = requireNonNull(index, "index is null"); + } + + @Override + public IndexJoinNode.EquiJoinClause getExpectedValue(SymbolAliases aliases) + { + return new IndexJoinNode.EquiJoinClause(probe.toSymbol(aliases), index.toSymbol(aliases)); + } + + @Override + public String toString() + { + return probe + " = " + index; + } +} diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/assertions/IndexJoinMatcher.java b/presto-main/src/test/java/io/prestosql/sql/planner/assertions/IndexJoinMatcher.java new file mode 100644 index 000000000000..9d939f042d99 --- /dev/null +++ b/presto-main/src/test/java/io/prestosql/sql/planner/assertions/IndexJoinMatcher.java @@ -0,0 +1,103 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.sql.planner.assertions; + +import com.google.common.collect.ImmutableSet; +import io.prestosql.Session; +import io.prestosql.cost.StatsProvider; +import io.prestosql.metadata.Metadata; +import io.prestosql.sql.planner.plan.IndexJoinNode; +import io.prestosql.sql.planner.plan.PlanNode; + +import java.util.List; +import java.util.Optional; +import java.util.Set; + +import static com.google.common.base.MoreObjects.toStringHelper; +import static com.google.common.base.Preconditions.checkState; +import static com.google.common.collect.ImmutableSet.toImmutableSet; +import static io.prestosql.sql.planner.assertions.MatchResult.NO_MATCH; +import static java.util.Objects.requireNonNull; + +final class IndexJoinMatcher + implements Matcher +{ + private final IndexJoinNode.Type type; + private final List> criteria; + private final Optional probeHashSymbol; + private final Optional indexHashSymbol; + + IndexJoinMatcher( + IndexJoinNode.Type type, + List> criteria, + Optional probeHashSymbol, + Optional indexHashSymbol) + { + this.type = requireNonNull(type, "type is null"); + this.criteria = requireNonNull(criteria, "criteria is null"); + this.probeHashSymbol = requireNonNull(probeHashSymbol, "probeHashSymbol is null"); + this.indexHashSymbol = requireNonNull(indexHashSymbol, "indexHashSymbol is null"); + } + + @Override + public boolean shapeMatches(PlanNode node) + { + if (!(node instanceof IndexJoinNode)) { + return false; + } + + IndexJoinNode indexJoinNode = (IndexJoinNode) node; + return indexJoinNode.getType() == type; + } + + 
@Override + public MatchResult detailMatches(PlanNode node, StatsProvider stats, Session session, Metadata metadata, SymbolAliases symbolAliases) + { + checkState(shapeMatches(node), "Plan testing framework error: shapeMatches returned false in detailMatches in %s", this.getClass().getName()); + IndexJoinNode indexJoinNode = (IndexJoinNode) node; + + if (indexJoinNode.getCriteria().size() != criteria.size()) { + return NO_MATCH; + } + Set actualCriteria = ImmutableSet.copyOf(indexJoinNode.getCriteria()); + Set expectedCriteria = criteria.stream() + .map(equiClause -> equiClause.getExpectedValue(symbolAliases)) + .collect(toImmutableSet()); + if (!expectedCriteria.equals(actualCriteria)) { + return NO_MATCH; + } + + if (!indexJoinNode.getProbeHashSymbol().equals(probeHashSymbol.map(alias -> alias.toSymbol(symbolAliases)))) { + return NO_MATCH; + } + + if (!indexJoinNode.getIndexHashSymbol().equals(indexHashSymbol.map(alias -> alias.toSymbol(symbolAliases)))) { + return NO_MATCH; + } + + return MatchResult.match(); + } + + @Override + public String toString() + { + return toStringHelper(this) + .omitNullValues() + .add("type", type) + .add("criteria", criteria) + .add("probeHashSymbol", probeHashSymbol.orElse(null)) + .add("indexHashSymbol", indexHashSymbol.orElse(null)) + .toString(); + } +} diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/assertions/PlanMatchPattern.java b/presto-main/src/test/java/io/prestosql/sql/planner/assertions/PlanMatchPattern.java index 35808ed45d39..a1f586fb4842 100644 --- a/presto-main/src/test/java/io/prestosql/sql/planner/assertions/PlanMatchPattern.java +++ b/presto-main/src/test/java/io/prestosql/sql/planner/assertions/PlanMatchPattern.java @@ -41,6 +41,7 @@ import io.prestosql.sql.planner.plan.ExchangeNode; import io.prestosql.sql.planner.plan.FilterNode; import io.prestosql.sql.planner.plan.GroupIdNode; +import io.prestosql.sql.planner.plan.IndexJoinNode; import io.prestosql.sql.planner.plan.IndexSourceNode; import 
io.prestosql.sql.planner.plan.IntersectNode; import io.prestosql.sql.planner.plan.JoinNode; @@ -182,6 +183,23 @@ public static PlanMatchPattern constrainedTableScanWithTableLayout(String expect return result.addColumnReferences(expectedTableName, columnReferences); } + public static PlanMatchPattern indexJoin( + IndexJoinNode.Type type, + List> criteria, + Optional probeHashSymbol, + Optional indexHashSymbol, + PlanMatchPattern probeSource, + PlanMatchPattern indexSource) + { + return node(IndexJoinNode.class, probeSource, indexSource) + .with(new IndexJoinMatcher(type, criteria, probeHashSymbol.map(SymbolAlias::new), indexHashSymbol.map(SymbolAlias::new))); + } + + public static ExpectedValueProvider indexJoinEquiClause(String probe, String index) + { + return new IndexJoinEquiClauseProvider(new SymbolAlias(probe), new SymbolAlias(index)); + } + public static PlanMatchPattern constrainedIndexSource(String expectedTableName, Map columnReferences) { return node(IndexSourceNode.class) diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneIndexJoinColumns.java b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneIndexJoinColumns.java new file mode 100644 index 000000000000..88e75b693346 --- /dev/null +++ b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneIndexJoinColumns.java @@ -0,0 +1,160 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.sql.planner.iterative.rule; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import io.prestosql.sql.planner.Symbol; +import io.prestosql.sql.planner.iterative.rule.test.BaseRuleTest; +import io.prestosql.sql.planner.plan.Assignments; +import io.prestosql.sql.planner.plan.IndexJoinNode.EquiJoinClause; +import org.testng.annotations.Test; + +import java.util.Optional; + +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.expression; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.indexJoin; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.indexJoinEquiClause; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.strictProject; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.values; +import static io.prestosql.sql.planner.plan.IndexJoinNode.Type.INNER; + +public class TestPruneIndexJoinColumns + extends BaseRuleTest +{ + @Test + public void testPruneInputColumn() + { + tester().assertThat(new PruneIndexJoinColumns()) + .on(p -> { + Symbol a = p.symbol("a"); + Symbol b = p.symbol("b"); + Symbol c = p.symbol("c"); + return p.project( + Assignments.identity(a, b), + p.indexJoin( + INNER, + p.values(a), + p.values(b, c), + ImmutableList.of(new EquiJoinClause(a, b)), + Optional.empty(), + Optional.empty())); + }) + .matches( + strictProject( + ImmutableMap.of("a", expression("a"), "b", expression("b")), + indexJoin( + INNER, + ImmutableList.of(indexJoinEquiClause("a", "b")), + Optional.empty(), + Optional.empty(), + values("a"), + strictProject( + ImmutableMap.of("b", expression("b")), + values("b", "c"))))); + + tester().assertThat(new PruneIndexJoinColumns()) + .on(p -> { + Symbol a = p.symbol("a"); + Symbol b = p.symbol("b"); + Symbol c = p.symbol("c"); + Symbol d = p.symbol("d"); + return p.project( + Assignments.identity(a, c), + p.indexJoin( + INNER, + p.values(a, b), + p.values(c, d), + 
ImmutableList.of(new EquiJoinClause(a, c)), + Optional.empty(), + Optional.empty())); + }) + .matches( + strictProject( + ImmutableMap.of("a", expression("a"), "c", expression("c")), + indexJoin( + INNER, + ImmutableList.of(indexJoinEquiClause("a", "c")), + Optional.empty(), + Optional.empty(), + strictProject( + ImmutableMap.of("a", expression("a")), + values("a", "b")), + strictProject( + ImmutableMap.of("c", expression("c")), + values("c", "d"))))); + } + + @Test + public void testDoNotPruneEquiClauseSymbol() + { + tester().assertThat(new PruneIndexJoinColumns()) + .on(p -> { + Symbol a = p.symbol("a"); + Symbol b = p.symbol("b"); + return p.project( + Assignments.identity(a), + p.indexJoin( + INNER, + p.values(a), + p.values(b), + ImmutableList.of(new EquiJoinClause(a, b)), + Optional.empty(), + Optional.empty())); + }) + .doesNotFire(); + } + + @Test + public void testDoNotPruneHashSymbol() + { + tester().assertThat(new PruneIndexJoinColumns()) + .on(p -> { + Symbol a = p.symbol("a"); + Symbol b = p.symbol("b"); + Symbol h = p.symbol("h"); + return p.project( + Assignments.identity(a, b), + p.indexJoin( + INNER, + p.values(a), + p.values(b, h), + ImmutableList.of(new EquiJoinClause(a, b)), + Optional.empty(), + Optional.of(h))); + }) + .doesNotFire(); + } + + @Test + public void testAllOutputsReferenced() + { + tester().assertThat(new PruneIndexJoinColumns()) + .on(p -> { + Symbol a = p.symbol("a"); + Symbol b = p.symbol("b"); + Symbol c = p.symbol("c"); + return p.project( + Assignments.identity(a, b, c), + p.indexJoin( + INNER, + p.values(a), + p.values(b, c), + ImmutableList.of(new EquiJoinClause(a, b)), + Optional.empty(), + Optional.empty())); + }) + .doesNotFire(); + } +} diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/test/PlanBuilder.java b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/test/PlanBuilder.java index 5e9c33eadf8e..60db570c3c62 100644 --- 
a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/test/PlanBuilder.java +++ b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/test/PlanBuilder.java @@ -773,16 +773,27 @@ public JoinNode join( Optional.empty()); } - public PlanNode indexJoin(IndexJoinNode.Type type, TableScanNode probe, TableScanNode index) + public PlanNode indexJoin(IndexJoinNode.Type type, PlanNode probe, PlanNode index) + { + return indexJoin(type, probe, index, emptyList(), Optional.empty(), Optional.empty()); + } + + public PlanNode indexJoin( + IndexJoinNode.Type type, + PlanNode probe, + PlanNode index, + List criteria, + Optional probeHashSymbol, + Optional indexHashSymbol) { return new IndexJoinNode( idAllocator.getNextId(), type, probe, index, - emptyList(), - Optional.empty(), - Optional.empty()); + criteria, + probeHashSymbol, + indexHashSymbol); } public UnionNode union(ListMultimap outputsToInputs, List sources) From e57108fa3555a410b4b85ea1c7345fcc5ef24c16 Mon Sep 17 00:00:00 2001 From: David Phillips Date: Mon, 20 Apr 2020 17:46:42 -0700 Subject: [PATCH 187/519] Free disk space before CI checks --- .github/workflows/ci-tests.yml | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci-tests.yml b/.github/workflows/ci-tests.yml index c707cc655309..f4e75d0cea78 100644 --- a/.github/workflows/ci-tests.yml +++ b/.github/workflows/ci-tests.yml @@ -20,6 +20,11 @@ jobs: 13 ] steps: + - name: Free Disk Space + run: | + df -h + sudo apt-get clean + df -h - uses: actions/checkout@v2 - uses: actions/setup-java@v1 with: @@ -32,7 +37,7 @@ jobs: run: | export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" ./bin/retry ./mvnw verify -B -P ci -pl presto-server-rpm - - name: Free Disk Space + - name: Clean Maven Output run: ./mvnw clean -pl '!presto-server,!presto-cli' - name: Test Docker Image run: docker/build-local.sh From cbe28f764b3e875e56d3039efd956f3cae5a7f12 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Mon, 20 Apr 2020 
12:06:50 +0200 Subject: [PATCH 188/519] Group related tests together Group timestamp tests together, separately from timestamp with time zone tests. --- .../postgresql/TestPostgreSqlTypeMapping.java | 88 +++++++++---------- 1 file changed, 44 insertions(+), 44 deletions(-) diff --git a/presto-postgresql/src/test/java/io/prestosql/plugin/postgresql/TestPostgreSqlTypeMapping.java b/presto-postgresql/src/test/java/io/prestosql/plugin/postgresql/TestPostgreSqlTypeMapping.java index d363c624a40f..d2446ec6790d 100644 --- a/presto-postgresql/src/test/java/io/prestosql/plugin/postgresql/TestPostgreSqlTypeMapping.java +++ b/presto-postgresql/src/test/java/io/prestosql/plugin/postgresql/TestPostgreSqlTypeMapping.java @@ -1020,6 +1020,16 @@ public void testTimestamp(boolean legacyTimestamp, boolean insertWithPresto, Zon } } + private void addTimestampTestIfSupported(DataTypeTest tests, boolean legacyTimestamp, ZoneId sessionZone, LocalDateTime dateTime) + { + if (legacyTimestamp && isGap(sessionZone, dateTime)) { + // in legacy timestamp semantics we cannot represent this dateTime + return; + } + + tests.addRoundTrip(timestampDataType(), dateTime); + } + @Test(dataProvider = "testTimestampDataProvider") public void testArrayTimestamp(boolean legacyTimestamp, boolean insertWithPresto, ZoneId sessionZone) { @@ -1053,16 +1063,6 @@ public void testArrayTimestamp(boolean legacyTimestamp, boolean insertWithPresto tests.execute(getQueryRunner(), session, dataSetup); } - private void addTimestampTestIfSupported(DataTypeTest tests, boolean legacyTimestamp, ZoneId sessionZone, LocalDateTime dateTime) - { - if (legacyTimestamp && isGap(sessionZone, dateTime)) { - // in legacy timestamp semantics we cannot represent this dateTime - return; - } - - tests.addRoundTrip(timestampDataType(), dateTime); - } - private void addArrayTimestampTestIfSupported(DataTypeTest tests, boolean legacyTimestamp, ZoneId sessionZone, DataType> dataType, LocalDateTime dateTime) { if (legacyTimestamp && 
isGap(sessionZone, dateTime)) { @@ -1073,40 +1073,6 @@ private void addArrayTimestampTestIfSupported(DataTypeTest tests, boolean legacy tests.addRoundTrip(dataType, asList(dateTime)); } - @Test(dataProvider = "testTimestampWithTimeZoneDataProvider") - public void testArrayTimestampWithTimeZone(boolean insertWithPresto) - { - DataType> dataType; - DataSetup dataSetup; - if (insertWithPresto) { - dataType = arrayDataType(prestoTimestampWithTimeZoneDataType()); - dataSetup = prestoCreateAsSelect(sessionWithArrayAsArray(), "test_array_timestamp_with_time_zone"); - } - else { - dataType = arrayDataType(postgreSqlTimestampWithTimeZoneDataType(), "timestamptz[]"); - dataSetup = postgresCreateAndInsert("tpch.test_array_timestamp_with_time_zone"); - } - - DataTypeTest tests = DataTypeTest.create() - .addRoundTrip(dataType, asList(epoch.atZone(UTC), epoch.atZone(kathmandu))) - .addRoundTrip(dataType, asList(beforeEpoch.atZone(kathmandu), beforeEpoch.atZone(UTC))) - .addRoundTrip(dataType, asList(afterEpoch.atZone(UTC), afterEpoch.atZone(kathmandu))) - .addRoundTrip(dataType, asList(timeDoubledInJvmZone.atZone(UTC))) - .addRoundTrip(dataType, asList(timeDoubledInJvmZone.atZone(kathmandu))) - .addRoundTrip(dataType, asList(timeDoubledInVilnius.atZone(UTC), timeDoubledInVilnius.atZone(vilnius), timeDoubledInVilnius.atZone(kathmandu))) - .addRoundTrip(dataType, asList(timeGapInJvmZone1.atZone(UTC), timeGapInJvmZone1.atZone(kathmandu))) - .addRoundTrip(dataType, asList(timeGapInJvmZone2.atZone(UTC), timeGapInJvmZone2.atZone(kathmandu))) - .addRoundTrip(dataType, asList(timeGapInVilnius.atZone(kathmandu))) - .addRoundTrip(dataType, asList(timeGapInKathmandu.atZone(vilnius))); - if (!insertWithPresto) { - // Postgres results with non-DST time (winter time) for timeDoubledInJvmZone.atZone(jvmZone) while Java results with DST time - // When writing timestamptz arrays, Postgres JDBC driver converts java.sql.Timestamp to string representing date-time in JVM zone - // TODO upgrade 
driver or find a different way to write timestamptz array elements as a point in time values with org.postgresql.jdbc.PgArray (https://github.com/pgjdbc/pgjdbc/issues/1225#issuecomment-516312324) - tests.addRoundTrip(dataType, asList(timeDoubledInJvmZone.atZone(jvmZone))); - } - tests.execute(getQueryRunner(), sessionWithArrayAsArray(), dataSetup); - } - @DataProvider public Object[][] testTimestampDataProvider() { @@ -1176,6 +1142,40 @@ public void testTimestampWithTimeZone(boolean insertWithPresto) tests.execute(getQueryRunner(), dataSetup); } + @Test(dataProvider = "testTimestampWithTimeZoneDataProvider") + public void testArrayTimestampWithTimeZone(boolean insertWithPresto) + { + DataType> dataType; + DataSetup dataSetup; + if (insertWithPresto) { + dataType = arrayDataType(prestoTimestampWithTimeZoneDataType()); + dataSetup = prestoCreateAsSelect(sessionWithArrayAsArray(), "test_array_timestamp_with_time_zone"); + } + else { + dataType = arrayDataType(postgreSqlTimestampWithTimeZoneDataType(), "timestamptz[]"); + dataSetup = postgresCreateAndInsert("tpch.test_array_timestamp_with_time_zone"); + } + + DataTypeTest tests = DataTypeTest.create() + .addRoundTrip(dataType, asList(epoch.atZone(UTC), epoch.atZone(kathmandu))) + .addRoundTrip(dataType, asList(beforeEpoch.atZone(kathmandu), beforeEpoch.atZone(UTC))) + .addRoundTrip(dataType, asList(afterEpoch.atZone(UTC), afterEpoch.atZone(kathmandu))) + .addRoundTrip(dataType, asList(timeDoubledInJvmZone.atZone(UTC))) + .addRoundTrip(dataType, asList(timeDoubledInJvmZone.atZone(kathmandu))) + .addRoundTrip(dataType, asList(timeDoubledInVilnius.atZone(UTC), timeDoubledInVilnius.atZone(vilnius), timeDoubledInVilnius.atZone(kathmandu))) + .addRoundTrip(dataType, asList(timeGapInJvmZone1.atZone(UTC), timeGapInJvmZone1.atZone(kathmandu))) + .addRoundTrip(dataType, asList(timeGapInJvmZone2.atZone(UTC), timeGapInJvmZone2.atZone(kathmandu))) + .addRoundTrip(dataType, asList(timeGapInVilnius.atZone(kathmandu))) + 
.addRoundTrip(dataType, asList(timeGapInKathmandu.atZone(vilnius))); + if (!insertWithPresto) { + // Postgres results with non-DST time (winter time) for timeDoubledInJvmZone.atZone(jvmZone) while Java results with DST time + // When writing timestamptz arrays, Postgres JDBC driver converts java.sql.Timestamp to string representing date-time in JVM zone + // TODO upgrade driver or find a different way to write timestamptz array elements as a point in time values with org.postgresql.jdbc.PgArray (https://github.com/pgjdbc/pgjdbc/issues/1225#issuecomment-516312324) + tests.addRoundTrip(dataType, asList(timeDoubledInJvmZone.atZone(jvmZone))); + } + tests.execute(getQueryRunner(), sessionWithArrayAsArray(), dataSetup); + } + @DataProvider public Object[][] testTimestampWithTimeZoneDataProvider() { From 23d2aca78bdb23811e012f63940eb2c81e733647 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Mon, 20 Apr 2020 12:06:52 +0200 Subject: [PATCH 189/519] Delegate to overload --- .../prestosql/plugin/postgresql/TestPostgreSqlTypeMapping.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/presto-postgresql/src/test/java/io/prestosql/plugin/postgresql/TestPostgreSqlTypeMapping.java b/presto-postgresql/src/test/java/io/prestosql/plugin/postgresql/TestPostgreSqlTypeMapping.java index d2446ec6790d..c74604d6b1d8 100644 --- a/presto-postgresql/src/test/java/io/prestosql/plugin/postgresql/TestPostgreSqlTypeMapping.java +++ b/presto-postgresql/src/test/java/io/prestosql/plugin/postgresql/TestPostgreSqlTypeMapping.java @@ -1462,7 +1462,7 @@ private Session sessionWithDecimalMappingStrict(UnsupportedTypeHandling unsuppor private DataSetup prestoCreateAsSelect(String tableNamePrefix) { - return new CreateAsSelectDataSetup(new PrestoSqlExecutor(getQueryRunner()), tableNamePrefix); + return prestoCreateAsSelect(getSession(), tableNamePrefix); } private DataSetup prestoCreateAsSelect(Session session, String tableNamePrefix) From 40d72ef9b76d2a48381012e9ad7366fdd458b3c1 
Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Mon, 20 Apr 2020 12:06:53 +0200 Subject: [PATCH 190/519] Avoid octal numeric literals --- .../prestosql/plugin/postgresql/TestPostgreSqlTypeMapping.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/presto-postgresql/src/test/java/io/prestosql/plugin/postgresql/TestPostgreSqlTypeMapping.java b/presto-postgresql/src/test/java/io/prestosql/plugin/postgresql/TestPostgreSqlTypeMapping.java index c74604d6b1d8..ab1be5fa1b40 100644 --- a/presto-postgresql/src/test/java/io/prestosql/plugin/postgresql/TestPostgreSqlTypeMapping.java +++ b/presto-postgresql/src/test/java/io/prestosql/plugin/postgresql/TestPostgreSqlTypeMapping.java @@ -157,7 +157,7 @@ public void setUp() { beforeEpoch = LocalDateTime.of(1958, 1, 1, 13, 18, 3, 123_000_000); epoch = LocalDateTime.of(1970, 1, 1, 0, 0, 0); - afterEpoch = LocalDateTime.of(2019, 03, 18, 10, 01, 17, 987_000_000); + afterEpoch = LocalDateTime.of(2019, 3, 18, 10, 1, 17, 987_000_000); jvmZone = ZoneId.systemDefault(); From ff8f1e528a8230eefa6d00a926428c959f87aed9 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Mon, 20 Apr 2020 12:06:54 +0200 Subject: [PATCH 191/519] Initialization more tersely --- .../postgresql/TestPostgreSqlTypeMapping.java | 40 ++++++------------- 1 file changed, 12 insertions(+), 28 deletions(-) diff --git a/presto-postgresql/src/test/java/io/prestosql/plugin/postgresql/TestPostgreSqlTypeMapping.java b/presto-postgresql/src/test/java/io/prestosql/plugin/postgresql/TestPostgreSqlTypeMapping.java index ab1be5fa1b40..d2cfa778fe42 100644 --- a/presto-postgresql/src/test/java/io/prestosql/plugin/postgresql/TestPostgreSqlTypeMapping.java +++ b/presto-postgresql/src/test/java/io/prestosql/plugin/postgresql/TestPostgreSqlTypeMapping.java @@ -116,23 +116,23 @@ public class TestPostgreSqlTypeMapping private TestingPostgreSqlServer postgreSqlServer; - private LocalDateTime beforeEpoch; - private LocalDateTime epoch; - private LocalDateTime 
afterEpoch; + private final LocalDateTime beforeEpoch = LocalDateTime.of(1958, 1, 1, 13, 18, 3, 123_000_000); + private final LocalDateTime epoch = LocalDateTime.of(1970, 1, 1, 0, 0, 0); + private final LocalDateTime afterEpoch = LocalDateTime.of(2019, 3, 18, 10, 1, 17, 987_000_000); - private ZoneId jvmZone; - private LocalDateTime timeGapInJvmZone1; - private LocalDateTime timeGapInJvmZone2; - private LocalDateTime timeDoubledInJvmZone; + private final ZoneId jvmZone = ZoneId.systemDefault(); + private final LocalDateTime timeGapInJvmZone1 = LocalDateTime.of(1970, 1, 1, 0, 13, 42); + private final LocalDateTime timeGapInJvmZone2 = LocalDateTime.of(2018, 4, 1, 2, 13, 55, 123_000_000); + private final LocalDateTime timeDoubledInJvmZone = LocalDateTime.of(2018, 10, 28, 1, 33, 17, 456_000_000); // no DST in 1970, but has DST in later years (e.g. 2018) - private ZoneId vilnius; - private LocalDateTime timeGapInVilnius; - private LocalDateTime timeDoubledInVilnius; + private final ZoneId vilnius = ZoneId.of("Europe/Vilnius"); + private final LocalDateTime timeGapInVilnius = LocalDateTime.of(2018, 3, 25, 3, 17, 17); + private final LocalDateTime timeDoubledInVilnius = LocalDateTime.of(2018, 10, 28, 3, 33, 33, 333_000_000); // minutes offset change since 1970-01-01, no DST - private ZoneId kathmandu; - private LocalDateTime timeGapInKathmandu; + private final ZoneId kathmandu = ZoneId.of("Asia/Kathmandu"); + private final LocalDateTime timeGapInKathmandu = LocalDateTime.of(1986, 1, 1, 0, 13, 7); @Override protected QueryRunner createQueryRunner() @@ -155,29 +155,13 @@ public final void destroy() @BeforeClass public void setUp() { - beforeEpoch = LocalDateTime.of(1958, 1, 1, 13, 18, 3, 123_000_000); - epoch = LocalDateTime.of(1970, 1, 1, 0, 0, 0); - afterEpoch = LocalDateTime.of(2019, 3, 18, 10, 1, 17, 987_000_000); - - jvmZone = ZoneId.systemDefault(); - - timeGapInJvmZone1 = LocalDateTime.of(1970, 1, 1, 0, 13, 42); checkIsGap(jvmZone, timeGapInJvmZone1); - 
timeGapInJvmZone2 = LocalDateTime.of(2018, 4, 1, 2, 13, 55, 123_000_000); checkIsGap(jvmZone, timeGapInJvmZone2); - timeDoubledInJvmZone = LocalDateTime.of(2018, 10, 28, 1, 33, 17, 456_000_000); checkIsDoubled(jvmZone, timeDoubledInJvmZone); - vilnius = ZoneId.of("Europe/Vilnius"); - - timeGapInVilnius = LocalDateTime.of(2018, 3, 25, 3, 17, 17); checkIsGap(vilnius, timeGapInVilnius); - timeDoubledInVilnius = LocalDateTime.of(2018, 10, 28, 3, 33, 33, 333_000_000); checkIsDoubled(vilnius, timeDoubledInVilnius); - kathmandu = ZoneId.of("Asia/Kathmandu"); - - timeGapInKathmandu = LocalDateTime.of(1986, 1, 1, 0, 13, 7); checkIsGap(kathmandu, timeGapInKathmandu); JdbcSqlExecutor executor = new JdbcSqlExecutor(postgreSqlServer.getJdbcUrl()); From ca878d471740c07155ea29fb74fe8be0ba855a68 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Mon, 20 Apr 2020 22:37:52 +0200 Subject: [PATCH 192/519] Add timestamp tz tests with fixed offset zone --- .../postgresql/TestPostgreSqlTypeMapping.java | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/presto-postgresql/src/test/java/io/prestosql/plugin/postgresql/TestPostgreSqlTypeMapping.java b/presto-postgresql/src/test/java/io/prestosql/plugin/postgresql/TestPostgreSqlTypeMapping.java index d2cfa778fe42..f17ba510c768 100644 --- a/presto-postgresql/src/test/java/io/prestosql/plugin/postgresql/TestPostgreSqlTypeMapping.java +++ b/presto-postgresql/src/test/java/io/prestosql/plugin/postgresql/TestPostgreSqlTypeMapping.java @@ -134,6 +134,9 @@ public class TestPostgreSqlTypeMapping private final ZoneId kathmandu = ZoneId.of("Asia/Kathmandu"); private final LocalDateTime timeGapInKathmandu = LocalDateTime.of(1986, 1, 1, 0, 13, 7); + private final ZoneOffset fixedOffsetEast = ZoneOffset.ofHoursMinutes(2, 17); + private final ZoneOffset fixedOffsetWest = ZoneOffset.ofHoursMinutes(-7, -31); + @Override protected QueryRunner createQueryRunner() throws Exception @@ -1106,10 +1109,20 @@ public void 
testTimestampWithTimeZone(boolean insertWithPresto) DataTypeTest tests = DataTypeTest.create() .addRoundTrip(dataType, epoch.atZone(UTC)) .addRoundTrip(dataType, epoch.atZone(kathmandu)) + .addRoundTrip(dataType, epoch.atZone(fixedOffsetEast)) + .addRoundTrip(dataType, epoch.atZone(fixedOffsetWest)) .addRoundTrip(dataType, beforeEpoch.atZone(UTC)) .addRoundTrip(dataType, beforeEpoch.atZone(kathmandu)) + .addRoundTrip(dataType, beforeEpoch.atZone(fixedOffsetEast)) + .addRoundTrip(dataType, beforeEpoch.atZone(fixedOffsetWest)) .addRoundTrip(dataType, afterEpoch.atZone(UTC)) .addRoundTrip(dataType, afterEpoch.atZone(kathmandu)) + .addRoundTrip(dataType, afterEpoch.atZone(fixedOffsetEast)) + .addRoundTrip(dataType, afterEpoch.atZone(fixedOffsetWest)) + .addRoundTrip(dataType, afterEpoch.atZone(ZoneId.of("GMT"))) + .addRoundTrip(dataType, afterEpoch.atZone(ZoneId.of("UTC"))) + .addRoundTrip(dataType, afterEpoch.atZone(ZoneId.of("Z"))) + .addRoundTrip(dataType, afterEpoch.atZone(ZoneId.of("UTC+00:00"))) .addRoundTrip(dataType, timeDoubledInJvmZone.atZone(UTC)) .addRoundTrip(dataType, timeDoubledInJvmZone.atZone(jvmZone)) .addRoundTrip(dataType, timeDoubledInJvmZone.atZone(kathmandu)) From 2b3b1ae5dbc9265c2052dcab66cfebe84e5edca4 Mon Sep 17 00:00:00 2001 From: Yuya Ebihara Date: Sun, 19 Apr 2020 16:02:30 +0900 Subject: [PATCH 193/519] Extract CREATE TABLE logic in Cassandra Additionally, remove invalid comment. 
--- .../io/prestosql/plugin/cassandra/CassandraMetadata.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraMetadata.java b/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraMetadata.java index f7fdd629e448..983561df9bd8 100644 --- a/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraMetadata.java +++ b/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraMetadata.java @@ -274,6 +274,11 @@ public void renameTable(ConnectorSession session, ConnectorTableHandle tableHand @Override public ConnectorOutputTableHandle beginCreateTable(ConnectorSession session, ConnectorTableMetadata tableMetadata, Optional layout) + { + return createTable(tableMetadata); + } + + private CassandraOutputTableHandle createTable(ConnectorTableMetadata tableMetadata) { ImmutableList.Builder columnNames = ImmutableList.builder(); ImmutableList.Builder columnTypes = ImmutableList.builder(); @@ -285,7 +290,6 @@ public ConnectorOutputTableHandle beginCreateTable(ConnectorSession session, Con columnExtra.add(new ExtraColumnMetadata(column.getName(), column.isHidden())); } - // get the root directory for the database SchemaTableName table = tableMetadata.getTable(); String schemaName = cassandraSession.getCaseSensitiveSchemaName(table.getSchemaName()); String tableName = table.getTableName(); From c54f6113c3bfddecf7d646625699050b18f5bcd5 Mon Sep 17 00:00:00 2001 From: Yuya Ebihara Date: Sun, 19 Apr 2020 16:11:05 +0900 Subject: [PATCH 194/519] Add support for CREATE TABLE in Cassandra Additionally, enable related tests in TestCassandraDistributedQueries. 
--- .../plugin/cassandra/CassandraMetadata.java | 2 +- .../TestCassandraDistributedQueries.java | 21 ++++++++----------- 2 files changed, 10 insertions(+), 13 deletions(-) diff --git a/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraMetadata.java b/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraMetadata.java index 983561df9bd8..37362b704f7d 100644 --- a/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraMetadata.java +++ b/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraMetadata.java @@ -246,7 +246,7 @@ public ConnectorTableProperties getTableProperties(ConnectorSession session, Con @Override public void createTable(ConnectorSession session, ConnectorTableMetadata tableMetadata, boolean ignoreExisting) { - throw new PrestoException(NOT_SUPPORTED, "CREATE TABLE not yet supported for Cassandra"); + createTable(tableMetadata); } @Override diff --git a/presto-cassandra/src/test/java/io/prestosql/plugin/cassandra/TestCassandraDistributedQueries.java b/presto-cassandra/src/test/java/io/prestosql/plugin/cassandra/TestCassandraDistributedQueries.java index 32edb5ff8f1f..7b40ab9ba1b5 100644 --- a/presto-cassandra/src/test/java/io/prestosql/plugin/cassandra/TestCassandraDistributedQueries.java +++ b/presto-cassandra/src/test/java/io/prestosql/plugin/cassandra/TestCassandraDistributedQueries.java @@ -84,37 +84,27 @@ public void testDropColumn() @Override public void testInsert() { - // Cassandra connector currently does not support create table // TODO test inserts } @Override public void testInsertWithCoercion() { - // Cassandra connector currently does not support create table // TODO test inserts } @Override public void testInsertUnicode() { - // Cassandra connector currently does not support create table // TODO test inserts } @Override public void testInsertArray() { - // Cassandra connector currently does not support create table // TODO test inserts } - @Override - public void 
testCreateTable() - { - // Cassandra connector currently does not support create table - } - @Override public void testDelete() { @@ -163,12 +153,19 @@ protected TestTable createTableWithDefaultColumns() @Override public void testColumnName(String columnName) { - // Cassandra connector currently does not support create table + // TODO Enable after fixing the following error messages + // - Multiple definition of identifier id + // - Column family names shouldn't be more than 48 characters long + // - mismatched character '' + // - missing EOF at 'apostrophe' } @Override public void testDataMappingSmokeTest(DataMappingTestSetup dataMappingTestSetup) { - // Cassandra connector currently does not support create table + // TODO Enable after fixing the following error messages + // - Multiple definition of identifier id + // - unsupported type: char(3), decimal(5,3), decimal(15,3), time, timestamp with time zone + // - Invalid (reserved) user type name smallint } } From d64528d10be4c90b274e6616172ff1fb7d49bdbb Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Fri, 3 Apr 2020 11:24:12 +0200 Subject: [PATCH 195/519] Verify connector did not return nulls --- .../java/io/prestosql/metadata/MetadataManager.java | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/metadata/MetadataManager.java b/presto-main/src/main/java/io/prestosql/metadata/MetadataManager.java index 49a94ae7f494..175b7aef0379 100644 --- a/presto-main/src/main/java/io/prestosql/metadata/MetadataManager.java +++ b/presto-main/src/main/java/io/prestosql/metadata/MetadataManager.java @@ -1096,10 +1096,15 @@ public Optional> applyProjection(Sessio ConnectorSession connectorSession = session.toConnectorSession(catalogName); return metadata.applyProjection(connectorSession, table.getConnectorHandle(), projections, assignments) - .map(result -> new ProjectionApplicationResult<>( - new TableHandle(catalogName, result.getHandle(), 
table.getTransaction(), Optional.empty()), - result.getProjections(), - result.getAssignments())); + .map(result -> { + result.getProjections().forEach(projection -> requireNonNull(projection, "one of the projections is null")); + result.getAssignments().forEach(assignment -> requireNonNull(assignment, "one of the assignments is null")); + + return new ProjectionApplicationResult<>( + new TableHandle(catalogName, result.getHandle(), table.getTransaction(), Optional.empty()), + result.getProjections(), + result.getAssignments()); + }); } // From e2a3686fdedc11d1779f8beb7753f22039cc6b41 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Fri, 3 Apr 2020 11:19:22 +0200 Subject: [PATCH 196/519] Verify projections returned from applyProjection() --- .../main/java/io/prestosql/metadata/MetadataManager.java | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/presto-main/src/main/java/io/prestosql/metadata/MetadataManager.java b/presto-main/src/main/java/io/prestosql/metadata/MetadataManager.java index 175b7aef0379..70aa6ee581b1 100644 --- a/presto-main/src/main/java/io/prestosql/metadata/MetadataManager.java +++ b/presto-main/src/main/java/io/prestosql/metadata/MetadataManager.java @@ -1100,6 +1100,13 @@ public Optional> applyProjection(Sessio result.getProjections().forEach(projection -> requireNonNull(projection, "one of the projections is null")); result.getAssignments().forEach(assignment -> requireNonNull(assignment, "one of the assignments is null")); + verify( + projections.size() == result.getProjections().size(), + "ConnectorMetadata returned invalid number of projections: %s instead of %s for %s", + result.getProjections().size(), + projections.size(), + table); + return new ProjectionApplicationResult<>( new TableHandle(catalogName, result.getHandle(), table.getTransaction(), Optional.empty()), result.getProjections(), From 2422757b0f2a5b1571486daaf4070714c8b384fe Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Fri, 3 Apr 2020 11:53:56 +0200 Subject: 
[PATCH 197/519] Verify projections wrt assignments in applyProjection --- .../prestosql/metadata/MetadataManager.java | 11 +++++ .../sql/planner/ConnectorExpressions.java | 46 +++++++++++++++++++ .../spi/expression/ConnectorExpression.java | 4 ++ .../io/prestosql/spi/expression/Constant.java | 9 ++++ .../spi/expression/FieldDereference.java | 8 ++++ .../io/prestosql/spi/expression/Variable.java | 8 ++++ 6 files changed, 86 insertions(+) create mode 100644 presto-main/src/main/java/io/prestosql/sql/planner/ConnectorExpressions.java diff --git a/presto-main/src/main/java/io/prestosql/metadata/MetadataManager.java b/presto-main/src/main/java/io/prestosql/metadata/MetadataManager.java index 70aa6ee581b1..f33050547e01 100644 --- a/presto-main/src/main/java/io/prestosql/metadata/MetadataManager.java +++ b/presto-main/src/main/java/io/prestosql/metadata/MetadataManager.java @@ -72,6 +72,7 @@ import io.prestosql.spi.connector.SchemaTablePrefix; import io.prestosql.spi.connector.SystemTable; import io.prestosql.spi.expression.ConnectorExpression; +import io.prestosql.spi.expression.Variable; import io.prestosql.spi.function.OperatorType; import io.prestosql.spi.predicate.TupleDomain; import io.prestosql.spi.security.GrantInfo; @@ -88,6 +89,7 @@ import io.prestosql.spi.type.TypeSignature; import io.prestosql.sql.analyzer.FeaturesConfig; import io.prestosql.sql.analyzer.TypeSignatureProvider; +import io.prestosql.sql.planner.ConnectorExpressions; import io.prestosql.sql.planner.PartitioningHandle; import io.prestosql.sql.tree.QualifiedName; import io.prestosql.transaction.TransactionManager; @@ -1107,6 +1109,15 @@ public Optional> applyProjection(Sessio projections.size(), table); + Set assignedVariables = result.getAssignments().stream() + .map(ProjectionApplicationResult.Assignment::getVariable) + .collect(toImmutableSet()); + result.getProjections().stream() + .flatMap(connectorExpression -> ConnectorExpressions.extractVariables(connectorExpression).stream()) + 
.map(Variable::getName) + .filter(variableName -> !assignedVariables.contains(variableName)) + .findAny().ifPresent(variableName -> { throw new IllegalStateException("Unbound variable: " + variableName); }); + return new ProjectionApplicationResult<>( new TableHandle(catalogName, result.getHandle(), table.getTransaction(), Optional.empty()), result.getProjections(), diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/ConnectorExpressions.java b/presto-main/src/main/java/io/prestosql/sql/planner/ConnectorExpressions.java new file mode 100644 index 000000000000..41412b667457 --- /dev/null +++ b/presto-main/src/main/java/io/prestosql/sql/planner/ConnectorExpressions.java @@ -0,0 +1,46 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.sql.planner; + +import com.google.common.graph.SuccessorsFunction; +import com.google.common.graph.Traverser; +import io.prestosql.spi.expression.ConnectorExpression; +import io.prestosql.spi.expression.Variable; + +import java.util.List; +import java.util.stream.Stream; + +import static com.google.common.collect.ImmutableList.toImmutableList; +import static com.google.common.collect.Streams.stream; +import static java.util.Objects.requireNonNull; + +public final class ConnectorExpressions +{ + private ConnectorExpressions() {} + + public static List extractVariables(ConnectorExpression expression) + { + return preOrder(expression) + .filter(Variable.class::isInstance) + .map(Variable.class::cast) + .collect(toImmutableList()); + } + + public static Stream preOrder(ConnectorExpression expression) + { + return stream( + Traverser.forTree((SuccessorsFunction) ConnectorExpression::getChildren) + .depthFirstPreOrder(requireNonNull(expression, "expression is null"))); + } +} diff --git a/presto-spi/src/main/java/io/prestosql/spi/expression/ConnectorExpression.java b/presto-spi/src/main/java/io/prestosql/spi/expression/ConnectorExpression.java index 5bd443d1068c..599134ce82dd 100644 --- a/presto-spi/src/main/java/io/prestosql/spi/expression/ConnectorExpression.java +++ b/presto-spi/src/main/java/io/prestosql/spi/expression/ConnectorExpression.java @@ -15,6 +15,8 @@ import io.prestosql.spi.type.Type; +import java.util.List; + import static java.util.Objects.requireNonNull; public abstract class ConnectorExpression @@ -31,6 +33,8 @@ public Type getType() return type; } + public abstract List getChildren(); + @Override public abstract int hashCode(); diff --git a/presto-spi/src/main/java/io/prestosql/spi/expression/Constant.java b/presto-spi/src/main/java/io/prestosql/spi/expression/Constant.java index d1e139e6c49e..e820c97ff4f9 100644 --- a/presto-spi/src/main/java/io/prestosql/spi/expression/Constant.java +++ 
b/presto-spi/src/main/java/io/prestosql/spi/expression/Constant.java @@ -15,8 +15,11 @@ import io.prestosql.spi.type.Type; +import java.util.List; import java.util.Objects; +import static java.util.Collections.emptyList; + public class Constant extends ConnectorExpression { @@ -36,6 +39,12 @@ public Object getValue() return value; } + @Override + public List getChildren() + { + return emptyList(); + } + @Override public int hashCode() { diff --git a/presto-spi/src/main/java/io/prestosql/spi/expression/FieldDereference.java b/presto-spi/src/main/java/io/prestosql/spi/expression/FieldDereference.java index 4db98cd69e96..d812f84a552d 100644 --- a/presto-spi/src/main/java/io/prestosql/spi/expression/FieldDereference.java +++ b/presto-spi/src/main/java/io/prestosql/spi/expression/FieldDereference.java @@ -15,9 +15,11 @@ import io.prestosql.spi.type.Type; +import java.util.List; import java.util.Objects; import static java.lang.String.format; +import static java.util.Collections.singletonList; import static java.util.Objects.requireNonNull; public class FieldDereference @@ -43,6 +45,12 @@ public int getField() return field; } + @Override + public List getChildren() + { + return singletonList(target); + } + @Override public int hashCode() { diff --git a/presto-spi/src/main/java/io/prestosql/spi/expression/Variable.java b/presto-spi/src/main/java/io/prestosql/spi/expression/Variable.java index c665c184c020..fd2b647ba396 100644 --- a/presto-spi/src/main/java/io/prestosql/spi/expression/Variable.java +++ b/presto-spi/src/main/java/io/prestosql/spi/expression/Variable.java @@ -15,8 +15,10 @@ import io.prestosql.spi.type.Type; +import java.util.List; import java.util.Objects; +import static java.util.Collections.emptyList; import static java.util.Objects.requireNonNull; public class Variable @@ -39,6 +41,12 @@ public String getName() return name; } + @Override + public List getChildren() + { + return emptyList(); + } + @Override public int hashCode() { From 
70d02427402818528cf8e76f0f760d54ed64df1c Mon Sep 17 00:00:00 2001 From: David Phillips Date: Mon, 20 Apr 2020 22:07:43 -0700 Subject: [PATCH 198/519] Add EditorConfig for YAML --- .editorconfig | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.editorconfig b/.editorconfig index 70e734ce4ceb..dcd467ab5d41 100644 --- a/.editorconfig +++ b/.editorconfig @@ -16,3 +16,6 @@ trim_trailing_whitespace = false [*.java] ij_java_doc_align_exception_comments = false ij_java_doc_align_param_comments = false + +[*.yml] +indent_size = 2 From dae7c755a4e01992fc5860a53dae881dde24611c Mon Sep 17 00:00:00 2001 From: David Phillips Date: Mon, 20 Apr 2020 22:13:08 -0700 Subject: [PATCH 199/519] Use list syntax in GitHub YAML --- .github/workflows/ci-tests.yml | 77 ++++++++++++++++------------------ 1 file changed, 37 insertions(+), 40 deletions(-) diff --git a/.github/workflows/ci-tests.yml b/.github/workflows/ci-tests.yml index f4e75d0cea78..3c3bab540ca8 100644 --- a/.github/workflows/ci-tests.yml +++ b/.github/workflows/ci-tests.yml @@ -1,6 +1,8 @@ name: ci-tests -on: [push, pull_request] +on: + - push + - pull_request env: # maven.wagon.rto is in millis, defaults to 30m @@ -14,11 +16,10 @@ jobs: strategy: fail-fast: false matrix: - java-version: [ - 1.8, - 11, - 13 - ] + java-version: + - 1.8 + - 11 + - 13 steps: - name: Free Disk Space run: | @@ -71,11 +72,10 @@ jobs: strategy: fail-fast: false matrix: - config: [ - config-empty, - config-hdp3, - # TODO config-cdh5, - ] + config: + - config-empty + - config-hdp3 + # TODO: config-cdh5 steps: - uses: actions/checkout@v2 - uses: actions/setup-java@v1 @@ -146,21 +146,20 @@ jobs: strategy: fail-fast: false matrix: - modules: [ - "presto-main", - "presto-tests", - "presto-tests -P ci-only", - "presto-raptor-legacy", - "presto-accumulo", - "presto-cassandra", - "presto-hive,presto-orc", - "presto-hive,presto-parquet -P test-parquet", - "presto-mongodb,presto-kafka,presto-elasticsearch", - "presto-redis", - 
"presto-sqlserver,presto-postgresql,presto-mysql", - "presto-phoenix,presto-iceberg", - "presto-kudu", - ] + modules: + - "presto-main" + - "presto-tests" + - "presto-tests -P ci-only" + - "presto-raptor-legacy" + - "presto-accumulo" + - "presto-cassandra" + - "presto-hive,presto-orc" + - "presto-hive,presto-parquet -P test-parquet" + - "presto-mongodb,presto-kafka,presto-elasticsearch" + - "presto-redis" + - "presto-sqlserver,presto-postgresql,presto-mysql" + - "presto-phoenix,presto-iceberg" + - "presto-kudu" steps: - uses: actions/checkout@v2 - uses: actions/setup-java@v1 @@ -178,21 +177,19 @@ jobs: strategy: fail-fast: false matrix: - config: [ - config-empty, - config-hdp3, - config-cdh5, - ] - suite: [ - suite-1, - suite-2, - suite-3, + config: + - config-empty + - config-hdp3 + - config-cdh5 + suite: + - suite-1 + - suite-2 + - suite-3 # suite-4 does not exist - suite-5, - suite-6-non-generic, - suite-7-non-generic, - suite-8-non-generic, - ] + - suite-5 + - suite-6-non-generic + - suite-7-non-generic + - suite-8-non-generic exclude: - config: config-hdp3 suite: suite-6-non-generic From ca99a33e3ffdc3b8c2adbb2adb7731c75903da92 Mon Sep 17 00:00:00 2001 From: David Phillips Date: Mon, 20 Apr 2020 21:23:03 -0700 Subject: [PATCH 200/519] Use JDK 14 in CI --- .github/workflows/ci-tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-tests.yml b/.github/workflows/ci-tests.yml index 3c3bab540ca8..17592c0d5d1b 100644 --- a/.github/workflows/ci-tests.yml +++ b/.github/workflows/ci-tests.yml @@ -19,7 +19,7 @@ jobs: java-version: - 1.8 - 11 - - 13 + - 14 steps: - name: Free Disk Space run: | From 3d035e188b86421a02f9865b1bc12635f3ec4ad5 Mon Sep 17 00:00:00 2001 From: David Phillips Date: Mon, 20 Apr 2020 22:26:33 -0700 Subject: [PATCH 201/519] Shorten workflow name --- .github/workflows/{ci-tests.yml => ci.yml} | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename .github/workflows/{ci-tests.yml => ci.yml} (99%) diff 
--git a/.github/workflows/ci-tests.yml b/.github/workflows/ci.yml similarity index 99% rename from .github/workflows/ci-tests.yml rename to .github/workflows/ci.yml index 17592c0d5d1b..c68fa665f776 100644 --- a/.github/workflows/ci-tests.yml +++ b/.github/workflows/ci.yml @@ -1,4 +1,4 @@ -name: ci-tests +name: ci on: - push From 7aeea3a7b7a295428a20c643ea1b954ea3265daa Mon Sep 17 00:00:00 2001 From: Lars Date: Sat, 14 Mar 2020 16:30:20 -0700 Subject: [PATCH 202/519] Add support for SHOW CREATE SCHEMA. Add support for authorized users to see the definitin of a schema, including its owner and location. --- .../plugin/blackhole/BlackHoleMetadata.java | 14 ++++++ .../prestosql/plugin/hive/HiveMetadata.java | 29 +++++++++++ .../plugin/hive/HiveSchemaProperties.java | 10 ++++ .../hive/security/LegacyAccessControl.java | 5 ++ .../security/SqlStandardAccessControl.java | 9 ++++ .../hive/TestHiveIntegrationSmokeTest.java | 50 +++++++++++++++++++ .../plugin/iceberg/IcebergMetadata.java | 24 +++++++++ .../plugin/iceberg/TestIcebergSmoke.java | 11 ++++ .../java/io/prestosql/metadata/Metadata.java | 10 ++++ .../prestosql/metadata/MetadataManager.java | 29 +++++++++++ .../io/prestosql/metadata/MetadataUtil.java | 15 ++++++ .../io/prestosql/security/AccessControl.java | 7 +++ .../security/AccessControlManager.java | 13 +++++ .../security/AllowAllAccessControl.java | 5 ++ .../security/DenyAllAccessControl.java | 7 +++ .../security/ForwardingAccessControl.java | 6 +++ .../sql/rewrite/ShowQueriesRewrite.java | 40 +++++++++++++-- .../metadata/AbstractMockMetadata.java | 12 +++++ .../plugin/memory/MemoryMetadata.java | 13 +++++ .../antlr4/io/prestosql/sql/parser/SqlBase.g4 | 1 + .../java/io/prestosql/sql/SqlFormatter.java | 2 +- .../io/prestosql/sql/parser/AstBuilder.java | 6 +++ .../io/prestosql/sql/tree/CreateSchema.java | 5 ++ .../io/prestosql/sql/tree/ShowCreate.java | 4 +- ...ClassLoaderSafeConnectorAccessControl.java | 8 +++ .../ClassLoaderSafeConnectorMetadata.java | 17 +++++++ 
.../base/security/AllowAllAccessControl.java | 5 ++ .../security/AllowAllSystemAccessControl.java | 5 ++ .../base/security/FileBasedAccessControl.java | 9 ++++ .../FileBasedSystemAccessControl.java | 9 ++++ .../ForwardingConnectorAccessControl.java | 6 +++ .../ForwardingSystemAccessControl.java | 6 +++ .../base/security/ReadOnlyAccessControl.java | 5 ++ .../security/TestFileBasedAccessControl.java | 15 ++++++ .../spi/connector/ConnectorAccessControl.java | 11 ++++ .../spi/connector/ConnectorMetadata.java | 16 ++++++ .../spi/security/AccessDeniedException.java | 5 ++ .../spi/security/SystemAccessControl.java | 11 ++++ 38 files changed, 449 insertions(+), 6 deletions(-) diff --git a/presto-blackhole/src/main/java/io/prestosql/plugin/blackhole/BlackHoleMetadata.java b/presto-blackhole/src/main/java/io/prestosql/plugin/blackhole/BlackHoleMetadata.java index 6af10944fe07..3c025284a27c 100644 --- a/presto-blackhole/src/main/java/io/prestosql/plugin/blackhole/BlackHoleMetadata.java +++ b/presto-blackhole/src/main/java/io/prestosql/plugin/blackhole/BlackHoleMetadata.java @@ -14,11 +14,13 @@ package io.prestosql.plugin.blackhole; import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Sets; import io.airlift.slice.Slice; import io.airlift.units.Duration; import io.prestosql.spi.PrestoException; +import io.prestosql.spi.connector.CatalogSchemaName; import io.prestosql.spi.connector.ColumnHandle; import io.prestosql.spi.connector.ColumnMetadata; import io.prestosql.spi.connector.ConnectorInsertTableHandle; @@ -91,6 +93,18 @@ public synchronized void createSchema(ConnectorSession session, String schemaNam schemas.add(schemaName); } + @Override + public Optional getSchemaOwner(ConnectorSession session, CatalogSchemaName schemaName) + { + return Optional.empty(); + } + + @Override + public Map getSchemaProperties(ConnectorSession session, CatalogSchemaName 
schemaName) + { + return ImmutableMap.of(); + } + @Override public ConnectorTableHandle getTableHandle(ConnectorSession session, SchemaTableName tableName) { diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveMetadata.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveMetadata.java index df31614d75a7..95521711540e 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveMetadata.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveMetadata.java @@ -49,6 +49,7 @@ import io.prestosql.spi.PrestoException; import io.prestosql.spi.StandardErrorCode; import io.prestosql.spi.block.Block; +import io.prestosql.spi.connector.CatalogSchemaName; import io.prestosql.spi.connector.ColumnHandle; import io.prestosql.spi.connector.ColumnMetadata; import io.prestosql.spi.connector.ConnectorInsertTableHandle; @@ -68,6 +69,7 @@ import io.prestosql.spi.connector.InMemoryRecordSet; import io.prestosql.spi.connector.ProjectionApplicationResult; import io.prestosql.spi.connector.ProjectionApplicationResult.Assignment; +import io.prestosql.spi.connector.SchemaNotFoundException; import io.prestosql.spi.connector.SchemaTableName; import io.prestosql.spi.connector.SchemaTablePrefix; import io.prestosql.spi.connector.SystemTable; @@ -124,6 +126,7 @@ import static com.google.common.base.MoreObjects.firstNonNull; import static com.google.common.base.Preconditions.checkArgument; +import static com.google.common.base.Preconditions.checkState; import static com.google.common.base.Verify.verify; import static com.google.common.collect.ImmutableList.toImmutableList; import static com.google.common.collect.ImmutableMap.toImmutableMap; @@ -1748,6 +1751,32 @@ public List listViews(ConnectorSession session, Optional getSchemaProperties(ConnectorSession session, CatalogSchemaName schemaName) + { + checkState(filterSchema(schemaName.getSchemaName()), "Schema is not accessible: %s", schemaName); + + Optional db = 
metastore.getDatabase(schemaName.getSchemaName()); + if (db.isPresent()) { + return HiveSchemaProperties.fromDatabase(db.get()); + } + + throw new SchemaNotFoundException(schemaName.getSchemaName()); + } + + @Override + public Optional getSchemaOwner(ConnectorSession session, CatalogSchemaName schemaName) + { + checkState(filterSchema(schemaName.getSchemaName()), "Schema is not accessible: %s", schemaName); + + Optional database = metastore.getDatabase(schemaName.getSchemaName()); + if (database.isPresent()) { + return database.flatMap(db -> Optional.of(new PrestoPrincipal(db.getOwnerType(), db.getOwnerName()))); + } + + throw new SchemaNotFoundException(schemaName.getSchemaName()); + } + @Override public Optional getView(ConnectorSession session, SchemaTableName viewName) { diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveSchemaProperties.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveSchemaProperties.java index a43f76fbb0e1..f36f743489a5 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveSchemaProperties.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveSchemaProperties.java @@ -14,6 +14,8 @@ package io.prestosql.plugin.hive; import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import io.prestosql.plugin.hive.metastore.Database; import io.prestosql.spi.session.PropertyMetadata; import java.util.List; @@ -35,6 +37,14 @@ public final class HiveSchemaProperties private HiveSchemaProperties() {} + public static Map fromDatabase(Database db) + { + ImmutableMap.Builder result = ImmutableMap.builder(); + db.getLocation().ifPresent(location -> result.put(HiveSchemaProperties.LOCATION_PROPERTY, location)); + + return result.build(); + } + public static Optional getLocation(Map schemaProperties) { return Optional.ofNullable((String) schemaProperties.get(LOCATION_PROPERTY)); diff --git 
a/presto-hive/src/main/java/io/prestosql/plugin/hive/security/LegacyAccessControl.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/security/LegacyAccessControl.java index 313ce8eebf98..78b92053b0f4 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/security/LegacyAccessControl.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/security/LegacyAccessControl.java @@ -101,6 +101,11 @@ public Set filterSchemas(ConnectorSecurityContext context, Set s return schemaNames; } + @Override + public void checkCanShowCreateSchema(ConnectorSecurityContext context, String schemaName) + { + } + @Override public void checkCanShowCreateTable(ConnectorSecurityContext context, SchemaTableName tableName) { diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/security/SqlStandardAccessControl.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/security/SqlStandardAccessControl.java index 72f75b7ef247..ebecd4b040d5 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/security/SqlStandardAccessControl.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/security/SqlStandardAccessControl.java @@ -81,6 +81,7 @@ import static io.prestosql.spi.security.AccessDeniedException.denySetRole; import static io.prestosql.spi.security.AccessDeniedException.denySetSchemaAuthorization; import static io.prestosql.spi.security.AccessDeniedException.denyShowColumns; +import static io.prestosql.spi.security.AccessDeniedException.denyShowCreateSchema; import static io.prestosql.spi.security.AccessDeniedException.denyShowCreateTable; import static io.prestosql.spi.security.AccessDeniedException.denyShowRoles; import static io.prestosql.spi.security.PrincipalType.ROLE; @@ -159,6 +160,14 @@ public void checkCanShowCreateTable(ConnectorSecurityContext context, SchemaTabl } } + @Override + public void checkCanShowCreateSchema(ConnectorSecurityContext context, String schemaName) + { + if (!isDatabaseOwner(context, schemaName)) { + 
denyShowCreateSchema(schemaName); + } + } + @Override public void checkCanCreateTable(ConnectorSecurityContext context, SchemaTableName tableName) { diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java index 05f5859dc28d..0f532b92780b 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java @@ -716,6 +716,56 @@ public void testSchemaAuthorization() assertUpdate(admin, "DROP ROLE admin"); } + @Test + public void testShowCreateSchema() + { + Session admin = Session.builder(getQueryRunner().getDefaultSession()) + .setIdentity(Identity.forUser("hive") + .withRole("hive", new SelectedRole(ROLE, Optional.of("admin"))) + .build()) + .build(); + + Session user = testSessionBuilder() + .setCatalog(getSession().getCatalog().get()) + .setSchema("test_show_create_schema") + .setIdentity(Identity.forUser("user").withPrincipal(getSession().getIdentity().getPrincipal()).build()) + .build(); + + assertUpdate(admin, "CREATE ROLE test_show_create_schema_role"); + assertUpdate(admin, "GRANT test_show_create_schema_role TO user"); + + assertUpdate(admin, "CREATE SCHEMA test_show_create_schema"); + + String createSchemaSql = format("" + + "CREATE SCHEMA %s.test_show_create_schema\n" + + "AUTHORIZATION USER hive\n" + + "WITH \\(\n" + + " location = '.*test_show_create_schema'\n" + + "\\)", + getSession().getCatalog().get()); + + String actualResult = getOnlyElement(computeActual(admin, "SHOW CREATE SCHEMA test_show_create_schema").getOnlyColumnAsSet()).toString(); + assertThat(actualResult).matches(createSchemaSql); + + assertQueryFails(user, "SHOW CREATE SCHEMA test_show_create_schema", "Access Denied: Cannot show create schema for test_show_create_schema"); + + assertUpdate(admin, "ALTER SCHEMA test_show_create_schema SET 
AUTHORIZATION ROLE test_show_create_schema_role"); + + createSchemaSql = format("" + + "CREATE SCHEMA %s.test_show_create_schema\n" + + "AUTHORIZATION ROLE test_show_create_schema_role\n" + + "WITH \\(\n" + + " location = '.*test_show_create_schema'\n" + + "\\)", + getSession().getCatalog().get()); + + actualResult = getOnlyElement(computeActual(admin, "SHOW CREATE SCHEMA test_show_create_schema").getOnlyColumnAsSet()).toString(); + assertThat(actualResult).matches(createSchemaSql); + + assertUpdate(user, "DROP SCHEMA test_show_create_schema"); + assertUpdate(admin, "DROP ROLE test_show_create_schema_role"); + } + @Test public void testIoExplain() { diff --git a/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergMetadata.java b/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergMetadata.java index c547c7d17562..65e4178a9768 100644 --- a/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergMetadata.java +++ b/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergMetadata.java @@ -21,6 +21,7 @@ import io.prestosql.plugin.base.classloader.ClassLoaderSafeSystemTable; import io.prestosql.plugin.hive.HdfsEnvironment; import io.prestosql.plugin.hive.HdfsEnvironment.HdfsContext; +import io.prestosql.plugin.hive.HiveSchemaProperties; import io.prestosql.plugin.hive.HiveWrittenPartitions; import io.prestosql.plugin.hive.TableAlreadyExistsException; import io.prestosql.plugin.hive.authentication.HiveIdentity; @@ -29,6 +30,7 @@ import io.prestosql.plugin.hive.metastore.HivePrincipal; import io.prestosql.plugin.hive.metastore.Table; import io.prestosql.spi.PrestoException; +import io.prestosql.spi.connector.CatalogSchemaName; import io.prestosql.spi.connector.ColumnHandle; import io.prestosql.spi.connector.ColumnMetadata; import io.prestosql.spi.connector.ConnectorInsertTableHandle; @@ -142,6 +144,28 @@ public List listSchemaNames(ConnectorSession session) return metastore.getAllDatabases(); } + @Override + public Map 
getSchemaProperties(ConnectorSession session, CatalogSchemaName schemaName) + { + Optional db = metastore.getDatabase(schemaName.getSchemaName()); + if (db.isPresent()) { + return HiveSchemaProperties.fromDatabase(db.get()); + } + + throw new SchemaNotFoundException(schemaName.getSchemaName()); + } + + @Override + public Optional getSchemaOwner(ConnectorSession session, CatalogSchemaName schemaName) + { + Optional database = metastore.getDatabase(schemaName.getSchemaName()); + if (database.isPresent()) { + return database.flatMap(db -> Optional.of(new PrestoPrincipal(db.getOwnerType(), db.getOwnerName()))); + } + + throw new SchemaNotFoundException(schemaName.getSchemaName()); + } + @Override public IcebergTableHandle getTableHandle(ConnectorSession session, SchemaTableName tableName) { diff --git a/presto-iceberg/src/test/java/io/prestosql/plugin/iceberg/TestIcebergSmoke.java b/presto-iceberg/src/test/java/io/prestosql/plugin/iceberg/TestIcebergSmoke.java index 14bc3cfb2da5..8a10fe006074 100644 --- a/presto-iceberg/src/test/java/io/prestosql/plugin/iceberg/TestIcebergSmoke.java +++ b/presto-iceberg/src/test/java/io/prestosql/plugin/iceberg/TestIcebergSmoke.java @@ -44,6 +44,17 @@ protected QueryRunner createQueryRunner() return createIcebergQueryRunner(ImmutableMap.of()); } + @Test + public void testShowCreateSchema() + { + assertThat(computeActual("SHOW CREATE SCHEMA tpch").getOnlyValue().toString()) + .matches("CREATE SCHEMA iceberg.tpch\n" + + "AUTHORIZATION USER user\n" + + "WITH \\(\n" + + " location = '.*/iceberg_data/tpch'\n" + + "\\)"); + } + @Test @Override public void testDescribeTable() diff --git a/presto-main/src/main/java/io/prestosql/metadata/Metadata.java b/presto-main/src/main/java/io/prestosql/metadata/Metadata.java index f2e5419e90c4..1f4ac3fa862c 100644 --- a/presto-main/src/main/java/io/prestosql/metadata/Metadata.java +++ b/presto-main/src/main/java/io/prestosql/metadata/Metadata.java @@ -311,6 +311,16 @@ public interface Metadata */ Optional 
getView(Session session, QualifiedObjectName viewName); + /** + * Gets the schema properties for the specified schema. + */ + Map getSchemaProperties(Session session, CatalogSchemaName schemaName); + + /** + * Gets the schema owner for the specified schema. + */ + Optional getSchemaOwner(Session session, CatalogSchemaName schemaName); + /** * Creates the specified view with the specified view definition. */ diff --git a/presto-main/src/main/java/io/prestosql/metadata/MetadataManager.java b/presto-main/src/main/java/io/prestosql/metadata/MetadataManager.java index f33050547e01..4247d588ff1c 100644 --- a/presto-main/src/main/java/io/prestosql/metadata/MetadataManager.java +++ b/presto-main/src/main/java/io/prestosql/metadata/MetadataManager.java @@ -126,6 +126,7 @@ import static io.prestosql.spi.StandardErrorCode.FUNCTION_NOT_FOUND; import static io.prestosql.spi.StandardErrorCode.INVALID_VIEW; import static io.prestosql.spi.StandardErrorCode.NOT_SUPPORTED; +import static io.prestosql.spi.StandardErrorCode.SCHEMA_NOT_FOUND; import static io.prestosql.spi.StandardErrorCode.SYNTAX_ERROR; import static io.prestosql.spi.connector.ConnectorViewDefinition.ViewColumn; import static io.prestosql.spi.function.OperatorType.BETWEEN; @@ -959,6 +960,34 @@ public Map getViews(Session sessio return ImmutableMap.copyOf(views); } + @Override + public Map getSchemaProperties(Session session, CatalogSchemaName schemaName) + { + if (!schemaExists(session, schemaName)) { + throw new PrestoException(SCHEMA_NOT_FOUND, format("Schema '%s' does not exist", schemaName)); + } + CatalogMetadata catalogMetadata = getCatalogMetadata(session, new CatalogName(schemaName.getCatalogName())); + CatalogName catalogName = catalogMetadata.getCatalogName(); + ConnectorMetadata metadata = catalogMetadata.getMetadataFor(catalogName); + + ConnectorSession connectorSession = session.toConnectorSession(catalogName); + return metadata.getSchemaProperties(connectorSession, schemaName); + } + + @Override + public 
Optional getSchemaOwner(Session session, CatalogSchemaName schemaName) + { + if (!schemaExists(session, schemaName)) { + throw new PrestoException(SCHEMA_NOT_FOUND, format("Schema '%s' does not exist", schemaName)); + } + CatalogMetadata catalogMetadata = getCatalogMetadata(session, new CatalogName(schemaName.getCatalogName())); + CatalogName catalogName = catalogMetadata.getCatalogName(); + ConnectorMetadata metadata = catalogMetadata.getMetadataFor(catalogName); + + ConnectorSession connectorSession = session.toConnectorSession(catalogName); + return metadata.getSchemaOwner(connectorSession, schemaName); + } + @Override public Optional getView(Session session, QualifiedObjectName viewName) { diff --git a/presto-main/src/main/java/io/prestosql/metadata/MetadataUtil.java b/presto-main/src/main/java/io/prestosql/metadata/MetadataUtil.java index 61a867b72c78..0894990f9fd6 100644 --- a/presto-main/src/main/java/io/prestosql/metadata/MetadataUtil.java +++ b/presto-main/src/main/java/io/prestosql/metadata/MetadataUtil.java @@ -23,8 +23,10 @@ import io.prestosql.spi.connector.ConnectorTableMetadata; import io.prestosql.spi.connector.SchemaTableName; import io.prestosql.spi.security.PrestoPrincipal; +import io.prestosql.spi.security.PrincipalType; import io.prestosql.spi.type.Type; import io.prestosql.sql.tree.GrantorSpecification; +import io.prestosql.sql.tree.Identifier; import io.prestosql.sql.tree.Node; import io.prestosql.sql.tree.PrincipalSpecification; import io.prestosql.sql.tree.QualifiedName; @@ -184,6 +186,19 @@ public static PrestoPrincipal createPrincipal(PrincipalSpecification specificati } } + public static PrincipalSpecification createPrincipal(PrestoPrincipal principal) + { + PrincipalType type = principal.getType(); + switch (type) { + case USER: + return new PrincipalSpecification(PrincipalSpecification.Type.USER, new Identifier(principal.getName())); + case ROLE: + return new PrincipalSpecification(PrincipalSpecification.Type.ROLE, new 
Identifier(principal.getName())); + default: + throw new IllegalArgumentException("Unsupported type: " + type); + } + } + public static boolean tableExists(Metadata metadata, Session session, String table) { if (!session.getCatalog().isPresent() || !session.getSchema().isPresent()) { diff --git a/presto-main/src/main/java/io/prestosql/security/AccessControl.java b/presto-main/src/main/java/io/prestosql/security/AccessControl.java index dfd6dba65ffc..934b2fcec32e 100644 --- a/presto-main/src/main/java/io/prestosql/security/AccessControl.java +++ b/presto-main/src/main/java/io/prestosql/security/AccessControl.java @@ -127,6 +127,13 @@ public interface AccessControl */ Set filterSchemas(SecurityContext context, String catalogName, Set schemaNames); + /** + * Check if identity is allowed to execute SHOW CREATE SCHEMA. + * + * @throws io.prestosql.spi.security.AccessDeniedException if not allowed + */ + void checkCanShowCreateSchema(SecurityContext context, CatalogSchemaName schemaName); + /** * Check if identity is allowed to execute SHOW CREATE TABLE or SHOW CREATE VIEW. 
* diff --git a/presto-main/src/main/java/io/prestosql/security/AccessControlManager.java b/presto-main/src/main/java/io/prestosql/security/AccessControlManager.java index 307d3353490f..a314980c53eb 100644 --- a/presto-main/src/main/java/io/prestosql/security/AccessControlManager.java +++ b/presto-main/src/main/java/io/prestosql/security/AccessControlManager.java @@ -335,6 +335,19 @@ public Set filterSchemas(SecurityContext securityContext, String catalog return schemaNames; } + @Override + public void checkCanShowCreateSchema(SecurityContext securityContext, CatalogSchemaName schemaName) + { + requireNonNull(securityContext, "securityContext is null"); + requireNonNull(schemaName, "schemaName is null"); + + checkCanAccessCatalog(securityContext, schemaName.getCatalogName()); + + systemAuthorizationCheck(control -> control.checkCanShowCreateSchema(securityContext.toSystemSecurityContext(), schemaName)); + + catalogAuthorizationCheck(schemaName.getCatalogName(), securityContext, (control, context) -> control.checkCanShowCreateSchema(context, schemaName.getSchemaName())); + } + @Override public void checkCanShowCreateTable(SecurityContext securityContext, QualifiedObjectName tableName) { diff --git a/presto-main/src/main/java/io/prestosql/security/AllowAllAccessControl.java b/presto-main/src/main/java/io/prestosql/security/AllowAllAccessControl.java index f68c8f063bee..212bab148c71 100644 --- a/presto-main/src/main/java/io/prestosql/security/AllowAllAccessControl.java +++ b/presto-main/src/main/java/io/prestosql/security/AllowAllAccessControl.java @@ -98,6 +98,11 @@ public Set filterSchemas(SecurityContext context, String catalogName, Se return schemaNames; } + @Override + public void checkCanShowCreateSchema(SecurityContext context, CatalogSchemaName schemaName) + { + } + @Override public void checkCanShowCreateTable(SecurityContext context, QualifiedObjectName tableName) { diff --git a/presto-main/src/main/java/io/prestosql/security/DenyAllAccessControl.java 
b/presto-main/src/main/java/io/prestosql/security/DenyAllAccessControl.java index 399a193aa17b..729573033749 100644 --- a/presto-main/src/main/java/io/prestosql/security/DenyAllAccessControl.java +++ b/presto-main/src/main/java/io/prestosql/security/DenyAllAccessControl.java @@ -64,6 +64,7 @@ import static io.prestosql.spi.security.AccessDeniedException.denySetSystemSessionProperty; import static io.prestosql.spi.security.AccessDeniedException.denySetUser; import static io.prestosql.spi.security.AccessDeniedException.denyShowColumns; +import static io.prestosql.spi.security.AccessDeniedException.denyShowCreateSchema; import static io.prestosql.spi.security.AccessDeniedException.denyShowCreateTable; import static io.prestosql.spi.security.AccessDeniedException.denyShowCurrentRoles; import static io.prestosql.spi.security.AccessDeniedException.denyShowRoleGrants; @@ -135,6 +136,12 @@ public void checkCanRenameSchema(SecurityContext context, CatalogSchemaName sche denyRenameSchema(schemaName.toString(), newSchemaName); } + @Override + public void checkCanShowCreateSchema(SecurityContext context, CatalogSchemaName schemaName) + { + denyShowCreateSchema(schemaName.toString()); + } + @Override public void checkCanShowCreateTable(SecurityContext context, QualifiedObjectName tableName) { diff --git a/presto-main/src/main/java/io/prestosql/security/ForwardingAccessControl.java b/presto-main/src/main/java/io/prestosql/security/ForwardingAccessControl.java index d995369b9fd2..beacdd17d03d 100644 --- a/presto-main/src/main/java/io/prestosql/security/ForwardingAccessControl.java +++ b/presto-main/src/main/java/io/prestosql/security/ForwardingAccessControl.java @@ -129,6 +129,12 @@ public Set filterSchemas(SecurityContext context, String catalogName, Se return delegate().filterSchemas(context, catalogName, schemaNames); } + @Override + public void checkCanShowCreateSchema(SecurityContext context, CatalogSchemaName schemaName) + { + delegate().checkCanShowCreateSchema(context, 
schemaName); + } + @Override public void checkCanShowCreateTable(SecurityContext context, QualifiedObjectName tableName) { diff --git a/presto-main/src/main/java/io/prestosql/sql/rewrite/ShowQueriesRewrite.java b/presto-main/src/main/java/io/prestosql/sql/rewrite/ShowQueriesRewrite.java index 3c2a35f75875..697693aa7962 100644 --- a/presto-main/src/main/java/io/prestosql/sql/rewrite/ShowQueriesRewrite.java +++ b/presto-main/src/main/java/io/prestosql/sql/rewrite/ShowQueriesRewrite.java @@ -25,6 +25,7 @@ import io.prestosql.metadata.FunctionKind; import io.prestosql.metadata.FunctionMetadata; import io.prestosql.metadata.Metadata; +import io.prestosql.metadata.MetadataUtil; import io.prestosql.metadata.QualifiedObjectName; import io.prestosql.metadata.SessionPropertyManager.SessionPropertyValue; import io.prestosql.metadata.TableHandle; @@ -46,6 +47,7 @@ import io.prestosql.sql.tree.AstVisitor; import io.prestosql.sql.tree.BooleanLiteral; import io.prestosql.sql.tree.ColumnDefinition; +import io.prestosql.sql.tree.CreateSchema; import io.prestosql.sql.tree.CreateTable; import io.prestosql.sql.tree.CreateView; import io.prestosql.sql.tree.DoubleLiteral; @@ -57,6 +59,7 @@ import io.prestosql.sql.tree.Node; import io.prestosql.sql.tree.NodeRef; import io.prestosql.sql.tree.Parameter; +import io.prestosql.sql.tree.PrincipalSpecification; import io.prestosql.sql.tree.Property; import io.prestosql.sql.tree.QualifiedName; import io.prestosql.sql.tree.Query; @@ -98,6 +101,7 @@ import static io.prestosql.metadata.MetadataUtil.createQualifiedObjectName; import static io.prestosql.spi.StandardErrorCode.CATALOG_NOT_FOUND; import static io.prestosql.spi.StandardErrorCode.INVALID_COLUMN_PROPERTY; +import static io.prestosql.spi.StandardErrorCode.INVALID_SCHEMA_PROPERTY; import static io.prestosql.spi.StandardErrorCode.INVALID_TABLE_PROPERTY; import static io.prestosql.spi.StandardErrorCode.INVALID_VIEW; import static io.prestosql.spi.StandardErrorCode.MISSING_CATALOG_NAME; @@ 
-127,6 +131,7 @@ import static io.prestosql.sql.tree.BooleanLiteral.FALSE_LITERAL; import static io.prestosql.sql.tree.BooleanLiteral.TRUE_LITERAL; import static io.prestosql.sql.tree.LogicalBinaryExpression.and; +import static io.prestosql.sql.tree.ShowCreate.Type.SCHEMA; import static io.prestosql.sql.tree.ShowCreate.Type.TABLE; import static io.prestosql.sql.tree.ShowCreate.Type.VIEW; import static java.lang.String.format; @@ -427,10 +432,10 @@ private static Expression toExpression(Object value) @Override protected Node visitShowCreate(ShowCreate node, Void context) { - QualifiedObjectName objectName = createQualifiedObjectName(session, node, node.getName()); - Optional viewDefinition = metadata.getView(session, objectName); - if (node.getType() == VIEW) { + QualifiedObjectName objectName = createQualifiedObjectName(session, node, node.getName()); + Optional viewDefinition = metadata.getView(session, objectName); + if (!viewDefinition.isPresent()) { if (metadata.getTableHandle(session, objectName).isPresent()) { throw semanticException(NOT_SUPPORTED, node, "Relation '%s' is a table, not a view", objectName); @@ -451,6 +456,9 @@ protected Node visitShowCreate(ShowCreate node, Void context) } if (node.getType() == TABLE) { + QualifiedObjectName objectName = createQualifiedObjectName(session, node, node.getName()); + Optional viewDefinition = metadata.getView(session, objectName); + if (viewDefinition.isPresent()) { throw semanticException(NOT_SUPPORTED, node, "Relation '%s' is a view, not a table", objectName); } @@ -486,7 +494,31 @@ protected Node visitShowCreate(ShowCreate node, Void context) return singleValueQuery("Create Table", formatSql(createTable).trim()); } - throw new UnsupportedOperationException("SHOW CREATE only supported for tables and views"); + if (node.getType() == SCHEMA) { + CatalogSchemaName schemaName = createCatalogSchemaName(session, node, Optional.of(node.getName())); + + if (!metadata.schemaExists(session, schemaName)) { + throw 
semanticException(SCHEMA_NOT_FOUND, node, "Schema '%s' does not exist", schemaName); + } + + accessControl.checkCanShowCreateSchema(session.toSecurityContext(), schemaName); + + Map properties = metadata.getSchemaProperties(session, schemaName); + Map> allTableProperties = metadata.getSchemaPropertyManager().getAllProperties().get(new CatalogName(schemaName.getCatalogName())); + QualifiedName qualifiedSchemaName = QualifiedName.of(schemaName.getCatalogName(), schemaName.getSchemaName()); + List propertyNodes = buildProperties(qualifiedSchemaName, Optional.empty(), INVALID_SCHEMA_PROPERTY, properties, allTableProperties); + + Optional owner = metadata.getSchemaOwner(session, schemaName).map(principal -> MetadataUtil.createPrincipal(principal)); + + CreateSchema createSchema = new CreateSchema( + qualifiedSchemaName, + false, + propertyNodes, + owner); + return singleValueQuery("Create Schema", formatSql(createSchema).trim()); + } + + throw new UnsupportedOperationException("SHOW CREATE only supported for schemas, tables and views"); } private List buildProperties( diff --git a/presto-main/src/test/java/io/prestosql/metadata/AbstractMockMetadata.java b/presto-main/src/test/java/io/prestosql/metadata/AbstractMockMetadata.java index d87f3d88656c..4bdafedb5deb 100644 --- a/presto-main/src/test/java/io/prestosql/metadata/AbstractMockMetadata.java +++ b/presto-main/src/test/java/io/prestosql/metadata/AbstractMockMetadata.java @@ -387,6 +387,18 @@ public Optional getView(Session session, QualifiedObjec throw new UnsupportedOperationException(); } + @Override + public Map getSchemaProperties(Session session, CatalogSchemaName schemaName) + { + throw new UnsupportedOperationException(); + } + + @Override + public Optional getSchemaOwner(Session session, CatalogSchemaName schemaName) + { + throw new UnsupportedOperationException(); + } + @Override public void createView(Session session, QualifiedObjectName viewName, ConnectorViewDefinition definition, boolean replace) { diff 
--git a/presto-memory/src/main/java/io/prestosql/plugin/memory/MemoryMetadata.java b/presto-memory/src/main/java/io/prestosql/plugin/memory/MemoryMetadata.java index 32245aca8af7..412cd1917ab4 100644 --- a/presto-memory/src/main/java/io/prestosql/plugin/memory/MemoryMetadata.java +++ b/presto-memory/src/main/java/io/prestosql/plugin/memory/MemoryMetadata.java @@ -22,6 +22,7 @@ import io.prestosql.spi.Node; import io.prestosql.spi.NodeManager; import io.prestosql.spi.PrestoException; +import io.prestosql.spi.connector.CatalogSchemaName; import io.prestosql.spi.connector.ColumnHandle; import io.prestosql.spi.connector.ColumnMetadata; import io.prestosql.spi.connector.ConnectorInsertTableHandle; @@ -120,6 +121,18 @@ public synchronized void dropSchema(ConnectorSession session, String schemaName) verify(schemas.remove(schemaName)); } + @Override + public Optional getSchemaOwner(ConnectorSession session, CatalogSchemaName schemaName) + { + return Optional.empty(); + } + + @Override + public Map getSchemaProperties(ConnectorSession session, CatalogSchemaName schemaName) + { + return ImmutableMap.of(); + } + @Override public synchronized ConnectorTableHandle getTableHandle(ConnectorSession session, SchemaTableName schemaTableName) { diff --git a/presto-parser/src/main/antlr4/io/prestosql/sql/parser/SqlBase.g4 b/presto-parser/src/main/antlr4/io/prestosql/sql/parser/SqlBase.g4 index 0b805d556418..2c8cac0da5bd 100644 --- a/presto-parser/src/main/antlr4/io/prestosql/sql/parser/SqlBase.g4 +++ b/presto-parser/src/main/antlr4/io/prestosql/sql/parser/SqlBase.g4 @@ -97,6 +97,7 @@ statement | EXPLAIN ANALYZE? VERBOSE? ('(' explainOption (',' explainOption)* ')')? statement #explain | SHOW CREATE TABLE qualifiedName #showCreateTable + | SHOW CREATE SCHEMA qualifiedName #showCreateSchema | SHOW CREATE VIEW qualifiedName #showCreateView | SHOW TABLES ((FROM | IN) qualifiedName)? (LIKE pattern=string (ESCAPE escape=string)?)? 
#showTables diff --git a/presto-parser/src/main/java/io/prestosql/sql/SqlFormatter.java b/presto-parser/src/main/java/io/prestosql/sql/SqlFormatter.java index fa23d0cacc14..85000e1a70d9 100644 --- a/presto-parser/src/main/java/io/prestosql/sql/SqlFormatter.java +++ b/presto-parser/src/main/java/io/prestosql/sql/SqlFormatter.java @@ -823,7 +823,7 @@ protected Void visitCreateSchema(CreateSchema node, Integer context) } builder.append(formatName(node.getSchemaName())); if (node.getPrincipal().isPresent()) { - builder.append(" AUTHORIZATION ") + builder.append("\nAUTHORIZATION ") .append(formatPrincipal(node.getPrincipal().get())); } builder.append(formatPropertiesMultiLine(node.getProperties())); diff --git a/presto-parser/src/main/java/io/prestosql/sql/parser/AstBuilder.java b/presto-parser/src/main/java/io/prestosql/sql/parser/AstBuilder.java index 1f0ae0350752..99bf34471792 100644 --- a/presto-parser/src/main/java/io/prestosql/sql/parser/AstBuilder.java +++ b/presto-parser/src/main/java/io/prestosql/sql/parser/AstBuilder.java @@ -896,6 +896,12 @@ public Node visitShowStatsForQuery(SqlBaseParser.ShowStatsForQueryContext contex return new ShowStats(Optional.of(getLocation(context)), new TableSubquery(query(specification))); } + @Override + public Node visitShowCreateSchema(SqlBaseParser.ShowCreateSchemaContext context) + { + return new ShowCreate(getLocation(context), ShowCreate.Type.SCHEMA, getQualifiedName(context.qualifiedName())); + } + @Override public Node visitShowCreateView(SqlBaseParser.ShowCreateViewContext context) { diff --git a/presto-parser/src/main/java/io/prestosql/sql/tree/CreateSchema.java b/presto-parser/src/main/java/io/prestosql/sql/tree/CreateSchema.java index 78865a705f72..4fdd6f1fa1f3 100644 --- a/presto-parser/src/main/java/io/prestosql/sql/tree/CreateSchema.java +++ b/presto-parser/src/main/java/io/prestosql/sql/tree/CreateSchema.java @@ -35,6 +35,11 @@ public CreateSchema(QualifiedName schemaName, boolean notExists, List 
this(Optional.empty(), schemaName, notExists, properties, Optional.empty()); } + public CreateSchema(QualifiedName schemaName, boolean notExists, List properties, Optional principal) + { + this(Optional.empty(), schemaName, notExists, properties, principal); + } + public CreateSchema(NodeLocation location, QualifiedName schemaName, boolean notExists, List properties, Optional principal) { this(Optional.of(location), schemaName, notExists, properties, principal); diff --git a/presto-parser/src/main/java/io/prestosql/sql/tree/ShowCreate.java b/presto-parser/src/main/java/io/prestosql/sql/tree/ShowCreate.java index 3f6f04039003..dadb83eb0009 100644 --- a/presto-parser/src/main/java/io/prestosql/sql/tree/ShowCreate.java +++ b/presto-parser/src/main/java/io/prestosql/sql/tree/ShowCreate.java @@ -28,7 +28,9 @@ public class ShowCreate public enum Type { TABLE, - VIEW + VIEW, + SCHEMA, + /**/; } private final Type type; diff --git a/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/classloader/ClassLoaderSafeConnectorAccessControl.java b/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/classloader/ClassLoaderSafeConnectorAccessControl.java index 3af9e6f6df78..02718d604cdf 100644 --- a/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/classloader/ClassLoaderSafeConnectorAccessControl.java +++ b/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/classloader/ClassLoaderSafeConnectorAccessControl.java @@ -93,6 +93,14 @@ public Set filterSchemas(ConnectorSecurityContext context, Set s } } + @Override + public void checkCanShowCreateSchema(ConnectorSecurityContext context, String schemaName) + { + try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(classLoader)) { + delegate.checkCanShowCreateSchema(context, schemaName); + } + } + @Override public void checkCanShowCreateTable(ConnectorSecurityContext context, SchemaTableName tableName) { diff --git 
a/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/classloader/ClassLoaderSafeConnectorMetadata.java b/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/classloader/ClassLoaderSafeConnectorMetadata.java index b9071ebc95ad..39e507e1d229 100644 --- a/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/classloader/ClassLoaderSafeConnectorMetadata.java +++ b/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/classloader/ClassLoaderSafeConnectorMetadata.java @@ -15,6 +15,7 @@ import io.airlift.slice.Slice; import io.prestosql.spi.classloader.ThreadContextClassLoader; +import io.prestosql.spi.connector.CatalogSchemaName; import io.prestosql.spi.connector.ColumnHandle; import io.prestosql.spi.connector.ColumnMetadata; import io.prestosql.spi.connector.ConnectorInsertTableHandle; @@ -470,6 +471,22 @@ public Optional getView(ConnectorSession session, Schem } } + @Override + public Map getSchemaProperties(ConnectorSession session, CatalogSchemaName schemaName) + { + try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(classLoader)) { + return delegate.getSchemaProperties(session, schemaName); + } + } + + @Override + public Optional getSchemaOwner(ConnectorSession session, CatalogSchemaName schemaName) + { + try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(classLoader)) { + return delegate.getSchemaOwner(session, schemaName); + } + } + @Override public ColumnHandle getUpdateRowIdColumnHandle(ConnectorSession session, ConnectorTableHandle tableHandle) { diff --git a/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/security/AllowAllAccessControl.java b/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/security/AllowAllAccessControl.java index de33df383ca7..b75cb0e9e249 100644 --- a/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/security/AllowAllAccessControl.java +++ b/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/security/AllowAllAccessControl.java 
@@ -61,6 +61,11 @@ public Set filterSchemas(ConnectorSecurityContext context, Set s return schemaNames; } + @Override + public void checkCanShowCreateSchema(ConnectorSecurityContext context, String schemaName) + { + } + @Override public void checkCanShowCreateTable(ConnectorSecurityContext context, SchemaTableName tableName) { diff --git a/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/security/AllowAllSystemAccessControl.java b/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/security/AllowAllSystemAccessControl.java index d886f8038416..ea76bb0b9c60 100644 --- a/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/security/AllowAllSystemAccessControl.java +++ b/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/security/AllowAllSystemAccessControl.java @@ -138,6 +138,11 @@ public Set filterSchemas(SystemSecurityContext context, String catalogNa return schemaNames; } + @Override + public void checkCanShowCreateSchema(SystemSecurityContext context, CatalogSchemaName schemaName) + { + } + @Override public void checkCanShowCreateTable(SystemSecurityContext context, CatalogSchemaTableName table) { diff --git a/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/security/FileBasedAccessControl.java b/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/security/FileBasedAccessControl.java index f009c69ae110..5bced988e141 100644 --- a/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/security/FileBasedAccessControl.java +++ b/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/security/FileBasedAccessControl.java @@ -63,6 +63,7 @@ import static io.prestosql.spi.security.AccessDeniedException.denySetCatalogSessionProperty; import static io.prestosql.spi.security.AccessDeniedException.denySetSchemaAuthorization; import static io.prestosql.spi.security.AccessDeniedException.denyShowColumns; +import static io.prestosql.spi.security.AccessDeniedException.denyShowCreateSchema; import static 
io.prestosql.spi.security.AccessDeniedException.denyShowCreateTable; public class FileBasedAccessControl @@ -127,6 +128,14 @@ public Set filterSchemas(ConnectorSecurityContext context, Set s return schemaNames; } + @Override + public void checkCanShowCreateSchema(ConnectorSecurityContext context, String schemaName) + { + if (!isSchemaOwner(context, schemaName)) { + denyShowCreateSchema(schemaName); + } + } + @Override public void checkCanShowCreateTable(ConnectorSecurityContext context, SchemaTableName tableName) { diff --git a/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/security/FileBasedSystemAccessControl.java b/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/security/FileBasedSystemAccessControl.java index 6f251e1684fc..ebe32fca4f9d 100644 --- a/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/security/FileBasedSystemAccessControl.java +++ b/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/security/FileBasedSystemAccessControl.java @@ -72,6 +72,7 @@ import static io.prestosql.spi.security.AccessDeniedException.denyRevokeTablePrivilege; import static io.prestosql.spi.security.AccessDeniedException.denySetSchemaAuthorization; import static io.prestosql.spi.security.AccessDeniedException.denySetUser; +import static io.prestosql.spi.security.AccessDeniedException.denyShowCreateSchema; import static io.prestosql.spi.security.AccessDeniedException.denyShowCreateTable; import static io.prestosql.spi.security.AccessDeniedException.denyViewQuery; import static java.lang.String.format; @@ -374,6 +375,14 @@ public void checkCanShowCreateTable(SystemSecurityContext context, CatalogSchema } } + @Override + public void checkCanShowCreateSchema(SystemSecurityContext context, CatalogSchemaName schemaName) + { + if (!canAccessCatalog(context.getIdentity(), schemaName.getCatalogName(), ALL)) { + denyShowCreateSchema(schemaName.toString()); + } + } + @Override public void checkCanCreateTable(SystemSecurityContext context, 
CatalogSchemaTableName table) { diff --git a/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/security/ForwardingConnectorAccessControl.java b/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/security/ForwardingConnectorAccessControl.java index 29206838b47f..f6b5a141638c 100644 --- a/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/security/ForwardingConnectorAccessControl.java +++ b/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/security/ForwardingConnectorAccessControl.java @@ -84,6 +84,12 @@ public Set filterSchemas(ConnectorSecurityContext context, Set s return delegate().filterSchemas(context, schemaNames); } + @Override + public void checkCanShowCreateSchema(ConnectorSecurityContext context, String schemaName) + { + delegate().checkCanShowCreateSchema(context, schemaName); + } + @Override public void checkCanShowCreateTable(ConnectorSecurityContext context, SchemaTableName tableName) { diff --git a/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/security/ForwardingSystemAccessControl.java b/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/security/ForwardingSystemAccessControl.java index aaa1738d67d5..75fb9252955f 100644 --- a/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/security/ForwardingSystemAccessControl.java +++ b/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/security/ForwardingSystemAccessControl.java @@ -141,6 +141,12 @@ public Set filterSchemas(SystemSecurityContext context, String catalogNa return delegate().filterSchemas(context, catalogName, schemaNames); } + @Override + public void checkCanShowCreateSchema(SystemSecurityContext context, CatalogSchemaName schemaName) + { + delegate().checkCanShowCreateSchema(context, schemaName); + } + @Override public void checkCanShowCreateTable(SystemSecurityContext context, CatalogSchemaTableName table) { diff --git 
a/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/security/ReadOnlyAccessControl.java b/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/security/ReadOnlyAccessControl.java index 47c19b78f132..3e5ba02eb869 100644 --- a/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/security/ReadOnlyAccessControl.java +++ b/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/security/ReadOnlyAccessControl.java @@ -64,6 +64,11 @@ public void checkCanDropColumn(ConnectorSecurityContext context, SchemaTableName denyDropColumn(tableName.toString()); } + @Override + public void checkCanShowCreateSchema(ConnectorSecurityContext context, String schemaName) + { + } + @Override public void checkCanShowCreateTable(ConnectorSecurityContext context, SchemaTableName tableName) { diff --git a/presto-plugin-toolkit/src/test/java/io/prestosql/plugin/base/security/TestFileBasedAccessControl.java b/presto-plugin-toolkit/src/test/java/io/prestosql/plugin/base/security/TestFileBasedAccessControl.java index 1eb5735b3e78..3574873d8e8e 100644 --- a/presto-plugin-toolkit/src/test/java/io/prestosql/plugin/base/security/TestFileBasedAccessControl.java +++ b/presto-plugin-toolkit/src/test/java/io/prestosql/plugin/base/security/TestFileBasedAccessControl.java @@ -116,6 +116,21 @@ public void testSchemaRules() accessControl.checkCanSetSchemaAuthorization(BOB, "bob", new PrestoPrincipal(PrincipalType.USER, "some_user")); assertDenied(() -> accessControl.checkCanSetSchemaAuthorization(BOB, "test", new PrestoPrincipal(PrincipalType.ROLE, "some_role"))); assertDenied(() -> accessControl.checkCanSetSchemaAuthorization(BOB, "test", new PrestoPrincipal(PrincipalType.USER, "some_user"))); + + accessControl.checkCanShowCreateSchema(ADMIN, "bob"); + accessControl.checkCanShowCreateSchema(ADMIN, "staff"); + accessControl.checkCanShowCreateSchema(ADMIN, "authenticated"); + accessControl.checkCanShowCreateSchema(ADMIN, "test"); + + accessControl.checkCanShowCreateSchema(BOB, 
"bob"); + accessControl.checkCanShowCreateSchema(BOB, "staff"); + accessControl.checkCanShowCreateSchema(BOB, "authenticated"); + assertDenied(() -> accessControl.checkCanShowCreateSchema(BOB, "test")); + + assertDenied(() -> accessControl.checkCanShowCreateSchema(CHARLIE, "bob")); + assertDenied(() -> accessControl.checkCanShowCreateSchema(CHARLIE, "staff")); + accessControl.checkCanShowCreateSchema(CHARLIE, "authenticated"); + assertDenied(() -> accessControl.checkCanShowCreateSchema(CHARLIE, "test")); } @Test diff --git a/presto-spi/src/main/java/io/prestosql/spi/connector/ConnectorAccessControl.java b/presto-spi/src/main/java/io/prestosql/spi/connector/ConnectorAccessControl.java index 5414ef8cb971..361917bbbe6c 100644 --- a/presto-spi/src/main/java/io/prestosql/spi/connector/ConnectorAccessControl.java +++ b/presto-spi/src/main/java/io/prestosql/spi/connector/ConnectorAccessControl.java @@ -51,6 +51,7 @@ import static io.prestosql.spi.security.AccessDeniedException.denySetRole; import static io.prestosql.spi.security.AccessDeniedException.denySetSchemaAuthorization; import static io.prestosql.spi.security.AccessDeniedException.denyShowColumns; +import static io.prestosql.spi.security.AccessDeniedException.denyShowCreateSchema; import static io.prestosql.spi.security.AccessDeniedException.denyShowCreateTable; import static io.prestosql.spi.security.AccessDeniedException.denyShowCurrentRoles; import static io.prestosql.spi.security.AccessDeniedException.denyShowRoleGrants; @@ -123,6 +124,16 @@ default Set filterSchemas(ConnectorSecurityContext context, Set return emptySet(); } + /** + * Check if identity is allowed to execute SHOW CREATE SCHEMA. + * + * @throws io.prestosql.spi.security.AccessDeniedException if not allowed + */ + default void checkCanShowCreateSchema(ConnectorSecurityContext context, String schemaName) + { + denyShowCreateSchema(schemaName); + } + /** * Check if identity is allowed to execute SHOW CREATE TABLE or SHOW CREATE VIEW. 
* diff --git a/presto-spi/src/main/java/io/prestosql/spi/connector/ConnectorMetadata.java b/presto-spi/src/main/java/io/prestosql/spi/connector/ConnectorMetadata.java index 3cd67d49cf6f..66cf1bb9818a 100644 --- a/presto-spi/src/main/java/io/prestosql/spi/connector/ConnectorMetadata.java +++ b/presto-spi/src/main/java/io/prestosql/spi/connector/ConnectorMetadata.java @@ -521,6 +521,22 @@ default Optional getView(ConnectorSession session, Sche return Optional.empty(); } + /** + * Gets the schema properties for the specified schema. + */ + default Map getSchemaProperties(ConnectorSession session, CatalogSchemaName schemaName) + { + throw new PrestoException(NOT_SUPPORTED, "This connector does not support schema properties"); + } + + /** + * Get the schema properties for the specified schema. + */ + default Optional getSchemaOwner(ConnectorSession session, CatalogSchemaName schemaName) + { + throw new PrestoException(NOT_SUPPORTED, "This connector does not support schema ownership"); + } + /** * @return whether delete without table scan is supported */ diff --git a/presto-spi/src/main/java/io/prestosql/spi/security/AccessDeniedException.java b/presto-spi/src/main/java/io/prestosql/spi/security/AccessDeniedException.java index fba33bda69cb..7aca9ea8e661 100644 --- a/presto-spi/src/main/java/io/prestosql/spi/security/AccessDeniedException.java +++ b/presto-spi/src/main/java/io/prestosql/spi/security/AccessDeniedException.java @@ -136,6 +136,11 @@ public static void denyShowSchemas(String extraInfo) throw new AccessDeniedException(format("Cannot show schemas%s", formatExtraInfo(extraInfo))); } + public static void denyShowCreateSchema(String schemaName) + { + throw new AccessDeniedException(format("Cannot show create schema for %s", schemaName)); + } + public static void denyShowCreateTable(String tableName) { denyShowCreateTable(tableName, null); diff --git a/presto-spi/src/main/java/io/prestosql/spi/security/SystemAccessControl.java 
b/presto-spi/src/main/java/io/prestosql/spi/security/SystemAccessControl.java index 664526ce82bb..83ab0e09ee24 100644 --- a/presto-spi/src/main/java/io/prestosql/spi/security/SystemAccessControl.java +++ b/presto-spi/src/main/java/io/prestosql/spi/security/SystemAccessControl.java @@ -56,6 +56,7 @@ import static io.prestosql.spi.security.AccessDeniedException.denySetSchemaAuthorization; import static io.prestosql.spi.security.AccessDeniedException.denySetUser; import static io.prestosql.spi.security.AccessDeniedException.denyShowColumns; +import static io.prestosql.spi.security.AccessDeniedException.denyShowCreateSchema; import static io.prestosql.spi.security.AccessDeniedException.denyShowCreateTable; import static io.prestosql.spi.security.AccessDeniedException.denyShowRoles; import static io.prestosql.spi.security.AccessDeniedException.denyShowSchemas; @@ -215,6 +216,16 @@ default Set filterSchemas(SystemSecurityContext context, String catalogN return Collections.emptySet(); } + /** + * Check if identity is allowed to execute SHOW CREATE SCHEMA. + * + * @throws io.prestosql.spi.security.AccessDeniedException if not allowed + */ + default void checkCanShowCreateSchema(SystemSecurityContext context, CatalogSchemaName schemaName) + { + denyShowCreateSchema(schemaName.toString()); + } + /** * Check if identity is allowed to execute SHOW CREATE TABLE or SHOW CREATE VIEW. 
* From 18caddca87685a1d839604ca4b3f5b2df141cbe0 Mon Sep 17 00:00:00 2001 From: David Phillips Date: Wed, 15 Apr 2020 12:27:28 -0700 Subject: [PATCH 203/519] Increase width of Hive Thrift metastore table --- .../src/main/sphinx/connector/hive.rst | 46 +++++++++---------- 1 file changed, 23 insertions(+), 23 deletions(-) diff --git a/presto-docs/src/main/sphinx/connector/hive.rst b/presto-docs/src/main/sphinx/connector/hive.rst index 3422c9800798..d51e556d07d9 100644 --- a/presto-docs/src/main/sphinx/connector/hive.rst +++ b/presto-docs/src/main/sphinx/connector/hive.rst @@ -248,39 +248,39 @@ Property Name Description Hive Thrift Metastore Configuration Properties ---------------------------------------------- -================================================== ============================================================ ============ -Property Name Description Default -================================================== ============================================================ ============ -``hive.metastore.uri`` The URI(s) of the Hive metastore to connect to using the - Thrift protocol. If multiple URIs are provided, the first - URI is used by default, and the rest of the URIs are - fallback metastores. This property is required. - Example: ``thrift://192.0.2.3:9083`` or - ``thrift://192.0.2.3:9083,thrift://192.0.2.4:9083`` +======================================================== ============================================================ ============ +Property Name Description Default +======================================================== ============================================================ ============ +``hive.metastore.uri`` The URI(s) of the Hive metastore to connect to using the + Thrift protocol. If multiple URIs are provided, the first + URI is used by default, and the rest of the URIs are + fallback metastores. This property is required. 
+ Example: ``thrift://192.0.2.3:9083`` or + ``thrift://192.0.2.3:9083,thrift://192.0.2.4:9083`` -``hive.metastore.username`` The username Presto uses to access the Hive metastore. +``hive.metastore.username`` The username Presto uses to access the Hive metastore. -``hive.metastore.authentication.type`` Hive metastore authentication type. - Possible values are ``NONE`` or ``KERBEROS`` - (defaults to ``NONE``). +``hive.metastore.authentication.type`` Hive metastore authentication type. + Possible values are ``NONE`` or ``KERBEROS`` + (defaults to ``NONE``). -``hive.metastore.thrift.impersonation.enabled`` Enable Hive metastore end user impersonation. +``hive.metastore.thrift.impersonation.enabled`` Enable Hive metastore end user impersonation. -``hive.metastore.service.principal`` The Kerberos principal of the Hive metastore service. +``hive.metastore.service.principal`` The Kerberos principal of the Hive metastore service. -``hive.metastore.client.principal`` The Kerberos principal that Presto uses when connecting - to the Hive metastore service. +``hive.metastore.client.principal`` The Kerberos principal that Presto uses when connecting + to the Hive metastore service. -``hive.metastore.client.keytab`` Hive metastore client keytab location. +``hive.metastore.client.keytab`` Hive metastore client keytab location. -``hive.metastore-cache-ttl`` Time to live Hive metadata cache. ``0s`` +``hive.metastore-cache-ttl`` Time to live Hive metadata cache. ``0s`` -``hive.metastore-refresh-interval`` How often to refresh the Hive metastore cache. +``hive.metastore-refresh-interval`` How often to refresh the Hive metastore cache. -``hive.metastore-cache-maximum-size`` Hive metastore cache maximum size. 10,000 +``hive.metastore-cache-maximum-size`` Hive metastore cache maximum size. 10,000 -``hive.metastore-refresh-max-threads`` Maximum number of threads to refresh Hive metastore cache. 
100 -================================================== ============================================================ ============ +``hive.metastore-refresh-max-threads`` Maximum number of threads to refresh Hive metastore cache. 100 +======================================================== ============================================================ ============ AWS Glue Catalog Configuration Properties ----------------------------------------- From 91576930f64ae463b849e8f487e3ac66b758adf9 Mon Sep 17 00:00:00 2001 From: Dongwoo Son Date: Thu, 12 Mar 2020 12:56:00 -0700 Subject: [PATCH 204/519] Add SSL/TLS support for Hive Metastore --- .../src/main/sphinx/connector/hive.rst | 9 ++ presto-hive/pom.xml | 5 + .../thrift/ThriftMetastoreClientFactory.java | 120 +++++++++++++++++- .../thrift/ThriftMetastoreConfig.java | 65 ++++++++++ .../thrift/TestThriftMetastoreConfig.java | 13 ++ 5 files changed, 209 insertions(+), 3 deletions(-) diff --git a/presto-docs/src/main/sphinx/connector/hive.rst b/presto-docs/src/main/sphinx/connector/hive.rst index d51e556d07d9..1f61331b761a 100644 --- a/presto-docs/src/main/sphinx/connector/hive.rst +++ b/presto-docs/src/main/sphinx/connector/hive.rst @@ -266,6 +266,15 @@ Property Name Description ``hive.metastore.thrift.impersonation.enabled`` Enable Hive metastore end user impersonation. +``hive.metastore.thrift.client.ssl.enabled`` Use SSL when connecting to metastore. ``false`` + +``hive.metastore.thrift.client.ssl.key`` Path to PEM private key and client certificate (key store). + +``hive.metastore.thrift.client.ssl.key-password`` Password for the PEM private key. + +``hive.metastore.thrift.client.ssl.trust-certificate`` Path to the PEM server certificate chain (trust store). + Required when SSL is enabled. + ``hive.metastore.service.principal`` The Kerberos principal of the Hive metastore service. 
``hive.metastore.client.principal`` The Kerberos principal that Presto uses when connecting diff --git a/presto-hive/pom.xml b/presto-hive/pom.xml index b7cd7be4a33e..bd151d27b13c 100644 --- a/presto-hive/pom.xml +++ b/presto-hive/pom.xml @@ -205,6 +205,11 @@ jackson-databind
+ + io.airlift + security + + io.airlift diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/thrift/ThriftMetastoreClientFactory.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/thrift/ThriftMetastoreClientFactory.java index 4d4ef939d55b..b710f5effe49 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/thrift/ThriftMetastoreClientFactory.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/thrift/ThriftMetastoreClientFactory.java @@ -14,17 +14,35 @@ package io.prestosql.plugin.hive.metastore.thrift; import com.google.common.net.HostAndPort; +import io.airlift.security.pem.PemReader; import io.airlift.units.Duration; import io.prestosql.plugin.hive.authentication.HiveMetastoreAuthentication; import io.prestosql.spi.NodeManager; import org.apache.thrift.transport.TTransportException; import javax.inject.Inject; +import javax.net.ssl.KeyManager; +import javax.net.ssl.KeyManagerFactory; import javax.net.ssl.SSLContext; +import javax.net.ssl.TrustManager; +import javax.net.ssl.TrustManagerFactory; +import javax.net.ssl.X509TrustManager; +import javax.security.auth.x500.X500Principal; +import java.io.File; +import java.io.IOException; +import java.security.GeneralSecurityException; +import java.security.KeyStore; +import java.security.cert.Certificate; +import java.security.cert.CertificateExpiredException; +import java.security.cert.CertificateNotYetValidException; +import java.security.cert.X509Certificate; +import java.util.Arrays; +import java.util.List; import java.util.Optional; import static java.lang.Math.toIntExact; +import static java.util.Collections.list; import static java.util.Objects.requireNonNull; public class ThriftMetastoreClientFactory @@ -50,14 +68,110 @@ public ThriftMetastoreClientFactory( } @Inject - public ThriftMetastoreClientFactory(ThriftMetastoreConfig config, HiveMetastoreAuthentication metastoreAuthentication, NodeManager nodeManager) + public 
ThriftMetastoreClientFactory( + ThriftMetastoreConfig config, + HiveMetastoreAuthentication metastoreAuthentication, + NodeManager nodeManager) { - this(Optional.empty(), Optional.ofNullable(config.getSocksProxy()), config.getMetastoreTimeout(), metastoreAuthentication, nodeManager.getCurrentNode().getHost()); + this( + buildSslContext( + config.isTlsEnabled(), + Optional.ofNullable(config.getKeystorePath()), + Optional.ofNullable(config.getKeystorePassword()), + config.getTruststorePath()), + Optional.ofNullable(config.getSocksProxy()), + config.getMetastoreTimeout(), + metastoreAuthentication, + nodeManager.getCurrentNode().getHost()); } public ThriftMetastoreClient create(HostAndPort address, Optional delegationToken) throws TTransportException { - return new ThriftHiveMetastoreClient(Transport.create(address, sslContext, socksProxy, timeoutMillis, metastoreAuthentication, delegationToken), hostname); + return new ThriftHiveMetastoreClient( + Transport.create(address, sslContext, socksProxy, timeoutMillis, metastoreAuthentication, delegationToken), + hostname); + } + + private static Optional buildSslContext( + boolean tlsEnabled, + Optional keyStorePath, + Optional keyStorePassword, + File trustStorePath) + { + if (!tlsEnabled) { + return Optional.empty(); + } + + try { + // load KeyStore if configured and get KeyManagers + KeyManager[] keyManagers = null; + if (keyStorePath.isPresent()) { + KeyStore keyStore = PemReader.loadKeyStore(keyStorePath.get(), keyStorePath.get(), keyStorePassword); + validateCertificates(keyStore); + KeyManagerFactory keyManagerFactory = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm()); + keyManagerFactory.init(keyStore, new char[0]); + keyManagers = keyManagerFactory.getKeyManagers(); + } + + // load TrustStore + KeyStore trustStore = loadTrustStore(trustStorePath); + + // create TrustManagerFactory + TrustManagerFactory trustManagerFactory = 
TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); + trustManagerFactory.init(trustStore); + + // get X509TrustManager + TrustManager[] trustManagers = trustManagerFactory.getTrustManagers(); + if (trustManagers.length != 1 || !(trustManagers[0] instanceof X509TrustManager)) { + throw new RuntimeException("Unexpected default trust managers:" + Arrays.toString(trustManagers)); + } + + // create SSLContext + SSLContext sslContext = SSLContext.getInstance("SSL"); + sslContext.init(keyManagers, trustManagers, null); + return Optional.of(sslContext); + } + catch (GeneralSecurityException | IOException e) { + throw new RuntimeException(e); + } + } + + private static KeyStore loadTrustStore(File trustStorePath) + throws IOException, GeneralSecurityException + { + List certificateChain = PemReader.readCertificateChain(trustStorePath); + + KeyStore trustStore = KeyStore.getInstance(KeyStore.getDefaultType()); + trustStore.load(null, null); + for (X509Certificate certificate : certificateChain) { + X500Principal principal = certificate.getSubjectX500Principal(); + trustStore.setCertificateEntry(principal.getName(), certificate); + } + return trustStore; + } + + private static void validateCertificates(KeyStore keyStore) + throws GeneralSecurityException + { + for (String alias : list(keyStore.aliases())) { + if (!keyStore.isKeyEntry(alias)) { + continue; + } + Certificate certificate = keyStore.getCertificate(alias); + if (!(certificate instanceof X509Certificate)) { + continue; + } + + try { + ((X509Certificate) certificate).checkValidity(); + } + catch (CertificateExpiredException e) { + throw new CertificateExpiredException("KeyStore certificate is expired: " + e.getMessage()); + } + catch (CertificateNotYetValidException e) { + throw new CertificateNotYetValidException("KeyStore certificate is not yet valid: " + e.getMessage()); + } + } } } diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/thrift/ThriftMetastoreConfig.java 
b/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/thrift/ThriftMetastoreConfig.java index 47934969f5bb..4a5e7068f32a 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/thrift/ThriftMetastoreConfig.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/thrift/ThriftMetastoreConfig.java @@ -20,9 +20,11 @@ import io.airlift.units.Duration; import io.prestosql.plugin.hive.util.RetryDriver; +import javax.validation.constraints.AssertTrue; import javax.validation.constraints.Min; import javax.validation.constraints.NotNull; +import java.io.File; import java.util.concurrent.TimeUnit; public class ThriftMetastoreConfig @@ -38,6 +40,11 @@ public class ThriftMetastoreConfig private boolean deleteFilesOnDrop; private Duration maxWaitForTransactionLock = new Duration(10, TimeUnit.MINUTES); + private boolean tlsEnabled; + private File keystorePath; + private String keystorePassword; + private File truststorePath; + @NotNull public Duration getMetastoreTimeout() { @@ -169,4 +176,62 @@ public ThriftMetastoreConfig setMaxWaitForTransactionLock(Duration maxWaitForTra this.maxWaitForTransactionLock = maxWaitForTransactionLock; return this; } + + public boolean isTlsEnabled() + { + return tlsEnabled; + } + + @Config("hive.metastore.thrift.client.ssl.enabled") + @ConfigDescription("Whether TLS security is enabled") + public ThriftMetastoreConfig setTlsEnabled(boolean tlsEnabled) + { + this.tlsEnabled = tlsEnabled; + return this; + } + + public File getKeystorePath() + { + return keystorePath; + } + + @Config("hive.metastore.thrift.client.ssl.key") + @ConfigDescription("Path to the PEM key store") + public ThriftMetastoreConfig setKeystorePath(File keystorePath) + { + this.keystorePath = keystorePath; + return this; + } + + public String getKeystorePassword() + { + return keystorePassword; + } + + @Config("hive.metastore.thrift.client.ssl.key-password") + @ConfigDescription("Password for the key store") + public ThriftMetastoreConfig 
setKeystorePassword(String keystorePassword) + { + this.keystorePassword = keystorePassword; + return this; + } + + public File getTruststorePath() + { + return truststorePath; + } + + @Config("hive.metastore.thrift.client.ssl.trust-certificate") + @ConfigDescription("Path to the PEM trust store") + public ThriftMetastoreConfig setTruststorePath(File truststorePath) + { + this.truststorePath = truststorePath; + return this; + } + + @AssertTrue(message = "Trust store must be provided when TLS is enabled") + public boolean isTruststorePathValid() + { + return !tlsEnabled || getTruststorePath() != null; + } } diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/metastore/thrift/TestThriftMetastoreConfig.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/metastore/thrift/TestThriftMetastoreConfig.java index 6fdd6d645f05..c551e8b333bf 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/metastore/thrift/TestThriftMetastoreConfig.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/metastore/thrift/TestThriftMetastoreConfig.java @@ -18,6 +18,7 @@ import io.airlift.units.Duration; import org.testng.annotations.Test; +import java.io.File; import java.util.Map; import static io.airlift.configuration.testing.ConfigAssertions.assertFullMapping; @@ -39,6 +40,10 @@ public void testDefaults() .setMinBackoffDelay(new Duration(1, SECONDS)) .setMaxBackoffDelay(new Duration(1, SECONDS)) .setMaxRetryTime(new Duration(30, SECONDS)) + .setTlsEnabled(false) + .setKeystorePath(null) + .setKeystorePassword(null) + .setTruststorePath(null) .setImpersonationEnabled(false) .setDeleteFilesOnDrop(false) .setMaxWaitForTransactionLock(new Duration(10, MINUTES))); @@ -55,6 +60,10 @@ public void testExplicitPropertyMappings() .put("hive.metastore.thrift.client.min-backoff-delay", "2s") .put("hive.metastore.thrift.client.max-backoff-delay", "4s") .put("hive.metastore.thrift.client.max-retry-time", "60s") + .put("hive.metastore.thrift.client.ssl.enabled", 
"true") + .put("hive.metastore.thrift.client.ssl.key", "/tmp/keystore") + .put("hive.metastore.thrift.client.ssl.key-password", "keystore-password") + .put("hive.metastore.thrift.client.ssl.trust-certificate", "/tmp/truststore") .put("hive.metastore.thrift.impersonation.enabled", "true") .put("hive.metastore.thrift.delete-files-on-drop", "true") .put("hive.metastore.thrift.txn-lock-max-wait", "5m") @@ -68,6 +77,10 @@ public void testExplicitPropertyMappings() .setMinBackoffDelay(new Duration(2, SECONDS)) .setMaxBackoffDelay(new Duration(4, SECONDS)) .setMaxRetryTime(new Duration(60, SECONDS)) + .setTlsEnabled(true) + .setKeystorePath(new File("/tmp/keystore")) + .setKeystorePassword("keystore-password") + .setTruststorePath(new File("/tmp/truststore")) .setImpersonationEnabled(true) .setDeleteFilesOnDrop(true) .setMaxWaitForTransactionLock(new Duration(5, MINUTES)); From 237313b11f6e27356b48c014bb9ee53cb1e7af6c Mon Sep 17 00:00:00 2001 From: James Taylor Date: Thu, 9 Apr 2020 09:10:59 -0700 Subject: [PATCH 205/519] Add support for dereference pushdown to parquet reader --- .../parquet/ParquetPageSourceFactory.java | 172 ++++++++++-------- .../hive/TestHiveIntegrationSmokeTest.java | 22 ++- .../prestosql/parquet/ParquetTypeUtils.java | 9 +- .../parquet/reader/ParquetReader.java | 10 +- 4 files changed, 123 insertions(+), 90 deletions(-) diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/parquet/ParquetPageSourceFactory.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/parquet/ParquetPageSourceFactory.java index a43a2d8a2aa6..8815aad8e729 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/parquet/ParquetPageSourceFactory.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/parquet/ParquetPageSourceFactory.java @@ -16,8 +16,6 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Streams; -import 
io.prestosql.memory.context.AggregatedMemoryContext; import io.prestosql.parquet.Field; import io.prestosql.parquet.ParquetCorruptionException; import io.prestosql.parquet.ParquetDataSource; @@ -48,6 +46,7 @@ import org.apache.parquet.hadoop.metadata.FileMetaData; import org.apache.parquet.hadoop.metadata.ParquetMetadata; import org.apache.parquet.io.MessageColumnIO; +import org.apache.parquet.schema.GroupType; import org.apache.parquet.schema.MessageType; import org.joda.time.DateTimeZone; @@ -80,6 +79,7 @@ import static io.prestosql.plugin.hive.HiveSessionProperties.isFailOnCorruptedParquetStatistics; import static io.prestosql.plugin.hive.HiveSessionProperties.isUseParquetColumnNames; import static io.prestosql.plugin.hive.ReaderProjections.projectBaseColumns; +import static io.prestosql.plugin.hive.ReaderProjections.projectSufficientColumns; import static io.prestosql.plugin.hive.parquet.HdfsParquetDataSource.buildHdfsParquetDataSource; import static io.prestosql.plugin.hive.parquet.ParquetColumnIOConverter.constructField; import static io.prestosql.plugin.hive.util.HiveUtil.getDeserializerClassName; @@ -131,69 +131,34 @@ public Optional createPageSource( // Ignore predicates on partial columns for now. effectivePredicate = effectivePredicate.transform(column -> column.isBaseColumn() ? 
column : null); + boolean useParquetColumnNames = isUseParquetColumnNames(session); - Optional projectedReaderColumns = projectBaseColumns(columns); - - ConnectorPageSource parquetPageSource = createParquetPageSource( - hdfsEnvironment, - session.getUser(), - configuration, - path, - start, - length, - fileSize, - projectedReaderColumns - .map(ReaderProjections::getReaderColumns) - .orElse(columns), - isUseParquetColumnNames(session), - options - .withFailOnCorruptedStatistics(isFailOnCorruptedParquetStatistics(session)) - .withMaxReadBlockSize(getParquetMaxReadBlockSize(session)), - effectivePredicate, - stats); - - return Optional.of(new ReaderPageSourceWithProjections(parquetPageSource, projectedReaderColumns)); - } - - public static ParquetPageSource createParquetPageSource( - HdfsEnvironment hdfsEnvironment, - String user, - Configuration configuration, - Path path, - long start, - long length, - long fileSize, - List columns, - boolean useParquetColumnNames, - ParquetReaderOptions options, - TupleDomain effectivePredicate, - FileFormatDataSourceStats stats) - { - for (HiveColumnHandle column : columns) { - checkArgument(column.getColumnType() == REGULAR, "column type must be REGULAR: %s", column); - } - - AggregatedMemoryContext systemMemoryContext = newSimpleAggregatedMemoryContext(); - + MessageType fileSchema; + MessageType requestedSchema; + MessageColumnIO messageColumn; + ParquetReader parquetReader; ParquetDataSource dataSource = null; try { + String user = session.getUser(); FileSystem fileSystem = hdfsEnvironment.getFileSystem(user, path, configuration); FSDataInputStream inputStream = hdfsEnvironment.doAs(user, () -> fileSystem.open(path)); ParquetMetadata parquetMetadata = MetadataReader.readFooter(inputStream, path, fileSize); FileMetaData fileMetaData = parquetMetadata.getFileMetaData(); - MessageType fileSchema = fileMetaData.getSchema(); + fileSchema = fileMetaData.getSchema(); dataSource = buildHdfsParquetDataSource(inputStream, path, 
fileSize, stats, options); - List> parquetFields = columns.stream() - .map(column -> getParquetType(column, fileSchema, useParquetColumnNames)) - .map(Optional::ofNullable) - .collect(toImmutableList()); + Optional message = projectSufficientColumns(columns) + .map(ReaderProjections::getReaderColumns) + .orElse(columns).stream() + .filter(column -> column.getColumnType() == REGULAR) + .map(column -> getColumnType(column, fileSchema, useParquetColumnNames)) + .filter(Optional::isPresent) + .map(Optional::get) + .map(type -> new MessageType(fileSchema.getName(), type)) + .reduce(MessageType::union); - MessageType requestedSchema = new MessageType( - fileSchema.getName(), - parquetFields.stream() - .flatMap(Streams::stream) - .collect(toImmutableList())); + requestedSchema = message.orElse(new MessageType(fileSchema.getName(), ImmutableList.of())); + messageColumn = getColumnIO(fileSchema, requestedSchema); ImmutableList.Builder footerBlocks = ImmutableList.builder(); for (BlockMetaData block : parquetMetadata.getBlocks()) { @@ -206,37 +171,19 @@ public static ParquetPageSource createParquetPageSource( Map, RichColumnDescriptor> descriptorsByPath = getDescriptors(fileSchema, requestedSchema); TupleDomain parquetTupleDomain = getParquetTupleDomain(descriptorsByPath, effectivePredicate); Predicate parquetPredicate = buildPredicate(requestedSchema, parquetTupleDomain, descriptorsByPath); - ParquetDataSource finalDataSource = dataSource; ImmutableList.Builder blocks = ImmutableList.builder(); for (BlockMetaData block : footerBlocks.build()) { - if (predicateMatches(parquetPredicate, block, finalDataSource, descriptorsByPath, parquetTupleDomain, options.isFailOnCorruptedStatistics())) { + if (predicateMatches(parquetPredicate, block, dataSource, descriptorsByPath, parquetTupleDomain, options.isFailOnCorruptedStatistics())) { blocks.add(block); } } - MessageColumnIO messageColumnIO = getColumnIO(fileSchema, requestedSchema); - ParquetReader parquetReader = new 
ParquetReader( + parquetReader = new ParquetReader( Optional.ofNullable(fileMetaData.getCreatedBy()), - messageColumnIO, + messageColumn, blocks.build(), dataSource, - systemMemoryContext, - options); - - ImmutableList.Builder prestoTypes = ImmutableList.builder(); - ImmutableList.Builder> internalFields = ImmutableList.builder(); - for (int columnIndex = 0; columnIndex < columns.size(); columnIndex++) { - HiveColumnHandle column = columns.get(columnIndex); - Optional parquetField = parquetFields.get(columnIndex); - - prestoTypes.add(column.getType()); - - internalFields.add(parquetField.flatMap(field -> { - String columnName = useParquetColumnNames ? column.getName() : fileSchema.getFields().get(column.getBaseHiveColumnIndex()).getName(); - return constructField(column.getType(), lookupColumnByName(messageColumnIO, columnName)); - })); - } - - return new ParquetPageSource(parquetReader, prestoTypes.build(), internalFields.build()); + newSimpleAggregatedMemoryContext(), + options.withFailOnCorruptedStatistics(isFailOnCorruptedParquetStatistics(session)).withMaxReadBlockSize(getParquetMaxReadBlockSize(session))); } catch (Exception e) { try { @@ -262,6 +209,73 @@ public static ParquetPageSource createParquetPageSource( } throw new PrestoException(HIVE_CANNOT_OPEN_SPLIT, message, e); } + + Optional readerProjections = projectBaseColumns(columns); + List baseColumns = readerProjections.map(ReaderProjections::getReaderColumns).orElse(columns); + for (HiveColumnHandle column : baseColumns) { + checkArgument(column.getColumnType() == REGULAR, "column type must be REGULAR: %s", column); + } + + List> parquetFields = baseColumns.stream() + .map(column -> getParquetType(column, fileSchema, useParquetColumnNames)) + .map(Optional::ofNullable) + .collect(toImmutableList()); + ImmutableList.Builder prestoTypes = ImmutableList.builder(); + ImmutableList.Builder> internalFields = ImmutableList.builder(); + for (int columnIndex = 0; columnIndex < baseColumns.size(); 
columnIndex++) { + HiveColumnHandle column = baseColumns.get(columnIndex); + Optional parquetField = parquetFields.get(columnIndex); + + prestoTypes.add(column.getBaseType()); + + internalFields.add(parquetField.flatMap(field -> { + String columnName = useParquetColumnNames ? column.getBaseColumnName() : fileSchema.getFields().get(column.getBaseHiveColumnIndex()).getName(); + return constructField(column.getBaseType(), lookupColumnByName(messageColumn, columnName)); + })); + } + + ConnectorPageSource parquetPageSource = new ParquetPageSource(parquetReader, prestoTypes.build(), internalFields.build()); + return Optional.of(new ReaderPageSourceWithProjections(parquetPageSource, readerProjections)); + } + + public static Optional getParquetType(GroupType groupType, boolean useParquetColumnNames, HiveColumnHandle column) + { + if (useParquetColumnNames) { + return Optional.ofNullable(getParquetTypeByName(column.getBaseColumnName(), groupType)); + } + if (column.getBaseHiveColumnIndex() < groupType.getFieldCount()) { + return Optional.of(groupType.getType(column.getBaseHiveColumnIndex())); + } + + return Optional.empty(); + } + + public static Optional getColumnType(HiveColumnHandle column, MessageType messageType, boolean useParquetColumnNames) + { + Optional columnType = getParquetType(messageType, useParquetColumnNames, column); + if (!columnType.isPresent() || !column.getHiveColumnProjectionInfo().isPresent()) { + return columnType; + } + GroupType baseType = columnType.get().asGroupType(); + ImmutableList.Builder typeBuilder = ImmutableList.builder(); + org.apache.parquet.schema.Type parentType = baseType; + + for (String name : column.getHiveColumnProjectionInfo().get().getDereferenceNames()) { + org.apache.parquet.schema.Type childType = getParquetTypeByName(name, parentType.asGroupType()); + if (childType == null) { + return Optional.empty(); + } + typeBuilder.add(childType); + parentType = childType; + } + + List subfieldTypes = typeBuilder.build(); + 
org.apache.parquet.schema.Type type = subfieldTypes.get(subfieldTypes.size() - 1); + for (int i = subfieldTypes.size() - 2; i >= 0; --i) { + GroupType groupType = subfieldTypes.get(i).asGroupType(); + type = new GroupType(type.getRepetition(), groupType.getName(), ImmutableList.of(type)); + } + return Optional.of(new GroupType(baseType.getRepetition(), baseType.getName(), ImmutableList.of(type))); } public static TupleDomain getParquetTupleDomain(Map, RichColumnDescriptor> descriptorsByPath, TupleDomain effectivePredicate) @@ -289,7 +303,7 @@ public static TupleDomain getParquetTupleDomain(Map ranges = new HashMap<>(); for (int rowGroup = 0; rowGroup < blocks.size(); rowGroup++) { @@ -119,7 +119,7 @@ public ParquetReader( } } - chunkReaders = dataSource.planRead(ranges); + this.chunkReaders = dataSource.planRead(ranges); } @Override From 69f634481d7d0d461b84b0ecc6c09c15b31232c6 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Wed, 22 Apr 2020 08:35:24 +0200 Subject: [PATCH 206/519] Remove shared mutable state from test --- .../plugin/bigquery/TestReadRowsHelper.java | 20 ++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/presto-bigquery/src/test/java/io/prestosql/plugin/bigquery/TestReadRowsHelper.java b/presto-bigquery/src/test/java/io/prestosql/plugin/bigquery/TestReadRowsHelper.java index 6e603bef26dd..341443af1f78 100644 --- a/presto-bigquery/src/test/java/io/prestosql/plugin/bigquery/TestReadRowsHelper.java +++ b/presto-bigquery/src/test/java/io/prestosql/plugin/bigquery/TestReadRowsHelper.java @@ -28,18 +28,14 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.mock; -@Test public class TestReadRowsHelper { - // it is not used, we just need the reference - BigQueryStorageClient client = mock(BigQueryStorageClient.class); - private ReadRowsRequest.Builder request = ReadRowsRequest.newBuilder().setReadPosition( - StreamPosition.newBuilder().setStream( - 
Stream.newBuilder().setName("test"))); - @Test void testNoFailures() { + BigQueryStorageClient client = mock(BigQueryStorageClient.class); + ReadRowsRequest.Builder request = newRequest(); + MockResponsesBatch batch1 = new MockResponsesBatch(); batch1.addResponse(ReadRowsResponse.newBuilder().setRowCount(10).build()); batch1.addResponse(ReadRowsResponse.newBuilder().setRowCount(11).build()); @@ -56,6 +52,9 @@ void testNoFailures() @Test void testRetryOfSingleFailure() { + BigQueryStorageClient client = mock(BigQueryStorageClient.class); + ReadRowsRequest.Builder request = newRequest(); + MockResponsesBatch batch1 = new MockResponsesBatch(); batch1.addResponse(ReadRowsResponse.newBuilder().setRowCount(10).build()); batch1.addException(new StatusRuntimeException(Status.INTERNAL.withDescription( @@ -71,6 +70,13 @@ void testRetryOfSingleFailure() assertThat(responses.stream().mapToLong(ReadRowsResponse::getRowCount).sum()).isEqualTo(21); } + private static ReadRowsRequest.Builder newRequest() + { + return ReadRowsRequest.newBuilder().setReadPosition( + StreamPosition.newBuilder().setStream( + Stream.newBuilder().setName("test"))); + } + private static final class MockReadRowsHelper extends ReadRowsHelper { From 8014a527178e541c4bb73df7f571853e07013e17 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Wed, 22 Apr 2020 10:26:24 +0200 Subject: [PATCH 207/519] Disable flaky test temporarily --- .../io/prestosql/elasticsearch/BaseElasticsearchSmokeTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/BaseElasticsearchSmokeTest.java b/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/BaseElasticsearchSmokeTest.java index 450f89f89f98..a57a49d2e3ea 100644 --- a/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/BaseElasticsearchSmokeTest.java +++ b/presto-elasticsearch/src/test/java/io/prestosql/elasticsearch/BaseElasticsearchSmokeTest.java @@ -727,7 +727,7 @@ public 
void testAlias() "SELECT count(*) FROM orders"); } - @Test + @Test(enabled = false) // TODO (https://github.com/prestosql/presto/issues/2428) public void testMultiIndexAlias() throws IOException { From 8a6b17b2fdd91b45139136d2e172973fdc2832dd Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Wed, 22 Apr 2020 09:48:06 +0200 Subject: [PATCH 208/519] Test code cleanup --- .../bigquery/TestBigQueryConnectorModule.java | 2 - .../plugin/bigquery/TestBigQueryPlugin.java | 2 - .../plugin/bigquery/TestTypeConversions.java | 44 +++++++++---------- 3 files changed, 22 insertions(+), 26 deletions(-) diff --git a/presto-bigquery/src/test/java/io/prestosql/plugin/bigquery/TestBigQueryConnectorModule.java b/presto-bigquery/src/test/java/io/prestosql/plugin/bigquery/TestBigQueryConnectorModule.java index 239d647e94a5..e0c05618c709 100644 --- a/presto-bigquery/src/test/java/io/prestosql/plugin/bigquery/TestBigQueryConnectorModule.java +++ b/presto-bigquery/src/test/java/io/prestosql/plugin/bigquery/TestBigQueryConnectorModule.java @@ -22,12 +22,10 @@ import static org.assertj.core.api.Assertions.assertThat; -@Test public class TestBigQueryConnectorModule { @Test public void testConfigurationOnly() - throws Exception { String projectId = BigQueryConnectorModule.calculateBillingProjectId(Optional.of("pid"), Optional.empty()); assertThat(projectId).isEqualTo("pid"); diff --git a/presto-bigquery/src/test/java/io/prestosql/plugin/bigquery/TestBigQueryPlugin.java b/presto-bigquery/src/test/java/io/prestosql/plugin/bigquery/TestBigQueryPlugin.java index 16bc60636a40..42fa236da96a 100644 --- a/presto-bigquery/src/test/java/io/prestosql/plugin/bigquery/TestBigQueryPlugin.java +++ b/presto-bigquery/src/test/java/io/prestosql/plugin/bigquery/TestBigQueryPlugin.java @@ -19,14 +19,12 @@ import static com.google.common.collect.Iterables.getOnlyElement; import static io.airlift.testing.Assertions.assertInstanceOf; -@Test public class TestBigQueryPlugin { @Test public void testStartup() { 
BigQueryPlugin plugin = new BigQueryPlugin(); - ConnectorFactory factory = getOnlyElement(plugin.getConnectorFactories()); assertInstanceOf(factory, BigQueryConnectorFactory.class); } diff --git a/presto-bigquery/src/test/java/io/prestosql/plugin/bigquery/TestTypeConversions.java b/presto-bigquery/src/test/java/io/prestosql/plugin/bigquery/TestTypeConversions.java index 3828b19d558c..97801a797937 100644 --- a/presto-bigquery/src/test/java/io/prestosql/plugin/bigquery/TestTypeConversions.java +++ b/presto-bigquery/src/test/java/io/prestosql/plugin/bigquery/TestTypeConversions.java @@ -158,17 +158,6 @@ public void testConvertStringArrayField() assertThat(metadata.getType()).isEqualTo(new ArrayType(VarcharType.VARCHAR)); } - void assertSimpleFieldTypeConversion(LegacySQLTypeName from, Type to) - { - ColumnMetadata metadata = Conversions.toColumnMetadata(createField(from)); - assertThat(metadata.getType()).isEqualTo(to); - } - - private Field createField(LegacySQLTypeName type) - { - return Field.of("test", type); - } - @Test public void testConvertBooleanColumn() { @@ -278,17 +267,6 @@ public void testConvertStringArrayColumn() assertThat(metadata.getType()).isEqualTo(new ArrayType(VarcharType.VARCHAR)); } - void assertSimpleColumnTypeConversion(LegacySQLTypeName from, Type to) - { - ColumnMetadata metadata = createColumn(from).getColumnMetadata(); - assertThat(metadata.getType()).isEqualTo(to); - } - - private BigQueryColumnHandle createColumn(LegacySQLTypeName type) - { - return new BigQueryColumnHandle("test", BigQueryType.valueOf(type.name()), Field.Mode.NULLABLE, ImmutableList.of(), null); - } - @Test public void testBigQueryDateTimeToJavaConversion() { @@ -299,4 +277,26 @@ public void testBigQueryDateTimeToJavaConversion() assertThat(toLocalDateTime("2005-05-05T05:05:05.55555")).isEqualTo(LocalDateTime.of(2005, MAY, 5, 5, 5, 5, 555_550_000)); assertThat(toLocalDateTime("2006-06-06T06:06:06.666666")).isEqualTo(LocalDateTime.of(2006, JUNE, 6, 6, 6, 6, 
666_666_000)); } + + private static void assertSimpleFieldTypeConversion(LegacySQLTypeName from, Type to) + { + ColumnMetadata metadata = Conversions.toColumnMetadata(createField(from)); + assertThat(metadata.getType()).isEqualTo(to); + } + + private static Field createField(LegacySQLTypeName type) + { + return Field.of("test", type); + } + + private static void assertSimpleColumnTypeConversion(LegacySQLTypeName from, Type to) + { + ColumnMetadata metadata = createColumn(from).getColumnMetadata(); + assertThat(metadata.getType()).isEqualTo(to); + } + + private static BigQueryColumnHandle createColumn(LegacySQLTypeName type) + { + return new BigQueryColumnHandle("test", BigQueryType.valueOf(type.name()), Field.Mode.NULLABLE, ImmutableList.of(), null); + } } From 6ffaf3411fc2ba12e2708a813f569df32b0f9200 Mon Sep 17 00:00:00 2001 From: David Rabinowitz Date: Tue, 7 Apr 2020 13:27:56 -0700 Subject: [PATCH 209/519] Add support for filter pushdown to BigQuery --- .../plugin/bigquery/BigQueryColumnHandle.java | 24 ++- .../bigquery/BigQueryFilterQueryBuilder.java | 164 ++++++++++++++++++ .../plugin/bigquery/BigQueryMetadata.java | 50 +++++- .../plugin/bigquery/BigQuerySplitManager.java | 4 +- .../plugin/bigquery/BigQueryTableHandle.java | 2 +- .../plugin/bigquery/BigQueryType.java | 118 +++++++++++-- .../plugin/bigquery/BigQueryUtil.java | 11 +- .../plugin/bigquery/Conversions.java | 3 +- .../plugin/bigquery/ReadSessionCreator.java | 8 +- .../plugin/bigquery/TestBigQueryType.java | 74 ++++++++ 10 files changed, 434 insertions(+), 24 deletions(-) create mode 100644 presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryFilterQueryBuilder.java create mode 100644 presto-bigquery/src/test/java/io/prestosql/plugin/bigquery/TestBigQueryType.java diff --git a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryColumnHandle.java b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryColumnHandle.java index 4e6f4578b645..697581c38195 100644 
--- a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryColumnHandle.java +++ b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryColumnHandle.java @@ -16,6 +16,7 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.cloud.bigquery.Field; +import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import io.prestosql.spi.connector.ColumnHandle; import io.prestosql.spi.connector.ColumnMetadata; @@ -38,6 +39,7 @@ public class BigQueryColumnHandle private final Field.Mode mode; private final List subColumns; private final String description; + private final boolean hidden; @JsonCreator public BigQueryColumnHandle( @@ -45,13 +47,26 @@ public BigQueryColumnHandle( @JsonProperty("bigQueryType") BigQueryType bigQueryType, @JsonProperty("mode") Field.Mode mode, @JsonProperty("subColumns") List subColumns, - @JsonProperty("description") String description) + @JsonProperty("description") String description, + @JsonProperty("hidden") boolean hidden) { this.name = requireNonNull(name, "column name cannot be null"); this.bigQueryType = requireNonNull(bigQueryType, () -> format("column type cannot be null for column [%s]", name)); this.mode = requireNonNull(mode, "Field mode cannot be null"); this.subColumns = ImmutableList.copyOf(requireNonNull(subColumns, "subColumns is null")); this.description = description; + this.hidden = hidden; + } + + @VisibleForTesting + BigQueryColumnHandle( + String name, + BigQueryType bigQueryType, + Field.Mode mode, + List subColumns, + String description) + { + this(name, bigQueryType, mode, subColumns, description, false); } @JsonProperty @@ -92,6 +107,12 @@ public String description() return description; } + @JsonProperty + public boolean isHidden() + { + return hidden; + } + public ColumnMetadata getColumnMetadata() { return ColumnMetadata.builder() @@ -99,6 +120,7 @@ public ColumnMetadata 
getColumnMetadata() .setType(getPrestoType()) .setComment(Optional.ofNullable(description)) .setNullable(mode == Field.Mode.NULLABLE) + .setHidden(hidden) .build(); } diff --git a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryFilterQueryBuilder.java b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryFilterQueryBuilder.java new file mode 100644 index 000000000000..07f1c9b1d8c1 --- /dev/null +++ b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryFilterQueryBuilder.java @@ -0,0 +1,164 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.plugin.bigquery; + +import com.google.common.collect.ImmutableList; +import io.prestosql.spi.connector.ColumnHandle; +import io.prestosql.spi.predicate.Domain; +import io.prestosql.spi.predicate.Range; +import io.prestosql.spi.predicate.TupleDomain; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +import static com.google.common.base.Preconditions.checkState; +import static com.google.common.collect.Iterables.getOnlyElement; +import static java.util.stream.Collectors.joining; +import static java.util.stream.Collectors.toList; + +class BigQueryFilterQueryBuilder +{ + private static final String QUOTE = "`"; + private static final String ESCAPED_QUOTE = "``"; + private final TupleDomain tupleDomain; + + public static Optional buildFilter(TupleDomain tupleDomain) + { + return new BigQueryFilterQueryBuilder(tupleDomain).buildFilter(); + } + + private BigQueryFilterQueryBuilder(TupleDomain tupleDomain) + { + this.tupleDomain = tupleDomain; + } + + private Optional buildFilter() + { + Optional> domains = tupleDomain.getDomains(); + return domains.map(this::toConjuncts) + .map(this::concat); + } + + private String concat(List clauses) + { + return clauses.isEmpty() ? 
null : clauses.stream().collect(joining(" AND ")); + } + + private List toConjuncts(Map domains) + { + List columns = domains.keySet().stream().map(BigQueryColumnHandle.class::cast).collect(toList()); + return toConjuncts(columns); + } + + private List toConjuncts(List columns) + { + if (tupleDomain.isNone()) { + return ImmutableList.of("FALSE"); + } + ImmutableList.Builder clauses = ImmutableList.builder(); + for (BigQueryColumnHandle column : columns) { + Domain domain = tupleDomain.getDomains().get().get(column); + if (domain != null) { + clauses.add(toPredicate(column.getName(), domain, column)); + } + } + return clauses.build(); + } + + private String toPredicate(String columnName, Domain domain, BigQueryColumnHandle column) + { + if (domain.getValues().isNone()) { + return domain.isNullAllowed() ? quote(columnName) + " IS NULL" : "FALSE"; + } + + if (domain.getValues().isAll()) { + return domain.isNullAllowed() ? "TRUE" : quote(columnName) + " IS NOT NULL"; + } + + List disjuncts = new ArrayList<>(); + List singleValues = new ArrayList<>(); + for (Range range : domain.getValues().getRanges().getOrderedRanges()) { + checkState(!range.isAll()); // Already checked + if (range.isSingleValue()) { + singleValues.add(range.getLow().getValue()); + } + else { + List rangeConjuncts = new ArrayList<>(); + if (!range.getLow().isLowerUnbounded()) { + switch (range.getLow().getBound()) { + case ABOVE: + rangeConjuncts.add(toPredicate(columnName, ">", range.getLow().getValue(), column)); + break; + case EXACTLY: + rangeConjuncts.add(toPredicate(columnName, ">=", range.getLow().getValue(), column)); + break; + case BELOW: + throw new IllegalArgumentException("Low marker should never use BELOW bound"); + default: + throw new AssertionError("Unhandled bound: " + range.getLow().getBound()); + } + } + if (!range.getHigh().isUpperUnbounded()) { + switch (range.getHigh().getBound()) { + case ABOVE: + throw new IllegalArgumentException("High marker should never use ABOVE bound"); + 
case EXACTLY: + rangeConjuncts.add(toPredicate(columnName, "<=", range.getHigh().getValue(), column)); + break; + case BELOW: + rangeConjuncts.add(toPredicate(columnName, "<", range.getHigh().getValue(), column)); + break; + default: + throw new AssertionError("Unhandled bound: " + range.getHigh().getBound()); + } + } + // If rangeConjuncts is null, then the range was ALL, which should already have been checked for + checkState(!rangeConjuncts.isEmpty()); + disjuncts.add("(" + concat(rangeConjuncts) + ")"); + } + } + + // Add back all of the possible single values either as an equality or an IN predicate + if (singleValues.size() == 1) { + disjuncts.add(toPredicate(columnName, "=", getOnlyElement(singleValues), column)); + } + else if (singleValues.size() > 1) { + String values = singleValues.stream() + .map(column.getBigQueryType()::convertToString) + .collect(joining(",")); + disjuncts.add(quote(columnName) + " IN (" + values + ")"); + } + + // Add nullability disjuncts + checkState(!disjuncts.isEmpty()); + if (domain.isNullAllowed()) { + disjuncts.add(quote(columnName) + " IS NULL"); + } + + return "(" + String.join(" OR ", disjuncts) + ")"; + } + + private String toPredicate(String columnName, String operator, Object value, BigQueryColumnHandle column) + { + String valueAsString = column.getBigQueryType().convertToString(value); + return quote(columnName) + " " + operator + " " + valueAsString; + } + + private String quote(String name) + { + return QUOTE + name.replace(QUOTE, ESCAPED_QUOTE) + QUOTE; + } +} diff --git a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryMetadata.java b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryMetadata.java index 17e6b750ac82..3c9197f14697 100644 --- a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryMetadata.java +++ b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryMetadata.java @@ -33,6 +33,8 @@ import 
io.prestosql.spi.connector.ConnectorTableHandle; import io.prestosql.spi.connector.ConnectorTableMetadata; import io.prestosql.spi.connector.ConnectorTableProperties; +import io.prestosql.spi.connector.Constraint; +import io.prestosql.spi.connector.ConstraintApplicationResult; import io.prestosql.spi.connector.LimitApplicationResult; import io.prestosql.spi.connector.NotFoundException; import io.prestosql.spi.connector.ProjectionApplicationResult; @@ -41,6 +43,7 @@ import io.prestosql.spi.connector.SchemaTablePrefix; import io.prestosql.spi.connector.TableNotFoundException; import io.prestosql.spi.expression.ConnectorExpression; +import io.prestosql.spi.predicate.TupleDomain; import javax.inject.Inject; @@ -53,6 +56,7 @@ import static com.google.cloud.bigquery.TableDefinition.Type.VIEW; import static com.google.common.collect.ImmutableList.toImmutableList; import static java.util.Objects.requireNonNull; +import static java.util.function.Function.identity; import static java.util.stream.Collectors.toMap; public class BigQueryMetadata @@ -164,11 +168,26 @@ public ConnectorTableMetadata getTableMetadata(ConnectorSession session, Connect public Map getColumnHandles(ConnectorSession session, ConnectorTableHandle tableHandle) { log.debug("getColumnHandles(session=%s, tableHandle=%s)", session, tableHandle); - TableInfo table = bigQueryClient.getTable(((BigQueryTableHandle) tableHandle).getTableId()); - Schema schema = table.getDefinition().getSchema(); - return schema == null ? 
- ImmutableMap.of() : - schema.getFields().stream().collect(toMap(Field::getName, Conversions::toColumnHandle)); + List columnHandles = getTableColumns(((BigQueryTableHandle) tableHandle).getTableId()); + return columnHandles.stream().collect(toMap(BigQueryColumnHandle::getName, identity())); + } + + List getTableColumns(TableId tableId) + { + return getTableColumns(bigQueryClient.getTable(tableId)); + } + + private List getTableColumns(TableInfo table) + { + ImmutableList.Builder columns = ImmutableList.builder(); + TableDefinition tableDefinition = table.getDefinition(); + Schema schema = tableDefinition.getSchema(); + if (schema != null) { + for (Field field : schema.getFields()) { + columns.add(Conversions.toColumnHandle(field)); + } + } + return columns.build(); } @Override @@ -267,4 +286,25 @@ public Optional> applyProjecti return Optional.of(new ProjectionApplicationResult<>(bigQueryTableHandle, projections, assignmentList.build())); } + + @Override + public Optional> applyFilter( + ConnectorSession session, + ConnectorTableHandle handle, + Constraint constraint) + { + log.debug("applyFilter(session=%s, handle=%s, summary=%s, predicate=%s, columns=%s)", + session, handle, constraint.getSummary(), constraint.predicate(), constraint.getColumns()); + BigQueryTableHandle bigQueryTableHandle = (BigQueryTableHandle) handle; + + TupleDomain oldDomain = bigQueryTableHandle.getConstraint(); + TupleDomain newDomain = oldDomain.intersect(constraint.getSummary()); + if (oldDomain.equals(newDomain)) { + return Optional.empty(); + } + + BigQueryTableHandle updatedHandle = bigQueryTableHandle.withConstraint(newDomain); + + return Optional.of(new ConstraintApplicationResult<>(updatedHandle, constraint.getSummary())); + } } diff --git a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQuerySplitManager.java b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQuerySplitManager.java index c1f173b46024..b2354a68027a 100644 --- 
a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQuerySplitManager.java +++ b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQuerySplitManager.java @@ -28,6 +28,7 @@ import io.prestosql.spi.connector.ConnectorTableHandle; import io.prestosql.spi.connector.ConnectorTransactionHandle; import io.prestosql.spi.connector.FixedSplitSource; +import io.prestosql.spi.predicate.TupleDomain; import javax.inject.Inject; @@ -81,7 +82,8 @@ public ConnectorSplitSource getSplits( TableId tableId = bigQueryTableHandle.getTableId(); int actualParallelism = parallelism.orElse(nodeManager.getRequiredWorkerNodes().size()); - Optional filter = Optional.empty(); + TupleDomain constraint = bigQueryTableHandle.getConstraint(); + Optional filter = BigQueryFilterQueryBuilder.buildFilter(constraint); List splits = emptyProjectionIsRequired(bigQueryTableHandle.getProjectedColumns()) ? createEmptyProjection(tableId, actualParallelism, filter) : readFromBigQuery(tableId, bigQueryTableHandle.getProjectedColumns(), actualParallelism, filter); diff --git a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryTableHandle.java b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryTableHandle.java index b41c2cb27068..612ad64be57d 100644 --- a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryTableHandle.java +++ b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryTableHandle.java @@ -63,7 +63,7 @@ public static BigQueryTableHandle from(TableInfo tableInfo) { TableId tableId = tableInfo.getTableId(); String type = tableInfo.getDefinition().getType().toString(); - return new BigQueryTableHandle(tableId.getProject(), tableId.getDataset(), tableId.getTable(), type, TupleDomain.none(), Optional.empty(), OptionalLong.empty()); + return new BigQueryTableHandle(tableId.getProject(), tableId.getDataset(), tableId.getTable(), type, TupleDomain.all(), Optional.empty(), OptionalLong.empty()); } @JsonProperty diff --git 
a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryType.java b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryType.java index 8b9ecb96a1a8..031dee70737c 100644 --- a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryType.java +++ b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryType.java @@ -15,11 +15,14 @@ import com.google.cloud.bigquery.Field; import com.google.common.collect.ImmutableMap; +import io.airlift.slice.Slice; import io.prestosql.spi.type.ArrayType; import io.prestosql.spi.type.BigintType; import io.prestosql.spi.type.BooleanType; +import io.prestosql.spi.type.DateTimeEncoding; import io.prestosql.spi.type.DateType; import io.prestosql.spi.type.DecimalType; +import io.prestosql.spi.type.Decimals; import io.prestosql.spi.type.DoubleType; import io.prestosql.spi.type.RowType; import io.prestosql.spi.type.TimeWithTimeZoneType; @@ -29,9 +32,15 @@ import io.prestosql.spi.type.VarbinaryType; import io.prestosql.spi.type.VarcharType; +import java.time.Instant; +import java.time.LocalDate; import java.time.LocalDateTime; +import java.time.LocalTime; import java.time.Month; +import java.time.ZoneId; +import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; +import java.util.Base64; import java.util.List; import java.util.Map; @@ -40,6 +49,8 @@ import static io.prestosql.plugin.bigquery.BigQueryMetadata.NUMERIC_DATA_TYPE_SCALE; import static io.prestosql.spi.type.VarcharType.createUnboundedVarcharType; import static java.lang.Integer.parseInt; +import static java.lang.String.format; +import static java.nio.charset.StandardCharsets.UTF_8; import static java.time.Month.APRIL; import static java.time.Month.AUGUST; import static java.time.Month.DECEMBER; @@ -57,18 +68,18 @@ public enum BigQueryType { - BOOLEAN(BooleanType.BOOLEAN), - BYTES(VarbinaryType.VARBINARY), - DATE(DateType.DATE), - DATETIME(TimestampType.TIMESTAMP), - FLOAT(DoubleType.DOUBLE), - 
GEOGRAPHY(VarcharType.VARCHAR), - INTEGER(BigintType.BIGINT), - NUMERIC(DecimalType.createDecimalType(NUMERIC_DATA_TYPE_PRECISION, NUMERIC_DATA_TYPE_SCALE)), - RECORD(null), - STRING(createUnboundedVarcharType()), - TIME(TimeWithTimeZoneType.TIME_WITH_TIME_ZONE), - TIMESTAMP(TimestampWithTimeZoneType.TIMESTAMP_WITH_TIME_ZONE); + BOOLEAN(BooleanType.BOOLEAN, BigQueryType::simpleToStringConverter), + BYTES(VarbinaryType.VARBINARY, BigQueryType::bytesToStringConverter), + DATE(DateType.DATE, BigQueryType::dateToStringConverter), + DATETIME(TimestampType.TIMESTAMP, BigQueryType::datetimeToStringConverter), + FLOAT(DoubleType.DOUBLE, BigQueryType::simpleToStringConverter), + GEOGRAPHY(VarcharType.VARCHAR, BigQueryType::stringToStringConverter), + INTEGER(BigintType.BIGINT, BigQueryType::simpleToStringConverter), + NUMERIC(DecimalType.createDecimalType(NUMERIC_DATA_TYPE_PRECISION, NUMERIC_DATA_TYPE_SCALE), BigQueryType::numericToStringConverter), + RECORD(null, BigQueryType::simpleToStringConverter), + STRING(createUnboundedVarcharType(), BigQueryType::stringToStringConverter), + TIME(TimeWithTimeZoneType.TIME_WITH_TIME_ZONE, BigQueryType::timeToStringConverter), + TIMESTAMP(TimestampWithTimeZoneType.TIMESTAMP_WITH_TIME_ZONE, BigQueryType::timestampToStringConverter); private static final int[] NANO_FACTOR = { -1, // 0, no need to multiply @@ -96,11 +107,15 @@ public enum BigQueryType .put("11", NOVEMBER) .put("12", DECEMBER) .build(); + private static final DateTimeFormatter DATETIME_FORMATTER = DateTimeFormatter.ofPattern("''yyyy-MM-dd HH:mm:ss.SSS''"); + private final Type nativeType; + private final ToStringConverter toStringConverter; - BigQueryType(Type nativeType) + BigQueryType(Type nativeType, ToStringConverter toStringConverter) { this.nativeType = nativeType; + this.toStringConverter = toStringConverter; } static RowType.Field toRawTypeField(Map.Entry entry) @@ -133,6 +148,77 @@ static long toPrestoTimestamp(String datetime) return 
toLocalDateTime(datetime).atZone(systemDefault()).toInstant().toEpochMilli(); } + static String simpleToStringConverter(Object value) + { + return String.valueOf(value); + } + + static String dateToStringConverter(Object value) + { + LocalDate date = LocalDate.ofEpochDay(((Long) value).longValue()); + return quote(date.toString()); + } + + static String datetimeToStringConverter(Object value) + { + return formatTimestamp(((Long) value).longValue(), systemDefault()); + } + + static String timeToStringConverter(Object value) + { + long longValue = ((Long) value).longValue(); + long millisUtc = DateTimeEncoding.unpackMillisUtc(longValue); + ZoneId zoneId = ZoneId.of(DateTimeEncoding.unpackZoneKey(longValue).getId()); + LocalTime time = toZonedDateTime(millisUtc, zoneId).toLocalTime(); + return quote(time.toString()); + } + + static String timestampToStringConverter(Object value) + { + long longValue = ((Long) value).longValue(); + long millisUtc = DateTimeEncoding.unpackMillisUtc(longValue); + ZoneId zoneId = ZoneId.of(DateTimeEncoding.unpackZoneKey(longValue).getId()); + return formatTimestamp(millisUtc, zoneId); + } + + private static String formatTimestamp(long millisUtc, ZoneId zoneId) + { + return DATETIME_FORMATTER.format(toZonedDateTime(millisUtc, zoneId)); + } + + private static ZonedDateTime toZonedDateTime(long millisUtc, ZoneId zoneId) + { + return ZonedDateTime.ofInstant(Instant.ofEpochMilli(millisUtc), zoneId); + } + + static String stringToStringConverter(Object value) + { + Slice slice = (Slice) value; + return quote(new String(slice.getBytes(), UTF_8)); + } + + static String numericToStringConverter(Object value) + { + Slice slice = (Slice) value; + return Decimals.toString(slice, NUMERIC_DATA_TYPE_SCALE); + } + + static String bytesToStringConverter(Object value) + { + Slice slice = (Slice) value; + return format("FROM_BASE64('%s')", Base64.getEncoder().encodeToString(slice.getBytes())); + } + + private static String quote(String value) + { + return 
"'" + value + "'"; + } + + String convertToString(Object value) + { + return toStringConverter.convertToString(value); + } + public Type getNativeType(BigQueryType.Adaptor typeAdaptor) { switch (this) { @@ -161,4 +247,10 @@ default Type getPrestoType() return getMode() == Field.Mode.REPEATED ? new ArrayType(rawType) : rawType; } } + + @FunctionalInterface + interface ToStringConverter + { + String convertToString(Object value); + } } diff --git a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryUtil.java b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryUtil.java index 0bb6f4fad5cd..4006a74c800d 100644 --- a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryUtil.java +++ b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryUtil.java @@ -19,16 +19,20 @@ import io.grpc.Status; import io.grpc.StatusRuntimeException; +import java.util.Set; + import static com.google.cloud.http.BaseHttpServiceException.UNKNOWN_CODE; import static com.google.common.base.Throwables.getCausalChain; class BigQueryUtil { - static final ImmutableSet INTERNAL_ERROR_MESSAGES = ImmutableSet.of( + private static final Set INTERNAL_ERROR_MESSAGES = ImmutableSet.of( "HTTP/2 error code: INTERNAL_ERROR", "Connection closed with unknown cause", "Received unexpected EOS on DATA frame from server"); + private static final Set INVALID_COLUMN_NAMES = ImmutableSet.of("_partitiondate", "_PARTITIONDATE", "_partitiontime", "_PARTITIONTIME"); + private BigQueryUtil() {} static boolean isRetryable(Throwable cause) @@ -51,4 +55,9 @@ static BigQueryException convertToBigQueryException(BigQueryError error) { return new BigQueryException(UNKNOWN_CODE, error.getMessage(), error); } + + public static boolean validColumnName(String columnName) + { + return !INVALID_COLUMN_NAMES.contains(columnName); + } } diff --git a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/Conversions.java 
b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/Conversions.java index 3d8d2c00ff85..25f676f39edb 100644 --- a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/Conversions.java +++ b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/Conversions.java @@ -43,7 +43,8 @@ static BigQueryColumnHandle toColumnHandle(Field field) BigQueryType.valueOf(field.getType().name()), getMode(field), subColumns, - field.getDescription()); + field.getDescription(), + false); } static ColumnMetadata toColumnMetadata(Field field) diff --git a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/ReadSessionCreator.java b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/ReadSessionCreator.java index 301429710dcd..31cc8bbf8f9f 100644 --- a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/ReadSessionCreator.java +++ b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/ReadSessionCreator.java @@ -32,6 +32,7 @@ import io.airlift.log.Logger; import io.prestosql.spi.PrestoException; +import java.util.List; import java.util.Optional; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; @@ -41,6 +42,7 @@ import static io.prestosql.plugin.bigquery.BigQueryUtil.convertToBigQueryException; import static io.prestosql.spi.StandardErrorCode.NOT_SUPPORTED; import static java.lang.String.format; +import static java.util.stream.Collectors.toList; // A helper class, also handles view materialization public class ReadSessionCreator @@ -73,9 +75,13 @@ public Storage.ReadSession create(TableId table, ImmutableList selectedF TableInfo actualTable = getActualTable(tableDetails, selectedFields, new String[] {}); + List filteredSelectedFields = selectedFields.stream() + .filter(BigQueryUtil::validColumnName) + .collect(toList()); + try (BigQueryStorageClient bigQueryStorageClient = bigQueryStorageClientFactory.createBigQueryStorageClient()) { ReadOptions.TableReadOptions.Builder readOptions = 
ReadOptions.TableReadOptions.newBuilder() - .addAllSelectedFields(selectedFields); + .addAllSelectedFields(filteredSelectedFields); filter.ifPresent(readOptions::setRowRestriction); TableReferenceProto.TableReference tableReference = toTableReference(actualTable.getTableId()); diff --git a/presto-bigquery/src/test/java/io/prestosql/plugin/bigquery/TestBigQueryType.java b/presto-bigquery/src/test/java/io/prestosql/plugin/bigquery/TestBigQueryType.java new file mode 100644 index 000000000000..863f21a9b9bc --- /dev/null +++ b/presto-bigquery/src/test/java/io/prestosql/plugin/bigquery/TestBigQueryType.java @@ -0,0 +1,74 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.plugin.bigquery; + +import org.testng.annotations.Test; + +import static io.airlift.slice.Slices.utf8Slice; +import static io.airlift.slice.Slices.wrappedBuffer; +import static io.prestosql.spi.type.Decimals.encodeScaledValue; +import static java.math.BigDecimal.ONE; +import static org.assertj.core.api.Assertions.assertThat; + +@Test +public class TestBigQueryType +{ + @Test + public void testTimeToStringConverter() + { + assertThat(BigQueryType.timeToStringConverter( + Long.valueOf(303497217825L))) + .isEqualTo("'12:34:56'"); + } + + @Test + public void testTimestampToStringConverter() + { + assertThat(BigQueryType.timestampToStringConverter( + Long.valueOf(6494958783649569L))) + .isEqualTo("'2020-03-31 12:34:56.789'"); + } + + @Test + public void testDateToStringConverter() + { + assertThat(BigQueryType.dateToStringConverter( + Long.valueOf(18352))) + .isEqualTo("'2020-03-31'"); + } + + @Test + public void testStringToStringConverter() + { + assertThat(BigQueryType.stringToStringConverter( + utf8Slice("test"))) + .isEqualTo("'test'"); + } + + @Test + public void testNumericToStringConverter() + { + assertThat(BigQueryType.numericToStringConverter( + encodeScaledValue(ONE, 9))) + .isEqualTo("1.000000000"); + } + + @Test + public void testBytesToStringConverter() + { + assertThat(BigQueryType.bytesToStringConverter( + wrappedBuffer((byte) 1, (byte) 2, (byte) 3, (byte) 4))) + .isEqualTo("FROM_BASE64('AQIDBA==')"); + } +} From faff5e5be4296a0d03b2d2f9c6cb558cb9cb75f5 Mon Sep 17 00:00:00 2001 From: David Rabinowitz Date: Thu, 9 Apr 2020 12:42:06 -0700 Subject: [PATCH 210/519] Improve readRows performance This change is a port from the spark connector - instead of reading the entire stream as Avro and them converting it to Presto pages, the read from the server is done in a lazy fashion using the new Iterator. 
We have seen performance improvement especially when reading part of the data (for example when using LIMIT) --- .../plugin/bigquery/ReadRowsHelper.java | 84 +++++++++++++------ 1 file changed, 57 insertions(+), 27 deletions(-) diff --git a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/ReadRowsHelper.java b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/ReadRowsHelper.java index 4322ac4a4dd7..32486fc1d5c8 100644 --- a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/ReadRowsHelper.java +++ b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/ReadRowsHelper.java @@ -14,12 +14,12 @@ package io.prestosql.plugin.bigquery; import com.google.cloud.bigquery.storage.v1beta1.BigQueryStorageClient; +import com.google.cloud.bigquery.storage.v1beta1.Storage; import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsRequest; import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsResponse; -import java.util.ArrayList; import java.util.Iterator; -import java.util.List; +import java.util.NoSuchElementException; import static java.util.Objects.requireNonNull; @@ -38,32 +38,8 @@ public ReadRowsHelper(BigQueryStorageClient client, ReadRowsRequest.Builder requ public Iterator readRows() { - List readRowResponses = new ArrayList<>(); - long readRowsCount = 0; - int retries = 0; Iterator serverResponses = fetchResponses(request); - while (serverResponses.hasNext()) { - try { - ReadRowsResponse response = serverResponses.next(); - readRowsCount += response.getRowCount(); - readRowResponses.add(response); - } - catch (RuntimeException e) { - // if relevant, retry the read, from the last read position - if (BigQueryUtil.isRetryable(e) && retries < maxReadRowsRetries) { - request.getReadPositionBuilder().setOffset(readRowsCount); - serverResponses = fetchResponses(request); - retries++; - } - else { - // to safely close the client - try (BigQueryStorageClient ignored = client) { - throw e; - } - } - } - } - return 
readRowResponses.iterator(); + return new ReadRowsIterator(this, request.getReadPositionBuilder(), serverResponses); } // In order to enable testing @@ -73,4 +49,58 @@ protected Iterator fetchResponses(ReadRowsRequest.Builder read .call(readRowsRequest.build()) .iterator(); } + + // Ported from https://github.com/GoogleCloudDataproc/spark-bigquery-connector/pull/150 + private static class ReadRowsIterator + implements Iterator + { + private final ReadRowsHelper helper; + private final Storage.StreamPosition.Builder readPosition; + private Iterator serverResponses; + private long readRowsCount; + private int retries; + + public ReadRowsIterator( + ReadRowsHelper helper, + Storage.StreamPosition.Builder readPosition, + Iterator serverResponses) + { + this.helper = helper; + this.readPosition = readPosition; + this.serverResponses = serverResponses; + } + + @Override + public boolean hasNext() + { + return serverResponses.hasNext(); + } + + @Override + public ReadRowsResponse next() + { + do { + try { + ReadRowsResponse response = serverResponses.next(); + readRowsCount += response.getRowCount(); + return response; + } + catch (Exception e) { + // if relevant, retry the read, from the last read position + if (BigQueryUtil.isRetryable(e) && retries < helper.maxReadRowsRetries) { + serverResponses = helper.fetchResponses(helper.request.setReadPosition( + readPosition.setOffset(readRowsCount))); + retries++; + } + else { + helper.client.close(); + throw e; + } + } + } + while (serverResponses.hasNext()); + + throw new NoSuchElementException("No more server responses"); + } + } } From 7d7ad695209281447f7b7849f85c1b50d362daa4 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Tue, 21 Apr 2020 21:49:42 +0200 Subject: [PATCH 211/519] Fix property description --- .../main/java/io/prestosql/dispatcher/DispatcherConfig.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/dispatcher/DispatcherConfig.java 
b/presto-main/src/main/java/io/prestosql/dispatcher/DispatcherConfig.java index aa3878ae978f..3ee8b7a951fa 100644 --- a/presto-main/src/main/java/io/prestosql/dispatcher/DispatcherConfig.java +++ b/presto-main/src/main/java/io/prestosql/dispatcher/DispatcherConfig.java @@ -18,7 +18,7 @@ import javax.validation.constraints.NotNull; -import static com.google.common.net.HttpHeaders.X_FORWARDED_PROTO; +import static com.google.common.net.HttpHeaders.X_FORWARDED_FOR; public class DispatcherConfig { @@ -40,7 +40,7 @@ public HeaderSupport getForwardedHeaderSupport() } @Config("dispatcher.forwarded-header") - @ConfigDescription("Support for " + X_FORWARDED_PROTO + " header") + @ConfigDescription("Support for " + X_FORWARDED_FOR + " header") public DispatcherConfig setForwardedHeaderSupport(HeaderSupport forwardedHeaderSupport) { this.forwardedHeaderSupport = forwardedHeaderSupport; From 5b139126dac7b1741c32cd9eba6d22ea941885a5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20=C5=9Alizak?= Date: Wed, 22 Apr 2020 19:39:41 +0200 Subject: [PATCH 212/519] Add timestamp with UTC time zone support to Parquet column reader --- .../parquet/reader/TimestampColumnReader.java | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/presto-parquet/src/main/java/io/prestosql/parquet/reader/TimestampColumnReader.java b/presto-parquet/src/main/java/io/prestosql/parquet/reader/TimestampColumnReader.java index 4149f849d5a9..8887daa7c8f6 100644 --- a/presto-parquet/src/main/java/io/prestosql/parquet/reader/TimestampColumnReader.java +++ b/presto-parquet/src/main/java/io/prestosql/parquet/reader/TimestampColumnReader.java @@ -15,10 +15,12 @@ import io.prestosql.parquet.RichColumnDescriptor; import io.prestosql.spi.block.BlockBuilder; +import io.prestosql.spi.type.TimestampWithTimeZoneType; import io.prestosql.spi.type.Type; -import org.apache.parquet.io.api.Binary; import static io.prestosql.parquet.ParquetTimestampUtils.getTimestampMillis; +import static 
io.prestosql.spi.type.DateTimeEncoding.packDateTimeWithZone; +import static io.prestosql.spi.type.TimeZoneKey.UTC_KEY; public class TimestampColumnReader extends PrimitiveColumnReader @@ -32,8 +34,13 @@ public TimestampColumnReader(RichColumnDescriptor descriptor) protected void readValue(BlockBuilder blockBuilder, Type type) { if (definitionLevel == columnDescriptor.getMaxDefinitionLevel()) { - Binary binary = valuesReader.readBytes(); - type.writeLong(blockBuilder, getTimestampMillis(binary)); + long utcMillis = getTimestampMillis(valuesReader.readBytes()); + if (type instanceof TimestampWithTimeZoneType) { + type.writeLong(blockBuilder, packDateTimeWithZone(utcMillis, UTC_KEY)); + } + else { + type.writeLong(blockBuilder, utcMillis); + } } else if (isValueNull()) { blockBuilder.appendNull(); From 7e4beab517c95115b572fc1692b4722ccd82d545 Mon Sep 17 00:00:00 2001 From: Pratham Desai Date: Thu, 16 Apr 2020 01:58:15 -0700 Subject: [PATCH 213/519] Add test for dereferences on data containing null rows --- .../hive/TestHiveIntegrationSmokeTest.java | 44 +++++++++++++++++++ 1 file changed, 44 insertions(+) diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java index b48f0809b0c4..9374b9411888 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java @@ -3017,6 +3017,50 @@ private void testRows(Session session, HiveStorageFormat format) assertUpdate(session, "DROP TABLE " + tableName); } + @Test + public void testRowsWithNulls() + { + testRowsWithNulls(getSession(), HiveStorageFormat.ORC); + testRowsWithNulls(getSession(), HiveStorageFormat.PARQUET); + } + + private void testRowsWithNulls(Session session, HiveStorageFormat format) + { + String tableName = "test_dereferences_with_nulls"; + @Language("SQL") String 
createTable = "" + + "CREATE TABLE " + tableName + "\n" + + "(col0 BIGINT, col1 row(f0 BIGINT, f1 BIGINT), col2 row(f0 BIGINT, f1 ROW(f0 BIGINT, f1 BIGINT)))\n" + + "WITH (format = '" + format + "')"; + + assertUpdate(session, createTable); + + @Language("SQL") String insertTable = "" + + "INSERT INTO " + tableName + " VALUES \n" + + "row(1, row(2, 3), row(4, row(5, 6))),\n" + + "row(7, row(8, 9), row(10, row(11, NULL))),\n" + + "row(NULL, NULL, row(12, NULL)),\n" + + "row(13, row(NULL, 14), NULL),\n" + + "row(15, row(16, NULL), row(NULL, row(17, 18)))"; + + assertUpdate(session, insertTable, 5); + + assertQuery( + session, + format("SELECT col0, col1.f0, col2.f1.f1 FROM %s", tableName), + "SELECT * FROM \n" + + " (SELECT 1, 2, 6) UNION\n" + + " (SELECT 7, 8, NULL) UNION\n" + + " (SELECT NULL, NULL, NULL) UNION\n" + + " (SELECT 13, NULL, NULL) UNION\n" + + " (SELECT 15, 16, 18)"); + + assertQuery(session, format("SELECT col0 FROM %s WHERE col2.f1.f1 IS NOT NULL", tableName), "SELECT * FROM UNNEST(array[1, 15])"); + + assertQuery(session, format("SELECT col0, col1.f0, col1.f1 FROM %s WHERE col2.f1.f1 = 18", tableName), "SELECT 15, 16, NULL"); + + assertUpdate(session, "DROP TABLE " + tableName); + } + @Test public void testComplex() { From c77528cc1a07bc08d31cc208f1538bdbe03e760a Mon Sep 17 00:00:00 2001 From: Pratham Desai Date: Thu, 16 Apr 2020 01:59:05 -0700 Subject: [PATCH 214/519] Pushdown dereference projections in ORC reader --- .../plugin/hive/orc/OrcPageSourceFactory.java | 46 ++++++++++- .../main/java/io/prestosql/orc/OrcReader.java | 80 +++++++++++++++++++ .../io/prestosql/orc/OrcRecordReader.java | 9 ++- .../prestosql/orc/reader/ColumnReaders.java | 10 ++- .../orc/reader/ListColumnReader.java | 3 +- .../prestosql/orc/reader/MapColumnReader.java | 5 +- .../orc/reader/StructColumnReader.java | 9 ++- 7 files changed, 152 insertions(+), 10 deletions(-) diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/orc/OrcPageSourceFactory.java 
b/presto-hive/src/main/java/io/prestosql/plugin/hive/orc/OrcPageSourceFactory.java index 601f54bee51e..033fcbec1dc8 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/orc/OrcPageSourceFactory.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/orc/OrcPageSourceFactory.java @@ -30,6 +30,7 @@ import io.prestosql.plugin.hive.FileFormatDataSourceStats; import io.prestosql.plugin.hive.HdfsEnvironment; import io.prestosql.plugin.hive.HiveColumnHandle; +import io.prestosql.plugin.hive.HiveColumnProjectionInfo; import io.prestosql.plugin.hive.HivePageSourceFactory; import io.prestosql.plugin.hive.ReaderProjections; import io.prestosql.plugin.hive.orc.OrcPageSource.ColumnAdaptation; @@ -58,12 +59,15 @@ import java.util.Optional; import java.util.Properties; import java.util.regex.Pattern; +import java.util.stream.Collectors; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Strings.nullToEmpty; import static com.google.common.collect.Maps.uniqueIndex; import static io.prestosql.memory.context.AggregatedMemoryContext.newSimpleAggregatedMemoryContext; import static io.prestosql.orc.OrcReader.INITIAL_BATCH_SIZE; +import static io.prestosql.orc.OrcReader.ProjectedLayout.createProjectedLayout; +import static io.prestosql.orc.OrcReader.ProjectedLayout.fullyProjectedLayout; import static io.prestosql.orc.metadata.OrcType.OrcTypeKind.INT; import static io.prestosql.orc.metadata.OrcType.OrcTypeKind.LONG; import static io.prestosql.orc.metadata.OrcType.OrcTypeKind.STRUCT; @@ -90,6 +94,8 @@ import static java.lang.String.format; import static java.util.Locale.ENGLISH; import static java.util.Objects.requireNonNull; +import static java.util.stream.Collectors.mapping; +import static java.util.stream.Collectors.toList; import static org.apache.hadoop.hive.ql.io.AcidUtils.isFullAcidTable; public class OrcPageSourceFactory @@ -162,6 +168,7 @@ public Optional createPageSource( projectedReaderColumns 
.map(ReaderProjections::getReaderColumns) .orElse(columns), + columns, isUseOrcColumnNames(session), isFullAcidTable(Maps.fromProperties(schema)), effectivePredicate, @@ -190,6 +197,7 @@ private static OrcPageSource createOrcPageSource( long length, long fileSize, List columns, + List projections, boolean useOrcColumnNames, boolean isFullAcid, TupleDomain effectivePredicate, @@ -229,6 +237,7 @@ private static OrcPageSource createOrcPageSource( List fileColumns = reader.getRootColumn().getNestedColumns(); List fileReadColumns = new ArrayList<>(columns.size() + (isFullAcid ? 3 : 0)); List fileReadTypes = new ArrayList<>(columns.size() + (isFullAcid ? 3 : 0)); + List fileReadLayouts = new ArrayList<>(columns.size() + (isFullAcid ? 3 : 0)); if (isFullAcid) { verifyAcidSchema(reader, path); Map acidColumnsByName = uniqueIndex(fileColumns, orcColumn -> orcColumn.getColumnName().toLowerCase(ENGLISH)); @@ -236,10 +245,15 @@ private static OrcPageSource createOrcPageSource( fileReadColumns.add(acidColumnsByName.get(ACID_COLUMN_ORIGINAL_TRANSACTION.toLowerCase(ENGLISH))); fileReadTypes.add(BIGINT); + fileReadLayouts.add(fullyProjectedLayout()); + fileReadColumns.add(acidColumnsByName.get(ACID_COLUMN_BUCKET.toLowerCase(ENGLISH))); fileReadTypes.add(INTEGER); + fileReadLayouts.add(fullyProjectedLayout()); + fileReadColumns.add(acidColumnsByName.get(ACID_COLUMN_ROW_ID.toLowerCase(ENGLISH))); fileReadTypes.add(BIGINT); + fileReadLayouts.add(fullyProjectedLayout()); } Map fileColumnsByName = ImmutableMap.of(); @@ -250,6 +264,25 @@ private static OrcPageSource createOrcPageSource( fileColumnsByName = uniqueIndex(fileColumns, orcColumn -> orcColumn.getColumnName().toLowerCase(ENGLISH)); } + Map>> projectionsByColumnName = ImmutableMap.of(); + Map>> projectionsByColumnIndex = ImmutableMap.of(); + if (useOrcColumnNames || isFullAcid) { + projectionsByColumnName = projections.stream() + .collect(Collectors.groupingBy( + HiveColumnHandle::getBaseColumnName, + mapping( + column -> 
column.getHiveColumnProjectionInfo().map(HiveColumnProjectionInfo::getDereferenceNames).orElse(ImmutableList.of()), + toList()))); + } + else { + projectionsByColumnIndex = projections.stream() + .collect(Collectors.groupingBy( + HiveColumnHandle::getBaseHiveColumnIndex, + mapping( + column -> column.getHiveColumnProjectionInfo().map(HiveColumnProjectionInfo::getDereferenceNames).orElse(ImmutableList.of()), + toList()))); + } + TupleDomainOrcPredicateBuilder predicateBuilder = TupleDomainOrcPredicate.builder() .setBloomFiltersEnabled(options.isBloomFiltersEnabled()); Map effectivePredicateDomains = effectivePredicate.getDomains() @@ -257,11 +290,20 @@ private static OrcPageSource createOrcPageSource( List columnAdaptations = new ArrayList<>(columns.size()); for (HiveColumnHandle column : columns) { OrcColumn orcColumn = null; + OrcReader.ProjectedLayout projectedLayout = null; + if (useOrcColumnNames || isFullAcid) { - orcColumn = fileColumnsByName.get(column.getName().toLowerCase(ENGLISH)); + String columnName = column.getName().toLowerCase(ENGLISH); + orcColumn = fileColumnsByName.get(columnName); + if (orcColumn != null) { + projectedLayout = createProjectedLayout(orcColumn, projectionsByColumnName.get(columnName)); + } } else if (column.getBaseHiveColumnIndex() < fileColumns.size()) { orcColumn = fileColumns.get(column.getBaseHiveColumnIndex()); + if (orcColumn != null) { + projectedLayout = createProjectedLayout(orcColumn, projectionsByColumnIndex.get(column.getBaseHiveColumnIndex())); + } } Type readType = column.getType(); @@ -270,6 +312,7 @@ else if (column.getBaseHiveColumnIndex() < fileColumns.size()) { columnAdaptations.add(ColumnAdaptation.sourceColumn(sourceIndex)); fileReadColumns.add(orcColumn); fileReadTypes.add(readType); + fileReadLayouts.add(projectedLayout); Domain domain = effectivePredicateDomains.get(column); if (domain != null) { @@ -284,6 +327,7 @@ else if (column.getBaseHiveColumnIndex() < fileColumns.size()) { OrcRecordReader recordReader 
= reader.createRecordReader( fileReadColumns, fileReadTypes, + fileReadLayouts, predicateBuilder.build(), start, length, diff --git a/presto-orc/src/main/java/io/prestosql/orc/OrcReader.java b/presto-orc/src/main/java/io/prestosql/orc/OrcReader.java index b9dbaa93b4e9..963f0749e83c 100644 --- a/presto-orc/src/main/java/io/prestosql/orc/OrcReader.java +++ b/presto-orc/src/main/java/io/prestosql/orc/OrcReader.java @@ -15,6 +15,7 @@ import com.google.common.base.Joiner; import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; import io.airlift.log.Logger; import io.airlift.slice.Slice; import io.airlift.units.DataSize; @@ -38,10 +39,13 @@ import java.io.IOException; import java.io.InputStream; +import java.util.Collections; import java.util.List; +import java.util.Map; import java.util.Optional; import java.util.function.Function; import java.util.function.Predicate; +import java.util.stream.Collectors; import java.util.stream.IntStream; import static com.google.common.base.Throwables.throwIfUnchecked; @@ -53,7 +57,10 @@ import static io.prestosql.orc.metadata.PostScript.MAGIC; import static java.lang.Math.min; import static java.lang.Math.toIntExact; +import static java.util.Locale.ENGLISH; import static java.util.Objects.requireNonNull; +import static java.util.stream.Collectors.mapping; +import static java.util.stream.Collectors.toList; public class OrcReader { @@ -253,10 +260,37 @@ public OrcRecordReader createRecordReader( int initialBatchSize, Function exceptionTransform) throws OrcCorruptionException + { + return createRecordReader( + readColumns, + readTypes, + Collections.nCopies(readColumns.size(), ProjectedLayout.fullyProjectedLayout()), + predicate, + offset, + length, + hiveStorageTimeZone, + systemMemoryUsage, + initialBatchSize, + exceptionTransform); + } + + public OrcRecordReader createRecordReader( + List readColumns, + List readTypes, + List readLayouts, + OrcPredicate predicate, + long offset, + long length, + 
DateTimeZone hiveStorageTimeZone, + AggregatedMemoryContext systemMemoryUsage, + int initialBatchSize, + Function exceptionTransform) + throws OrcCorruptionException { return new OrcRecordReader( requireNonNull(readColumns, "readColumns is null"), requireNonNull(readTypes, "readTypes is null"), + requireNonNull(readLayouts, "readLayouts is null"), requireNonNull(predicate, "predicate is null"), footer.getNumberOfRows(), footer.getStripes(), @@ -395,4 +429,50 @@ static void validateFile( throw new OrcCorruptionException(e, input.getId(), "Validation failed"); } } + + public static class ProjectedLayout + { + private final Optional> fieldLayouts; + + private ProjectedLayout(Optional> fieldLayouts) + { + this.fieldLayouts = requireNonNull(fieldLayouts, "fieldLayouts is null"); + } + + public ProjectedLayout getFieldLayout(String name) + { + if (fieldLayouts.isPresent()) { + return fieldLayouts.get().get(name); + } + + return fullyProjectedLayout(); + } + + public static ProjectedLayout fullyProjectedLayout() + { + return new ProjectedLayout(Optional.empty()); + } + + public static ProjectedLayout createProjectedLayout(OrcColumn root, List> dereferences) + { + if (dereferences.stream().map(List::size).anyMatch(Predicate.isEqual(0))) { + return fullyProjectedLayout(); + } + + Map>> dereferencesByField = dereferences.stream().collect( + Collectors.groupingBy( + sequence -> sequence.get(0), + mapping(sequence -> sequence.subList(1, sequence.size()), toList()))); + + ImmutableMap.Builder fieldLayouts = ImmutableMap.builder(); + for (OrcColumn nestedColumn : root.getNestedColumns()) { + String fieldName = nestedColumn.getColumnName().toLowerCase(ENGLISH); + if (dereferencesByField.containsKey(fieldName)) { + fieldLayouts.put(fieldName, createProjectedLayout(nestedColumn, dereferencesByField.get(fieldName))); + } + } + + return new ProjectedLayout(Optional.of(fieldLayouts.build())); + } + } } diff --git a/presto-orc/src/main/java/io/prestosql/orc/OrcRecordReader.java 
b/presto-orc/src/main/java/io/prestosql/orc/OrcRecordReader.java index 084486f573bf..7bfe116a7696 100644 --- a/presto-orc/src/main/java/io/prestosql/orc/OrcRecordReader.java +++ b/presto-orc/src/main/java/io/prestosql/orc/OrcRecordReader.java @@ -119,6 +119,7 @@ public class OrcRecordReader public OrcRecordReader( List readColumns, List readTypes, + List readLayouts, OrcPredicate predicate, long numberOfRows, List fileStripes, @@ -145,6 +146,8 @@ public OrcRecordReader( checkArgument(readColumns.stream().distinct().count() == readColumns.size(), "readColumns contains duplicate entries"); requireNonNull(readTypes, "readTypes is null"); checkArgument(readColumns.size() == readTypes.size(), "readColumns and readTypes must have the same size"); + requireNonNull(readLayouts, "readLayouts is null"); + checkArgument(readColumns.size() == readLayouts.size(), "readColumns and readLayouts must have the same size"); requireNonNull(predicate, "predicate is null"); requireNonNull(fileStripes, "fileStripes is null"); requireNonNull(stripeStats, "stripeStats is null"); @@ -233,7 +236,7 @@ public OrcRecordReader( metadataReader, writeValidation); - columnReaders = createColumnReaders(readColumns, readTypes, streamReadersSystemMemoryContext, blockFactory); + columnReaders = createColumnReaders(readColumns, readTypes, readLayouts, streamReadersSystemMemoryContext, blockFactory); currentBytesPerCell = new long[columnReaders.length]; maxBytesPerCell = new long[columnReaders.length]; nextBatchSize = initialBatchSize; @@ -545,6 +548,7 @@ private void validateWritePageChecksum(Page page) private ColumnReader[] createColumnReaders( List columns, List readTypes, + List readLayouts, AggregatedMemoryContext systemMemoryContext, OrcBlockFactory blockFactory) throws OrcCorruptionException @@ -554,7 +558,8 @@ private ColumnReader[] createColumnReaders( int columnIndex = i; Type readType = readTypes.get(columnIndex); OrcColumn column = columns.get(columnIndex); - columnReaders[columnIndex] = 
createColumnReader(readType, column, systemMemoryContext, blockFactory); + OrcReader.ProjectedLayout projectedLayout = readLayouts.get(columnIndex); + columnReaders[columnIndex] = createColumnReader(readType, column, projectedLayout, systemMemoryContext, blockFactory); } return columnReaders; } diff --git a/presto-orc/src/main/java/io/prestosql/orc/reader/ColumnReaders.java b/presto-orc/src/main/java/io/prestosql/orc/reader/ColumnReaders.java index 1cc371e39678..80dd8665d6e0 100644 --- a/presto-orc/src/main/java/io/prestosql/orc/reader/ColumnReaders.java +++ b/presto-orc/src/main/java/io/prestosql/orc/reader/ColumnReaders.java @@ -17,13 +17,19 @@ import io.prestosql.orc.OrcBlockFactory; import io.prestosql.orc.OrcColumn; import io.prestosql.orc.OrcCorruptionException; +import io.prestosql.orc.OrcReader; import io.prestosql.spi.type.Type; public final class ColumnReaders { private ColumnReaders() {} - public static ColumnReader createColumnReader(Type type, OrcColumn column, AggregatedMemoryContext systemMemoryContext, OrcBlockFactory blockFactory) + public static ColumnReader createColumnReader( + Type type, + OrcColumn column, + OrcReader.ProjectedLayout projectedLayout, + AggregatedMemoryContext systemMemoryContext, + OrcBlockFactory blockFactory) throws OrcCorruptionException { switch (column.getColumnType()) { @@ -50,7 +56,7 @@ public static ColumnReader createColumnReader(Type type, OrcColumn column, Aggre case LIST: return new ListColumnReader(type, column, systemMemoryContext, blockFactory); case STRUCT: - return new StructColumnReader(type, column, systemMemoryContext, blockFactory); + return new StructColumnReader(type, column, projectedLayout, systemMemoryContext, blockFactory); case MAP: return new MapColumnReader(type, column, systemMemoryContext, blockFactory); case DECIMAL: diff --git a/presto-orc/src/main/java/io/prestosql/orc/reader/ListColumnReader.java b/presto-orc/src/main/java/io/prestosql/orc/reader/ListColumnReader.java index 
43190253bcd4..f4c69e95d531 100644 --- a/presto-orc/src/main/java/io/prestosql/orc/reader/ListColumnReader.java +++ b/presto-orc/src/main/java/io/prestosql/orc/reader/ListColumnReader.java @@ -38,6 +38,7 @@ import java.util.Optional; import static com.google.common.base.MoreObjects.toStringHelper; +import static io.prestosql.orc.OrcReader.ProjectedLayout.fullyProjectedLayout; import static io.prestosql.orc.metadata.Stream.StreamKind.LENGTH; import static io.prestosql.orc.metadata.Stream.StreamKind.PRESENT; import static io.prestosql.orc.reader.ColumnReaders.createColumnReader; @@ -81,7 +82,7 @@ public ListColumnReader(Type type, OrcColumn column, AggregatedMemoryContext sys this.column = requireNonNull(column, "column is null"); this.blockFactory = requireNonNull(blockFactory, "blockFactory is null"); - this.elementColumnReader = createColumnReader(elementType, column.getNestedColumns().get(0), systemMemoryContext, blockFactory); + this.elementColumnReader = createColumnReader(elementType, column.getNestedColumns().get(0), fullyProjectedLayout(), systemMemoryContext, blockFactory); } @Override diff --git a/presto-orc/src/main/java/io/prestosql/orc/reader/MapColumnReader.java b/presto-orc/src/main/java/io/prestosql/orc/reader/MapColumnReader.java index 5c1896bc43a8..73ce9a7b5045 100644 --- a/presto-orc/src/main/java/io/prestosql/orc/reader/MapColumnReader.java +++ b/presto-orc/src/main/java/io/prestosql/orc/reader/MapColumnReader.java @@ -39,6 +39,7 @@ import java.util.Optional; import static com.google.common.base.MoreObjects.toStringHelper; +import static io.prestosql.orc.OrcReader.ProjectedLayout.fullyProjectedLayout; import static io.prestosql.orc.metadata.Stream.StreamKind.LENGTH; import static io.prestosql.orc.metadata.Stream.StreamKind.PRESENT; import static io.prestosql.orc.reader.ColumnReaders.createColumnReader; @@ -85,8 +86,8 @@ public MapColumnReader(Type type, OrcColumn column, AggregatedMemoryContext syst this.column = requireNonNull(column, "column is 
null"); this.blockFactory = requireNonNull(blockFactory, "blockFactory is null"); - this.keyColumnReader = createColumnReader(this.type.getKeyType(), column.getNestedColumns().get(0), systemMemoryContext, blockFactory); - this.valueColumnReader = createColumnReader(this.type.getValueType(), column.getNestedColumns().get(1), systemMemoryContext, blockFactory); + this.keyColumnReader = createColumnReader(this.type.getKeyType(), column.getNestedColumns().get(0), fullyProjectedLayout(), systemMemoryContext, blockFactory); + this.valueColumnReader = createColumnReader(this.type.getValueType(), column.getNestedColumns().get(1), fullyProjectedLayout(), systemMemoryContext, blockFactory); } @Override diff --git a/presto-orc/src/main/java/io/prestosql/orc/reader/StructColumnReader.java b/presto-orc/src/main/java/io/prestosql/orc/reader/StructColumnReader.java index bf4f5784746c..688f1360cb5f 100644 --- a/presto-orc/src/main/java/io/prestosql/orc/reader/StructColumnReader.java +++ b/presto-orc/src/main/java/io/prestosql/orc/reader/StructColumnReader.java @@ -20,6 +20,7 @@ import io.prestosql.orc.OrcBlockFactory; import io.prestosql.orc.OrcColumn; import io.prestosql.orc.OrcCorruptionException; +import io.prestosql.orc.OrcReader; import io.prestosql.orc.metadata.ColumnEncoding; import io.prestosql.orc.metadata.ColumnMetadata; import io.prestosql.orc.stream.BooleanInputStream; @@ -74,7 +75,7 @@ public class StructColumnReader private boolean rowGroupOpen; - StructColumnReader(Type type, OrcColumn column, AggregatedMemoryContext systemMemoryContext, OrcBlockFactory blockFactory) + StructColumnReader(Type type, OrcColumn column, OrcReader.ProjectedLayout readLayout, AggregatedMemoryContext systemMemoryContext, OrcBlockFactory blockFactory) throws OrcCorruptionException { requireNonNull(type, "type is null"); @@ -96,8 +97,12 @@ public class StructColumnReader fieldNames.add(fieldName); OrcColumn fieldStream = nestedColumns.get(fieldName); + if (fieldStream != null) { - 
structFields.put(fieldName, createColumnReader(field.getType(), fieldStream, systemMemoryContext, blockFactory)); + OrcReader.ProjectedLayout fieldLayout = readLayout.getFieldLayout(fieldName); + if (fieldLayout != null) { + structFields.put(fieldName, createColumnReader(field.getType(), fieldStream, fieldLayout, systemMemoryContext, blockFactory)); + } } } this.fieldNames = fieldNames.build(); From 5069a55252dfc0958e68c140a4c49313096e9003 Mon Sep 17 00:00:00 2001 From: Pratham Desai Date: Mon, 27 Jan 2020 14:27:16 -0800 Subject: [PATCH 215/519] Implement predicate pushdown for nested columns in ORC reader --- .../plugin/hive/orc/OrcPageSourceFactory.java | 38 ++- .../plugin/hive/orc/TestOrcPredicates.java | 226 ++++++++++++++++++ 2 files changed, 260 insertions(+), 4 deletions(-) create mode 100644 presto-hive/src/test/java/io/prestosql/plugin/hive/orc/TestOrcPredicates.java diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/orc/OrcPageSourceFactory.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/orc/OrcPageSourceFactory.java index 033fcbec1dc8..ad43b31a9c3d 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/orc/OrcPageSourceFactory.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/orc/OrcPageSourceFactory.java @@ -63,6 +63,7 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Strings.nullToEmpty; +import static com.google.common.collect.ImmutableMap.toImmutableMap; import static com.google.common.collect.Maps.uniqueIndex; import static io.prestosql.memory.context.AggregatedMemoryContext.newSimpleAggregatedMemoryContext; import static io.prestosql.orc.OrcReader.INITIAL_BATCH_SIZE; @@ -155,7 +156,6 @@ public Optional createPageSource( } Optional projectedReaderColumns = projectBaseColumns(columns); - effectivePredicate = effectivePredicate.transform(column -> column.isBaseColumn() ? 
column : null); ConnectorPageSource orcPageSource = createOrcPageSource( hdfsEnvironment, @@ -291,18 +291,25 @@ private static OrcPageSource createOrcPageSource( for (HiveColumnHandle column : columns) { OrcColumn orcColumn = null; OrcReader.ProjectedLayout projectedLayout = null; + Map, Domain> columnDomains = null; if (useOrcColumnNames || isFullAcid) { String columnName = column.getName().toLowerCase(ENGLISH); orcColumn = fileColumnsByName.get(columnName); if (orcColumn != null) { projectedLayout = createProjectedLayout(orcColumn, projectionsByColumnName.get(columnName)); + columnDomains = effectivePredicateDomains.entrySet().stream() + .filter(columnDomain -> columnDomain.getKey().getBaseColumnName().toLowerCase(ENGLISH).equals(columnName)) + .collect(toImmutableMap(columnDomain -> columnDomain.getKey().getHiveColumnProjectionInfo(), Map.Entry::getValue)); } } else if (column.getBaseHiveColumnIndex() < fileColumns.size()) { orcColumn = fileColumns.get(column.getBaseHiveColumnIndex()); if (orcColumn != null) { projectedLayout = createProjectedLayout(orcColumn, projectionsByColumnIndex.get(column.getBaseHiveColumnIndex())); + columnDomains = effectivePredicateDomains.entrySet().stream() + .filter(columnDomain -> columnDomain.getKey().getBaseHiveColumnIndex() == column.getBaseHiveColumnIndex()) + .collect(toImmutableMap(columnDomain -> columnDomain.getKey().getHiveColumnProjectionInfo(), Map.Entry::getValue)); } } @@ -314,9 +321,12 @@ else if (column.getBaseHiveColumnIndex() < fileColumns.size()) { fileReadTypes.add(readType); fileReadLayouts.add(projectedLayout); - Domain domain = effectivePredicateDomains.get(column); - if (domain != null) { - predicateBuilder.addColumn(orcColumn.getColumnId(), domain); + // Add predicates on top-level and nested columns + for (Map.Entry, Domain> columnDomain : columnDomains.entrySet()) { + OrcColumn nestedColumn = getNestedColumn(orcColumn, columnDomain.getKey()); + if (nestedColumn != null) { + 
predicateBuilder.addColumn(nestedColumn.getColumnId(), columnDomain.getValue()); + } } } else { @@ -408,4 +418,24 @@ private static void verifyAcidColumn(OrcReader orcReader, int columnIndex, Strin throw new PrestoException(HIVE_BAD_DATA, format("ORC ACID file %s column should be type %s: %s", columnName, columnType, path)); } } + + private static OrcColumn getNestedColumn(OrcColumn baseColumn, Optional projectionInfo) + { + if (!projectionInfo.isPresent()) { + return baseColumn; + } + + OrcColumn current = baseColumn; + for (String field : projectionInfo.get().getDereferenceNames()) { + Optional orcColumn = current.getNestedColumns().stream() + .filter(column -> column.getColumnName().toLowerCase(ENGLISH).equals(field)) + .findFirst(); + + if (!orcColumn.isPresent()) { + return null; + } + current = orcColumn.get(); + } + return current; + } } diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/orc/TestOrcPredicates.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/orc/TestOrcPredicates.java new file mode 100644 index 000000000000..efa08ad07a40 --- /dev/null +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/orc/TestOrcPredicates.java @@ -0,0 +1,226 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.plugin.hive.orc; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import io.prestosql.orc.OrcReaderOptions; +import io.prestosql.orc.OrcWriterOptions; +import io.prestosql.plugin.hive.AbstractTestHiveFileFormats; +import io.prestosql.plugin.hive.FileFormatDataSourceStats; +import io.prestosql.plugin.hive.HiveColumnHandle; +import io.prestosql.plugin.hive.HiveCompressionCodec; +import io.prestosql.plugin.hive.HiveConfig; +import io.prestosql.plugin.hive.HivePageSourceProvider; +import io.prestosql.plugin.hive.HivePartitionKey; +import io.prestosql.plugin.hive.NodeVersion; +import io.prestosql.plugin.hive.TableToPartitionMapping; +import io.prestosql.spi.Page; +import io.prestosql.spi.connector.ConnectorPageSource; +import io.prestosql.spi.connector.ConnectorSession; +import io.prestosql.spi.predicate.Domain; +import io.prestosql.spi.predicate.TupleDomain; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.mapred.FileSplit; +import org.joda.time.DateTimeZone; +import org.testng.annotations.Test; + +import java.io.File; +import java.time.Instant; +import java.util.HashSet; +import java.util.List; +import java.util.Optional; +import java.util.OptionalInt; +import java.util.Properties; +import java.util.Set; +import java.util.stream.Collectors; + +import static com.google.common.base.Preconditions.checkState; +import static io.prestosql.plugin.hive.HiveStorageFormat.ORC; +import static io.prestosql.plugin.hive.HiveTestUtils.HDFS_ENVIRONMENT; +import static io.prestosql.plugin.hive.HiveTestUtils.TYPE_MANAGER; +import static io.prestosql.plugin.hive.HiveTestUtils.getHiveSession; +import static io.prestosql.plugin.hive.parquet.ParquetTester.HIVE_STORAGE_TIME_ZONE; +import static io.prestosql.spi.type.BigintType.BIGINT; +import static io.prestosql.testing.StructuralTestUtil.rowBlockOf; +import static 
java.util.stream.Collectors.toList; +import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.FILE_INPUT_FORMAT; +import static org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_LIB; +import static org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardStructObjectInspector; +import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaIntObjectInspector; +import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaLongObjectInspector; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertTrue; + +public class TestOrcPredicates + extends AbstractTestHiveFileFormats +{ + private static final int NUM_ROWS = 50000; + private static final FileFormatDataSourceStats STATS = new FileFormatDataSourceStats(); + + // Prepare test columns + private static final TestColumn columnPrimitiveInteger = new TestColumn("column_primitive_integer", javaIntObjectInspector, 3, 3); + private static final TestColumn columnStruct = new TestColumn( + "column1_struct", + getStandardStructObjectInspector(ImmutableList.of("field0", "field1"), ImmutableList.of(javaLongObjectInspector, javaLongObjectInspector)), + new Long[] {4L, 5L}, + rowBlockOf(ImmutableList.of(BIGINT, BIGINT), 4L, 5L)); + private static final TestColumn columnPrimitiveBigInt = new TestColumn("column_primitive_bigint", javaLongObjectInspector, 6L, 6L); + + @Test + public void testOrcPredicates() + throws Exception + { + testOrcPredicates(getHiveSession(new HiveConfig(), new OrcReaderConfig().setUseColumnNames(true))); + testOrcPredicates(getHiveSession(new HiveConfig(), new OrcReaderConfig())); + } + + private void testOrcPredicates(ConnectorSession session) + throws Exception + { + List columnsToWrite = ImmutableList.of(columnPrimitiveInteger, columnStruct, columnPrimitiveBigInt); + + File file = File.createTempFile("test", "orc_predicate"); + 
file.delete(); + try { + // Write data + OrcFileWriterFactory writerFactory = new OrcFileWriterFactory(HDFS_ENVIRONMENT, TYPE_MANAGER, new NodeVersion("test"), HIVE_STORAGE_TIME_ZONE, false, STATS, new OrcWriterOptions()); + FileSplit split = createTestFile(file.getAbsolutePath(), ORC, HiveCompressionCodec.NONE, columnsToWrite, session, NUM_ROWS, writerFactory); + + TupleDomain testingPredicate; + + // Verify predicates on base column + List columnsToRead = columnsToWrite; + // All rows returned for a satisfying predicate + testingPredicate = TupleDomain.withColumnDomains(ImmutableMap.of(columnPrimitiveBigInt, Domain.singleValue(BIGINT, 6L))); + assertFilteredRows(testingPredicate, columnsToRead, session, split, NUM_ROWS); + // No rows returned for a mismatched predicate + testingPredicate = TupleDomain.withColumnDomains(ImmutableMap.of(columnPrimitiveBigInt, Domain.singleValue(BIGINT, 1L))); + assertFilteredRows(testingPredicate, columnsToRead, session, split, 0); + + // Verify predicates on projected column + TestColumn projectedColumn = new TestColumn( + columnStruct.getBaseName(), + columnStruct.getBaseObjectInspector(), + ImmutableList.of("field1"), + ImmutableList.of(1), + javaLongObjectInspector, + 5L, + 5L, + false); + + columnsToRead = ImmutableList.of(columnPrimitiveBigInt, projectedColumn); + // All rows returned for a satisfying predicate + testingPredicate = TupleDomain.withColumnDomains(ImmutableMap.of(projectedColumn, Domain.singleValue(BIGINT, 5L))); + assertFilteredRows(testingPredicate, columnsToRead, session, split, NUM_ROWS); + // No rows returned for a mismatched predicate + testingPredicate = TupleDomain.withColumnDomains(ImmutableMap.of(projectedColumn, Domain.singleValue(BIGINT, 6L))); + assertFilteredRows(testingPredicate, columnsToRead, session, split, 0); + } + finally { + file.delete(); + } + } + + private void assertFilteredRows( + TupleDomain effectivePredicate, + List columnsToRead, + ConnectorSession session, + FileSplit split, + int 
expectedRows) + { + ConnectorPageSource pageSource = createPageSource(effectivePredicate, columnsToRead, session, split); + + int filteredRows = 0; + while (!pageSource.isFinished()) { + Page page = pageSource.getNextPage(); + if (page != null) { + filteredRows += page.getPositionCount(); + } + } + + assertEquals(filteredRows, expectedRows); + } + + private ConnectorPageSource createPageSource( + TupleDomain effectivePredicate, + List columnsToRead, + ConnectorSession session, + FileSplit split) + { + OrcPageSourceFactory readerFactory = new OrcPageSourceFactory(new OrcReaderOptions(), HDFS_ENVIRONMENT, STATS); + + Properties splitProperties = new Properties(); + splitProperties.setProperty(FILE_INPUT_FORMAT, ORC.getInputFormat()); + splitProperties.setProperty(SERIALIZATION_LIB, ORC.getSerDe()); + + // Use full columns in split properties + ImmutableList.Builder splitPropertiesColumnNames = ImmutableList.builder(); + ImmutableList.Builder splitPropertiesColumnTypes = ImmutableList.builder(); + Set baseColumnNames = new HashSet<>(); + for (TestColumn columnToRead : columnsToRead) { + String name = columnToRead.getBaseName(); + if (!baseColumnNames.contains(name) && !columnToRead.isPartitionKey()) { + baseColumnNames.add(name); + splitPropertiesColumnNames.add(name); + splitPropertiesColumnTypes.add(columnToRead.getBaseObjectInspector().getTypeName()); + } + } + + splitProperties.setProperty("columns", splitPropertiesColumnNames.build().stream().collect(Collectors.joining(","))); + splitProperties.setProperty("columns.types", splitPropertiesColumnTypes.build().stream().collect(Collectors.joining(","))); + + List partitionKeys = columnsToRead.stream() + .filter(TestColumn::isPartitionKey) + .map(input -> new HivePartitionKey(input.getName(), (String) input.getWriteValue())) + .collect(toList()); + + List columnHandles = getColumnHandles(columnsToRead); + + TupleDomain predicate = effectivePredicate.transform(testColumn -> { + Optional handle = columnHandles.stream() 
+ .filter(column -> testColumn.getName().equals(column.getName())) + .findFirst(); + + checkState(handle.isPresent(), "Predicate on invalid column"); + return handle.get(); + }); + + Optional pageSource = HivePageSourceProvider.createHivePageSource( + ImmutableSet.of(readerFactory), + ImmutableSet.of(), + new Configuration(false), + session, + split.getPath(), + OptionalInt.empty(), + split.getStart(), + split.getLength(), + split.getLength(), + Instant.now().toEpochMilli(), + splitProperties, + predicate, + columnHandles, + partitionKeys, + DateTimeZone.getDefault(), + TYPE_MANAGER, + TableToPartitionMapping.empty(), + Optional.empty(), + false, + Optional.empty()); + + assertTrue(pageSource.isPresent()); + return pageSource.get(); + } +} From c0514ffca154f4c7303a3f3dea08f05d9c3e2265 Mon Sep 17 00:00:00 2001 From: Pratham Desai Date: Fri, 17 Apr 2020 11:47:29 -0700 Subject: [PATCH 216/519] Fix table handle comparison Compare table handles representing the same logical dataset as equal --- .../plugin/atop/AtopTableHandle.java | 6 ++-- .../plugin/jdbc/JdbcTableHandle.java | 7 ++-- .../blackhole/BlackHoleTableHandle.java | 12 +++++-- .../plugin/cassandra/CassandraPartition.java | 24 +++++++++++++ .../cassandra/CassandraTableHandle.java | 6 ++-- .../ElasticsearchTableHandle.java | 5 +-- .../plugin/hive/HiveBucketHandle.java | 35 +++++++++++++++++++ .../plugin/hive/util/HiveBucketing.java | 20 +++++++++++ .../plugin/iceberg/IcebergTableHandle.java | 25 +++++++++++++ .../plugin/kudu/KuduTableHandle.java | 2 +- .../localfile/LocalFileTableHandle.java | 5 +-- .../InformationSchemaTableHandle.java | 6 ++-- .../connector/system/SystemTableHandle.java | 5 +-- .../io/prestosql/metadata/TableHandle.java | 23 ++++++++++++ .../plugin/mongodb/MongoTableHandle.java | 5 +-- .../raptor/legacy/RaptorTableHandle.java | 12 +++++-- .../plugin/tpcds/TpcdsTableLayoutHandle.java | 22 ++++++++++++ .../plugin/tpch/TpchTableHandle.java | 5 +-- .../plugin/tpch/TpchTableLayoutHandle.java | 23 
++++++++++++ 19 files changed, 224 insertions(+), 24 deletions(-) diff --git a/presto-atop/src/main/java/io/prestosql/plugin/atop/AtopTableHandle.java b/presto-atop/src/main/java/io/prestosql/plugin/atop/AtopTableHandle.java index a3851b9ad1d9..e1683d906969 100644 --- a/presto-atop/src/main/java/io/prestosql/plugin/atop/AtopTableHandle.java +++ b/presto-atop/src/main/java/io/prestosql/plugin/atop/AtopTableHandle.java @@ -76,7 +76,7 @@ public Domain getEndTimeConstraint() @Override public int hashCode() { - return Objects.hash(schema, table); + return Objects.hash(schema, table, startTimeConstraint, endTimeConstraint); } @Override @@ -90,7 +90,9 @@ public boolean equals(Object obj) } AtopTableHandle other = (AtopTableHandle) obj; return Objects.equals(this.schema, other.schema) && - this.table == other.table; + this.table == other.table && + Objects.equals(startTimeConstraint, other.startTimeConstraint) && + Objects.equals(endTimeConstraint, other.endTimeConstraint); } @Override diff --git a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/JdbcTableHandle.java b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/JdbcTableHandle.java index f8bb0428081f..3d74275904b0 100644 --- a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/JdbcTableHandle.java +++ b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/JdbcTableHandle.java @@ -123,13 +123,16 @@ public boolean equals(Object obj) return false; } JdbcTableHandle o = (JdbcTableHandle) obj; - return Objects.equals(this.schemaTableName, o.schemaTableName); + return Objects.equals(this.schemaTableName, o.schemaTableName) && + Objects.equals(this.columns, o.columns) && + Objects.equals(this.constraint, o.constraint) && + Objects.equals(this.limit, o.limit); } @Override public int hashCode() { - return Objects.hash(schemaTableName); + return Objects.hash(schemaTableName, columns, constraint, limit); } @Override diff --git 
a/presto-blackhole/src/main/java/io/prestosql/plugin/blackhole/BlackHoleTableHandle.java b/presto-blackhole/src/main/java/io/prestosql/plugin/blackhole/BlackHoleTableHandle.java index 8bd305aac934..bcb2d456eb5d 100644 --- a/presto-blackhole/src/main/java/io/prestosql/plugin/blackhole/BlackHoleTableHandle.java +++ b/presto-blackhole/src/main/java/io/prestosql/plugin/blackhole/BlackHoleTableHandle.java @@ -141,7 +141,7 @@ public SchemaTableName toSchemaTableName() @Override public int hashCode() { - return Objects.hash(getSchemaName(), getTableName()); + return Objects.hash(schemaName, tableName, columnHandles, splitCount, pagesPerSplit, rowsPerPage, fieldsLength, pageProcessingDelay); } @Override @@ -154,8 +154,14 @@ public boolean equals(Object obj) return false; } BlackHoleTableHandle other = (BlackHoleTableHandle) obj; - return Objects.equals(this.getSchemaName(), other.getSchemaName()) && - Objects.equals(this.getTableName(), other.getTableName()); + return Objects.equals(this.schemaName, other.schemaName) && + Objects.equals(this.tableName, other.tableName) && + Objects.equals(this.columnHandles, other.columnHandles) && + this.splitCount == other.splitCount && + this.pagesPerSplit == other.pagesPerSplit && + this.rowsPerPage == other.rowsPerPage && + this.fieldsLength == other.fieldsLength && + Objects.equals(this.pageProcessingDelay, other.pageProcessingDelay); } @Override diff --git a/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraPartition.java b/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraPartition.java index 00b1184cbe8b..03eba6243897 100644 --- a/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraPartition.java +++ b/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraPartition.java @@ -17,6 +17,8 @@ import io.prestosql.spi.predicate.TupleDomain; import java.nio.ByteBuffer; +import java.util.Arrays; +import java.util.Objects; public class CassandraPartition { @@ -79,4 
+81,26 @@ public byte[] getKey() { return key; } + + @Override + public boolean equals(Object obj) + { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + CassandraPartition other = (CassandraPartition) obj; + return Objects.equals(this.partitionId, other.partitionId) && + Arrays.equals(this.key, other.key) && + Objects.equals(this.tupleDomain, other.tupleDomain) && + Objects.equals(this.indexedColumnPredicatePushdown, other.indexedColumnPredicatePushdown); + } + + @Override + public int hashCode() + { + return Objects.hash(partitionId, Arrays.hashCode(key), tupleDomain, indexedColumnPredicatePushdown); + } } diff --git a/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraTableHandle.java b/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraTableHandle.java index a70dceb88d75..2999a281a9de 100644 --- a/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraTableHandle.java +++ b/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraTableHandle.java @@ -88,7 +88,7 @@ public SchemaTableName getSchemaTableName() @Override public int hashCode() { - return Objects.hash(schemaName, tableName); + return Objects.hash(schemaName, tableName, partitions, clusteringKeyPredicates); } @Override @@ -102,7 +102,9 @@ public boolean equals(Object obj) } CassandraTableHandle other = (CassandraTableHandle) obj; return Objects.equals(this.schemaName, other.schemaName) && - Objects.equals(this.tableName, other.tableName); + Objects.equals(this.tableName, other.tableName) && + Objects.equals(this.partitions, other.partitions) && + Objects.equals(this.clusteringKeyPredicates, other.clusteringKeyPredicates); } @Override diff --git a/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/ElasticsearchTableHandle.java b/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/ElasticsearchTableHandle.java index 47e8eeb258af..9b3f72c95b75 100644 --- 
a/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/ElasticsearchTableHandle.java +++ b/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/ElasticsearchTableHandle.java @@ -102,12 +102,13 @@ public boolean equals(Object o) return schema.equals(that.schema) && index.equals(that.index) && constraint.equals(that.constraint) && - query.equals(that.query); + query.equals(that.query) && + limit.equals(that.limit); } @Override public int hashCode() { - return Objects.hash(schema, index, constraint, query); + return Objects.hash(schema, index, constraint, query, limit); } } diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveBucketHandle.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveBucketHandle.java index 17a53dc36fd9..d97cc1b388e5 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveBucketHandle.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveBucketHandle.java @@ -19,7 +19,9 @@ import io.prestosql.plugin.hive.util.HiveBucketing.BucketingVersion; import java.util.List; +import java.util.Objects; +import static com.google.common.base.MoreObjects.toStringHelper; import static com.google.common.base.Preconditions.checkArgument; import static java.lang.String.format; import static java.util.Objects.requireNonNull; @@ -83,4 +85,37 @@ public HiveBucketProperty toTableBucketProperty() tableBucketCount, ImmutableList.of()); } + + @Override + public boolean equals(Object obj) + { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + HiveBucketHandle other = (HiveBucketHandle) obj; + return Objects.equals(this.columns, other.columns) && + this.bucketingVersion == other.bucketingVersion && + this.tableBucketCount == other.tableBucketCount && + this.readBucketCount == other.readBucketCount; + } + + @Override + public int hashCode() + { + return Objects.hash(columns, bucketingVersion, tableBucketCount, readBucketCount); + } + + @Override 
+ public String toString() + { + return toStringHelper(this) + .add("columns", columns) + .add("bucketingVersion", bucketingVersion) + .add("tableBucketCount", tableBucketCount) + .add("readBucketCount", readBucketCount) + .toString(); + } } diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/util/HiveBucketing.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/util/HiveBucketing.java index ff1443e09ac7..087c4afbe31e 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/util/HiveBucketing.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/util/HiveBucketing.java @@ -42,6 +42,7 @@ import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; @@ -354,5 +355,24 @@ public Set getBucketsToKeep() { return bucketsToKeep; } + + @Override + public boolean equals(Object obj) + { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + HiveBucketFilter other = (HiveBucketFilter) obj; + return Objects.equals(this.bucketsToKeep, other.bucketsToKeep); + } + + @Override + public int hashCode() + { + return Objects.hash(bucketsToKeep); + } } } diff --git a/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergTableHandle.java b/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergTableHandle.java index 4b3320b94009..7f6b8d765289 100644 --- a/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergTableHandle.java +++ b/presto-iceberg/src/main/java/io/prestosql/plugin/iceberg/IcebergTableHandle.java @@ -21,6 +21,7 @@ import io.prestosql.spi.predicate.TupleDomain; import java.util.Locale; +import java.util.Objects; import java.util.Optional; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -99,6 +100,30 @@ public SchemaTableName getSchemaTableNameWithType() return new SchemaTableName(schemaName, tableName + "$" + 
tableType.name()); } + @Override + public boolean equals(Object o) + { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + IcebergTableHandle that = (IcebergTableHandle) o; + return Objects.equals(schemaName, that.schemaName) && + Objects.equals(tableName, that.tableName) && + tableType == that.tableType && + Objects.equals(snapshotId, that.snapshotId) && + Objects.equals(predicate, that.predicate); + } + + @Override + public int hashCode() + { + return Objects.hash(schemaName, tableName, tableType, snapshotId, predicate); + } + @Override public String toString() { diff --git a/presto-kudu/src/main/java/io/prestosql/plugin/kudu/KuduTableHandle.java b/presto-kudu/src/main/java/io/prestosql/plugin/kudu/KuduTableHandle.java index a0341c5391b9..5aa83439e3d5 100755 --- a/presto-kudu/src/main/java/io/prestosql/plugin/kudu/KuduTableHandle.java +++ b/presto-kudu/src/main/java/io/prestosql/plugin/kudu/KuduTableHandle.java @@ -95,7 +95,7 @@ public boolean isDeleteHandle() @Override public int hashCode() { - return Objects.hash(schemaTableName); + return Objects.hash(schemaTableName, constraint, desiredColumns, isDeleteHandle); } @Override diff --git a/presto-local-file/src/main/java/io/prestosql/plugin/localfile/LocalFileTableHandle.java b/presto-local-file/src/main/java/io/prestosql/plugin/localfile/LocalFileTableHandle.java index 04d3e0d9981e..7520d843c154 100644 --- a/presto-local-file/src/main/java/io/prestosql/plugin/localfile/LocalFileTableHandle.java +++ b/presto-local-file/src/main/java/io/prestosql/plugin/localfile/LocalFileTableHandle.java @@ -88,13 +88,14 @@ public boolean equals(Object o) LocalFileTableHandle that = (LocalFileTableHandle) o; return Objects.equals(schemaTableName, that.schemaTableName) && Objects.equals(timestampColumn, that.timestampColumn) && - Objects.equals(serverAddressColumn, that.serverAddressColumn); + Objects.equals(serverAddressColumn, that.serverAddressColumn) && + 
Objects.equals(constraint, that.constraint); } @Override public int hashCode() { - return Objects.hash(schemaTableName, timestampColumn, serverAddressColumn); + return Objects.hash(schemaTableName, timestampColumn, serverAddressColumn, constraint); } @Override diff --git a/presto-main/src/main/java/io/prestosql/connector/informationschema/InformationSchemaTableHandle.java b/presto-main/src/main/java/io/prestosql/connector/informationschema/InformationSchemaTableHandle.java index 8963fc496bb5..aaa72f367244 100644 --- a/presto-main/src/main/java/io/prestosql/connector/informationschema/InformationSchemaTableHandle.java +++ b/presto-main/src/main/java/io/prestosql/connector/informationschema/InformationSchemaTableHandle.java @@ -85,7 +85,7 @@ public String toString() @Override public int hashCode() { - return Objects.hash(catalogName, table); + return Objects.hash(catalogName, table, prefixes, limit); } @Override @@ -99,6 +99,8 @@ public boolean equals(Object obj) } InformationSchemaTableHandle other = (InformationSchemaTableHandle) obj; return Objects.equals(this.catalogName, other.catalogName) && - this.table == other.table; + this.table == other.table && + Objects.equals(this.prefixes, other.prefixes) && + Objects.equals(this.limit, other.limit); } } diff --git a/presto-main/src/main/java/io/prestosql/connector/system/SystemTableHandle.java b/presto-main/src/main/java/io/prestosql/connector/system/SystemTableHandle.java index 3af5e95beb49..8e775b2cef6f 100644 --- a/presto-main/src/main/java/io/prestosql/connector/system/SystemTableHandle.java +++ b/presto-main/src/main/java/io/prestosql/connector/system/SystemTableHandle.java @@ -82,7 +82,7 @@ public String toString() @Override public int hashCode() { - return Objects.hash(schemaName, tableName); + return Objects.hash(schemaName, tableName, constraint); } @Override @@ -96,6 +96,7 @@ public boolean equals(Object obj) } final SystemTableHandle other = (SystemTableHandle) obj; return Objects.equals(this.schemaName, 
other.schemaName) && - Objects.equals(this.tableName, other.tableName); + Objects.equals(this.tableName, other.tableName) && + Objects.equals(this.constraint, other.constraint); } } diff --git a/presto-main/src/main/java/io/prestosql/metadata/TableHandle.java b/presto-main/src/main/java/io/prestosql/metadata/TableHandle.java index 78e5b3ec3329..71a4fa2dce35 100644 --- a/presto-main/src/main/java/io/prestosql/metadata/TableHandle.java +++ b/presto-main/src/main/java/io/prestosql/metadata/TableHandle.java @@ -20,6 +20,7 @@ import io.prestosql.spi.connector.ConnectorTableLayoutHandle; import io.prestosql.spi.connector.ConnectorTransactionHandle; +import java.util.Objects; import java.util.Optional; import static java.util.Objects.requireNonNull; @@ -76,4 +77,26 @@ public String toString() { return catalogName + ":" + connectorHandle; } + + @Override + public boolean equals(Object o) + { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + TableHandle other = (TableHandle) o; + return Objects.equals(catalogName, other.catalogName) && + Objects.equals(connectorHandle, other.connectorHandle) && + Objects.equals(transaction, other.transaction) && + Objects.equals(layout, other.layout); + } + + @Override + public int hashCode() + { + return Objects.hash(catalogName, connectorHandle, transaction, layout); + } } diff --git a/presto-mongodb/src/main/java/io/prestosql/plugin/mongodb/MongoTableHandle.java b/presto-mongodb/src/main/java/io/prestosql/plugin/mongodb/MongoTableHandle.java index ee6a21cdc29b..01e3692d624c 100644 --- a/presto-mongodb/src/main/java/io/prestosql/plugin/mongodb/MongoTableHandle.java +++ b/presto-mongodb/src/main/java/io/prestosql/plugin/mongodb/MongoTableHandle.java @@ -59,7 +59,7 @@ public TupleDomain getConstraint() @Override public int hashCode() { - return Objects.hash(schemaTableName); + return Objects.hash(schemaTableName, constraint); } @Override @@ -72,7 +72,8 @@ public boolean 
equals(Object obj) return false; } MongoTableHandle other = (MongoTableHandle) obj; - return Objects.equals(this.schemaTableName, other.schemaTableName); + return Objects.equals(this.schemaTableName, other.schemaTableName) && + Objects.equals(this.constraint, other.constraint); } @Override diff --git a/presto-raptor-legacy/src/main/java/io/prestosql/plugin/raptor/legacy/RaptorTableHandle.java b/presto-raptor-legacy/src/main/java/io/prestosql/plugin/raptor/legacy/RaptorTableHandle.java index b5afc371e7b3..855e3b283a1e 100644 --- a/presto-raptor-legacy/src/main/java/io/prestosql/plugin/raptor/legacy/RaptorTableHandle.java +++ b/presto-raptor-legacy/src/main/java/io/prestosql/plugin/raptor/legacy/RaptorTableHandle.java @@ -158,7 +158,7 @@ public String toString() @Override public int hashCode() { - return Objects.hash(schemaName, tableName, tableId); + return Objects.hash(schemaName, tableName, tableId, distributionId, distributionName, bucketCount, organized, transactionId, constraint, bucketAssignments, delete); } @Override @@ -173,7 +173,15 @@ public boolean equals(Object obj) RaptorTableHandle other = (RaptorTableHandle) obj; return Objects.equals(this.schemaName, other.schemaName) && Objects.equals(this.tableName, other.tableName) && - Objects.equals(this.tableId, other.tableId); + Objects.equals(this.tableId, other.tableId) && + Objects.equals(this.distributionId, other.distributionId) && + Objects.equals(this.distributionName, other.distributionName) && + Objects.equals(this.bucketCount, other.bucketCount) && + this.organized == other.organized && + Objects.equals(this.transactionId, other.transactionId) && + Objects.equals(this.constraint, other.constraint) && + Objects.equals(this.bucketAssignments, other.bucketAssignments) && + this.delete == other.delete; } @JsonIgnore diff --git a/presto-tpcds/src/main/java/io/prestosql/plugin/tpcds/TpcdsTableLayoutHandle.java b/presto-tpcds/src/main/java/io/prestosql/plugin/tpcds/TpcdsTableLayoutHandle.java index 
3d6779e68962..8f4a273df4b0 100644 --- a/presto-tpcds/src/main/java/io/prestosql/plugin/tpcds/TpcdsTableLayoutHandle.java +++ b/presto-tpcds/src/main/java/io/prestosql/plugin/tpcds/TpcdsTableLayoutHandle.java @@ -17,6 +17,8 @@ import com.fasterxml.jackson.annotation.JsonProperty; import io.prestosql.spi.connector.ConnectorTableLayoutHandle; +import java.util.Objects; + import static java.util.Objects.requireNonNull; public class TpcdsTableLayoutHandle @@ -41,4 +43,24 @@ public String toString() { return table.getTableName(); } + + @Override + public boolean equals(Object o) + { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + TpcdsTableLayoutHandle that = (TpcdsTableLayoutHandle) o; + return Objects.equals(table, that.table); + } + + @Override + public int hashCode() + { + return table.hashCode(); + } } diff --git a/presto-tpch/src/main/java/io/prestosql/plugin/tpch/TpchTableHandle.java b/presto-tpch/src/main/java/io/prestosql/plugin/tpch/TpchTableHandle.java index ad87ce2d353e..633dfb191328 100644 --- a/presto-tpch/src/main/java/io/prestosql/plugin/tpch/TpchTableHandle.java +++ b/presto-tpch/src/main/java/io/prestosql/plugin/tpch/TpchTableHandle.java @@ -75,7 +75,7 @@ public String toString() @Override public int hashCode() { - return Objects.hash(tableName, scaleFactor); + return Objects.hash(tableName, scaleFactor, constraint); } @Override @@ -89,6 +89,7 @@ public boolean equals(Object obj) } TpchTableHandle other = (TpchTableHandle) obj; return Objects.equals(this.tableName, other.tableName) && - Objects.equals(this.scaleFactor, other.scaleFactor); + Objects.equals(this.scaleFactor, other.scaleFactor) && + Objects.equals(this.constraint, other.constraint); } } diff --git a/presto-tpch/src/main/java/io/prestosql/plugin/tpch/TpchTableLayoutHandle.java b/presto-tpch/src/main/java/io/prestosql/plugin/tpch/TpchTableLayoutHandle.java index 0b292e8f9281..0d91e319e913 100644 --- 
a/presto-tpch/src/main/java/io/prestosql/plugin/tpch/TpchTableLayoutHandle.java +++ b/presto-tpch/src/main/java/io/prestosql/plugin/tpch/TpchTableLayoutHandle.java @@ -19,6 +19,8 @@ import io.prestosql.spi.connector.ConnectorTableLayoutHandle; import io.prestosql.spi.predicate.TupleDomain; +import java.util.Objects; + public class TpchTableLayoutHandle implements ConnectorTableLayoutHandle { @@ -49,4 +51,25 @@ public String toString() { return table.toString(); } + + @Override + public boolean equals(Object o) + { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + TpchTableLayoutHandle that = (TpchTableLayoutHandle) o; + return Objects.equals(table, that.table) && + Objects.equals(predicate, that.predicate); + } + + @Override + public int hashCode() + { + return Objects.hash(table, predicate); + } } From 3508f949a85cf61775362bf3b6941d7aadcf52d0 Mon Sep 17 00:00:00 2001 From: Pratham Desai Date: Tue, 21 Apr 2020 11:18:29 -0700 Subject: [PATCH 217/519] Remove unused RaptorTableLayoutHandle --- .../legacy/RaptorTableLayoutHandle.java | 67 ------------------- 1 file changed, 67 deletions(-) delete mode 100644 presto-raptor-legacy/src/main/java/io/prestosql/plugin/raptor/legacy/RaptorTableLayoutHandle.java diff --git a/presto-raptor-legacy/src/main/java/io/prestosql/plugin/raptor/legacy/RaptorTableLayoutHandle.java b/presto-raptor-legacy/src/main/java/io/prestosql/plugin/raptor/legacy/RaptorTableLayoutHandle.java deleted file mode 100644 index 2bfbbb16f8dd..000000000000 --- a/presto-raptor-legacy/src/main/java/io/prestosql/plugin/raptor/legacy/RaptorTableLayoutHandle.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package io.prestosql.plugin.raptor.legacy; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import io.prestosql.spi.connector.ColumnHandle; -import io.prestosql.spi.connector.ConnectorTableLayoutHandle; -import io.prestosql.spi.predicate.TupleDomain; - -import java.util.Optional; - -import static java.util.Objects.requireNonNull; - -public class RaptorTableLayoutHandle - implements ConnectorTableLayoutHandle -{ - private final RaptorTableHandle table; - private final TupleDomain constraint; - private final Optional partitioning; - - @JsonCreator - public RaptorTableLayoutHandle( - @JsonProperty("table") RaptorTableHandle table, - @JsonProperty("constraint") TupleDomain constraint, - @JsonProperty("partitioning") Optional partitioning) - { - this.table = requireNonNull(table, "table is null"); - this.constraint = requireNonNull(constraint, "constraint is null"); - this.partitioning = requireNonNull(partitioning, "partitioning is null"); - } - - @JsonProperty - public RaptorTableHandle getTable() - { - return table; - } - - @JsonProperty - public TupleDomain getConstraint() - { - return constraint; - } - - @JsonProperty - public Optional getPartitioning() - { - return partitioning; - } - - @Override - public String toString() - { - return table.toString(); - } -} From 7f032b5b84597971deb872cfe3bd8802fb9075cc Mon Sep 17 00:00:00 2001 From: Pratham Desai Date: Tue, 14 Apr 2020 20:48:57 -0700 Subject: [PATCH 218/519] Compare table handles while matching plans in 
PushPredicateIntoTableScan --- .../sql/planner/iterative/rule/PushPredicateIntoTableScan.java | 3 ++- .../io/prestosql/tests/TestInformationSchemaConnector.java | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PushPredicateIntoTableScan.java b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PushPredicateIntoTableScan.java index 0e19134aa2a7..a1dc2e89d4f1 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PushPredicateIntoTableScan.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PushPredicateIntoTableScan.java @@ -131,7 +131,8 @@ private boolean arePlansSame(FilterNode filter, TableScanNode tableScan, PlanNod TableScanNode rewrittenTableScan = (TableScanNode) rewrittenFilter.getSource(); - return Objects.equals(tableScan.getEnforcedConstraint(), rewrittenTableScan.getEnforcedConstraint()); + return Objects.equals(tableScan.getEnforcedConstraint(), rewrittenTableScan.getEnforcedConstraint()) && + Objects.equals(tableScan.getTable(), rewrittenTableScan.getTable()); } public static Optional pushFilterIntoTableScan( diff --git a/presto-tests/src/test/java/io/prestosql/tests/TestInformationSchemaConnector.java b/presto-tests/src/test/java/io/prestosql/tests/TestInformationSchemaConnector.java index 7ed183e78504..1e31e20a9f10 100644 --- a/presto-tests/src/test/java/io/prestosql/tests/TestInformationSchemaConnector.java +++ b/presto-tests/src/test/java/io/prestosql/tests/TestInformationSchemaConnector.java @@ -153,7 +153,7 @@ public void testMetadataCalls() "SELECT count(*) from test_catalog.information_schema.tables WHERE table_name = 'test_table1'", "VALUES 2", new MetadataCallsCount() - .withListSchemasCount(3)); + .withListSchemasCount(1)); assertMetadataCalls( "SELECT count(*) from test_catalog.information_schema.tables WHERE table_name LIKE 'test_t_ble1'", "VALUES 2", From 
1a81d10cda9cca3c079dd7cfb9609e163ad1e042 Mon Sep 17 00:00:00 2001 From: Pratham Desai Date: Fri, 17 Apr 2020 16:18:20 -0700 Subject: [PATCH 219/519] Add a test for predicate pushdown in hive connector --- ...> TestConnectorPushdownRulesWithHive.java} | 60 ++++++++++++++++--- 1 file changed, 53 insertions(+), 7 deletions(-) rename presto-hive/src/test/java/io/prestosql/plugin/hive/optimizer/{TestPushProjectionRuleWithHive.java => TestConnectorPushdownRulesWithHive.java} (72%) diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/optimizer/TestPushProjectionRuleWithHive.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/optimizer/TestConnectorPushdownRulesWithHive.java similarity index 72% rename from presto-hive/src/test/java/io/prestosql/plugin/hive/optimizer/TestPushProjectionRuleWithHive.java rename to presto-hive/src/test/java/io/prestosql/plugin/hive/optimizer/TestConnectorPushdownRulesWithHive.java index 3287f442817a..6050bd143948 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/optimizer/TestPushProjectionRuleWithHive.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/optimizer/TestConnectorPushdownRulesWithHive.java @@ -36,12 +36,15 @@ import io.prestosql.plugin.hive.metastore.HiveMetastore; import io.prestosql.plugin.hive.metastore.file.FileHiveMetastore; import io.prestosql.plugin.hive.testing.TestingHiveConnectorFactory; +import io.prestosql.spi.predicate.Domain; import io.prestosql.spi.predicate.TupleDomain; import io.prestosql.spi.security.PrincipalType; import io.prestosql.spi.type.RowType; import io.prestosql.spi.type.Type; +import io.prestosql.sql.planner.iterative.rule.PushPredicateIntoTableScan; import io.prestosql.sql.planner.iterative.rule.PushProjectionIntoTableScan; import io.prestosql.sql.planner.iterative.rule.test.BaseRuleTest; +import io.prestosql.sql.planner.iterative.rule.test.PlanBuilder; import io.prestosql.sql.planner.plan.Assignments; import io.prestosql.sql.tree.DereferenceExpression; import 
io.prestosql.sql.tree.Identifier; @@ -57,26 +60,32 @@ import static com.google.common.base.Predicates.equalTo; import static com.google.common.io.MoreFiles.deleteRecursively; import static com.google.common.io.RecursiveDeleteOption.ALLOW_INSECURE; +import static io.prestosql.plugin.hive.HiveColumnHandle.ColumnType.REGULAR; +import static io.prestosql.plugin.hive.HiveColumnHandle.createBaseColumn; +import static io.prestosql.plugin.hive.HiveType.HIVE_INT; import static io.prestosql.plugin.hive.HiveType.toHiveType; import static io.prestosql.spi.type.BigintType.BIGINT; +import static io.prestosql.spi.type.IntegerType.INTEGER; import static io.prestosql.spi.type.RowType.field; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.expression; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.filter; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.project; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.tableScan; import static io.prestosql.testing.TestingConnectorSession.SESSION; import static io.prestosql.testing.TestingSession.testSessionBuilder; +import static java.lang.String.format; import static java.util.Arrays.asList; -public class TestPushProjectionRuleWithHive +public class TestConnectorPushdownRulesWithHive extends BaseRuleTest { private static final String HIVE_CATALOG_NAME = "hive"; private static final String SCHEMA_NAME = "test_schema"; - private static final String TABLE_NAME = "test_table"; private static final Type ROW_TYPE = RowType.from(asList(field("a", BIGINT), field("b", BIGINT))); private File baseDir; + private HiveMetastore metastore; private static final Session HIVE_SESSION = testSessionBuilder() .setCatalog(HIVE_CATALOG_NAME) @@ -91,7 +100,7 @@ protected Optional createLocalQueryRunner() HdfsConfiguration configuration = new HiveHdfsConfiguration(new HdfsConfigurationInitializer(config), ImmutableSet.of()); HdfsEnvironment environment = new HdfsEnvironment(configuration, 
config, new NoHdfsAuthentication()); - HiveMetastore metastore = new FileHiveMetastore(environment, baseDir.toURI().toString(), "test"); + metastore = new FileHiveMetastore(environment, baseDir.toURI().toString(), "test"); Database database = Database.builder() .setDatabaseName(SCHEMA_NAME) .setOwnerName("public") @@ -109,10 +118,13 @@ protected Optional createLocalQueryRunner() @Test public void testProjectionPushdown() { + String tableName = "projection_test"; PushProjectionIntoTableScan pushProjectionIntoTableScan = new PushProjectionIntoTableScan(tester().getMetadata(), tester().getTypeAnalyzer()); - tester().getQueryRunner().execute("CREATE TABLE " + TABLE_NAME + "(struct_of_int) AS " + - "SELECT cast(row(5, 6) as row(a bigint, b bigint)) as struct_of_int where false"); + tester().getQueryRunner().execute(format( + "CREATE TABLE %s (struct_of_int) AS " + + "SELECT cast(row(5, 6) as row(a bigint, b bigint)) as struct_of_int where false", + tableName)); Type baseType = ROW_TYPE; @@ -126,10 +138,10 @@ public void testProjectionPushdown() ImmutableList.of("a"), toHiveType(new HiveTypeTranslator(), BIGINT), BIGINT)), - HiveColumnHandle.ColumnType.REGULAR, + REGULAR, Optional.empty()); - HiveTableHandle hiveTable = new HiveTableHandle(SCHEMA_NAME, TABLE_NAME, ImmutableMap.of(), ImmutableList.of(), Optional.empty()); + HiveTableHandle hiveTable = new HiveTableHandle(SCHEMA_NAME, tableName, ImmutableMap.of(), ImmutableList.of(), Optional.empty()); TableHandle table = new TableHandle(new CatalogName(HIVE_CATALOG_NAME), hiveTable, new HiveTransactionHandle(), Optional.empty()); HiveColumnHandle fullColumn = partialColumn.getBaseColumn(); @@ -163,6 +175,40 @@ public void testProjectionPushdown() equalTo(table.getConnectorHandle()), TupleDomain.all(), ImmutableMap.of("struct_of_int#a", equalTo(partialColumn))))); + + metastore.dropTable(new HiveIdentity(SESSION), SCHEMA_NAME, tableName, true); + } + + @Test + public void testPredicatePushdown() + { + String tableName = 
"predicate_test"; + tester().getQueryRunner().execute(format("CREATE TABLE %s (a, b) AS SELECT 5, 6", tableName)); + + PushPredicateIntoTableScan pushPredicateIntoTableScan = new PushPredicateIntoTableScan(tester().getMetadata(), tester().getTypeAnalyzer()); + + HiveTableHandle hiveTable = new HiveTableHandle(SCHEMA_NAME, tableName, ImmutableMap.of(), ImmutableList.of(), Optional.empty()); + TableHandle table = new TableHandle(new CatalogName(HIVE_CATALOG_NAME), hiveTable, new HiveTransactionHandle(), Optional.empty()); + + HiveColumnHandle column = createBaseColumn("a", 0, HIVE_INT, INTEGER, REGULAR, Optional.empty()); + + tester().assertThat(pushPredicateIntoTableScan) + .on(p -> + p.filter( + PlanBuilder.expression("a = 5"), + p.tableScan( + table, + ImmutableList.of(p.symbol("a", INTEGER)), + ImmutableMap.of(p.symbol("a", INTEGER), column)))) + .matches(filter( + "a = 5", + tableScan( + tableHandle -> ((HiveTableHandle) tableHandle).getCompactEffectivePredicate().getDomains().get() + .equals(ImmutableMap.of(column, Domain.singleValue(INTEGER, 5L))), + TupleDomain.all(), + ImmutableMap.of("a", equalTo(column))))); + + metastore.dropTable(new HiveIdentity(SESSION), SCHEMA_NAME, tableName, true); } @AfterClass(alwaysRun = true) From 13d6b297620af3fd37e4047e60080e9ae97113ad Mon Sep 17 00:00:00 2001 From: Pratham Desai Date: Tue, 21 Apr 2020 21:50:38 -0700 Subject: [PATCH 220/519] Fix bug in PhoenixTableLayoutHandle::equals --- .../io/prestosql/plugin/phoenix/PhoenixTableLayoutHandle.java | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/presto-phoenix/src/main/java/io/prestosql/plugin/phoenix/PhoenixTableLayoutHandle.java b/presto-phoenix/src/main/java/io/prestosql/plugin/phoenix/PhoenixTableLayoutHandle.java index 1663b8ebc8f6..c09093443c96 100644 --- a/presto-phoenix/src/main/java/io/prestosql/plugin/phoenix/PhoenixTableLayoutHandle.java +++ b/presto-phoenix/src/main/java/io/prestosql/plugin/phoenix/PhoenixTableLayoutHandle.java @@ -69,9 +69,7 
@@ public boolean equals(Object o) if (o == null || getClass() != o.getClass()) { return false; } - if (!super.equals(o)) { - return false; - } + PhoenixTableLayoutHandle that = (PhoenixTableLayoutHandle) o; return Objects.equals(table, that.table) && Objects.equals(tupleDomain, that.tupleDomain) && From 922f1712462421fb13ef59de894999cdf44c6afd Mon Sep 17 00:00:00 2001 From: Martin Traverso Date: Wed, 22 Apr 2020 20:33:38 -0700 Subject: [PATCH 221/519] Expose static method for creating parquet page source --- .../parquet/ParquetPageSourceFactory.java | 45 ++++++++++++++++--- 1 file changed, 38 insertions(+), 7 deletions(-) diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/parquet/ParquetPageSourceFactory.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/parquet/ParquetPageSourceFactory.java index 8815aad8e729..0475de9b58fa 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/parquet/ParquetPageSourceFactory.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/parquet/ParquetPageSourceFactory.java @@ -129,9 +129,41 @@ public Optional createPageSource( checkArgument(!deleteDeltaLocations.isPresent(), "Delete delta is not supported"); + return Optional.of(createPageSource( + path, + start, + length, + fileSize, + columns, + effectivePredicate, + isUseParquetColumnNames(session), + hdfsEnvironment, + configuration, + session.getUser(), + stats, + options.withFailOnCorruptedStatistics(isFailOnCorruptedParquetStatistics(session)) + .withMaxReadBlockSize(getParquetMaxReadBlockSize(session)))); + } + + /** + * This method is available for other callers to use directly. 
+ */ + public static ReaderPageSourceWithProjections createPageSource( + Path path, + long start, + long length, + long fileSize, + List columns, + TupleDomain effectivePredicate, + boolean useColumnNames, + HdfsEnvironment hdfsEnvironment, + Configuration configuration, + String user, + FileFormatDataSourceStats stats, + ParquetReaderOptions options) + { // Ignore predicates on partial columns for now. effectivePredicate = effectivePredicate.transform(column -> column.isBaseColumn() ? column : null); - boolean useParquetColumnNames = isUseParquetColumnNames(session); MessageType fileSchema; MessageType requestedSchema; @@ -139,7 +171,6 @@ public Optional createPageSource( ParquetReader parquetReader; ParquetDataSource dataSource = null; try { - String user = session.getUser(); FileSystem fileSystem = hdfsEnvironment.getFileSystem(user, path, configuration); FSDataInputStream inputStream = hdfsEnvironment.doAs(user, () -> fileSystem.open(path)); ParquetMetadata parquetMetadata = MetadataReader.readFooter(inputStream, path, fileSize); @@ -151,7 +182,7 @@ public Optional createPageSource( .map(ReaderProjections::getReaderColumns) .orElse(columns).stream() .filter(column -> column.getColumnType() == REGULAR) - .map(column -> getColumnType(column, fileSchema, useParquetColumnNames)) + .map(column -> getColumnType(column, fileSchema, useColumnNames)) .filter(Optional::isPresent) .map(Optional::get) .map(type -> new MessageType(fileSchema.getName(), type)) @@ -183,7 +214,7 @@ public Optional createPageSource( blocks.build(), dataSource, newSimpleAggregatedMemoryContext(), - options.withFailOnCorruptedStatistics(isFailOnCorruptedParquetStatistics(session)).withMaxReadBlockSize(getParquetMaxReadBlockSize(session))); + options); } catch (Exception e) { try { @@ -217,7 +248,7 @@ public Optional createPageSource( } List> parquetFields = baseColumns.stream() - .map(column -> getParquetType(column, fileSchema, useParquetColumnNames)) + .map(column -> getParquetType(column, 
fileSchema, useColumnNames)) .map(Optional::ofNullable) .collect(toImmutableList()); ImmutableList.Builder prestoTypes = ImmutableList.builder(); @@ -229,13 +260,13 @@ public Optional createPageSource( prestoTypes.add(column.getBaseType()); internalFields.add(parquetField.flatMap(field -> { - String columnName = useParquetColumnNames ? column.getBaseColumnName() : fileSchema.getFields().get(column.getBaseHiveColumnIndex()).getName(); + String columnName = useColumnNames ? column.getBaseColumnName() : fileSchema.getFields().get(column.getBaseHiveColumnIndex()).getName(); return constructField(column.getBaseType(), lookupColumnByName(messageColumn, columnName)); })); } ConnectorPageSource parquetPageSource = new ParquetPageSource(parquetReader, prestoTypes.build(), internalFields.build()); - return Optional.of(new ReaderPageSourceWithProjections(parquetPageSource, readerProjections)); + return new ReaderPageSourceWithProjections(parquetPageSource, readerProjections); } public static Optional getParquetType(GroupType groupType, boolean useParquetColumnNames, HiveColumnHandle column) From 15196182fcf413d4cb500b52b733691d4fda415c Mon Sep 17 00:00:00 2001 From: praveenkrishna Date: Wed, 8 Apr 2020 12:58:36 +0530 Subject: [PATCH 222/519] Minor cleanup in PhoenixMetadata --- .../plugin/phoenix/PhoenixClient.java | 6 ++ .../plugin/phoenix/PhoenixMetadata.java | 70 ++++--------------- 2 files changed, 19 insertions(+), 57 deletions(-) diff --git a/presto-phoenix/src/main/java/io/prestosql/plugin/phoenix/PhoenixClient.java b/presto-phoenix/src/main/java/io/prestosql/plugin/phoenix/PhoenixClient.java index 70320e33e842..d10d9765958c 100644 --- a/presto-phoenix/src/main/java/io/prestosql/plugin/phoenix/PhoenixClient.java +++ b/presto-phoenix/src/main/java/io/prestosql/plugin/phoenix/PhoenixClient.java @@ -297,6 +297,12 @@ public WriteMapping toWriteMapping(ConnectorSession session, Type type) return super.toWriteMapping(session, type); } + @Override + public boolean 
isLimitGuaranteed(ConnectorSession session) + { + return false; + } + private static ColumnMapping arrayColumnMapping(ConnectorSession session, ArrayType arrayType, String elementJdbcTypeName) { return ColumnMapping.blockMapping( diff --git a/presto-phoenix/src/main/java/io/prestosql/plugin/phoenix/PhoenixMetadata.java b/presto-phoenix/src/main/java/io/prestosql/plugin/phoenix/PhoenixMetadata.java index 1a84708eb4b0..d28db6e08eb7 100644 --- a/presto-phoenix/src/main/java/io/prestosql/plugin/phoenix/PhoenixMetadata.java +++ b/presto-phoenix/src/main/java/io/prestosql/plugin/phoenix/PhoenixMetadata.java @@ -18,13 +18,13 @@ import io.airlift.slice.Slice; import io.prestosql.plugin.jdbc.JdbcColumnHandle; import io.prestosql.plugin.jdbc.JdbcIdentity; +import io.prestosql.plugin.jdbc.JdbcMetadata; import io.prestosql.plugin.jdbc.JdbcOutputTableHandle; import io.prestosql.plugin.jdbc.JdbcTableHandle; import io.prestosql.spi.PrestoException; import io.prestosql.spi.connector.ColumnHandle; import io.prestosql.spi.connector.ColumnMetadata; import io.prestosql.spi.connector.ConnectorInsertTableHandle; -import io.prestosql.spi.connector.ConnectorMetadata; import io.prestosql.spi.connector.ConnectorNewTableLayout; import io.prestosql.spi.connector.ConnectorOutputMetadata; import io.prestosql.spi.connector.ConnectorOutputTableHandle; @@ -37,8 +37,6 @@ import io.prestosql.spi.connector.Constraint; import io.prestosql.spi.connector.SchemaNotFoundException; import io.prestosql.spi.connector.SchemaTableName; -import io.prestosql.spi.connector.SchemaTablePrefix; -import io.prestosql.spi.connector.TableNotFoundException; import io.prestosql.spi.security.PrestoPrincipal; import io.prestosql.spi.statistics.ComputedStatistics; import io.prestosql.spi.type.Type; @@ -86,7 +84,7 @@ import static org.apache.phoenix.util.SchemaUtil.getEscapedArgument; public class PhoenixMetadata - implements ConnectorMetadata + extends JdbcMetadata { // Maps to Phoenix's default empty schema public static 
final String DEFAULT_SCHEMA = "default"; @@ -97,15 +95,10 @@ public class PhoenixMetadata @Inject public PhoenixMetadata(PhoenixClient phoenixClient) { + super(phoenixClient, true); this.phoenixClient = requireNonNull(phoenixClient, "client is null"); } - @Override - public List listSchemaNames(ConnectorSession session) - { - return ImmutableList.copyOf(phoenixClient.getSchemaNames(JdbcIdentity.from(session))); - } - @Override public JdbcTableHandle getTableHandle(ConnectorSession session, SchemaTableName schemaTableName) { @@ -138,7 +131,7 @@ public ConnectorTableMetadata getTableMetadata(ConnectorSession session, Connect return getTableMetadata(session, table, false); } - public ConnectorTableMetadata getTableMetadata(ConnectorSession session, ConnectorTableHandle table, boolean rowkeyRequired) + private ConnectorTableMetadata getTableMetadata(ConnectorSession session, ConnectorTableHandle table, boolean rowkeyRequired) { JdbcTableHandle handle = (JdbcTableHandle) table; List columnMetadata = phoenixClient.getColumns(session, handle).stream() @@ -154,7 +147,7 @@ public boolean usesLegacyTableLayouts() return true; } - public Map getTableProperties(ConnectorSession session, JdbcTableHandle handle) + private Map getTableProperties(ConnectorSession session, JdbcTableHandle handle) { ImmutableMap.Builder properties = ImmutableMap.builder(); @@ -269,7 +262,13 @@ public Optional finishCreateTable(ConnectorSession sess } @Override - public ConnectorInsertTableHandle beginInsert(ConnectorSession session, ConnectorTableHandle tableHandle) + public boolean supportsMissingColumnsOnInsert() + { + return false; + } + + @Override + public ConnectorInsertTableHandle beginInsert(ConnectorSession session, ConnectorTableHandle tableHandle, List columns) { JdbcTableHandle handle = (JdbcTableHandle) tableHandle; List allColumns = phoenixClient.getColumns(session, handle); @@ -329,7 +328,7 @@ public void dropTable(ConnectorSession session, ConnectorTableHandle tableHandle 
phoenixClient.dropTable(JdbcIdentity.from(session), (JdbcTableHandle) tableHandle); } - public JdbcOutputTableHandle createTable(ConnectorSession session, ConnectorTableMetadata tableMetadata) + private JdbcOutputTableHandle createTable(ConnectorSession session, ConnectorTableMetadata tableMetadata) { SchemaTableName schemaTableName = tableMetadata.getTable(); Optional schema = Optional.of(schemaTableName.getSchemaName()); @@ -415,47 +414,4 @@ public JdbcOutputTableHandle createTable(ConnectorSession session, ConnectorTabl throw new PrestoException(PHOENIX_METADATA_ERROR, "Error creating Phoenix table", e); } } - - @Override - public List listTables(ConnectorSession session, Optional schemaName) - { - return phoenixClient.getTableNames(JdbcIdentity.from(session), schemaName); - } - - @Override - public Map getColumnHandles(ConnectorSession session, ConnectorTableHandle tableHandle) - { - JdbcTableHandle jdbcTableHandle = (JdbcTableHandle) tableHandle; - - ImmutableMap.Builder columnHandles = ImmutableMap.builder(); - for (JdbcColumnHandle column : phoenixClient.getColumns(session, jdbcTableHandle)) { - columnHandles.put(column.getColumnMetadata().getName(), column); - } - return columnHandles.build(); - } - - @Override - public ColumnMetadata getColumnMetadata(ConnectorSession session, ConnectorTableHandle tableHandle, ColumnHandle columnHandle) - { - return ((JdbcColumnHandle) columnHandle).getColumnMetadata(); - } - - @Override - public Map> listTableColumns(ConnectorSession session, SchemaTablePrefix prefix) - { - ImmutableMap.Builder> columns = ImmutableMap.builder(); - List tables = prefix.toOptionalSchemaTableName() - .>map(ImmutableList::of) - .orElseGet(() -> listTables(session, prefix.getSchema())); - for (SchemaTableName tableName : tables) { - try { - phoenixClient.getTableHandle(JdbcIdentity.from(session), tableName) - .ifPresent(tableHandle -> columns.put(tableName, getTableMetadata(session, tableHandle).getColumns())); - } - catch 
(TableNotFoundException e) { - // table disappeared during listing operation - } - } - return columns.build(); - } } From eab2a455d60c64cff17ca38c7908d3cb349591a6 Mon Sep 17 00:00:00 2001 From: praveenkrishna Date: Wed, 8 Apr 2020 12:06:07 +0530 Subject: [PATCH 223/519] Remove TableLayout from Phoenix connector --- .../plugin/phoenix/PhoenixHandleResolver.java | 7 -- .../plugin/phoenix/PhoenixMetadata.java | 24 ------ .../plugin/phoenix/PhoenixSplitManager.java | 21 +++-- .../phoenix/PhoenixTableLayoutHandle.java | 84 ------------------- 4 files changed, 10 insertions(+), 126 deletions(-) delete mode 100644 presto-phoenix/src/main/java/io/prestosql/plugin/phoenix/PhoenixTableLayoutHandle.java diff --git a/presto-phoenix/src/main/java/io/prestosql/plugin/phoenix/PhoenixHandleResolver.java b/presto-phoenix/src/main/java/io/prestosql/plugin/phoenix/PhoenixHandleResolver.java index be8f6da0b2f1..a5ab634da724 100644 --- a/presto-phoenix/src/main/java/io/prestosql/plugin/phoenix/PhoenixHandleResolver.java +++ b/presto-phoenix/src/main/java/io/prestosql/plugin/phoenix/PhoenixHandleResolver.java @@ -22,7 +22,6 @@ import io.prestosql.spi.connector.ConnectorOutputTableHandle; import io.prestosql.spi.connector.ConnectorSplit; import io.prestosql.spi.connector.ConnectorTableHandle; -import io.prestosql.spi.connector.ConnectorTableLayoutHandle; import io.prestosql.spi.connector.ConnectorTransactionHandle; public class PhoenixHandleResolver @@ -40,12 +39,6 @@ public Class getTableHandleClass() return JdbcTableHandle.class; } - @Override - public Class getTableLayoutHandleClass() - { - return PhoenixTableLayoutHandle.class; - } - @Override public Class getColumnHandleClass() { diff --git a/presto-phoenix/src/main/java/io/prestosql/plugin/phoenix/PhoenixMetadata.java b/presto-phoenix/src/main/java/io/prestosql/plugin/phoenix/PhoenixMetadata.java index d28db6e08eb7..8eb43f0ab0bf 100644 --- a/presto-phoenix/src/main/java/io/prestosql/plugin/phoenix/PhoenixMetadata.java +++ 
b/presto-phoenix/src/main/java/io/prestosql/plugin/phoenix/PhoenixMetadata.java @@ -30,11 +30,7 @@ import io.prestosql.spi.connector.ConnectorOutputTableHandle; import io.prestosql.spi.connector.ConnectorSession; import io.prestosql.spi.connector.ConnectorTableHandle; -import io.prestosql.spi.connector.ConnectorTableLayout; -import io.prestosql.spi.connector.ConnectorTableLayoutHandle; -import io.prestosql.spi.connector.ConnectorTableLayoutResult; import io.prestosql.spi.connector.ConnectorTableMetadata; -import io.prestosql.spi.connector.Constraint; import io.prestosql.spi.connector.SchemaNotFoundException; import io.prestosql.spi.connector.SchemaTableName; import io.prestosql.spi.security.PrestoPrincipal; @@ -111,20 +107,6 @@ public JdbcTableHandle getTableHandle(ConnectorSession session, SchemaTableName .orElse(null); } - @Override - public List getTableLayouts(ConnectorSession session, ConnectorTableHandle table, Constraint constraint, Optional> desiredColumns) - { - JdbcTableHandle tableHandle = (JdbcTableHandle) table; - ConnectorTableLayout layout = new ConnectorTableLayout(new PhoenixTableLayoutHandle(tableHandle, constraint.getSummary(), desiredColumns)); - return ImmutableList.of(new ConnectorTableLayoutResult(layout, constraint.getSummary())); - } - - @Override - public ConnectorTableLayout getTableLayout(ConnectorSession session, ConnectorTableLayoutHandle handle) - { - return new ConnectorTableLayout(handle); - } - @Override public ConnectorTableMetadata getTableMetadata(ConnectorSession session, ConnectorTableHandle table) { @@ -141,12 +123,6 @@ private ConnectorTableMetadata getTableMetadata(ConnectorSession session, Connec return new ConnectorTableMetadata(handle.getSchemaTableName(), columnMetadata, getTableProperties(session, handle)); } - @Override - public boolean usesLegacyTableLayouts() - { - return true; - } - private Map getTableProperties(ConnectorSession session, JdbcTableHandle handle) { ImmutableMap.Builder properties = 
ImmutableMap.builder(); diff --git a/presto-phoenix/src/main/java/io/prestosql/plugin/phoenix/PhoenixSplitManager.java b/presto-phoenix/src/main/java/io/prestosql/plugin/phoenix/PhoenixSplitManager.java index 40661bd5796b..bf06c14c14b0 100644 --- a/presto-phoenix/src/main/java/io/prestosql/plugin/phoenix/PhoenixSplitManager.java +++ b/presto-phoenix/src/main/java/io/prestosql/plugin/phoenix/PhoenixSplitManager.java @@ -25,7 +25,7 @@ import io.prestosql.spi.connector.ConnectorSplit; import io.prestosql.spi.connector.ConnectorSplitManager; import io.prestosql.spi.connector.ConnectorSplitSource; -import io.prestosql.spi.connector.ConnectorTableLayoutHandle; +import io.prestosql.spi.connector.ConnectorTableHandle; import io.prestosql.spi.connector.ConnectorTransactionHandle; import io.prestosql.spi.connector.FixedSplitSource; import org.apache.hadoop.hbase.HRegionLocation; @@ -71,23 +71,22 @@ public PhoenixSplitManager(PhoenixClient phoenixClient) } @Override - public ConnectorSplitSource getSplits(ConnectorTransactionHandle transactionHandle, ConnectorSession session, ConnectorTableLayoutHandle layout, SplitSchedulingStrategy splitSchedulingStrategy) + public ConnectorSplitSource getSplits(ConnectorTransactionHandle transactionHandle, ConnectorSession session, ConnectorTableHandle table, SplitSchedulingStrategy splitSchedulingStrategy) { - PhoenixTableLayoutHandle layoutHandle = (PhoenixTableLayoutHandle) layout; - JdbcTableHandle handle = layoutHandle.getTable(); + JdbcTableHandle tableHandle = (JdbcTableHandle) table; try (PhoenixConnection connection = phoenixClient.getConnection(JdbcIdentity.from(session))) { - List columns = layoutHandle.getDesiredColumns() + List columns = tableHandle.getColumns() .map(columnSet -> columnSet.stream().map(JdbcColumnHandle.class::cast).collect(toList())) - .orElseGet(() -> phoenixClient.getColumns(session, handle)); + .orElseGet(() -> phoenixClient.getColumns(session, tableHandle)); PhoenixPreparedStatement inputQuery = 
(PhoenixPreparedStatement) new QueryBuilder(SchemaUtil.ESCAPE_CHARACTER).buildSql( phoenixClient, session, connection, - handle.getCatalogName(), - handle.getSchemaName(), - handle.getTableName(), + tableHandle.getCatalogName(), + tableHandle.getSchemaName(), + tableHandle.getTableName(), columns, - layoutHandle.getTupleDomain(), + tableHandle.getConstraint(), Optional.empty(), Function.identity()); @@ -96,7 +95,7 @@ public ConnectorSplitSource getSplits(ConnectorTransactionHandle transactionHand .map(split -> new PhoenixSplit( getSplitAddresses(split), new WrappedPhoenixInputSplit(split), - layoutHandle.getTupleDomain())) + tableHandle.getConstraint())) .collect(toImmutableList()); return new FixedSplitSource(splits); } diff --git a/presto-phoenix/src/main/java/io/prestosql/plugin/phoenix/PhoenixTableLayoutHandle.java b/presto-phoenix/src/main/java/io/prestosql/plugin/phoenix/PhoenixTableLayoutHandle.java deleted file mode 100644 index c09093443c96..000000000000 --- a/presto-phoenix/src/main/java/io/prestosql/plugin/phoenix/PhoenixTableLayoutHandle.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package io.prestosql.plugin.phoenix; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import io.prestosql.plugin.jdbc.JdbcTableHandle; -import io.prestosql.spi.connector.ColumnHandle; -import io.prestosql.spi.connector.ConnectorTableLayoutHandle; -import io.prestosql.spi.predicate.TupleDomain; - -import java.util.Objects; -import java.util.Optional; -import java.util.Set; - -public class PhoenixTableLayoutHandle - implements ConnectorTableLayoutHandle -{ - private final JdbcTableHandle table; - private final TupleDomain tupleDomain; - private final Optional> desiredColumns; - - @JsonCreator - public PhoenixTableLayoutHandle( - @JsonProperty("table") JdbcTableHandle table, - @JsonProperty("tupleDomain") TupleDomain tupleDomain, - @JsonProperty("desiredColumns") Optional> desiredColumns) - { - this.table = table; - this.tupleDomain = tupleDomain; - this.desiredColumns = desiredColumns; - } - - @JsonProperty - public JdbcTableHandle getTable() - { - return table; - } - - @JsonProperty - public TupleDomain getTupleDomain() - { - return tupleDomain; - } - - @JsonProperty - public Optional> getDesiredColumns() - { - return desiredColumns; - } - - @Override - public boolean equals(Object o) - { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - PhoenixTableLayoutHandle that = (PhoenixTableLayoutHandle) o; - return Objects.equals(table, that.table) && - Objects.equals(tupleDomain, that.tupleDomain) && - Objects.equals(desiredColumns, that.desiredColumns); - } - - @Override - public int hashCode() - { - return Objects.hash(table, tupleDomain, desiredColumns); - } -} From f6dcef4992cb7a447e419a4990e00fcd504960c8 Mon Sep 17 00:00:00 2001 From: praveenkrishna Date: Fri, 10 Apr 2020 14:47:16 +0530 Subject: [PATCH 224/519] Add check to ensure server tar.gz and RPM do not go above 1GB --- presto-server-rpm/pom.xml | 25 +++++++++++++++++++++++++ 
presto-server/pom.xml | 30 +++++++++++++++++++++++++++++- 2 files changed, 54 insertions(+), 1 deletion(-) diff --git a/presto-server-rpm/pom.xml b/presto-server-rpm/pom.xml index aa91eaaeb75e..bbd38bb822d6 100644 --- a/presto-server-rpm/pom.xml +++ b/presto-server-rpm/pom.xml @@ -230,6 +230,31 @@ + + org.apache.maven.plugins + maven-enforcer-plugin + + + enforce-file-size + verify + + enforce + + + + + + 1073741824 + + ${project.build.directory}/${project.build.finalName}.x86_64.rpm + + + + true + + + + diff --git a/presto-server/pom.xml b/presto-server/pom.xml index 7bb28d437dae..b3c41fb09cb1 100644 --- a/presto-server/pom.xml +++ b/presto-server/pom.xml @@ -15,7 +15,7 @@ ${project.parent.basedir} - true + false true true true @@ -24,4 +24,32 @@ io.prestosql.server.PrestoServer ${project.artifactId} + + + + + org.apache.maven.plugins + maven-enforcer-plugin + + + + + 1073741824 + + ${project.build.directory}/${project.artifactId}-${project.version}.tar.gz + + + + + + + verify + + enforce + + + + + + From 4aab57362c76bcd6fcc0f67f809ca3d0ec7a4bca Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Grzegorz=20Kokosi=C5=84ski?= Date: Thu, 23 Apr 2020 06:43:43 +0200 Subject: [PATCH 225/519] Use unique tableName in testInsert --- .../AbstractTestDistributedQueries.java | 27 ++++++++++--------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java index 7961b79a4dde..1e7fc952ee2f 100644 --- a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java +++ b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java @@ -444,21 +444,22 @@ public void testInsert() { @Language("SQL") String query = "SELECT orderdate, orderkey, totalprice FROM orders"; - assertUpdate("CREATE TABLE test_insert AS " + query + " WITH NO DATA", 0); - assertQuery("SELECT 
count(*) FROM test_insert", "SELECT 0"); + String tableName = "test_insert_" + randomTableSuffix(); + assertUpdate("CREATE TABLE " + tableName + " AS " + query + " WITH NO DATA", 0); + assertQuery("SELECT count(*) FROM " + tableName + "", "SELECT 0"); - assertUpdate("INSERT INTO test_insert " + query, "SELECT count(*) FROM orders"); + assertUpdate("INSERT INTO " + tableName + " " + query, "SELECT count(*) FROM orders"); - assertQuery("SELECT * FROM test_insert", query); + assertQuery("SELECT * FROM " + tableName + "", query); - assertUpdate("INSERT INTO test_insert (orderkey) VALUES (-1)", 1); - assertUpdate("INSERT INTO test_insert (orderkey) VALUES (null)", 1); - assertUpdate("INSERT INTO test_insert (orderdate) VALUES (DATE '2001-01-01')", 1); - assertUpdate("INSERT INTO test_insert (orderkey, orderdate) VALUES (-2, DATE '2001-01-02')", 1); - assertUpdate("INSERT INTO test_insert (orderdate, orderkey) VALUES (DATE '2001-01-03', -3)", 1); - assertUpdate("INSERT INTO test_insert (totalprice) VALUES (1234)", 1); + assertUpdate("INSERT INTO " + tableName + " (orderkey) VALUES (-1)", 1); + assertUpdate("INSERT INTO " + tableName + " (orderkey) VALUES (null)", 1); + assertUpdate("INSERT INTO " + tableName + " (orderdate) VALUES (DATE '2001-01-01')", 1); + assertUpdate("INSERT INTO " + tableName + " (orderkey, orderdate) VALUES (-2, DATE '2001-01-02')", 1); + assertUpdate("INSERT INTO " + tableName + " (orderdate, orderkey) VALUES (DATE '2001-01-03', -3)", 1); + assertUpdate("INSERT INTO " + tableName + " (totalprice) VALUES (1234)", 1); - assertQuery("SELECT * FROM test_insert", query + assertQuery("SELECT * FROM " + tableName + "", query + " UNION ALL SELECT null, -1, null" + " UNION ALL SELECT null, null, null" + " UNION ALL SELECT DATE '2001-01-01', null, null" @@ -469,13 +470,13 @@ public void testInsert() // UNION query produces columns in the opposite order // of how they are declared in the table schema assertUpdate( - "INSERT INTO test_insert (orderkey, 
orderdate, totalprice) " + + "INSERT INTO " + tableName + " (orderkey, orderdate, totalprice) " + "SELECT orderkey, orderdate, totalprice FROM orders " + "UNION ALL " + "SELECT orderkey, orderdate, totalprice FROM orders", "SELECT 2 * count(*) FROM orders"); - assertUpdate("DROP TABLE test_insert"); + assertUpdate("DROP TABLE " + tableName); } @Test From 0f4bbc5d7470b455e4fc4c7b4e1a5a656894a17b Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Thu, 23 Apr 2020 21:51:29 +0200 Subject: [PATCH 226/519] Add requireNonNull in constructor --- .../java/io/prestosql/sql/planner/plan/MarkDistinctNode.java | 4 ++-- .../main/java/io/prestosql/sql/planner/plan/OutputNode.java | 3 ++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/plan/MarkDistinctNode.java b/presto-main/src/main/java/io/prestosql/sql/planner/plan/MarkDistinctNode.java index 4eaea6c26aba..0b951d11a58a 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/plan/MarkDistinctNode.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/plan/MarkDistinctNode.java @@ -45,8 +45,8 @@ public MarkDistinctNode(@JsonProperty("id") PlanNodeId id, @JsonProperty("hashSymbol") Optional hashSymbol) { super(id); - this.source = source; - this.markerSymbol = markerSymbol; + this.source = requireNonNull(source, "source is null"); + this.markerSymbol = requireNonNull(markerSymbol, "markerSymbol is null"); this.hashSymbol = requireNonNull(hashSymbol, "hashSymbol is null"); requireNonNull(distinctSymbols, "distinctSymbols is null"); checkArgument(!distinctSymbols.isEmpty(), "distinctSymbols cannot be empty"); diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/plan/OutputNode.java b/presto-main/src/main/java/io/prestosql/sql/planner/plan/OutputNode.java index 02d37f141877..643b1aeee9f1 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/plan/OutputNode.java +++ 
b/presto-main/src/main/java/io/prestosql/sql/planner/plan/OutputNode.java @@ -44,10 +44,11 @@ public OutputNode(@JsonProperty("id") PlanNodeId id, requireNonNull(source, "source is null"); requireNonNull(columnNames, "columnNames is null"); + requireNonNull(outputs, "outputs is null"); Preconditions.checkArgument(columnNames.size() == outputs.size(), "columnNames and assignments sizes don't match"); this.source = source; - this.columnNames = columnNames; + this.columnNames = ImmutableList.copyOf(columnNames); this.outputs = ImmutableList.copyOf(outputs); } From 8200cf9d5ff8885645f2f61570cdb12179114c6b Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Thu, 23 Apr 2020 22:45:02 +0200 Subject: [PATCH 227/519] Rename rule --- .../main/java/io/prestosql/sql/planner/PlanOptimizers.java | 4 ++-- ...runeOutputColumns.java => PruneOutputSourceColumns.java} | 2 +- ...OutputColumns.java => TestPruneOutputSourceColumns.java} | 6 +++--- 3 files changed, 6 insertions(+), 6 deletions(-) rename presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/{PruneOutputColumns.java => PruneOutputSourceColumns.java} (97%) rename presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/{TestPruneOutputColumns.java => TestPruneOutputSourceColumns.java} (93%) diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java b/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java index 2e157ce5e492..8c79f5e43de7 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java @@ -83,7 +83,7 @@ import io.prestosql.sql.planner.iterative.rule.PruneMarkDistinctColumns; import io.prestosql.sql.planner.iterative.rule.PruneOffsetColumns; import io.prestosql.sql.planner.iterative.rule.PruneOrderByInAggregation; -import io.prestosql.sql.planner.iterative.rule.PruneOutputColumns; +import 
io.prestosql.sql.planner.iterative.rule.PruneOutputSourceColumns; import io.prestosql.sql.planner.iterative.rule.PruneProjectColumns; import io.prestosql.sql.planner.iterative.rule.PruneSampleColumns; import io.prestosql.sql.planner.iterative.rule.PruneSemiJoinColumns; @@ -270,7 +270,7 @@ public PlanOptimizers( new PruneLimitColumns(), new PruneMarkDistinctColumns(), new PruneOffsetColumns(), - new PruneOutputColumns(), + new PruneOutputSourceColumns(), new PruneProjectColumns(), new PruneSampleColumns(), new PruneSemiJoinColumns(), diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneOutputColumns.java b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneOutputSourceColumns.java similarity index 97% rename from presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneOutputColumns.java rename to presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneOutputSourceColumns.java index 19d0e74cc0b6..ea5fccb28f2b 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneOutputColumns.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneOutputSourceColumns.java @@ -22,7 +22,7 @@ import static io.prestosql.sql.planner.iterative.rule.Util.restrictChildOutputs; import static io.prestosql.sql.planner.plan.Patterns.output; -public class PruneOutputColumns +public class PruneOutputSourceColumns implements Rule { private static final Pattern PATTERN = output(); diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneOutputColumns.java b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneOutputSourceColumns.java similarity index 93% rename from presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneOutputColumns.java rename to presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneOutputSourceColumns.java index 6fae30d22179..75d3a0f10dc8 100644 --- 
a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneOutputColumns.java +++ b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneOutputSourceColumns.java @@ -24,13 +24,13 @@ import static io.prestosql.sql.planner.assertions.PlanMatchPattern.strictProject; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.values; -public class TestPruneOutputColumns +public class TestPruneOutputSourceColumns extends BaseRuleTest { @Test public void testNotAllOutputsReferenced() { - tester().assertThat(new PruneOutputColumns()) + tester().assertThat(new PruneOutputSourceColumns()) .on(p -> { Symbol a = p.symbol("a"); Symbol b = p.symbol("b"); @@ -50,7 +50,7 @@ public void testNotAllOutputsReferenced() @Test public void testAllOutputsReferenced() { - tester().assertThat(new PruneOutputColumns()) + tester().assertThat(new PruneOutputSourceColumns()) .on(p -> { Symbol a = p.symbol("a"); Symbol b = p.symbol("b"); From cf6c872addbcd40a331a06c645292ccc6fb42e7e Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Fri, 24 Apr 2020 00:19:59 +0200 Subject: [PATCH 228/519] Use helper method --- .../prestosql/sql/planner/RelationPlanner.java | 17 ++++++----------- 1 file changed, 6 insertions(+), 11 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/RelationPlanner.java b/presto-main/src/main/java/io/prestosql/sql/planner/RelationPlanner.java index 845776dfe085..bef3728a6be6 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/RelationPlanner.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/RelationPlanner.java @@ -196,15 +196,10 @@ private RelationPlan addRowFilters(Table node, RelationPlan plan) private RelationPlan addColumnMasks(Table table, RelationPlan plan) { - Map> columnMasks = analysis.getColumnMasks(table); - - PlanNode root = plan.getRoot(); - List mappings = plan.getFieldMappings(); - - TranslationMap translations = new 
TranslationMap(plan, analysis, lambdaDeclarationToSymbolMap); - translations.setFieldMappings(mappings); + PlanBuilder planBuilder = initializePlanBuilder(plan); + TranslationMap translations = planBuilder.getTranslations(); - PlanBuilder planBuilder = new PlanBuilder(translations, root); + Map> columnMasks = analysis.getColumnMasks(table); for (int i = 0; i < plan.getDescriptor().getAllFieldCount(); i++) { Field field = plan.getDescriptor().getFieldByIndex(i); @@ -213,10 +208,10 @@ private RelationPlan addColumnMasks(Table table, RelationPlan plan) planBuilder = subqueryPlanner.handleSubqueries(planBuilder, mask, mask); Map assignments = new LinkedHashMap<>(); - for (Symbol symbol : root.getOutputSymbols()) { + for (Symbol symbol : plan.getRoot().getOutputSymbols()) { assignments.put(symbol, symbol.toSymbolReference()); } - assignments.put(mappings.get(i), translations.rewrite(mask)); + assignments.put(plan.getFieldMappings().get(i), translations.rewrite(mask)); planBuilder = planBuilder.withNewRoot(new ProjectNode( idAllocator.getNextId(), @@ -225,7 +220,7 @@ private RelationPlan addColumnMasks(Table table, RelationPlan plan) } } - return new RelationPlan(planBuilder.getRoot(), plan.getScope(), mappings); + return new RelationPlan(planBuilder.getRoot(), plan.getScope(), plan.getFieldMappings()); } @Override From b186c4f62aa7849dc1861951a4da1cbf3529c502 Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Wed, 22 Apr 2020 22:46:29 +0200 Subject: [PATCH 229/519] Simplify initialization of TranslationMap --- .../sql/planner/RelationPlanner.java | 24 +++++-------------- 1 file changed, 6 insertions(+), 18 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/RelationPlanner.java b/presto-main/src/main/java/io/prestosql/sql/planner/RelationPlanner.java index bef3728a6be6..60bc5d937088 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/RelationPlanner.java +++ 
b/presto-main/src/main/java/io/prestosql/sql/planner/RelationPlanner.java @@ -401,13 +401,7 @@ else if (firstDependencies.stream().allMatch(right::canResolve) && secondDepende // subqueries can be applied only to one side of join - left side is selected in arbitrary way leftPlanBuilder = subqueryPlanner.handleUncorrelatedSubqueries(leftPlanBuilder, complexJoinExpressions, node); } - TranslationMap translationMap = translationMapFromSourceOutputs( - ImmutableList.builder() - .addAll(leftPlanBuilder.getRoot().getOutputSymbols()) - .addAll(rightPlanBuilder.getRoot().getOutputSymbols()) - .build(), - node, - outputSymbols); + TranslationMap translationMap = initializeTranslationMap(node, outputSymbols); translationMap.setFieldMappings(outputSymbols); translationMap.putExpressionMappingsFrom(leftPlanBuilder.getTranslations()); translationMap.putExpressionMappingsFrom(rightPlanBuilder.getTranslations()); @@ -613,13 +607,7 @@ private RelationPlan planCorrelatedJoin(Join join, RelationPlan leftPlan, Latera .addAll(leftPlan.getFieldMappings()) .addAll(rightPlan.getFieldMappings()) .build(); - TranslationMap translationMap = translationMapFromSourceOutputs( - ImmutableList.builder() - .addAll(leftPlanBuilder.getRoot().getOutputSymbols()) - .addAll(rightPlanBuilder.getRoot().getOutputSymbols()) - .build(), - join, - rewriterOutputSymbols); + TranslationMap translationMap = initializeTranslationMap(join, rewriterOutputSymbols); translationMap.setFieldMappings(rewriterOutputSymbols); translationMap.putExpressionMappingsFrom(leftPlanBuilder.getTranslations()); translationMap.putExpressionMappingsFrom(rightPlanBuilder.getTranslations()); @@ -752,7 +740,7 @@ protected RelationPlan visitValues(Values node, Void context) outputSymbolsBuilder.add(symbol); } List outputSymbols = outputSymbolsBuilder.build(); - TranslationMap translationMap = translationMapFromSourceOutputs(ImmutableList.of(), node, outputSymbols); + TranslationMap translationMap = initializeTranslationMap(node, 
outputSymbols); ImmutableList.Builder> rows = ImmutableList.builder(); for (Expression row : node.getRows()) { @@ -787,7 +775,7 @@ protected RelationPlan visitUnnest(Unnest node, Void context) List unnestedSymbols = outputSymbolsBuilder.build(); // If we got here, then we must be unnesting a constant, and not be in a join (where there could be column references) - TranslationMap translationMap = translationMapFromSourceOutputs(ImmutableList.of(), node, unnestedSymbols); + TranslationMap translationMap = initializeTranslationMap(node, unnestedSymbols); ImmutableList.Builder argumentSymbols = ImmutableList.builder(); ImmutableList.Builder values = ImmutableList.builder(); ImmutableMap.Builder> unnestSymbols = ImmutableMap.builder(); @@ -827,11 +815,11 @@ else if (type instanceof MapType) { return new RelationPlan(unnestNode, scope, unnestedSymbols); } - private TranslationMap translationMapFromSourceOutputs(List sourceOutputs, Node node, List outputSymbols) + private TranslationMap initializeTranslationMap(Node node, List outputSymbols) { PlanNode dummy = new ValuesNode( idAllocator.getNextId(), - ImmutableList.copyOf(requireNonNull(sourceOutputs, "sourceOutputs is null")), + ImmutableList.copyOf(requireNonNull(outputSymbols, "outputSymbols is null")), ImmutableList.of()); RelationPlan dummyRelationPlan = new RelationPlan(dummy, analysis.getScope(node), outputSymbols); From b6e09693e2e0b23f28420b1abbd7646e6d3362f5 Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Wed, 22 Apr 2020 23:22:11 +0200 Subject: [PATCH 230/519] Fix CorrelatedJoin planning Before this change, planning of the following query failed: SELECT * FROM (VALUES 1) t1(a) JOIN (VALUES 2) t2(b) ON a rewriterOutputSymbols = ImmutableList.builder() + List outputSymbols = ImmutableList.builder() .addAll(leftPlan.getFieldMappings()) .addAll(rightPlan.getFieldMappings()) .build(); - TranslationMap translationMap = initializeTranslationMap(join, rewriterOutputSymbols); - 
translationMap.setFieldMappings(rewriterOutputSymbols); + TranslationMap translationMap = initializeTranslationMap(join, outputSymbols); + translationMap.setFieldMappings(outputSymbols); translationMap.putExpressionMappingsFrom(leftPlanBuilder.getTranslations()); translationMap.putExpressionMappingsFrom(rightPlanBuilder.getTranslations()); @@ -622,10 +622,6 @@ private RelationPlan planCorrelatedJoin(Join join, RelationPlan leftPlan, Latera CorrelatedJoinNode.Type.typeConvert(join.getType()), rewrittenFilterCondition); - List outputSymbols = ImmutableList.builder() - .addAll(leftPlan.getRoot().getOutputSymbols()) - .addAll(rightPlan.getRoot().getOutputSymbols()) - .build(); return new RelationPlan(planBuilder.getRoot(), analysis.getScope(join), outputSymbols); } diff --git a/presto-main/src/test/java/io/prestosql/sql/query/TestCorrelatedJoin.java b/presto-main/src/test/java/io/prestosql/sql/query/TestCorrelatedJoin.java new file mode 100644 index 000000000000..b1c50fc1cc49 --- /dev/null +++ b/presto-main/src/test/java/io/prestosql/sql/query/TestCorrelatedJoin.java @@ -0,0 +1,44 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.sql.query; + +import org.testng.annotations.AfterClass; +import org.testng.annotations.BeforeClass; +import org.testng.annotations.Test; + +public class TestCorrelatedJoin +{ + private QueryAssertions assertions; + + @BeforeClass + public void init() + { + assertions = new QueryAssertions(); + } + + @AfterClass(alwaysRun = true) + public void teardown() + { + assertions.close(); + assertions = null; + } + + @Test + public void testJoinInCorrelatedJoinInput() + { + assertions.assertQuery( + "SELECT * FROM (VALUES 1) t1(a) JOIN (VALUES 2) t2(b) ON a < b, LATERAL (VALUES 3)", + "VALUES (1, 2, 3)"); + } +} From e03c76ff7a6a4757d2e4b72d506d2910fbd8c923 Mon Sep 17 00:00:00 2001 From: Yuya Ebihara Date: Thu, 23 Apr 2020 00:01:28 +0900 Subject: [PATCH 231/519] Fix flaky TestCassandraTokenSplitManager.testNonEmptyTable --- .../plugin/cassandra/TestCassandraTokenSplitManager.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/presto-cassandra/src/test/java/io/prestosql/plugin/cassandra/TestCassandraTokenSplitManager.java b/presto-cassandra/src/test/java/io/prestosql/plugin/cassandra/TestCassandraTokenSplitManager.java index 06d72e53d6a8..8a640aea9bc6 100644 --- a/presto-cassandra/src/test/java/io/prestosql/plugin/cassandra/TestCassandraTokenSplitManager.java +++ b/presto-cassandra/src/test/java/io/prestosql/plugin/cassandra/TestCassandraTokenSplitManager.java @@ -97,7 +97,9 @@ public void testNonEmptyTable() } server.refreshSizeEstimates(KEYSPACE, tableName); List splits = splitManager.getSplits(KEYSPACE, tableName, Optional.empty()); - assertThat(splits).hasSize(PARTITION_COUNT / SPLIT_SIZE); + int expectedTokenSplitSize = PARTITION_COUNT / SPLIT_SIZE; + // Use hasSizeBetween because Cassandra server may overestimate the size + assertThat(splits).hasSizeBetween(expectedTokenSplitSize, expectedTokenSplitSize + 1); session.execute(format("DROP TABLE %s.%s", KEYSPACE, tableName)); } } From 
6428a5230d5f5108a8f06b8dfa862e14f6073080 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Fri, 24 Apr 2020 00:24:15 +0200 Subject: [PATCH 232/519] Put getter before the setter --- .../main/java/io/prestosql/plugin/hive/HiveConfig.java | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveConfig.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveConfig.java index 380e69a97be9..edd495697e2c 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveConfig.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveConfig.java @@ -897,6 +897,11 @@ public HiveConfig setAllowRegisterPartition(boolean allowRegisterPartition) return this; } + public boolean isQueryPartitionFilterRequired() + { + return queryPartitionFilterRequired; + } + @Config("hive.query-partition-filter-required") @ConfigDescription("Require filter on at least one partition column") public HiveConfig setQueryPartitionFilterRequired(boolean queryPartitionFilterRequired) @@ -905,11 +910,6 @@ public HiveConfig setQueryPartitionFilterRequired(boolean queryPartitionFilterRe return this; } - public boolean isQueryPartitionFilterRequired() - { - return queryPartitionFilterRequired; - } - public boolean getPartitionUseColumnNames() { return partitionUseColumnNames; From 7d976af9b8b890772a9adcb8a99346795f751830 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Wed, 22 Apr 2020 23:16:29 +0200 Subject: [PATCH 233/519] Smoke test column name with comma --- .../prestosql/plugin/hive/TestHiveDistributedQueries.java | 6 ++++++ .../prestosql/testing/AbstractTestDistributedQueries.java | 1 + 2 files changed, 7 insertions(+) diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveDistributedQueries.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveDistributedQueries.java index 806f50a5bfca..b268ff58ba0f 100644 --- 
a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveDistributedQueries.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveDistributedQueries.java @@ -69,6 +69,12 @@ public void testColumnName(String columnName) .hasMessageMatching("Table '.*' does not have columns \\[atrailingspace ]"); throw new SkipException("works incorrectly, column name is trimmed"); } + if (columnName.equals("a,comma")) { + // TODO (https://github.com/prestosql/presto/issues/3537) + assertThatThrownBy(() -> super.testColumnName(columnName)) + .hasMessageMatching("Table '.*' does not have columns \\[a,comma]"); + throw new SkipException("works incorrectly"); + } super.testColumnName(columnName); } diff --git a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java index 1e7fc952ee2f..dd1a16d7d07f 100644 --- a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java +++ b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java @@ -1304,6 +1304,7 @@ public Object[][] testColumnNameDataProvider() {"a space"}, {"atrailingspace "}, {"a.dot"}, + {"a,comma"}, {"a:colon"}, {"a;semicolon"}, {"an@at"}, From 38b50e8f3ec2a88b6d9a4771ca633bb5daf8e4ab Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Fri, 24 Apr 2020 12:22:51 +0200 Subject: [PATCH 234/519] Smoke test column name with leading space --- .../io/prestosql/plugin/hive/TestHiveDistributedQueries.java | 4 ++-- .../io/prestosql/testing/AbstractTestDistributedQueries.java | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveDistributedQueries.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveDistributedQueries.java index b268ff58ba0f..d3066debb919 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveDistributedQueries.java +++ 
b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveDistributedQueries.java @@ -63,10 +63,10 @@ public void testExplainOfCreateTableAs() @Override public void testColumnName(String columnName) { - if (columnName.equals("atrailingspace ")) { + if (columnName.equals("atrailingspace ") || columnName.equals(" aleadingspace")) { // TODO (https://github.com/prestosql/presto/issues/3461) assertThatThrownBy(() -> super.testColumnName(columnName)) - .hasMessageMatching("Table '.*' does not have columns \\[atrailingspace ]"); + .hasMessageMatching("Table '.*' does not have columns \\[" + columnName + "]"); throw new SkipException("works incorrectly, column name is trimmed"); } if (columnName.equals("a,comma")) { diff --git a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java index dd1a16d7d07f..723378469721 100644 --- a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java +++ b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java @@ -1303,6 +1303,7 @@ public Object[][] testColumnNameDataProvider() {"a-hyphen-minus"}, // ASCII '-' is HYPHEN-MINUS in Unicode {"a space"}, {"atrailingspace "}, + {" aleadingspace"}, {"a.dot"}, {"a,comma"}, {"a:colon"}, From 84429d84d8fd75d686aa63d0223885c793089395 Mon Sep 17 00:00:00 2001 From: Martin Traverso Date: Fri, 24 Apr 2020 08:16:27 -0700 Subject: [PATCH 235/519] Apply filters and masks for tables referenced by views --- .../prestosql/security/ViewAccessControl.java | 15 ++++ .../prestosql/sql/query/QueryAssertions.java | 20 +++-- .../prestosql/sql/query/TestColumnMask.java | 90 +++++++++++++++++-- .../io/prestosql/sql/query/TestRowFilter.java | 87 ++++++++++++++++-- 4 files changed, 193 insertions(+), 19 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/security/ViewAccessControl.java 
b/presto-main/src/main/java/io/prestosql/security/ViewAccessControl.java index 5faf4e1a9e51..2113aa75fd39 100644 --- a/presto-main/src/main/java/io/prestosql/security/ViewAccessControl.java +++ b/presto-main/src/main/java/io/prestosql/security/ViewAccessControl.java @@ -15,7 +15,10 @@ import io.prestosql.metadata.QualifiedObjectName; import io.prestosql.spi.security.Identity; +import io.prestosql.spi.security.ViewExpression; +import io.prestosql.spi.type.Type; +import java.util.List; import java.util.Set; import static java.util.Objects.requireNonNull; @@ -59,4 +62,16 @@ public void checkCanGrantExecuteFunctionPrivilege(SecurityContext context, Strin { delegate.checkCanGrantExecuteFunctionPrivilege(context, functionName, grantee, grantOption); } + + @Override + public List getRowFilters(SecurityContext context, QualifiedObjectName tableName) + { + return delegate.getRowFilters(context, tableName); + } + + @Override + public List getColumnMasks(SecurityContext context, QualifiedObjectName tableName, String columnName, Type type) + { + return delegate.getColumnMasks(context, tableName, columnName, type); + } } diff --git a/presto-main/src/test/java/io/prestosql/sql/query/QueryAssertions.java b/presto-main/src/test/java/io/prestosql/sql/query/QueryAssertions.java index fe21b47da14b..085357017a24 100644 --- a/presto-main/src/test/java/io/prestosql/sql/query/QueryAssertions.java +++ b/presto-main/src/test/java/io/prestosql/sql/query/QueryAssertions.java @@ -91,19 +91,24 @@ public void assertQueryAndPlan( public void assertQuery(@Language("SQL") String actual, @Language("SQL") String expected) { - assertQuery(actual, expected, false); + assertQuery(runner.getDefaultSession(), actual, expected, false); + } + + public void assertQuery(Session session, @Language("SQL") String actual, @Language("SQL") String expected) + { + assertQuery(session, actual, expected, false); } public void assertQueryOrdered(@Language("SQL") String actual, @Language("SQL") String expected) { - 
assertQuery(actual, expected, true); + assertQuery(runner.getDefaultSession(), actual, expected, true); } - public void assertQuery(@Language("SQL") String actual, @Language("SQL") String expected, boolean ensureOrdering) + public void assertQuery(Session session, @Language("SQL") String actual, @Language("SQL") String expected, boolean ensureOrdering) { MaterializedResult actualResults = null; try { - actualResults = execute(actual); + actualResults = execute(session, actual); } catch (RuntimeException ex) { fail("Execution of 'actual' query failed: " + actual, ex); @@ -160,9 +165,14 @@ public static void assertContains(MaterializedResult all, MaterializedResult exp } public MaterializedResult execute(@Language("SQL") String query) + { + return execute(runner.getDefaultSession(), query); + } + + public MaterializedResult execute(Session session, @Language("SQL") String query) { MaterializedResult actualResults; - actualResults = runner.execute(runner.getDefaultSession(), query).toTestTypes(); + actualResults = runner.execute(session, query).toTestTypes(); return actualResults; } diff --git a/presto-main/src/test/java/io/prestosql/sql/query/TestColumnMask.java b/presto-main/src/test/java/io/prestosql/sql/query/TestColumnMask.java index 0566409e9c17..61f988fac3c7 100644 --- a/presto-main/src/test/java/io/prestosql/sql/query/TestColumnMask.java +++ b/presto-main/src/test/java/io/prestosql/sql/query/TestColumnMask.java @@ -13,12 +13,18 @@ */ package io.prestosql.sql.query; +import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.prestosql.Session; +import io.prestosql.connector.MockConnectorFactory; import io.prestosql.metadata.QualifiedObjectName; import io.prestosql.plugin.tpch.TpchConnectorFactory; +import io.prestosql.spi.connector.ConnectorViewDefinition; +import io.prestosql.spi.connector.SchemaTableName; import io.prestosql.spi.security.Identity; import io.prestosql.spi.security.ViewExpression; +import 
io.prestosql.spi.type.BigintType; +import io.prestosql.spi.type.VarcharType; import io.prestosql.testing.LocalQueryRunner; import io.prestosql.testing.TestingAccessControlManager; import org.testng.annotations.AfterClass; @@ -33,25 +39,43 @@ public class TestColumnMask { private static final String CATALOG = "local"; + private static final String MOCK_CATALOG = "mock"; private static final String USER = "user"; + private static final String VIEW_OWNER = "view-owner"; private static final String RUN_AS_USER = "run-as-user"; + private static final Session SESSION = testSessionBuilder() + .setCatalog(CATALOG) + .setSchema(TINY_SCHEMA_NAME) + .setIdentity(Identity.forUser(USER).build()) + .build(); + private QueryAssertions assertions; private TestingAccessControlManager accessControl; @BeforeClass public void init() { - Session session = testSessionBuilder() - .setCatalog(CATALOG) - .setSchema(TINY_SCHEMA_NAME) - .setIdentity(Identity.forUser(USER).build()) - .build(); + LocalQueryRunner runner = LocalQueryRunner.builder(SESSION).build(); - LocalQueryRunner runner = LocalQueryRunner.builder(session) + runner.createCatalog(CATALOG, new TpchConnectorFactory(1), ImmutableMap.of()); + + ConnectorViewDefinition view = new ConnectorViewDefinition( + "SELECT nationkey, name FROM local.tiny.nation", + Optional.empty(), + Optional.empty(), + ImmutableList.of(new ConnectorViewDefinition.ViewColumn("nationkey", BigintType.BIGINT.getTypeId()), new ConnectorViewDefinition.ViewColumn("name", VarcharType.createVarcharType(25).getTypeId())), + Optional.empty(), + Optional.of(VIEW_OWNER), + false); + + MockConnectorFactory mock = MockConnectorFactory.builder() + .withGetViews((s, prefix) -> ImmutableMap.builder() + .put(new SchemaTableName("default", "nation_view"), view) + .build()) .build(); - runner.createCatalog(CATALOG, new TpchConnectorFactory(1), ImmutableMap.of()); + runner.createCatalog(MOCK_CATALOG, mock, ImmutableMap.of()); assertions = new QueryAssertions(runner); 
accessControl = assertions.getQueryRunner().getAccessControl(); @@ -149,6 +173,58 @@ public void testSubquery() }); } + @Test + public void testView() + { + // mask on the underlying table for view owner when running query as different user + assertions.executeExclusively(() -> { + accessControl.reset(); + accessControl.columnMask( + new QualifiedObjectName(CATALOG, "tiny", "nation"), + "name", + VIEW_OWNER, + new ViewExpression(VIEW_OWNER, Optional.empty(), Optional.empty(), "reverse(name)")); + + Session session = Session.builder(SESSION) + .setIdentity(Identity.forUser(RUN_AS_USER).build()) + .build(); + + assertions.assertQuery(session, "SELECT name FROM mock.default.nation_view WHERE nationkey = 1", "VALUES CAST('ANITNEGRA' AS VARCHAR(25))"); + }); + + // mask on the underlying table for view owner when running as themselves + assertions.executeExclusively(() -> { + accessControl.reset(); + accessControl.columnMask( + new QualifiedObjectName(CATALOG, "tiny", "nation"), + "name", + VIEW_OWNER, + new ViewExpression(VIEW_OWNER, Optional.of(CATALOG), Optional.of("tiny"), "reverse(name)")); + + Session session = Session.builder(SESSION) + .setIdentity(Identity.forUser(VIEW_OWNER).build()) + .build(); + + assertions.assertQuery(session, "SELECT name FROM mock.default.nation_view WHERE nationkey = 1", "VALUES CAST('ANITNEGRA' AS VARCHAR(25))"); + }); + + // mask on the underlying table for user running the query (different from view owner) should not be applied + assertions.executeExclusively(() -> { + accessControl.reset(); + accessControl.columnMask( + new QualifiedObjectName(CATALOG, "tiny", "nation"), + "name", + RUN_AS_USER, + new ViewExpression(RUN_AS_USER, Optional.of(CATALOG), Optional.of("tiny"), "reverse(name)")); + + Session session = Session.builder(SESSION) + .setIdentity(Identity.forUser(RUN_AS_USER).build()) + .build(); + + assertions.assertQuery(session, "SELECT name FROM mock.default.nation_view WHERE nationkey = 1", "VALUES CAST('ARGENTINA' AS 
VARCHAR(25))"); + }); + } + @Test public void testTableReferenceInWithClause() { diff --git a/presto-main/src/test/java/io/prestosql/sql/query/TestRowFilter.java b/presto-main/src/test/java/io/prestosql/sql/query/TestRowFilter.java index 0f5115261c51..7c6931f1072a 100644 --- a/presto-main/src/test/java/io/prestosql/sql/query/TestRowFilter.java +++ b/presto-main/src/test/java/io/prestosql/sql/query/TestRowFilter.java @@ -13,12 +13,18 @@ */ package io.prestosql.sql.query; +import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.prestosql.Session; +import io.prestosql.connector.MockConnectorFactory; import io.prestosql.metadata.QualifiedObjectName; import io.prestosql.plugin.tpch.TpchConnectorFactory; +import io.prestosql.spi.connector.ConnectorViewDefinition; +import io.prestosql.spi.connector.SchemaTableName; import io.prestosql.spi.security.Identity; import io.prestosql.spi.security.ViewExpression; +import io.prestosql.spi.type.BigintType; +import io.prestosql.spi.type.VarcharType; import io.prestosql.testing.LocalQueryRunner; import io.prestosql.testing.TestingAccessControlManager; import org.testng.annotations.AfterClass; @@ -33,25 +39,43 @@ public class TestRowFilter { private static final String CATALOG = "local"; + private static final String MOCK_CATALOG = "mock"; private static final String USER = "user"; + private static final String VIEW_OWNER = "view-owner"; private static final String RUN_AS_USER = "run-as-user"; + private static final Session SESSION = testSessionBuilder() + .setCatalog(CATALOG) + .setSchema(TINY_SCHEMA_NAME) + .setIdentity(Identity.forUser(USER).build()) + .build(); + private QueryAssertions assertions; private TestingAccessControlManager accessControl; @BeforeClass public void init() { - Session session = testSessionBuilder() - .setCatalog(CATALOG) - .setSchema(TINY_SCHEMA_NAME) - .setIdentity(Identity.forUser(USER).build()) - .build(); + LocalQueryRunner runner = 
LocalQueryRunner.builder(SESSION).build(); - LocalQueryRunner runner = LocalQueryRunner.builder(session) + runner.createCatalog(CATALOG, new TpchConnectorFactory(1), ImmutableMap.of()); + + ConnectorViewDefinition view = new ConnectorViewDefinition( + "SELECT nationkey, name FROM local.tiny.nation", + Optional.empty(), + Optional.empty(), + ImmutableList.of(new ConnectorViewDefinition.ViewColumn("nationkey", BigintType.BIGINT.getTypeId()), new ConnectorViewDefinition.ViewColumn("name", VarcharType.createVarcharType(25).getTypeId())), + Optional.empty(), + Optional.of(VIEW_OWNER), + false); + + MockConnectorFactory mock = MockConnectorFactory.builder() + .withGetViews((s, prefix) -> ImmutableMap.builder() + .put(new SchemaTableName("default", "nation_view"), view) + .build()) .build(); - runner.createCatalog(CATALOG, new TpchConnectorFactory(1), ImmutableMap.of()); + runner.createCatalog(MOCK_CATALOG, mock, ImmutableMap.of()); assertions = new QueryAssertions(runner); accessControl = assertions.getQueryRunner().getAccessControl(); @@ -118,6 +142,55 @@ public void testCorrelatedSubquery() }); } + @Test + public void testView() + { + // filter on the underlying table for view owner when running query as different user + assertions.executeExclusively(() -> { + accessControl.reset(); + accessControl.rowFilter( + new QualifiedObjectName(CATALOG, "tiny", "nation"), + VIEW_OWNER, + new ViewExpression(VIEW_OWNER, Optional.empty(), Optional.empty(), "nationkey = 1")); + + Session session = Session.builder(SESSION) + .setIdentity(Identity.forUser(RUN_AS_USER).build()) + .build(); + + assertions.assertQuery(session, "SELECT name FROM mock.default.nation_view", "VALUES CAST('ARGENTINA' AS VARCHAR(25))"); + }); + + // filter on the underlying table for view owner when running as themselves + assertions.executeExclusively(() -> { + accessControl.reset(); + accessControl.rowFilter( + new QualifiedObjectName(CATALOG, "tiny", "nation"), + VIEW_OWNER, + new ViewExpression(VIEW_OWNER, 
Optional.of(CATALOG), Optional.of("tiny"), "nationkey = 1")); + + Session session = Session.builder(SESSION) + .setIdentity(Identity.forUser(VIEW_OWNER).build()) + .build(); + + assertions.assertQuery(session, "SELECT name FROM mock.default.nation_view", "VALUES CAST('ARGENTINA' AS VARCHAR(25))"); + }); + + // filter on the underlying table for user running the query (different from view owner) should not be applied + assertions.executeExclusively(() -> { + accessControl.reset(); + accessControl.rowFilter( + new QualifiedObjectName(CATALOG, "tiny", "nation"), + RUN_AS_USER, + new ViewExpression(RUN_AS_USER, Optional.of(CATALOG), Optional.of("tiny"), "nationkey = 1")); + + Session session = Session.builder(SESSION) + .setIdentity(Identity.forUser(RUN_AS_USER).build()) + .build(); + + assertions.assertQuery(session, "SELECT count(*) FROM mock.default.nation_view", "VALUES BIGINT '25'"); + }); + } + @Test public void testTableReferenceInWithClause() { From 6721cfad45d47a663c805c496eb40e648a8a1710 Mon Sep 17 00:00:00 2001 From: Martin Traverso Date: Fri, 24 Apr 2020 08:28:20 -0700 Subject: [PATCH 236/519] Apply filters and masks for view object The planner for Table node was bailing out early if the table reference corresponded to a view or named query without attaching any masks or filters that may have been resolved. 
--- .../sql/planner/RelationPlanner.java | 44 ++++++++++++------- .../prestosql/sql/query/TestColumnMask.java | 11 +++++ .../io/prestosql/sql/query/TestRowFilter.java | 10 +++++ 3 files changed, 49 insertions(+), 16 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/RelationPlanner.java b/presto-main/src/main/java/io/prestosql/sql/planner/RelationPlanner.java index 36976ba062a3..04818169a501 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/RelationPlanner.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/RelationPlanner.java @@ -148,6 +148,7 @@ protected RelationPlan visitTable(Table node, Void context) Query namedQuery = analysis.getNamedQuery(node); Scope scope = analysis.getScope(node); + RelationPlan plan; if (namedQuery != null) { RelationPlan subPlan = process(namedQuery, null); @@ -155,27 +156,31 @@ protected RelationPlan visitTable(Table node, Void context) // of the view (e.g., if the underlying tables referenced by the view changed) Type[] types = scope.getRelationType().getAllFields().stream().map(Field::getType).toArray(Type[]::new); RelationPlan withCoercions = addCoercions(subPlan, types); - return new RelationPlan(withCoercions.getRoot(), scope, withCoercions.getFieldMappings()); + + plan = new RelationPlan(withCoercions.getRoot(), scope, withCoercions.getFieldMappings()); } + else { + TableHandle handle = analysis.getTableHandle(node); - TableHandle handle = analysis.getTableHandle(node); + ImmutableList.Builder outputSymbolsBuilder = ImmutableList.builder(); + ImmutableMap.Builder columns = ImmutableMap.builder(); + for (Field field : scope.getRelationType().getAllFields()) { + Symbol symbol = symbolAllocator.newSymbol(field.getName().get(), field.getType()); - ImmutableList.Builder outputSymbolsBuilder = ImmutableList.builder(); - ImmutableMap.Builder columns = ImmutableMap.builder(); - for (Field field : scope.getRelationType().getAllFields()) { - Symbol symbol = 
symbolAllocator.newSymbol(field.getName().get(), field.getType()); + outputSymbolsBuilder.add(symbol); + columns.put(symbol, analysis.getColumn(field)); + } - outputSymbolsBuilder.add(symbol); - columns.put(symbol, analysis.getColumn(field)); + List outputSymbols = outputSymbolsBuilder.build(); + PlanNode root = TableScanNode.newInstance(idAllocator.getNextId(), handle, outputSymbols, columns.build()); + + plan = new RelationPlan(root, scope, outputSymbols); } - List outputSymbols = outputSymbolsBuilder.build(); - PlanNode root = TableScanNode.newInstance(idAllocator.getNextId(), handle, outputSymbols, columns.build()); + plan = addRowFilters(node, plan); + plan = addColumnMasks(node, plan); - RelationPlan tableScan = new RelationPlan(root, scope, outputSymbols); - tableScan = addRowFilters(node, tableScan); - tableScan = addColumnMasks(node, tableScan); - return tableScan; + return plan; } private RelationPlan addRowFilters(Table node, RelationPlan plan) @@ -196,11 +201,18 @@ private RelationPlan addRowFilters(Table node, RelationPlan plan) private RelationPlan addColumnMasks(Table table, RelationPlan plan) { + Map> columnMasks = analysis.getColumnMasks(table); + + // A Table can represent a WITH query, which can have anonymous fields. On the other hand, + // it can't have masks. 
The loop below expects fields to have proper names, so bail out + // if the masks are missing + if (columnMasks.isEmpty()) { + return plan; + } + PlanBuilder planBuilder = initializePlanBuilder(plan); TranslationMap translations = planBuilder.getTranslations(); - Map> columnMasks = analysis.getColumnMasks(table); - for (int i = 0; i < plan.getDescriptor().getAllFieldCount(); i++) { Field field = plan.getDescriptor().getFieldByIndex(i); diff --git a/presto-main/src/test/java/io/prestosql/sql/query/TestColumnMask.java b/presto-main/src/test/java/io/prestosql/sql/query/TestColumnMask.java index 61f988fac3c7..672b3fcffeff 100644 --- a/presto-main/src/test/java/io/prestosql/sql/query/TestColumnMask.java +++ b/presto-main/src/test/java/io/prestosql/sql/query/TestColumnMask.java @@ -223,6 +223,17 @@ public void testView() assertions.assertQuery(session, "SELECT name FROM mock.default.nation_view WHERE nationkey = 1", "VALUES CAST('ARGENTINA' AS VARCHAR(25))"); }); + + // mask on the view + assertions.executeExclusively(() -> { + accessControl.reset(); + accessControl.columnMask( + new QualifiedObjectName(MOCK_CATALOG, "default", "nation_view"), + "name", + USER, + new ViewExpression(USER, Optional.of(CATALOG), Optional.of("tiny"), "reverse(name)")); + assertions.assertQuery("SELECT name FROM mock.default.nation_view WHERE nationkey = 1", "VALUES CAST('ANITNEGRA' AS VARCHAR(25))"); + }); } @Test diff --git a/presto-main/src/test/java/io/prestosql/sql/query/TestRowFilter.java b/presto-main/src/test/java/io/prestosql/sql/query/TestRowFilter.java index 7c6931f1072a..b834c0087c32 100644 --- a/presto-main/src/test/java/io/prestosql/sql/query/TestRowFilter.java +++ b/presto-main/src/test/java/io/prestosql/sql/query/TestRowFilter.java @@ -189,6 +189,16 @@ public void testView() assertions.assertQuery(session, "SELECT count(*) FROM mock.default.nation_view", "VALUES BIGINT '25'"); }); + + // filter on the view + assertions.executeExclusively(() -> { + accessControl.reset(); + 
accessControl.rowFilter( + new QualifiedObjectName(MOCK_CATALOG, "default", "nation_view"), + USER, + new ViewExpression(USER, Optional.of(CATALOG), Optional.of("tiny"), "nationkey = 1")); + assertions.assertQuery("SELECT name FROM mock.default.nation_view", "VALUES CAST('ARGENTINA' AS VARCHAR(25))"); + }); } @Test From 1cbc72c87eb0bdff40cb9cabdf40f2a27411eaea Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Fri, 24 Apr 2020 21:48:17 +0200 Subject: [PATCH 237/519] Remove unused RowNumberNode from plan --- .../planner/optimizations/PruneUnreferencedOutputs.java | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PruneUnreferencedOutputs.java b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PruneUnreferencedOutputs.java index 5d4d56fbcf9e..bbf528850b5f 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PruneUnreferencedOutputs.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PruneUnreferencedOutputs.java @@ -608,6 +608,15 @@ public PlanNode visitTopN(TopNNode node, RewriteContext> context) @Override public PlanNode visitRowNumber(RowNumberNode node, RewriteContext> context) { + // Remove unused RowNumberNode + if (!context.get().contains(node.getRowNumberSymbol()) && node.getPartitionBy().isEmpty()) { + PlanNode source = context.rewrite(node.getSource(), context.get()); + if (node.getMaxRowCountPerPartition().isPresent()) { + return new LimitNode(node.getId(), source, node.getMaxRowCountPerPartition().get(), false); + } + return source; + } + ImmutableSet.Builder inputsBuilder = ImmutableSet.builder(); ImmutableSet.Builder expectedInputs = inputsBuilder .addAll(context.get()) From e6e4170b2d091b65e42a588ee3e2ea0ada8f8a95 Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Fri, 24 Apr 2020 23:13:07 +0200 Subject: [PATCH 238/519] Add project-off rule 
for RowNumberNode --- .../prestosql/sql/planner/PlanOptimizers.java | 2 + .../iterative/rule/PruneRowNumberColumns.java | 61 +++++++ .../rule/TestPruneRowNumberColumns.java | 154 ++++++++++++++++++ .../iterative/rule/test/PlanBuilder.java | 7 +- 4 files changed, 223 insertions(+), 1 deletion(-) create mode 100644 presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneRowNumberColumns.java create mode 100644 presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneRowNumberColumns.java diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java b/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java index 8c79f5e43de7..266d833c072a 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/PlanOptimizers.java @@ -85,6 +85,7 @@ import io.prestosql.sql.planner.iterative.rule.PruneOrderByInAggregation; import io.prestosql.sql.planner.iterative.rule.PruneOutputSourceColumns; import io.prestosql.sql.planner.iterative.rule.PruneProjectColumns; +import io.prestosql.sql.planner.iterative.rule.PruneRowNumberColumns; import io.prestosql.sql.planner.iterative.rule.PruneSampleColumns; import io.prestosql.sql.planner.iterative.rule.PruneSemiJoinColumns; import io.prestosql.sql.planner.iterative.rule.PruneSemiJoinFilteringSourceColumns; @@ -272,6 +273,7 @@ public PlanOptimizers( new PruneOffsetColumns(), new PruneOutputSourceColumns(), new PruneProjectColumns(), + new PruneRowNumberColumns(), new PruneSampleColumns(), new PruneSemiJoinColumns(), new PruneSemiJoinFilteringSourceColumns(), diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneRowNumberColumns.java b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneRowNumberColumns.java new file mode 100644 index 000000000000..7cd90c4b1e96 --- /dev/null +++ 
b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneRowNumberColumns.java @@ -0,0 +1,61 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.sql.planner.iterative.rule; + +import com.google.common.collect.Streams; +import io.prestosql.sql.planner.Symbol; +import io.prestosql.sql.planner.plan.LimitNode; +import io.prestosql.sql.planner.plan.PlanNode; +import io.prestosql.sql.planner.plan.RowNumberNode; + +import java.util.Optional; +import java.util.Set; +import java.util.stream.Stream; + +import static com.google.common.collect.ImmutableSet.toImmutableSet; +import static io.prestosql.sql.planner.iterative.rule.Util.restrictChildOutputs; +import static io.prestosql.sql.planner.plan.Patterns.rowNumber; + +public class PruneRowNumberColumns + extends ProjectOffPushDownRule +{ + public PruneRowNumberColumns() + { + super(rowNumber()); + } + + @Override + protected Optional pushDownProjectOff(Context context, RowNumberNode rowNumberNode, Set referencedOutputs) + { + if (!referencedOutputs.contains(rowNumberNode.getRowNumberSymbol()) && rowNumberNode.getPartitionBy().isEmpty()) { + if (rowNumberNode.getMaxRowCountPerPartition().isPresent()) { + return Optional.of(new LimitNode( + rowNumberNode.getId(), + rowNumberNode.getSource(), + rowNumberNode.getMaxRowCountPerPartition().get(), + false)); + } + return Optional.of(rowNumberNode.getSource()); + } + + Set requiredInputs = Streams.concat( + referencedOutputs.stream() 
+ .filter(symbol -> !symbol.equals(rowNumberNode.getRowNumberSymbol())), + rowNumberNode.getPartitionBy().stream(), + rowNumberNode.getHashSymbol().map(Stream::of).orElse(Stream.empty())) + .collect(toImmutableSet()); + + return restrictChildOutputs(context.getIdAllocator(), rowNumberNode, requiredInputs); + } +} diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneRowNumberColumns.java b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneRowNumberColumns.java new file mode 100644 index 000000000000..e64550e23c91 --- /dev/null +++ b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneRowNumberColumns.java @@ -0,0 +1,154 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.sql.planner.iterative.rule; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import io.prestosql.sql.planner.Symbol; +import io.prestosql.sql.planner.assertions.RowNumberSymbolMatcher; +import io.prestosql.sql.planner.iterative.rule.test.BaseRuleTest; +import io.prestosql.sql.planner.plan.Assignments; +import org.testng.annotations.Test; + +import java.util.Optional; + +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.expression; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.limit; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.rowNumber; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.strictProject; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.values; + +public class TestPruneRowNumberColumns + extends BaseRuleTest +{ + @Test + public void testRowNumberSymbolNotReferenced() + { + // no partitioning, no limit per partition + tester().assertThat(new PruneRowNumberColumns()) + .on(p -> { + Symbol a = p.symbol("a"); + Symbol rowNumber = p.symbol("row_number"); + return p.project( + Assignments.identity(a), + p.rowNumber(ImmutableList.of(), Optional.empty(), rowNumber, p.values(a))); + }) + .matches( + strictProject( + ImmutableMap.of("a", expression("a")), + values(ImmutableList.of("a")))); + + // no partitioning, limit per partition is present + tester().assertThat(new PruneRowNumberColumns()) + .on(p -> { + Symbol a = p.symbol("a"); + Symbol rowNumber = p.symbol("row_number"); + return p.project( + Assignments.identity(a), + p.rowNumber(ImmutableList.of(), Optional.of(5), rowNumber, p.values(a))); + }) + .matches( + strictProject( + ImmutableMap.of("a", expression("a")), + limit( + 5, + values(ImmutableList.of("a"))))); + + // partitioning is present + tester().assertThat(new PruneRowNumberColumns()) + .on(p -> { + Symbol a = p.symbol("a"); + Symbol b = p.symbol("b"); + Symbol rowNumber 
= p.symbol("row_number"); + return p.project( + Assignments.identity(a), + p.rowNumber(ImmutableList.of(a), Optional.empty(), rowNumber, p.values(a, b))); + }) + .matches( + strictProject( + ImmutableMap.of("a", expression("a")), + rowNumber( + pattern -> pattern + .partitionBy(ImmutableList.of("a")), + strictProject( + ImmutableMap.of("a", expression("a")), + values(ImmutableList.of("a", "b")))))); + } + + @Test + public void testDoNotPrunePartitioningSymbol() + { + tester().assertThat(new PruneRowNumberColumns()) + .on(p -> { + Symbol a = p.symbol("a"); + Symbol rowNumber = p.symbol("row_number"); + return p.project( + Assignments.identity(rowNumber), + p.rowNumber(ImmutableList.of(a), Optional.empty(), rowNumber, p.values(a))); + }) + .doesNotFire(); + } + + @Test + public void testDoNotPruneHashSymbol() + { + tester().assertThat(new PruneRowNumberColumns()) + .on(p -> { + Symbol a = p.symbol("a"); + Symbol rowNumber = p.symbol("row_number"); + Symbol hash = p.symbol("hash"); + return p.project( + Assignments.identity(a, rowNumber), + p.rowNumber(ImmutableList.of(a), Optional.empty(), rowNumber, Optional.of(hash), p.values(a, hash))); + }) + .doesNotFire(); + } + + @Test + public void testSourceSymbolNotReferenced() + { + tester().assertThat(new PruneRowNumberColumns()) + .on(p -> { + Symbol a = p.symbol("a"); + Symbol rowNumber = p.symbol("row_number"); + return p.project( + Assignments.identity(rowNumber), + p.rowNumber(ImmutableList.of(), Optional.empty(), rowNumber, p.values(a))); + }) + .matches( + strictProject( + ImmutableMap.of("row_number", expression("row_number")), + rowNumber( + pattern -> pattern + .partitionBy(ImmutableList.of()), + strictProject( + ImmutableMap.of(), + values(ImmutableList.of("a")))) + .withAlias("row_number", new RowNumberSymbolMatcher()))); + } + + @Test + public void testAllSymbolsReferenced() + { + tester().assertThat(new PruneRowNumberColumns()) + .on(p -> { + Symbol a = p.symbol("a"); + Symbol rowNumber = 
p.symbol("row_number"); + return p.project( + Assignments.identity(a, rowNumber), + p.rowNumber(ImmutableList.of(), Optional.empty(), rowNumber, p.values(a))); + }) + .doesNotFire(); + } +} diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/test/PlanBuilder.java b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/test/PlanBuilder.java index 60db570c3c62..d8ad592fb223 100644 --- a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/test/PlanBuilder.java +++ b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/test/PlanBuilder.java @@ -875,6 +875,11 @@ public WindowNode window(WindowNode.Specification specification, Map partitionBy, Optional maxRowCountPerPartition, Symbol rowNumberSymbol, PlanNode source) + { + return rowNumber(partitionBy, maxRowCountPerPartition, rowNumberSymbol, Optional.empty(), source); + } + + public RowNumberNode rowNumber(List partitionBy, Optional maxRowCountPerPartition, Symbol rowNumberSymbol, Optional hashSymbol, PlanNode source) { return new RowNumberNode( idAllocator.getNextId(), @@ -882,7 +887,7 @@ public RowNumberNode rowNumber(List partitionBy, Optional maxRo partitionBy, rowNumberSymbol, maxRowCountPerPartition, - Optional.empty()); + hashSymbol); } public RemoteSourceNode remoteSourceNode(List fragmentIds, List symbols, ExchangeNode.Type exchangeType) From a747d67557a18a1755c33563ceadb68078e126b2 Mon Sep 17 00:00:00 2001 From: David Phillips Date: Thu, 16 Apr 2020 15:17:12 -0700 Subject: [PATCH 239/519] Generate multiple splits for Hive buckets --- .../plugin/hive/BackgroundHiveSplitLoader.java | 16 ++++++++-------- .../hive/util/InternalHiveSplitFactory.java | 12 +----------- .../hive/TestBackgroundHiveSplitLoader.java | 17 +++++++++++++++++ 3 files changed, 26 insertions(+), 19 deletions(-) diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/BackgroundHiveSplitLoader.java 
b/presto-hive/src/main/java/io/prestosql/plugin/hive/BackgroundHiveSplitLoader.java index b12f10941526..ed2457ce4f31 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/BackgroundHiveSplitLoader.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/BackgroundHiveSplitLoader.java @@ -463,19 +463,19 @@ private ListenableFuture loadPartition(HivePartitionMetadata partition) deleteDeltaLocations = Optional.empty(); } + // S3 Select pushdown works at the granularity of individual S3 objects, + // therefore we must not split files when it is enabled. + boolean splittable = getHeaderCount(schema) == 0 && getFooterCount(schema) == 0 && !s3SelectPushdownEnabled; + // Bucketed partitions are fully loaded immediately since all files must be loaded to determine the file to bucket mapping if (tableBucketInfo.isPresent()) { ListenableFuture lastResult = immediateFuture(null); // TODO document in addToQueue() that it is sufficient to hold on to last returned future for (Path readPath : readPaths) { - lastResult = hiveSplitSource.addToQueue(getBucketedSplits(readPath, fs, splitFactory, tableBucketInfo.get(), bucketConversion, deleteDeltaLocations)); + lastResult = hiveSplitSource.addToQueue(getBucketedSplits(readPath, fs, splitFactory, tableBucketInfo.get(), bucketConversion, splittable, deleteDeltaLocations)); } return lastResult; } - // S3 Select pushdown works at the granularity of individual S3 objects, - // therefore we must not split files when it is enabled. 
- boolean splittable = getHeaderCount(schema) == 0 && getFooterCount(schema) == 0 && !s3SelectPushdownEnabled; - for (Path readPath : readPaths) { fileIterators.addLast(createInternalHiveSplitIterator(readPath, fs, splitFactory, splittable, deleteDeltaLocations)); } @@ -510,13 +510,13 @@ private static boolean shouldUseFileSplitsFromInputFormat(InputFormat inpu private Iterator createInternalHiveSplitIterator(Path path, FileSystem fileSystem, InternalHiveSplitFactory splitFactory, boolean splittable, Optional deleteDeltaLocations) { return Streams.stream(new HiveFileIterator(table, path, fileSystem, directoryLister, namenodeStats, recursiveDirWalkerEnabled ? RECURSE : IGNORED, ignoreAbsentPartitions)) - .map(status -> splitFactory.createInternalHiveSplit(status, splittable, deleteDeltaLocations)) + .map(status -> splitFactory.createInternalHiveSplit(status, OptionalInt.empty(), splittable, deleteDeltaLocations)) .filter(Optional::isPresent) .map(Optional::get) .iterator(); } - private List getBucketedSplits(Path path, FileSystem fileSystem, InternalHiveSplitFactory splitFactory, BucketSplitInfo bucketSplitInfo, Optional bucketConversion, Optional deleteDeltaLocations) + private List getBucketedSplits(Path path, FileSystem fileSystem, InternalHiveSplitFactory splitFactory, BucketSplitInfo bucketSplitInfo, Optional bucketConversion, boolean splittable, Optional deleteDeltaLocations) { int readBucketCount = bucketSplitInfo.getReadBucketCount(); int tableBucketCount = bucketSplitInfo.getTableBucketCount(); @@ -604,7 +604,7 @@ private List getBucketedSplits(Path path, FileSystem fileSyst for (LocatedFileStatus file : bucketFiles.get(partitionBucketNumber)) { // OrcDeletedRows will load only delete delta files matching current bucket (same file name), // so we can pass all delete delta locations here, without filtering. 
- splitFactory.createInternalHiveSplit(file, readBucketNumber, deleteDeltaLocations) + splitFactory.createInternalHiveSplit(file, OptionalInt.of(readBucketNumber), splittable, deleteDeltaLocations) .ifPresent(splitList::add); } } diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/util/InternalHiveSplitFactory.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/util/InternalHiveSplitFactory.java index 0c69611d5fea..39c9f9fcd420 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/util/InternalHiveSplitFactory.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/util/InternalHiveSplitFactory.java @@ -92,17 +92,7 @@ public String getPartitionName() return partitionName; } - public Optional createInternalHiveSplit(LocatedFileStatus status, boolean splittable, Optional deleteDeltaLocations) - { - return createInternalHiveSplit(status, OptionalInt.empty(), splittable, deleteDeltaLocations); - } - - public Optional createInternalHiveSplit(LocatedFileStatus status, int bucketNumber, Optional deleteDeltaLocations) - { - return createInternalHiveSplit(status, OptionalInt.of(bucketNumber), false, deleteDeltaLocations); - } - - private Optional createInternalHiveSplit(LocatedFileStatus status, OptionalInt bucketNumber, boolean splittable, Optional deleteDeltaLocations) + public Optional createInternalHiveSplit(LocatedFileStatus status, OptionalInt bucketNumber, boolean splittable, Optional deleteDeltaLocations) { splittable = splittable && isSplittable(inputFormat, fileSystem, status.getPath()); return createInternalHiveSplit( diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestBackgroundHiveSplitLoader.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestBackgroundHiveSplitLoader.java index 89bdde3e4c9a..e492581b13bd 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestBackgroundHiveSplitLoader.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestBackgroundHiveSplitLoader.java @@ 
-389,6 +389,23 @@ public Object[][] testPropagateExceptionDataProvider() }; } + @Test + public void testMultipleSplitsPerBucket() + throws Exception + { + BackgroundHiveSplitLoader backgroundHiveSplitLoader = backgroundHiveSplitLoader( + ImmutableList.of(locatedFileStatus(new Path(SAMPLE_PATH), DataSize.of(1, GIGABYTE).toBytes())), + TupleDomain.all(), + Optional.empty(), + SIMPLE_TABLE, + Optional.of(new HiveBucketHandle(BUCKET_COLUMN_HANDLES, BUCKETING_V1, BUCKET_COUNT, BUCKET_COUNT))); + + HiveSplitSource hiveSplitSource = hiveSplitSource(backgroundHiveSplitLoader); + backgroundHiveSplitLoader.start(hiveSplitSource); + + assertEquals(drainSplits(hiveSplitSource).size(), 17); + } + @Test public void testSplitsGenerationWithAbortedTransactions() throws Exception From 8fc0e57e9713b50a95e62e7866cf37d92d8977ff Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Fri, 24 Apr 2020 23:03:54 +0200 Subject: [PATCH 240/519] Use assertThat to compare primitive arrays TestNG's `assertEquals` compares lengths first and does not quote array values in error message when lengths are not equal. 
--- .../prestosql/parquet/reader/TestMetadataReader.java | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/presto-parquet/src/test/java/io/prestosql/parquet/reader/TestMetadataReader.java b/presto-parquet/src/test/java/io/prestosql/parquet/reader/TestMetadataReader.java index b5ff3b1a9ce5..f9aadf863b37 100644 --- a/presto-parquet/src/test/java/io/prestosql/parquet/reader/TestMetadataReader.java +++ b/presto-parquet/src/test/java/io/prestosql/parquet/reader/TestMetadataReader.java @@ -137,20 +137,20 @@ public void testReadStatsBinaryUtf8OldWriter(Optional fileCreatedBy, int .isInstanceOfSatisfying(BinaryStatistics.class, columnStatistics -> { assertEquals(columnStatistics.getNumNulls(), expectedNullCount); - assertEquals(columnStatistics.getMinBytes(), expectedMin); + assertThat(columnStatistics.getMinBytes()).isEqualTo(expectedMin); if (expectedMin != null) { - assertEquals(columnStatistics.getMin().getBytes(), expectedMin); - assertEquals(columnStatistics.genericGetMin().getBytes(), expectedMin); + assertThat(columnStatistics.getMin().getBytes()).isEqualTo(expectedMin); + assertThat(columnStatistics.genericGetMin().getBytes()).isEqualTo(expectedMin); } else { assertNull(columnStatistics.getMin()); assertNull(columnStatistics.genericGetMin()); } - assertEquals(columnStatistics.getMaxBytes(), expectedMax); + assertThat(columnStatistics.getMaxBytes()).isEqualTo(expectedMax); if (expectedMax != null) { - assertEquals(columnStatistics.getMax().getBytes(), expectedMax); - assertEquals(columnStatistics.genericGetMax().getBytes(), expectedMax); + assertThat(columnStatistics.getMax().getBytes()).isEqualTo(expectedMax); + assertThat(columnStatistics.genericGetMax().getBytes()).isEqualTo(expectedMax); } else { assertNull(columnStatistics.getMax()); From 054f1a17839dc0ec92859467dbcfed38526e14c0 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Fri, 24 Apr 2020 23:05:31 +0200 Subject: [PATCH 241/519] Remove useless test cases --- 
.../prestosql/parquet/reader/TestMetadataReader.java | 11 ++--------- 1 file changed, 2 insertions(+), 9 deletions(-) diff --git a/presto-parquet/src/test/java/io/prestosql/parquet/reader/TestMetadataReader.java b/presto-parquet/src/test/java/io/prestosql/parquet/reader/TestMetadataReader.java index f9aadf863b37..f7bf87cf28eb 100644 --- a/presto-parquet/src/test/java/io/prestosql/parquet/reader/TestMetadataReader.java +++ b/presto-parquet/src/test/java/io/prestosql/parquet/reader/TestMetadataReader.java @@ -167,38 +167,31 @@ public Object[][] testReadStatsBinaryUtf8OldWriterDataProvider() {NO_CREATED_BY, 13, "aa".getBytes(UTF_8), "bé".getBytes(UTF_8), 13, null, null}, {PARQUET_MR, 13, "aa".getBytes(UTF_8), "bé".getBytes(UTF_8), 13, null, null}, {PARQUET_MR_1_8, 13, "aa".getBytes(UTF_8), "bé".getBytes(UTF_8), 13, "aa".getBytes(UTF_8), "c".getBytes(UTF_8)}, - {PARQUET_MR_1_10, 13, "aa".getBytes(UTF_8), "bé".getBytes(UTF_8), 13, "aa".getBytes(UTF_8), "c".getBytes(UTF_8)}, // however, 1.10 won't fill old min/max // [abc\u007fé, bcd\u007fé]; \u007f is retained in min value, but removed from max {NO_CREATED_BY, 13, "abc\u007fé".getBytes(UTF_8), "bcd\u007fé".getBytes(UTF_8), 13, null, null}, {PARQUET_MR, 13, "abc\u007fé".getBytes(UTF_8), "bcd\u007fé".getBytes(UTF_8), 13, null, null}, {PARQUET_MR_1_8, 13, "abc\u007fé".getBytes(UTF_8), "bcd\u007fé".getBytes(UTF_8), 13, "abc\u007f".getBytes(UTF_8), "bce".getBytes(UTF_8)}, - // however, 1.10 won't fill old min/max - {PARQUET_MR_1_10, 13, "abc\u007fé".getBytes(UTF_8), "bcd\u007fé".getBytes(UTF_8), 13, "abc\u007f".getBytes(UTF_8), "bce".getBytes(UTF_8)}, - // [é, a] or [a, é] + // [é, a] {NO_CREATED_BY, 13, "é".getBytes(UTF_8), "a".getBytes(UTF_8), 13, null, null}, {PARQUET_MR, 13, "é".getBytes(UTF_8), "a".getBytes(UTF_8), 13, null, null}, {PARQUET_MR_1_8, 13, "é".getBytes(UTF_8), "a".getBytes(UTF_8), 13, new byte[0], "b".getBytes(UTF_8)}, - {PARQUET_MR_1_10, 13, "a".getBytes(UTF_8), "é".getBytes(UTF_8), 13, null, null}, // 
however, 1.10 won't fill old min/max - // [é, ê]; both, before PARQUET-1025 and after than, Parquet writer would order them this way + // [é, ê] {NO_CREATED_BY, 13, "é".getBytes(UTF_8), "ê".getBytes(UTF_8), 13, null, null}, {PARQUET_MR, 13, "é".getBytes(UTF_8), "ê".getBytes(UTF_8), 13, null, null}, {PARQUET_MR_1_8, 13, "é".getBytes(UTF_8), "ê".getBytes(UTF_8), 13, null, null}, - {PARQUET_MR_1_10, 13, "é".getBytes(UTF_8), "ê".getBytes(UTF_8), 13, null, null}, // however, 1.10 won't fill old min/max // [aé, aé] {NO_CREATED_BY, 13, "aé".getBytes(UTF_8), "aé".getBytes(UTF_8), 13, null, null}, {PARQUET_MR, 13, "aé".getBytes(UTF_8), "aé".getBytes(UTF_8), 13, null, null}, {PARQUET_MR_1_8, 13, "aé".getBytes(UTF_8), "aé".getBytes(UTF_8), 13, "aé".getBytes(UTF_8), "aé".getBytes(UTF_8)}, - {PARQUET_MR_1_10, 13, "aé".getBytes(UTF_8), "aé".getBytes(UTF_8), 13, "aé".getBytes(UTF_8), "aé".getBytes(UTF_8)}, // however, 1.10 won't fill old min/max // [aé, bé] {NO_CREATED_BY, 13, "aé".getBytes(UTF_8), "bé".getBytes(UTF_8), 13, null, null}, {PARQUET_MR, 13, "aé".getBytes(UTF_8), "bé".getBytes(UTF_8), 13, null, null}, {PARQUET_MR_1_8, 13, "aé".getBytes(UTF_8), "bé".getBytes(UTF_8), 13, "a".getBytes(UTF_8), "c".getBytes(UTF_8)}, - {PARQUET_MR_1_10, 13, "aé".getBytes(UTF_8), "bé".getBytes(UTF_8), 13, "a".getBytes(UTF_8), "c".getBytes(UTF_8)}, // however, 1.10 won't fill old min/max }; } From af4f6ab8c6b71d39973ac1f269fdfba097432835 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Fri, 24 Apr 2020 23:32:26 +0200 Subject: [PATCH 242/519] Fix string statistics recovery logic --- .../parquet/reader/MetadataReader.java | 48 +++++++++++++------ .../parquet/reader/TestMetadataReader.java | 36 ++++++++++---- 2 files changed, 61 insertions(+), 23 deletions(-) diff --git a/presto-parquet/src/main/java/io/prestosql/parquet/reader/MetadataReader.java b/presto-parquet/src/main/java/io/prestosql/parquet/reader/MetadataReader.java index ab8bda79c1ae..33f14a202f2b 100644 --- 
a/presto-parquet/src/main/java/io/prestosql/parquet/reader/MetadataReader.java +++ b/presto-parquet/src/main/java/io/prestosql/parquet/reader/MetadataReader.java @@ -231,18 +231,31 @@ private static void tryReadOldUtf8Stats(Statistics statistics, BinaryStatistics max = min; } else { - // For min it's enough to retain leading all-ASCII, because this produces a strictly lower value. - int minFirstNonAsciiOffset = firstOutsideRange(min, 0, 128); - min = Arrays.copyOf(min, minFirstNonAsciiOffset); - - // For max we chop away everything at the first non-ASCII, then increment last character. - int maxFirstBadCharacter = firstOutsideRange(max, 0, 127); // last ASCII is also bad because we can't increment it - if (maxFirstBadCharacter == 0) { - // We can't help. + int commonPrefix = commonPrefix(min, max); + + // For min we can retain all-ASCII, because this produces a strictly lower value. + int minGoodLength = commonPrefix; + while (minGoodLength < min.length && isAscii(min[minGoodLength])) { + minGoodLength++; + } + + // For max we can be sure only of the part matching the min. When they differ, we can consider only one next, and only if both are ASCII + int maxGoodLength = commonPrefix; + if (maxGoodLength < max.length && (maxGoodLength == min.length || isAscii(min[maxGoodLength])) && isAscii(max[maxGoodLength])) { + maxGoodLength++; + } + // Incrementing 127 would overflow. Incrementing within non-ASCII can have side-effects. + while (maxGoodLength > 0 && (!isAscii(max[maxGoodLength - 1]) || max[maxGoodLength - 1] == 127)) { + maxGoodLength--; + } + if (maxGoodLength == 0) { + // We can return just min bound, but code downstream likely expects both are present or both are absent. 
return; } - max[maxFirstBadCharacter - 1]++; - max = Arrays.copyOf(max, maxFirstBadCharacter); + + min = Arrays.copyOf(min, minGoodLength); + max = Arrays.copyOf(max, maxGoodLength); + max[maxGoodLength - 1]++; } columnStatistics.setMinMaxFromBytes(min, max); @@ -251,13 +264,18 @@ private static void tryReadOldUtf8Stats(Statistics statistics, BinaryStatistics } } - private static int firstOutsideRange(byte[] bytes, int rangeStartInclusive, int rangeEndExclusive) + private static boolean isAscii(byte b) + { + return 0 <= b; + } + + private static int commonPrefix(byte[] a, byte[] b) { - int offset = 0; - while (offset < bytes.length && rangeStartInclusive <= bytes[offset] && bytes[offset] < rangeEndExclusive) { - offset++; + int commonPrefixLength = 0; + while (commonPrefixLength < a.length && commonPrefixLength < b.length && a[commonPrefixLength] == b[commonPrefixLength]) { + commonPrefixLength++; } - return offset; + return commonPrefixLength; } private static Set readEncodings(List encodings) diff --git a/presto-parquet/src/test/java/io/prestosql/parquet/reader/TestMetadataReader.java b/presto-parquet/src/test/java/io/prestosql/parquet/reader/TestMetadataReader.java index f7bf87cf28eb..e6d07e0868ff 100644 --- a/presto-parquet/src/test/java/io/prestosql/parquet/reader/TestMetadataReader.java +++ b/presto-parquet/src/test/java/io/prestosql/parquet/reader/TestMetadataReader.java @@ -163,32 +163,52 @@ public void testReadStatsBinaryUtf8OldWriter(Optional fileCreatedBy, int public Object[][] testReadStatsBinaryUtf8OldWriterDataProvider() { return new Object[][] { - // [aa, bé] + // [aa, bé]: no common prefix, first different are both ASCII, min is all ASCII {NO_CREATED_BY, 13, "aa".getBytes(UTF_8), "bé".getBytes(UTF_8), 13, null, null}, {PARQUET_MR, 13, "aa".getBytes(UTF_8), "bé".getBytes(UTF_8), 13, null, null}, {PARQUET_MR_1_8, 13, "aa".getBytes(UTF_8), "bé".getBytes(UTF_8), 13, "aa".getBytes(UTF_8), "c".getBytes(UTF_8)}, - // [abc\u007fé, bcd\u007fé]; \u007f is 
retained in min value, but removed from max + // [abcé, abcéN]: common prefix, not only ASCII, one prefix of the other + {NO_CREATED_BY, 13, "abcé".getBytes(UTF_8), "abcéN".getBytes(UTF_8), 13, null, null}, + {PARQUET_MR, 13, "abcé".getBytes(UTF_8), "abcéN".getBytes(UTF_8), 13, null, null}, + {PARQUET_MR_1_8, 13, "abcé".getBytes(UTF_8), "abcéN".getBytes(UTF_8), 13, "abcé".getBytes(UTF_8), "abcéO".getBytes(UTF_8)}, + + // [abcéM, abcéN]: common prefix, not only ASCII, first different are both ASCII + {NO_CREATED_BY, 13, "abcéM".getBytes(UTF_8), "abcéN".getBytes(UTF_8), 13, null, null}, + {PARQUET_MR, 13, "abcéM".getBytes(UTF_8), "abcéN".getBytes(UTF_8), 13, null, null}, + {PARQUET_MR_1_8, 13, "abcéM".getBytes(UTF_8), "abcéN".getBytes(UTF_8), 13, "abcéM".getBytes(UTF_8), "abcéO".getBytes(UTF_8)}, + + // [abcéMab, abcéNxy]: common prefix, not only ASCII, first different are both ASCII, more characters afterwards + {NO_CREATED_BY, 13, "abcéMab".getBytes(UTF_8), "abcéNxy".getBytes(UTF_8), 13, null, null}, + {PARQUET_MR, 13, "abcéMab".getBytes(UTF_8), "abcéNxy".getBytes(UTF_8), 13, null, null}, + {PARQUET_MR_1_8, 13, "abcéMab".getBytes(UTF_8), "abcéNxy".getBytes(UTF_8), 13, "abcéMab".getBytes(UTF_8), "abcéO".getBytes(UTF_8)}, + + // [abcéM, abcé\u00f7]: common prefix, not only ASCII, first different are both ASCII, but need to be chopped off (127) + {NO_CREATED_BY, 13, "abcéM".getBytes(UTF_8), "abcé\u00f7".getBytes(UTF_8), 13, null, null}, + {PARQUET_MR, 13, "abcéM".getBytes(UTF_8), "abcé\u00f7".getBytes(UTF_8), 13, null, null}, + {PARQUET_MR_1_8, 13, "abcéM".getBytes(UTF_8), "abcé\u00f7".getBytes(UTF_8), 13, "abcéM".getBytes(UTF_8), "abd".getBytes(UTF_8)}, + + // [abc\u007fé, bcd\u007fé]: no common prefix, first different are both ASCII {NO_CREATED_BY, 13, "abc\u007fé".getBytes(UTF_8), "bcd\u007fé".getBytes(UTF_8), 13, null, null}, {PARQUET_MR, 13, "abc\u007fé".getBytes(UTF_8), "bcd\u007fé".getBytes(UTF_8), 13, null, null}, - {PARQUET_MR_1_8, 13, 
"abc\u007fé".getBytes(UTF_8), "bcd\u007fé".getBytes(UTF_8), 13, "abc\u007f".getBytes(UTF_8), "bce".getBytes(UTF_8)}, + {PARQUET_MR_1_8, 13, "abc\u007fé".getBytes(UTF_8), "bcd\u007fé".getBytes(UTF_8), 13, "abc\u007f".getBytes(UTF_8), "c".getBytes(UTF_8)}, - // [é, a] + // [é, a]: no common prefix, first different are not both ASCII {NO_CREATED_BY, 13, "é".getBytes(UTF_8), "a".getBytes(UTF_8), 13, null, null}, {PARQUET_MR, 13, "é".getBytes(UTF_8), "a".getBytes(UTF_8), 13, null, null}, - {PARQUET_MR_1_8, 13, "é".getBytes(UTF_8), "a".getBytes(UTF_8), 13, new byte[0], "b".getBytes(UTF_8)}, + {PARQUET_MR_1_8, 13, "é".getBytes(UTF_8), "a".getBytes(UTF_8), 13, null, null}, - // [é, ê] + // [é, ê]: no common prefix, first different are both not ASCII {NO_CREATED_BY, 13, "é".getBytes(UTF_8), "ê".getBytes(UTF_8), 13, null, null}, {PARQUET_MR, 13, "é".getBytes(UTF_8), "ê".getBytes(UTF_8), 13, null, null}, {PARQUET_MR_1_8, 13, "é".getBytes(UTF_8), "ê".getBytes(UTF_8), 13, null, null}, - // [aé, aé] + // [aé, aé]: min = max (common prefix, first different are both not ASCII) {NO_CREATED_BY, 13, "aé".getBytes(UTF_8), "aé".getBytes(UTF_8), 13, null, null}, {PARQUET_MR, 13, "aé".getBytes(UTF_8), "aé".getBytes(UTF_8), 13, null, null}, {PARQUET_MR_1_8, 13, "aé".getBytes(UTF_8), "aé".getBytes(UTF_8), 13, "aé".getBytes(UTF_8), "aé".getBytes(UTF_8)}, - // [aé, bé] + // [aé, bé]: no common prefix, first different are both ASCII {NO_CREATED_BY, 13, "aé".getBytes(UTF_8), "bé".getBytes(UTF_8), 13, null, null}, {PARQUET_MR, 13, "aé".getBytes(UTF_8), "bé".getBytes(UTF_8), 13, null, null}, {PARQUET_MR_1_8, 13, "aé".getBytes(UTF_8), "bé".getBytes(UTF_8), 13, "a".getBytes(UTF_8), "c".getBytes(UTF_8)}, From d52432e2ecc6b08d497d529cac139e29708a181f Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Sat, 25 Apr 2020 17:49:56 +0200 Subject: [PATCH 243/519] Flip condition to match comment --- .../main/java/io/prestosql/parquet/reader/MetadataReader.java | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/presto-parquet/src/main/java/io/prestosql/parquet/reader/MetadataReader.java b/presto-parquet/src/main/java/io/prestosql/parquet/reader/MetadataReader.java index 33f14a202f2b..9d27c732672a 100644 --- a/presto-parquet/src/main/java/io/prestosql/parquet/reader/MetadataReader.java +++ b/presto-parquet/src/main/java/io/prestosql/parquet/reader/MetadataReader.java @@ -245,7 +245,7 @@ private static void tryReadOldUtf8Stats(Statistics statistics, BinaryStatistics maxGoodLength++; } // Incrementing 127 would overflow. Incrementing within non-ASCII can have side-effects. - while (maxGoodLength > 0 && (!isAscii(max[maxGoodLength - 1]) || max[maxGoodLength - 1] == 127)) { + while (maxGoodLength > 0 && (max[maxGoodLength - 1] == 127 || !isAscii(max[maxGoodLength - 1]))) { maxGoodLength--; } if (maxGoodLength == 0) { From a47b4b3559a0281439f72cc394a0d54dcb4d0208 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Sat, 25 Apr 2020 17:53:21 +0200 Subject: [PATCH 244/519] Fix handling of stats when min is prefix of max --- .../parquet/reader/MetadataReader.java | 2 +- .../parquet/reader/TestMetadataReader.java | 19 +++++++++++++++++-- 2 files changed, 18 insertions(+), 3 deletions(-) diff --git a/presto-parquet/src/main/java/io/prestosql/parquet/reader/MetadataReader.java b/presto-parquet/src/main/java/io/prestosql/parquet/reader/MetadataReader.java index 9d27c732672a..29a52aedee66 100644 --- a/presto-parquet/src/main/java/io/prestosql/parquet/reader/MetadataReader.java +++ b/presto-parquet/src/main/java/io/prestosql/parquet/reader/MetadataReader.java @@ -241,7 +241,7 @@ private static void tryReadOldUtf8Stats(Statistics statistics, BinaryStatistics // For max we can be sure only of the part matching the min. 
When they differ, we can consider only one next, and only if both are ASCII int maxGoodLength = commonPrefix; - if (maxGoodLength < max.length && (maxGoodLength == min.length || isAscii(min[maxGoodLength])) && isAscii(max[maxGoodLength])) { + if (maxGoodLength < max.length && maxGoodLength < min.length && isAscii(min[maxGoodLength]) && isAscii(max[maxGoodLength])) { maxGoodLength++; } // Incrementing 127 would overflow. Incrementing within non-ASCII can have side-effects. diff --git a/presto-parquet/src/test/java/io/prestosql/parquet/reader/TestMetadataReader.java b/presto-parquet/src/test/java/io/prestosql/parquet/reader/TestMetadataReader.java index e6d07e0868ff..60e012be4dd4 100644 --- a/presto-parquet/src/test/java/io/prestosql/parquet/reader/TestMetadataReader.java +++ b/presto-parquet/src/test/java/io/prestosql/parquet/reader/TestMetadataReader.java @@ -163,15 +163,30 @@ public void testReadStatsBinaryUtf8OldWriter(Optional fileCreatedBy, int public Object[][] testReadStatsBinaryUtf8OldWriterDataProvider() { return new Object[][] { + // [, bcé]: min is empty, max starts with ASCII + {NO_CREATED_BY, 13, new byte[0], "bcé".getBytes(UTF_8), 13, null, null}, + {PARQUET_MR, 13, new byte[0], "bcé".getBytes(UTF_8), 13, null, null}, + {PARQUET_MR_1_8, 13, new byte[0], "bcé".getBytes(UTF_8), 13, null, null}, + + // [, ébc]: min is empty, max starts with non-ASCII + {NO_CREATED_BY, 13, new byte[0], "ébc".getBytes(UTF_8), 13, null, null}, + {PARQUET_MR, 13, new byte[0], "ébc".getBytes(UTF_8), 13, null, null}, + {PARQUET_MR_1_8, 13, new byte[0], "ébc".getBytes(UTF_8), 13, null, null}, + // [aa, bé]: no common prefix, first different are both ASCII, min is all ASCII {NO_CREATED_BY, 13, "aa".getBytes(UTF_8), "bé".getBytes(UTF_8), 13, null, null}, {PARQUET_MR, 13, "aa".getBytes(UTF_8), "bé".getBytes(UTF_8), 13, null, null}, {PARQUET_MR_1_8, 13, "aa".getBytes(UTF_8), "bé".getBytes(UTF_8), 13, "aa".getBytes(UTF_8), "c".getBytes(UTF_8)}, - // [abcé, abcéN]: common prefix, not 
only ASCII, one prefix of the other + // [abcd, abcdN]: common prefix, not only ASCII, one prefix of the other, last common ASCII + {NO_CREATED_BY, 13, "abcd".getBytes(UTF_8), "abcdN".getBytes(UTF_8), 13, null, null}, + {PARQUET_MR, 13, "abcd".getBytes(UTF_8), "abcdN".getBytes(UTF_8), 13, null, null}, + {PARQUET_MR_1_8, 13, "abcd".getBytes(UTF_8), "abcdN".getBytes(UTF_8), 13, "abcd".getBytes(UTF_8), "abce".getBytes(UTF_8)}, + + // [abcé, abcéN]: common prefix, not only ASCII, one prefix of the other, last common non ASCII {NO_CREATED_BY, 13, "abcé".getBytes(UTF_8), "abcéN".getBytes(UTF_8), 13, null, null}, {PARQUET_MR, 13, "abcé".getBytes(UTF_8), "abcéN".getBytes(UTF_8), 13, null, null}, - {PARQUET_MR_1_8, 13, "abcé".getBytes(UTF_8), "abcéN".getBytes(UTF_8), 13, "abcé".getBytes(UTF_8), "abcéO".getBytes(UTF_8)}, + {PARQUET_MR_1_8, 13, "abcé".getBytes(UTF_8), "abcéN".getBytes(UTF_8), 13, "abcé".getBytes(UTF_8), "abd".getBytes(UTF_8)}, // [abcéM, abcéN]: common prefix, not only ASCII, first different are both ASCII {NO_CREATED_BY, 13, "abcéM".getBytes(UTF_8), "abcéN".getBytes(UTF_8), 13, null, null}, From 956c359eaf616707c380e99fe7f6b90f8ef6458e Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Sat, 25 Apr 2020 22:35:19 +0200 Subject: [PATCH 245/519] Cleanup Throwable declarations in benchmarks --- .../BenchmarkGetPartitionsSample.java | 2 +- .../io/prestosql/block/BenchmarkMapCopy.java | 2 +- .../execution/BenchmarkNodeScheduler.java | 2 +- .../BenchmarkResourceGroup.java | 2 +- .../BenchmarkArrayAggregation.java | 2 +- ...enchmarkDictionaryBlockGetSizeInBytes.java | 2 +- .../scalar/BenchmarkArrayDistinct.java | 2 +- .../operator/scalar/BenchmarkArrayFilter.java | 2 +- .../BenchmarkArrayHashCodeOperator.java | 2 +- .../scalar/BenchmarkArrayIntersect.java | 2 +- .../operator/scalar/BenchmarkArrayJoin.java | 2 +- .../operator/scalar/BenchmarkArraySort.java | 2 +- .../scalar/BenchmarkArraySubscript.java | 2 +- .../scalar/BenchmarkArrayTransform.java | 2 +- 
.../scalar/BenchmarkJsonToArrayCast.java | 2 +- .../scalar/BenchmarkJsonToMapCast.java | 2 +- .../operator/scalar/BenchmarkMapConcat.java | 2 +- .../scalar/BenchmarkMapSubscript.java | 2 +- .../scalar/BenchmarkMapToMapCast.java | 2 +- .../scalar/BenchmarkRoundFunction.java | 2 +- .../scalar/BenchmarkRowToRowCast.java | 2 +- .../scalar/BenchmarkTransformKey.java | 2 +- .../scalar/BenchmarkTransformValue.java | 2 +- .../sql/planner/BenchmarkPlanner.java | 2 +- .../type/BenchmarkBigIntOperators.java | 2 +- .../prestosql/orc/BenchmarkColumnReaders.java | 52 +++++++++---------- .../orc/BenchmarkOrcDecimalReader.java | 6 +-- .../orc/stream/BenchmarkLongBitPacker.java | 34 ++++++------ .../spi/block/BenchmarkComputePosition.java | 2 +- .../BenchmarkInformationSchema.java | 2 +- 30 files changed, 73 insertions(+), 73 deletions(-) diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/statistics/BenchmarkGetPartitionsSample.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/statistics/BenchmarkGetPartitionsSample.java index 15e4f2ce821e..e3edf4aa96e1 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/statistics/BenchmarkGetPartitionsSample.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/statistics/BenchmarkGetPartitionsSample.java @@ -72,7 +72,7 @@ public void setup() } public static void main(String[] args) - throws Throwable + throws Exception { Options options = new OptionsBuilder() .verbosity(VerboseMode.NORMAL) diff --git a/presto-main/src/test/java/io/prestosql/block/BenchmarkMapCopy.java b/presto-main/src/test/java/io/prestosql/block/BenchmarkMapCopy.java index 264f38b78060..54dff64a31c8 100644 --- a/presto-main/src/test/java/io/prestosql/block/BenchmarkMapCopy.java +++ b/presto-main/src/test/java/io/prestosql/block/BenchmarkMapCopy.java @@ -107,7 +107,7 @@ public BlockBuilder getBlockBuilder() } public static void main(String[] args) - throws Throwable + throws Exception { // assure the benchmarks are valid before running 
BenchmarkData data = new BenchmarkData(); diff --git a/presto-main/src/test/java/io/prestosql/execution/BenchmarkNodeScheduler.java b/presto-main/src/test/java/io/prestosql/execution/BenchmarkNodeScheduler.java index e2f258a1b39b..8471c303a8f6 100644 --- a/presto-main/src/test/java/io/prestosql/execution/BenchmarkNodeScheduler.java +++ b/presto-main/src/test/java/io/prestosql/execution/BenchmarkNodeScheduler.java @@ -222,7 +222,7 @@ public List getSplits() } public static void main(String[] args) - throws Throwable + throws Exception { Options options = new OptionsBuilder() .verbosity(VerboseMode.NORMAL) diff --git a/presto-main/src/test/java/io/prestosql/execution/resourcegroups/BenchmarkResourceGroup.java b/presto-main/src/test/java/io/prestosql/execution/resourcegroups/BenchmarkResourceGroup.java index 33d920c8b8e1..3a679788b9af 100644 --- a/presto-main/src/test/java/io/prestosql/execution/resourcegroups/BenchmarkResourceGroup.java +++ b/presto-main/src/test/java/io/prestosql/execution/resourcegroups/BenchmarkResourceGroup.java @@ -104,7 +104,7 @@ public RootInternalResourceGroup getRoot() } public static void main(String[] args) - throws Throwable + throws Exception { Options options = new OptionsBuilder() .verbosity(VerboseMode.NORMAL) diff --git a/presto-main/src/test/java/io/prestosql/operator/aggregation/BenchmarkArrayAggregation.java b/presto-main/src/test/java/io/prestosql/operator/aggregation/BenchmarkArrayAggregation.java index c6de51b148d1..8f668b546310 100644 --- a/presto-main/src/test/java/io/prestosql/operator/aggregation/BenchmarkArrayAggregation.java +++ b/presto-main/src/test/java/io/prestosql/operator/aggregation/BenchmarkArrayAggregation.java @@ -146,7 +146,7 @@ public Page getPage() } public static void main(String[] args) - throws Throwable + throws Exception { // assure the benchmarks are valid before running BenchmarkData data = new BenchmarkData(); diff --git 
a/presto-main/src/test/java/io/prestosql/operator/project/BenchmarkDictionaryBlockGetSizeInBytes.java b/presto-main/src/test/java/io/prestosql/operator/project/BenchmarkDictionaryBlockGetSizeInBytes.java index cbc24342b769..f0f0ebd8dc10 100644 --- a/presto-main/src/test/java/io/prestosql/operator/project/BenchmarkDictionaryBlockGetSizeInBytes.java +++ b/presto-main/src/test/java/io/prestosql/operator/project/BenchmarkDictionaryBlockGetSizeInBytes.java @@ -132,7 +132,7 @@ public DictionaryBlock getDictionaryBlock() } public static void main(String[] args) - throws Throwable + throws Exception { // assure the benchmarks are valid before running BenchmarkData data = new BenchmarkData(); diff --git a/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkArrayDistinct.java b/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkArrayDistinct.java index 8ae27ca142ad..8003349952df 100644 --- a/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkArrayDistinct.java +++ b/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkArrayDistinct.java @@ -158,7 +158,7 @@ public Page getPage() } public static void main(String[] args) - throws Throwable + throws Exception { // assure the benchmarks are valid before running BenchmarkData data = new BenchmarkData(); diff --git a/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkArrayFilter.java b/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkArrayFilter.java index 5b94bcfdf55a..dd18a5d3b66a 100644 --- a/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkArrayFilter.java +++ b/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkArrayFilter.java @@ -182,7 +182,7 @@ public Page getPage() } public static void main(String[] args) - throws Throwable + throws Exception { // assure the benchmarks are valid before running BenchmarkData data = new BenchmarkData(); diff --git 
a/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkArrayHashCodeOperator.java b/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkArrayHashCodeOperator.java index c43383247d3e..9632dd15ab85 100644 --- a/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkArrayHashCodeOperator.java +++ b/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkArrayHashCodeOperator.java @@ -190,7 +190,7 @@ public Page getPage() } public static void main(String[] args) - throws Throwable + throws Exception { // assure the benchmarks are valid before running BenchmarkData data = new BenchmarkData(); diff --git a/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkArrayIntersect.java b/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkArrayIntersect.java index 288220e6046d..ace37284b755 100644 --- a/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkArrayIntersect.java +++ b/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkArrayIntersect.java @@ -181,7 +181,7 @@ public void verify() } public static void main(String[] args) - throws Throwable + throws Exception { // assure the benchmarks are valid before running BenchmarkData data = new BenchmarkData(); diff --git a/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkArrayJoin.java b/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkArrayJoin.java index 2d4020ae94dd..34ac646f09ec 100644 --- a/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkArrayJoin.java +++ b/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkArrayJoin.java @@ -138,7 +138,7 @@ public void verify() } public static void main(String[] args) - throws Throwable + throws Exception { // assure the benchmarks are valid before running BenchmarkData data = new BenchmarkData(); diff --git a/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkArraySort.java 
b/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkArraySort.java index e7432ceac116..f94f28889e1f 100644 --- a/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkArraySort.java +++ b/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkArraySort.java @@ -158,7 +158,7 @@ public Page getPage() } public static void main(String[] args) - throws Throwable + throws Exception { // assure the benchmarks are valid before running BenchmarkData data = new BenchmarkData(); diff --git a/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkArraySubscript.java b/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkArraySubscript.java index 878b57d34a90..1ee0637a4c8a 100644 --- a/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkArraySubscript.java +++ b/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkArraySubscript.java @@ -229,7 +229,7 @@ private static Block createSliceArrayBlock(List keys) } public static void main(String[] args) - throws Throwable + throws Exception { // assure the benchmarks are valid before running BenchmarkData data = new BenchmarkData(); diff --git a/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkArrayTransform.java b/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkArrayTransform.java index d34fc581113f..47da26037d72 100644 --- a/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkArrayTransform.java +++ b/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkArrayTransform.java @@ -171,7 +171,7 @@ public PageBuilder getPageBuilder() } public static void main(String[] args) - throws Throwable + throws Exception { // assure the benchmarks are valid before running BenchmarkData data = new BenchmarkData(); diff --git a/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkJsonToArrayCast.java b/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkJsonToArrayCast.java index 
20112a08ac95..797c67111703 100644 --- a/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkJsonToArrayCast.java +++ b/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkJsonToArrayCast.java @@ -188,7 +188,7 @@ public void verify() } public static void main(String[] args) - throws Throwable + throws Exception { // assure the benchmarks are valid before running BenchmarkData data = new BenchmarkData(); diff --git a/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkJsonToMapCast.java b/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkJsonToMapCast.java index b3a1e06049cb..deb9be86c7c7 100644 --- a/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkJsonToMapCast.java +++ b/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkJsonToMapCast.java @@ -194,7 +194,7 @@ public void verify() } public static void main(String[] args) - throws Throwable + throws Exception { // assure the benchmarks are valid before running BenchmarkData data = new BenchmarkData(); diff --git a/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkMapConcat.java b/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkMapConcat.java index 05e4f8306603..a9362c1667db 100644 --- a/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkMapConcat.java +++ b/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkMapConcat.java @@ -199,7 +199,7 @@ private static Block createSliceArrayBlock(List keys) } public static void main(String[] args) - throws Throwable + throws Exception { // assure the benchmarks are valid before running BenchmarkData data = new BenchmarkData(); diff --git a/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkMapSubscript.java b/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkMapSubscript.java index 15a110305499..88601d08c161 100644 --- a/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkMapSubscript.java +++ 
b/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkMapSubscript.java @@ -250,7 +250,7 @@ private static Block createSliceArrayBlock(List keys) } public static void main(String[] args) - throws Throwable + throws Exception { // assure the benchmarks are valid before running BenchmarkData data = new BenchmarkData(); diff --git a/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkMapToMapCast.java b/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkMapToMapCast.java index bcd8ae4de547..5e1b525d0b6b 100644 --- a/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkMapToMapCast.java +++ b/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkMapToMapCast.java @@ -146,7 +146,7 @@ public Page getPage() } public static void main(String[] args) - throws Throwable + throws Exception { // assure the benchmarks are valid before running BenchmarkData data = new BenchmarkData(); diff --git a/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkRoundFunction.java b/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkRoundFunction.java index 5a96963deab7..7d78f77feb72 100644 --- a/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkRoundFunction.java +++ b/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkRoundFunction.java @@ -126,7 +126,7 @@ public static double roundBaseline(@SqlType(StandardTypes.DOUBLE) double num, @S } public static void main(String[] args) - throws Throwable + throws Exception { Options options = new OptionsBuilder() .verbosity(VerboseMode.NORMAL) diff --git a/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkRowToRowCast.java b/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkRowToRowCast.java index aa0df5848174..9bc2fd7c922e 100644 --- a/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkRowToRowCast.java +++ b/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkRowToRowCast.java @@ -149,7 
+149,7 @@ public void verify() } public static void main(String[] args) - throws Throwable + throws Exception { // assure the benchmarks are valid before running BenchmarkData data = new BenchmarkData(); diff --git a/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkTransformKey.java b/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkTransformKey.java index 59baca4f85ef..d76000c97ad1 100644 --- a/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkTransformKey.java +++ b/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkTransformKey.java @@ -177,7 +177,7 @@ public Page getPage() } public static void main(String[] args) - throws Throwable + throws Exception { // assure the benchmarks are valid before running BenchmarkData data = new BenchmarkData(); diff --git a/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkTransformValue.java b/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkTransformValue.java index 3c261aaedb62..7d5a551ab200 100644 --- a/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkTransformValue.java +++ b/presto-main/src/test/java/io/prestosql/operator/scalar/BenchmarkTransformValue.java @@ -192,7 +192,7 @@ public Page getPage() } public static void main(String[] args) - throws Throwable + throws Exception { // assure the benchmarks are valid before running BenchmarkData data = new BenchmarkData(); diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/BenchmarkPlanner.java b/presto-main/src/test/java/io/prestosql/sql/planner/BenchmarkPlanner.java index bd1b1e96ca72..833b6ad7fce4 100644 --- a/presto-main/src/test/java/io/prestosql/sql/planner/BenchmarkPlanner.java +++ b/presto-main/src/test/java/io/prestosql/sql/planner/BenchmarkPlanner.java @@ -127,7 +127,7 @@ public List planQueries(BenchmarkData benchmarkData) } public static void main(String[] args) - throws Throwable + throws Exception { // assure the benchmarks are valid before running 
BenchmarkData data = new BenchmarkData(); diff --git a/presto-main/src/test/java/io/prestosql/type/BenchmarkBigIntOperators.java b/presto-main/src/test/java/io/prestosql/type/BenchmarkBigIntOperators.java index a3c854d85294..8e64f762e2e6 100644 --- a/presto-main/src/test/java/io/prestosql/type/BenchmarkBigIntOperators.java +++ b/presto-main/src/test/java/io/prestosql/type/BenchmarkBigIntOperators.java @@ -399,7 +399,7 @@ private static long negateBaseLine(@SqlType(StandardTypes.BIGINT) long x) } public static void main(String[] args) - throws Throwable + throws Exception { Options options = new OptionsBuilder() .verbosity(VerboseMode.NORMAL) diff --git a/presto-orc/src/test/java/io/prestosql/orc/BenchmarkColumnReaders.java b/presto-orc/src/test/java/io/prestosql/orc/BenchmarkColumnReaders.java index 54869ab37843..7bf401e64cbc 100644 --- a/presto-orc/src/test/java/io/prestosql/orc/BenchmarkColumnReaders.java +++ b/presto-orc/src/test/java/io/prestosql/orc/BenchmarkColumnReaders.java @@ -92,7 +92,7 @@ public class BenchmarkColumnReaders @Benchmark public Object readBooleanNoNull(BooleanNoNullBenchmarkData data) - throws Throwable + throws Exception { try (OrcRecordReader recordReader = data.createRecordReader()) { return readFirstColumn(recordReader); @@ -101,7 +101,7 @@ public Object readBooleanNoNull(BooleanNoNullBenchmarkData data) @Benchmark public Object readBooleanWithNull(BooleanWithNullBenchmarkData data) - throws Throwable + throws Exception { try (OrcRecordReader recordReader = data.createRecordReader()) { return readFirstColumn(recordReader); @@ -110,7 +110,7 @@ public Object readBooleanWithNull(BooleanWithNullBenchmarkData data) @Benchmark public Object readAllNull(AllNullBenchmarkData data) - throws Throwable + throws Exception { try (OrcRecordReader recordReader = data.createRecordReader()) { return readFirstColumn(recordReader); @@ -119,7 +119,7 @@ public Object readAllNull(AllNullBenchmarkData data) @Benchmark public Object 
readByteNoNull(TinyIntNoNullBenchmarkData data) - throws Throwable + throws Exception { try (OrcRecordReader recordReader = data.createRecordReader()) { return readFirstColumn(recordReader); @@ -128,7 +128,7 @@ public Object readByteNoNull(TinyIntNoNullBenchmarkData data) @Benchmark public Object readByteWithNull(TinyIntWithNullBenchmarkData data) - throws Throwable + throws Exception { try (OrcRecordReader recordReader = data.createRecordReader()) { return readFirstColumn(recordReader); @@ -137,7 +137,7 @@ public Object readByteWithNull(TinyIntWithNullBenchmarkData data) @Benchmark public Object readShortDecimalNoNull(ShortDecimalNoNullBenchmarkData data) - throws Throwable + throws Exception { try (OrcRecordReader recordReader = data.createRecordReader()) { return readFirstColumn(recordReader); @@ -146,7 +146,7 @@ public Object readShortDecimalNoNull(ShortDecimalNoNullBenchmarkData data) @Benchmark public Object readShortDecimalWithNull(ShortDecimalWithNullBenchmarkData data) - throws Throwable + throws Exception { try (OrcRecordReader recordReader = data.createRecordReader()) { return readFirstColumn(recordReader); @@ -155,7 +155,7 @@ public Object readShortDecimalWithNull(ShortDecimalWithNullBenchmarkData data) @Benchmark public Object readLongDecimalNoNull(LongDecimalNoNullBenchmarkData data) - throws Throwable + throws Exception { try (OrcRecordReader recordReader = data.createRecordReader()) { return readFirstColumn(recordReader); @@ -164,7 +164,7 @@ public Object readLongDecimalNoNull(LongDecimalNoNullBenchmarkData data) @Benchmark public Object readLongDecimalWithNull(LongDecimalWithNullBenchmarkData data) - throws Throwable + throws Exception { try (OrcRecordReader recordReader = data.createRecordReader()) { return readFirstColumn(recordReader); @@ -173,7 +173,7 @@ public Object readLongDecimalWithNull(LongDecimalWithNullBenchmarkData data) @Benchmark public Object readDoubleNoNull(DoubleNoNullBenchmarkData data) - throws Throwable + throws Exception { 
try (OrcRecordReader recordReader = data.createRecordReader()) { return readFirstColumn(recordReader); @@ -182,7 +182,7 @@ public Object readDoubleNoNull(DoubleNoNullBenchmarkData data) @Benchmark public Object readDoubleWithNull(DoubleWithNullBenchmarkData data) - throws Throwable + throws Exception { try (OrcRecordReader recordReader = data.createRecordReader()) { return readFirstColumn(recordReader); @@ -191,7 +191,7 @@ public Object readDoubleWithNull(DoubleWithNullBenchmarkData data) @Benchmark public Object readFloatNoNull(FloatNoNullBenchmarkData data) - throws Throwable + throws Exception { try (OrcRecordReader recordReader = data.createRecordReader()) { return readFirstColumn(recordReader); @@ -200,7 +200,7 @@ public Object readFloatNoNull(FloatNoNullBenchmarkData data) @Benchmark public Object readFloatWithNull(FloatWithNullBenchmarkData data) - throws Throwable + throws Exception { try (OrcRecordReader recordReader = data.createRecordReader()) { return readFirstColumn(recordReader); @@ -209,7 +209,7 @@ public Object readFloatWithNull(FloatWithNullBenchmarkData data) @Benchmark public Object readLongNoNull(BigintNoNullBenchmarkData data) - throws Throwable + throws Exception { try (OrcRecordReader recordReader = data.createRecordReader()) { return readFirstColumn(recordReader); @@ -218,7 +218,7 @@ public Object readLongNoNull(BigintNoNullBenchmarkData data) @Benchmark public Object readLongWithNull(BigintWithNullBenchmarkData data) - throws Throwable + throws Exception { try (OrcRecordReader recordReader = data.createRecordReader()) { return readFirstColumn(recordReader); @@ -227,7 +227,7 @@ public Object readLongWithNull(BigintWithNullBenchmarkData data) @Benchmark public Object readIntNoNull(IntegerNoNullBenchmarkData data) - throws Throwable + throws Exception { try (OrcRecordReader recordReader = data.createRecordReader()) { return readFirstColumn(recordReader); @@ -236,7 +236,7 @@ public Object readIntNoNull(IntegerNoNullBenchmarkData data) 
@Benchmark public Object readIntWithNull(IntegerWithNullBenchmarkData data) - throws Throwable + throws Exception { try (OrcRecordReader recordReader = data.createRecordReader()) { return readFirstColumn(recordReader); @@ -245,7 +245,7 @@ public Object readIntWithNull(IntegerWithNullBenchmarkData data) @Benchmark public Object readShortNoNull(SmallintNoNullBenchmarkData data) - throws Throwable + throws Exception { try (OrcRecordReader recordReader = data.createRecordReader()) { return readFirstColumn(recordReader); @@ -254,7 +254,7 @@ public Object readShortNoNull(SmallintNoNullBenchmarkData data) @Benchmark public Object readShortWithNull(SmallintWithNullBenchmarkData data) - throws Throwable + throws Exception { try (OrcRecordReader recordReader = data.createRecordReader()) { return readFirstColumn(recordReader); @@ -263,7 +263,7 @@ public Object readShortWithNull(SmallintWithNullBenchmarkData data) @Benchmark public Object readSliceDirectNoNull(VarcharDirectNoNullBenchmarkData data) - throws Throwable + throws Exception { try (OrcRecordReader recordReader = data.createRecordReader()) { return readFirstColumn(recordReader); @@ -272,7 +272,7 @@ public Object readSliceDirectNoNull(VarcharDirectNoNullBenchmarkData data) @Benchmark public Object readSliceDirectWithNull(VarcharDirectWithNullBenchmarkData data) - throws Throwable + throws Exception { try (OrcRecordReader recordReader = data.createRecordReader()) { return readFirstColumn(recordReader); @@ -281,7 +281,7 @@ public Object readSliceDirectWithNull(VarcharDirectWithNullBenchmarkData data) @Benchmark public Object readSliceDictionaryNoNull(VarcharDictionaryNoNullBenchmarkData data) - throws Throwable + throws Exception { try (OrcRecordReader recordReader = data.createRecordReader()) { return readFirstColumn(recordReader); @@ -290,7 +290,7 @@ public Object readSliceDictionaryNoNull(VarcharDictionaryNoNullBenchmarkData dat @Benchmark public Object 
readSliceDictionaryWithNull(VarcharDictionaryWithNullBenchmarkData data) - throws Throwable + throws Exception { try (OrcRecordReader recordReader = data.createRecordReader()) { return readFirstColumn(recordReader); @@ -299,7 +299,7 @@ public Object readSliceDictionaryWithNull(VarcharDictionaryWithNullBenchmarkData @Benchmark public Object readTimestampNoNull(TimestampNoNullBenchmarkData data) - throws Throwable + throws Exception { try (OrcRecordReader recordReader = data.createRecordReader()) { return readFirstColumn(recordReader); @@ -308,7 +308,7 @@ public Object readTimestampNoNull(TimestampNoNullBenchmarkData data) @Benchmark public Object readTimestampWithNull(TimestampWithNullBenchmarkData data) - throws Throwable + throws Exception { try (OrcRecordReader recordReader = data.createRecordReader()) { return readFirstColumn(recordReader); @@ -1085,7 +1085,7 @@ protected Iterator createValues() } public static void main(String[] args) - throws Throwable + throws Exception { Options options = new OptionsBuilder() .verbosity(VerboseMode.NORMAL) diff --git a/presto-orc/src/test/java/io/prestosql/orc/BenchmarkOrcDecimalReader.java b/presto-orc/src/test/java/io/prestosql/orc/BenchmarkOrcDecimalReader.java index 06ee304b911a..8986280ff7c3 100644 --- a/presto-orc/src/test/java/io/prestosql/orc/BenchmarkOrcDecimalReader.java +++ b/presto-orc/src/test/java/io/prestosql/orc/BenchmarkOrcDecimalReader.java @@ -69,7 +69,7 @@ public class BenchmarkOrcDecimalReader @Benchmark public Object readDecimal(BenchmarkData data) - throws Throwable + throws Exception { OrcRecordReader recordReader = data.createRecordReader(); List blocks = new ArrayList<>(); @@ -81,7 +81,7 @@ public Object readDecimal(BenchmarkData data) @Test public void testReadDecimal() - throws Throwable + throws Exception { BenchmarkData data = new BenchmarkData(); data.setup(); @@ -138,7 +138,7 @@ private List createDecimalValues() } public static void main(String[] args) - throws Throwable + throws Exception { 
// assure the benchmarks are valid before running BenchmarkData data = new BenchmarkData(); diff --git a/presto-orc/src/test/java/io/prestosql/orc/stream/BenchmarkLongBitPacker.java b/presto-orc/src/test/java/io/prestosql/orc/stream/BenchmarkLongBitPacker.java index 5f0f3a9b2e40..e0f037b5f5f1 100644 --- a/presto-orc/src/test/java/io/prestosql/orc/stream/BenchmarkLongBitPacker.java +++ b/presto-orc/src/test/java/io/prestosql/orc/stream/BenchmarkLongBitPacker.java @@ -48,7 +48,7 @@ public class BenchmarkLongBitPacker { @Benchmark public Object baselineLength1(BenchmarkData data) - throws Throwable + throws Exception { data.input.setPosition(0); unpackGeneric(data.buffer, 0, 1, data.bits, data.input); @@ -58,7 +58,7 @@ public Object baselineLength1(BenchmarkData data) @Benchmark @OperationsPerInvocation(2) public Object baselineLength2(BenchmarkData data) - throws Throwable + throws Exception { data.input.setPosition(0); unpackGeneric(data.buffer, 0, 2, data.bits, data.input); @@ -68,7 +68,7 @@ public Object baselineLength2(BenchmarkData data) @Benchmark @OperationsPerInvocation(3) public Object baselineLength3(BenchmarkData data) - throws Throwable + throws Exception { data.input.setPosition(0); unpackGeneric(data.buffer, 0, 3, data.bits, data.input); @@ -78,7 +78,7 @@ public Object baselineLength3(BenchmarkData data) @Benchmark @OperationsPerInvocation(4) public Object baselineLength4(BenchmarkData data) - throws Throwable + throws Exception { data.input.setPosition(0); unpackGeneric(data.buffer, 0, 4, data.bits, data.input); @@ -88,7 +88,7 @@ public Object baselineLength4(BenchmarkData data) @Benchmark @OperationsPerInvocation(5) public Object baselineLength5(BenchmarkData data) - throws Throwable + throws Exception { data.input.setPosition(0); unpackGeneric(data.buffer, 0, 5, data.bits, data.input); @@ -98,7 +98,7 @@ public Object baselineLength5(BenchmarkData data) @Benchmark @OperationsPerInvocation(6) public Object baselineLength6(BenchmarkData data) - throws 
Throwable + throws Exception { data.input.setPosition(0); unpackGeneric(data.buffer, 0, 6, data.bits, data.input); @@ -108,7 +108,7 @@ public Object baselineLength6(BenchmarkData data) @Benchmark @OperationsPerInvocation(7) public Object baselineLength7(BenchmarkData data) - throws Throwable + throws Exception { data.input.setPosition(0); unpackGeneric(data.buffer, 0, 7, data.bits, data.input); @@ -118,7 +118,7 @@ public Object baselineLength7(BenchmarkData data) @Benchmark @OperationsPerInvocation(256) public Object baselineLength256(BenchmarkData data) - throws Throwable + throws Exception { data.input.setPosition(0); unpackGeneric(data.buffer, 0, 256, data.bits, data.input); @@ -127,7 +127,7 @@ public Object baselineLength256(BenchmarkData data) @Benchmark public Object optimizedLength1(BenchmarkData data) - throws Throwable + throws Exception { data.input.setPosition(0); data.packer.unpack(data.buffer, 0, 1, data.bits, data.input); @@ -137,7 +137,7 @@ public Object optimizedLength1(BenchmarkData data) @Benchmark @OperationsPerInvocation(2) public Object optimizedLength2(BenchmarkData data) - throws Throwable + throws Exception { data.input.setPosition(0); data.packer.unpack(data.buffer, 0, 2, data.bits, data.input); @@ -147,7 +147,7 @@ public Object optimizedLength2(BenchmarkData data) @Benchmark @OperationsPerInvocation(3) public Object optimizedLength3(BenchmarkData data) - throws Throwable + throws Exception { data.input.setPosition(0); data.packer.unpack(data.buffer, 0, 3, data.bits, data.input); @@ -157,7 +157,7 @@ public Object optimizedLength3(BenchmarkData data) @Benchmark @OperationsPerInvocation(4) public Object optimizedLength4(BenchmarkData data) - throws Throwable + throws Exception { data.input.setPosition(0); data.packer.unpack(data.buffer, 0, 4, data.bits, data.input); @@ -167,7 +167,7 @@ public Object optimizedLength4(BenchmarkData data) @Benchmark @OperationsPerInvocation(5) public Object optimizedLength5(BenchmarkData data) - throws Throwable 
+ throws Exception { data.input.setPosition(0); data.packer.unpack(data.buffer, 0, 5, data.bits, data.input); @@ -177,7 +177,7 @@ public Object optimizedLength5(BenchmarkData data) @Benchmark @OperationsPerInvocation(6) public Object optimizedLength6(BenchmarkData data) - throws Throwable + throws Exception { data.input.setPosition(0); data.packer.unpack(data.buffer, 0, 6, data.bits, data.input); @@ -187,7 +187,7 @@ public Object optimizedLength6(BenchmarkData data) @Benchmark @OperationsPerInvocation(7) public Object optimizedLength7(BenchmarkData data) - throws Throwable + throws Exception { data.input.setPosition(0); data.packer.unpack(data.buffer, 0, 7, data.bits, data.input); @@ -197,7 +197,7 @@ public Object optimizedLength7(BenchmarkData data) @Benchmark @OperationsPerInvocation(256) public Object optimizedLength256(BenchmarkData data) - throws Throwable + throws Exception { data.input.setPosition(0); data.packer.unpack(data.buffer, 0, 256, data.bits, data.input); @@ -226,7 +226,7 @@ public void setup() } public static void main(String[] args) - throws Throwable + throws Exception { // assure the benchmarks are valid before running BenchmarkData data = new BenchmarkData(); diff --git a/presto-spi/src/test/java/io/prestosql/spi/block/BenchmarkComputePosition.java b/presto-spi/src/test/java/io/prestosql/spi/block/BenchmarkComputePosition.java index 215e02e5b8a4..63462e2c027c 100644 --- a/presto-spi/src/test/java/io/prestosql/spi/block/BenchmarkComputePosition.java +++ b/presto-spi/src/test/java/io/prestosql/spi/block/BenchmarkComputePosition.java @@ -82,7 +82,7 @@ public long computePositionWithDivision() } public static void main(String[] args) - throws Throwable + throws Exception { Options options = new OptionsBuilder() .verbosity(VerboseMode.NORMAL) diff --git a/presto-tests/src/test/java/io/prestosql/connector/informationschema/BenchmarkInformationSchema.java 
b/presto-tests/src/test/java/io/prestosql/connector/informationschema/BenchmarkInformationSchema.java index b0243370821e..1aefc2f904d4 100644 --- a/presto-tests/src/test/java/io/prestosql/connector/informationschema/BenchmarkInformationSchema.java +++ b/presto-tests/src/test/java/io/prestosql/connector/informationschema/BenchmarkInformationSchema.java @@ -146,7 +146,7 @@ public MaterializedResult queryInformationSchema(BenchmarkData benchmarkData) } public static void main(String[] args) - throws Throwable + throws Exception { // assure the benchmarks are valid before running BenchmarkData data = new BenchmarkData(); From 435de64ab9f1ae5a1bb51e6f0447a1dd6c89f8a1 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Sat, 25 Apr 2020 22:35:20 +0200 Subject: [PATCH 246/519] Remove redundant throws declarations --- .../planner/AbstractCostBasedPlanTest.java | 1 - .../sql/planner/TestTpcdsCostBasedPlan.java | 1 - .../sql/planner/TestTpchCostBasedPlan.java | 1 - .../BigQueryEmptyProjectionPageSource.java | 3 --- .../bigquery/BigQueryResultPageSource.java | 1 - .../geospatial/TestBingTileFunctions.java | 1 - .../hive/TestHiveIntegrationSmokeTest.java | 7 ------- .../cache/TestCachingHiveMetastore.java | 1 - .../metastore/glue/TestHiveGlueMetastore.java | 1 - .../thrift/MockThriftMetastoreClient.java | 13 ------------- .../InformationSchemaPageSource.java | 2 -- .../operator/LookupJoinOperator.java | 1 - .../TableScanWorkProcessorOperator.java | 1 - .../exchange/LocalMergeSourceOperator.java | 2 -- .../execution/TestNodeScheduler.java | 1 - .../buffer/TestArbitraryOutputBuffer.java | 1 - .../buffer/TestBroadcastOutputBuffer.java | 1 - .../operator/TestHashJoinOperator.java | 2 +- .../aggregation/histogram/TestValueStore.java | 1 - .../scalar/TestMapZipWithFunction.java | 2 -- .../transaction/TestTransactionManager.java | 1 - .../io/prestosql/type/TestRealOperators.java | 1 - .../prestosql/type/TestUnknownOperators.java | 1 - .../TestMongoIntegrationSmokeTest.java | 1 - 
.../orc/stream/TestLongBitPacker.java | 2 +- .../TestPostgreSqlIntegrationSmokeTest.java | 1 - .../hive/TestAvroSymlinkInputFormat.java | 1 - .../tests/hive/TestCreateDropSchema.java | 3 --- .../tests/iceberg/TestIcebergCreateTable.java | 4 ---- .../raptor/legacy/backup/BackupModule.java | 1 - .../raptor/legacy/TestRaptorConnector.java | 1 - .../decoder/avro/TestAvroDecoder.java | 19 ------------------- .../redis/TestMinimalFunctionality.java | 3 --- .../plugin/redis/TestRedisDistributed.java | 1 - .../redis/TestRedisDistributedHash.java | 1 - .../redis/TestRedisIntegrationSmokeTest.java | 1 - .../plugin/redis/util/RedisServer.java | 2 -- .../db/TestDbSessionPropertyManager.java | 2 -- .../memory/TestMemorySessionProperties.java | 2 -- .../thrift/TestThriftIndexPageSource.java | 4 ---- 40 files changed, 2 insertions(+), 94 deletions(-) diff --git a/presto-benchto-benchmarks/src/test/java/io/prestosql/sql/planner/AbstractCostBasedPlanTest.java b/presto-benchto-benchmarks/src/test/java/io/prestosql/sql/planner/AbstractCostBasedPlanTest.java index 7cf69f4e3914..eb6e1872ea56 100644 --- a/presto-benchto-benchmarks/src/test/java/io/prestosql/sql/planner/AbstractCostBasedPlanTest.java +++ b/presto-benchto-benchmarks/src/test/java/io/prestosql/sql/planner/AbstractCostBasedPlanTest.java @@ -76,7 +76,6 @@ private String getQueryPlanResourcePath(String queryResourcePath) } protected void generate() - throws Exception { initPlanTest(); try { diff --git a/presto-benchto-benchmarks/src/test/java/io/prestosql/sql/planner/TestTpcdsCostBasedPlan.java b/presto-benchto-benchmarks/src/test/java/io/prestosql/sql/planner/TestTpcdsCostBasedPlan.java index ebc8869bc7ad..a76f1db49f76 100644 --- a/presto-benchto-benchmarks/src/test/java/io/prestosql/sql/planner/TestTpcdsCostBasedPlan.java +++ b/presto-benchto-benchmarks/src/test/java/io/prestosql/sql/planner/TestTpcdsCostBasedPlan.java @@ -87,7 +87,6 @@ public static final class UpdateTestFiles private UpdateTestFiles() {} public static 
void main(String[] args) - throws Exception { new TestTpcdsCostBasedPlan().generate(); } diff --git a/presto-benchto-benchmarks/src/test/java/io/prestosql/sql/planner/TestTpchCostBasedPlan.java b/presto-benchto-benchmarks/src/test/java/io/prestosql/sql/planner/TestTpchCostBasedPlan.java index fdbd12b6ca16..54c9dbd94b6e 100644 --- a/presto-benchto-benchmarks/src/test/java/io/prestosql/sql/planner/TestTpchCostBasedPlan.java +++ b/presto-benchto-benchmarks/src/test/java/io/prestosql/sql/planner/TestTpchCostBasedPlan.java @@ -82,7 +82,6 @@ public static final class UpdateTestFiles private UpdateTestFiles() {} public static void main(String[] args) - throws Exception { new TestTpchCostBasedPlan().generate(); } diff --git a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryEmptyProjectionPageSource.java b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryEmptyProjectionPageSource.java index 656d9fe69a90..2d98039be3ff 100644 --- a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryEmptyProjectionPageSource.java +++ b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryEmptyProjectionPageSource.java @@ -18,8 +18,6 @@ import io.prestosql.spi.PageBuilder; import io.prestosql.spi.connector.ConnectorPageSource; -import java.io.IOException; - public class BigQueryEmptyProjectionPageSource implements ConnectorPageSource { @@ -69,7 +67,6 @@ public long getSystemMemoryUsage() @Override public void close() - throws IOException { // nothing to do } diff --git a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryResultPageSource.java b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryResultPageSource.java index 6e99cea0fd89..3574a15ca0e1 100644 --- a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryResultPageSource.java +++ b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryResultPageSource.java @@ -262,7 +262,6 @@ public long getSystemMemoryUsage() 
@Override public void close() - throws IOException { bigQueryStorageClient.close(); closed = true; diff --git a/presto-geospatial/src/test/java/io/prestosql/plugin/geospatial/TestBingTileFunctions.java b/presto-geospatial/src/test/java/io/prestosql/plugin/geospatial/TestBingTileFunctions.java index 56faf14e5586..4b1b162349c4 100644 --- a/presto-geospatial/src/test/java/io/prestosql/plugin/geospatial/TestBingTileFunctions.java +++ b/presto-geospatial/src/test/java/io/prestosql/plugin/geospatial/TestBingTileFunctions.java @@ -60,7 +60,6 @@ public void testSerialization() @Test public void testArrayOfBingTiles() - throws Exception { assertFunction("array [bing_tile(1, 2, 10), bing_tile(3, 4, 11)]", new ArrayType(BING_TILE), diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java index 9374b9411888..3cfff0b3a4d3 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java @@ -364,7 +364,6 @@ public void testJoinWithPartitionFilterOnPartionedTable() @Test public void testPartitionPredicateAllowed() - throws Exception { Session session = Session.builder(getQueryRunner().getDefaultSession()) .setIdentity(Identity.forUser("hive") @@ -390,7 +389,6 @@ public void testPartitionPredicateAllowed() @Test public void testNestedQueryWithInnerPartitionPredicate() - throws Exception { Session session = Session.builder(getQueryRunner().getDefaultSession()) .setIdentity(Identity.forUser("hive") @@ -416,7 +414,6 @@ public void testNestedQueryWithInnerPartitionPredicate() @Test public void testPartitionPredicateDisallowed() - throws Exception { Session session = Session.builder(getQueryRunner().getDefaultSession()) .setIdentity(Identity.forUser("hive") @@ -6279,7 +6276,6 @@ private static File createAvroSchemaFile() @Test public void 
testCreateOrcTableWithSchemaUrl() - throws Exception { @Language("SQL") String createTableSql = format("" + "CREATE TABLE %s.%s.test_orc (\n" + @@ -6297,7 +6293,6 @@ public void testCreateOrcTableWithSchemaUrl() @Test public void testCtasFailsWithAvroSchemaUrl() - throws Exception { @Language("SQL") String ctasSqlWithoutData = "CREATE TABLE create_avro\n" + "WITH (avro_schema_url = 'dummy_schema')\n" + @@ -6314,7 +6309,6 @@ public void testCtasFailsWithAvroSchemaUrl() @Test public void testBucketedTablesFailWithAvroSchemaUrl() - throws Exception { @Language("SQL") String createSql = "CREATE TABLE create_avro (dummy VARCHAR)\n" + "WITH (avro_schema_url = 'dummy_schema',\n" + @@ -6325,7 +6319,6 @@ public void testBucketedTablesFailWithAvroSchemaUrl() @Test public void testPartitionedTablesFailWithAvroSchemaUrl() - throws Exception { @Language("SQL") String createSql = "CREATE TABLE create_avro (dummy VARCHAR)\n" + "WITH (avro_schema_url = 'dummy_schema',\n" + diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/metastore/cache/TestCachingHiveMetastore.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/metastore/cache/TestCachingHiveMetastore.java index 7c50eb3fd4a3..ef6a45d83d58 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/metastore/cache/TestCachingHiveMetastore.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/metastore/cache/TestCachingHiveMetastore.java @@ -235,7 +235,6 @@ public void testGetPartitionsByNames() @Test public void testListRoles() - throws Exception { assertEquals(mockClient.getAccessCount(), 0); diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/metastore/glue/TestHiveGlueMetastore.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/metastore/glue/TestHiveGlueMetastore.java index 86272efd81f7..5affce14b722 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/metastore/glue/TestHiveGlueMetastore.java +++ 
b/presto-hive/src/test/java/io/prestosql/plugin/hive/metastore/glue/TestHiveGlueMetastore.java @@ -67,7 +67,6 @@ public void testRenameTable() @Override public void testPartitionStatisticsSampling() - throws Exception { // Glue metastore does not support column level statistics } diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/metastore/thrift/MockThriftMetastoreClient.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/metastore/thrift/MockThriftMetastoreClient.java index e25ab89bb77e..5b4ebd927162 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/metastore/thrift/MockThriftMetastoreClient.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/metastore/thrift/MockThriftMetastoreClient.java @@ -161,7 +161,6 @@ public Table getTableWithCapabilities(String databaseName, String tableName) @Override public List getFields(String databaseName, String tableName) - throws TException { return ImmutableList.of(new FieldSchema("key", "string", null)); } @@ -381,14 +380,12 @@ public List getRoleNames() @Override public void createRole(String role, String grantor) - throws TException { // No-op } @Override public void dropRole(String role) - throws TException { // No-op } @@ -407,21 +404,18 @@ public boolean revokePrivileges(PrivilegeBag privilegeBag) @Override public void grantRole(String role, String granteeName, PrincipalType granteeType, String grantorName, PrincipalType grantorType, boolean grantOption) - throws TException { // No-op } @Override public void revokeRole(String role, String granteeName, PrincipalType granteeType, boolean grantOption) - throws TException { // No-op } @Override public List listRoleGrants(String name, PrincipalType principalType) - throws TException { accessCount.incrementAndGet(); if (throwException) { @@ -444,49 +438,42 @@ public void setUGI(String userName) @Override public long openTransaction(String user) - throws TException { throw new UnsupportedOperationException(); } @Override public void 
commitTransaction(long transactionId) - throws TException { throw new UnsupportedOperationException(); } @Override public void sendTransactionHeartbeat(long transactionId) - throws TException { throw new UnsupportedOperationException(); } @Override public LockResponse acquireLock(LockRequest lockRequest) - throws TException { throw new UnsupportedOperationException(); } @Override public LockResponse checkLock(long lockId) - throws TException { throw new UnsupportedOperationException(); } @Override public String getValidWriteIds(List tableList, long currentTransactionId) - throws TException { throw new UnsupportedOperationException(); } @Override public String get_config_value(String name, String defaultValue) - throws TException { throw new UnsupportedOperationException(); } diff --git a/presto-main/src/main/java/io/prestosql/connector/informationschema/InformationSchemaPageSource.java b/presto-main/src/main/java/io/prestosql/connector/informationschema/InformationSchemaPageSource.java index 880a002239c3..10600482ca3a 100644 --- a/presto-main/src/main/java/io/prestosql/connector/informationschema/InformationSchemaPageSource.java +++ b/presto-main/src/main/java/io/prestosql/connector/informationschema/InformationSchemaPageSource.java @@ -33,7 +33,6 @@ import io.prestosql.spi.security.RoleGrant; import io.prestosql.spi.type.Type; -import java.io.IOException; import java.util.ArrayDeque; import java.util.Iterator; import java.util.List; @@ -196,7 +195,6 @@ public long getSystemMemoryUsage() @Override public void close() - throws IOException { closed = true; } diff --git a/presto-main/src/main/java/io/prestosql/operator/LookupJoinOperator.java b/presto-main/src/main/java/io/prestosql/operator/LookupJoinOperator.java index d9cce930af6d..e18309c11807 100644 --- a/presto-main/src/main/java/io/prestosql/operator/LookupJoinOperator.java +++ b/presto-main/src/main/java/io/prestosql/operator/LookupJoinOperator.java @@ -131,7 +131,6 @@ public WorkProcessor getOutputPages() 
@Override public void close() - throws Exception { joinProcessor.close(); } diff --git a/presto-main/src/main/java/io/prestosql/operator/TableScanWorkProcessorOperator.java b/presto-main/src/main/java/io/prestosql/operator/TableScanWorkProcessorOperator.java index 05215e28dd2a..d2ebfa8bfcd0 100644 --- a/presto-main/src/main/java/io/prestosql/operator/TableScanWorkProcessorOperator.java +++ b/presto-main/src/main/java/io/prestosql/operator/TableScanWorkProcessorOperator.java @@ -121,7 +121,6 @@ public Duration getReadTime() @Override public void close() - throws Exception { splitToPages.close(); } diff --git a/presto-main/src/main/java/io/prestosql/operator/exchange/LocalMergeSourceOperator.java b/presto-main/src/main/java/io/prestosql/operator/exchange/LocalMergeSourceOperator.java index 23f435cc9f21..3c7ca1a0ce1a 100644 --- a/presto-main/src/main/java/io/prestosql/operator/exchange/LocalMergeSourceOperator.java +++ b/presto-main/src/main/java/io/prestosql/operator/exchange/LocalMergeSourceOperator.java @@ -28,7 +28,6 @@ import io.prestosql.sql.gen.OrderingCompiler; import io.prestosql.sql.planner.plan.PlanNodeId; -import java.io.IOException; import java.util.List; import java.util.stream.IntStream; @@ -172,7 +171,6 @@ public Page getOutput() @Override public void close() - throws IOException { sources.forEach(LocalExchangeSource::close); } diff --git a/presto-main/src/test/java/io/prestosql/execution/TestNodeScheduler.java b/presto-main/src/test/java/io/prestosql/execution/TestNodeScheduler.java index 5c6659fa927c..7408d681ed36 100644 --- a/presto-main/src/test/java/io/prestosql/execution/TestNodeScheduler.java +++ b/presto-main/src/test/java/io/prestosql/execution/TestNodeScheduler.java @@ -152,7 +152,6 @@ public void testScheduleLocal() @Test(timeOut = 60 * 1000) public void testTopologyAwareScheduling() - throws Exception { NodeTaskMap nodeTaskMap = new NodeTaskMap(finalizerService); InMemoryNodeManager nodeManager = new InMemoryNodeManager(); diff --git 
a/presto-main/src/test/java/io/prestosql/execution/buffer/TestArbitraryOutputBuffer.java b/presto-main/src/test/java/io/prestosql/execution/buffer/TestArbitraryOutputBuffer.java index 1a9f6e8afe45..8ed7aa4f27a4 100644 --- a/presto-main/src/test/java/io/prestosql/execution/buffer/TestArbitraryOutputBuffer.java +++ b/presto-main/src/test/java/io/prestosql/execution/buffer/TestArbitraryOutputBuffer.java @@ -978,7 +978,6 @@ public void testFinishBeforeNoMoreBuffers() @Test public void testForceFreeMemory() - throws Throwable { ArbitraryOutputBuffer buffer = createArbitraryBuffer(createInitialEmptyOutputBuffers(ARBITRARY), sizeOfPages(10)); for (int i = 0; i < 3; i++) { diff --git a/presto-main/src/test/java/io/prestosql/execution/buffer/TestBroadcastOutputBuffer.java b/presto-main/src/test/java/io/prestosql/execution/buffer/TestBroadcastOutputBuffer.java index 9d6fb3051ded..f40b40ae6cb4 100644 --- a/presto-main/src/test/java/io/prestosql/execution/buffer/TestBroadcastOutputBuffer.java +++ b/presto-main/src/test/java/io/prestosql/execution/buffer/TestBroadcastOutputBuffer.java @@ -1119,7 +1119,6 @@ public void testBufferFinishesWhenClientBuffersDestroyed() @Test public void testForceFreeMemory() - throws Throwable { BroadcastOutputBuffer buffer = createBroadcastBuffer( createInitialEmptyOutputBuffers(BROADCAST) diff --git a/presto-main/src/test/java/io/prestosql/operator/TestHashJoinOperator.java b/presto-main/src/test/java/io/prestosql/operator/TestHashJoinOperator.java index 3076fbe0edd9..239a7cf4b1c5 100644 --- a/presto-main/src/test/java/io/prestosql/operator/TestHashJoinOperator.java +++ b/presto-main/src/test/java/io/prestosql/operator/TestHashJoinOperator.java @@ -327,7 +327,7 @@ public void testInnerJoinWithSpill(boolean probeHashEnabled, List whe @Test(dataProvider = "joinWithFailingSpillValues") public void testInnerJoinWithFailingSpill(boolean probeHashEnabled, List whenSpill, WhenSpillFails whenSpillFails, boolean isDictionaryProcessingJoinEnabled) - throws 
Throwable + throws Exception { DummySpillerFactory buildSpillerFactory = new DummySpillerFactory(); DummySpillerFactory joinSpillerFactory = new DummySpillerFactory(); diff --git a/presto-main/src/test/java/io/prestosql/operator/aggregation/histogram/TestValueStore.java b/presto-main/src/test/java/io/prestosql/operator/aggregation/histogram/TestValueStore.java index fd317076241b..805e8ca2c3ac 100644 --- a/presto-main/src/test/java/io/prestosql/operator/aggregation/histogram/TestValueStore.java +++ b/presto-main/src/test/java/io/prestosql/operator/aggregation/histogram/TestValueStore.java @@ -34,7 +34,6 @@ public class TestValueStore @BeforeMethod(alwaysRun = true) public void setUp() - throws Exception { type = VarcharType.createVarcharType(100); BlockBuilder blockBuilder = type.createBlockBuilder(null, 100, 10); diff --git a/presto-main/src/test/java/io/prestosql/operator/scalar/TestMapZipWithFunction.java b/presto-main/src/test/java/io/prestosql/operator/scalar/TestMapZipWithFunction.java index 77c0a142ef27..0a0cb89d1704 100644 --- a/presto-main/src/test/java/io/prestosql/operator/scalar/TestMapZipWithFunction.java +++ b/presto-main/src/test/java/io/prestosql/operator/scalar/TestMapZipWithFunction.java @@ -41,7 +41,6 @@ public void testRetainedSizeBounded() @Test public void testBasic() - throws Exception { assertFunction( "map_zip_with(" + @@ -83,7 +82,6 @@ public void testBasic() @Test public void testTypes() - throws Exception { assertFunction( "map_zip_with(" + diff --git a/presto-main/src/test/java/io/prestosql/transaction/TestTransactionManager.java b/presto-main/src/test/java/io/prestosql/transaction/TestTransactionManager.java index 8fd02af060b7..742b8741df06 100644 --- a/presto-main/src/test/java/io/prestosql/transaction/TestTransactionManager.java +++ b/presto-main/src/test/java/io/prestosql/transaction/TestTransactionManager.java @@ -167,7 +167,6 @@ public void testFailedTransactionWorkflow() @Test public void testExpiration() - throws Exception { try 
(IdleCheckExecutor executor = new IdleCheckExecutor()) { TransactionManager transactionManager = InMemoryTransactionManager.create( diff --git a/presto-main/src/test/java/io/prestosql/type/TestRealOperators.java b/presto-main/src/test/java/io/prestosql/type/TestRealOperators.java index 08c5cc516f19..0439f90be215 100644 --- a/presto-main/src/test/java/io/prestosql/type/TestRealOperators.java +++ b/presto-main/src/test/java/io/prestosql/type/TestRealOperators.java @@ -293,7 +293,6 @@ public void testIsDistinctFrom() @Test public void testIndeterminate() - throws Exception { assertOperator(INDETERMINATE, "cast(null as real)", BOOLEAN, true); assertOperator(INDETERMINATE, "cast(-1.2 as real)", BOOLEAN, false); diff --git a/presto-main/src/test/java/io/prestosql/type/TestUnknownOperators.java b/presto-main/src/test/java/io/prestosql/type/TestUnknownOperators.java index 74d64899dfb2..f4f395b172da 100644 --- a/presto-main/src/test/java/io/prestosql/type/TestUnknownOperators.java +++ b/presto-main/src/test/java/io/prestosql/type/TestUnknownOperators.java @@ -128,7 +128,6 @@ public void testIsDistinctFrom() @Test public void testIndeterminate() - throws Exception { assertOperator(INDETERMINATE, "null", BOOLEAN, true); } diff --git a/presto-mongodb/src/test/java/io/prestosql/plugin/mongodb/TestMongoIntegrationSmokeTest.java b/presto-mongodb/src/test/java/io/prestosql/plugin/mongodb/TestMongoIntegrationSmokeTest.java index 996bd7071e31..0d77121a4aaf 100644 --- a/presto-mongodb/src/test/java/io/prestosql/plugin/mongodb/TestMongoIntegrationSmokeTest.java +++ b/presto-mongodb/src/test/java/io/prestosql/plugin/mongodb/TestMongoIntegrationSmokeTest.java @@ -94,7 +94,6 @@ public void createTableWithEveryType() @Test public void testInsertWithEveryType() - throws Exception { String createSql = "" + "CREATE TABLE test_insert_types_table " + diff --git a/presto-orc/src/test/java/io/prestosql/orc/stream/TestLongBitPacker.java 
b/presto-orc/src/test/java/io/prestosql/orc/stream/TestLongBitPacker.java index 6b10bbcec68d..8f9cc4bb3acf 100644 --- a/presto-orc/src/test/java/io/prestosql/orc/stream/TestLongBitPacker.java +++ b/presto-orc/src/test/java/io/prestosql/orc/stream/TestLongBitPacker.java @@ -31,7 +31,7 @@ public class TestLongBitPacker @Test public void testBasic() - throws Throwable + throws Exception { LongBitPacker packer = new LongBitPacker(); for (int length = 0; length < LENGTHS; length++) { diff --git a/presto-postgresql/src/test/java/io/prestosql/plugin/postgresql/TestPostgreSqlIntegrationSmokeTest.java b/presto-postgresql/src/test/java/io/prestosql/plugin/postgresql/TestPostgreSqlIntegrationSmokeTest.java index ee974f3a992c..518c9292f441 100644 --- a/presto-postgresql/src/test/java/io/prestosql/plugin/postgresql/TestPostgreSqlIntegrationSmokeTest.java +++ b/presto-postgresql/src/test/java/io/prestosql/plugin/postgresql/TestPostgreSqlIntegrationSmokeTest.java @@ -119,7 +119,6 @@ public void testForeignTable() @Test public void testSystemTable() - throws Exception { assertThat(computeActual("SHOW TABLES FROM pg_catalog").getOnlyColumnAsSet()) .contains("pg_tables", "pg_views", "pg_type", "pg_index"); diff --git a/presto-product-tests/src/main/java/io/prestosql/tests/hive/TestAvroSymlinkInputFormat.java b/presto-product-tests/src/main/java/io/prestosql/tests/hive/TestAvroSymlinkInputFormat.java index 558ba17f09a3..7d325eb85fdc 100644 --- a/presto-product-tests/src/main/java/io/prestosql/tests/hive/TestAvroSymlinkInputFormat.java +++ b/presto-product-tests/src/main/java/io/prestosql/tests/hive/TestAvroSymlinkInputFormat.java @@ -64,7 +64,6 @@ private void saveResourceOnHdfs(String resource, String location) @Test(groups = STORAGE_FORMATS) public void testSymlinkTable() - throws IOException { onHive().executeQuery("DROP TABLE IF EXISTS test_avro_symlink"); diff --git a/presto-product-tests/src/main/java/io/prestosql/tests/hive/TestCreateDropSchema.java 
b/presto-product-tests/src/main/java/io/prestosql/tests/hive/TestCreateDropSchema.java index 19c5bf33c38b..335aa6c7a9d9 100644 --- a/presto-product-tests/src/main/java/io/prestosql/tests/hive/TestCreateDropSchema.java +++ b/presto-product-tests/src/main/java/io/prestosql/tests/hive/TestCreateDropSchema.java @@ -18,8 +18,6 @@ import io.prestosql.tempto.hadoop.hdfs.HdfsClient; import org.testng.annotations.Test; -import java.sql.SQLException; - import static io.prestosql.tempto.assertions.QueryAssert.assertThat; import static io.prestosql.tempto.query.QueryExecutor.query; import static io.prestosql.tests.utils.QueryExecutors.onHive; @@ -35,7 +33,6 @@ public class TestCreateDropSchema @Test public void testCreateDropSchema() - throws SQLException { onHive().executeQuery("DROP DATABASE IF EXISTS test_drop_schema CASCADE"); diff --git a/presto-product-tests/src/main/java/io/prestosql/tests/iceberg/TestIcebergCreateTable.java b/presto-product-tests/src/main/java/io/prestosql/tests/iceberg/TestIcebergCreateTable.java index 4037d2b3a417..23a4da48e27e 100644 --- a/presto-product-tests/src/main/java/io/prestosql/tests/iceberg/TestIcebergCreateTable.java +++ b/presto-product-tests/src/main/java/io/prestosql/tests/iceberg/TestIcebergCreateTable.java @@ -17,8 +17,6 @@ import io.prestosql.tempto.query.QueryExecutor; import org.testng.annotations.Test; -import java.sql.SQLException; - import static io.prestosql.tempto.assertions.QueryAssert.Row.row; import static io.prestosql.tempto.assertions.QueryAssert.assertThat; import static io.prestosql.tests.TestGroups.ICEBERG; @@ -30,7 +28,6 @@ public class TestIcebergCreateTable { @Test(groups = {ICEBERG, STORAGE_FORMATS}) public void testCreateTable() - throws SQLException { QueryExecutor queryExecutor = onPresto(); queryExecutor.executeQuery("CREATE SCHEMA iceberg.iceberg"); @@ -51,7 +48,6 @@ public void testCreateTable() @Test(groups = {ICEBERG, STORAGE_FORMATS}) public void testCreateTableAsSelect() - throws SQLException { 
QueryExecutor queryExecutor = onPresto(); queryExecutor.executeQuery("CREATE SCHEMA iceberg.iceberg"); diff --git a/presto-raptor-legacy/src/main/java/io/prestosql/plugin/raptor/legacy/backup/BackupModule.java b/presto-raptor-legacy/src/main/java/io/prestosql/plugin/raptor/legacy/backup/BackupModule.java index a4f0390cb5f0..59d5f7334b47 100644 --- a/presto-raptor-legacy/src/main/java/io/prestosql/plugin/raptor/legacy/backup/BackupModule.java +++ b/presto-raptor-legacy/src/main/java/io/prestosql/plugin/raptor/legacy/backup/BackupModule.java @@ -79,7 +79,6 @@ private static Optional createBackupStore( MBeanExporter exporter, RaptorConnectorId connectorId, BackupConfig config) - throws Exception { if (store == null) { return Optional.empty(); diff --git a/presto-raptor-legacy/src/test/java/io/prestosql/plugin/raptor/legacy/TestRaptorConnector.java b/presto-raptor-legacy/src/test/java/io/prestosql/plugin/raptor/legacy/TestRaptorConnector.java index a6ff34f6c178..a49982b20ea4 100644 --- a/presto-raptor-legacy/src/test/java/io/prestosql/plugin/raptor/legacy/TestRaptorConnector.java +++ b/presto-raptor-legacy/src/test/java/io/prestosql/plugin/raptor/legacy/TestRaptorConnector.java @@ -87,7 +87,6 @@ public class TestRaptorConnector @BeforeMethod public void setup() - throws Exception { TypeManager typeManager = new InternalTypeManager(createTestMetadataManager()); DBI dbi = new DBI("jdbc:h2:mem:test" + System.nanoTime() + ThreadLocalRandom.current().nextLong()); diff --git a/presto-record-decoder/src/test/java/io/prestosql/decoder/avro/TestAvroDecoder.java b/presto-record-decoder/src/test/java/io/prestosql/decoder/avro/TestAvroDecoder.java index 19da9b4b7f70..1a9a479d0d67 100644 --- a/presto-record-decoder/src/test/java/io/prestosql/decoder/avro/TestAvroDecoder.java +++ b/presto-record-decoder/src/test/java/io/prestosql/decoder/avro/TestAvroDecoder.java @@ -178,7 +178,6 @@ private static GenericData.Record buildAvroRecord(Schema schema, ByteArrayOutput @Test public void 
testStringDecodedAsVarchar() - throws Exception { DecoderTestColumnHandle row = new DecoderTestColumnHandle(0, "row", VARCHAR, "string_field", null, null, false, false, false); Map decodedRow = buildAndDecodeColumn(row, "string_field", "\"string\"", "Mon Jul 28 20:38:07 +0000 2014"); @@ -188,7 +187,6 @@ public void testStringDecodedAsVarchar() @Test public void testSchemaEvolutionAddingColumn() - throws Exception { DecoderTestColumnHandle originalColumn = new DecoderTestColumnHandle(0, "row0", VARCHAR, "string_field", null, null, false, false, false); DecoderTestColumnHandle newlyAddedColumn = new DecoderTestColumnHandle(1, "row1", VARCHAR, "string_field_added", null, null, false, false, false); @@ -212,7 +210,6 @@ public void testSchemaEvolutionAddingColumn() @Test public void testSchemaEvolutionRenamingColumn() - throws Exception { byte[] originalData = buildAvroData(new Schema.Parser().parse( getAvroSchema("string_field", "\"string\"")), @@ -231,7 +228,6 @@ public void testSchemaEvolutionRenamingColumn() @Test public void testSchemaEvolutionRemovingColumn() - throws Exception { byte[] originalData = buildAvroData(new Schema.Parser().parse( getAvroSchema(ImmutableMap.of( @@ -254,7 +250,6 @@ public void testSchemaEvolutionRemovingColumn() @Test public void testSchemaEvolutionIntToLong() - throws Exception { byte[] originalIntData = buildAvroData(new Schema.Parser().parse( getAvroSchema("int_to_long_field", "\"int\"")), @@ -273,7 +268,6 @@ public void testSchemaEvolutionIntToLong() @Test public void testSchemaEvolutionIntToDouble() - throws Exception { byte[] originalIntData = buildAvroData(new Schema.Parser().parse( getAvroSchema("int_to_double_field", "\"int\"")), @@ -292,7 +286,6 @@ public void testSchemaEvolutionIntToDouble() @Test public void testSchemaEvolutionToIncompatibleType() - throws Exception { byte[] originalIntData = buildAvroData(new Schema.Parser().parse( getAvroSchema("int_to_string_field", "\"int\"")), @@ -310,7 +303,6 @@ public void 
testSchemaEvolutionToIncompatibleType() @Test public void testLongDecodedAsBigint() - throws Exception { DecoderTestColumnHandle row = new DecoderTestColumnHandle(0, "row", BIGINT, "id", null, null, false, false, false); Map decodedRow = buildAndDecodeColumn(row, "id", "\"long\"", 493857959588286460L); @@ -320,7 +312,6 @@ public void testLongDecodedAsBigint() @Test public void testIntDecodedAsBigint() - throws Exception { DecoderTestColumnHandle row = new DecoderTestColumnHandle(0, "row", BIGINT, "id", null, null, false, false, false); Map decodedRow = buildAndDecodeColumn(row, "id", "\"int\"", 100); @@ -330,7 +321,6 @@ public void testIntDecodedAsBigint() @Test public void testFloatDecodedAsDouble() - throws Exception { DecoderTestColumnHandle row = new DecoderTestColumnHandle(0, "row", DOUBLE, "float_field", null, null, false, false, false); Map decodedRow = buildAndDecodeColumn(row, "float_field", "\"float\"", 10.2f); @@ -340,7 +330,6 @@ public void testFloatDecodedAsDouble() @Test public void testBytesDecodedAsVarbinary() - throws Exception { DecoderTestColumnHandle row = new DecoderTestColumnHandle(0, "row", VARBINARY, "encoded", null, null, false, false, false); Map decodedRow = buildAndDecodeColumn(row, "encoded", "\"bytes\"", ByteBuffer.wrap("mytext".getBytes(UTF_8))); @@ -350,7 +339,6 @@ public void testBytesDecodedAsVarbinary() @Test public void testDoubleDecodedAsDouble() - throws Exception { DecoderTestColumnHandle row = new DecoderTestColumnHandle(0, "row", DOUBLE, "double_field", null, null, false, false, false); Map decodedRow = buildAndDecodeColumn(row, "double_field", "\"double\"", 56.898); @@ -360,7 +348,6 @@ public void testDoubleDecodedAsDouble() @Test public void testStringDecodedAsVarcharN() - throws Exception { DecoderTestColumnHandle row = new DecoderTestColumnHandle(0, "row", createVarcharType(10), "varcharn_field", null, null, false, false, false); Map decodedRow = buildAndDecodeColumn(row, "varcharn_field", "\"string\"", 
"abcdefghijklmno"); @@ -370,7 +357,6 @@ public void testStringDecodedAsVarcharN() @Test public void testNestedRecord() - throws Exception { String schema = "{\"type\" : \"record\", " + " \"name\" : \"nested_schema\"," + @@ -409,7 +395,6 @@ public void testNestedRecord() @Test public void testNonExistentFieldsAreNull() - throws Exception { DecoderTestColumnHandle row1 = new DecoderTestColumnHandle(0, "row1", createVarcharType(100), "very/deep/varchar", null, null, false, false, false); DecoderTestColumnHandle row2 = new DecoderTestColumnHandle(1, "row2", BIGINT, "no_bigint", null, null, false, false, false); @@ -440,7 +425,6 @@ public void testRuntimeDecodingFailure() @Test public void testArrayDecodedAsArray() - throws Exception { DecoderTestColumnHandle row = new DecoderTestColumnHandle(0, "row", new ArrayType(BIGINT), "array_field", null, null, false, false, false); @@ -450,7 +434,6 @@ public void testArrayDecodedAsArray() @Test public void testArrayWithNulls() - throws Exception { DecoderTestColumnHandle row = new DecoderTestColumnHandle(0, "row", new ArrayType(BIGINT), "array_field", null, null, false, false, false); @@ -462,7 +445,6 @@ public void testArrayWithNulls() @Test public void testMapDecodedAsMap() - throws Exception { DecoderTestColumnHandle row = new DecoderTestColumnHandle(0, "row", VARCHAR_MAP_TYPE, "map_field", null, null, false, false, false); @@ -478,7 +460,6 @@ public void testMapDecodedAsMap() @Test public void testMapWithNull() - throws Exception { DecoderTestColumnHandle row = new DecoderTestColumnHandle(0, "row", VARCHAR_MAP_TYPE, "map_field", null, null, false, false, false); diff --git a/presto-redis/src/test/java/io/prestosql/plugin/redis/TestMinimalFunctionality.java b/presto-redis/src/test/java/io/prestosql/plugin/redis/TestMinimalFunctionality.java index cc6f8aab75ea..1ce6a93d78bd 100644 --- a/presto-redis/src/test/java/io/prestosql/plugin/redis/TestMinimalFunctionality.java +++ 
b/presto-redis/src/test/java/io/prestosql/plugin/redis/TestMinimalFunctionality.java @@ -55,14 +55,12 @@ public class TestMinimalFunctionality @BeforeClass public void startRedis() - throws Exception { redisServer = new RedisServer(); } @AfterClass(alwaysRun = true) public void stopRedis() - throws Exception { redisServer.close(); redisServer = null; @@ -70,7 +68,6 @@ public void stopRedis() @BeforeMethod public void spinUp() - throws Exception { this.tableName = "test_" + UUID.randomUUID().toString().replaceAll("-", "_"); diff --git a/presto-redis/src/test/java/io/prestosql/plugin/redis/TestRedisDistributed.java b/presto-redis/src/test/java/io/prestosql/plugin/redis/TestRedisDistributed.java index edfa94128296..1f4bb5d5bd78 100644 --- a/presto-redis/src/test/java/io/prestosql/plugin/redis/TestRedisDistributed.java +++ b/presto-redis/src/test/java/io/prestosql/plugin/redis/TestRedisDistributed.java @@ -36,7 +36,6 @@ protected QueryRunner createQueryRunner() @AfterClass(alwaysRun = true) public void destroy() - throws Exception { redisServer.close(); } diff --git a/presto-redis/src/test/java/io/prestosql/plugin/redis/TestRedisDistributedHash.java b/presto-redis/src/test/java/io/prestosql/plugin/redis/TestRedisDistributedHash.java index 1950cc9490ae..549393a92bd8 100644 --- a/presto-redis/src/test/java/io/prestosql/plugin/redis/TestRedisDistributedHash.java +++ b/presto-redis/src/test/java/io/prestosql/plugin/redis/TestRedisDistributedHash.java @@ -38,7 +38,6 @@ protected QueryRunner createQueryRunner() @AfterClass(alwaysRun = true) public void destroy() - throws Exception { redisServer.close(); } diff --git a/presto-redis/src/test/java/io/prestosql/plugin/redis/TestRedisIntegrationSmokeTest.java b/presto-redis/src/test/java/io/prestosql/plugin/redis/TestRedisIntegrationSmokeTest.java index c90ecf378d9f..cf14b108a8a5 100644 --- a/presto-redis/src/test/java/io/prestosql/plugin/redis/TestRedisIntegrationSmokeTest.java +++ 
b/presto-redis/src/test/java/io/prestosql/plugin/redis/TestRedisIntegrationSmokeTest.java @@ -38,7 +38,6 @@ protected QueryRunner createQueryRunner() @AfterClass(alwaysRun = true) public void destroy() - throws Exception { redisServer.close(); } diff --git a/presto-redis/src/test/java/io/prestosql/plugin/redis/util/RedisServer.java b/presto-redis/src/test/java/io/prestosql/plugin/redis/util/RedisServer.java index 7ed5b040272e..38cfeab8866b 100644 --- a/presto-redis/src/test/java/io/prestosql/plugin/redis/util/RedisServer.java +++ b/presto-redis/src/test/java/io/prestosql/plugin/redis/util/RedisServer.java @@ -18,7 +18,6 @@ import redis.clients.jedis.JedisPool; import java.io.Closeable; -import java.io.IOException; public class RedisServer implements Closeable @@ -54,7 +53,6 @@ public HostAndPort getHostAndPort() @Override public void close() - throws IOException { jedisPool.destroy(); container.close(); diff --git a/presto-session-property-managers/src/test/java/io/prestosql/plugin/session/db/TestDbSessionPropertyManager.java b/presto-session-property-managers/src/test/java/io/prestosql/plugin/session/db/TestDbSessionPropertyManager.java index 463d343bd37d..0c016d2d027f 100644 --- a/presto-session-property-managers/src/test/java/io/prestosql/plugin/session/db/TestDbSessionPropertyManager.java +++ b/presto-session-property-managers/src/test/java/io/prestosql/plugin/session/db/TestDbSessionPropertyManager.java @@ -24,7 +24,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import java.io.IOException; import java.util.Map; import java.util.Optional; import java.util.regex.Pattern; @@ -84,7 +83,6 @@ public void destroy() @Override protected void assertProperties(Map properties, SessionMatchSpec... 
specs) - throws IOException { insertSpecs(specs); long failureCountBefore = specsProvider.getDbLoadFailures().getTotalCount(); diff --git a/presto-tests/src/test/java/io/prestosql/memory/TestMemorySessionProperties.java b/presto-tests/src/test/java/io/prestosql/memory/TestMemorySessionProperties.java index 09ef8827c1fb..84ee999f1f2a 100644 --- a/presto-tests/src/test/java/io/prestosql/memory/TestMemorySessionProperties.java +++ b/presto-tests/src/test/java/io/prestosql/memory/TestMemorySessionProperties.java @@ -37,7 +37,6 @@ protected QueryRunner createQueryRunner() @Test(timeOut = 240_000) public void testSessionQueryMemoryPerNodeLimit() - throws Exception { assertQuery(sql); Session session = Session.builder(getQueryRunner().getDefaultSession()) @@ -54,7 +53,6 @@ public void testSessionQueryMemoryPerNodeLimit() @Test(timeOut = 240_000) public void testSessionQueryMaxTotalMemoryPerNodeLimit() - throws Exception { assertQuery(sql); Session session = Session.builder(getQueryRunner().getDefaultSession()) diff --git a/presto-thrift/src/test/java/io/prestosql/plugin/thrift/TestThriftIndexPageSource.java b/presto-thrift/src/test/java/io/prestosql/plugin/thrift/TestThriftIndexPageSource.java index 36872fee33eb..eafb6e2e304f 100644 --- a/presto-thrift/src/test/java/io/prestosql/plugin/thrift/TestThriftIndexPageSource.java +++ b/presto-thrift/src/test/java/io/prestosql/plugin/thrift/TestThriftIndexPageSource.java @@ -26,7 +26,6 @@ import io.prestosql.plugin.thrift.api.PrestoThriftPageResult; import io.prestosql.plugin.thrift.api.PrestoThriftSchemaTableName; import io.prestosql.plugin.thrift.api.PrestoThriftService; -import io.prestosql.plugin.thrift.api.PrestoThriftServiceException; import io.prestosql.plugin.thrift.api.PrestoThriftSplit; import io.prestosql.plugin.thrift.api.PrestoThriftSplitBatch; import io.prestosql.plugin.thrift.api.PrestoThriftTupleDomain; @@ -291,21 +290,18 @@ public ListenableFuture getRows(PrestoThriftId splitId, @Override public List 
listSchemaNames() - throws PrestoThriftServiceException { throw new UnsupportedOperationException(); } @Override public List listTables(PrestoThriftNullableSchemaName schemaNameOrNull) - throws PrestoThriftServiceException { throw new UnsupportedOperationException(); } @Override public PrestoThriftNullableTableMetadata getTableMetadata(PrestoThriftSchemaTableName schemaTableName) - throws PrestoThriftServiceException { throw new UnsupportedOperationException(); } From 50b9b41dd3679c524eb71dcb44accc0a804bd580 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Sat, 25 Apr 2020 22:35:21 +0200 Subject: [PATCH 247/519] Inline @DataProvider `@DataProvider` makes it harder to locate a failing test cases when there are many test cases. --- .../parquet/reader/TestMetadataReader.java | 143 +++++++++--------- 1 file changed, 70 insertions(+), 73 deletions(-) diff --git a/presto-parquet/src/test/java/io/prestosql/parquet/reader/TestMetadataReader.java b/presto-parquet/src/test/java/io/prestosql/parquet/reader/TestMetadataReader.java index 60e012be4dd4..803db8f972a6 100644 --- a/presto-parquet/src/test/java/io/prestosql/parquet/reader/TestMetadataReader.java +++ b/presto-parquet/src/test/java/io/prestosql/parquet/reader/TestMetadataReader.java @@ -126,8 +126,76 @@ public void testReadStatsBinary(Optional fileCreatedBy) /** * Stats written by Parquet before https://issues.apache.org/jira/browse/PARQUET-1025 */ - @Test(dataProvider = "testReadStatsBinaryUtf8OldWriterDataProvider") - public void testReadStatsBinaryUtf8OldWriter(Optional fileCreatedBy, int nullCount, byte[] min, byte[] max, int expectedNullCount, byte[] expectedMin, byte[] expectedMax) + @Test + public void testReadStatsBinaryUtf8OldWriter() + { + // [, bcé]: min is empty, max starts with ASCII + testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, 13, new byte[0], "bcé".getBytes(UTF_8), 13, null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR, 13, new byte[0], "bcé".getBytes(UTF_8), 13, null, null); + 
testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, 13, new byte[0], "bcé".getBytes(UTF_8), 13, null, null); + + // [, ébc]: min is empty, max starts with non-ASCII + testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, 13, new byte[0], "ébc".getBytes(UTF_8), 13, null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR, 13, new byte[0], "ébc".getBytes(UTF_8), 13, null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, 13, new byte[0], "ébc".getBytes(UTF_8), 13, null, null); + + // [aa, bé]: no common prefix, first different are both ASCII, min is all ASCII + testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, 13, "aa".getBytes(UTF_8), "bé".getBytes(UTF_8), 13, null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR, 13, "aa".getBytes(UTF_8), "bé".getBytes(UTF_8), 13, null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, 13, "aa".getBytes(UTF_8), "bé".getBytes(UTF_8), 13, "aa".getBytes(UTF_8), "c".getBytes(UTF_8)); + + // [abcd, abcdN]: common prefix, not only ASCII, one prefix of the other, last common ASCII + testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, 13, "abcd".getBytes(UTF_8), "abcdN".getBytes(UTF_8), 13, null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR, 13, "abcd".getBytes(UTF_8), "abcdN".getBytes(UTF_8), 13, null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, 13, "abcd".getBytes(UTF_8), "abcdN".getBytes(UTF_8), 13, "abcd".getBytes(UTF_8), "abce".getBytes(UTF_8)); + + // [abcé, abcéN]: common prefix, not only ASCII, one prefix of the other, last common non ASCII + testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, 13, "abcé".getBytes(UTF_8), "abcéN".getBytes(UTF_8), 13, null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR, 13, "abcé".getBytes(UTF_8), "abcéN".getBytes(UTF_8), 13, null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, 13, "abcé".getBytes(UTF_8), "abcéN".getBytes(UTF_8), 13, "abcé".getBytes(UTF_8), "abd".getBytes(UTF_8)); + + // [abcéM, abcéN]: common prefix, not only ASCII, first different are 
both ASCII + testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, 13, "abcéM".getBytes(UTF_8), "abcéN".getBytes(UTF_8), 13, null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR, 13, "abcéM".getBytes(UTF_8), "abcéN".getBytes(UTF_8), 13, null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, 13, "abcéM".getBytes(UTF_8), "abcéN".getBytes(UTF_8), 13, "abcéM".getBytes(UTF_8), "abcéO".getBytes(UTF_8)); + + // [abcéMab, abcéNxy]: common prefix, not only ASCII, first different are both ASCII, more characters afterwards + testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, 13, "abcéMab".getBytes(UTF_8), "abcéNxy".getBytes(UTF_8), 13, null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR, 13, "abcéMab".getBytes(UTF_8), "abcéNxy".getBytes(UTF_8), 13, null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, 13, "abcéMab".getBytes(UTF_8), "abcéNxy".getBytes(UTF_8), 13, "abcéMab".getBytes(UTF_8), "abcéO".getBytes(UTF_8)); + + // [abcéM, abcé\u00f7]: common prefix, not only ASCII, first different are both ASCII, but need to be chopped off (127) + testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, 13, "abcéM".getBytes(UTF_8), "abcé\u00f7".getBytes(UTF_8), 13, null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR, 13, "abcéM".getBytes(UTF_8), "abcé\u00f7".getBytes(UTF_8), 13, null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, 13, "abcéM".getBytes(UTF_8), "abcé\u00f7".getBytes(UTF_8), 13, "abcéM".getBytes(UTF_8), "abd".getBytes(UTF_8)); + + // [abc\u007fé, bcd\u007fé]: no common prefix, first different are both ASCII + testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, 13, "abc\u007fé".getBytes(UTF_8), "bcd\u007fé".getBytes(UTF_8), 13, null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR, 13, "abc\u007fé".getBytes(UTF_8), "bcd\u007fé".getBytes(UTF_8), 13, null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, 13, "abc\u007fé".getBytes(UTF_8), "bcd\u007fé".getBytes(UTF_8), 13, "abc\u007f".getBytes(UTF_8), "c".getBytes(UTF_8)); + + // [é, 
a]: no common prefix, first different are not both ASCII + testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, 13, "é".getBytes(UTF_8), "a".getBytes(UTF_8), 13, null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR, 13, "é".getBytes(UTF_8), "a".getBytes(UTF_8), 13, null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, 13, "é".getBytes(UTF_8), "a".getBytes(UTF_8), 13, null, null); + + // [é, ê]: no common prefix, first different are both not ASCII + testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, 13, "é".getBytes(UTF_8), "ê".getBytes(UTF_8), 13, null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR, 13, "é".getBytes(UTF_8), "ê".getBytes(UTF_8), 13, null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, 13, "é".getBytes(UTF_8), "ê".getBytes(UTF_8), 13, null, null); + + // [aé, aé]: min = max (common prefix, first different are both not ASCII) + testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, 13, "aé".getBytes(UTF_8), "aé".getBytes(UTF_8), 13, null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR, 13, "aé".getBytes(UTF_8), "aé".getBytes(UTF_8), 13, null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, 13, "aé".getBytes(UTF_8), "aé".getBytes(UTF_8), 13, "aé".getBytes(UTF_8), "aé".getBytes(UTF_8)); + + // [aé, bé]: no common prefix, first different are both ASCII + testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, 13, "aé".getBytes(UTF_8), "bé".getBytes(UTF_8), 13, null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR, 13, "aé".getBytes(UTF_8), "bé".getBytes(UTF_8), 13, null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, 13, "aé".getBytes(UTF_8), "bé".getBytes(UTF_8), 13, "a".getBytes(UTF_8), "c".getBytes(UTF_8)); + } + + private void testReadStatsBinaryUtf8OldWriter(Optional fileCreatedBy, int nullCount, byte[] min, byte[] max, int expectedNullCount, byte[] expectedMin, byte[] expectedMax) { Statistics statistics = new Statistics(); statistics.setNull_count(nullCount); @@ -159,77 +227,6 @@ public void 
testReadStatsBinaryUtf8OldWriter(Optional fileCreatedBy, int }); } - @DataProvider - public Object[][] testReadStatsBinaryUtf8OldWriterDataProvider() - { - return new Object[][] { - // [, bcé]: min is empty, max starts with ASCII - {NO_CREATED_BY, 13, new byte[0], "bcé".getBytes(UTF_8), 13, null, null}, - {PARQUET_MR, 13, new byte[0], "bcé".getBytes(UTF_8), 13, null, null}, - {PARQUET_MR_1_8, 13, new byte[0], "bcé".getBytes(UTF_8), 13, null, null}, - - // [, ébc]: min is empty, max starts with non-ASCII - {NO_CREATED_BY, 13, new byte[0], "ébc".getBytes(UTF_8), 13, null, null}, - {PARQUET_MR, 13, new byte[0], "ébc".getBytes(UTF_8), 13, null, null}, - {PARQUET_MR_1_8, 13, new byte[0], "ébc".getBytes(UTF_8), 13, null, null}, - - // [aa, bé]: no common prefix, first different are both ASCII, min is all ASCII - {NO_CREATED_BY, 13, "aa".getBytes(UTF_8), "bé".getBytes(UTF_8), 13, null, null}, - {PARQUET_MR, 13, "aa".getBytes(UTF_8), "bé".getBytes(UTF_8), 13, null, null}, - {PARQUET_MR_1_8, 13, "aa".getBytes(UTF_8), "bé".getBytes(UTF_8), 13, "aa".getBytes(UTF_8), "c".getBytes(UTF_8)}, - - // [abcd, abcdN]: common prefix, not only ASCII, one prefix of the other, last common ASCII - {NO_CREATED_BY, 13, "abcd".getBytes(UTF_8), "abcdN".getBytes(UTF_8), 13, null, null}, - {PARQUET_MR, 13, "abcd".getBytes(UTF_8), "abcdN".getBytes(UTF_8), 13, null, null}, - {PARQUET_MR_1_8, 13, "abcd".getBytes(UTF_8), "abcdN".getBytes(UTF_8), 13, "abcd".getBytes(UTF_8), "abce".getBytes(UTF_8)}, - - // [abcé, abcéN]: common prefix, not only ASCII, one prefix of the other, last common non ASCII - {NO_CREATED_BY, 13, "abcé".getBytes(UTF_8), "abcéN".getBytes(UTF_8), 13, null, null}, - {PARQUET_MR, 13, "abcé".getBytes(UTF_8), "abcéN".getBytes(UTF_8), 13, null, null}, - {PARQUET_MR_1_8, 13, "abcé".getBytes(UTF_8), "abcéN".getBytes(UTF_8), 13, "abcé".getBytes(UTF_8), "abd".getBytes(UTF_8)}, - - // [abcéM, abcéN]: common prefix, not only ASCII, first different are both ASCII - {NO_CREATED_BY, 13, 
"abcéM".getBytes(UTF_8), "abcéN".getBytes(UTF_8), 13, null, null}, - {PARQUET_MR, 13, "abcéM".getBytes(UTF_8), "abcéN".getBytes(UTF_8), 13, null, null}, - {PARQUET_MR_1_8, 13, "abcéM".getBytes(UTF_8), "abcéN".getBytes(UTF_8), 13, "abcéM".getBytes(UTF_8), "abcéO".getBytes(UTF_8)}, - - // [abcéMab, abcéNxy]: common prefix, not only ASCII, first different are both ASCII, more characters afterwards - {NO_CREATED_BY, 13, "abcéMab".getBytes(UTF_8), "abcéNxy".getBytes(UTF_8), 13, null, null}, - {PARQUET_MR, 13, "abcéMab".getBytes(UTF_8), "abcéNxy".getBytes(UTF_8), 13, null, null}, - {PARQUET_MR_1_8, 13, "abcéMab".getBytes(UTF_8), "abcéNxy".getBytes(UTF_8), 13, "abcéMab".getBytes(UTF_8), "abcéO".getBytes(UTF_8)}, - - // [abcéM, abcé\u00f7]: common prefix, not only ASCII, first different are both ASCII, but need to be chopped off (127) - {NO_CREATED_BY, 13, "abcéM".getBytes(UTF_8), "abcé\u00f7".getBytes(UTF_8), 13, null, null}, - {PARQUET_MR, 13, "abcéM".getBytes(UTF_8), "abcé\u00f7".getBytes(UTF_8), 13, null, null}, - {PARQUET_MR_1_8, 13, "abcéM".getBytes(UTF_8), "abcé\u00f7".getBytes(UTF_8), 13, "abcéM".getBytes(UTF_8), "abd".getBytes(UTF_8)}, - - // [abc\u007fé, bcd\u007fé]: no common prefix, first different are both ASCII - {NO_CREATED_BY, 13, "abc\u007fé".getBytes(UTF_8), "bcd\u007fé".getBytes(UTF_8), 13, null, null}, - {PARQUET_MR, 13, "abc\u007fé".getBytes(UTF_8), "bcd\u007fé".getBytes(UTF_8), 13, null, null}, - {PARQUET_MR_1_8, 13, "abc\u007fé".getBytes(UTF_8), "bcd\u007fé".getBytes(UTF_8), 13, "abc\u007f".getBytes(UTF_8), "c".getBytes(UTF_8)}, - - // [é, a]: no common prefix, first different are not both ASCII - {NO_CREATED_BY, 13, "é".getBytes(UTF_8), "a".getBytes(UTF_8), 13, null, null}, - {PARQUET_MR, 13, "é".getBytes(UTF_8), "a".getBytes(UTF_8), 13, null, null}, - {PARQUET_MR_1_8, 13, "é".getBytes(UTF_8), "a".getBytes(UTF_8), 13, null, null}, - - // [é, ê]: no common prefix, first different are both not ASCII - {NO_CREATED_BY, 13, "é".getBytes(UTF_8), 
"ê".getBytes(UTF_8), 13, null, null}, - {PARQUET_MR, 13, "é".getBytes(UTF_8), "ê".getBytes(UTF_8), 13, null, null}, - {PARQUET_MR_1_8, 13, "é".getBytes(UTF_8), "ê".getBytes(UTF_8), 13, null, null}, - - // [aé, aé]: min = max (common prefix, first different are both not ASCII) - {NO_CREATED_BY, 13, "aé".getBytes(UTF_8), "aé".getBytes(UTF_8), 13, null, null}, - {PARQUET_MR, 13, "aé".getBytes(UTF_8), "aé".getBytes(UTF_8), 13, null, null}, - {PARQUET_MR_1_8, 13, "aé".getBytes(UTF_8), "aé".getBytes(UTF_8), 13, "aé".getBytes(UTF_8), "aé".getBytes(UTF_8)}, - - // [aé, bé]: no common prefix, first different are both ASCII - {NO_CREATED_BY, 13, "aé".getBytes(UTF_8), "bé".getBytes(UTF_8), 13, null, null}, - {PARQUET_MR, 13, "aé".getBytes(UTF_8), "bé".getBytes(UTF_8), 13, null, null}, - {PARQUET_MR_1_8, 13, "aé".getBytes(UTF_8), "bé".getBytes(UTF_8), 13, "a".getBytes(UTF_8), "c".getBytes(UTF_8)}, - }; - } - @Test(dataProvider = "allCreatedBy") public void testReadStatsBinaryUtf8(Optional fileCreatedBy) { From 6d748b5bf5ebe477121a0e77ba00b14654720841 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Sat, 25 Apr 2020 22:35:22 +0200 Subject: [PATCH 248/519] Simplify test code --- .../parquet/reader/TestMetadataReader.java | 106 +++++++++--------- 1 file changed, 54 insertions(+), 52 deletions(-) diff --git a/presto-parquet/src/test/java/io/prestosql/parquet/reader/TestMetadataReader.java b/presto-parquet/src/test/java/io/prestosql/parquet/reader/TestMetadataReader.java index 803db8f972a6..e17076d2078d 100644 --- a/presto-parquet/src/test/java/io/prestosql/parquet/reader/TestMetadataReader.java +++ b/presto-parquet/src/test/java/io/prestosql/parquet/reader/TestMetadataReader.java @@ -130,95 +130,97 @@ public void testReadStatsBinary(Optional fileCreatedBy) public void testReadStatsBinaryUtf8OldWriter() { // [, bcé]: min is empty, max starts with ASCII - testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, 13, new byte[0], "bcé".getBytes(UTF_8), 13, null, null); - 
testReadStatsBinaryUtf8OldWriter(PARQUET_MR, 13, new byte[0], "bcé".getBytes(UTF_8), 13, null, null); - testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, 13, new byte[0], "bcé".getBytes(UTF_8), 13, null, null); + testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, "", "bcé", null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR, "", "bcé", null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, "", "bcé", null, null); // [, ébc]: min is empty, max starts with non-ASCII - testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, 13, new byte[0], "ébc".getBytes(UTF_8), 13, null, null); - testReadStatsBinaryUtf8OldWriter(PARQUET_MR, 13, new byte[0], "ébc".getBytes(UTF_8), 13, null, null); - testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, 13, new byte[0], "ébc".getBytes(UTF_8), 13, null, null); + testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, "", "ébc", null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR, "", "ébc", null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, "", "ébc", null, null); // [aa, bé]: no common prefix, first different are both ASCII, min is all ASCII - testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, 13, "aa".getBytes(UTF_8), "bé".getBytes(UTF_8), 13, null, null); - testReadStatsBinaryUtf8OldWriter(PARQUET_MR, 13, "aa".getBytes(UTF_8), "bé".getBytes(UTF_8), 13, null, null); - testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, 13, "aa".getBytes(UTF_8), "bé".getBytes(UTF_8), 13, "aa".getBytes(UTF_8), "c".getBytes(UTF_8)); + testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, "aa", "bé", null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR, "aa", "bé", null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, "aa", "bé", "aa", "c"); // [abcd, abcdN]: common prefix, not only ASCII, one prefix of the other, last common ASCII - testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, 13, "abcd".getBytes(UTF_8), "abcdN".getBytes(UTF_8), 13, null, null); - testReadStatsBinaryUtf8OldWriter(PARQUET_MR, 13, "abcd".getBytes(UTF_8), 
"abcdN".getBytes(UTF_8), 13, null, null); - testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, 13, "abcd".getBytes(UTF_8), "abcdN".getBytes(UTF_8), 13, "abcd".getBytes(UTF_8), "abce".getBytes(UTF_8)); + testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, "abcd", "abcdN", null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR, "abcd", "abcdN", null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, "abcd", "abcdN", "abcd", "abce"); // [abcé, abcéN]: common prefix, not only ASCII, one prefix of the other, last common non ASCII - testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, 13, "abcé".getBytes(UTF_8), "abcéN".getBytes(UTF_8), 13, null, null); - testReadStatsBinaryUtf8OldWriter(PARQUET_MR, 13, "abcé".getBytes(UTF_8), "abcéN".getBytes(UTF_8), 13, null, null); - testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, 13, "abcé".getBytes(UTF_8), "abcéN".getBytes(UTF_8), 13, "abcé".getBytes(UTF_8), "abd".getBytes(UTF_8)); + testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, "abcé", "abcéN", null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR, "abcé", "abcéN", null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, "abcé", "abcéN", "abcé", "abd"); // [abcéM, abcéN]: common prefix, not only ASCII, first different are both ASCII - testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, 13, "abcéM".getBytes(UTF_8), "abcéN".getBytes(UTF_8), 13, null, null); - testReadStatsBinaryUtf8OldWriter(PARQUET_MR, 13, "abcéM".getBytes(UTF_8), "abcéN".getBytes(UTF_8), 13, null, null); - testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, 13, "abcéM".getBytes(UTF_8), "abcéN".getBytes(UTF_8), 13, "abcéM".getBytes(UTF_8), "abcéO".getBytes(UTF_8)); + testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, "abcéM", "abcéN", null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR, "abcéM", "abcéN", null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, "abcéM", "abcéN", "abcéM", "abcéO"); // [abcéMab, abcéNxy]: common prefix, not only ASCII, first different are both ASCII, more characters 
afterwards - testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, 13, "abcéMab".getBytes(UTF_8), "abcéNxy".getBytes(UTF_8), 13, null, null); - testReadStatsBinaryUtf8OldWriter(PARQUET_MR, 13, "abcéMab".getBytes(UTF_8), "abcéNxy".getBytes(UTF_8), 13, null, null); - testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, 13, "abcéMab".getBytes(UTF_8), "abcéNxy".getBytes(UTF_8), 13, "abcéMab".getBytes(UTF_8), "abcéO".getBytes(UTF_8)); + testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, "abcéMab", "abcéNxy", null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR, "abcéMab", "abcéNxy", null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, "abcéMab", "abcéNxy", "abcéMab", "abcéO"); // [abcéM, abcé\u00f7]: common prefix, not only ASCII, first different are both ASCII, but need to be chopped off (127) - testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, 13, "abcéM".getBytes(UTF_8), "abcé\u00f7".getBytes(UTF_8), 13, null, null); - testReadStatsBinaryUtf8OldWriter(PARQUET_MR, 13, "abcéM".getBytes(UTF_8), "abcé\u00f7".getBytes(UTF_8), 13, null, null); - testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, 13, "abcéM".getBytes(UTF_8), "abcé\u00f7".getBytes(UTF_8), 13, "abcéM".getBytes(UTF_8), "abd".getBytes(UTF_8)); + testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, "abcéM", "abcé\u00f7", null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR, "abcéM", "abcé\u00f7", null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, "abcéM", "abcé\u00f7", "abcéM", "abd"); // [abc\u007fé, bcd\u007fé]: no common prefix, first different are both ASCII - testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, 13, "abc\u007fé".getBytes(UTF_8), "bcd\u007fé".getBytes(UTF_8), 13, null, null); - testReadStatsBinaryUtf8OldWriter(PARQUET_MR, 13, "abc\u007fé".getBytes(UTF_8), "bcd\u007fé".getBytes(UTF_8), 13, null, null); - testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, 13, "abc\u007fé".getBytes(UTF_8), "bcd\u007fé".getBytes(UTF_8), 13, "abc\u007f".getBytes(UTF_8), "c".getBytes(UTF_8)); + 
testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, "abc\u007fé", "bcd\u007fé", null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR, "abc\u007fé", "bcd\u007fé", null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, "abc\u007fé", "bcd\u007fé", "abc\u007f", "c"); // [é, a]: no common prefix, first different are not both ASCII - testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, 13, "é".getBytes(UTF_8), "a".getBytes(UTF_8), 13, null, null); - testReadStatsBinaryUtf8OldWriter(PARQUET_MR, 13, "é".getBytes(UTF_8), "a".getBytes(UTF_8), 13, null, null); - testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, 13, "é".getBytes(UTF_8), "a".getBytes(UTF_8), 13, null, null); + testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, "é", "a", null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR, "é", "a", null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, "é", "a", null, null); // [é, ê]: no common prefix, first different are both not ASCII - testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, 13, "é".getBytes(UTF_8), "ê".getBytes(UTF_8), 13, null, null); - testReadStatsBinaryUtf8OldWriter(PARQUET_MR, 13, "é".getBytes(UTF_8), "ê".getBytes(UTF_8), 13, null, null); - testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, 13, "é".getBytes(UTF_8), "ê".getBytes(UTF_8), 13, null, null); + testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, "é", "ê", null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR, "é", "ê", null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, "é", "ê", null, null); // [aé, aé]: min = max (common prefix, first different are both not ASCII) - testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, 13, "aé".getBytes(UTF_8), "aé".getBytes(UTF_8), 13, null, null); - testReadStatsBinaryUtf8OldWriter(PARQUET_MR, 13, "aé".getBytes(UTF_8), "aé".getBytes(UTF_8), 13, null, null); - testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, 13, "aé".getBytes(UTF_8), "aé".getBytes(UTF_8), 13, "aé".getBytes(UTF_8), "aé".getBytes(UTF_8)); + 
testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, "aé", "aé", null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR, "aé", "aé", null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, "aé", "aé", "aé", "aé"); // [aé, bé]: no common prefix, first different are both ASCII - testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, 13, "aé".getBytes(UTF_8), "bé".getBytes(UTF_8), 13, null, null); - testReadStatsBinaryUtf8OldWriter(PARQUET_MR, 13, "aé".getBytes(UTF_8), "bé".getBytes(UTF_8), 13, null, null); - testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, 13, "aé".getBytes(UTF_8), "bé".getBytes(UTF_8), 13, "a".getBytes(UTF_8), "c".getBytes(UTF_8)); + testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, "aé", "bé", null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR, "aé", "bé", null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, "aé", "bé", "a", "c"); } - private void testReadStatsBinaryUtf8OldWriter(Optional fileCreatedBy, int nullCount, byte[] min, byte[] max, int expectedNullCount, byte[] expectedMin, byte[] expectedMax) + private void testReadStatsBinaryUtf8OldWriter(Optional fileCreatedBy, String min, String max, String expectedMin, String expectedMax) { Statistics statistics = new Statistics(); - statistics.setNull_count(nullCount); - statistics.setMin(min); - statistics.setMax(max); + statistics.setNull_count(13); + statistics.setMin(min.getBytes(UTF_8)); + statistics.setMax(max.getBytes(UTF_8)); assertThat(MetadataReader.readStats(fileCreatedBy, Optional.of(statistics), new PrimitiveType(OPTIONAL, BINARY, "Test column", OriginalType.UTF8))) .isInstanceOfSatisfying(BinaryStatistics.class, columnStatistics -> { - assertEquals(columnStatistics.getNumNulls(), expectedNullCount); + assertEquals(columnStatistics.getNumNulls(), 13); - assertThat(columnStatistics.getMinBytes()).isEqualTo(expectedMin); - if (expectedMin != null) { - assertThat(columnStatistics.getMin().getBytes()).isEqualTo(expectedMin); - 
assertThat(columnStatistics.genericGetMin().getBytes()).isEqualTo(expectedMin); + byte[] expectedMinBytes = expectedMin != null ? expectedMin.getBytes(UTF_8) : null; + assertThat(columnStatistics.getMinBytes()).isEqualTo(expectedMinBytes); + if (expectedMinBytes != null) { + assertThat(columnStatistics.getMin().getBytes()).isEqualTo(expectedMinBytes); + assertThat(columnStatistics.genericGetMin().getBytes()).isEqualTo(expectedMinBytes); } else { assertNull(columnStatistics.getMin()); assertNull(columnStatistics.genericGetMin()); } - assertThat(columnStatistics.getMaxBytes()).isEqualTo(expectedMax); - if (expectedMax != null) { - assertThat(columnStatistics.getMax().getBytes()).isEqualTo(expectedMax); - assertThat(columnStatistics.genericGetMax().getBytes()).isEqualTo(expectedMax); + byte[] expectedMaxBytes = expectedMax != null ? expectedMax.getBytes(UTF_8) : null; + assertThat(columnStatistics.getMaxBytes()).isEqualTo(expectedMaxBytes); + if (expectedMaxBytes != null) { + assertThat(columnStatistics.getMax().getBytes()).isEqualTo(expectedMaxBytes); + assertThat(columnStatistics.genericGetMax().getBytes()).isEqualTo(expectedMaxBytes); } else { assertNull(columnStatistics.getMax()); From 833285e1c70c1672cf4c00edf1989688b9338d35 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Sat, 25 Apr 2020 22:35:23 +0200 Subject: [PATCH 249/519] Split test method --- .../parquet/reader/TestMetadataReader.java | 45 ++++++++----------- 1 file changed, 19 insertions(+), 26 deletions(-) diff --git a/presto-parquet/src/test/java/io/prestosql/parquet/reader/TestMetadataReader.java b/presto-parquet/src/test/java/io/prestosql/parquet/reader/TestMetadataReader.java index e17076d2078d..e91802b8a265 100644 --- a/presto-parquet/src/test/java/io/prestosql/parquet/reader/TestMetadataReader.java +++ b/presto-parquet/src/test/java/io/prestosql/parquet/reader/TestMetadataReader.java @@ -123,6 +123,25 @@ public void testReadStatsBinary(Optional fileCreatedBy) }); } + /** + * Stats written 
potentially before https://issues.apache.org/jira/browse/PARQUET-251 + */ + @Test + public void testReadStatsBinaryUtf8PotentiallyCorrupted() + { + testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, "", "abc", null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR, "", "abc", null, null); + + testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, "abc", "def", null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR, "abc", "def", null, null); + + testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, "abc", "abc", null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR, "abc", "abc", null, null); + + testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, "abcéM", "abcé\u00f7", null, null); + testReadStatsBinaryUtf8OldWriter(PARQUET_MR, "abcéM", "abcé\u00f7", null, null); + } + /** * Stats written by Parquet before https://issues.apache.org/jira/browse/PARQUET-1025 */ @@ -130,68 +149,42 @@ public void testReadStatsBinary(Optional fileCreatedBy) public void testReadStatsBinaryUtf8OldWriter() { // [, bcé]: min is empty, max starts with ASCII - testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, "", "bcé", null, null); - testReadStatsBinaryUtf8OldWriter(PARQUET_MR, "", "bcé", null, null); testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, "", "bcé", null, null); // [, ébc]: min is empty, max starts with non-ASCII - testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, "", "ébc", null, null); - testReadStatsBinaryUtf8OldWriter(PARQUET_MR, "", "ébc", null, null); testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, "", "ébc", null, null); // [aa, bé]: no common prefix, first different are both ASCII, min is all ASCII - testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, "aa", "bé", null, null); - testReadStatsBinaryUtf8OldWriter(PARQUET_MR, "aa", "bé", null, null); testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, "aa", "bé", "aa", "c"); // [abcd, abcdN]: common prefix, not only ASCII, one prefix of the other, last common ASCII - testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, "abcd", "abcdN", null, 
null); - testReadStatsBinaryUtf8OldWriter(PARQUET_MR, "abcd", "abcdN", null, null); testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, "abcd", "abcdN", "abcd", "abce"); // [abcé, abcéN]: common prefix, not only ASCII, one prefix of the other, last common non ASCII - testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, "abcé", "abcéN", null, null); - testReadStatsBinaryUtf8OldWriter(PARQUET_MR, "abcé", "abcéN", null, null); testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, "abcé", "abcéN", "abcé", "abd"); // [abcéM, abcéN]: common prefix, not only ASCII, first different are both ASCII - testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, "abcéM", "abcéN", null, null); - testReadStatsBinaryUtf8OldWriter(PARQUET_MR, "abcéM", "abcéN", null, null); testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, "abcéM", "abcéN", "abcéM", "abcéO"); // [abcéMab, abcéNxy]: common prefix, not only ASCII, first different are both ASCII, more characters afterwards - testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, "abcéMab", "abcéNxy", null, null); - testReadStatsBinaryUtf8OldWriter(PARQUET_MR, "abcéMab", "abcéNxy", null, null); testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, "abcéMab", "abcéNxy", "abcéMab", "abcéO"); // [abcéM, abcé\u00f7]: common prefix, not only ASCII, first different are both ASCII, but need to be chopped off (127) - testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, "abcéM", "abcé\u00f7", null, null); - testReadStatsBinaryUtf8OldWriter(PARQUET_MR, "abcéM", "abcé\u00f7", null, null); testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, "abcéM", "abcé\u00f7", "abcéM", "abd"); // [abc\u007fé, bcd\u007fé]: no common prefix, first different are both ASCII - testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, "abc\u007fé", "bcd\u007fé", null, null); - testReadStatsBinaryUtf8OldWriter(PARQUET_MR, "abc\u007fé", "bcd\u007fé", null, null); testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, "abc\u007fé", "bcd\u007fé", "abc\u007f", "c"); // [é, a]: no common prefix, first different are not both ASCII - 
testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, "é", "a", null, null); - testReadStatsBinaryUtf8OldWriter(PARQUET_MR, "é", "a", null, null); testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, "é", "a", null, null); // [é, ê]: no common prefix, first different are both not ASCII - testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, "é", "ê", null, null); - testReadStatsBinaryUtf8OldWriter(PARQUET_MR, "é", "ê", null, null); testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, "é", "ê", null, null); // [aé, aé]: min = max (common prefix, first different are both not ASCII) - testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, "aé", "aé", null, null); - testReadStatsBinaryUtf8OldWriter(PARQUET_MR, "aé", "aé", null, null); testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, "aé", "aé", "aé", "aé"); // [aé, bé]: no common prefix, first different are both ASCII - testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, "aé", "bé", null, null); - testReadStatsBinaryUtf8OldWriter(PARQUET_MR, "aé", "bé", null, null); testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, "aé", "bé", "a", "c"); } From 672c40ec3b62e2011a21d746974ddd4e62ae3470 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Sat, 25 Apr 2020 22:35:25 +0200 Subject: [PATCH 250/519] Add null test case --- .../parquet/reader/TestMetadataReader.java | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/presto-parquet/src/test/java/io/prestosql/parquet/reader/TestMetadataReader.java b/presto-parquet/src/test/java/io/prestosql/parquet/reader/TestMetadataReader.java index e91802b8a265..adfb2f665b7a 100644 --- a/presto-parquet/src/test/java/io/prestosql/parquet/reader/TestMetadataReader.java +++ b/presto-parquet/src/test/java/io/prestosql/parquet/reader/TestMetadataReader.java @@ -129,6 +129,9 @@ public void testReadStatsBinary(Optional fileCreatedBy) @Test public void testReadStatsBinaryUtf8PotentiallyCorrupted() { + testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, null, null, null, null); + 
testReadStatsBinaryUtf8OldWriter(PARQUET_MR, null, null, null, null); + testReadStatsBinaryUtf8OldWriter(NO_CREATED_BY, "", "abc", null, null); testReadStatsBinaryUtf8OldWriter(PARQUET_MR, "", "abc", null, null); @@ -148,6 +151,9 @@ public void testReadStatsBinaryUtf8PotentiallyCorrupted() @Test public void testReadStatsBinaryUtf8OldWriter() { + // null + testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, null, null, null, null); + // [, bcé]: min is empty, max starts with ASCII testReadStatsBinaryUtf8OldWriter(PARQUET_MR_1_8, "", "bcé", null, null); @@ -192,8 +198,12 @@ private void testReadStatsBinaryUtf8OldWriter(Optional fileCreatedBy, St { Statistics statistics = new Statistics(); statistics.setNull_count(13); - statistics.setMin(min.getBytes(UTF_8)); - statistics.setMax(max.getBytes(UTF_8)); + if (min != null) { + statistics.setMin(min.getBytes(UTF_8)); + } + if (max != null) { + statistics.setMax(max.getBytes(UTF_8)); + } assertThat(MetadataReader.readStats(fileCreatedBy, Optional.of(statistics), new PrimitiveType(OPTIONAL, BINARY, "Test column", OriginalType.UTF8))) .isInstanceOfSatisfying(BinaryStatistics.class, columnStatistics -> { assertEquals(columnStatistics.getNumNulls(), 13); From 3be0d7d624dd7e141a575810ce63f23cd81ec3f7 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Sat, 25 Apr 2020 22:59:33 +0200 Subject: [PATCH 251/519] Simplify trust setup --- .../prestosql/plugin/cassandra/CassandraClientModule.java | 6 ++---- .../src/main/java/io/prestosql/client/OkHttpUtil.java | 2 +- .../prestosql/elasticsearch/client/ElasticsearchClient.java | 6 ++---- 3 files changed, 5 insertions(+), 9 deletions(-) diff --git a/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraClientModule.java b/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraClientModule.java index 257dca8f0b54..dc89ac057f9b 100644 --- a/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraClientModule.java +++ 
b/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraClientModule.java @@ -217,14 +217,12 @@ private static Optional buildSslContext( // get X509TrustManager TrustManager[] trustManagers = trustManagerFactory.getTrustManagers(); - if ((trustManagers.length != 1) || !(trustManagers[0] instanceof X509TrustManager)) { + if (trustManagers.length != 1 || !(trustManagers[0] instanceof X509TrustManager)) { throw new RuntimeException("Unexpected default trust managers:" + Arrays.toString(trustManagers)); } - X509TrustManager trustManager = (X509TrustManager) trustManagers[0]; - // create SSLContext SSLContext result = SSLContext.getInstance("SSL"); - result.init(keyManagers, new TrustManager[] {trustManager}, null); + result.init(keyManagers, trustManagers, null); return Optional.of(result); } catch (GeneralSecurityException | IOException e) { diff --git a/presto-client/src/main/java/io/prestosql/client/OkHttpUtil.java b/presto-client/src/main/java/io/prestosql/client/OkHttpUtil.java index fa15ebe878e2..95b99702f993 100644 --- a/presto-client/src/main/java/io/prestosql/client/OkHttpUtil.java +++ b/presto-client/src/main/java/io/prestosql/client/OkHttpUtil.java @@ -226,7 +226,7 @@ public static void setupSsl( // get X509TrustManager TrustManager[] trustManagers = trustManagerFactory.getTrustManagers(); - if ((trustManagers.length != 1) || !(trustManagers[0] instanceof X509TrustManager)) { + if (trustManagers.length != 1 || !(trustManagers[0] instanceof X509TrustManager)) { throw new RuntimeException("Unexpected default trust managers:" + Arrays.toString(trustManagers)); } X509TrustManager trustManager = (X509TrustManager) trustManagers[0]; diff --git a/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/client/ElasticsearchClient.java b/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/client/ElasticsearchClient.java index e5f1ea03ed22..30e2df5d4026 100644 --- 
a/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/client/ElasticsearchClient.java +++ b/presto-elasticsearch/src/main/java/io/prestosql/elasticsearch/client/ElasticsearchClient.java @@ -274,14 +274,12 @@ private static Optional buildSslContext( // get X509TrustManager TrustManager[] trustManagers = trustManagerFactory.getTrustManagers(); - if ((trustManagers.length != 1) || !(trustManagers[0] instanceof X509TrustManager)) { + if (trustManagers.length != 1 || !(trustManagers[0] instanceof X509TrustManager)) { throw new RuntimeException("Unexpected default trust managers:" + Arrays.toString(trustManagers)); } - X509TrustManager trustManager = (X509TrustManager) trustManagers[0]; - // create SSLContext SSLContext result = SSLContext.getInstance("SSL"); - result.init(keyManagers, new TrustManager[] {trustManager}, null); + result.init(keyManagers, trustManagers, null); return Optional.of(result); } catch (GeneralSecurityException | IOException e) { From 6d71b83e057b5bdf2a0659506f38d1f9efd910e8 Mon Sep 17 00:00:00 2001 From: dhruvp-8 Date: Sun, 26 Apr 2020 16:09:56 -0700 Subject: [PATCH 252/519] Removed Blocked Time from the web ui --- presto-main/src/main/resources/webapp/dist/query.js | 2 +- .../main/resources/webapp/src/components/QueryDetail.jsx | 8 -------- 2 files changed, 1 insertion(+), 9 deletions(-) diff --git a/presto-main/src/main/resources/webapp/dist/query.js b/presto-main/src/main/resources/webapp/dist/query.js index 459630ed7ef3..e9ed79b40d8c 100644 --- a/presto-main/src/main/resources/webapp/dist/query.js +++ b/presto-main/src/main/resources/webapp/dist/query.js @@ -106,7 +106,7 @@ eval("\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n}); /***/ (function(module, exports, __webpack_require__) { "use strict"; -eval("\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.QueryDetail = undefined;\n\nvar _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < 
props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();\n\nvar _react = __webpack_require__(/*! react */ \"./node_modules/react/index.js\");\n\nvar _react2 = _interopRequireDefault(_react);\n\nvar _reactable = __webpack_require__(/*! reactable */ \"./node_modules/reactable/lib/reactable.js\");\n\nvar _reactable2 = _interopRequireDefault(_reactable);\n\nvar _utils = __webpack_require__(/*! ../utils */ \"./utils.js\");\n\nvar _QueryHeader = __webpack_require__(/*! ./QueryHeader */ \"./components/QueryHeader.jsx\");\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return call && (typeof call === \"object\" || typeof call === \"function\") ? call : self; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function, not \" + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? 
Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nvar Table = _reactable2.default.Table,\n Thead = _reactable2.default.Thead,\n Th = _reactable2.default.Th,\n Tr = _reactable2.default.Tr,\n Td = _reactable2.default.Td;\n\nvar TaskList = function (_React$Component) {\n _inherits(TaskList, _React$Component);\n\n function TaskList() {\n _classCallCheck(this, TaskList);\n\n return _possibleConstructorReturn(this, (TaskList.__proto__ || Object.getPrototypeOf(TaskList)).apply(this, arguments));\n }\n\n _createClass(TaskList, [{\n key: \"render\",\n value: function render() {\n var tasks = this.props.tasks;\n\n if (tasks === undefined || tasks.length === 0) {\n return _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h4\",\n null,\n \"No threads in the selected group\"\n )\n )\n );\n }\n\n var showPortNumbers = TaskList.showPortNumbers(tasks);\n\n var renderedTasks = tasks.map(function (task) {\n var elapsedTime = (0, _utils.parseDuration)(task.stats.elapsedTime);\n if (elapsedTime === 0) {\n elapsedTime = Date.now() - Date.parse(task.stats.createTime);\n }\n\n return _react2.default.createElement(\n Tr,\n { key: task.taskStatus.taskId },\n _react2.default.createElement(\n Td,\n { column: \"id\", value: task.taskStatus.taskId },\n 
_react2.default.createElement(\n \"a\",\n { href: \"/ui/api/worker/\" + task.taskStatus.nodeId + \"/task/\" + task.taskStatus.taskId + \"?pretty\" },\n (0, _utils.getTaskIdSuffix)(task.taskStatus.taskId)\n )\n ),\n _react2.default.createElement(\n Td,\n { column: \"host\", value: (0, _utils.getHostname)(task.taskStatus.self) },\n _react2.default.createElement(\n \"a\",\n { href: \"worker.html?\" + task.taskStatus.nodeId, className: \"font-light\", target: \"_blank\" },\n showPortNumbers ? (0, _utils.getHostAndPort)(task.taskStatus.self) : (0, _utils.getHostname)(task.taskStatus.self)\n )\n ),\n _react2.default.createElement(\n Td,\n { column: \"state\", value: TaskList.formatState(task.taskStatus.state, task.stats.fullyBlocked) },\n TaskList.formatState(task.taskStatus.state, task.stats.fullyBlocked)\n ),\n _react2.default.createElement(\n Td,\n { column: \"rows\", value: task.stats.rawInputPositions },\n (0, _utils.formatCount)(task.stats.rawInputPositions)\n ),\n _react2.default.createElement(\n Td,\n { column: \"rowsSec\", value: (0, _utils.computeRate)(task.stats.rawInputPositions, elapsedTime) },\n (0, _utils.formatCount)((0, _utils.computeRate)(task.stats.rawInputPositions, elapsedTime))\n ),\n _react2.default.createElement(\n Td,\n { column: \"bytes\", value: (0, _utils.parseDataSize)(task.stats.rawInputDataSize) },\n (0, _utils.formatDataSizeBytes)((0, _utils.parseDataSize)(task.stats.rawInputDataSize))\n ),\n _react2.default.createElement(\n Td,\n { column: \"bytesSec\", value: (0, _utils.computeRate)((0, _utils.parseDataSize)(task.stats.rawInputDataSize), elapsedTime) },\n (0, _utils.formatDataSizeBytes)((0, _utils.computeRate)((0, _utils.parseDataSize)(task.stats.rawInputDataSize), elapsedTime))\n ),\n _react2.default.createElement(\n Td,\n { column: \"splitsPending\", value: task.stats.queuedDrivers },\n task.stats.queuedDrivers\n ),\n _react2.default.createElement(\n Td,\n { column: \"splitsRunning\", value: task.stats.runningDrivers },\n 
task.stats.runningDrivers\n ),\n _react2.default.createElement(\n Td,\n { column: \"splitsBlocked\", value: task.stats.blockedDrivers },\n task.stats.blockedDrivers\n ),\n _react2.default.createElement(\n Td,\n { column: \"splitsDone\", value: task.stats.completedDrivers },\n task.stats.completedDrivers\n ),\n _react2.default.createElement(\n Td,\n { column: \"elapsedTime\", value: (0, _utils.parseDuration)(task.stats.elapsedTime) },\n task.stats.elapsedTime\n ),\n _react2.default.createElement(\n Td,\n { column: \"cpuTime\", value: (0, _utils.parseDuration)(task.stats.totalCpuTime) },\n task.stats.totalCpuTime\n ),\n _react2.default.createElement(\n Td,\n { column: \"bufferedBytes\", value: task.outputBuffers.totalBufferedBytes },\n (0, _utils.formatDataSizeBytes)(task.outputBuffers.totalBufferedBytes)\n )\n );\n });\n\n return _react2.default.createElement(\n Table,\n { id: \"tasks\", className: \"table table-striped sortable\", sortable: [{\n column: 'id',\n sortFunction: TaskList.compareTaskId\n }, 'host', 'state', 'splitsPending', 'splitsRunning', 'splitsBlocked', 'splitsDone', 'rows', 'rowsSec', 'bytes', 'bytesSec', 'elapsedTime', 'cpuTime', 'bufferedBytes'],\n defaultSort: { column: 'id', direction: 'asc' } },\n _react2.default.createElement(\n Thead,\n null,\n _react2.default.createElement(\n Th,\n { column: \"id\" },\n \"ID\"\n ),\n _react2.default.createElement(\n Th,\n { column: \"host\" },\n \"Host\"\n ),\n _react2.default.createElement(\n Th,\n { column: \"state\" },\n \"State\"\n ),\n _react2.default.createElement(\n Th,\n { column: \"splitsPending\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-pause\", style: _utils.GLYPHICON_HIGHLIGHT, \"data-toggle\": \"tooltip\", \"data-placement\": \"top\",\n title: \"Pending splits\" })\n ),\n _react2.default.createElement(\n Th,\n { column: \"splitsRunning\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-play\", style: 
_utils.GLYPHICON_HIGHLIGHT, \"data-toggle\": \"tooltip\", \"data-placement\": \"top\",\n title: \"Running splits\" })\n ),\n _react2.default.createElement(\n Th,\n { column: \"splitsBlocked\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-bookmark\", style: _utils.GLYPHICON_HIGHLIGHT, \"data-toggle\": \"tooltip\", \"data-placement\": \"top\",\n title: \"Blocked splits\" })\n ),\n _react2.default.createElement(\n Th,\n { column: \"splitsDone\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-ok\", style: _utils.GLYPHICON_HIGHLIGHT, \"data-toggle\": \"tooltip\", \"data-placement\": \"top\",\n title: \"Completed splits\" })\n ),\n _react2.default.createElement(\n Th,\n { column: \"rows\" },\n \"Rows\"\n ),\n _react2.default.createElement(\n Th,\n { column: \"rowsSec\" },\n \"Rows/s\"\n ),\n _react2.default.createElement(\n Th,\n { column: \"bytes\" },\n \"Bytes\"\n ),\n _react2.default.createElement(\n Th,\n { column: \"bytesSec\" },\n \"Bytes/s\"\n ),\n _react2.default.createElement(\n Th,\n { column: \"elapsedTime\" },\n \"Elapsed\"\n ),\n _react2.default.createElement(\n Th,\n { column: \"cpuTime\" },\n \"CPU Time\"\n ),\n _react2.default.createElement(\n Th,\n { column: \"bufferedBytes\" },\n \"Buffered\"\n )\n ),\n renderedTasks\n );\n }\n }], [{\n key: \"removeQueryId\",\n value: function removeQueryId(id) {\n var pos = id.indexOf('.');\n if (pos !== -1) {\n return id.substring(pos + 1);\n }\n return id;\n }\n }, {\n key: \"compareTaskId\",\n value: function compareTaskId(taskA, taskB) {\n var taskIdArrA = TaskList.removeQueryId(taskA).split(\".\");\n var taskIdArrB = TaskList.removeQueryId(taskB).split(\".\");\n\n if (taskIdArrA.length > taskIdArrB.length) {\n return 1;\n }\n for (var i = 0; i < taskIdArrA.length; i++) {\n var anum = Number.parseInt(taskIdArrA[i]);\n var bnum = Number.parseInt(taskIdArrB[i]);\n if (anum !== bnum) {\n return anum > bnum ? 
1 : -1;\n }\n }\n\n return 0;\n }\n }, {\n key: \"showPortNumbers\",\n value: function showPortNumbers(tasks) {\n // check if any host has multiple port numbers\n var hostToPortNumber = {};\n for (var i = 0; i < tasks.length; i++) {\n var taskUri = tasks[i].taskStatus.self;\n var hostname = (0, _utils.getHostname)(taskUri);\n var port = (0, _utils.getPort)(taskUri);\n if (hostname in hostToPortNumber && hostToPortNumber[hostname] !== port) {\n return true;\n }\n hostToPortNumber[hostname] = port;\n }\n\n return false;\n }\n }, {\n key: \"formatState\",\n value: function formatState(state, fullyBlocked) {\n if (fullyBlocked && state === \"RUNNING\") {\n return \"BLOCKED\";\n } else {\n return state;\n }\n }\n }]);\n\n return TaskList;\n}(_react2.default.Component);\n\nvar BAR_CHART_WIDTH = 800;\n\nvar BAR_CHART_PROPERTIES = {\n type: 'bar',\n barSpacing: '0',\n height: '80px',\n barColor: '#747F96',\n zeroColor: '#8997B3',\n chartRangeMin: 0,\n tooltipClassname: 'sparkline-tooltip',\n tooltipFormat: 'Task {{offset:offset}} - {{value}}',\n disableHiddenCheck: true\n};\n\nvar HISTOGRAM_WIDTH = 175;\n\nvar HISTOGRAM_PROPERTIES = {\n type: 'bar',\n barSpacing: '0',\n height: '80px',\n barColor: '#747F96',\n zeroColor: '#747F96',\n zeroAxis: true,\n chartRangeMin: 0,\n tooltipClassname: 'sparkline-tooltip',\n tooltipFormat: '{{offset:offset}} -- {{value}} tasks',\n disableHiddenCheck: true\n};\n\nvar StageSummary = function (_React$Component2) {\n _inherits(StageSummary, _React$Component2);\n\n function StageSummary(props) {\n _classCallCheck(this, StageSummary);\n\n var _this2 = _possibleConstructorReturn(this, (StageSummary.__proto__ || Object.getPrototypeOf(StageSummary)).call(this, props));\n\n _this2.state = {\n expanded: false,\n lastRender: null\n };\n return _this2;\n }\n\n _createClass(StageSummary, [{\n key: \"getExpandedIcon\",\n value: function getExpandedIcon() {\n return this.state.expanded ? 
\"glyphicon-chevron-up\" : \"glyphicon-chevron-down\";\n }\n }, {\n key: \"getExpandedStyle\",\n value: function getExpandedStyle() {\n return this.state.expanded ? {} : { display: \"none\" };\n }\n }, {\n key: \"toggleExpanded\",\n value: function toggleExpanded() {\n this.setState({\n expanded: !this.state.expanded\n });\n }\n }, {\n key: \"componentDidUpdate\",\n value: function componentDidUpdate() {\n var stage = this.props.stage;\n var numTasks = stage.tasks.length;\n\n // sort the x-axis\n stage.tasks.sort(function (taskA, taskB) {\n return (0, _utils.getTaskNumber)(taskA.taskStatus.taskId) - (0, _utils.getTaskNumber)(taskB.taskStatus.taskId);\n });\n\n var scheduledTimes = stage.tasks.map(function (task) {\n return (0, _utils.parseDuration)(task.stats.totalScheduledTime);\n });\n var cpuTimes = stage.tasks.map(function (task) {\n return (0, _utils.parseDuration)(task.stats.totalCpuTime);\n });\n\n // prevent multiple calls to componentDidUpdate (resulting from calls to setState or otherwise) within the refresh interval from re-rendering sparklines/charts\n if (this.state.lastRender === null || Date.now() - this.state.lastRender >= 1000) {\n var renderTimestamp = Date.now();\n var stageId = (0, _utils.getStageNumber)(stage.stageId);\n\n StageSummary.renderHistogram('#scheduled-time-histogram-' + stageId, scheduledTimes, _utils.formatDuration);\n StageSummary.renderHistogram('#cpu-time-histogram-' + stageId, cpuTimes, _utils.formatDuration);\n\n if (this.state.expanded) {\n // this needs to be a string otherwise it will also be passed to numberFormatter\n var tooltipValueLookups = { 'offset': {} };\n for (var i = 0; i < numTasks; i++) {\n tooltipValueLookups['offset'][i] = (0, _utils.getStageNumber)(stage.stageId) + \".\" + i;\n }\n\n var stageBarChartProperties = $.extend({}, BAR_CHART_PROPERTIES, { barWidth: BAR_CHART_WIDTH / numTasks, tooltipValueLookups: tooltipValueLookups });\n\n $('#scheduled-time-bar-chart-' + stageId).sparkline(scheduledTimes, 
$.extend({}, stageBarChartProperties, { numberFormatter: _utils.formatDuration }));\n $('#cpu-time-bar-chart-' + stageId).sparkline(cpuTimes, $.extend({}, stageBarChartProperties, { numberFormatter: _utils.formatDuration }));\n }\n\n this.setState({\n lastRender: renderTimestamp\n });\n }\n }\n }, {\n key: \"render\",\n value: function render() {\n var stage = this.props.stage;\n if (stage === undefined || !stage.hasOwnProperty('plan')) {\n return _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n \"Information about this stage is unavailable.\"\n )\n );\n }\n\n var totalBufferedBytes = stage.tasks.map(function (task) {\n return task.outputBuffers.totalBufferedBytes;\n }).reduce(function (a, b) {\n return a + b;\n }, 0);\n\n var stageId = (0, _utils.getStageNumber)(stage.stageId);\n\n return _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-id\" },\n _react2.default.createElement(\n \"div\",\n { className: \"stage-state-color\", style: { borderLeftColor: (0, _utils.getStageStateColor)(stage) } },\n stageId\n )\n ),\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"table\",\n { className: \"table single-stage-table\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"table\",\n { className: \"stage-table stage-table-time\" },\n _react2.default.createElement(\n \"thead\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"th\",\n { className: \"stage-table-stat-title stage-table-stat-header\" },\n \"Time\"\n ),\n _react2.default.createElement(\"th\", null)\n )\n ),\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: 
\"stage-table-stat-title\" },\n \"Scheduled\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-text\" },\n stage.stageStats.totalScheduledTime\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title\" },\n \"Blocked\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-text\" },\n stage.stageStats.totalBlockedTime\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title\" },\n \"CPU\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-text\" },\n stage.stageStats.totalCpuTime\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"table\",\n { className: \"stage-table stage-table-memory\" },\n _react2.default.createElement(\n \"thead\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"th\",\n { className: \"stage-table-stat-title stage-table-stat-header\" },\n \"Memory\"\n ),\n _react2.default.createElement(\"th\", null)\n )\n ),\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title\" },\n \"Cumulative\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-text\" },\n (0, _utils.formatDataSizeBytes)(stage.stageStats.cumulativeUserMemory / 1000)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title\" },\n \"Current\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-text\" },\n (0, _utils.parseAndFormatDataSize)(stage.stageStats.userMemoryReservation)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n 
_react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title\" },\n \"Buffers\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-text\" },\n (0, _utils.formatDataSize)(totalBufferedBytes)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title\" },\n \"Peak\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-text\" },\n (0, _utils.parseAndFormatDataSize)(stage.stageStats.peakUserMemoryReservation)\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"table\",\n { className: \"stage-table stage-table-tasks\" },\n _react2.default.createElement(\n \"thead\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"th\",\n { className: \"stage-table-stat-title stage-table-stat-header\" },\n \"Tasks\"\n ),\n _react2.default.createElement(\"th\", null)\n )\n ),\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title\" },\n \"Pending\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-text\" },\n stage.tasks.filter(function (task) {\n return task.taskStatus.state === \"PLANNED\";\n }).length\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title\" },\n \"Running\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-text\" },\n stage.tasks.filter(function (task) {\n return task.taskStatus.state === \"RUNNING\";\n }).length\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title\" },\n \"Blocked\"\n ),\n _react2.default.createElement(\n \"td\",\n { 
className: \"stage-table-stat-text\" },\n stage.tasks.filter(function (task) {\n return task.stats.fullyBlocked;\n }).length\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title\" },\n \"Total\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-text\" },\n stage.tasks.length\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"table\",\n { className: \"stage-table histogram-table\" },\n _react2.default.createElement(\n \"thead\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"th\",\n { className: \"stage-table-stat-title stage-table-chart-header\" },\n \"Scheduled Time Skew\"\n )\n )\n ),\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"histogram-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"histogram\", id: \"scheduled-time-histogram-\" + stageId },\n _react2.default.createElement(\"div\", { className: \"loader\" })\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"table\",\n { className: \"stage-table histogram-table\" },\n _react2.default.createElement(\n \"thead\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"th\",\n { className: \"stage-table-stat-title stage-table-chart-header\" },\n \"CPU Time Skew\"\n )\n )\n ),\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"histogram-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"histogram\", id: \"cpu-time-histogram-\" + stageId },\n _react2.default.createElement(\"div\", { className: \"loader\" 
})\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"expand-charts-container\" },\n _react2.default.createElement(\n \"a\",\n { onClick: this.toggleExpanded.bind(this), className: \"expand-charts-button\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon \" + this.getExpandedIcon(), style: _utils.GLYPHICON_HIGHLIGHT, \"data-toggle\": \"tooltip\", \"data-placement\": \"top\", title: \"More\" })\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n { style: this.getExpandedStyle() },\n _react2.default.createElement(\n \"td\",\n { colSpan: \"6\" },\n _react2.default.createElement(\n \"table\",\n { className: \"expanded-chart\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title expanded-chart-title\" },\n \"Task Scheduled Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"bar-chart-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"bar-chart\", id: \"scheduled-time-bar-chart-\" + stageId },\n _react2.default.createElement(\"div\", { className: \"loader\" })\n )\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n { style: this.getExpandedStyle() },\n _react2.default.createElement(\n \"td\",\n { colSpan: \"6\" },\n _react2.default.createElement(\n \"table\",\n { className: \"expanded-chart\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title expanded-chart-title\" },\n \"Task CPU Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"bar-chart-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"bar-chart\", id: \"cpu-time-bar-chart-\" + stageId },\n _react2.default.createElement(\"div\", { className: \"loader\" })\n )\n )\n )\n )\n 
)\n )\n )\n )\n )\n )\n );\n }\n }], [{\n key: \"renderHistogram\",\n value: function renderHistogram(histogramId, inputData, numberFormatter) {\n var numBuckets = Math.min(HISTOGRAM_WIDTH, Math.sqrt(inputData.length));\n var dataMin = Math.min.apply(null, inputData);\n var dataMax = Math.max.apply(null, inputData);\n var bucketSize = (dataMax - dataMin) / numBuckets;\n\n var histogramData = [];\n if (bucketSize === 0) {\n histogramData = [inputData.length];\n } else {\n for (var i = 0; i < numBuckets + 1; i++) {\n histogramData.push(0);\n }\n\n for (var _i in inputData) {\n var dataPoint = inputData[_i];\n var bucket = Math.floor((dataPoint - dataMin) / bucketSize);\n histogramData[bucket] = histogramData[bucket] + 1;\n }\n }\n\n var tooltipValueLookups = { 'offset': {} };\n for (var _i2 = 0; _i2 < histogramData.length; _i2++) {\n tooltipValueLookups['offset'][_i2] = numberFormatter(dataMin + _i2 * bucketSize) + \"-\" + numberFormatter(dataMin + (_i2 + 1) * bucketSize);\n }\n\n var stageHistogramProperties = $.extend({}, HISTOGRAM_PROPERTIES, { barWidth: HISTOGRAM_WIDTH / histogramData.length, tooltipValueLookups: tooltipValueLookups });\n $(histogramId).sparkline(histogramData, stageHistogramProperties);\n }\n }]);\n\n return StageSummary;\n}(_react2.default.Component);\n\nvar StageList = function (_React$Component3) {\n _inherits(StageList, _React$Component3);\n\n function StageList() {\n _classCallCheck(this, StageList);\n\n return _possibleConstructorReturn(this, (StageList.__proto__ || Object.getPrototypeOf(StageList)).apply(this, arguments));\n }\n\n _createClass(StageList, [{\n key: \"getStages\",\n value: function getStages(stage) {\n if (stage === undefined || !stage.hasOwnProperty('subStages')) {\n return [];\n }\n\n return [].concat.apply(stage, stage.subStages.map(this.getStages, this));\n }\n }, {\n key: \"render\",\n value: function render() {\n var stages = this.getStages(this.props.outputStage);\n\n if (stages === undefined || stages.length === 0) 
{\n return _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n \"No stage information available.\"\n )\n );\n }\n\n var renderedStages = stages.map(function (stage) {\n return _react2.default.createElement(StageSummary, { key: stage.stageId, stage: stage });\n });\n\n return _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"table\",\n { className: \"table\", id: \"stage-list\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n renderedStages\n )\n )\n )\n );\n }\n }]);\n\n return StageList;\n}(_react2.default.Component);\n\nvar SMALL_SPARKLINE_PROPERTIES = {\n width: '100%',\n height: '57px',\n fillColor: '#3F4552',\n lineColor: '#747F96',\n spotColor: '#1EDCFF',\n tooltipClassname: 'sparkline-tooltip',\n disableHiddenCheck: true\n};\n\nvar TASK_FILTER = {\n ALL: function ALL() {\n return true;\n },\n PLANNED: function PLANNED(state) {\n return state === 'PLANNED';\n },\n RUNNING: function RUNNING(state) {\n return state === 'RUNNING';\n },\n FINISHED: function FINISHED(state) {\n return state === 'FINISHED';\n },\n FAILED: function FAILED(state) {\n return state === 'FAILED' || state === 'ABORTED' || state === 'CANCELED';\n }\n};\n\nvar QueryDetail = exports.QueryDetail = function (_React$Component4) {\n _inherits(QueryDetail, _React$Component4);\n\n function QueryDetail(props) {\n _classCallCheck(this, QueryDetail);\n\n var _this4 = _possibleConstructorReturn(this, (QueryDetail.__proto__ || Object.getPrototypeOf(QueryDetail)).call(this, props));\n\n _this4.state = {\n query: null,\n lastSnapshotStages: null,\n lastSnapshotTasks: null,\n\n lastScheduledTime: 0,\n lastCpuTime: 0,\n lastRowInput: 0,\n lastByteInput: 0,\n lastPhysicalInput: 0,\n lastPhysicalTime: 0,\n\n scheduledTimeRate: [],\n cpuTimeRate: [],\n rowInputRate: [],\n 
byteInputRate: [],\n physicalInputRate: [],\n\n reservedMemory: [],\n\n initialized: false,\n queryEnded: false,\n renderingEnded: false,\n\n lastRefresh: null,\n lastRender: null,\n\n stageRefresh: true,\n taskRefresh: true,\n\n taskFilter: TASK_FILTER.ALL\n };\n\n _this4.refreshLoop = _this4.refreshLoop.bind(_this4);\n return _this4;\n }\n\n _createClass(QueryDetail, [{\n key: \"resetTimer\",\n value: function resetTimer() {\n clearTimeout(this.timeoutId);\n // stop refreshing when query finishes or fails\n if (this.state.query === null || !this.state.queryEnded) {\n // task.info-update-interval is set to 3 seconds by default\n this.timeoutId = setTimeout(this.refreshLoop, 3000);\n }\n }\n }, {\n key: \"refreshLoop\",\n value: function refreshLoop() {\n var _this5 = this;\n\n clearTimeout(this.timeoutId); // to stop multiple series of refreshLoop from going on simultaneously\n var queryId = (0, _utils.getFirstParameter)(window.location.search);\n $.get('/ui/api/query/' + queryId, function (query) {\n var lastSnapshotStages = this.state.lastSnapshotStage;\n if (this.state.stageRefresh) {\n lastSnapshotStages = query.outputStage;\n }\n var lastSnapshotTasks = this.state.lastSnapshotTasks;\n if (this.state.taskRefresh) {\n lastSnapshotTasks = query.outputStage;\n }\n\n var lastRefresh = this.state.lastRefresh;\n var lastScheduledTime = this.state.lastScheduledTime;\n var lastCpuTime = this.state.lastCpuTime;\n var lastRowInput = this.state.lastRowInput;\n var lastByteInput = this.state.lastByteInput;\n var lastPhysicalInput = this.state.lastPhysicalInput;\n var lastPhysicalTime = this.state.lastPhysicalTime;\n var alreadyEnded = this.state.queryEnded;\n var nowMillis = Date.now();\n\n this.setState({\n query: query,\n lastSnapshotStage: lastSnapshotStages,\n lastSnapshotTasks: lastSnapshotTasks,\n\n lastPhysicalTime: (0, _utils.parseDuration)(query.queryStats.physicalInputReadTime),\n lastScheduledTime: (0, 
_utils.parseDuration)(query.queryStats.totalScheduledTime),\n lastCpuTime: (0, _utils.parseDuration)(query.queryStats.totalCpuTime),\n lastRowInput: query.queryStats.processedInputPositions,\n lastByteInput: (0, _utils.parseDataSize)(query.queryStats.processedInputDataSize),\n lastPhysicalInput: (0, _utils.parseDataSize)(query.queryStats.physicalInputDataSize),\n\n initialized: true,\n queryEnded: !!query.finalQueryInfo,\n\n lastRefresh: nowMillis\n });\n\n // i.e. don't show sparklines if we've already decided not to update or if we don't have one previous measurement\n if (alreadyEnded || lastRefresh === null && query.state === \"RUNNING\") {\n this.resetTimer();\n return;\n }\n\n if (lastRefresh === null) {\n lastRefresh = nowMillis - (0, _utils.parseDuration)(query.queryStats.elapsedTime);\n }\n\n var elapsedSecsSinceLastRefresh = (nowMillis - lastRefresh) / 1000.0;\n if (elapsedSecsSinceLastRefresh >= 0) {\n var currentScheduledTimeRate = ((0, _utils.parseDuration)(query.queryStats.totalScheduledTime) - lastScheduledTime) / (elapsedSecsSinceLastRefresh * 1000);\n var currentCpuTimeRate = ((0, _utils.parseDuration)(query.queryStats.totalCpuTime) - lastCpuTime) / (elapsedSecsSinceLastRefresh * 1000);\n var currentPhysicalReadTime = ((0, _utils.parseDuration)(query.queryStats.physicalInputReadTime) - lastPhysicalTime) / 1000;\n var currentRowInputRate = (query.queryStats.processedInputPositions - lastRowInput) / elapsedSecsSinceLastRefresh;\n var currentByteInputRate = ((0, _utils.parseDataSize)(query.queryStats.processedInputDataSize) - lastByteInput) / elapsedSecsSinceLastRefresh;\n var currentPhysicalInputRate = currentPhysicalReadTime > 0 ? 
((0, _utils.parseDataSize)(query.queryStats.physicalInputDataSize) - lastPhysicalInput) / currentPhysicalReadTime : 0;\n\n this.setState({\n scheduledTimeRate: (0, _utils.addToHistory)(currentScheduledTimeRate, this.state.scheduledTimeRate),\n cpuTimeRate: (0, _utils.addToHistory)(currentCpuTimeRate, this.state.cpuTimeRate),\n rowInputRate: (0, _utils.addToHistory)(currentRowInputRate, this.state.rowInputRate),\n byteInputRate: (0, _utils.addToHistory)(currentByteInputRate, this.state.byteInputRate),\n reservedMemory: (0, _utils.addToHistory)((0, _utils.parseDataSize)(query.queryStats.totalMemoryReservation), this.state.reservedMemory),\n physicalInputRate: (0, _utils.addToHistory)(currentPhysicalInputRate, this.state.physicalInputRate)\n });\n }\n this.resetTimer();\n }.bind(this)).error(function () {\n _this5.setState({\n initialized: true\n });\n _this5.resetTimer();\n });\n }\n }, {\n key: \"handleTaskRefreshClick\",\n value: function handleTaskRefreshClick() {\n if (this.state.taskRefresh) {\n this.setState({\n taskRefresh: false,\n lastSnapshotTasks: this.state.query.outputStage\n });\n } else {\n this.setState({\n taskRefresh: true\n });\n }\n }\n }, {\n key: \"renderTaskRefreshButton\",\n value: function renderTaskRefreshButton() {\n if (this.state.taskRefresh) {\n return _react2.default.createElement(\n \"button\",\n { className: \"btn btn-info live-button\", onClick: this.handleTaskRefreshClick.bind(this) },\n \"Auto-Refresh: On\"\n );\n } else {\n return _react2.default.createElement(\n \"button\",\n { className: \"btn btn-info live-button\", onClick: this.handleTaskRefreshClick.bind(this) },\n \"Auto-Refresh: Off\"\n );\n }\n }\n }, {\n key: \"handleStageRefreshClick\",\n value: function handleStageRefreshClick() {\n if (this.state.stageRefresh) {\n this.setState({\n stageRefresh: false,\n lastSnapshotStages: this.state.query.outputStage\n });\n } else {\n this.setState({\n stageRefresh: true\n });\n }\n }\n }, {\n key: \"renderStageRefreshButton\",\n 
value: function renderStageRefreshButton() {\n if (this.state.stageRefresh) {\n return _react2.default.createElement(\n \"button\",\n { className: \"btn btn-info live-button\", onClick: this.handleStageRefreshClick.bind(this) },\n \"Auto-Refresh: On\"\n );\n } else {\n return _react2.default.createElement(\n \"button\",\n { className: \"btn btn-info live-button\", onClick: this.handleStageRefreshClick.bind(this) },\n \"Auto-Refresh: Off\"\n );\n }\n }\n }, {\n key: \"renderTaskFilterListItem\",\n value: function renderTaskFilterListItem(taskFilter, taskFilterText) {\n return _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"a\",\n { href: \"#\", className: this.state.taskFilter === taskFilter ? \"selected\" : \"\", onClick: this.handleTaskFilterClick.bind(this, taskFilter) },\n taskFilterText\n )\n );\n }\n }, {\n key: \"handleTaskFilterClick\",\n value: function handleTaskFilterClick(filter, event) {\n this.setState({\n taskFilter: filter\n });\n event.preventDefault();\n }\n }, {\n key: \"getTasksFromStage\",\n value: function getTasksFromStage(stage) {\n if (stage === undefined || !stage.hasOwnProperty('subStages') || !stage.hasOwnProperty('tasks')) {\n return [];\n }\n\n return [].concat.apply(stage.tasks, stage.subStages.map(this.getTasksFromStage, this));\n }\n }, {\n key: \"componentDidMount\",\n value: function componentDidMount() {\n this.refreshLoop();\n }\n }, {\n key: \"componentDidUpdate\",\n value: function componentDidUpdate() {\n // prevent multiple calls to componentDidUpdate (resulting from calls to setState or otherwise) within the refresh interval from re-rendering sparklines/charts\n if (this.state.lastRender === null || Date.now() - this.state.lastRender >= 1000 || this.state.ended && !this.state.renderingEnded) {\n var renderTimestamp = Date.now();\n $('#scheduled-time-rate-sparkline').sparkline(this.state.scheduledTimeRate, $.extend({}, SMALL_SPARKLINE_PROPERTIES, {\n chartRangeMin: 0,\n numberFormatter: 
_utils.precisionRound\n }));\n $('#cpu-time-rate-sparkline').sparkline(this.state.cpuTimeRate, $.extend({}, SMALL_SPARKLINE_PROPERTIES, { chartRangeMin: 0, numberFormatter: _utils.precisionRound }));\n $('#row-input-rate-sparkline').sparkline(this.state.rowInputRate, $.extend({}, SMALL_SPARKLINE_PROPERTIES, { numberFormatter: _utils.formatCount }));\n $('#byte-input-rate-sparkline').sparkline(this.state.byteInputRate, $.extend({}, SMALL_SPARKLINE_PROPERTIES, { numberFormatter: _utils.formatDataSize }));\n $('#reserved-memory-sparkline').sparkline(this.state.reservedMemory, $.extend({}, SMALL_SPARKLINE_PROPERTIES, { numberFormatter: _utils.formatDataSize }));\n $('#physical-input-rate-sparkline').sparkline(this.state.physicalInputRate, $.extend({}, SMALL_SPARKLINE_PROPERTIES, { numberFormatter: _utils.formatDataSize }));\n\n if (this.state.lastRender === null) {\n $('#query').each(function (i, block) {\n hljs.highlightBlock(block);\n });\n\n $('#prepared-query').each(function (i, block) {\n hljs.highlightBlock(block);\n });\n }\n\n this.setState({\n renderingEnded: this.state.ended,\n lastRender: renderTimestamp\n });\n }\n\n $('[data-toggle=\"tooltip\"]').tooltip();\n new window.ClipboardJS('.copy-button');\n }\n }, {\n key: \"renderTasks\",\n value: function renderTasks() {\n var _this6 = this;\n\n if (this.state.lastSnapshotTasks === null) {\n return;\n }\n\n var tasks = this.getTasksFromStage(this.state.lastSnapshotTasks).filter(function (task) {\n return _this6.state.taskFilter(task.taskStatus.state);\n }, this);\n\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-9\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Tasks\"\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-3\" },\n _react2.default.createElement(\n \"table\",\n { className: \"header-inline-links\" },\n 
_react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"div\",\n { className: \"input-group-btn text-right\" },\n _react2.default.createElement(\n \"button\",\n { type: \"button\", className: \"btn btn-default dropdown-toggle pull-right text-right\", \"data-toggle\": \"dropdown\", \"aria-haspopup\": \"true\",\n \"aria-expanded\": \"false\" },\n \"Show \",\n _react2.default.createElement(\"span\", { className: \"caret\" })\n ),\n _react2.default.createElement(\n \"ul\",\n { className: \"dropdown-menu\" },\n this.renderTaskFilterListItem(TASK_FILTER.ALL, \"All\"),\n this.renderTaskFilterListItem(TASK_FILTER.PLANNED, \"Planned\"),\n this.renderTaskFilterListItem(TASK_FILTER.RUNNING, \"Running\"),\n this.renderTaskFilterListItem(TASK_FILTER.FINISHED, \"Finished\"),\n this.renderTaskFilterListItem(TASK_FILTER.FAILED, \"Aborted/Canceled/Failed\")\n )\n )\n ),\n _react2.default.createElement(\n \"td\",\n null,\n \"\\xA0\\xA0\",\n this.renderTaskRefreshButton()\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(TaskList, { key: this.state.query.queryId, tasks: tasks })\n )\n )\n );\n }\n }, {\n key: \"renderStages\",\n value: function renderStages() {\n if (this.state.lastSnapshotStage === null) {\n return;\n }\n\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-9\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Stages\"\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-3\" },\n _react2.default.createElement(\n \"table\",\n { className: \"header-inline-links\" },\n _react2.default.createElement(\n \"tbody\",\n 
null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n this.renderStageRefreshButton()\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(StageList, { key: this.state.query.queryId, outputStage: this.state.lastSnapshotStage })\n )\n )\n );\n }\n }, {\n key: \"renderPreparedQuery\",\n value: function renderPreparedQuery() {\n var query = this.state.query;\n if (!query.hasOwnProperty('preparedQuery') || query.preparedQuery === null) {\n return;\n }\n\n return _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Prepared Query\",\n _react2.default.createElement(\n \"a\",\n { className: \"btn copy-button\", \"data-clipboard-target\": \"#prepared-query-text\", \"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"Copy to clipboard\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-copy\", \"aria-hidden\": \"true\", alt: \"Copy to clipboard\" })\n )\n ),\n _react2.default.createElement(\n \"pre\",\n { id: \"prepared-query\" },\n _react2.default.createElement(\n \"code\",\n { className: \"lang-sql\", id: \"prepared-query-text\" },\n query.preparedQuery\n )\n )\n );\n }\n }, {\n key: \"renderSessionProperties\",\n value: function renderSessionProperties() {\n var query = this.state.query;\n\n var properties = [];\n for (var property in query.session.systemProperties) {\n if (query.session.systemProperties.hasOwnProperty(property)) {\n properties.push(_react2.default.createElement(\n \"span\",\n null,\n \"- \",\n property + \"=\" + query.session.systemProperties[property],\n \" \",\n _react2.default.createElement(\"br\", null)\n ));\n }\n }\n\n for (var catalog in query.session.catalogProperties) {\n if 
(query.session.catalogProperties.hasOwnProperty(catalog)) {\n for (var _property in query.session.catalogProperties[catalog]) {\n if (query.session.catalogProperties[catalog].hasOwnProperty(_property)) {\n properties.push(_react2.default.createElement(\n \"span\",\n null,\n \"- \",\n catalog + \".\" + _property + \"=\" + query.session.catalogProperties[catalog][_property],\n \" \",\n _react2.default.createElement(\"br\", null)\n ));\n }\n }\n }\n }\n\n return properties;\n }\n }, {\n key: \"renderResourceEstimates\",\n value: function renderResourceEstimates() {\n var query = this.state.query;\n var estimates = query.session.resourceEstimates;\n var renderedEstimates = [];\n\n for (var resource in estimates) {\n if (estimates.hasOwnProperty(resource)) {\n var upperChars = resource.match(/([A-Z])/g) || [];\n var snakeCased = resource;\n for (var i = 0, n = upperChars.length; i < n; i++) {\n snakeCased = snakeCased.replace(new RegExp(upperChars[i]), '_' + upperChars[i].toLowerCase());\n }\n\n renderedEstimates.push(_react2.default.createElement(\n \"span\",\n null,\n \"- \",\n snakeCased + \"=\" + query.session.resourceEstimates[resource],\n \" \",\n _react2.default.createElement(\"br\", null)\n ));\n }\n }\n\n return renderedEstimates;\n }\n }, {\n key: \"renderWarningInfo\",\n value: function renderWarningInfo() {\n var query = this.state.query;\n if (query.warnings.length > 0) {\n return _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Warnings\"\n ),\n _react2.default.createElement(\"hr\", { className: \"h3-hr\" }),\n _react2.default.createElement(\n \"table\",\n { className: \"table\", id: \"warnings-table\" },\n query.warnings.map(function (warning) {\n return _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n warning.warningCode.name\n ),\n 
_react2.default.createElement(\n \"td\",\n null,\n warning.message\n )\n );\n })\n )\n )\n );\n } else {\n return null;\n }\n }\n }, {\n key: \"renderFailureInfo\",\n value: function renderFailureInfo() {\n var query = this.state.query;\n if (query.failureInfo) {\n return _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Error Information\"\n ),\n _react2.default.createElement(\"hr\", { className: \"h3-hr\" }),\n _react2.default.createElement(\n \"table\",\n { className: \"table\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Error Type\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.errorType\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Error Code\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.errorCode.name + \" (\" + this.state.query.errorCode.code + \")\"\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Stack Trace\",\n _react2.default.createElement(\n \"a\",\n { className: \"btn copy-button\", \"data-clipboard-target\": \"#stack-trace\", \"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"Copy to clipboard\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-copy\", \"aria-hidden\": \"true\", alt: \"Copy to clipboard\" })\n )\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n _react2.default.createElement(\n \"pre\",\n { id: \"stack-trace\" },\n QueryDetail.formatStackTrace(query.failureInfo)\n )\n )\n )\n )\n )\n )\n );\n } else 
{\n return \"\";\n }\n }\n }, {\n key: \"render\",\n value: function render() {\n var query = this.state.query;\n\n if (query === null || this.state.initialized === false) {\n var label = _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading...\"\n );\n if (this.state.initialized) {\n label = \"Query not found\";\n }\n return _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h4\",\n null,\n label\n )\n )\n );\n }\n\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(_QueryHeader.QueryHeader, { query: query }),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-6\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Session\"\n ),\n _react2.default.createElement(\"hr\", { className: \"h3-hr\" }),\n _react2.default.createElement(\n \"table\",\n { className: \"table\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"User\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text wrap-text\" },\n _react2.default.createElement(\n \"span\",\n { id: \"query-user\" },\n query.session.user\n ),\n \"\\xA0\\xA0\",\n _react2.default.createElement(\n \"a\",\n { href: \"#\", className: \"copy-button\", \"data-clipboard-target\": \"#query-user\", \"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"Copy to clipboard\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-copy\", \"aria-hidden\": \"true\", alt: \"Copy to clipboard\" })\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n 
\"Principal\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text wrap-text\" },\n query.session.principal\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Source\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text wrap-text\" },\n query.session.source\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Catalog\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.session.catalog\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Schema\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.session.schema\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Client Address\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.session.remoteUserAddress\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Client Tags\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.session.clientTags.join(\", \")\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Session Properties\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text wrap-text\" },\n this.renderSessionProperties()\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Resource Estimates\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text wrap-text\" },\n 
this.renderResourceEstimates()\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-6\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Execution\"\n ),\n _react2.default.createElement(\"hr\", { className: \"h3-hr\" }),\n _react2.default.createElement(\n \"table\",\n { className: \"table\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Resource Group\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text wrap-text\" },\n query.resourceGroupId ? query.resourceGroupId.join(\".\") : \"n/a\"\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Submission Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.formatShortDateTime)(new Date(query.queryStats.createTime))\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Completion Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.queryStats.endTime ? 
(0, _utils.formatShortDateTime)(new Date(query.queryStats.endTime)) : \"\"\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Elapsed Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.queryStats.elapsedTime\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Queued Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.queryStats.queuedTime\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Analysis Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.queryStats.analysisTime\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Planning Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.queryStats.planningTime\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Execution Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.queryStats.executionTime\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-6\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Resource Utilization Summary\"\n ),\n _react2.default.createElement(\"hr\", { className: \"h3-hr\" }),\n _react2.default.createElement(\n \"table\",\n { className: \"table\" },\n _react2.default.createElement(\n 
\"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"CPU Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.queryStats.totalCpuTime\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Scheduled Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.queryStats.totalScheduledTime\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Blocked Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.queryStats.totalBlockedTime\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Input Rows\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.formatCount)(query.queryStats.processedInputPositions)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Input Data\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.processedInputDataSize)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Physical Input Rows\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.formatCount)(query.queryStats.physicalInputPositions)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Physical Input Data\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, 
_utils.parseAndFormatDataSize)(query.queryStats.physicalInputDataSize)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Physical Input Read Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.physicalInputReadTime)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Internal Network Rows\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.formatCount)(query.queryStats.internalNetworkInputPositions)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Internal Network Data\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.internalNetworkInputDataSize)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Peak User Memory\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.peakUserMemoryReservation)\n )\n ),\n (0, _utils.parseDataSize)(query.queryStats.peakRevocableMemoryReservation) > 0 && _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Peak Revocable Memory\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.peakRevocableMemoryReservation)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Peak Total Memory\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: 
\"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.peakTotalMemoryReservation)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Memory Pool\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.memoryPool\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Cumulative User Memory\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.formatDataSizeBytes)(query.queryStats.cumulativeUserMemory / 1000.0) + \" seconds\"\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Output Rows\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.formatCount)(query.queryStats.outputPositions)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Output Data\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.outputDataSize)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Written Rows\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.formatCount)(query.queryStats.writtenPositions)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Logical Written Data\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.logicalWrittenDataSize)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n 
_react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Physical Written Data\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.physicalWrittenDataSize)\n )\n ),\n (0, _utils.parseDataSize)(query.queryStats.spilledDataSize) > 0 && _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Spilled Data\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.spilledDataSize)\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-6\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Timeline\"\n ),\n _react2.default.createElement(\"hr\", { className: \"h3-hr\" }),\n _react2.default.createElement(\n \"table\",\n { className: \"table\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Parallelism\"\n ),\n _react2.default.createElement(\n \"td\",\n { rowSpan: \"2\" },\n _react2.default.createElement(\n \"div\",\n { className: \"query-stats-sparkline-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"cpu-time-rate-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n { className: \"tr-noborder\" },\n _react2.default.createElement(\n \"td\",\n { className: \"info-sparkline-text\" },\n (0, _utils.formatCount)(this.state.cpuTimeRate[this.state.cpuTimeRate.length - 1])\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Scheduled Time/s\"\n ),\n _react2.default.createElement(\n \"td\",\n { rowSpan: 
\"2\" },\n _react2.default.createElement(\n \"div\",\n { className: \"query-stats-sparkline-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"scheduled-time-rate-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n { className: \"tr-noborder\" },\n _react2.default.createElement(\n \"td\",\n { className: \"info-sparkline-text\" },\n (0, _utils.formatCount)(this.state.scheduledTimeRate[this.state.scheduledTimeRate.length - 1])\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Input Rows/s\"\n ),\n _react2.default.createElement(\n \"td\",\n { rowSpan: \"2\" },\n _react2.default.createElement(\n \"div\",\n { className: \"query-stats-sparkline-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"row-input-rate-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n { className: \"tr-noborder\" },\n _react2.default.createElement(\n \"td\",\n { className: \"info-sparkline-text\" },\n (0, _utils.formatCount)(this.state.rowInputRate[this.state.rowInputRate.length - 1])\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Input Bytes/s\"\n ),\n _react2.default.createElement(\n \"td\",\n { rowSpan: \"2\" },\n _react2.default.createElement(\n \"div\",\n { className: \"query-stats-sparkline-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"byte-input-rate-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n { className: 
\"tr-noborder\" },\n _react2.default.createElement(\n \"td\",\n { className: \"info-sparkline-text\" },\n (0, _utils.formatDataSize)(this.state.byteInputRate[this.state.byteInputRate.length - 1])\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Physical Input Bytes/s\"\n ),\n _react2.default.createElement(\n \"td\",\n { rowSpan: \"2\" },\n _react2.default.createElement(\n \"div\",\n { className: \"query-stats-sparkline-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"physical-input-rate-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n { className: \"tr-noborder\" },\n _react2.default.createElement(\n \"td\",\n { className: \"info-sparkline-text\" },\n (0, _utils.formatDataSize)(this.state.physicalInputRate[this.state.physicalInputRate.length - 1])\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Memory Utilization\"\n ),\n _react2.default.createElement(\n \"td\",\n { rowSpan: \"2\" },\n _react2.default.createElement(\n \"div\",\n { className: \"query-stats-sparkline-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"reserved-memory-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n { className: \"tr-noborder\" },\n _react2.default.createElement(\n \"td\",\n { className: \"info-sparkline-text\" },\n (0, _utils.formatDataSize)(this.state.reservedMemory[this.state.reservedMemory.length - 1])\n )\n )\n )\n )\n )\n )\n )\n ),\n this.renderWarningInfo(),\n this.renderFailureInfo(),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n 
_react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Query\",\n _react2.default.createElement(\n \"a\",\n { className: \"btn copy-button\", \"data-clipboard-target\": \"#query-text\", \"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"Copy to clipboard\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-copy\", \"aria-hidden\": \"true\", alt: \"Copy to clipboard\" })\n )\n ),\n _react2.default.createElement(\n \"pre\",\n { id: \"query\" },\n _react2.default.createElement(\n \"code\",\n { className: \"lang-sql\", id: \"query-text\" },\n query.query\n )\n )\n ),\n this.renderPreparedQuery()\n ),\n this.renderStages(),\n this.renderTasks()\n );\n }\n }], [{\n key: \"formatStackTrace\",\n value: function formatStackTrace(info) {\n return QueryDetail.formatStackTraceHelper(info, [], \"\", \"\");\n }\n }, {\n key: \"formatStackTraceHelper\",\n value: function formatStackTraceHelper(info, parentStack, prefix, linePrefix) {\n var s = linePrefix + prefix + QueryDetail.failureInfoToString(info) + \"\\n\";\n\n if (info.stack) {\n var sharedStackFrames = 0;\n if (parentStack !== null) {\n sharedStackFrames = QueryDetail.countSharedStackFrames(info.stack, parentStack);\n }\n\n for (var i = 0; i < info.stack.length - sharedStackFrames; i++) {\n s += linePrefix + \"\\tat \" + info.stack[i] + \"\\n\";\n }\n if (sharedStackFrames !== 0) {\n s += linePrefix + \"\\t... 
\" + sharedStackFrames + \" more\" + \"\\n\";\n }\n }\n\n if (info.suppressed) {\n for (var _i3 = 0; _i3 < info.suppressed.length; _i3++) {\n s += QueryDetail.formatStackTraceHelper(info.suppressed[_i3], info.stack, \"Suppressed: \", linePrefix + \"\\t\");\n }\n }\n\n if (info.cause) {\n s += QueryDetail.formatStackTraceHelper(info.cause, info.stack, \"Caused by: \", linePrefix);\n }\n\n return s;\n }\n }, {\n key: \"countSharedStackFrames\",\n value: function countSharedStackFrames(stack, parentStack) {\n var n = 0;\n var minStackLength = Math.min(stack.length, parentStack.length);\n while (n < minStackLength && stack[stack.length - 1 - n] === parentStack[parentStack.length - 1 - n]) {\n n++;\n }\n return n;\n }\n }, {\n key: \"failureInfoToString\",\n value: function failureInfoToString(t) {\n return t.message !== null ? t.type + \": \" + t.message : t.type;\n }\n }]);\n\n return QueryDetail;\n}(_react2.default.Component);\n\n//# sourceURL=webpack:///./components/QueryDetail.jsx?"); +eval("\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.QueryDetail = undefined;\n\nvar _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();\n\nvar _react = __webpack_require__(/*! react */ \"./node_modules/react/index.js\");\n\nvar _react2 = _interopRequireDefault(_react);\n\nvar _reactable = __webpack_require__(/*! 
reactable */ \"./node_modules/reactable/lib/reactable.js\");\n\nvar _reactable2 = _interopRequireDefault(_reactable);\n\nvar _utils = __webpack_require__(/*! ../utils */ \"./utils.js\");\n\nvar _QueryHeader = __webpack_require__(/*! ./QueryHeader */ \"./components/QueryHeader.jsx\");\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return call && (typeof call === \"object\" || typeof call === \"function\") ? call : self; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function, not \" + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? 
Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nvar Table = _reactable2.default.Table,\n Thead = _reactable2.default.Thead,\n Th = _reactable2.default.Th,\n Tr = _reactable2.default.Tr,\n Td = _reactable2.default.Td;\n\nvar TaskList = function (_React$Component) {\n _inherits(TaskList, _React$Component);\n\n function TaskList() {\n _classCallCheck(this, TaskList);\n\n return _possibleConstructorReturn(this, (TaskList.__proto__ || Object.getPrototypeOf(TaskList)).apply(this, arguments));\n }\n\n _createClass(TaskList, [{\n key: \"render\",\n value: function render() {\n var tasks = this.props.tasks;\n\n if (tasks === undefined || tasks.length === 0) {\n return _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h4\",\n null,\n \"No threads in the selected group\"\n )\n )\n );\n }\n\n var showPortNumbers = TaskList.showPortNumbers(tasks);\n\n var renderedTasks = tasks.map(function (task) {\n var elapsedTime = (0, _utils.parseDuration)(task.stats.elapsedTime);\n if (elapsedTime === 0) {\n elapsedTime = Date.now() - Date.parse(task.stats.createTime);\n }\n\n return _react2.default.createElement(\n Tr,\n { key: task.taskStatus.taskId },\n _react2.default.createElement(\n Td,\n { column: \"id\", value: task.taskStatus.taskId },\n 
_react2.default.createElement(\n \"a\",\n { href: \"/ui/api/worker/\" + task.taskStatus.nodeId + \"/task/\" + task.taskStatus.taskId + \"?pretty\" },\n (0, _utils.getTaskIdSuffix)(task.taskStatus.taskId)\n )\n ),\n _react2.default.createElement(\n Td,\n { column: \"host\", value: (0, _utils.getHostname)(task.taskStatus.self) },\n _react2.default.createElement(\n \"a\",\n { href: \"worker.html?\" + task.taskStatus.nodeId, className: \"font-light\", target: \"_blank\" },\n showPortNumbers ? (0, _utils.getHostAndPort)(task.taskStatus.self) : (0, _utils.getHostname)(task.taskStatus.self)\n )\n ),\n _react2.default.createElement(\n Td,\n { column: \"state\", value: TaskList.formatState(task.taskStatus.state, task.stats.fullyBlocked) },\n TaskList.formatState(task.taskStatus.state, task.stats.fullyBlocked)\n ),\n _react2.default.createElement(\n Td,\n { column: \"rows\", value: task.stats.rawInputPositions },\n (0, _utils.formatCount)(task.stats.rawInputPositions)\n ),\n _react2.default.createElement(\n Td,\n { column: \"rowsSec\", value: (0, _utils.computeRate)(task.stats.rawInputPositions, elapsedTime) },\n (0, _utils.formatCount)((0, _utils.computeRate)(task.stats.rawInputPositions, elapsedTime))\n ),\n _react2.default.createElement(\n Td,\n { column: \"bytes\", value: (0, _utils.parseDataSize)(task.stats.rawInputDataSize) },\n (0, _utils.formatDataSizeBytes)((0, _utils.parseDataSize)(task.stats.rawInputDataSize))\n ),\n _react2.default.createElement(\n Td,\n { column: \"bytesSec\", value: (0, _utils.computeRate)((0, _utils.parseDataSize)(task.stats.rawInputDataSize), elapsedTime) },\n (0, _utils.formatDataSizeBytes)((0, _utils.computeRate)((0, _utils.parseDataSize)(task.stats.rawInputDataSize), elapsedTime))\n ),\n _react2.default.createElement(\n Td,\n { column: \"splitsPending\", value: task.stats.queuedDrivers },\n task.stats.queuedDrivers\n ),\n _react2.default.createElement(\n Td,\n { column: \"splitsRunning\", value: task.stats.runningDrivers },\n 
task.stats.runningDrivers\n ),\n _react2.default.createElement(\n Td,\n { column: \"splitsBlocked\", value: task.stats.blockedDrivers },\n task.stats.blockedDrivers\n ),\n _react2.default.createElement(\n Td,\n { column: \"splitsDone\", value: task.stats.completedDrivers },\n task.stats.completedDrivers\n ),\n _react2.default.createElement(\n Td,\n { column: \"elapsedTime\", value: (0, _utils.parseDuration)(task.stats.elapsedTime) },\n task.stats.elapsedTime\n ),\n _react2.default.createElement(\n Td,\n { column: \"cpuTime\", value: (0, _utils.parseDuration)(task.stats.totalCpuTime) },\n task.stats.totalCpuTime\n ),\n _react2.default.createElement(\n Td,\n { column: \"bufferedBytes\", value: task.outputBuffers.totalBufferedBytes },\n (0, _utils.formatDataSizeBytes)(task.outputBuffers.totalBufferedBytes)\n )\n );\n });\n\n return _react2.default.createElement(\n Table,\n { id: \"tasks\", className: \"table table-striped sortable\", sortable: [{\n column: 'id',\n sortFunction: TaskList.compareTaskId\n }, 'host', 'state', 'splitsPending', 'splitsRunning', 'splitsBlocked', 'splitsDone', 'rows', 'rowsSec', 'bytes', 'bytesSec', 'elapsedTime', 'cpuTime', 'bufferedBytes'],\n defaultSort: { column: 'id', direction: 'asc' } },\n _react2.default.createElement(\n Thead,\n null,\n _react2.default.createElement(\n Th,\n { column: \"id\" },\n \"ID\"\n ),\n _react2.default.createElement(\n Th,\n { column: \"host\" },\n \"Host\"\n ),\n _react2.default.createElement(\n Th,\n { column: \"state\" },\n \"State\"\n ),\n _react2.default.createElement(\n Th,\n { column: \"splitsPending\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-pause\", style: _utils.GLYPHICON_HIGHLIGHT, \"data-toggle\": \"tooltip\", \"data-placement\": \"top\",\n title: \"Pending splits\" })\n ),\n _react2.default.createElement(\n Th,\n { column: \"splitsRunning\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-play\", style: 
_utils.GLYPHICON_HIGHLIGHT, \"data-toggle\": \"tooltip\", \"data-placement\": \"top\",\n title: \"Running splits\" })\n ),\n _react2.default.createElement(\n Th,\n { column: \"splitsBlocked\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-bookmark\", style: _utils.GLYPHICON_HIGHLIGHT, \"data-toggle\": \"tooltip\", \"data-placement\": \"top\",\n title: \"Blocked splits\" })\n ),\n _react2.default.createElement(\n Th,\n { column: \"splitsDone\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-ok\", style: _utils.GLYPHICON_HIGHLIGHT, \"data-toggle\": \"tooltip\", \"data-placement\": \"top\",\n title: \"Completed splits\" })\n ),\n _react2.default.createElement(\n Th,\n { column: \"rows\" },\n \"Rows\"\n ),\n _react2.default.createElement(\n Th,\n { column: \"rowsSec\" },\n \"Rows/s\"\n ),\n _react2.default.createElement(\n Th,\n { column: \"bytes\" },\n \"Bytes\"\n ),\n _react2.default.createElement(\n Th,\n { column: \"bytesSec\" },\n \"Bytes/s\"\n ),\n _react2.default.createElement(\n Th,\n { column: \"elapsedTime\" },\n \"Elapsed\"\n ),\n _react2.default.createElement(\n Th,\n { column: \"cpuTime\" },\n \"CPU Time\"\n ),\n _react2.default.createElement(\n Th,\n { column: \"bufferedBytes\" },\n \"Buffered\"\n )\n ),\n renderedTasks\n );\n }\n }], [{\n key: \"removeQueryId\",\n value: function removeQueryId(id) {\n var pos = id.indexOf('.');\n if (pos !== -1) {\n return id.substring(pos + 1);\n }\n return id;\n }\n }, {\n key: \"compareTaskId\",\n value: function compareTaskId(taskA, taskB) {\n var taskIdArrA = TaskList.removeQueryId(taskA).split(\".\");\n var taskIdArrB = TaskList.removeQueryId(taskB).split(\".\");\n\n if (taskIdArrA.length > taskIdArrB.length) {\n return 1;\n }\n for (var i = 0; i < taskIdArrA.length; i++) {\n var anum = Number.parseInt(taskIdArrA[i]);\n var bnum = Number.parseInt(taskIdArrB[i]);\n if (anum !== bnum) {\n return anum > bnum ? 
1 : -1;\n }\n }\n\n return 0;\n }\n }, {\n key: \"showPortNumbers\",\n value: function showPortNumbers(tasks) {\n // check if any host has multiple port numbers\n var hostToPortNumber = {};\n for (var i = 0; i < tasks.length; i++) {\n var taskUri = tasks[i].taskStatus.self;\n var hostname = (0, _utils.getHostname)(taskUri);\n var port = (0, _utils.getPort)(taskUri);\n if (hostname in hostToPortNumber && hostToPortNumber[hostname] !== port) {\n return true;\n }\n hostToPortNumber[hostname] = port;\n }\n\n return false;\n }\n }, {\n key: \"formatState\",\n value: function formatState(state, fullyBlocked) {\n if (fullyBlocked && state === \"RUNNING\") {\n return \"BLOCKED\";\n } else {\n return state;\n }\n }\n }]);\n\n return TaskList;\n}(_react2.default.Component);\n\nvar BAR_CHART_WIDTH = 800;\n\nvar BAR_CHART_PROPERTIES = {\n type: 'bar',\n barSpacing: '0',\n height: '80px',\n barColor: '#747F96',\n zeroColor: '#8997B3',\n chartRangeMin: 0,\n tooltipClassname: 'sparkline-tooltip',\n tooltipFormat: 'Task {{offset:offset}} - {{value}}',\n disableHiddenCheck: true\n};\n\nvar HISTOGRAM_WIDTH = 175;\n\nvar HISTOGRAM_PROPERTIES = {\n type: 'bar',\n barSpacing: '0',\n height: '80px',\n barColor: '#747F96',\n zeroColor: '#747F96',\n zeroAxis: true,\n chartRangeMin: 0,\n tooltipClassname: 'sparkline-tooltip',\n tooltipFormat: '{{offset:offset}} -- {{value}} tasks',\n disableHiddenCheck: true\n};\n\nvar StageSummary = function (_React$Component2) {\n _inherits(StageSummary, _React$Component2);\n\n function StageSummary(props) {\n _classCallCheck(this, StageSummary);\n\n var _this2 = _possibleConstructorReturn(this, (StageSummary.__proto__ || Object.getPrototypeOf(StageSummary)).call(this, props));\n\n _this2.state = {\n expanded: false,\n lastRender: null\n };\n return _this2;\n }\n\n _createClass(StageSummary, [{\n key: \"getExpandedIcon\",\n value: function getExpandedIcon() {\n return this.state.expanded ? 
\"glyphicon-chevron-up\" : \"glyphicon-chevron-down\";\n }\n }, {\n key: \"getExpandedStyle\",\n value: function getExpandedStyle() {\n return this.state.expanded ? {} : { display: \"none\" };\n }\n }, {\n key: \"toggleExpanded\",\n value: function toggleExpanded() {\n this.setState({\n expanded: !this.state.expanded\n });\n }\n }, {\n key: \"componentDidUpdate\",\n value: function componentDidUpdate() {\n var stage = this.props.stage;\n var numTasks = stage.tasks.length;\n\n // sort the x-axis\n stage.tasks.sort(function (taskA, taskB) {\n return (0, _utils.getTaskNumber)(taskA.taskStatus.taskId) - (0, _utils.getTaskNumber)(taskB.taskStatus.taskId);\n });\n\n var scheduledTimes = stage.tasks.map(function (task) {\n return (0, _utils.parseDuration)(task.stats.totalScheduledTime);\n });\n var cpuTimes = stage.tasks.map(function (task) {\n return (0, _utils.parseDuration)(task.stats.totalCpuTime);\n });\n\n // prevent multiple calls to componentDidUpdate (resulting from calls to setState or otherwise) within the refresh interval from re-rendering sparklines/charts\n if (this.state.lastRender === null || Date.now() - this.state.lastRender >= 1000) {\n var renderTimestamp = Date.now();\n var stageId = (0, _utils.getStageNumber)(stage.stageId);\n\n StageSummary.renderHistogram('#scheduled-time-histogram-' + stageId, scheduledTimes, _utils.formatDuration);\n StageSummary.renderHistogram('#cpu-time-histogram-' + stageId, cpuTimes, _utils.formatDuration);\n\n if (this.state.expanded) {\n // this needs to be a string otherwise it will also be passed to numberFormatter\n var tooltipValueLookups = { 'offset': {} };\n for (var i = 0; i < numTasks; i++) {\n tooltipValueLookups['offset'][i] = (0, _utils.getStageNumber)(stage.stageId) + \".\" + i;\n }\n\n var stageBarChartProperties = $.extend({}, BAR_CHART_PROPERTIES, { barWidth: BAR_CHART_WIDTH / numTasks, tooltipValueLookups: tooltipValueLookups });\n\n $('#scheduled-time-bar-chart-' + stageId).sparkline(scheduledTimes, 
$.extend({}, stageBarChartProperties, { numberFormatter: _utils.formatDuration }));\n $('#cpu-time-bar-chart-' + stageId).sparkline(cpuTimes, $.extend({}, stageBarChartProperties, { numberFormatter: _utils.formatDuration }));\n }\n\n this.setState({\n lastRender: renderTimestamp\n });\n }\n }\n }, {\n key: \"render\",\n value: function render() {\n var stage = this.props.stage;\n if (stage === undefined || !stage.hasOwnProperty('plan')) {\n return _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n \"Information about this stage is unavailable.\"\n )\n );\n }\n\n var totalBufferedBytes = stage.tasks.map(function (task) {\n return task.outputBuffers.totalBufferedBytes;\n }).reduce(function (a, b) {\n return a + b;\n }, 0);\n\n var stageId = (0, _utils.getStageNumber)(stage.stageId);\n\n return _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-id\" },\n _react2.default.createElement(\n \"div\",\n { className: \"stage-state-color\", style: { borderLeftColor: (0, _utils.getStageStateColor)(stage) } },\n stageId\n )\n ),\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"table\",\n { className: \"table single-stage-table\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"table\",\n { className: \"stage-table stage-table-time\" },\n _react2.default.createElement(\n \"thead\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"th\",\n { className: \"stage-table-stat-title stage-table-stat-header\" },\n \"Time\"\n ),\n _react2.default.createElement(\"th\", null)\n )\n ),\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: 
\"stage-table-stat-title\" },\n \"Scheduled\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-text\" },\n stage.stageStats.totalScheduledTime\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title\" },\n \"Blocked\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-text\" },\n stage.stageStats.totalBlockedTime\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title\" },\n \"CPU\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-text\" },\n stage.stageStats.totalCpuTime\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"table\",\n { className: \"stage-table stage-table-memory\" },\n _react2.default.createElement(\n \"thead\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"th\",\n { className: \"stage-table-stat-title stage-table-stat-header\" },\n \"Memory\"\n ),\n _react2.default.createElement(\"th\", null)\n )\n ),\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title\" },\n \"Cumulative\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-text\" },\n (0, _utils.formatDataSizeBytes)(stage.stageStats.cumulativeUserMemory / 1000)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title\" },\n \"Current\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-text\" },\n (0, _utils.parseAndFormatDataSize)(stage.stageStats.userMemoryReservation)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n 
_react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title\" },\n \"Buffers\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-text\" },\n (0, _utils.formatDataSize)(totalBufferedBytes)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title\" },\n \"Peak\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-text\" },\n (0, _utils.parseAndFormatDataSize)(stage.stageStats.peakUserMemoryReservation)\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"table\",\n { className: \"stage-table stage-table-tasks\" },\n _react2.default.createElement(\n \"thead\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"th\",\n { className: \"stage-table-stat-title stage-table-stat-header\" },\n \"Tasks\"\n ),\n _react2.default.createElement(\"th\", null)\n )\n ),\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title\" },\n \"Pending\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-text\" },\n stage.tasks.filter(function (task) {\n return task.taskStatus.state === \"PLANNED\";\n }).length\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title\" },\n \"Running\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-text\" },\n stage.tasks.filter(function (task) {\n return task.taskStatus.state === \"RUNNING\";\n }).length\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title\" },\n \"Blocked\"\n ),\n _react2.default.createElement(\n \"td\",\n { 
className: \"stage-table-stat-text\" },\n stage.tasks.filter(function (task) {\n return task.stats.fullyBlocked;\n }).length\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title\" },\n \"Total\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-text\" },\n stage.tasks.length\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"table\",\n { className: \"stage-table histogram-table\" },\n _react2.default.createElement(\n \"thead\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"th\",\n { className: \"stage-table-stat-title stage-table-chart-header\" },\n \"Scheduled Time Skew\"\n )\n )\n ),\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"histogram-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"histogram\", id: \"scheduled-time-histogram-\" + stageId },\n _react2.default.createElement(\"div\", { className: \"loader\" })\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"table\",\n { className: \"stage-table histogram-table\" },\n _react2.default.createElement(\n \"thead\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"th\",\n { className: \"stage-table-stat-title stage-table-chart-header\" },\n \"CPU Time Skew\"\n )\n )\n ),\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"histogram-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"histogram\", id: \"cpu-time-histogram-\" + stageId },\n _react2.default.createElement(\"div\", { className: \"loader\" 
})\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"expand-charts-container\" },\n _react2.default.createElement(\n \"a\",\n { onClick: this.toggleExpanded.bind(this), className: \"expand-charts-button\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon \" + this.getExpandedIcon(), style: _utils.GLYPHICON_HIGHLIGHT, \"data-toggle\": \"tooltip\", \"data-placement\": \"top\", title: \"More\" })\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n { style: this.getExpandedStyle() },\n _react2.default.createElement(\n \"td\",\n { colSpan: \"6\" },\n _react2.default.createElement(\n \"table\",\n { className: \"expanded-chart\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title expanded-chart-title\" },\n \"Task Scheduled Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"bar-chart-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"bar-chart\", id: \"scheduled-time-bar-chart-\" + stageId },\n _react2.default.createElement(\"div\", { className: \"loader\" })\n )\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n { style: this.getExpandedStyle() },\n _react2.default.createElement(\n \"td\",\n { colSpan: \"6\" },\n _react2.default.createElement(\n \"table\",\n { className: \"expanded-chart\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title expanded-chart-title\" },\n \"Task CPU Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"bar-chart-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"bar-chart\", id: \"cpu-time-bar-chart-\" + stageId },\n _react2.default.createElement(\"div\", { className: \"loader\" })\n )\n )\n )\n )\n 
)\n )\n )\n )\n )\n )\n );\n }\n }], [{\n key: \"renderHistogram\",\n value: function renderHistogram(histogramId, inputData, numberFormatter) {\n var numBuckets = Math.min(HISTOGRAM_WIDTH, Math.sqrt(inputData.length));\n var dataMin = Math.min.apply(null, inputData);\n var dataMax = Math.max.apply(null, inputData);\n var bucketSize = (dataMax - dataMin) / numBuckets;\n\n var histogramData = [];\n if (bucketSize === 0) {\n histogramData = [inputData.length];\n } else {\n for (var i = 0; i < numBuckets + 1; i++) {\n histogramData.push(0);\n }\n\n for (var _i in inputData) {\n var dataPoint = inputData[_i];\n var bucket = Math.floor((dataPoint - dataMin) / bucketSize);\n histogramData[bucket] = histogramData[bucket] + 1;\n }\n }\n\n var tooltipValueLookups = { 'offset': {} };\n for (var _i2 = 0; _i2 < histogramData.length; _i2++) {\n tooltipValueLookups['offset'][_i2] = numberFormatter(dataMin + _i2 * bucketSize) + \"-\" + numberFormatter(dataMin + (_i2 + 1) * bucketSize);\n }\n\n var stageHistogramProperties = $.extend({}, HISTOGRAM_PROPERTIES, { barWidth: HISTOGRAM_WIDTH / histogramData.length, tooltipValueLookups: tooltipValueLookups });\n $(histogramId).sparkline(histogramData, stageHistogramProperties);\n }\n }]);\n\n return StageSummary;\n}(_react2.default.Component);\n\nvar StageList = function (_React$Component3) {\n _inherits(StageList, _React$Component3);\n\n function StageList() {\n _classCallCheck(this, StageList);\n\n return _possibleConstructorReturn(this, (StageList.__proto__ || Object.getPrototypeOf(StageList)).apply(this, arguments));\n }\n\n _createClass(StageList, [{\n key: \"getStages\",\n value: function getStages(stage) {\n if (stage === undefined || !stage.hasOwnProperty('subStages')) {\n return [];\n }\n\n return [].concat.apply(stage, stage.subStages.map(this.getStages, this));\n }\n }, {\n key: \"render\",\n value: function render() {\n var stages = this.getStages(this.props.outputStage);\n\n if (stages === undefined || stages.length === 0) 
{\n return _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n \"No stage information available.\"\n )\n );\n }\n\n var renderedStages = stages.map(function (stage) {\n return _react2.default.createElement(StageSummary, { key: stage.stageId, stage: stage });\n });\n\n return _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"table\",\n { className: \"table\", id: \"stage-list\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n renderedStages\n )\n )\n )\n );\n }\n }]);\n\n return StageList;\n}(_react2.default.Component);\n\nvar SMALL_SPARKLINE_PROPERTIES = {\n width: '100%',\n height: '57px',\n fillColor: '#3F4552',\n lineColor: '#747F96',\n spotColor: '#1EDCFF',\n tooltipClassname: 'sparkline-tooltip',\n disableHiddenCheck: true\n};\n\nvar TASK_FILTER = {\n ALL: function ALL() {\n return true;\n },\n PLANNED: function PLANNED(state) {\n return state === 'PLANNED';\n },\n RUNNING: function RUNNING(state) {\n return state === 'RUNNING';\n },\n FINISHED: function FINISHED(state) {\n return state === 'FINISHED';\n },\n FAILED: function FAILED(state) {\n return state === 'FAILED' || state === 'ABORTED' || state === 'CANCELED';\n }\n};\n\nvar QueryDetail = exports.QueryDetail = function (_React$Component4) {\n _inherits(QueryDetail, _React$Component4);\n\n function QueryDetail(props) {\n _classCallCheck(this, QueryDetail);\n\n var _this4 = _possibleConstructorReturn(this, (QueryDetail.__proto__ || Object.getPrototypeOf(QueryDetail)).call(this, props));\n\n _this4.state = {\n query: null,\n lastSnapshotStages: null,\n lastSnapshotTasks: null,\n\n lastScheduledTime: 0,\n lastCpuTime: 0,\n lastRowInput: 0,\n lastByteInput: 0,\n lastPhysicalInput: 0,\n lastPhysicalTime: 0,\n\n scheduledTimeRate: [],\n cpuTimeRate: [],\n rowInputRate: [],\n 
byteInputRate: [],\n physicalInputRate: [],\n\n reservedMemory: [],\n\n initialized: false,\n queryEnded: false,\n renderingEnded: false,\n\n lastRefresh: null,\n lastRender: null,\n\n stageRefresh: true,\n taskRefresh: true,\n\n taskFilter: TASK_FILTER.ALL\n };\n\n _this4.refreshLoop = _this4.refreshLoop.bind(_this4);\n return _this4;\n }\n\n _createClass(QueryDetail, [{\n key: \"resetTimer\",\n value: function resetTimer() {\n clearTimeout(this.timeoutId);\n // stop refreshing when query finishes or fails\n if (this.state.query === null || !this.state.queryEnded) {\n // task.info-update-interval is set to 3 seconds by default\n this.timeoutId = setTimeout(this.refreshLoop, 3000);\n }\n }\n }, {\n key: \"refreshLoop\",\n value: function refreshLoop() {\n var _this5 = this;\n\n clearTimeout(this.timeoutId); // to stop multiple series of refreshLoop from going on simultaneously\n var queryId = (0, _utils.getFirstParameter)(window.location.search);\n $.get('/ui/api/query/' + queryId, function (query) {\n var lastSnapshotStages = this.state.lastSnapshotStage;\n if (this.state.stageRefresh) {\n lastSnapshotStages = query.outputStage;\n }\n var lastSnapshotTasks = this.state.lastSnapshotTasks;\n if (this.state.taskRefresh) {\n lastSnapshotTasks = query.outputStage;\n }\n\n var lastRefresh = this.state.lastRefresh;\n var lastScheduledTime = this.state.lastScheduledTime;\n var lastCpuTime = this.state.lastCpuTime;\n var lastRowInput = this.state.lastRowInput;\n var lastByteInput = this.state.lastByteInput;\n var lastPhysicalInput = this.state.lastPhysicalInput;\n var lastPhysicalTime = this.state.lastPhysicalTime;\n var alreadyEnded = this.state.queryEnded;\n var nowMillis = Date.now();\n\n this.setState({\n query: query,\n lastSnapshotStage: lastSnapshotStages,\n lastSnapshotTasks: lastSnapshotTasks,\n\n lastPhysicalTime: (0, _utils.parseDuration)(query.queryStats.physicalInputReadTime),\n lastScheduledTime: (0, 
_utils.parseDuration)(query.queryStats.totalScheduledTime),\n lastCpuTime: (0, _utils.parseDuration)(query.queryStats.totalCpuTime),\n lastRowInput: query.queryStats.processedInputPositions,\n lastByteInput: (0, _utils.parseDataSize)(query.queryStats.processedInputDataSize),\n lastPhysicalInput: (0, _utils.parseDataSize)(query.queryStats.physicalInputDataSize),\n\n initialized: true,\n queryEnded: !!query.finalQueryInfo,\n\n lastRefresh: nowMillis\n });\n\n // i.e. don't show sparklines if we've already decided not to update or if we don't have one previous measurement\n if (alreadyEnded || lastRefresh === null && query.state === \"RUNNING\") {\n this.resetTimer();\n return;\n }\n\n if (lastRefresh === null) {\n lastRefresh = nowMillis - (0, _utils.parseDuration)(query.queryStats.elapsedTime);\n }\n\n var elapsedSecsSinceLastRefresh = (nowMillis - lastRefresh) / 1000.0;\n if (elapsedSecsSinceLastRefresh >= 0) {\n var currentScheduledTimeRate = ((0, _utils.parseDuration)(query.queryStats.totalScheduledTime) - lastScheduledTime) / (elapsedSecsSinceLastRefresh * 1000);\n var currentCpuTimeRate = ((0, _utils.parseDuration)(query.queryStats.totalCpuTime) - lastCpuTime) / (elapsedSecsSinceLastRefresh * 1000);\n var currentPhysicalReadTime = ((0, _utils.parseDuration)(query.queryStats.physicalInputReadTime) - lastPhysicalTime) / 1000;\n var currentRowInputRate = (query.queryStats.processedInputPositions - lastRowInput) / elapsedSecsSinceLastRefresh;\n var currentByteInputRate = ((0, _utils.parseDataSize)(query.queryStats.processedInputDataSize) - lastByteInput) / elapsedSecsSinceLastRefresh;\n var currentPhysicalInputRate = currentPhysicalReadTime > 0 ? 
((0, _utils.parseDataSize)(query.queryStats.physicalInputDataSize) - lastPhysicalInput) / currentPhysicalReadTime : 0;\n\n this.setState({\n scheduledTimeRate: (0, _utils.addToHistory)(currentScheduledTimeRate, this.state.scheduledTimeRate),\n cpuTimeRate: (0, _utils.addToHistory)(currentCpuTimeRate, this.state.cpuTimeRate),\n rowInputRate: (0, _utils.addToHistory)(currentRowInputRate, this.state.rowInputRate),\n byteInputRate: (0, _utils.addToHistory)(currentByteInputRate, this.state.byteInputRate),\n reservedMemory: (0, _utils.addToHistory)((0, _utils.parseDataSize)(query.queryStats.totalMemoryReservation), this.state.reservedMemory),\n physicalInputRate: (0, _utils.addToHistory)(currentPhysicalInputRate, this.state.physicalInputRate)\n });\n }\n this.resetTimer();\n }.bind(this)).error(function () {\n _this5.setState({\n initialized: true\n });\n _this5.resetTimer();\n });\n }\n }, {\n key: \"handleTaskRefreshClick\",\n value: function handleTaskRefreshClick() {\n if (this.state.taskRefresh) {\n this.setState({\n taskRefresh: false,\n lastSnapshotTasks: this.state.query.outputStage\n });\n } else {\n this.setState({\n taskRefresh: true\n });\n }\n }\n }, {\n key: \"renderTaskRefreshButton\",\n value: function renderTaskRefreshButton() {\n if (this.state.taskRefresh) {\n return _react2.default.createElement(\n \"button\",\n { className: \"btn btn-info live-button\", onClick: this.handleTaskRefreshClick.bind(this) },\n \"Auto-Refresh: On\"\n );\n } else {\n return _react2.default.createElement(\n \"button\",\n { className: \"btn btn-info live-button\", onClick: this.handleTaskRefreshClick.bind(this) },\n \"Auto-Refresh: Off\"\n );\n }\n }\n }, {\n key: \"handleStageRefreshClick\",\n value: function handleStageRefreshClick() {\n if (this.state.stageRefresh) {\n this.setState({\n stageRefresh: false,\n lastSnapshotStages: this.state.query.outputStage\n });\n } else {\n this.setState({\n stageRefresh: true\n });\n }\n }\n }, {\n key: \"renderStageRefreshButton\",\n 
value: function renderStageRefreshButton() {\n if (this.state.stageRefresh) {\n return _react2.default.createElement(\n \"button\",\n { className: \"btn btn-info live-button\", onClick: this.handleStageRefreshClick.bind(this) },\n \"Auto-Refresh: On\"\n );\n } else {\n return _react2.default.createElement(\n \"button\",\n { className: \"btn btn-info live-button\", onClick: this.handleStageRefreshClick.bind(this) },\n \"Auto-Refresh: Off\"\n );\n }\n }\n }, {\n key: \"renderTaskFilterListItem\",\n value: function renderTaskFilterListItem(taskFilter, taskFilterText) {\n return _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"a\",\n { href: \"#\", className: this.state.taskFilter === taskFilter ? \"selected\" : \"\", onClick: this.handleTaskFilterClick.bind(this, taskFilter) },\n taskFilterText\n )\n );\n }\n }, {\n key: \"handleTaskFilterClick\",\n value: function handleTaskFilterClick(filter, event) {\n this.setState({\n taskFilter: filter\n });\n event.preventDefault();\n }\n }, {\n key: \"getTasksFromStage\",\n value: function getTasksFromStage(stage) {\n if (stage === undefined || !stage.hasOwnProperty('subStages') || !stage.hasOwnProperty('tasks')) {\n return [];\n }\n\n return [].concat.apply(stage.tasks, stage.subStages.map(this.getTasksFromStage, this));\n }\n }, {\n key: \"componentDidMount\",\n value: function componentDidMount() {\n this.refreshLoop();\n }\n }, {\n key: \"componentDidUpdate\",\n value: function componentDidUpdate() {\n // prevent multiple calls to componentDidUpdate (resulting from calls to setState or otherwise) within the refresh interval from re-rendering sparklines/charts\n if (this.state.lastRender === null || Date.now() - this.state.lastRender >= 1000 || this.state.ended && !this.state.renderingEnded) {\n var renderTimestamp = Date.now();\n $('#scheduled-time-rate-sparkline').sparkline(this.state.scheduledTimeRate, $.extend({}, SMALL_SPARKLINE_PROPERTIES, {\n chartRangeMin: 0,\n numberFormatter: 
_utils.precisionRound\n }));\n $('#cpu-time-rate-sparkline').sparkline(this.state.cpuTimeRate, $.extend({}, SMALL_SPARKLINE_PROPERTIES, { chartRangeMin: 0, numberFormatter: _utils.precisionRound }));\n $('#row-input-rate-sparkline').sparkline(this.state.rowInputRate, $.extend({}, SMALL_SPARKLINE_PROPERTIES, { numberFormatter: _utils.formatCount }));\n $('#byte-input-rate-sparkline').sparkline(this.state.byteInputRate, $.extend({}, SMALL_SPARKLINE_PROPERTIES, { numberFormatter: _utils.formatDataSize }));\n $('#reserved-memory-sparkline').sparkline(this.state.reservedMemory, $.extend({}, SMALL_SPARKLINE_PROPERTIES, { numberFormatter: _utils.formatDataSize }));\n $('#physical-input-rate-sparkline').sparkline(this.state.physicalInputRate, $.extend({}, SMALL_SPARKLINE_PROPERTIES, { numberFormatter: _utils.formatDataSize }));\n\n if (this.state.lastRender === null) {\n $('#query').each(function (i, block) {\n hljs.highlightBlock(block);\n });\n\n $('#prepared-query').each(function (i, block) {\n hljs.highlightBlock(block);\n });\n }\n\n this.setState({\n renderingEnded: this.state.ended,\n lastRender: renderTimestamp\n });\n }\n\n $('[data-toggle=\"tooltip\"]').tooltip();\n new window.ClipboardJS('.copy-button');\n }\n }, {\n key: \"renderTasks\",\n value: function renderTasks() {\n var _this6 = this;\n\n if (this.state.lastSnapshotTasks === null) {\n return;\n }\n\n var tasks = this.getTasksFromStage(this.state.lastSnapshotTasks).filter(function (task) {\n return _this6.state.taskFilter(task.taskStatus.state);\n }, this);\n\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-9\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Tasks\"\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-3\" },\n _react2.default.createElement(\n \"table\",\n { className: \"header-inline-links\" },\n 
_react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"div\",\n { className: \"input-group-btn text-right\" },\n _react2.default.createElement(\n \"button\",\n { type: \"button\", className: \"btn btn-default dropdown-toggle pull-right text-right\", \"data-toggle\": \"dropdown\", \"aria-haspopup\": \"true\",\n \"aria-expanded\": \"false\" },\n \"Show \",\n _react2.default.createElement(\"span\", { className: \"caret\" })\n ),\n _react2.default.createElement(\n \"ul\",\n { className: \"dropdown-menu\" },\n this.renderTaskFilterListItem(TASK_FILTER.ALL, \"All\"),\n this.renderTaskFilterListItem(TASK_FILTER.PLANNED, \"Planned\"),\n this.renderTaskFilterListItem(TASK_FILTER.RUNNING, \"Running\"),\n this.renderTaskFilterListItem(TASK_FILTER.FINISHED, \"Finished\"),\n this.renderTaskFilterListItem(TASK_FILTER.FAILED, \"Aborted/Canceled/Failed\")\n )\n )\n ),\n _react2.default.createElement(\n \"td\",\n null,\n \"\\xA0\\xA0\",\n this.renderTaskRefreshButton()\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(TaskList, { key: this.state.query.queryId, tasks: tasks })\n )\n )\n );\n }\n }, {\n key: \"renderStages\",\n value: function renderStages() {\n if (this.state.lastSnapshotStage === null) {\n return;\n }\n\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-9\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Stages\"\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-3\" },\n _react2.default.createElement(\n \"table\",\n { className: \"header-inline-links\" },\n _react2.default.createElement(\n \"tbody\",\n 
null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n this.renderStageRefreshButton()\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(StageList, { key: this.state.query.queryId, outputStage: this.state.lastSnapshotStage })\n )\n )\n );\n }\n }, {\n key: \"renderPreparedQuery\",\n value: function renderPreparedQuery() {\n var query = this.state.query;\n if (!query.hasOwnProperty('preparedQuery') || query.preparedQuery === null) {\n return;\n }\n\n return _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Prepared Query\",\n _react2.default.createElement(\n \"a\",\n { className: \"btn copy-button\", \"data-clipboard-target\": \"#prepared-query-text\", \"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"Copy to clipboard\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-copy\", \"aria-hidden\": \"true\", alt: \"Copy to clipboard\" })\n )\n ),\n _react2.default.createElement(\n \"pre\",\n { id: \"prepared-query\" },\n _react2.default.createElement(\n \"code\",\n { className: \"lang-sql\", id: \"prepared-query-text\" },\n query.preparedQuery\n )\n )\n );\n }\n }, {\n key: \"renderSessionProperties\",\n value: function renderSessionProperties() {\n var query = this.state.query;\n\n var properties = [];\n for (var property in query.session.systemProperties) {\n if (query.session.systemProperties.hasOwnProperty(property)) {\n properties.push(_react2.default.createElement(\n \"span\",\n null,\n \"- \",\n property + \"=\" + query.session.systemProperties[property],\n \" \",\n _react2.default.createElement(\"br\", null)\n ));\n }\n }\n\n for (var catalog in query.session.catalogProperties) {\n if 
(query.session.catalogProperties.hasOwnProperty(catalog)) {\n for (var _property in query.session.catalogProperties[catalog]) {\n if (query.session.catalogProperties[catalog].hasOwnProperty(_property)) {\n properties.push(_react2.default.createElement(\n \"span\",\n null,\n \"- \",\n catalog + \".\" + _property + \"=\" + query.session.catalogProperties[catalog][_property],\n \" \",\n _react2.default.createElement(\"br\", null)\n ));\n }\n }\n }\n }\n\n return properties;\n }\n }, {\n key: \"renderResourceEstimates\",\n value: function renderResourceEstimates() {\n var query = this.state.query;\n var estimates = query.session.resourceEstimates;\n var renderedEstimates = [];\n\n for (var resource in estimates) {\n if (estimates.hasOwnProperty(resource)) {\n var upperChars = resource.match(/([A-Z])/g) || [];\n var snakeCased = resource;\n for (var i = 0, n = upperChars.length; i < n; i++) {\n snakeCased = snakeCased.replace(new RegExp(upperChars[i]), '_' + upperChars[i].toLowerCase());\n }\n\n renderedEstimates.push(_react2.default.createElement(\n \"span\",\n null,\n \"- \",\n snakeCased + \"=\" + query.session.resourceEstimates[resource],\n \" \",\n _react2.default.createElement(\"br\", null)\n ));\n }\n }\n\n return renderedEstimates;\n }\n }, {\n key: \"renderWarningInfo\",\n value: function renderWarningInfo() {\n var query = this.state.query;\n if (query.warnings.length > 0) {\n return _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Warnings\"\n ),\n _react2.default.createElement(\"hr\", { className: \"h3-hr\" }),\n _react2.default.createElement(\n \"table\",\n { className: \"table\", id: \"warnings-table\" },\n query.warnings.map(function (warning) {\n return _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n warning.warningCode.name\n ),\n 
_react2.default.createElement(\n \"td\",\n null,\n warning.message\n )\n );\n })\n )\n )\n );\n } else {\n return null;\n }\n }\n }, {\n key: \"renderFailureInfo\",\n value: function renderFailureInfo() {\n var query = this.state.query;\n if (query.failureInfo) {\n return _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Error Information\"\n ),\n _react2.default.createElement(\"hr\", { className: \"h3-hr\" }),\n _react2.default.createElement(\n \"table\",\n { className: \"table\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Error Type\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.errorType\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Error Code\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.errorCode.name + \" (\" + this.state.query.errorCode.code + \")\"\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Stack Trace\",\n _react2.default.createElement(\n \"a\",\n { className: \"btn copy-button\", \"data-clipboard-target\": \"#stack-trace\", \"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"Copy to clipboard\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-copy\", \"aria-hidden\": \"true\", alt: \"Copy to clipboard\" })\n )\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n _react2.default.createElement(\n \"pre\",\n { id: \"stack-trace\" },\n QueryDetail.formatStackTrace(query.failureInfo)\n )\n )\n )\n )\n )\n )\n );\n } else 
{\n return \"\";\n }\n }\n }, {\n key: \"render\",\n value: function render() {\n var query = this.state.query;\n\n if (query === null || this.state.initialized === false) {\n var label = _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading...\"\n );\n if (this.state.initialized) {\n label = \"Query not found\";\n }\n return _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h4\",\n null,\n label\n )\n )\n );\n }\n\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(_QueryHeader.QueryHeader, { query: query }),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-6\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Session\"\n ),\n _react2.default.createElement(\"hr\", { className: \"h3-hr\" }),\n _react2.default.createElement(\n \"table\",\n { className: \"table\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"User\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text wrap-text\" },\n _react2.default.createElement(\n \"span\",\n { id: \"query-user\" },\n query.session.user\n ),\n \"\\xA0\\xA0\",\n _react2.default.createElement(\n \"a\",\n { href: \"#\", className: \"copy-button\", \"data-clipboard-target\": \"#query-user\", \"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"Copy to clipboard\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-copy\", \"aria-hidden\": \"true\", alt: \"Copy to clipboard\" })\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n 
\"Principal\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text wrap-text\" },\n query.session.principal\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Source\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text wrap-text\" },\n query.session.source\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Catalog\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.session.catalog\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Schema\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.session.schema\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Client Address\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.session.remoteUserAddress\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Client Tags\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.session.clientTags.join(\", \")\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Session Properties\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text wrap-text\" },\n this.renderSessionProperties()\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Resource Estimates\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text wrap-text\" },\n 
this.renderResourceEstimates()\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-6\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Execution\"\n ),\n _react2.default.createElement(\"hr\", { className: \"h3-hr\" }),\n _react2.default.createElement(\n \"table\",\n { className: \"table\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Resource Group\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text wrap-text\" },\n query.resourceGroupId ? query.resourceGroupId.join(\".\") : \"n/a\"\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Submission Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.formatShortDateTime)(new Date(query.queryStats.createTime))\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Completion Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.queryStats.endTime ? 
(0, _utils.formatShortDateTime)(new Date(query.queryStats.endTime)) : \"\"\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Elapsed Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.queryStats.elapsedTime\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Queued Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.queryStats.queuedTime\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Analysis Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.queryStats.analysisTime\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Planning Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.queryStats.planningTime\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Execution Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.queryStats.executionTime\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-6\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Resource Utilization Summary\"\n ),\n _react2.default.createElement(\"hr\", { className: \"h3-hr\" }),\n _react2.default.createElement(\n \"table\",\n { className: \"table\" },\n _react2.default.createElement(\n 
\"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"CPU Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.queryStats.totalCpuTime\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Scheduled Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.queryStats.totalScheduledTime\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Input Rows\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.formatCount)(query.queryStats.processedInputPositions)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Input Data\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.processedInputDataSize)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Physical Input Rows\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.formatCount)(query.queryStats.physicalInputPositions)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Physical Input Data\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.physicalInputDataSize)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Physical Input Read Time\"\n ),\n _react2.default.createElement(\n 
\"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.physicalInputReadTime)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Internal Network Rows\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.formatCount)(query.queryStats.internalNetworkInputPositions)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Internal Network Data\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.internalNetworkInputDataSize)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Peak User Memory\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.peakUserMemoryReservation)\n )\n ),\n (0, _utils.parseDataSize)(query.queryStats.peakRevocableMemoryReservation) > 0 && _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Peak Revocable Memory\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.peakRevocableMemoryReservation)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Peak Total Memory\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.peakTotalMemoryReservation)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Memory Pool\"\n ),\n 
_react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.memoryPool\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Cumulative User Memory\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.formatDataSizeBytes)(query.queryStats.cumulativeUserMemory / 1000.0) + \" seconds\"\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Output Rows\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.formatCount)(query.queryStats.outputPositions)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Output Data\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.outputDataSize)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Written Rows\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.formatCount)(query.queryStats.writtenPositions)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Logical Written Data\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.logicalWrittenDataSize)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Physical Written Data\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.physicalWrittenDataSize)\n )\n ),\n (0, 
_utils.parseDataSize)(query.queryStats.spilledDataSize) > 0 && _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Spilled Data\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.spilledDataSize)\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-6\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Timeline\"\n ),\n _react2.default.createElement(\"hr\", { className: \"h3-hr\" }),\n _react2.default.createElement(\n \"table\",\n { className: \"table\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Parallelism\"\n ),\n _react2.default.createElement(\n \"td\",\n { rowSpan: \"2\" },\n _react2.default.createElement(\n \"div\",\n { className: \"query-stats-sparkline-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"cpu-time-rate-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n { className: \"tr-noborder\" },\n _react2.default.createElement(\n \"td\",\n { className: \"info-sparkline-text\" },\n (0, _utils.formatCount)(this.state.cpuTimeRate[this.state.cpuTimeRate.length - 1])\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Scheduled Time/s\"\n ),\n _react2.default.createElement(\n \"td\",\n { rowSpan: \"2\" },\n _react2.default.createElement(\n \"div\",\n { className: \"query-stats-sparkline-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"scheduled-time-rate-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { 
className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n { className: \"tr-noborder\" },\n _react2.default.createElement(\n \"td\",\n { className: \"info-sparkline-text\" },\n (0, _utils.formatCount)(this.state.scheduledTimeRate[this.state.scheduledTimeRate.length - 1])\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Input Rows/s\"\n ),\n _react2.default.createElement(\n \"td\",\n { rowSpan: \"2\" },\n _react2.default.createElement(\n \"div\",\n { className: \"query-stats-sparkline-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"row-input-rate-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n { className: \"tr-noborder\" },\n _react2.default.createElement(\n \"td\",\n { className: \"info-sparkline-text\" },\n (0, _utils.formatCount)(this.state.rowInputRate[this.state.rowInputRate.length - 1])\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Input Bytes/s\"\n ),\n _react2.default.createElement(\n \"td\",\n { rowSpan: \"2\" },\n _react2.default.createElement(\n \"div\",\n { className: \"query-stats-sparkline-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"byte-input-rate-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n { className: \"tr-noborder\" },\n _react2.default.createElement(\n \"td\",\n { className: \"info-sparkline-text\" },\n (0, _utils.formatDataSize)(this.state.byteInputRate[this.state.byteInputRate.length - 1])\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n 
_react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Physical Input Bytes/s\"\n ),\n _react2.default.createElement(\n \"td\",\n { rowSpan: \"2\" },\n _react2.default.createElement(\n \"div\",\n { className: \"query-stats-sparkline-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"physical-input-rate-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n { className: \"tr-noborder\" },\n _react2.default.createElement(\n \"td\",\n { className: \"info-sparkline-text\" },\n (0, _utils.formatDataSize)(this.state.physicalInputRate[this.state.physicalInputRate.length - 1])\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Memory Utilization\"\n ),\n _react2.default.createElement(\n \"td\",\n { rowSpan: \"2\" },\n _react2.default.createElement(\n \"div\",\n { className: \"query-stats-sparkline-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"reserved-memory-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n { className: \"tr-noborder\" },\n _react2.default.createElement(\n \"td\",\n { className: \"info-sparkline-text\" },\n (0, _utils.formatDataSize)(this.state.reservedMemory[this.state.reservedMemory.length - 1])\n )\n )\n )\n )\n )\n )\n )\n ),\n this.renderWarningInfo(),\n this.renderFailureInfo(),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Query\",\n _react2.default.createElement(\n \"a\",\n { className: \"btn copy-button\", \"data-clipboard-target\": \"#query-text\", 
\"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"Copy to clipboard\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-copy\", \"aria-hidden\": \"true\", alt: \"Copy to clipboard\" })\n )\n ),\n _react2.default.createElement(\n \"pre\",\n { id: \"query\" },\n _react2.default.createElement(\n \"code\",\n { className: \"lang-sql\", id: \"query-text\" },\n query.query\n )\n )\n ),\n this.renderPreparedQuery()\n ),\n this.renderStages(),\n this.renderTasks()\n );\n }\n }], [{\n key: \"formatStackTrace\",\n value: function formatStackTrace(info) {\n return QueryDetail.formatStackTraceHelper(info, [], \"\", \"\");\n }\n }, {\n key: \"formatStackTraceHelper\",\n value: function formatStackTraceHelper(info, parentStack, prefix, linePrefix) {\n var s = linePrefix + prefix + QueryDetail.failureInfoToString(info) + \"\\n\";\n\n if (info.stack) {\n var sharedStackFrames = 0;\n if (parentStack !== null) {\n sharedStackFrames = QueryDetail.countSharedStackFrames(info.stack, parentStack);\n }\n\n for (var i = 0; i < info.stack.length - sharedStackFrames; i++) {\n s += linePrefix + \"\\tat \" + info.stack[i] + \"\\n\";\n }\n if (sharedStackFrames !== 0) {\n s += linePrefix + \"\\t... 
\" + sharedStackFrames + \" more\" + \"\\n\";\n }\n }\n\n if (info.suppressed) {\n for (var _i3 = 0; _i3 < info.suppressed.length; _i3++) {\n s += QueryDetail.formatStackTraceHelper(info.suppressed[_i3], info.stack, \"Suppressed: \", linePrefix + \"\\t\");\n }\n }\n\n if (info.cause) {\n s += QueryDetail.formatStackTraceHelper(info.cause, info.stack, \"Caused by: \", linePrefix);\n }\n\n return s;\n }\n }, {\n key: \"countSharedStackFrames\",\n value: function countSharedStackFrames(stack, parentStack) {\n var n = 0;\n var minStackLength = Math.min(stack.length, parentStack.length);\n while (n < minStackLength && stack[stack.length - 1 - n] === parentStack[parentStack.length - 1 - n]) {\n n++;\n }\n return n;\n }\n }, {\n key: \"failureInfoToString\",\n value: function failureInfoToString(t) {\n return t.message !== null ? t.type + \": \" + t.message : t.type;\n }\n }]);\n\n return QueryDetail;\n}(_react2.default.Component);\n\n//# sourceURL=webpack:///./components/QueryDetail.jsx?"); /***/ }), diff --git a/presto-main/src/main/resources/webapp/src/components/QueryDetail.jsx b/presto-main/src/main/resources/webapp/src/components/QueryDetail.jsx index 3b536e1d286c..45d2666df777 100644 --- a/presto-main/src/main/resources/webapp/src/components/QueryDetail.jsx +++ b/presto-main/src/main/resources/webapp/src/components/QueryDetail.jsx @@ -1336,14 +1336,6 @@ export class QueryDetail extends React.Component { {query.queryStats.totalScheduledTime} - - - Blocked Time - - - {query.queryStats.totalBlockedTime} - - Input Rows From 9c08506336e008d4bc0d64b68b01addcfbea0824 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=81ukasz=20Walkiewicz?= Date: Tue, 21 Apr 2020 17:29:34 +0200 Subject: [PATCH 253/519] Support LDAP without TLS --- .../password/ldap/LdapAuthenticator.java | 4 ++ .../plugin/password/ldap/LdapConfig.java | 28 +++++++++- .../plugin/password/ldap/TestLdapConfig.java | 24 +++++++-- .../environment/SinglenodeLdapInsecure.java | 54 +++++++++++++++++++ 
.../password-authenticator.properties | 6 +++ .../bin/product-tests-suite-6-non-generic.sh | 5 ++ 6 files changed, 117 insertions(+), 4 deletions(-) create mode 100644 presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeLdapInsecure.java create mode 100644 presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-ldap-without-ssl/password-authenticator.properties diff --git a/presto-password-authenticators/src/main/java/io/prestosql/plugin/password/ldap/LdapAuthenticator.java b/presto-password-authenticators/src/main/java/io/prestosql/plugin/password/ldap/LdapAuthenticator.java index 91336e9c0a22..67491a625668 100644 --- a/presto-password-authenticators/src/main/java/io/prestosql/plugin/password/ldap/LdapAuthenticator.java +++ b/presto-password-authenticators/src/main/java/io/prestosql/plugin/password/ldap/LdapAuthenticator.java @@ -91,6 +91,10 @@ public LdapAuthenticator(LdapConfig ldapConfig) checkState(bindDistinguishedName.isPresent() || userBindSearchPattern.isPresent(), "Either user bind search pattern or bind distinguished name must be provided"); + if (ldapConfig.getLdapUrl().startsWith("ldap://")) { + log.warn("Passwords will be sent in the clear to the LDAP server. 
Please consider using SSL to connect."); + } + Map environment = ImmutableMap.builder() .put(INITIAL_CONTEXT_FACTORY, "com.sun.jndi.ldap.LdapCtxFactory") .put(PROVIDER_URL, ldapUrl) diff --git a/presto-password-authenticators/src/main/java/io/prestosql/plugin/password/ldap/LdapConfig.java b/presto-password-authenticators/src/main/java/io/prestosql/plugin/password/ldap/LdapConfig.java index 57cfbc3cf4ce..109559821e3d 100644 --- a/presto-password-authenticators/src/main/java/io/prestosql/plugin/password/ldap/LdapConfig.java +++ b/presto-password-authenticators/src/main/java/io/prestosql/plugin/password/ldap/LdapConfig.java @@ -16,16 +16,23 @@ import io.airlift.configuration.Config; import io.airlift.configuration.ConfigDescription; import io.airlift.configuration.ConfigSecuritySensitive; +import io.airlift.log.Logger; import io.airlift.units.Duration; +import javax.validation.constraints.AssertTrue; import javax.validation.constraints.NotNull; import javax.validation.constraints.Pattern; import java.util.concurrent.TimeUnit; +import static com.google.common.base.Strings.nullToEmpty; + public class LdapConfig { + private static final Logger log = Logger.get(LdapConfig.class); + private String ldapUrl; + private boolean allowInsecure; private String userBindSearchPattern; private String groupAuthorizationSearchPattern; private String userBaseDistinguishedName; @@ -35,7 +42,7 @@ public class LdapConfig private Duration ldapCacheTtl = new Duration(1, TimeUnit.HOURS); @NotNull - @Pattern(regexp = "^ldaps://.*", message = "LDAP without SSL/TLS unsupported. Expected ldaps://") + @Pattern(regexp = "^ldaps?://.*", message = "Invalid LDAP server URL. 
Expected ldap:// or ldaps://") public String getLdapUrl() { return ldapUrl; @@ -49,6 +56,25 @@ public LdapConfig setLdapUrl(String url) return this; } + public boolean isAllowInsecure() + { + return allowInsecure; + } + + @Config("ldap.allow-insecure") + @ConfigDescription("Allow insecure connection to the LDAP server") + public LdapConfig setAllowInsecure(boolean allowInsecure) + { + this.allowInsecure = allowInsecure; + return this; + } + + @AssertTrue(message = "Connecting to the LDAP server without SSL enabled requires `ldap.allow-insecure=true`") + public boolean isUrlConfigurationValid() + { + return nullToEmpty(ldapUrl).startsWith("ldaps://") || allowInsecure; + } + public String getUserBindSearchPattern() { return userBindSearchPattern; diff --git a/presto-password-authenticators/src/test/java/io/prestosql/plugin/password/ldap/TestLdapConfig.java b/presto-password-authenticators/src/test/java/io/prestosql/plugin/password/ldap/TestLdapConfig.java index 979ad4cc5235..f1cbb12f8c14 100644 --- a/presto-password-authenticators/src/test/java/io/prestosql/plugin/password/ldap/TestLdapConfig.java +++ b/presto-password-authenticators/src/test/java/io/prestosql/plugin/password/ldap/TestLdapConfig.java @@ -18,6 +18,7 @@ import io.airlift.units.Duration; import org.testng.annotations.Test; +import javax.validation.constraints.AssertTrue; import javax.validation.constraints.NotNull; import javax.validation.constraints.Pattern; @@ -37,6 +38,7 @@ public void testDefault() { assertRecordedDefaults(recordDefaults(LdapConfig.class) .setLdapUrl(null) + .setAllowInsecure(false) .setUserBindSearchPattern(null) .setUserBaseDistinguishedName(null) .setGroupAuthorizationSearchPattern(null) @@ -51,6 +53,7 @@ public void testExplicitConfig() { Map properties = new ImmutableMap.Builder() .put("ldap.url", "ldaps://localhost:636") + .put("ldap.allow-insecure", "true") .put("ldap.user-bind-pattern", "uid=${USER},ou=org,dc=test,dc=com") .put("ldap.user-base-dn", "dc=test,dc=com") 
.put("ldap.group-auth-pattern", "&(objectClass=user)(memberOf=cn=group)(user=username)") @@ -62,6 +65,7 @@ public void testExplicitConfig() LdapConfig expected = new LdapConfig() .setLdapUrl("ldaps://localhost:636") + .setAllowInsecure(true) .setUserBindSearchPattern("uid=${USER},ou=org,dc=test,dc=com") .setUserBaseDistinguishedName("dc=test,dc=com") .setGroupAuthorizationSearchPattern("&(objectClass=user)(memberOf=cn=group)(user=username)") @@ -82,9 +86,23 @@ public void testValidation() .setUserBaseDistinguishedName("dc=test,dc=com") .setGroupAuthorizationSearchPattern("&(objectClass=user)(memberOf=cn=group)(user=username)")); - assertFailsValidation(new LdapConfig().setLdapUrl("ldap://"), "ldapUrl", "LDAP without SSL/TLS unsupported. Expected ldaps://", Pattern.class); - assertFailsValidation(new LdapConfig().setLdapUrl("localhost"), "ldapUrl", "LDAP without SSL/TLS unsupported. Expected ldaps://", Pattern.class); - assertFailsValidation(new LdapConfig().setLdapUrl("ldaps:/localhost"), "ldapUrl", "LDAP without SSL/TLS unsupported. Expected ldaps://", Pattern.class); + assertValidates(new LdapConfig() + .setLdapUrl("ldap://localhost") + .setAllowInsecure(true) + .setUserBindSearchPattern("uid=${USER},ou=org,dc=test,dc=com") + .setUserBaseDistinguishedName("dc=test,dc=com") + .setGroupAuthorizationSearchPattern("&(objectClass=user)(memberOf=cn=group)(user=username)")); + + assertFailsValidation( + new LdapConfig() + .setLdapUrl("ldap://") + .setAllowInsecure(false), + "urlConfigurationValid", + "Connecting to the LDAP server without SSL enabled requires `ldap.allow-insecure=true`", + AssertTrue.class); + + assertFailsValidation(new LdapConfig().setLdapUrl("localhost"), "ldapUrl", "Invalid LDAP server URL. Expected ldap:// or ldaps://", Pattern.class); + assertFailsValidation(new LdapConfig().setLdapUrl("ldaps:/localhost"), "ldapUrl", "Invalid LDAP server URL. 
Expected ldap:// or ldaps://", Pattern.class); assertFailsValidation(new LdapConfig(), "ldapUrl", "may not be null", NotNull.class); } diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeLdapInsecure.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeLdapInsecure.java new file mode 100644 index 000000000000..27707ba70d4d --- /dev/null +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/SinglenodeLdapInsecure.java @@ -0,0 +1,54 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.tests.product.launcher.env.environment; + +import com.google.common.collect.ImmutableList; +import io.prestosql.tests.product.launcher.docker.DockerFiles; +import io.prestosql.tests.product.launcher.env.Environment; +import io.prestosql.tests.product.launcher.env.EnvironmentOptions; +import io.prestosql.tests.product.launcher.env.common.Hadoop; +import io.prestosql.tests.product.launcher.env.common.Standard; +import io.prestosql.tests.product.launcher.env.common.TestsEnvironment; +import io.prestosql.tests.product.launcher.testcontainers.PortBinder; + +import javax.inject.Inject; + +import static java.util.Objects.requireNonNull; + +@TestsEnvironment +public class SinglenodeLdapInsecure + extends AbstractSinglenodeLdap +{ + private final PortBinder portBinder; + + @Inject + public SinglenodeLdapInsecure(Standard standard, Hadoop hadoop, DockerFiles dockerFiles, PortBinder portBinder, EnvironmentOptions environmentOptions) + { + super(ImmutableList.of(standard, hadoop), dockerFiles, portBinder, environmentOptions); + this.portBinder = requireNonNull(portBinder, "portBinder is null"); + } + + @Override + protected void extendEnvironment(Environment.Builder builder) + { + super.extendEnvironment(builder); + builder.configureContainer("ldapserver", container -> portBinder.exposePort(container, 389)); + } + + @Override + protected String getPasswordAuthenticatorConfigPath() + { + return "conf/environment/singlenode-ldap-without-ssl/password-authenticator.properties"; + } +} diff --git a/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-ldap-without-ssl/password-authenticator.properties b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-ldap-without-ssl/password-authenticator.properties new file mode 100644 index 000000000000..d7b1804a9b22 --- /dev/null +++ 
b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-ldap-without-ssl/password-authenticator.properties @@ -0,0 +1,6 @@ +password-authenticator.name=ldap +ldap.url=ldap://ldapserver:389 +ldap.allow-insecure=true +ldap.user-bind-pattern=uid=${USER},ou=Asia,dc=presto,dc=testldap,dc=com +ldap.user-base-dn=ou=Asia,dc=presto,dc=testldap,dc=com +ldap.group-auth-pattern=(&(objectClass=inetOrgPerson)(uid=${USER})(memberof=cn=DefaultGroup,ou=America,dc=presto,dc=testldap,dc=com)) diff --git a/presto-product-tests/bin/product-tests-suite-6-non-generic.sh b/presto-product-tests/bin/product-tests-suite-6-non-generic.sh index 9eab67f9f34a..d678cd285382 100755 --- a/presto-product-tests/bin/product-tests-suite-6-non-generic.sh +++ b/presto-product-tests/bin/product-tests-suite-6-non-generic.sh @@ -18,6 +18,11 @@ presto-product-tests-launcher/bin/run-launcher test run \ -- -g ldap \ || suite_exit_code=1 +presto-product-tests-launcher/bin/run-launcher test run \ + --environment singlenode-ldap-insecure \ + -- -g ldap \ + || suite_exit_code=1 + presto-product-tests-launcher/bin/run-launcher test run \ --environment singlenode-ldap-referrals \ -- -g ldap \ From 9afb92b3ee4743d598510886447c45fc7a78d032 Mon Sep 17 00:00:00 2001 From: David Phillips Date: Wed, 22 Apr 2020 22:08:50 -0700 Subject: [PATCH 254/519] Use common credential class --- .../password/{file => }/Credential.java | 15 +++- .../plugin/password/file/PasswordStore.java | 1 + .../password/ldap/LdapAuthenticator.java | 76 +++---------------- 3 files changed, 24 insertions(+), 68 deletions(-) rename presto-password-authenticators/src/main/java/io/prestosql/plugin/password/{file => }/Credential.java (79%) diff --git a/presto-password-authenticators/src/main/java/io/prestosql/plugin/password/file/Credential.java b/presto-password-authenticators/src/main/java/io/prestosql/plugin/password/Credential.java similarity index 79% rename from 
presto-password-authenticators/src/main/java/io/prestosql/plugin/password/file/Credential.java rename to presto-password-authenticators/src/main/java/io/prestosql/plugin/password/Credential.java index bb26f5a9a0c8..010a72fd3e02 100644 --- a/presto-password-authenticators/src/main/java/io/prestosql/plugin/password/file/Credential.java +++ b/presto-password-authenticators/src/main/java/io/prestosql/plugin/password/Credential.java @@ -11,10 +11,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package io.prestosql.plugin.password.file; +package io.prestosql.plugin.password; import java.util.Objects; +import static com.google.common.base.MoreObjects.toStringHelper; import static java.util.Objects.requireNonNull; public final class Credential @@ -22,9 +23,9 @@ public final class Credential private final String user; private final String password; - public Credential(String username, String password) + public Credential(String user, String password) { - this.user = requireNonNull(username, "username is null"); + this.user = requireNonNull(user, "user is null"); this.password = requireNonNull(password, "password is null"); } @@ -57,4 +58,12 @@ public int hashCode() { return Objects.hash(user, password); } + + @Override + public String toString() + { + return toStringHelper(this) + .add("user", user) + .toString(); + } } diff --git a/presto-password-authenticators/src/main/java/io/prestosql/plugin/password/file/PasswordStore.java b/presto-password-authenticators/src/main/java/io/prestosql/plugin/password/file/PasswordStore.java index 9bef86b8f32d..d564b376d8d1 100644 --- a/presto-password-authenticators/src/main/java/io/prestosql/plugin/password/file/PasswordStore.java +++ b/presto-password-authenticators/src/main/java/io/prestosql/plugin/password/file/PasswordStore.java @@ -19,6 +19,7 @@ import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; import 
com.google.common.collect.ImmutableMap; +import io.prestosql.plugin.password.Credential; import io.prestosql.spi.PrestoException; import java.io.File; diff --git a/presto-password-authenticators/src/main/java/io/prestosql/plugin/password/ldap/LdapAuthenticator.java b/presto-password-authenticators/src/main/java/io/prestosql/plugin/password/ldap/LdapAuthenticator.java index 67491a625668..0f9925b3c56f 100644 --- a/presto-password-authenticators/src/main/java/io/prestosql/plugin/password/ldap/LdapAuthenticator.java +++ b/presto-password-authenticators/src/main/java/io/prestosql/plugin/password/ldap/LdapAuthenticator.java @@ -22,6 +22,7 @@ import com.google.common.collect.ImmutableMap; import com.google.common.util.concurrent.UncheckedExecutionException; import io.airlift.log.Logger; +import io.prestosql.plugin.password.Credential; import io.prestosql.spi.security.AccessDeniedException; import io.prestosql.spi.security.BasicPrincipal; import io.prestosql.spi.security.PasswordAuthenticator; @@ -36,10 +37,8 @@ import java.security.Principal; import java.util.Map; -import java.util.Objects; import java.util.Optional; -import static com.google.common.base.MoreObjects.toStringHelper; import static com.google.common.base.Preconditions.checkState; import static com.google.common.base.Throwables.throwIfInstanceOf; import static io.prestosql.plugin.password.jndi.JndiUtils.createDirContext; @@ -67,7 +66,7 @@ public class LdapAuthenticator private final Optional bindPassword; private final boolean ignoreReferrals; private final Map basicEnvironment; - private final LoadingCache authenticationCache; + private final LoadingCache authenticationCache; @Inject public LdapAuthenticator(LdapConfig ldapConfig) @@ -117,7 +116,7 @@ public LdapAuthenticator(LdapConfig ldapConfig) public Principal createAuthenticatedPrincipal(String user, String password) { try { - return authenticationCache.getUnchecked(new Credentials(user, password)); + return authenticationCache.getUnchecked(new 
Credential(user, password)); } catch (UncheckedExecutionException e) { throwIfInstanceOf(e.getCause(), AccessDeniedException.class); @@ -125,9 +124,9 @@ public Principal createAuthenticatedPrincipal(String user, String password) } } - private Principal authenticateWithUserBind(Credentials credentials) + private Principal authenticateWithUserBind(Credential credential) { - String user = credentials.getUser(); + String user = credential.getUser(); if (containsSpecialCharacters(user)) { throw new AccessDeniedException("Username contains a special LDAP character"); } @@ -135,10 +134,10 @@ private Principal authenticateWithUserBind(Credentials credentials) String userDistinguishedName = createUserDistinguishedName(user); if (groupAuthorizationSearchPattern.isPresent()) { // user password is also validated as user DN and password is used for querying LDAP - checkGroupMembership(user, userDistinguishedName, credentials.getPassword()); + checkGroupMembership(user, userDistinguishedName, credential.getPassword()); } else { - validatePassword(userDistinguishedName, credentials.getPassword()); + validatePassword(userDistinguishedName, credential.getPassword()); } log.debug("Authentication successful for user [%s]", user); } @@ -149,22 +148,22 @@ private Principal authenticateWithUserBind(Credentials credentials) return new BasicPrincipal(user); } - private Principal authenticateWithBindDistinguishedName(Credentials credentials) + private Principal authenticateWithBindDistinguishedName(Credential credential) { - String user = credentials.getUser(); + String user = credential.getUser(); if (containsSpecialCharacters(user)) { throw new AccessDeniedException("Username contains a special LDAP character"); } try { String userDistinguishedName = validateGroupMembership(user, bindDistinguishedName.get(), bindPassword.get()); - validatePassword(userDistinguishedName, credentials.getPassword()); + validatePassword(userDistinguishedName, credential.getPassword()); 
log.debug("Authentication successful for user [%s]", user); } catch (NamingException e) { log.debug(e, "Authentication failed for user [%s], %s", user, e.getMessage()); throw new RuntimeException("Authentication error"); } - return new BasicPrincipal(credentials.getUser()); + return new BasicPrincipal(credential.getUser()); } private String createUserDistinguishedName(String user) @@ -298,57 +297,4 @@ private static String replaceUser(String pattern, String user) { return pattern.replace("${USER}", user); } - - private static class Credentials - { - private final String user; - private final String password; - - private Credentials(String user, String password) - { - this.user = requireNonNull(user); - this.password = requireNonNull(password); - } - - public String getUser() - { - return user; - } - - public String getPassword() - { - return password; - } - - @Override - public boolean equals(Object o) - { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - Credentials that = (Credentials) o; - - return Objects.equals(this.user, that.user) && - Objects.equals(this.password, that.password); - } - - @Override - public int hashCode() - { - return Objects.hash(user, password); - } - - @Override - public String toString() - { - return toStringHelper(this) - .add("user", user) - .add("password", password) - .toString(); - } - } } From d3c0495806f7ffa6b4f8c2a770dd8a9c0acf2d08 Mon Sep 17 00:00:00 2001 From: David Phillips Date: Wed, 22 Apr 2020 22:17:48 -0700 Subject: [PATCH 255/519] Minor cleanup of LdapAuthenticator --- .../password/ldap/LdapAuthenticator.java | 49 ++++++++----------- 1 file changed, 21 insertions(+), 28 deletions(-) diff --git a/presto-password-authenticators/src/main/java/io/prestosql/plugin/password/ldap/LdapAuthenticator.java b/presto-password-authenticators/src/main/java/io/prestosql/plugin/password/ldap/LdapAuthenticator.java index 0f9925b3c56f..9cf6c96ada8a 100644 --- 
a/presto-password-authenticators/src/main/java/io/prestosql/plugin/password/ldap/LdapAuthenticator.java +++ b/presto-password-authenticators/src/main/java/io/prestosql/plugin/password/ldap/LdapAuthenticator.java @@ -15,7 +15,6 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.base.CharMatcher; -import com.google.common.base.VerifyException; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; @@ -39,7 +38,7 @@ import java.util.Map; import java.util.Optional; -import static com.google.common.base.Preconditions.checkState; +import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Throwables.throwIfInstanceOf; import static io.prestosql.plugin.password.jndi.JndiUtils.createDirContext; import static java.lang.String.format; @@ -64,7 +63,6 @@ public class LdapAuthenticator private final Optional userBaseDistinguishedName; private final Optional bindDistinguishedName; private final Optional bindPassword; - private final boolean ignoreReferrals; private final Map basicEnvironment; private final LoadingCache authenticationCache; @@ -77,39 +75,35 @@ public LdapAuthenticator(LdapConfig ldapConfig) this.userBaseDistinguishedName = Optional.ofNullable(ldapConfig.getUserBaseDistinguishedName()); this.bindDistinguishedName = Optional.ofNullable(ldapConfig.getBindDistingushedName()); this.bindPassword = Optional.ofNullable(ldapConfig.getBindPassword()); - this.ignoreReferrals = ldapConfig.isIgnoreReferrals(); - if (groupAuthorizationSearchPattern.isPresent()) { - checkState(userBaseDistinguishedName.isPresent(), "Base distinguished name (DN) for user is null"); - } - checkState(bindDistinguishedName.isPresent() == bindPassword.isPresent(), - "Both or none bind distinguished name and bind password must be provided"); - checkState( + checkArgument( + !groupAuthorizationSearchPattern.isPresent() || 
userBaseDistinguishedName.isPresent(), + "Base distinguished name (DN) for user must be provided"); + checkArgument( + bindDistinguishedName.isPresent() == bindPassword.isPresent(), + "Both bind distinguished name and bind password must be provided together"); + checkArgument( !bindDistinguishedName.isPresent() || groupAuthorizationSearchPattern.isPresent(), - "Group authorization search pattern must be provided when bind distinguished name is not used"); - checkState(bindDistinguishedName.isPresent() || userBindSearchPattern.isPresent(), + "Group authorization search pattern must be provided when bind distinguished name is used"); + checkArgument( + bindDistinguishedName.isPresent() || userBindSearchPattern.isPresent(), "Either user bind search pattern or bind distinguished name must be provided"); - if (ldapConfig.getLdapUrl().startsWith("ldap://")) { + if (ldapUrl.startsWith("ldap://")) { log.warn("Passwords will be sent in the clear to the LDAP server. Please consider using SSL to connect."); } - Map environment = ImmutableMap.builder() + this.basicEnvironment = ImmutableMap.builder() .put(INITIAL_CONTEXT_FACTORY, "com.sun.jndi.ldap.LdapCtxFactory") .put(PROVIDER_URL, ldapUrl) + .put(REFERRAL, ldapConfig.isIgnoreReferrals() ? "ignore" : "follow") .build(); - this.basicEnvironment = environment; - if (this.bindDistinguishedName.isPresent()) { - this.authenticationCache = CacheBuilder.newBuilder() - .expireAfterWrite(ldapConfig.getLdapCacheTtl().toMillis(), MILLISECONDS) - .build(CacheLoader.from(this::authenticateWithBindDistinguishedName)); - } - else { - this.authenticationCache = CacheBuilder.newBuilder() - .expireAfterWrite(ldapConfig.getLdapCacheTtl().toMillis(), MILLISECONDS) - .build(CacheLoader.from(this::authenticateWithUserBind)); - } + this.authenticationCache = CacheBuilder.newBuilder() + .expireAfterWrite(ldapConfig.getLdapCacheTtl().toMillis(), MILLISECONDS) + .build(CacheLoader.from(bindDistinguishedName.isPresent() + ? 
this::authenticateWithBindDistinguishedName + : this::authenticateWithUserBind)); } @Override @@ -208,7 +202,7 @@ private void checkGroupMembership(String user, String contextUserDistinguishedNa /** * Returns {@code true} when parameter contains a character that has a special meaning in * LDAP search or bind name (DN). - * + *

* Based on Preventing_LDAP_Injection_in_Java and * {@link javax.naming.ldap.Rdn#escapeValue(Object) escapeValue} method. */ @@ -252,7 +246,7 @@ private String validateGroupMembership(String user, DirContext context) private NamingEnumeration searchGroupMembership(String user, DirContext context) throws NamingException { - String userBase = userBaseDistinguishedName.orElseThrow(VerifyException::new); + String userBase = userBaseDistinguishedName.get(); String searchFilter = replaceUser(groupAuthorizationSearchPattern.get(), user); SearchControls searchControls = new SearchControls(); searchControls.setSearchScope(SearchControls.SUBTREE_SCOPE); @@ -289,7 +283,6 @@ private Map createEnvironment(String userDistinguishedName, Stri .put(SECURITY_AUTHENTICATION, "simple") .put(SECURITY_PRINCIPAL, userDistinguishedName) .put(SECURITY_CREDENTIALS, password) - .put(REFERRAL, ignoreReferrals ? "ignore" : "follow") .build(); } From a8fec994e2962e488933bc49ac14fe280082c81c Mon Sep 17 00:00:00 2001 From: David Phillips Date: Wed, 22 Apr 2020 23:22:59 -0700 Subject: [PATCH 256/519] Allow configuring trust certificate for LDAP authenticator --- presto-docs/src/main/sphinx/security/ldap.rst | 29 ++---- presto-password-authenticators/pom.xml | 5 ++ .../password/ldap/LdapAuthenticator.java | 53 ++++++++++- .../plugin/password/ldap/LdapConfig.java | 15 ++++ .../password/ldap/LdapSslSocketFactory.java | 88 +++++++++++++++++++ .../plugin/password/ldap/TestLdapConfig.java | 4 + .../password-authenticator.properties | 1 + .../password-authenticator.properties | 1 + .../password-authenticator.properties | 1 + 9 files changed, 170 insertions(+), 27 deletions(-) create mode 100644 presto-password-authenticators/src/main/java/io/prestosql/plugin/password/ldap/LdapSslSocketFactory.java diff --git a/presto-docs/src/main/sphinx/security/ldap.rst b/presto-docs/src/main/sphinx/security/ldap.rst index 4c5afc23af21..6d586d85b292 100644 --- a/presto-docs/src/main/sphinx/security/ldap.rst +++ 
b/presto-docs/src/main/sphinx/security/ldap.rst @@ -21,38 +21,17 @@ Presto Server Configuration Environment Configuration ^^^^^^^^^^^^^^^^^^^^^^^^^ -.. _ldap_server: - Secure LDAP ~~~~~~~~~~~ Presto requires Secure LDAP (LDAPS), so make sure you have TLS enabled on your LDAP server. -TLS Configuration on Presto Coordinator -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -You need to import the LDAP server's TLS certificate to the default Java -truststore of the Presto coordinator to secure TLS connection. You can use -the following example ``keytool`` command to import the certificate -``ldap_server.crt``, to the truststore on the coordinator. - -.. code-block:: none - - $ keytool -import -keystore /jre/lib/security/cacerts -trustcacerts -alias ldap_server -file ldap_server.crt - -In addition to this, access to the Presto coordinator should be -through HTTPS. You can do that by creating a :ref:`server_java_keystore` on -the coordinator. - Presto Coordinator Node Configuration ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -You must make the following changes to the environment prior to configuring the -Presto coordinator to use LDAP authentication and HTTPS. - - * :ref:`ldap_server` - * :ref:`server_java_keystore` +Access to the Presto coordinator should be through HTTPS. You can do that +by creating a :ref:`server_java_keystore` on the coordinator. You also need to make changes to the Presto configuration files. LDAP authentication is configured on the coordinator in two parts. @@ -110,6 +89,7 @@ Password authentication needs to be configured to use LDAP. 
Create an password-authenticator.name=ldap ldap.url=ldaps://ldap-server:636 + ldap.ssl-trust-certificate=/path/to/ldap_server.crt ldap.user-bind-pattern= ======================================================= ====================================================== @@ -117,6 +97,9 @@ Property Description ======================================================= ====================================================== ``ldap.url`` The url to the LDAP server. The url scheme must be ``ldaps://`` since Presto allows only Secure LDAP. +``ldap.ssl-trust-certificate`` The path to the PEM encoded trust certificate for the + LDAP server. This file should contain the LDAP + server's certificate or its certificate authority. ``ldap.user-bind-pattern`` This property can be used to specify the LDAP user bind string for password authentication. This property must contain the pattern ``${USER}``, which is diff --git a/presto-password-authenticators/pom.xml b/presto-password-authenticators/pom.xml index 2b1bae70d9f1..a9d80217e7ee 100644 --- a/presto-password-authenticators/pom.xml +++ b/presto-password-authenticators/pom.xml @@ -32,6 +32,11 @@ log + + io.airlift + security + + io.airlift units diff --git a/presto-password-authenticators/src/main/java/io/prestosql/plugin/password/ldap/LdapAuthenticator.java b/presto-password-authenticators/src/main/java/io/prestosql/plugin/password/ldap/LdapAuthenticator.java index 9cf6c96ada8a..b22b9b7cc4b1 100644 --- a/presto-password-authenticators/src/main/java/io/prestosql/plugin/password/ldap/LdapAuthenticator.java +++ b/presto-password-authenticators/src/main/java/io/prestosql/plugin/password/ldap/LdapAuthenticator.java @@ -21,7 +21,9 @@ import com.google.common.collect.ImmutableMap; import com.google.common.util.concurrent.UncheckedExecutionException; import io.airlift.log.Logger; +import io.airlift.security.pem.PemReader; import io.prestosql.plugin.password.Credential; +import io.prestosql.spi.classloader.ThreadContextClassLoader; import 
io.prestosql.spi.security.AccessDeniedException; import io.prestosql.spi.security.BasicPrincipal; import io.prestosql.spi.security.PasswordAuthenticator; @@ -33,8 +35,17 @@ import javax.naming.directory.DirContext; import javax.naming.directory.SearchControls; import javax.naming.directory.SearchResult; +import javax.net.ssl.SSLContext; +import javax.net.ssl.TrustManager; +import javax.net.ssl.TrustManagerFactory; +import javax.net.ssl.X509TrustManager; +import java.io.File; +import java.io.IOException; +import java.security.GeneralSecurityException; +import java.security.KeyStore; import java.security.Principal; +import java.util.Arrays; import java.util.Map; import java.util.Optional; @@ -65,6 +76,7 @@ public class LdapAuthenticator private final Optional bindPassword; private final Map basicEnvironment; private final LoadingCache authenticationCache; + private final Optional sslContext; @Inject public LdapAuthenticator(LdapConfig ldapConfig) @@ -104,12 +116,15 @@ public LdapAuthenticator(LdapConfig ldapConfig) .build(CacheLoader.from(bindDistinguishedName.isPresent() ? 
this::authenticateWithBindDistinguishedName : this::authenticateWithUserBind)); + + this.sslContext = Optional.ofNullable(ldapConfig.getTrustCertificate()) + .map(LdapAuthenticator::createSslContext); } @Override public Principal createAuthenticatedPrincipal(String user, String password) { - try { + try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(getClass().getClassLoader())) { return authenticationCache.getUnchecked(new Credential(user, password)); } catch (UncheckedExecutionException e) { @@ -278,16 +293,46 @@ private DirContext createUserDirContext(String userDistinguishedName, String pas private Map createEnvironment(String userDistinguishedName, String password) { - return ImmutableMap.builder() + ImmutableMap.Builder environment = ImmutableMap.builder() .putAll(basicEnvironment) .put(SECURITY_AUTHENTICATION, "simple") .put(SECURITY_PRINCIPAL, userDistinguishedName) - .put(SECURITY_CREDENTIALS, password) - .build(); + .put(SECURITY_CREDENTIALS, password); + + sslContext.ifPresent(context -> { + LdapSslSocketFactory.setSslContextForCurrentThread(context); + + // see https://docs.oracle.com/javase/jndi/tutorial/ldap/security/ssl.html + environment.put("java.naming.ldap.factory.socket", LdapSslSocketFactory.class.getName()); + }); + + return environment.build(); } private static String replaceUser(String pattern, String user) { return pattern.replace("${USER}", user); } + + private static SSLContext createSslContext(File trustCertificate) + { + try { + KeyStore trustStore = PemReader.loadTrustStore(trustCertificate); + + TrustManagerFactory trustManagerFactory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); + trustManagerFactory.init(trustStore); + + TrustManager[] trustManagers = trustManagerFactory.getTrustManagers(); + if (trustManagers.length != 1 || !(trustManagers[0] instanceof X509TrustManager)) { + throw new RuntimeException("Unexpected default trust managers:" + Arrays.toString(trustManagers)); + } + + 
SSLContext sslContext = SSLContext.getInstance("SSL"); + sslContext.init(null, trustManagers, null); + return sslContext; + } + catch (GeneralSecurityException | IOException e) { + throw new RuntimeException(e); + } + } } diff --git a/presto-password-authenticators/src/main/java/io/prestosql/plugin/password/ldap/LdapConfig.java b/presto-password-authenticators/src/main/java/io/prestosql/plugin/password/ldap/LdapConfig.java index 109559821e3d..ad6a1f7343c5 100644 --- a/presto-password-authenticators/src/main/java/io/prestosql/plugin/password/ldap/LdapConfig.java +++ b/presto-password-authenticators/src/main/java/io/prestosql/plugin/password/ldap/LdapConfig.java @@ -23,6 +23,7 @@ import javax.validation.constraints.NotNull; import javax.validation.constraints.Pattern; +import java.io.File; import java.util.concurrent.TimeUnit; import static com.google.common.base.Strings.nullToEmpty; @@ -33,6 +34,7 @@ public class LdapConfig private String ldapUrl; private boolean allowInsecure; + private File trustCertificate; private String userBindSearchPattern; private String groupAuthorizationSearchPattern; private String userBaseDistinguishedName; @@ -75,6 +77,19 @@ public boolean isUrlConfigurationValid() return nullToEmpty(ldapUrl).startsWith("ldaps://") || allowInsecure; } + public File getTrustCertificate() + { + return trustCertificate; + } + + @Config("ldap.ssl-trust-certificate") + @ConfigDescription("Path to the PEM trust certificate for the LDAP server") + public LdapConfig setTrustCertificate(File trustCertificate) + { + this.trustCertificate = trustCertificate; + return this; + } + public String getUserBindSearchPattern() { return userBindSearchPattern; diff --git a/presto-password-authenticators/src/main/java/io/prestosql/plugin/password/ldap/LdapSslSocketFactory.java b/presto-password-authenticators/src/main/java/io/prestosql/plugin/password/ldap/LdapSslSocketFactory.java new file mode 100644 index 000000000000..7bb4b0295e29 --- /dev/null +++ 
b/presto-password-authenticators/src/main/java/io/prestosql/plugin/password/ldap/LdapSslSocketFactory.java @@ -0,0 +1,88 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.plugin.password.ldap; + +import javax.net.SocketFactory; +import javax.net.ssl.SSLContext; + +import java.io.IOException; +import java.net.InetAddress; +import java.net.Socket; + +import static com.google.common.base.Preconditions.checkState; +import static java.util.Objects.requireNonNull; + +// this class must implement SocketFactory for Java 8 +public final class LdapSslSocketFactory + extends SocketFactory +{ + private static final ThreadLocal SSL_CONTEXT = new ThreadLocal<>(); + + private final SocketFactory socketFactory; + + public LdapSslSocketFactory(SocketFactory socketFactory) + { + this.socketFactory = requireNonNull(socketFactory, "socketFactory is null"); + } + + @Override + public Socket createSocket() + throws IOException + { + return socketFactory.createSocket(); + } + + @Override + public Socket createSocket(String host, int port) + throws IOException + { + return socketFactory.createSocket(host, port); + } + + @Override + public Socket createSocket(String host, int port, InetAddress localHost, int localPort) + throws IOException + { + return socketFactory.createSocket(host, port, localHost, localPort); + } + + @Override + public Socket createSocket(InetAddress host, int port) + throws IOException + { + return socketFactory.createSocket(host, 
port); + } + + @Override + public Socket createSocket(InetAddress address, int port, InetAddress localAddress, int localPort) + throws IOException + { + return socketFactory.createSocket(address, port, localAddress, localPort); + } + + // entry point per https://docs.oracle.com/javase/jndi/tutorial/ldap/security/ssl.html + @SuppressWarnings({"unused", "MethodOverridesStaticMethodOfSuperclass"}) + public static SocketFactory getDefault() + { + SSLContext sslContext = SSL_CONTEXT.get(); + checkState(sslContext != null, "SSLContext was not set"); + // TODO: simplify on Java 11 by returning the socket factory directly + return new LdapSslSocketFactory(sslContext.getSocketFactory()); + } + + public static void setSslContextForCurrentThread(SSLContext sslContext) + { + SSL_CONTEXT.set(sslContext); + } +} diff --git a/presto-password-authenticators/src/test/java/io/prestosql/plugin/password/ldap/TestLdapConfig.java b/presto-password-authenticators/src/test/java/io/prestosql/plugin/password/ldap/TestLdapConfig.java index f1cbb12f8c14..f4bb834d57f0 100644 --- a/presto-password-authenticators/src/test/java/io/prestosql/plugin/password/ldap/TestLdapConfig.java +++ b/presto-password-authenticators/src/test/java/io/prestosql/plugin/password/ldap/TestLdapConfig.java @@ -22,6 +22,7 @@ import javax.validation.constraints.NotNull; import javax.validation.constraints.Pattern; +import java.io.File; import java.util.Map; import java.util.concurrent.TimeUnit; @@ -39,6 +40,7 @@ public void testDefault() assertRecordedDefaults(recordDefaults(LdapConfig.class) .setLdapUrl(null) .setAllowInsecure(false) + .setTrustCertificate(null) .setUserBindSearchPattern(null) .setUserBaseDistinguishedName(null) .setGroupAuthorizationSearchPattern(null) @@ -54,6 +56,7 @@ public void testExplicitConfig() Map properties = new ImmutableMap.Builder() .put("ldap.url", "ldaps://localhost:636") .put("ldap.allow-insecure", "true") + .put("ldap.ssl-trust-certificate", "/trust.pem") .put("ldap.user-bind-pattern", 
"uid=${USER},ou=org,dc=test,dc=com") .put("ldap.user-base-dn", "dc=test,dc=com") .put("ldap.group-auth-pattern", "&(objectClass=user)(memberOf=cn=group)(user=username)") @@ -66,6 +69,7 @@ public void testExplicitConfig() LdapConfig expected = new LdapConfig() .setLdapUrl("ldaps://localhost:636") .setAllowInsecure(true) + .setTrustCertificate(new File("/trust.pem")) .setUserBindSearchPattern("uid=${USER},ou=org,dc=test,dc=com") .setUserBaseDistinguishedName("dc=test,dc=com") .setGroupAuthorizationSearchPattern("&(objectClass=user)(memberOf=cn=group)(user=username)") diff --git a/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-ldap-bind-dn/password-authenticator.properties b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-ldap-bind-dn/password-authenticator.properties index 41813655e89a..0daff4bd8e5b 100644 --- a/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-ldap-bind-dn/password-authenticator.properties +++ b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-ldap-bind-dn/password-authenticator.properties @@ -1,5 +1,6 @@ password-authenticator.name=ldap ldap.url=ldaps://ldapserver:636 +ldap.ssl-trust-certificate=/etc/openldap/certs/openldap-certificate.pem ldap.user-base-dn=ou=Asia,dc=presto,dc=testldap,dc=com ldap.bind-dn=cn=admin,dc=presto,dc=testldap,dc=com ldap.bind-password=admin diff --git a/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-ldap-referrals/password-authenticator.properties b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-ldap-referrals/password-authenticator.properties index 2023c1fa1c4a..18f14ed7cb7d 100644 --- 
a/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-ldap-referrals/password-authenticator.properties +++ b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-ldap-referrals/password-authenticator.properties @@ -1,5 +1,6 @@ password-authenticator.name=ldap ldap.url=ldaps://ldapserver:636 +ldap.ssl-trust-certificate=/etc/openldap/certs/openldap-certificate.pem ldap.user-bind-pattern=uid=${USER},ou=Asia,dc=presto,dc=testldap,dc=com ldap.user-base-dn=ou=World,dc=presto,dc=testldap,dc=com ldap.group-auth-pattern=(&(objectClass=inetOrgPerson)(uid=${USER})(memberof=cn=DefaultGroup,ou=America,dc=presto,dc=testldap,dc=com)) diff --git a/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-ldap/password-authenticator.properties b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-ldap/password-authenticator.properties index 5cc0104a0a28..4cf26529e3c3 100644 --- a/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-ldap/password-authenticator.properties +++ b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/conf/environment/singlenode-ldap/password-authenticator.properties @@ -1,5 +1,6 @@ password-authenticator.name=ldap ldap.url=ldaps://ldapserver:636 +ldap.ssl-trust-certificate=/etc/openldap/certs/openldap-certificate.pem ldap.user-bind-pattern=uid=${USER},ou=Asia,dc=presto,dc=testldap,dc=com ldap.user-base-dn=ou=Asia,dc=presto,dc=testldap,dc=com ldap.group-auth-pattern=(&(objectClass=inetOrgPerson)(uid=${USER})(memberof=cn=DefaultGroup,ou=America,dc=presto,dc=testldap,dc=com)) From b8db58d84bdbc6c2a2ddbc907e0cd7f98599656f Mon Sep 17 00:00:00 2001 From: Pratham Desai Date: Sun, 29 Mar 2020 22:53:06 -0700 Subject: [PATCH 257/519] Add session property for hive projection pushdown --- 
.../io/prestosql/plugin/hive/HiveConfig.java | 15 ++++++++ .../prestosql/plugin/hive/HiveMetadata.java | 5 +++ .../plugin/hive/HiveSessionProperties.java | 11 ++++++ .../prestosql/plugin/hive/TestHiveConfig.java | 7 ++-- ...stHiveProjectionPushdownIntoTableScan.java | 35 ++++++++++++++++--- 5 files changed, 67 insertions(+), 6 deletions(-) diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveConfig.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveConfig.java index edd495697e2c..207f59dfc649 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveConfig.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveConfig.java @@ -129,6 +129,8 @@ public class HiveConfig private boolean queryPartitionFilterRequired; private boolean partitionUseColumnNames; + private boolean projectionPushdownEnabled = true; + public int getMaxInitialSplits() { return maxInitialSplits; @@ -922,4 +924,17 @@ public HiveConfig setPartitionUseColumnNames(boolean partitionUseColumnNames) this.partitionUseColumnNames = partitionUseColumnNames; return this; } + + public boolean isProjectionPushdownEnabled() + { + return projectionPushdownEnabled; + } + + @Config("hive.projection-pushdown-enabled") + @ConfigDescription("Projection pushdown into hive is enabled through applyProjection") + public HiveConfig setProjectionPushdownEnabled(boolean projectionPushdownEnabled) + { + this.projectionPushdownEnabled = projectionPushdownEnabled; + return this; + } } diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveMetadata.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveMetadata.java index 95521711540e..98a043de38ea 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveMetadata.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveMetadata.java @@ -163,6 +163,7 @@ import static io.prestosql.plugin.hive.HiveSessionProperties.isCollectColumnStatisticsOnWrite; import static 
io.prestosql.plugin.hive.HiveSessionProperties.isCreateEmptyBucketFiles; import static io.prestosql.plugin.hive.HiveSessionProperties.isOptimizedMismatchedBucketCount; +import static io.prestosql.plugin.hive.HiveSessionProperties.isProjectionPushdownEnabled; import static io.prestosql.plugin.hive.HiveSessionProperties.isRespectTableFormat; import static io.prestosql.plugin.hive.HiveSessionProperties.isSortedWritingEnabled; import static io.prestosql.plugin.hive.HiveSessionProperties.isStatisticsEnabled; @@ -1968,6 +1969,10 @@ public Optional> applyProjecti List projections, Map assignments) { + if (!isProjectionPushdownEnabled(session)) { + return Optional.empty(); + } + // Create projected column representations for supported sub expressions. Simple column references and chain of // dereferences on a variable are supported right now. Set projectedExpressions = projections.stream() diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveSessionProperties.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveSessionProperties.java index 6290873581a8..128281f08194 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveSessionProperties.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveSessionProperties.java @@ -89,6 +89,7 @@ public final class HiveSessionProperties private static final String TEMPORARY_STAGING_DIRECTORY_PATH = "temporary_staging_directory_path"; private static final String IGNORE_ABSENT_PARTITIONS = "ignore_absent_partitions"; private static final String QUERY_PARTITION_FILTER_REQUIRED = "query_partition_filter_required"; + private static final String PROJECTION_PUSHDOWN_ENABLED = "projection_pushdown_enabled"; private final List> sessionProperties; @@ -357,6 +358,11 @@ public HiveSessionProperties( QUERY_PARTITION_FILTER_REQUIRED, "Require filter on partition column", hiveConfig.isQueryPartitionFilterRequired(), + false), + booleanProperty( + PROJECTION_PUSHDOWN_ENABLED, + "Projection push down 
enabled for hive", + hiveConfig.isProjectionPushdownEnabled(), false)); } @@ -613,4 +619,9 @@ public static boolean isQueryPartitionFilterRequired(ConnectorSession session) { return session.getProperty(QUERY_PARTITION_FILTER_REQUIRED, Boolean.class); } + + public static boolean isProjectionPushdownEnabled(ConnectorSession session) + { + return session.getProperty(PROJECTION_PUSHDOWN_ENABLED, Boolean.class); + } } diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveConfig.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveConfig.java index d6233251b33f..61d812bb59e8 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveConfig.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveConfig.java @@ -93,7 +93,8 @@ public void testDefaults() .setHiveTransactionHeartbeatThreads(5) .setAllowRegisterPartition(false) .setQueryPartitionFilterRequired(false) - .setPartitionUseColumnNames(false)); + .setPartitionUseColumnNames(false) + .setProjectionPushdownEnabled(true)); } @Test @@ -160,6 +161,7 @@ public void testExplicitPropertyMappings() .put("hive.allow-register-partition-procedure", "true") .put("hive.query-partition-filter-required", "true") .put("hive.partition-use-column-names", "true") + .put("hive.projection-pushdown-enabled", "false") .build(); HiveConfig expected = new HiveConfig() @@ -222,7 +224,8 @@ public void testExplicitPropertyMappings() .setHiveTransactionHeartbeatThreads(10) .setAllowRegisterPartition(true) .setQueryPartitionFilterRequired(true) - .setPartitionUseColumnNames(true); + .setPartitionUseColumnNames(true) + .setProjectionPushdownEnabled(false); assertFullMapping(properties, expected); } diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/optimizer/TestHiveProjectionPushdownIntoTableScan.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/optimizer/TestHiveProjectionPushdownIntoTableScan.java index b5554e85d372..6f1636e2ff2f 100644 --- 
a/presto-hive/src/test/java/io/prestosql/plugin/hive/optimizer/TestHiveProjectionPushdownIntoTableScan.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/optimizer/TestHiveProjectionPushdownIntoTableScan.java @@ -49,8 +49,11 @@ import static com.google.common.io.RecursiveDeleteOption.ALLOW_INSECURE; import static io.prestosql.plugin.hive.TestHiveReaderProjectionsUtil.createProjectedColumnHandle; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.any; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.expression; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.project; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.tableScan; import static io.prestosql.testing.TestingSession.testSessionBuilder; +import static java.lang.String.format; import static org.testng.Assert.assertTrue; public class TestHiveProjectionPushdownIntoTableScan @@ -89,15 +92,39 @@ protected LocalQueryRunner createLocalQueryRunner() return queryRunner; } + @Test + public void testPushdownDisabled() + { + String testTable = "test_disabled_pushdown"; + + Session session = Session.builder(getQueryRunner().getDefaultSession()) + .setCatalogSessionProperty(HIVE_CATALOG_NAME, "projection_pushdown_enabled", "false") + .build(); + + getQueryRunner().execute(format( + "CREATE TABLE %s (col0) AS" + + " SELECT cast(row(5, 6) as row(a bigint, b bigint)) AS col0 WHERE false", + testTable)); + + assertPlan( + format("SELECT col0.a expr_a, col0.b expr_b FROM %s", testTable), + session, + any( + project( + ImmutableMap.of("expr", expression("col0.a"), "expr_2", expression("col0.b")), + tableScan(testTable, ImmutableMap.of("col0", "col0"))))); + } + @Test public void testProjectionPushdown() { String testTable = "test_simple_projection_pushdown"; QualifiedObjectName completeTableName = new QualifiedObjectName(HIVE_CATALOG_NAME, SCHEMA_NAME, testTable); - String tableName = HIVE_CATALOG_NAME + "." + SCHEMA_NAME + "." 
+ testTable; - getQueryRunner().execute("CREATE TABLE " + tableName + " " + "(col0) AS" + - " SELECT cast(row(5, 6) as row(a bigint, b bigint)) as col0 where false"); + getQueryRunner().execute(format( + "CREATE TABLE %s (col0) AS" + + " SELECT cast(row(5, 6) as row(a bigint, b bigint)) AS col0 WHERE false", + testTable)); Session session = getQueryRunner().getDefaultSession(); @@ -110,7 +137,7 @@ public void testProjectionPushdown() HiveColumnHandle baseColumnHandle = (HiveColumnHandle) columns.get("col0"); assertPlan( - "SELECT col0.a expr_a, col0.b expr_b FROM " + tableName, + "SELECT col0.a expr_a, col0.b expr_b FROM " + testTable, any(tableScan( equalTo(tableHandle.get().getConnectorHandle()), TupleDomain.all(), From f5f425a6c66903046af0ccda1a4ed603e763e315 Mon Sep 17 00:00:00 2001 From: Pratham Desai Date: Tue, 21 Apr 2020 23:10:26 -0700 Subject: [PATCH 258/519] Use default session for rule application --- .../hive/optimizer/TestConnectorPushdownRulesWithHive.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/optimizer/TestConnectorPushdownRulesWithHive.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/optimizer/TestConnectorPushdownRulesWithHive.java index 6050bd143948..62742e19b30f 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/optimizer/TestConnectorPushdownRulesWithHive.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/optimizer/TestConnectorPushdownRulesWithHive.java @@ -155,7 +155,6 @@ public void testProjectionPushdown() table, ImmutableList.of(p.symbol("struct_of_int", baseType)), ImmutableMap.of(p.symbol("struct_of_int", baseType), fullColumn)))) - .withSession(HIVE_SESSION) .doesNotFire(); // Test Dereference pushdown @@ -168,7 +167,6 @@ public void testProjectionPushdown() table, ImmutableList.of(p.symbol("struct_of_int", baseType)), ImmutableMap.of(p.symbol("struct_of_int", baseType), fullColumn)))) - .withSession(HIVE_SESSION) .matches(project( 
ImmutableMap.of("expr_deref", expression(new SymbolReference("struct_of_int#a"))), tableScan( From e53af1cc4570dc1515c0850420a255a1cf73110b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=81ukasz=20Osipiuk?= Date: Thu, 23 Apr 2020 14:30:36 +0200 Subject: [PATCH 259/519] Ensure proper memory visibility in RubixConfigurationInitializer Making RubixConfigurationInitializer.cacheNotReady volatile to ensure changes made to instance variables before cacheNotReady was switched to false are visible to thread calling out to initializeConfiguration. --- .../plugin/hive/rubix/RubixConfigurationInitializer.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/rubix/RubixConfigurationInitializer.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/rubix/RubixConfigurationInitializer.java index 1b8e77178c1c..0351db0b2aca 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/rubix/RubixConfigurationInitializer.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/rubix/RubixConfigurationInitializer.java @@ -53,7 +53,7 @@ public class RubixConfigurationInitializer private final int dataTransferServerPort; // Configs below are dependent on node joining the cluster - private boolean cacheNotReady = true; + private volatile boolean cacheNotReady = true; private boolean isMaster; private HostAddress masterAddress; private String nodeAddress; From 813375d5ad4b63b12b3c9822b4dbaf36697ff877 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Wed, 22 Apr 2020 15:50:34 +0200 Subject: [PATCH 260/519] Qualify constructor assignments consistently --- .../tests/product/launcher/env/common/Standard.java | 6 +++--- .../tests/product/launcher/env/environment/Multinode.java | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/common/Standard.java 
b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/common/Standard.java index 3525db9d4cc9..f9af25ad51cd 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/common/Standard.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/common/Standard.java @@ -76,10 +76,10 @@ public Standard( this.dockerFiles = requireNonNull(dockerFiles, "dockerFiles is null"); this.portBinder = requireNonNull(portBinder, "portBinder is null"); requireNonNull(environmentOptions, "environmentOptions is null"); - imagesVersion = requireNonNull(environmentOptions.imagesVersion, "environmentOptions.imagesVersion is null"); - serverPackage = requireNonNull(environmentOptions.serverPackage, "environmentOptions.serverPackage is null"); + this.imagesVersion = requireNonNull(environmentOptions.imagesVersion, "environmentOptions.imagesVersion is null"); + this.serverPackage = requireNonNull(environmentOptions.serverPackage, "environmentOptions.serverPackage is null"); checkArgument(serverPackage.getName().endsWith(".tar.gz"), "Currently only server .tar.gz package is supported"); - debug = environmentOptions.debug; + this.debug = environmentOptions.debug; } @Override diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/Multinode.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/Multinode.java index 6177d58a74d1..388ca25b3aff 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/Multinode.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/Multinode.java @@ -57,8 +57,8 @@ public Multinode( super(ImmutableList.of(standard, hadoop)); this.pathResolver = requireNonNull(pathResolver, "pathResolver is null"); this.dockerFiles = requireNonNull(dockerFiles, "dockerFiles is 
null"); - imagesVersion = requireNonNull(environmentOptions.imagesVersion, "environmentOptions.imagesVersion is null"); - serverPackage = requireNonNull(environmentOptions.serverPackage, "environmentOptions.serverPackage is null"); + this.imagesVersion = requireNonNull(environmentOptions.imagesVersion, "environmentOptions.imagesVersion is null"); + this.serverPackage = requireNonNull(environmentOptions.serverPackage, "environmentOptions.serverPackage is null"); } @Override From 805b27ccd9bd522cc87bbcd303092500abb01e0e Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Wed, 22 Apr 2020 15:50:36 +0200 Subject: [PATCH 261/519] Merge util classes --- .../product/launcher/cli/EnvironmentUp.java | 4 +- .../tests/product/launcher/cli/TestRun.java | 2 +- .../{DockerUtil.java => ContainerUtil.java} | 20 +++++++++- .../product/launcher/env/Environments.java | 7 ++-- .../product/launcher/env/common/Standard.java | 2 +- .../launcher/testcontainers/PortBinder.java | 3 +- .../testcontainers/TestcontainersUtil.java | 37 ------------------- 7 files changed, 28 insertions(+), 47 deletions(-) rename presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/docker/{DockerUtil.java => ContainerUtil.java} (77%) delete mode 100644 presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/testcontainers/TestcontainersUtil.java diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/cli/EnvironmentUp.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/cli/EnvironmentUp.java index 20451a2bd98a..7199088e8456 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/cli/EnvironmentUp.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/cli/EnvironmentUp.java @@ -21,12 +21,12 @@ import io.airlift.log.Logger; import io.prestosql.tests.product.launcher.Extensions; import 
io.prestosql.tests.product.launcher.LauncherModule; +import io.prestosql.tests.product.launcher.docker.ContainerUtil; import io.prestosql.tests.product.launcher.env.Environment; import io.prestosql.tests.product.launcher.env.EnvironmentFactory; import io.prestosql.tests.product.launcher.env.EnvironmentModule; import io.prestosql.tests.product.launcher.env.EnvironmentOptions; import io.prestosql.tests.product.launcher.env.Environments; -import io.prestosql.tests.product.launcher.testcontainers.TestcontainersUtil; import org.testcontainers.DockerClientFactory; import javax.inject.Inject; @@ -132,7 +132,7 @@ private void killContainersReaperContainer() { try (DockerClient dockerClient = DockerClientFactory.lazyClient()) { log.info("Killing the testcontainers reaper container (Ryuk) so that environment can stay alive"); - TestcontainersUtil.killContainersReaperContainer(dockerClient); + ContainerUtil.killContainersReaperContainer(dockerClient); } catch (IOException e) { throw new UncheckedIOException(e); diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/cli/TestRun.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/cli/TestRun.java index a185b1fcf1bc..f810d955be26 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/cli/TestRun.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/cli/TestRun.java @@ -43,8 +43,8 @@ import static com.google.common.base.Preconditions.checkState; import static io.prestosql.tests.product.launcher.cli.Commands.runCommand; +import static io.prestosql.tests.product.launcher.docker.ContainerUtil.exposePort; import static io.prestosql.tests.product.launcher.env.common.Standard.CONTAINER_TEMPTO_PROFILE_CONFIG; -import static io.prestosql.tests.product.launcher.testcontainers.TestcontainersUtil.exposePort; import static java.util.Objects.requireNonNull; import static 
org.testcontainers.containers.BindMode.READ_ONLY; diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/docker/DockerUtil.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/docker/ContainerUtil.java similarity index 77% rename from presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/docker/DockerUtil.java rename to presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/docker/ContainerUtil.java index 5823e01faca0..76b7a5291547 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/docker/DockerUtil.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/docker/ContainerUtil.java @@ -20,13 +20,15 @@ import com.github.dockerjava.api.exception.NotFoundException; import com.github.dockerjava.api.model.Container; import com.github.dockerjava.api.model.Network; +import io.prestosql.tests.product.launcher.env.DockerContainer; +import org.testcontainers.DockerClientFactory; import java.util.List; import java.util.function.Function; -public final class DockerUtil +public final class ContainerUtil { - private DockerUtil() {} + private ContainerUtil() {} public static void killContainers(DockerClient dockerClient, Function filter) { @@ -59,4 +61,18 @@ public static void removeNetworks(DockerClient dockerClient, Function listContainersCmd.withLabelFilter(ImmutableMap.of(PRODUCT_TEST_LAUNCHER_STARTED_LABEL_NAME, PRODUCT_TEST_LAUNCHER_STARTED_LABEL_VALUE))); - DockerUtil.removeNetworks( + removeNetworks( dockerClient, listNetworksCmd -> listNetworksCmd.withNameFilter(PRODUCT_TEST_LAUNCHER_NETWORK)); } diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/common/Standard.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/common/Standard.java index f9af25ad51cd..947c00188f86 100644 --- 
a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/common/Standard.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/common/Standard.java @@ -40,8 +40,8 @@ import static com.google.common.base.MoreObjects.firstNonNull; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; +import static io.prestosql.tests.product.launcher.docker.ContainerUtil.exposePort; import static io.prestosql.tests.product.launcher.docker.DockerFiles.createTemporaryDirectoryForDocker; -import static io.prestosql.tests.product.launcher.testcontainers.TestcontainersUtil.exposePort; import static java.nio.charset.StandardCharsets.UTF_8; import static java.nio.file.Files.isRegularFile; import static java.nio.file.Files.readAllLines; diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/testcontainers/PortBinder.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/testcontainers/PortBinder.java index 078a57c9447a..929c811fe9e3 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/testcontainers/PortBinder.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/testcontainers/PortBinder.java @@ -13,6 +13,7 @@ */ package io.prestosql.tests.product.launcher.testcontainers; +import io.prestosql.tests.product.launcher.docker.ContainerUtil; import io.prestosql.tests.product.launcher.env.DockerContainer; import io.prestosql.tests.product.launcher.env.EnvironmentOptions; @@ -33,7 +34,7 @@ public PortBinder(EnvironmentOptions environmentOptions) public void exposePort(DockerContainer container, int port) { if (bindPorts) { - TestcontainersUtil.exposePort(container, port); + ContainerUtil.exposePort(container, port); } else { // Still export port, at a random free number, as certain startup checks require this. 
diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/testcontainers/TestcontainersUtil.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/testcontainers/TestcontainersUtil.java deleted file mode 100644 index 3aa74666e37b..000000000000 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/testcontainers/TestcontainersUtil.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package io.prestosql.tests.product.launcher.testcontainers; - -import com.github.dockerjava.api.DockerClient; -import io.prestosql.tests.product.launcher.env.DockerContainer; -import org.testcontainers.DockerClientFactory; - -public final class TestcontainersUtil -{ - private TestcontainersUtil() {} - - public static void killContainersReaperContainer(DockerClient dockerClient) - { - @SuppressWarnings("resource") - Void ignore = dockerClient.removeContainerCmd("testcontainers-ryuk-" + DockerClientFactory.SESSION_ID) - .withForce(true) - .exec(); - } - - public static void exposePort(DockerContainer container, int port) - { - container.addExposedPort(port); - container.withFixedExposedPort(port, port); - } -} From 7f2160d48a79930890df460cbd119f88baf96def Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Wed, 22 Apr 2020 15:50:37 +0200 Subject: [PATCH 262/519] Extract Java debugger initiation --- .../launcher/docker/ContainerUtil.java | 60 +++++++++++++++++++ .../product/launcher/env/common/Standard.java | 57 +----------------- 2 files changed, 62 insertions(+), 55 deletions(-) diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/docker/ContainerUtil.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/docker/ContainerUtil.java index 76b7a5291547..b61a2f18cd1f 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/docker/ContainerUtil.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/docker/ContainerUtil.java @@ -14,18 +14,35 @@ package io.prestosql.tests.product.launcher.docker; import com.github.dockerjava.api.DockerClient; +import com.github.dockerjava.api.command.CreateContainerCmd; import com.github.dockerjava.api.command.ListContainersCmd; import com.github.dockerjava.api.command.ListNetworksCmd; import com.github.dockerjava.api.exception.ConflictException; import 
com.github.dockerjava.api.exception.NotFoundException; +import com.github.dockerjava.api.model.AccessMode; +import com.github.dockerjava.api.model.Bind; import com.github.dockerjava.api.model.Container; import com.github.dockerjava.api.model.Network; import io.prestosql.tests.product.launcher.env.DockerContainer; import org.testcontainers.DockerClientFactory; +import java.io.IOException; +import java.io.UncheckedIOException; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.ArrayList; import java.util.List; import java.util.function.Function; +import static com.google.common.base.MoreObjects.firstNonNull; +import static com.google.common.base.Preconditions.checkState; +import static io.prestosql.tests.product.launcher.docker.DockerFiles.createTemporaryDirectoryForDocker; +import static java.nio.charset.StandardCharsets.UTF_8; +import static java.nio.file.Files.isRegularFile; +import static java.nio.file.Files.readAllLines; +import static java.nio.file.Files.write; +import static java.util.Objects.requireNonNull; + public final class ContainerUtil { private ContainerUtil() {} @@ -75,4 +92,47 @@ public static void exposePort(DockerContainer container, int port) container.addExposedPort(port); container.withFixedExposedPort(port, port); } + + public static void enableJavaDebugger(DockerContainer container, String jvmConfigPath, int debugPort) + { + requireNonNull(jvmConfigPath, "jvmConfigPath is null"); + container.withCreateContainerCmdModifier(createContainerCmd -> enableDebuggerInJvmConfig(createContainerCmd, jvmConfigPath, debugPort)); + exposePort(container, debugPort); + } + + private static void enableDebuggerInJvmConfig(CreateContainerCmd createContainerCmd, String jvmConfigPath, int debugPort) + { + try { + Bind[] binds = firstNonNull(createContainerCmd.getBinds(), new Bind[0]); + boolean found = false; + + // Last bind wins, so we can find the last one only + for (int bindIndex = binds.length - 1; bindIndex >= 0; bindIndex--) { + 
Bind bind = binds[bindIndex]; + if (!bind.getVolume().getPath().equals(jvmConfigPath)) { + continue; + } + + Path hostJvmConfig = Paths.get(bind.getPath()); + checkState(isRegularFile(hostJvmConfig), "Bind for %s is not a file", jvmConfigPath); + + Path temporaryDirectory = createTemporaryDirectoryForDocker(); + Path newJvmConfig = temporaryDirectory.resolve("jvm.config"); + + List jvmOptions = new ArrayList<>(readAllLines(hostJvmConfig, UTF_8)); + jvmOptions.add("-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=0.0.0.0:" + debugPort); + write(newJvmConfig, jvmOptions, UTF_8); + + binds[bindIndex] = new Bind(newJvmConfig.toString(), bind.getVolume(), AccessMode.ro, bind.getSecMode(), bind.getNoCopy(), bind.getPropagationMode()); + found = true; + break; + } + + checkState(found, "Could not find %s bind", jvmConfigPath); + createContainerCmd.withBinds(binds); + } + catch (IOException e) { + throw new UncheckedIOException(e); + } + } } diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/common/Standard.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/common/Standard.java index 947c00188f86..765c31695a4a 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/common/Standard.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/common/Standard.java @@ -13,9 +13,6 @@ */ package io.prestosql.tests.product.launcher.env.common; -import com.github.dockerjava.api.command.CreateContainerCmd; -import com.github.dockerjava.api.model.AccessMode; -import com.github.dockerjava.api.model.Bind; import io.prestosql.tests.product.launcher.PathResolver; import io.prestosql.tests.product.launcher.docker.DockerFiles; import io.prestosql.tests.product.launcher.env.DockerContainer; @@ -29,23 +26,10 @@ import javax.inject.Inject; import java.io.File; -import java.io.IOException; -import 
java.io.UncheckedIOException; -import java.nio.file.Path; -import java.nio.file.Paths; import java.time.Duration; -import java.util.ArrayList; -import java.util.List; -import static com.google.common.base.MoreObjects.firstNonNull; import static com.google.common.base.Preconditions.checkArgument; -import static com.google.common.base.Preconditions.checkState; -import static io.prestosql.tests.product.launcher.docker.ContainerUtil.exposePort; -import static io.prestosql.tests.product.launcher.docker.DockerFiles.createTemporaryDirectoryForDocker; -import static java.nio.charset.StandardCharsets.UTF_8; -import static java.nio.file.Files.isRegularFile; -import static java.nio.file.Files.readAllLines; -import static java.nio.file.Files.write; +import static io.prestosql.tests.product.launcher.docker.ContainerUtil.enableJavaDebugger; import static java.util.Objects.requireNonNull; import static org.testcontainers.containers.BindMode.READ_ONLY; @@ -99,8 +83,7 @@ private DockerContainer createPrestoMaster() portBinder.exposePort(container, 8080); // Presto default port if (debug) { - container.withCreateContainerCmdModifier(this::enableDebuggerInJvmConfig); - exposePort(container, 5005); // debug port + enableJavaDebugger(container, CONTAINER_PRESTO_JVM_CONFIG, 5005); // debug port } return container; @@ -130,40 +113,4 @@ public static DockerContainer createPrestoContainer(DockerFiles dockerFiles, Pat .waitingFor(Wait.forLogMessage(".*======== SERVER STARTED ========.*", 1)) .withStartupTimeout(Duration.ofMinutes(5)); } - - private void enableDebuggerInJvmConfig(CreateContainerCmd createContainerCmd) - { - try { - Bind[] binds = firstNonNull(createContainerCmd.getBinds(), new Bind[0]); - boolean found = false; - - // Last bind wins, so we can find the last one only - for (int bindIndex = binds.length - 1; bindIndex >= 0; bindIndex--) { - Bind bind = binds[bindIndex]; - if (!bind.getVolume().getPath().equals(CONTAINER_PRESTO_JVM_CONFIG)) { - continue; - } - - Path 
hostJvmConfig = Paths.get(bind.getPath()); - checkState(isRegularFile(hostJvmConfig), "Bind for %s is not a file", CONTAINER_PRESTO_JVM_CONFIG); - - Path temporaryDirectory = createTemporaryDirectoryForDocker(); - Path newJvmConfig = temporaryDirectory.resolve("jvm.config"); - - List jvmOptions = new ArrayList<>(readAllLines(hostJvmConfig, UTF_8)); - jvmOptions.add("-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=0.0.0.0:5005"); - write(newJvmConfig, jvmOptions, UTF_8); - - binds[bindIndex] = new Bind(newJvmConfig.toString(), bind.getVolume(), AccessMode.ro, bind.getSecMode(), bind.getNoCopy(), bind.getPropagationMode()); - found = true; - break; - } - - checkState(found, "Could not find %s bind", CONTAINER_PRESTO_JVM_CONFIG); - createContainerCmd.withBinds(binds); - } - catch (IOException e) { - throw new UncheckedIOException(e); - } - } } From 8076e0a34d3c0a42ae23ef2c4752267a3e19c805 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Wed, 22 Apr 2020 15:50:38 +0200 Subject: [PATCH 263/519] Attach debugger to multinode's worker --- .../tests/product/launcher/env/environment/Multinode.java | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/Multinode.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/Multinode.java index 388ca25b3aff..98d8ccd36f80 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/Multinode.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/environment/Multinode.java @@ -28,6 +28,7 @@ import java.io.File; +import static io.prestosql.tests.product.launcher.docker.ContainerUtil.enableJavaDebugger; import static io.prestosql.tests.product.launcher.env.common.Hadoop.CONTAINER_PRESTO_HIVE_PROPERTIES; import static 
io.prestosql.tests.product.launcher.env.common.Hadoop.CONTAINER_PRESTO_ICEBERG_PROPERTIES; import static io.prestosql.tests.product.launcher.env.common.Standard.CONTAINER_PRESTO_CONFIG_PROPERTIES; @@ -45,6 +46,7 @@ public final class Multinode private final String imagesVersion; private final File serverPackage; + private final boolean debug; @Inject public Multinode( @@ -59,6 +61,7 @@ public Multinode( this.dockerFiles = requireNonNull(dockerFiles, "dockerFiles is null"); this.imagesVersion = requireNonNull(environmentOptions.imagesVersion, "environmentOptions.imagesVersion is null"); this.serverPackage = requireNonNull(environmentOptions.serverPackage, "environmentOptions.serverPackage is null"); + this.debug = environmentOptions.debug; } @Override @@ -82,6 +85,10 @@ private DockerContainer createPrestoWorker() .withFileSystemBind(dockerFiles.getDockerFilesHostPath("common/hadoop/hive.properties"), CONTAINER_PRESTO_HIVE_PROPERTIES, READ_ONLY) .withFileSystemBind(dockerFiles.getDockerFilesHostPath("common/hadoop/iceberg.properties"), CONTAINER_PRESTO_ICEBERG_PROPERTIES, READ_ONLY); + if (debug) { + enableJavaDebugger(container, CONTAINER_PRESTO_JVM_CONFIG, 5008); // debug port + } + return container; } } From 1910e8cf9e25fdc5b9c42b6dab9e12f6807483d2 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Thu, 23 Apr 2020 22:07:50 +0200 Subject: [PATCH 264/519] Add kill switch for LogTestDurationListener --- .../tests/LogTestDurationListener.java | 28 +++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/presto-main/src/test/java/io/prestosql/tests/LogTestDurationListener.java b/presto-main/src/test/java/io/prestosql/tests/LogTestDurationListener.java index ea5ed4d8ef37..49cf8a8e21f0 100644 --- a/presto-main/src/test/java/io/prestosql/tests/LogTestDurationListener.java +++ b/presto-main/src/test/java/io/prestosql/tests/LogTestDurationListener.java @@ -48,6 +48,10 @@ public class LogTestDurationListener { private static final Logger LOG = 
Logger.get(LogTestDurationListener.class); + // LogTestDurationListener does not support concurrent invocations of same test method + // (@Test(threadPoolSize=n)), so we need kill switch for local development purposes. + private static final boolean enabled = Boolean.getBoolean("LogTestDurationListener.enabled"); + private static final Duration SINGLE_TEST_LOGGING_THRESHOLD = Duration.valueOf("30s"); private static final Duration CLASS_LOGGING_THRESHOLD = Duration.valueOf("1m"); // Must be below Travis "no output" timeout (10m). E.g. TestElasticsearchIntegrationSmokeTest is known to take ~5-6m. @@ -70,6 +74,10 @@ public LogTestDurationListener() @Override public synchronized void onExecutionStart() { + if (!enabled) { + return; + } + resetHangMonitor(); finished.set(false); if (monitorHangTask == null) { @@ -80,6 +88,10 @@ public synchronized void onExecutionStart() @Override public synchronized void onExecutionFinish() { + if (!enabled) { + return; + } + resetHangMonitor(); finished.set(true); // do not stop hang task so notification of hung test JVM will fire @@ -133,12 +145,20 @@ private void resetHangMonitor() @Override public void onBeforeClass(ITestClass testClass) { + if (!enabled) { + return; + } + beginTest(getName(testClass)); } @Override public void onAfterClass(ITestClass testClass) { + if (!enabled) { + return; + } + String name = getName(testClass); Duration duration = endTest(name); if (duration.compareTo(CLASS_LOGGING_THRESHOLD) > 0) { @@ -149,12 +169,20 @@ public void onAfterClass(ITestClass testClass) @Override public void beforeInvocation(IInvokedMethod method, ITestResult testResult) { + if (!enabled) { + return; + } + beginTest(getName(method)); } @Override public void afterInvocation(IInvokedMethod method, ITestResult testResult) { + if (!enabled) { + return; + } + String name = getName(method); Duration duration = endTest(name); if (duration.compareTo(SINGLE_TEST_LOGGING_THRESHOLD) > 0) { From 941dd5f1b11ce5613e4a0d22488035bb0791fe76 Mon Sep 
17 00:00:00 2001 From: Manfred Moser Date: Thu, 23 Apr 2020 11:48:07 -0700 Subject: [PATCH 265/519] Add FQDN configuration property --- presto-docs/src/main/sphinx/security/server.rst | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/presto-docs/src/main/sphinx/security/server.rst b/presto-docs/src/main/sphinx/security/server.rst index 31bd76d68463..2314e869b715 100644 --- a/presto-docs/src/main/sphinx/security/server.rst +++ b/presto-docs/src/main/sphinx/security/server.rst @@ -2,9 +2,9 @@ Coordinator Kerberos Authentication =================================== -The Presto coordinator can be configured to enable Kerberos authentication over -HTTPS for clients, such as the :doc:`Presto CLI `, or the -JDBC and ODBC drivers. +Presto can be configured to enable Kerberos authentication over HTTPS for +clients, such as the :doc:`Presto CLI `, or the JDBC and ODBC +drivers. To enable Kerberos authentication for Presto, configuration changes are made on the Presto coordinator. No changes are required to the worker configuration. @@ -12,7 +12,6 @@ The worker nodes continue to connect to the coordinator over unauthenticated HTTP. However, if you want to secure the communication between Presto nodes with SSL/TLS, configure :doc:`/security/internal-communication`. - Environment Configuration ------------------------- @@ -97,6 +96,8 @@ Kerberos authentication is configured in the coordinator node's http-server.https.keystore.path=/etc/presto_keystore.jks http-server.https.keystore.key=keystore_password + node.internal-address-source=FQDN + ========================================================= ====================================================== Property Description ========================================================= ====================================================== @@ -124,6 +125,9 @@ Property Description authentication is denied. Default is ``(.*)``. 
``http-server.authentication.krb5.user-mapping.file`` File containing rules for mapping user. See :doc:`/security/user-mapping` for more information. +``node.internal-address-source`` Kerberos is typically sensitive to DNS names. Setting + this property to use ``FQDN`` ensures correct + operation and usage of valid DNS host names. ========================================================= ====================================================== .. note:: From 5e2d22a3b68a4f8dfe08e50909d160c9bb2e2f77 Mon Sep 17 00:00:00 2001 From: Martin Traverso Date: Sun, 26 Apr 2020 13:33:05 -0700 Subject: [PATCH 266/519] Support LATERAL in left side of join or without join --- .../sql/planner/RelationPlanner.java | 6 ++ .../io/prestosql/sql/query/TestLateral.java | 72 +++++++++++++++++++ 2 files changed, 78 insertions(+) create mode 100644 presto-main/src/test/java/io/prestosql/sql/query/TestLateral.java diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/RelationPlanner.java b/presto-main/src/main/java/io/prestosql/sql/planner/RelationPlanner.java index 04818169a501..9eb08939f64e 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/RelationPlanner.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/RelationPlanner.java @@ -277,6 +277,12 @@ protected RelationPlan visitSampledRelation(SampledRelation node, Void context) return new RelationPlan(planNode, analysis.getScope(node), subPlan.getFieldMappings()); } + @Override + protected RelationPlan visitLateral(Lateral node, Void context) + { + return process(node.getQuery(), context); + } + @Override protected RelationPlan visitJoin(Join node, Void context) { diff --git a/presto-main/src/test/java/io/prestosql/sql/query/TestLateral.java b/presto-main/src/test/java/io/prestosql/sql/query/TestLateral.java new file mode 100644 index 000000000000..772a2be11806 --- /dev/null +++ b/presto-main/src/test/java/io/prestosql/sql/query/TestLateral.java @@ -0,0 +1,72 @@ +/* + * Licensed under the Apache License, 
Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.sql.query; + +import org.testng.annotations.AfterClass; +import org.testng.annotations.BeforeClass; +import org.testng.annotations.Test; + +public class TestLateral +{ + private QueryAssertions assertions; + + @BeforeClass + public void init() + { + assertions = new QueryAssertions(); + } + + @AfterClass(alwaysRun = true) + public void teardown() + { + assertions.close(); + assertions = null; + } + + @Test + public void testUncorrelatedLateral() + { + assertions.assertQuery( + "SELECT * FROM LATERAL (VALUES 1, 2, 3)", + "VALUES 1, 2, 3"); + + assertions.assertQuery( + "SELECT * FROM LATERAL (VALUES 1), (VALUES 'a')", + "VALUES (1, 'a')"); + + assertions.assertQuery( + "SELECT * FROM LATERAL (VALUES 1) CROSS JOIN (VALUES 'a')", + "VALUES (1, 'a')"); + + assertions.assertQuery( + "SELECT * FROM LATERAL (VALUES 1) t(a)", + "VALUES 1"); + + // The nested LATERAL is uncorrelated with respect to the subquery it belongs to. 
The column comes + // from the outer query + assertions.assertQuery( + "SELECT * FROM (VALUES 1) t(a), LATERAL (SELECT * FROM LATERAL (SELECT a))", + "VALUES (1, 1)"); + + assertions.assertQuery( + "SELECT (SELECT * FROM LATERAL (SELECT a)) FROM (VALUES 1) t(a)", + "VALUES 1"); + } + + @Test + public void testNotInScope() + { + assertions.assertFails("SELECT * FROM (VALUES 1) t(a), (SELECT * FROM LATERAL (SELECT a))", "line 1:63: Column 'a' cannot be resolved"); + } +} From a8f1c6f361fa12301ca2c18c6135064a48cccb8a Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Mon, 27 Apr 2020 14:12:45 +0200 Subject: [PATCH 267/519] Fix visitLateral in DefaultTraversalVisitor Make it somewhat more consistent with other methods. Actually `DefaultTraversalVisitor` did - return null - or return result of sub-process (in some cases where only one thing to visit) - call to super in this one case --- .../java/io/prestosql/sql/tree/DefaultTraversalVisitor.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/presto-parser/src/main/java/io/prestosql/sql/tree/DefaultTraversalVisitor.java b/presto-parser/src/main/java/io/prestosql/sql/tree/DefaultTraversalVisitor.java index 6275d96d7eb2..951fe650bca8 100644 --- a/presto-parser/src/main/java/io/prestosql/sql/tree/DefaultTraversalVisitor.java +++ b/presto-parser/src/main/java/io/prestosql/sql/tree/DefaultTraversalVisitor.java @@ -642,6 +642,6 @@ protected R visitLateral(Lateral node, C context) { process(node.getQuery(), context); - return super.visitLateral(node, context); + return null; } } From 2de93cca7abc7ebcb208154cfddeecd94c7d8c71 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Mon, 27 Apr 2020 14:12:46 +0200 Subject: [PATCH 268/519] Avoid DefaultTraversalVisitor in StatementAnalyzer `DefaultTraversalVisitor` does not provide useful implementation. 
Also, if we used some behavior inherited from `DefaultTraversalVisitor`, it would cause a `NullPointerException` in our `process` method were we assume `super.process` does not return `null` scope`. --- .../sql/analyzer/StatementAnalyzer.java | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/sql/analyzer/StatementAnalyzer.java b/presto-main/src/main/java/io/prestosql/sql/analyzer/StatementAnalyzer.java index 242d62b11838..20419e3dc7ef 100644 --- a/presto-main/src/main/java/io/prestosql/sql/analyzer/StatementAnalyzer.java +++ b/presto-main/src/main/java/io/prestosql/sql/analyzer/StatementAnalyzer.java @@ -68,6 +68,7 @@ import io.prestosql.sql.tree.AliasedRelation; import io.prestosql.sql.tree.AllColumns; import io.prestosql.sql.tree.Analyze; +import io.prestosql.sql.tree.AstVisitor; import io.prestosql.sql.tree.Call; import io.prestosql.sql.tree.Comment; import io.prestosql.sql.tree.Commit; @@ -77,7 +78,6 @@ import io.prestosql.sql.tree.CreateView; import io.prestosql.sql.tree.Cube; import io.prestosql.sql.tree.Deallocate; -import io.prestosql.sql.tree.DefaultTraversalVisitor; import io.prestosql.sql.tree.Delete; import io.prestosql.sql.tree.DereferenceExpression; import io.prestosql.sql.tree.DropColumn; @@ -141,6 +141,7 @@ import io.prestosql.sql.tree.SortItem; import io.prestosql.sql.tree.StartTransaction; import io.prestosql.sql.tree.Statement; +import io.prestosql.sql.tree.SubqueryExpression; import io.prestosql.sql.tree.SubscriptExpression; import io.prestosql.sql.tree.Table; import io.prestosql.sql.tree.TableSubquery; @@ -292,7 +293,7 @@ public Scope analyze(Node node, Optional outerQueryScope) * (if provided) as ancestor. 
*/ private class Visitor - extends DefaultTraversalVisitor> + extends AstVisitor> { private final Optional outerQueryScope; private final WarningCollector warningCollector; @@ -319,6 +320,12 @@ private Scope process(Node node, Scope scope) return process(node, Optional.of(scope)); } + @Override + protected Scope visitNode(Node node, Optional context) + { + throw new IllegalStateException("Unsupported node type: " + node.getClass().getName()); + } + @Override protected Scope visitUse(Use node, Optional scope) { @@ -1289,6 +1296,12 @@ protected Scope visitQuerySpecification(QuerySpecification node, Optional return outputScope; } + @Override + protected Scope visitSubqueryExpression(SubqueryExpression node, Optional context) + { + return process(node.getQuery(), context); + } + @Override protected Scope visitSetOperation(SetOperation node, Optional scope) { From 778a651a60b9b3226dd53c757434bdb1bf0ded22 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Mon, 27 Apr 2020 14:12:48 +0200 Subject: [PATCH 269/519] Avoid DefaultTraversalVisitor in RelationPlanner `DefaultTraversalVisitor` does not provide useful implementation. Also, if we used some behavior inherited from `DefaultTraversalVisitor`, it would result in `null` `RelationPlan`, and subsequent `NullPointerException`. `NullPointerException` in our `process` method were we assume `super.process` does not return `null` scope`. 
--- .../prestosql/sql/planner/RelationPlanner.java | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/RelationPlanner.java b/presto-main/src/main/java/io/prestosql/sql/planner/RelationPlanner.java index 9eb08939f64e..e4c97f2aa667 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/RelationPlanner.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/RelationPlanner.java @@ -48,10 +48,10 @@ import io.prestosql.sql.planner.plan.UnnestNode; import io.prestosql.sql.planner.plan.ValuesNode; import io.prestosql.sql.tree.AliasedRelation; +import io.prestosql.sql.tree.AstVisitor; import io.prestosql.sql.tree.Cast; import io.prestosql.sql.tree.CoalesceExpression; import io.prestosql.sql.tree.ComparisonExpression; -import io.prestosql.sql.tree.DefaultTraversalVisitor; import io.prestosql.sql.tree.Except; import io.prestosql.sql.tree.Expression; import io.prestosql.sql.tree.ExpressionTreeRewriter; @@ -73,6 +73,7 @@ import io.prestosql.sql.tree.Row; import io.prestosql.sql.tree.SampledRelation; import io.prestosql.sql.tree.SetOperation; +import io.prestosql.sql.tree.SubqueryExpression; import io.prestosql.sql.tree.SymbolReference; import io.prestosql.sql.tree.Table; import io.prestosql.sql.tree.TableSubquery; @@ -106,7 +107,7 @@ import static java.util.Objects.requireNonNull; class RelationPlanner - extends DefaultTraversalVisitor + extends AstVisitor { private final Analysis analysis; private final SymbolAllocator symbolAllocator; @@ -142,6 +143,12 @@ class RelationPlanner this.subqueryPlanner = new SubqueryPlanner(analysis, symbolAllocator, idAllocator, lambdaDeclarationToSymbolMap, metadata, session); } + @Override + protected RelationPlan visitNode(Node node, Void context) + { + throw new IllegalStateException("Unsupported node type: " + node.getClass().getName()); + } + @Override protected RelationPlan visitTable(Table node, Void context) { @@ -744,6 +751,12 @@ 
protected RelationPlan visitQuerySpecification(QuerySpecification node, Void con .plan(node); } + @Override + protected RelationPlan visitSubqueryExpression(SubqueryExpression node, Void context) + { + return process(node.getQuery(), context); + } + @Override protected RelationPlan visitValues(Values node, Void context) { From e094af16e5de2c12c40f58d9f797f861c0a96812 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Mon, 27 Apr 2020 14:12:49 +0200 Subject: [PATCH 270/519] Move non-contextual state from Context to field --- .../sql/planner/SubqueryPlanner.java | 24 ++++++++++++------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/SubqueryPlanner.java b/presto-main/src/main/java/io/prestosql/sql/planner/SubqueryPlanner.java index cb7f0d0e8962..b2b1e8f31256 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/SubqueryPlanner.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/SubqueryPlanner.java @@ -512,9 +512,9 @@ private Set> extractOuterColumnReferences(PlanNode planNode) private static Set> extractColumnReferences(Expression expression, Set> columnReferences) { - ImmutableSet.Builder> expressionColumnReferences = ImmutableSet.builder(); - new ColumnReferencesExtractor(columnReferences).process(expression, expressionColumnReferences); - return expressionColumnReferences.build(); + ColumnReferencesExtractor columnReferencesExtractor = new ColumnReferencesExtractor(columnReferences); + columnReferencesExtractor.process(expression, null); + return columnReferencesExtractor.getFound(); } private PlanNode replaceExpressionsWithSymbols(PlanNode planNode, Map, Expression> mapping) @@ -527,31 +527,37 @@ private PlanNode replaceExpressionsWithSymbols(PlanNode planNode, Map>> + extends DefaultExpressionTraversalVisitor { private final Set> columnReferences; + private final ImmutableSet.Builder> found = ImmutableSet.builder(); private ColumnReferencesExtractor(Set> columnReferences) { 
this.columnReferences = requireNonNull(columnReferences, "columnReferences is null"); } + public Set> getFound() + { + return found.build(); + } + @Override - protected Void visitDereferenceExpression(DereferenceExpression node, ImmutableSet.Builder> builder) + protected Void visitDereferenceExpression(DereferenceExpression node, Void context) { if (columnReferences.contains(NodeRef.of(node))) { - builder.add(NodeRef.of(node)); + found.add(NodeRef.of(node)); } else { - process(node.getBase(), builder); + process(node.getBase(), context); } return null; } @Override - protected Void visitIdentifier(Identifier node, ImmutableSet.Builder> builder) + protected Void visitIdentifier(Identifier node, Void context) { - builder.add(NodeRef.of(node)); + found.add(NodeRef.of(node)); return null; } } From 2b0289492859106f8814bfff9f5799bfc3326217 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Mon, 27 Apr 2020 14:12:50 +0200 Subject: [PATCH 271/519] Fix incorrect visitor result type The visitor always returned `null` result, and using `Node` as result type was misleading. 
--- .../io/prestosql/sql/analyzer/ExpressionTreeUtils.java | 8 ++++---- .../java/io/prestosql/sql/testing/TreeAssertions.java | 8 ++++---- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/sql/analyzer/ExpressionTreeUtils.java b/presto-main/src/main/java/io/prestosql/sql/analyzer/ExpressionTreeUtils.java index 3963d2a4ed06..5da7ea571e8e 100644 --- a/presto-main/src/main/java/io/prestosql/sql/analyzer/ExpressionTreeUtils.java +++ b/presto-main/src/main/java/io/prestosql/sql/analyzer/ExpressionTreeUtils.java @@ -86,14 +86,14 @@ private static List extractExpressions( private static List linearizeNodes(Node node) { ImmutableList.Builder nodes = ImmutableList.builder(); - new DefaultExpressionTraversalVisitor() + new DefaultExpressionTraversalVisitor() { @Override - public Node process(Node node, Void context) + public Void process(Node node, Void context) { - Node result = super.process(node, context); + super.process(node, context); nodes.add(node); - return result; + return null; } }.process(node, null); return nodes.build(); diff --git a/presto-parser/src/main/java/io/prestosql/sql/testing/TreeAssertions.java b/presto-parser/src/main/java/io/prestosql/sql/testing/TreeAssertions.java index a3cdf927c9f4..832efc1309f2 100644 --- a/presto-parser/src/main/java/io/prestosql/sql/testing/TreeAssertions.java +++ b/presto-parser/src/main/java/io/prestosql/sql/testing/TreeAssertions.java @@ -70,14 +70,14 @@ private static Statement parseFormatted(SqlParser sqlParser, ParsingOptions pars private static List linearizeTree(Node tree) { ImmutableList.Builder nodes = ImmutableList.builder(); - new DefaultTraversalVisitor() + new DefaultTraversalVisitor() { @Override - public Node process(Node node, @Nullable Void context) + public Void process(Node node, @Nullable Void context) { - Node result = super.process(node, context); + super.process(node, context); nodes.add(node); - return result; + return null; } }.process(tree, null); 
return nodes.build(); From 1ce21bcb9c8253f7cb766c5fd7817276f982bd20 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Mon, 27 Apr 2020 14:12:51 +0200 Subject: [PATCH 272/519] Enforce DefaultTraversalVisitor Void result Default implementation provided by `DefaultTraversalVisitor` is suitable for `Void` result. Moreover, `DefaultTraversalVisitor` is useful base class for when traversing a tree (a `Void` result), not when transforming it (non-`Void` result). --- .../execution/ParameterExtractor.java | 2 +- .../analyzer/ConstantExpressionVerifier.java | 2 +- .../sql/analyzer/ExpressionTreeUtils.java | 2 +- .../FreeLambdaReferenceExtractor.java | 2 +- .../sql/analyzer/WindowFunctionValidator.java | 2 +- .../sql/planner/DeterminismEvaluator.java | 2 +- .../sql/planner/InputReferenceExtractor.java | 2 +- .../sql/planner/NullabilityAnalyzer.java | 2 +- .../sql/planner/SubqueryPlanner.java | 2 +- .../sql/planner/SymbolsExtractor.java | 4 +- .../planner/sanity/AllFunctionsResolved.java | 2 +- .../NoSubqueryExpressionLeftChecker.java | 2 +- .../sql/planner/sanity/SugarFreeChecker.java | 2 +- .../operator/scalar/FunctionAssertions.java | 2 +- .../java/io/prestosql/sql/TreePrinter.java | 2 +- .../prestosql/sql/testing/TreeAssertions.java | 2 +- .../DefaultExpressionTraversalVisitor.java | 6 +- .../sql/tree/DefaultTraversalVisitor.java | 171 ++++++++++-------- 18 files changed, 111 insertions(+), 100 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/execution/ParameterExtractor.java b/presto-main/src/main/java/io/prestosql/execution/ParameterExtractor.java index 3b9a9bd74796..90f21338de8a 100644 --- a/presto-main/src/main/java/io/prestosql/execution/ParameterExtractor.java +++ b/presto-main/src/main/java/io/prestosql/execution/ParameterExtractor.java @@ -39,7 +39,7 @@ public static List getParameters(Statement statement) } private static class ParameterExtractingVisitor - extends DefaultTraversalVisitor + extends DefaultTraversalVisitor { private final List 
parameters = new ArrayList<>(); diff --git a/presto-main/src/main/java/io/prestosql/sql/analyzer/ConstantExpressionVerifier.java b/presto-main/src/main/java/io/prestosql/sql/analyzer/ConstantExpressionVerifier.java index e693662de87f..8398605e04c8 100644 --- a/presto-main/src/main/java/io/prestosql/sql/analyzer/ConstantExpressionVerifier.java +++ b/presto-main/src/main/java/io/prestosql/sql/analyzer/ConstantExpressionVerifier.java @@ -35,7 +35,7 @@ public static void verifyExpressionIsConstant(Set> columnRef } private static class ConstantExpressionVerifierVisitor - extends DefaultTraversalVisitor + extends DefaultTraversalVisitor { private final Set> columnReferences; private final Expression expression; diff --git a/presto-main/src/main/java/io/prestosql/sql/analyzer/ExpressionTreeUtils.java b/presto-main/src/main/java/io/prestosql/sql/analyzer/ExpressionTreeUtils.java index 5da7ea571e8e..72d8d0824d5b 100644 --- a/presto-main/src/main/java/io/prestosql/sql/analyzer/ExpressionTreeUtils.java +++ b/presto-main/src/main/java/io/prestosql/sql/analyzer/ExpressionTreeUtils.java @@ -86,7 +86,7 @@ private static List extractExpressions( private static List linearizeNodes(Node node) { ImmutableList.Builder nodes = ImmutableList.builder(); - new DefaultExpressionTraversalVisitor() + new DefaultExpressionTraversalVisitor() { @Override public Void process(Node node, Void context) diff --git a/presto-main/src/main/java/io/prestosql/sql/analyzer/FreeLambdaReferenceExtractor.java b/presto-main/src/main/java/io/prestosql/sql/analyzer/FreeLambdaReferenceExtractor.java index af4d92bc09aa..7a1e06abcd98 100644 --- a/presto-main/src/main/java/io/prestosql/sql/analyzer/FreeLambdaReferenceExtractor.java +++ b/presto-main/src/main/java/io/prestosql/sql/analyzer/FreeLambdaReferenceExtractor.java @@ -49,7 +49,7 @@ public static List getFreeReferencesToLambdaArgument(Node node, Anal } private static class Visitor - extends DefaultExpressionTraversalVisitor> + extends 
DefaultExpressionTraversalVisitor> { private final Analysis analysis; private final ImmutableList.Builder freeReferencesToLambdaArgument = ImmutableList.builder(); diff --git a/presto-main/src/main/java/io/prestosql/sql/analyzer/WindowFunctionValidator.java b/presto-main/src/main/java/io/prestosql/sql/analyzer/WindowFunctionValidator.java index 4d025e013ac9..17694ff15bcb 100644 --- a/presto-main/src/main/java/io/prestosql/sql/analyzer/WindowFunctionValidator.java +++ b/presto-main/src/main/java/io/prestosql/sql/analyzer/WindowFunctionValidator.java @@ -24,7 +24,7 @@ import static java.util.Objects.requireNonNull; class WindowFunctionValidator - extends DefaultExpressionTraversalVisitor + extends DefaultExpressionTraversalVisitor { private final Metadata metadata; diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/DeterminismEvaluator.java b/presto-main/src/main/java/io/prestosql/sql/planner/DeterminismEvaluator.java index 0803b75217bf..acdf08880b0e 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/DeterminismEvaluator.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/DeterminismEvaluator.java @@ -53,7 +53,7 @@ public static boolean isDeterministic(Expression expression, Function + extends DefaultExpressionTraversalVisitor { private final Function functionMetadataSupplier; diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/InputReferenceExtractor.java b/presto-main/src/main/java/io/prestosql/sql/planner/InputReferenceExtractor.java index 8cc07ba6ac79..e6cd096a6863 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/InputReferenceExtractor.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/InputReferenceExtractor.java @@ -20,7 +20,7 @@ import java.util.Set; public class InputReferenceExtractor - extends DefaultExpressionTraversalVisitor + extends DefaultExpressionTraversalVisitor { private final ImmutableSet.Builder inputChannels = ImmutableSet.builder(); diff --git 
a/presto-main/src/main/java/io/prestosql/sql/planner/NullabilityAnalyzer.java b/presto-main/src/main/java/io/prestosql/sql/planner/NullabilityAnalyzer.java index a6d47fa1a59a..b689de374843 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/NullabilityAnalyzer.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/NullabilityAnalyzer.java @@ -50,7 +50,7 @@ public static boolean mayReturnNullOnNonNullInput(Expression expression) } private static class Visitor - extends DefaultExpressionTraversalVisitor + extends DefaultExpressionTraversalVisitor { @Override protected Void visitCast(Cast node, AtomicBoolean result) diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/SubqueryPlanner.java b/presto-main/src/main/java/io/prestosql/sql/planner/SubqueryPlanner.java index b2b1e8f31256..c2ef027cfa13 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/SubqueryPlanner.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/SubqueryPlanner.java @@ -527,7 +527,7 @@ private PlanNode replaceExpressionsWithSymbols(PlanNode planNode, Map + extends DefaultExpressionTraversalVisitor { private final Set> columnReferences; private final ImmutableSet.Builder> found = ImmutableSet.builder(); diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/SymbolsExtractor.java b/presto-main/src/main/java/io/prestosql/sql/planner/SymbolsExtractor.java index 83376241933a..6cf3604d964f 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/SymbolsExtractor.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/SymbolsExtractor.java @@ -143,7 +143,7 @@ public static Set extractOutputSymbols(PlanNode planNode, Lookup lookup) } private static class SymbolBuilderVisitor - extends DefaultExpressionTraversalVisitor> + extends DefaultExpressionTraversalVisitor> { @Override protected Void visitSymbolReference(SymbolReference node, ImmutableList.Builder builder) @@ -154,7 +154,7 @@ protected Void visitSymbolReference(SymbolReference 
node, ImmutableList.Builder< } private static class QualifiedNameBuilderVisitor - extends DefaultTraversalVisitor> + extends DefaultTraversalVisitor> { private final Set> columnReferences; diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/sanity/AllFunctionsResolved.java b/presto-main/src/main/java/io/prestosql/sql/planner/sanity/AllFunctionsResolved.java index ab46c067fe01..faf300887c51 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/sanity/AllFunctionsResolved.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/sanity/AllFunctionsResolved.java @@ -46,7 +46,7 @@ private static void validate(Expression expression) } private static class Visitor - extends DefaultExpressionTraversalVisitor> + extends DefaultExpressionTraversalVisitor> { @Override protected Void visitFunctionCall(FunctionCall node, Builder context) diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/sanity/NoSubqueryExpressionLeftChecker.java b/presto-main/src/main/java/io/prestosql/sql/planner/sanity/NoSubqueryExpressionLeftChecker.java index 88c4e94c63b8..c6b506d336fc 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/sanity/NoSubqueryExpressionLeftChecker.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/sanity/NoSubqueryExpressionLeftChecker.java @@ -33,7 +33,7 @@ public final class NoSubqueryExpressionLeftChecker public void validate(PlanNode plan, Session session, Metadata metadata, TypeAnalyzer typeAnalyzer, TypeProvider types, WarningCollector warningCollector) { for (Expression expression : ExpressionExtractor.extractExpressions(plan)) { - new DefaultTraversalVisitor() + new DefaultTraversalVisitor() { @Override protected Void visitSubqueryExpression(SubqueryExpression node, Void context) diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/sanity/SugarFreeChecker.java b/presto-main/src/main/java/io/prestosql/sql/planner/sanity/SugarFreeChecker.java index faa27ddc195c..b83fb5eb17a7 100644 --- 
a/presto-main/src/main/java/io/prestosql/sql/planner/sanity/SugarFreeChecker.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/sanity/SugarFreeChecker.java @@ -53,7 +53,7 @@ private static void validate(Expression expression) } private static class Visitor - extends DefaultExpressionTraversalVisitor> + extends DefaultExpressionTraversalVisitor> { @Override protected Void visitExtract(Extract node, Builder context) diff --git a/presto-main/src/test/java/io/prestosql/operator/scalar/FunctionAssertions.java b/presto-main/src/test/java/io/prestosql/operator/scalar/FunctionAssertions.java index f21c51f6046a..8246a6809e52 100644 --- a/presto-main/src/test/java/io/prestosql/operator/scalar/FunctionAssertions.java +++ b/presto-main/src/test/java/io/prestosql/operator/scalar/FunctionAssertions.java @@ -679,7 +679,7 @@ private static boolean executeFilterWithNoInputColumns(Operator operator) private static boolean needsBoundValue(Expression projectionExpression) { final AtomicBoolean hasSymbolReferences = new AtomicBoolean(); - new DefaultTraversalVisitor() + new DefaultTraversalVisitor() { @Override protected Void visitSymbolReference(SymbolReference node, Void context) diff --git a/presto-parser/src/main/java/io/prestosql/sql/TreePrinter.java b/presto-parser/src/main/java/io/prestosql/sql/TreePrinter.java index 779c1b07eb93..57ae09bf0393 100644 --- a/presto-parser/src/main/java/io/prestosql/sql/TreePrinter.java +++ b/presto-parser/src/main/java/io/prestosql/sql/TreePrinter.java @@ -71,7 +71,7 @@ public TreePrinter(IdentityHashMap resolvedNameRefere public void print(Node root) { - AstVisitor printer = new DefaultTraversalVisitor() + AstVisitor printer = new DefaultTraversalVisitor() { @Override protected Void visitNode(Node node, Integer indentLevel) diff --git a/presto-parser/src/main/java/io/prestosql/sql/testing/TreeAssertions.java b/presto-parser/src/main/java/io/prestosql/sql/testing/TreeAssertions.java index 832efc1309f2..e0c7a6a04a88 100644 --- 
a/presto-parser/src/main/java/io/prestosql/sql/testing/TreeAssertions.java +++ b/presto-parser/src/main/java/io/prestosql/sql/testing/TreeAssertions.java @@ -70,7 +70,7 @@ private static Statement parseFormatted(SqlParser sqlParser, ParsingOptions pars private static List linearizeTree(Node tree) { ImmutableList.Builder nodes = ImmutableList.builder(); - new DefaultTraversalVisitor() + new DefaultTraversalVisitor() { @Override public Void process(Node node, @Nullable Void context) diff --git a/presto-parser/src/main/java/io/prestosql/sql/tree/DefaultExpressionTraversalVisitor.java b/presto-parser/src/main/java/io/prestosql/sql/tree/DefaultExpressionTraversalVisitor.java index c5191ad45b94..56c838756748 100644 --- a/presto-parser/src/main/java/io/prestosql/sql/tree/DefaultExpressionTraversalVisitor.java +++ b/presto-parser/src/main/java/io/prestosql/sql/tree/DefaultExpressionTraversalVisitor.java @@ -16,11 +16,11 @@ /** * When walking Expressions, don't traverse into SubqueryExpressions */ -public abstract class DefaultExpressionTraversalVisitor - extends DefaultTraversalVisitor +public abstract class DefaultExpressionTraversalVisitor + extends DefaultTraversalVisitor { @Override - protected R visitSubqueryExpression(SubqueryExpression node, C context) + protected Void visitSubqueryExpression(SubqueryExpression node, C context) { // Don't traverse into Subqueries within an Expression return null; diff --git a/presto-parser/src/main/java/io/prestosql/sql/tree/DefaultTraversalVisitor.java b/presto-parser/src/main/java/io/prestosql/sql/tree/DefaultTraversalVisitor.java index 951fe650bca8..16d18ad49d87 100644 --- a/presto-parser/src/main/java/io/prestosql/sql/tree/DefaultTraversalVisitor.java +++ b/presto-parser/src/main/java/io/prestosql/sql/tree/DefaultTraversalVisitor.java @@ -13,23 +13,25 @@ */ package io.prestosql.sql.tree; -public abstract class DefaultTraversalVisitor - extends AstVisitor +public abstract class DefaultTraversalVisitor + extends AstVisitor { 
@Override - protected R visitExtract(Extract node, C context) + protected Void visitExtract(Extract node, C context) { - return process(node.getExpression(), context); + process(node.getExpression(), context); + return null; } @Override - protected R visitCast(Cast node, C context) + protected Void visitCast(Cast node, C context) { - return process(node.getExpression(), context); + process(node.getExpression(), context); + return null; } @Override - protected R visitArithmeticBinary(ArithmeticBinaryExpression node, C context) + protected Void visitArithmeticBinary(ArithmeticBinaryExpression node, C context) { process(node.getLeft(), context); process(node.getRight(), context); @@ -38,7 +40,7 @@ protected R visitArithmeticBinary(ArithmeticBinaryExpression node, C context) } @Override - protected R visitBetweenPredicate(BetweenPredicate node, C context) + protected Void visitBetweenPredicate(BetweenPredicate node, C context) { process(node.getValue(), context); process(node.getMin(), context); @@ -48,7 +50,7 @@ protected R visitBetweenPredicate(BetweenPredicate node, C context) } @Override - protected R visitCoalesceExpression(CoalesceExpression node, C context) + protected Void visitCoalesceExpression(CoalesceExpression node, C context) { for (Expression operand : node.getOperands()) { process(operand, context); @@ -58,7 +60,7 @@ protected R visitCoalesceExpression(CoalesceExpression node, C context) } @Override - protected R visitAtTimeZone(AtTimeZone node, C context) + protected Void visitAtTimeZone(AtTimeZone node, C context) { process(node.getValue(), context); process(node.getTimeZone(), context); @@ -67,7 +69,7 @@ protected R visitAtTimeZone(AtTimeZone node, C context) } @Override - protected R visitArrayConstructor(ArrayConstructor node, C context) + protected Void visitArrayConstructor(ArrayConstructor node, C context) { for (Expression expression : node.getValues()) { process(expression, context); @@ -77,7 +79,7 @@ protected R 
visitArrayConstructor(ArrayConstructor node, C context) } @Override - protected R visitSubscriptExpression(SubscriptExpression node, C context) + protected Void visitSubscriptExpression(SubscriptExpression node, C context) { process(node.getBase(), context); process(node.getIndex(), context); @@ -86,7 +88,7 @@ protected R visitSubscriptExpression(SubscriptExpression node, C context) } @Override - protected R visitComparisonExpression(ComparisonExpression node, C context) + protected Void visitComparisonExpression(ComparisonExpression node, C context) { process(node.getLeft(), context); process(node.getRight(), context); @@ -95,7 +97,7 @@ protected R visitComparisonExpression(ComparisonExpression node, C context) } @Override - protected R visitQuery(Query node, C context) + protected Void visitQuery(Query node, C context) { if (node.getWith().isPresent()) { process(node.getWith().get(), context); @@ -109,7 +111,7 @@ protected R visitQuery(Query node, C context) } @Override - protected R visitWith(With node, C context) + protected Void visitWith(With node, C context) { for (WithQuery query : node.getQueries()) { process(query, context); @@ -119,13 +121,14 @@ protected R visitWith(With node, C context) } @Override - protected R visitWithQuery(WithQuery node, C context) + protected Void visitWithQuery(WithQuery node, C context) { - return process(node.getQuery(), context); + process(node.getQuery(), context); + return null; } @Override - protected R visitSelect(Select node, C context) + protected Void visitSelect(Select node, C context) { for (SelectItem item : node.getSelectItems()) { process(item, context); @@ -135,7 +138,7 @@ protected R visitSelect(Select node, C context) } @Override - protected R visitSingleColumn(SingleColumn node, C context) + protected Void visitSingleColumn(SingleColumn node, C context) { process(node.getExpression(), context); @@ -143,7 +146,7 @@ protected R visitSingleColumn(SingleColumn node, C context) } @Override - protected R 
visitAllColumns(AllColumns node, C context) + protected Void visitAllColumns(AllColumns node, C context) { node.getTarget().ifPresent(value -> process(value, context)); @@ -151,7 +154,7 @@ protected R visitAllColumns(AllColumns node, C context) } @Override - protected R visitWhenClause(WhenClause node, C context) + protected Void visitWhenClause(WhenClause node, C context) { process(node.getOperand(), context); process(node.getResult(), context); @@ -160,7 +163,7 @@ protected R visitWhenClause(WhenClause node, C context) } @Override - protected R visitInPredicate(InPredicate node, C context) + protected Void visitInPredicate(InPredicate node, C context) { process(node.getValue(), context); process(node.getValueList(), context); @@ -169,7 +172,7 @@ protected R visitInPredicate(InPredicate node, C context) } @Override - protected R visitFunctionCall(FunctionCall node, C context) + protected Void visitFunctionCall(FunctionCall node, C context) { for (Expression argument : node.getArguments()) { process(argument, context); @@ -191,7 +194,7 @@ protected R visitFunctionCall(FunctionCall node, C context) } @Override - protected R visitGroupingOperation(GroupingOperation node, C context) + protected Void visitGroupingOperation(GroupingOperation node, C context) { for (Expression columnArgument : node.getGroupingColumns()) { process(columnArgument, context); @@ -201,14 +204,14 @@ protected R visitGroupingOperation(GroupingOperation node, C context) } @Override - protected R visitDereferenceExpression(DereferenceExpression node, C context) + protected Void visitDereferenceExpression(DereferenceExpression node, C context) { process(node.getBase(), context); return null; } @Override - public R visitWindow(Window node, C context) + public Void visitWindow(Window node, C context) { for (Expression expression : node.getPartitionBy()) { process(expression, context); @@ -226,7 +229,7 @@ public R visitWindow(Window node, C context) } @Override - public R visitWindowFrame(WindowFrame 
node, C context) + public Void visitWindowFrame(WindowFrame node, C context) { process(node.getStart(), context); if (node.getEnd().isPresent()) { @@ -237,7 +240,7 @@ public R visitWindowFrame(WindowFrame node, C context) } @Override - public R visitFrameBound(FrameBound node, C context) + public Void visitFrameBound(FrameBound node, C context) { if (node.getValue().isPresent()) { process(node.getValue().get(), context); @@ -247,7 +250,7 @@ public R visitFrameBound(FrameBound node, C context) } @Override - protected R visitSimpleCaseExpression(SimpleCaseExpression node, C context) + protected Void visitSimpleCaseExpression(SimpleCaseExpression node, C context) { process(node.getOperand(), context); for (WhenClause clause : node.getWhenClauses()) { @@ -261,7 +264,7 @@ protected R visitSimpleCaseExpression(SimpleCaseExpression node, C context) } @Override - protected R visitInListExpression(InListExpression node, C context) + protected Void visitInListExpression(InListExpression node, C context) { for (Expression value : node.getValues()) { process(value, context); @@ -271,7 +274,7 @@ protected R visitInListExpression(InListExpression node, C context) } @Override - protected R visitNullIfExpression(NullIfExpression node, C context) + protected Void visitNullIfExpression(NullIfExpression node, C context) { process(node.getFirst(), context); process(node.getSecond(), context); @@ -280,7 +283,7 @@ protected R visitNullIfExpression(NullIfExpression node, C context) } @Override - protected R visitIfExpression(IfExpression node, C context) + protected Void visitIfExpression(IfExpression node, C context) { process(node.getCondition(), context); process(node.getTrueValue(), context); @@ -292,14 +295,14 @@ protected R visitIfExpression(IfExpression node, C context) } @Override - protected R visitTryExpression(TryExpression node, C context) + protected Void visitTryExpression(TryExpression node, C context) { process(node.getInnerExpression(), context); return null; } @Override 
- protected R visitBindExpression(BindExpression node, C context) + protected Void visitBindExpression(BindExpression node, C context) { for (Expression value : node.getValues()) { process(value, context); @@ -310,19 +313,21 @@ protected R visitBindExpression(BindExpression node, C context) } @Override - protected R visitArithmeticUnary(ArithmeticUnaryExpression node, C context) + protected Void visitArithmeticUnary(ArithmeticUnaryExpression node, C context) { - return process(node.getValue(), context); + process(node.getValue(), context); + return null; } @Override - protected R visitNotExpression(NotExpression node, C context) + protected Void visitNotExpression(NotExpression node, C context) { - return process(node.getValue(), context); + process(node.getValue(), context); + return null; } @Override - protected R visitSearchedCaseExpression(SearchedCaseExpression node, C context) + protected Void visitSearchedCaseExpression(SearchedCaseExpression node, C context) { for (WhenClause clause : node.getWhenClauses()) { process(clause, context); @@ -334,7 +339,7 @@ protected R visitSearchedCaseExpression(SearchedCaseExpression node, C context) } @Override - protected R visitLikePredicate(LikePredicate node, C context) + protected Void visitLikePredicate(LikePredicate node, C context) { process(node.getValue(), context); process(node.getPattern(), context); @@ -344,19 +349,21 @@ protected R visitLikePredicate(LikePredicate node, C context) } @Override - protected R visitIsNotNullPredicate(IsNotNullPredicate node, C context) + protected Void visitIsNotNullPredicate(IsNotNullPredicate node, C context) { - return process(node.getValue(), context); + process(node.getValue(), context); + return null; } @Override - protected R visitIsNullPredicate(IsNullPredicate node, C context) + protected Void visitIsNullPredicate(IsNullPredicate node, C context) { - return process(node.getValue(), context); + process(node.getValue(), context); + return null; } @Override - protected R 
visitLogicalBinaryExpression(LogicalBinaryExpression node, C context) + protected Void visitLogicalBinaryExpression(LogicalBinaryExpression node, C context) { process(node.getLeft(), context); process(node.getRight(), context); @@ -365,13 +372,14 @@ protected R visitLogicalBinaryExpression(LogicalBinaryExpression node, C context } @Override - protected R visitSubqueryExpression(SubqueryExpression node, C context) + protected Void visitSubqueryExpression(SubqueryExpression node, C context) { - return process(node.getQuery(), context); + process(node.getQuery(), context); + return null; } @Override - protected R visitOrderBy(OrderBy node, C context) + protected Void visitOrderBy(OrderBy node, C context) { for (SortItem sortItem : node.getSortItems()) { process(sortItem, context); @@ -380,13 +388,14 @@ protected R visitOrderBy(OrderBy node, C context) } @Override - protected R visitSortItem(SortItem node, C context) + protected Void visitSortItem(SortItem node, C context) { - return process(node.getSortKey(), context); + process(node.getSortKey(), context); + return null; } @Override - protected R visitQuerySpecification(QuerySpecification node, C context) + protected Void visitQuerySpecification(QuerySpecification node, C context) { process(node.getSelect(), context); if (node.getFrom().isPresent()) { @@ -408,7 +417,7 @@ protected R visitQuerySpecification(QuerySpecification node, C context) } @Override - protected R visitSetOperation(SetOperation node, C context) + protected Void visitSetOperation(SetOperation node, C context) { for (Relation relation : node.getRelations()) { process(relation, context); @@ -417,7 +426,7 @@ protected R visitSetOperation(SetOperation node, C context) } @Override - protected R visitValues(Values node, C context) + protected Void visitValues(Values node, C context) { for (Expression row : node.getRows()) { process(row, context); @@ -426,7 +435,7 @@ protected R visitValues(Values node, C context) } @Override - protected R visitRow(Row 
node, C context) + protected Void visitRow(Row node, C context) { for (Expression expression : node.getItems()) { process(expression, context); @@ -435,19 +444,21 @@ protected R visitRow(Row node, C context) } @Override - protected R visitTableSubquery(TableSubquery node, C context) + protected Void visitTableSubquery(TableSubquery node, C context) { - return process(node.getQuery(), context); + process(node.getQuery(), context); + return null; } @Override - protected R visitAliasedRelation(AliasedRelation node, C context) + protected Void visitAliasedRelation(AliasedRelation node, C context) { - return process(node.getRelation(), context); + process(node.getRelation(), context); + return null; } @Override - protected R visitSampledRelation(SampledRelation node, C context) + protected Void visitSampledRelation(SampledRelation node, C context) { process(node.getRelation(), context); process(node.getSamplePercentage(), context); @@ -455,7 +466,7 @@ protected R visitSampledRelation(SampledRelation node, C context) } @Override - protected R visitJoin(Join node, C context) + protected Void visitJoin(Join node, C context) { process(node.getLeft(), context); process(node.getRight(), context); @@ -468,7 +479,7 @@ protected R visitJoin(Join node, C context) } @Override - protected R visitUnnest(Unnest node, C context) + protected Void visitUnnest(Unnest node, C context) { for (Expression expression : node.getExpressions()) { process(expression, context); @@ -478,7 +489,7 @@ protected R visitUnnest(Unnest node, C context) } @Override - protected R visitGroupBy(GroupBy node, C context) + protected Void visitGroupBy(GroupBy node, C context) { for (GroupingElement groupingElement : node.getGroupingElements()) { process(groupingElement, context); @@ -488,19 +499,19 @@ protected R visitGroupBy(GroupBy node, C context) } @Override - protected R visitCube(Cube node, C context) + protected Void visitCube(Cube node, C context) { return null; } @Override - protected R 
visitRollup(Rollup node, C context) + protected Void visitRollup(Rollup node, C context) { return null; } @Override - protected R visitSimpleGroupBy(SimpleGroupBy node, C context) + protected Void visitSimpleGroupBy(SimpleGroupBy node, C context) { for (Expression expression : node.getExpressions()) { process(expression, context); @@ -510,13 +521,13 @@ protected R visitSimpleGroupBy(SimpleGroupBy node, C context) } @Override - protected R visitGroupingSets(GroupingSets node, C context) + protected Void visitGroupingSets(GroupingSets node, C context) { return null; } @Override - protected R visitInsert(Insert node, C context) + protected Void visitInsert(Insert node, C context) { process(node.getQuery(), context); @@ -524,7 +535,7 @@ protected R visitInsert(Insert node, C context) } @Override - protected R visitDelete(Delete node, C context) + protected Void visitDelete(Delete node, C context) { process(node.getTable(), context); node.getWhere().ifPresent(where -> process(where, context)); @@ -533,7 +544,7 @@ protected R visitDelete(Delete node, C context) } @Override - protected R visitCreateTableAsSelect(CreateTableAsSelect node, C context) + protected Void visitCreateTableAsSelect(CreateTableAsSelect node, C context) { process(node.getQuery(), context); for (Property property : node.getProperties()) { @@ -544,7 +555,7 @@ protected R visitCreateTableAsSelect(CreateTableAsSelect node, C context) } @Override - protected R visitProperty(Property node, C context) + protected Void visitProperty(Property node, C context) { process(node.getName(), context); process(node.getValue(), context); @@ -553,7 +564,7 @@ protected R visitProperty(Property node, C context) } @Override - protected R visitAnalyze(Analyze node, C context) + protected Void visitAnalyze(Analyze node, C context) { for (Property property : node.getProperties()) { process(property, context); @@ -562,7 +573,7 @@ protected R visitAnalyze(Analyze node, C context) } @Override - protected R 
visitCreateView(CreateView node, C context) + protected Void visitCreateView(CreateView node, C context) { process(node.getQuery(), context); @@ -570,7 +581,7 @@ protected R visitCreateView(CreateView node, C context) } @Override - protected R visitSetSession(SetSession node, C context) + protected Void visitSetSession(SetSession node, C context) { process(node.getValue(), context); @@ -578,7 +589,7 @@ protected R visitSetSession(SetSession node, C context) } @Override - protected R visitAddColumn(AddColumn node, C context) + protected Void visitAddColumn(AddColumn node, C context) { process(node.getColumn(), context); @@ -586,7 +597,7 @@ protected R visitAddColumn(AddColumn node, C context) } @Override - protected R visitCreateTable(CreateTable node, C context) + protected Void visitCreateTable(CreateTable node, C context) { for (TableElement tableElement : node.getElements()) { process(tableElement, context); @@ -599,7 +610,7 @@ protected R visitCreateTable(CreateTable node, C context) } @Override - protected R visitStartTransaction(StartTransaction node, C context) + protected Void visitStartTransaction(StartTransaction node, C context) { for (TransactionMode transactionMode : node.getTransactionModes()) { process(transactionMode, context); @@ -609,7 +620,7 @@ protected R visitStartTransaction(StartTransaction node, C context) } @Override - protected R visitExplain(Explain node, C context) + protected Void visitExplain(Explain node, C context) { process(node.getStatement(), context); @@ -621,7 +632,7 @@ protected R visitExplain(Explain node, C context) } @Override - protected R visitQuantifiedComparisonExpression(QuantifiedComparisonExpression node, C context) + protected Void visitQuantifiedComparisonExpression(QuantifiedComparisonExpression node, C context) { process(node.getValue(), context); process(node.getSubquery(), context); @@ -630,7 +641,7 @@ protected R visitQuantifiedComparisonExpression(QuantifiedComparisonExpression n } @Override - protected R 
visitExists(ExistsPredicate node, C context) + protected Void visitExists(ExistsPredicate node, C context) { process(node.getSubquery(), context); @@ -638,7 +649,7 @@ protected R visitExists(ExistsPredicate node, C context) } @Override - protected R visitLateral(Lateral node, C context) + protected Void visitLateral(Lateral node, C context) { process(node.getQuery(), context); From aa89cb6e6557c5f756232c14c9c810af36b9dcf5 Mon Sep 17 00:00:00 2001 From: David Phillips Date: Wed, 22 Apr 2020 11:07:56 -0700 Subject: [PATCH 273/519] Use Zulu JDK 11 to run product tests --- .../tests/product/launcher/env/EnvironmentOptions.java | 2 +- .../main/resources/docker/presto-product-tests/run-presto.sh | 3 +++ presto-product-tests/conf/product-tests-defaults.sh | 2 +- 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/EnvironmentOptions.java b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/EnvironmentOptions.java index b0b6720c0afd..8d3b4454c91e 100644 --- a/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/EnvironmentOptions.java +++ b/presto-product-tests-launcher/src/main/java/io/prestosql/tests/product/launcher/env/EnvironmentOptions.java @@ -28,7 +28,7 @@ public final class EnvironmentOptions public String hadoopBaseImage = System.getenv().getOrDefault("HADOOP_BASE_IMAGE", "prestodev/hdp2.6-hive"); @Option(name = "--image-version", title = "version", description = "docker images version") - public String imagesVersion = System.getenv().getOrDefault("DOCKER_IMAGES_VERSION", "27"); + public String imagesVersion = System.getenv().getOrDefault("DOCKER_IMAGES_VERSION", "28"); @Option(name = "--server-package", title = "server-package", description = "path to Presto server package") public File serverPackage = new File("presto-server/target/presto-server-${project.version}.tar.gz"); diff --git 
a/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/run-presto.sh b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/run-presto.sh index 5a5c45830656..ca43062de3de 100755 --- a/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/run-presto.sh +++ b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/run-presto.sh @@ -17,6 +17,9 @@ if test -d /docker/presto-init.d; then done fi +export JAVA_HOME="/usr/lib/jvm/zulu-11" +export PATH="${JAVA_HOME}/bin:${PATH}" + exec /docker/presto-server-*/bin/launcher \ -Dpresto-temporarily-allow-java8=true \ -Dnode.id="${HOSTNAME}" \ diff --git a/presto-product-tests/conf/product-tests-defaults.sh b/presto-product-tests/conf/product-tests-defaults.sh index 8aac558cfa93..552c792a350a 100644 --- a/presto-product-tests/conf/product-tests-defaults.sh +++ b/presto-product-tests/conf/product-tests-defaults.sh @@ -1,4 +1,4 @@ -export DOCKER_IMAGES_VERSION=${DOCKER_IMAGES_VERSION:-27} +export DOCKER_IMAGES_VERSION=${DOCKER_IMAGES_VERSION:-28} if test -v HADOOP_BASE_IMAGE; then test -v TESTS_HIVE_VERSION_MAJOR From ea8f7bdc9a29d6612243648a1b285937d57ac968 Mon Sep 17 00:00:00 2001 From: David Phillips Date: Tue, 11 Feb 2020 14:45:10 -0800 Subject: [PATCH 274/519] Remove fallback for Java 8 --- README.md | 6 ++---- presto-main/pom.xml | 15 --------------- .../server/PrestoSystemRequirements.java | 16 +--------------- .../server/TestPrestoSystemRequirements.java | 9 +++++++++ .../docker/presto-product-tests/run-presto.sh | 1 - .../java/io/prestosql/server/rpm/ServerIT.java | 13 ------------- 6 files changed, 12 insertions(+), 48 deletions(-) diff --git a/README.md b/README.md index 6678a10c5d66..aa25ec05aa59 100644 --- a/README.md +++ b/README.md @@ -34,8 +34,8 @@ After building Presto for the first time, you can load the project into your IDE After opening the project in IntelliJ, double check that the Java SDK is properly configured for 
the project: * Open the File menu and select Project Structure -* In the SDKs section, ensure that a 1.8 JDK is selected (create one if none exist) -* In the Project section, ensure the Project language level is set to 8.0 as Presto makes use of several Java 8 language features +* In the SDKs section, ensure that JDK 11 is selected (create one if none exist) +* In the Project section, ensure the Project language level is set to 8 (Presto does not yet use Java 11 language features) Presto comes with sample configuration that should work out-of-the-box for development. Use the following options to create a run configuration: @@ -50,8 +50,6 @@ Additionally, the Hive plugin must be configured with the location of your Hive -Dhive.metastore.uri=thrift://localhost:9083 -When running on Java 8, the VM Options must be supplemented with `-Dpresto-temporarily-allow-java8=true` in order for Presto to start. - ### Using SOCKS for Hive or HDFS If your Hive metastore or HDFS cluster is not directly accessible to your local machine, you can use SSH port forwarding to access it. 
Setup a dynamic SOCKS proxy with SSH listening on local port 1080: diff --git a/presto-main/pom.xml b/presto-main/pom.xml index 3f5bbd89366c..3c4d5839b7c6 100644 --- a/presto-main/pom.xml +++ b/presto-main/pom.xml @@ -392,21 +392,6 @@ - - - - org.apache.maven.plugins - maven-surefire-plugin - - - - true - - - - - - org.gaul diff --git a/presto-main/src/main/java/io/prestosql/server/PrestoSystemRequirements.java b/presto-main/src/main/java/io/prestosql/server/PrestoSystemRequirements.java index 16cb186f9691..6bf467b0f1b2 100644 --- a/presto-main/src/main/java/io/prestosql/server/PrestoSystemRequirements.java +++ b/presto-main/src/main/java/io/prestosql/server/PrestoSystemRequirements.java @@ -105,21 +105,7 @@ private static void verifyJavaVersion() return; } - if (!Boolean.getBoolean("presto-temporarily-allow-java8")) { - failRequirement("" + - "Future versions of Presto will require Java 11 after March 2020.\n\n" + - "You may temporarily continue running on Java 8 by adding the following\n" + - "JVM config option:\n\n" + - " -Dpresto-temporarily-allow-java8=true\n"); - } - - if ((version.getMajor() == 8 && version.getUpdate().isPresent() && version.getUpdate().getAsInt() >= 161) || - (version.getMajor() > 8 && version.getMajor() < 11)) { - warnRequirement("Future versions of Presto will require Java 11+ (found: %s)", javaVersion); - return; - } - - failRequirement("Presto requires Java 8u161+ (found %s)", javaVersion); + failRequirement("Presto requires Java 11+ (found %s)", javaVersion); } private static void verifyUsingG1Gc() diff --git a/presto-main/src/test/java/io/prestosql/server/TestPrestoSystemRequirements.java b/presto-main/src/test/java/io/prestosql/server/TestPrestoSystemRequirements.java index 385873e74479..a74c3c806f99 100644 --- a/presto-main/src/test/java/io/prestosql/server/TestPrestoSystemRequirements.java +++ b/presto-main/src/test/java/io/prestosql/server/TestPrestoSystemRequirements.java @@ -13,6 +13,8 @@ */ package io.prestosql.server; +import 
com.google.common.base.StandardSystemProperty; +import org.testng.SkipException; import org.testng.annotations.Test; import static io.prestosql.server.PrestoSystemRequirements.verifyJvmRequirements; @@ -23,6 +25,13 @@ public class TestPrestoSystemRequirements @Test public void testVerifyJvmRequirements() { + // TODO: remove when we stop running tests with Java 8 on CI + String javaVersion = StandardSystemProperty.JAVA_VERSION.value(); + JavaVersion version = JavaVersion.parse(javaVersion); + if (version.getMajor() < 11) { + throw new SkipException("Incompatible Java version: " + javaVersion); + } + verifyJvmRequirements(); } diff --git a/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/run-presto.sh b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/run-presto.sh index ca43062de3de..94670ecdda3a 100755 --- a/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/run-presto.sh +++ b/presto-product-tests-launcher/src/main/resources/docker/presto-product-tests/run-presto.sh @@ -21,7 +21,6 @@ export JAVA_HOME="/usr/lib/jvm/zulu-11" export PATH="${JAVA_HOME}/bin:${PATH}" exec /docker/presto-server-*/bin/launcher \ - -Dpresto-temporarily-allow-java8=true \ -Dnode.id="${HOSTNAME}" \ --etc-dir="/docker/presto-product-tests/conf/presto/etc" \ --data-dir=/var/presto \ diff --git a/presto-server-rpm/src/test/java/io/prestosql/server/rpm/ServerIT.java b/presto-server-rpm/src/test/java/io/prestosql/server/rpm/ServerIT.java index e2620cf7a7c2..83fc37e48eea 100644 --- a/presto-server-rpm/src/test/java/io/prestosql/server/rpm/ServerIT.java +++ b/presto-server-rpm/src/test/java/io/prestosql/server/rpm/ServerIT.java @@ -41,13 +41,6 @@ @Test(singleThreaded = true) public class ServerIT { - @Parameters("rpm") - @Test - public void testWithJava8(String rpm) - { - testServer("prestodev/centos7-oj8", rpm, "1.8"); - } - @Parameters("rpm") @Test public void testWithJava11(String rpm) @@ -62,8 +55,6 @@ private 
static void testServer(String baseImage, String rpmHostPath, String expe String command = "" + // install RPM "yum localinstall -q -y " + rpm + "\n" + - // temporary hack to allow Java 8 - "#JAVA8\n" + // create Hive catalog file "mkdir /etc/presto/catalog\n" + "cat > /etc/presto/catalog/hive.properties <<\"EOT\"\n" + @@ -79,10 +70,6 @@ private static void testServer(String baseImage, String rpmHostPath, String expe // allow tail to work with Docker's non-local file system "tail ---disable-inotify -F /var/log/presto/server.log\n"; - if (expectedJavaVersion.equals("1.8")) { - command = command.replace("#JAVA8", "echo '-Dpresto-temporarily-allow-java8=true' >> /etc/presto/jvm.config"); - } - try (GenericContainer container = new GenericContainer<>(baseImage)) { container.withExposedPorts(8080) .withFileSystemBind(rpmHostPath, rpm, BindMode.READ_ONLY) From a4ed120028dd94038ea66bc21ae7ad009354d4b1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=81ukasz=20Osipiuk?= Date: Mon, 27 Apr 2020 14:36:28 +0200 Subject: [PATCH 275/519] Properly initialize HDFS configuration in RubixInitializer --- .../plugin/hive/rubix/RubixInitializer.java | 28 +++++++++++-------- 1 file changed, 16 insertions(+), 12 deletions(-) diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/rubix/RubixInitializer.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/rubix/RubixInitializer.java index d0f84da2d31d..d8d850d5f607 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/rubix/RubixInitializer.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/rubix/RubixInitializer.java @@ -23,17 +23,18 @@ import com.qubole.rubix.core.CachingFileSystem; import io.airlift.log.Logger; import io.prestosql.plugin.base.CatalogName; -import io.prestosql.plugin.hive.ConfigurationInitializer; +import io.prestosql.plugin.hive.HdfsConfigurationInitializer; import io.prestosql.spi.Node; import io.prestosql.spi.NodeManager; import org.apache.hadoop.conf.Configuration; import 
javax.inject.Inject; -import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; +import static io.prestosql.plugin.hive.util.ConfigurationUtils.getInitialConfiguration; + /* * Responsibilities of this initializer: * 1. Lazily setup RubixConfigurationInitializer with information about master when it is available @@ -46,14 +47,17 @@ public class RubixInitializer private final CatalogName catalogName; private final RubixConfigurationInitializer rubixConfigurationInitializer; - private final Set configurationInitializers; + private final HdfsConfigurationInitializer hdfsConfigurationInitializer; @Inject - public RubixInitializer(CatalogName catalogName, RubixConfigurationInitializer rubixConfigurationInitializer, Set configurationInitializers) + public RubixInitializer( + CatalogName catalogName, + RubixConfigurationInitializer rubixConfigurationInitializer, + HdfsConfigurationInitializer hdfsConfigurationInitializer) { this.catalogName = catalogName; this.rubixConfigurationInitializer = rubixConfigurationInitializer; - this.configurationInitializers = configurationInitializers; + this.hdfsConfigurationInitializer = hdfsConfigurationInitializer; } public void initializeRubix(NodeManager nodeManager) @@ -84,14 +88,14 @@ public void initializeRubix(NodeManager nodeManager) rubixConfigurationInitializer.setMasterAddress(master.getHostAndPort()); rubixConfigurationInitializer.setCurrentNodeAddress(nodeManager.getCurrentNode().getHost()); - Configuration configuration = new Configuration(false); - for (ConfigurationInitializer configurationInitializer : configurationInitializers) { - configurationInitializer.initializeConfiguration(configuration); - } - - // RubixConfigurationInitializer.initializeConfiguration will not update configurations yet as it has not been fully initialized - // Apply configurations from it by skipping init check + Configuration configuration = getInitialConfiguration(); + // Perform standard HDFS 
configuration initialization. + // This will also call out to RubixConfigurationInitializer but this will be no-op because + // cacheNotReady is not yet changed to false. + hdfsConfigurationInitializer.initializeConfiguration(configuration); + // Apply RubixConfigurationInitializer directly suppressing cacheNotReady check rubixConfigurationInitializer.updateConfiguration(configuration); + MetricRegistry metricRegistry = new MetricRegistry(); BookKeeperServer bookKeeperServer = new BookKeeperServer(); BookKeeper bookKeeper = bookKeeperServer.startServer(configuration, metricRegistry); From 573dc381967d5c60acaebd309d7aba721b86c367 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=81ukasz=20Osipiuk?= Date: Mon, 27 Apr 2020 14:41:39 +0200 Subject: [PATCH 276/519] Rename variable --- .../plugin/hive/rubix/RubixConfigurationInitializer.java | 6 +++--- .../io/prestosql/plugin/hive/rubix/RubixInitializer.java | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/rubix/RubixConfigurationInitializer.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/rubix/RubixConfigurationInitializer.java index 0351db0b2aca..9cda8cc2943a 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/rubix/RubixConfigurationInitializer.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/rubix/RubixConfigurationInitializer.java @@ -53,7 +53,7 @@ public class RubixConfigurationInitializer private final int dataTransferServerPort; // Configs below are dependent on node joining the cluster - private volatile boolean cacheNotReady = true; + private volatile boolean cacheReady; private boolean isMaster; private HostAddress masterAddress; private String nodeAddress; @@ -70,7 +70,7 @@ public RubixConfigurationInitializer(RubixConfig config) @Override public void initializeConfiguration(Configuration config) { - if (cacheNotReady) { + if (!cacheReady) { setCacheDataEnabled(config, false); return; } @@ -125,6 +125,6 @@ 
public void setCurrentNodeAddress(String nodeAddress) public void initializationDone() { checkState(masterAddress != null, "masterAddress is not set"); - cacheNotReady = false; + cacheReady = true; } } diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/rubix/RubixInitializer.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/rubix/RubixInitializer.java index d8d850d5f607..ee0d39f59ae9 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/rubix/RubixInitializer.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/rubix/RubixInitializer.java @@ -91,9 +91,9 @@ public void initializeRubix(NodeManager nodeManager) Configuration configuration = getInitialConfiguration(); // Perform standard HDFS configuration initialization. // This will also call out to RubixConfigurationInitializer but this will be no-op because - // cacheNotReady is not yet changed to false. + // cacheReady is not yet set. hdfsConfigurationInitializer.initializeConfiguration(configuration); - // Apply RubixConfigurationInitializer directly suppressing cacheNotReady check + // Apply RubixConfigurationInitializer directly suppressing cacheReady check rubixConfigurationInitializer.updateConfiguration(configuration); MetricRegistry metricRegistry = new MetricRegistry(); From 7d211efc33e5ca2ad9d2da8fb0514da77163208b Mon Sep 17 00:00:00 2001 From: praveenkrishna Date: Sun, 19 Apr 2020 19:42:02 +0530 Subject: [PATCH 277/519] Expose JdbcClient invocation statistics in JMX --- .../plugin/jdbc/JdbcDiagnosticModule.java | 10 ++-- .../prestosql/plugin/jdbc/TestJmxStats.java | 57 +++++++++++++++++++ 2 files changed, 61 insertions(+), 6 deletions(-) create mode 100644 presto-base-jdbc/src/test/java/io/prestosql/plugin/jdbc/TestJmxStats.java diff --git a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/JdbcDiagnosticModule.java b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/JdbcDiagnosticModule.java index dde0d2d5e245..a67c0d2eaf08 100644 --- 
a/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/JdbcDiagnosticModule.java +++ b/presto-base-jdbc/src/main/java/io/prestosql/plugin/jdbc/JdbcDiagnosticModule.java @@ -58,21 +58,19 @@ public void configure(Binder binder) @StatsCollecting public JdbcClient createJdbcClientWithStats(@ForBaseJdbc JdbcClient client) { - StatisticsAwareJdbcClient statisticsAwareJdbcClient = new StatisticsAwareJdbcClient(client); - Logger logger = Logger.get(format("io.prestosql.plugin.jdbc.%s.jdbcclient", catalogName)); JdbcClient loggingInvocationsJdbcClient = newProxy(JdbcClient.class, new LoggingInvocationHandler( - statisticsAwareJdbcClient, + client, new ReflectiveParameterNamesProvider(), logger::debug)); - return ForwardingJdbcClient.of(() -> { + return new StatisticsAwareJdbcClient(ForwardingJdbcClient.of(() -> { if (logger.isDebugEnabled()) { return loggingInvocationsJdbcClient; } - return statisticsAwareJdbcClient; - }); + return client; + })); } @Provides diff --git a/presto-base-jdbc/src/test/java/io/prestosql/plugin/jdbc/TestJmxStats.java b/presto-base-jdbc/src/test/java/io/prestosql/plugin/jdbc/TestJmxStats.java new file mode 100644 index 000000000000..e398bdab0894 --- /dev/null +++ b/presto-base-jdbc/src/test/java/io/prestosql/plugin/jdbc/TestJmxStats.java @@ -0,0 +1,57 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.plugin.jdbc; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import io.prestosql.spi.Plugin; +import io.prestosql.spi.connector.ConnectorFactory; +import io.prestosql.testing.TestingConnectorContext; +import org.testng.annotations.Test; + +import javax.management.MBeanInfo; +import javax.management.MBeanServer; +import javax.management.ObjectName; + +import java.util.Set; + +import static com.google.common.collect.Iterables.getOnlyElement; +import static java.lang.String.format; +import static java.lang.management.ManagementFactory.getPlatformMBeanServer; +import static org.testng.Assert.assertNotEquals; +import static org.testng.Assert.assertTrue; + +public class TestJmxStats +{ + @Test + public void testJmxStatsExposure() + throws Exception + { + Plugin plugin = new JdbcPlugin("base-jdbc", new TestingH2JdbcModule()); + ConnectorFactory factory = getOnlyElement(plugin.getConnectorFactories()); + factory.create("test", ImmutableMap.of("connection-url", "jdbc"), new TestingConnectorContext()); + MBeanServer mbeanServer = getPlatformMBeanServer(); + Set objectNames = mbeanServer.queryNames(new ObjectName("io.prestosql.plugin.jdbc:*"), null); + + assertTrue(objectNames.containsAll( + ImmutableSet.of( + new ObjectName("io.prestosql.plugin.jdbc:type=ConnectionFactory,name=test"), + new ObjectName("io.prestosql.plugin.jdbc:type=JdbcClient,name=test")))); + + for (ObjectName objectName : objectNames) { + MBeanInfo mbeanInfo = mbeanServer.getMBeanInfo(objectName); + assertNotEquals(mbeanInfo.getAttributes().length, 0, format("Object %s doesn't expose JMX stats", objectName.getCanonicalName())); + } + } +} From dc78d3381b5a1710f6a9f41f72b165319977fd56 Mon Sep 17 00:00:00 2001 From: David Phillips Date: Mon, 27 Apr 2020 23:13:07 -0700 Subject: [PATCH 278/519] Remove "Java 8" phrase from "stream API" comments --- README.md | 2 +- .../io/prestosql/plugin/hive/parquet/HdfsParquetDataSource.java | 2 +- 
.../src/main/java/io/prestosql/orc/AbstractOrcDataSource.java | 2 +- presto-orc/src/main/java/io/prestosql/orc/StripeReader.java | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index aa25ec05aa59..f699587c18c5 100644 --- a/README.md +++ b/README.md @@ -82,7 +82,7 @@ In the sample configuration, the Hive connector is mounted in the `hive` catalog We recommend you use IntelliJ as your IDE. The code style template for the project can be found in the [codestyle](https://github.com/airlift/codestyle) repository along with our general programming and Java guidelines. In addition to those you should also adhere to the following: * Alphabetize sections in the documentation source files (both in the table of contents files and other regular documentation files). In general, alphabetize methods/variables/sections if such ordering already exists in the surrounding code. -* When appropriate, use the Java 8 stream API. However, note that the stream implementation does not perform well so avoid using it in inner loops or otherwise performance sensitive sections. +* When appropriate, use the stream API. However, note that the stream implementation does not perform well so avoid using it in inner loops or otherwise performance sensitive sections. * Categorize errors when throwing exceptions. For example, PrestoException takes an error code as an argument, `PrestoException(HIVE_TOO_MANY_OPEN_PARTITIONS)`. This categorization lets you generate reports so you can monitor the frequency of various failures. * Ensure that all files have the appropriate license header; you can generate the license by running `mvn license:format`. * Consider using String formatting (printf style formatting using the Java `Formatter` class): `format("Session property %s is invalid: %s", name, value)` (note that `format()` should always be statically imported). Sometimes, if you only need to append something, consider using the `+` operator. 
diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/parquet/HdfsParquetDataSource.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/parquet/HdfsParquetDataSource.java index 7a03eded8071..bda6919ede75 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/parquet/HdfsParquetDataSource.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/parquet/HdfsParquetDataSource.java @@ -136,7 +136,7 @@ public final Map planRead(Map diskRanges) } // - // Note: this code does not use the Java 8 stream APIs to avoid any extra object allocation + // Note: this code does not use the stream APIs to avoid any extra object allocation // // split disk ranges into "big" and "small" diff --git a/presto-orc/src/main/java/io/prestosql/orc/AbstractOrcDataSource.java b/presto-orc/src/main/java/io/prestosql/orc/AbstractOrcDataSource.java index 1d059d5a4b46..fcc1ea537584 100644 --- a/presto-orc/src/main/java/io/prestosql/orc/AbstractOrcDataSource.java +++ b/presto-orc/src/main/java/io/prestosql/orc/AbstractOrcDataSource.java @@ -110,7 +110,7 @@ public final Map readFully(Map diskRanges) } // - // Note: this code does not use the Java 8 stream APIs to avoid any extra object allocation + // Note: this code does not use the stream APIs to avoid any extra object allocation // // split disk ranges into "big" and "small" diff --git a/presto-orc/src/main/java/io/prestosql/orc/StripeReader.java b/presto-orc/src/main/java/io/prestosql/orc/StripeReader.java index 449b4fec9b80..2d9568c6f48d 100644 --- a/presto-orc/src/main/java/io/prestosql/orc/StripeReader.java +++ b/presto-orc/src/main/java/io/prestosql/orc/StripeReader.java @@ -251,7 +251,7 @@ private Map readDiskRanges(long stripeOffset, Map Date: Mon, 27 Apr 2020 23:11:23 -0700 Subject: [PATCH 279/519] Update server RPM for Java 11 --- presto-server-rpm/src/main/rpm/preinstall | 16 +++------------- 1 file changed, 3 insertions(+), 13 deletions(-) diff --git a/presto-server-rpm/src/main/rpm/preinstall 
b/presto-server-rpm/src/main/rpm/preinstall index 105da03415ca..61142a70cf8c 100644 --- a/presto-server-rpm/src/main/rpm/preinstall +++ b/presto-server-rpm/src/main/rpm/preinstall @@ -12,8 +12,8 @@ java_version() { check_if_correct_java_version() { # If the string is empty return non-zero code. We don't want false positives if /bin/java is -# a valid java version because that will leave java8_home unset and the init.d scripts will -# use the default java version, which may not be java 8. +# a valid java version because that will leave JAVA_HOME unset and the init.d scripts will +# use the default java version, which may not be the correct version. if [ -z $1 ] ; then return 1 fi @@ -26,9 +26,6 @@ check_if_correct_java_version() { if [[ ("$JAVA_MAJOR" -ge "11") ]]; then echo "JAVA_HOME=$1" > /tmp/presto_env.sh return 0 - elif [[ ("$JAVA_VERSION" > "1.8") && ($JAVA_UPDATE -ge 161) ]]; then - echo "JAVA_HOME=$1" > /tmp/presto_env.sh - return 0 else return 1 fi @@ -39,12 +36,7 @@ if ! check_if_correct_java_version "$JAVA_HOME"; then java_found=false for candidate in \ /usr/lib/jvm/java-11-* \ - /usr/lib/jvm/jdk1.8* \ - /usr/lib/jvm/jre1.8* \ - /usr/lib/jvm/java-8-oracle* \ - /usr/java/jdk1.8* \ - /usr/java/jre1.8* \ - /usr/jdk64/jdk1.8* \ + /usr/lib/jvm/zulu-11 \ /usr/lib/jvm/default-java \ /usr/java/default \ / \ @@ -69,8 +61,6 @@ if [ "$java_found" = false ]; then | You can also download an OpenJDK 11 build, such as Zulu Community: | | >>> https://www.azul.com/downloads/zulu-community/ <<< | | | -| Presto requires at least Java 1.8 update 161 (8u161), but Java 11 | -| is strongly recommend (and will be required in the future). | | NOTE: This script will attempt to find Java whether you install | | using the binary or the RPM based installer. 
| +======================================================================+ From 9432607332debfec5a38238ce0da9785f6a6f5ab Mon Sep 17 00:00:00 2001 From: David Phillips Date: Mon, 27 Apr 2020 23:15:21 -0700 Subject: [PATCH 280/519] Remove documentation references to Java 8 --- README.md | 2 +- presto-docs/src/main/sphinx/security/cli.rst | 2 -- .../main/sphinx/security/jce-policy.fragment | 21 ------------------- .../src/main/sphinx/security/server.rst | 2 -- 4 files changed, 1 insertion(+), 26 deletions(-) delete mode 100644 presto-docs/src/main/sphinx/security/jce-policy.fragment diff --git a/README.md b/README.md index f699587c18c5..771fc17a32d6 100644 --- a/README.md +++ b/README.md @@ -10,7 +10,7 @@ See the [User Manual](https://prestosql.io/docs/current/) for deployment instruc ## Requirements * Mac OS X or Linux -* Java 8 Update 161 or higher (8u161+), 64-bit is the minimum supported version. Java 11 is recommended and will soon become required. Both Oracle JDK and OpenJDK are supported. +* Java 11, 64-bit * Python 2.6+ (for running with the launcher script) ## Building Presto diff --git a/presto-docs/src/main/sphinx/security/cli.rst b/presto-docs/src/main/sphinx/security/cli.rst index 102534003b1c..08887c3c71c7 100644 --- a/presto-docs/src/main/sphinx/security/cli.rst +++ b/presto-docs/src/main/sphinx/security/cli.rst @@ -33,8 +33,6 @@ principal. .. include:: ktadd-note.fragment -.. include:: jce-policy.fragment - Java Keystore File for TLS ^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/presto-docs/src/main/sphinx/security/jce-policy.fragment b/presto-docs/src/main/sphinx/security/jce-policy.fragment deleted file mode 100644 index 69a6cc8bb196..000000000000 --- a/presto-docs/src/main/sphinx/security/jce-policy.fragment +++ /dev/null @@ -1,21 +0,0 @@ -Java Cryptography Extension Policy Files -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -The Java Runtime Environment is shipped with policy files that limit the -strength of the cryptographic keys that can be used. 
Kerberos, by default, uses -keys that are larger than those supported by the included policy files. There -are two possible solutions to the problem: - - * Update the :abbr:`JCE (Java Cryptography Extension)` policy files. - * Configure Kerberos to use reduced-strength keys. - -Of the two options, updating the JCE policy files is recommended. The JCE -policy files can be downloaded from Oracle. Note that the JCE policy files vary -based on the major version of Java you are running. Java 6 policy files will -not work with Java 8, for example. - -The Java 8 policy files are available `from Oracle -`_. -Instructions for installing the policy files are included in a ``README`` file in -the ZIP archive. You need administrative access to install the policy -files, if you are installing them in a system JRE. diff --git a/presto-docs/src/main/sphinx/security/server.rst b/presto-docs/src/main/sphinx/security/server.rst index 2314e869b715..54162128f35e 100644 --- a/presto-docs/src/main/sphinx/security/server.rst +++ b/presto-docs/src/main/sphinx/security/server.rst @@ -46,8 +46,6 @@ In addition, the Presto coordinator needs a `keytab file .. include:: ktadd-note.fragment -.. 
include:: jce-policy.fragment - Java Keystore File for TLS ^^^^^^^^^^^^^^^^^^^^^^^^^^ When using Kerberos authentication, access to the Presto coordinator should be From 5bee68799867689ae2d88e4b09eeb1ea8537b402 Mon Sep 17 00:00:00 2001 From: Alex Date: Mon, 27 Apr 2020 16:56:36 -0400 Subject: [PATCH 281/519] Add random suffix to all tables in TestDistributedQueries --- .../AbstractTestDistributedQueries.java | 214 ++++++++++-------- 1 file changed, 116 insertions(+), 98 deletions(-) diff --git a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java index 723378469721..36299205a5c9 100644 --- a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java +++ b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java @@ -154,41 +154,45 @@ public void testResetSession() @Test public void testCreateTable() { - assertUpdate("CREATE TABLE test_create (a bigint, b double, c varchar)"); - assertTrue(getQueryRunner().tableExists(getSession(), "test_create")); - assertTableColumnNames("test_create", "a", "b", "c"); + String tableName = "test_create_" + randomTableSuffix(); + assertUpdate("CREATE TABLE " + tableName + " (a bigint, b double, c varchar)"); + assertTrue(getQueryRunner().tableExists(getSession(), tableName)); + assertTableColumnNames(tableName, "a", "b", "c"); - assertUpdate("DROP TABLE test_create"); - assertFalse(getQueryRunner().tableExists(getSession(), "test_create")); + assertUpdate("DROP TABLE " + tableName); + assertFalse(getQueryRunner().tableExists(getSession(), tableName)); - assertQueryFails("CREATE TABLE test_create (a bad_type)", ".* Unknown type 'bad_type' for column 'a'"); - assertFalse(getQueryRunner().tableExists(getSession(), "test_create")); + assertQueryFails("CREATE TABLE " + tableName + " (a bad_type)", ".* Unknown type 'bad_type' for column 'a'"); + 
assertFalse(getQueryRunner().tableExists(getSession(), tableName)); - assertUpdate("CREATE TABLE test_create_table_if_not_exists (a bigint, b varchar, c double)"); - assertTrue(getQueryRunner().tableExists(getSession(), "test_create_table_if_not_exists")); - assertTableColumnNames("test_create_table_if_not_exists", "a", "b", "c"); + tableName = "test_create_table_if_not_exists_" + randomTableSuffix(); + assertUpdate("CREATE TABLE " + tableName + " (a bigint, b varchar, c double)"); + assertTrue(getQueryRunner().tableExists(getSession(), tableName)); + assertTableColumnNames(tableName, "a", "b", "c"); - assertUpdate("CREATE TABLE IF NOT EXISTS test_create_table_if_not_exists (d bigint, e varchar)"); - assertTrue(getQueryRunner().tableExists(getSession(), "test_create_table_if_not_exists")); - assertTableColumnNames("test_create_table_if_not_exists", "a", "b", "c"); + assertUpdate("CREATE TABLE IF NOT EXISTS " + tableName + " (d bigint, e varchar)"); + assertTrue(getQueryRunner().tableExists(getSession(), tableName)); + assertTableColumnNames(tableName, "a", "b", "c"); - assertUpdate("DROP TABLE test_create_table_if_not_exists"); - assertFalse(getQueryRunner().tableExists(getSession(), "test_create_table_if_not_exists")); + assertUpdate("DROP TABLE " + tableName); + assertFalse(getQueryRunner().tableExists(getSession(), tableName)); // Test CREATE TABLE LIKE - assertUpdate("CREATE TABLE test_create_original (a bigint, b double, c varchar)"); - assertTrue(getQueryRunner().tableExists(getSession(), "test_create_original")); - assertTableColumnNames("test_create_original", "a", "b", "c"); + tableName = "test_create_original_" + randomTableSuffix(); + assertUpdate("CREATE TABLE " + tableName + " (a bigint, b double, c varchar)"); + assertTrue(getQueryRunner().tableExists(getSession(), tableName)); + assertTableColumnNames(tableName, "a", "b", "c"); - assertUpdate("CREATE TABLE test_create_like (LIKE test_create_original, d boolean, e varchar)"); - 
assertTrue(getQueryRunner().tableExists(getSession(), "test_create_like")); - assertTableColumnNames("test_create_like", "a", "b", "c", "d", "e"); + String tableNameLike = "test_create_like_" + randomTableSuffix(); + assertUpdate("CREATE TABLE " + tableNameLike + " (LIKE " + tableName + ", d boolean, e varchar)"); + assertTrue(getQueryRunner().tableExists(getSession(), tableNameLike)); + assertTableColumnNames(tableNameLike, "a", "b", "c", "d", "e"); - assertUpdate("DROP TABLE test_create_original"); - assertFalse(getQueryRunner().tableExists(getSession(), "test_create_original")); + assertUpdate("DROP TABLE " + tableName); + assertFalse(getQueryRunner().tableExists(getSession(), tableName)); - assertUpdate("DROP TABLE test_create_like"); - assertFalse(getQueryRunner().tableExists(getSession(), "test_create_like")); + assertUpdate("DROP TABLE " + tableNameLike); + assertFalse(getQueryRunner().tableExists(getSession(), tableNameLike)); } @Test @@ -349,94 +353,101 @@ protected void assertCreateTableAsSelect(Session session, @Language("SQL") Strin @Test public void testRenameTable() { - assertUpdate("CREATE TABLE test_rename AS SELECT 123 x", 1); + String tableName = "test_rename_" + randomTableSuffix(); + assertUpdate("CREATE TABLE " + tableName + " AS SELECT 123 x", 1); - assertUpdate("ALTER TABLE test_rename RENAME TO test_rename_new"); - assertQuery("SELECT x FROM test_rename_new", "VALUES 123"); + String renamedTable = "test_rename_new_" + randomTableSuffix(); + assertUpdate("ALTER TABLE " + tableName + " RENAME TO " + renamedTable); + assertQuery("SELECT x FROM " + renamedTable, "VALUES 123"); - assertUpdate("ALTER TABLE test_rename_new RENAME TO TEST_RENAME"); // 'TEST_RENAME' is upper-case, not delimited + String uppercaseName = "TEST_RENAME_" + randomTableSuffix(); // Test an upper-case, not delimited identifier + assertUpdate("ALTER TABLE " + renamedTable + " RENAME TO " + uppercaseName); assertQuery( - "SELECT x FROM test_rename", // 'test_rename' is 
lower-case, not delimited + "SELECT x FROM " + uppercaseName.toLowerCase(ENGLISH), // Ensure select allows for lower-case, not delimited identifier "VALUES 123"); - assertUpdate("DROP TABLE test_rename"); + assertUpdate("DROP TABLE " + uppercaseName); - assertFalse(getQueryRunner().tableExists(getSession(), "test_rename")); - assertFalse(getQueryRunner().tableExists(getSession(), "test_rename_new")); + assertFalse(getQueryRunner().tableExists(getSession(), tableName)); + assertFalse(getQueryRunner().tableExists(getSession(), renamedTable)); } @Test public void testCommentTable() { - assertUpdate("CREATE TABLE test_comment(id integer)"); + String tableName = "test_comment_" + randomTableSuffix(); + assertUpdate("CREATE TABLE " + tableName + "(id integer)"); - assertUpdate("COMMENT ON TABLE test_comment IS 'new comment'"); - MaterializedResult materializedRows = computeActual("SHOW CREATE TABLE test_comment"); + assertUpdate("COMMENT ON TABLE " + tableName + " IS 'new comment'"); + MaterializedResult materializedRows = computeActual("SHOW CREATE TABLE " + tableName); assertTrue(materializedRows.getMaterializedRows().get(0).getField(0).toString().contains("COMMENT 'new comment'")); - assertUpdate("COMMENT ON TABLE test_comment IS ''"); - materializedRows = computeActual("SHOW CREATE TABLE test_comment"); + assertUpdate("COMMENT ON TABLE " + tableName + " IS ''"); + materializedRows = computeActual("SHOW CREATE TABLE " + tableName); assertTrue(materializedRows.getMaterializedRows().get(0).getField(0).toString().contains("COMMENT ''")); - assertUpdate("COMMENT ON TABLE test_comment IS NULL"); - materializedRows = computeActual("SHOW CREATE TABLE test_comment"); + assertUpdate("COMMENT ON TABLE " + tableName + " IS NULL"); + materializedRows = computeActual("SHOW CREATE TABLE " + tableName); assertFalse(materializedRows.getMaterializedRows().get(0).getField(0).toString().contains("COMMENT")); - assertUpdate("DROP TABLE test_comment"); + assertUpdate("DROP TABLE " + 
tableName); } @Test public void testRenameColumn() { - assertUpdate("CREATE TABLE test_rename_column AS SELECT 'some value' x", 1); + String tableName = "test_rename_column_" + randomTableSuffix(); + assertUpdate("CREATE TABLE " + tableName + " AS SELECT 'some value' x", 1); - assertUpdate("ALTER TABLE test_rename_column RENAME COLUMN x TO y"); - assertQuery("SELECT y FROM test_rename_column", "VALUES 'some value'"); + assertUpdate("ALTER TABLE " + tableName + " RENAME COLUMN x TO y"); + assertQuery("SELECT y FROM " + tableName, "VALUES 'some value'"); - assertUpdate("ALTER TABLE test_rename_column RENAME COLUMN y TO Z"); // 'Z' is upper-case, not delimited + assertUpdate("ALTER TABLE " + tableName + " RENAME COLUMN y TO Z"); // 'Z' is upper-case, not delimited assertQuery( - "SELECT z FROM test_rename_column", // 'z' is lower-case, not delimited + "SELECT z FROM " + tableName, // 'z' is lower-case, not delimited "VALUES 'some value'"); // There should be exactly one column - assertQuery("SELECT * FROM test_rename_column", "VALUES 'some value'"); + assertQuery("SELECT * FROM " + tableName, "VALUES 'some value'"); - assertUpdate("DROP TABLE test_rename_column"); + assertUpdate("DROP TABLE " + tableName); } @Test public void testDropColumn() { - assertUpdate("CREATE TABLE test_drop_column AS SELECT 123 x, 111 a", 1); + String tableName = "test_drop_column_" + randomTableSuffix(); + assertUpdate("CREATE TABLE " + tableName + " AS SELECT 123 x, 111 a", 1); - assertUpdate("ALTER TABLE test_drop_column DROP COLUMN x"); - assertQueryFails("SELECT x FROM test_drop_column", ".* Column 'x' cannot be resolved"); + assertUpdate("ALTER TABLE " + tableName + " DROP COLUMN x"); + assertQueryFails("SELECT x FROM " + tableName, ".* Column 'x' cannot be resolved"); - assertQueryFails("ALTER TABLE test_drop_column DROP COLUMN a", ".* Cannot drop the only column in a table"); + assertQueryFails("ALTER TABLE " + tableName + " DROP COLUMN a", ".* Cannot drop the only column in a 
table"); } @Test public void testAddColumn() { - assertUpdate("CREATE TABLE test_add_column AS SELECT CAST('first' AS varchar) x", 1); + String tableName = "test_add_column_" + randomTableSuffix(); + assertUpdate("CREATE TABLE " + tableName + " AS SELECT CAST('first' AS varchar) x", 1); - assertQueryFails("ALTER TABLE test_add_column ADD COLUMN x bigint", ".* Column 'x' already exists"); - assertQueryFails("ALTER TABLE test_add_column ADD COLUMN X bigint", ".* Column 'X' already exists"); - assertQueryFails("ALTER TABLE test_add_column ADD COLUMN q bad_type", ".* Unknown type 'bad_type' for column 'q'"); + assertQueryFails("ALTER TABLE " + tableName + " ADD COLUMN x bigint", ".* Column 'x' already exists"); + assertQueryFails("ALTER TABLE " + tableName + " ADD COLUMN X bigint", ".* Column 'X' already exists"); + assertQueryFails("ALTER TABLE " + tableName + " ADD COLUMN q bad_type", ".* Unknown type 'bad_type' for column 'q'"); - assertUpdate("ALTER TABLE test_add_column ADD COLUMN a varchar"); - assertUpdate("INSERT INTO test_add_column SELECT 'second', 'xxx'", 1); + assertUpdate("ALTER TABLE " + tableName + " ADD COLUMN a varchar"); + assertUpdate("INSERT INTO " + tableName + " SELECT 'second', 'xxx'", 1); assertQuery( - "SELECT x, a FROM test_add_column", + "SELECT x, a FROM " + tableName, "VALUES ('first', NULL), ('second', 'xxx')"); - assertUpdate("ALTER TABLE test_add_column ADD COLUMN b double"); - assertUpdate("INSERT INTO test_add_column SELECT 'third', 'yyy', 33.3E0", 1); + assertUpdate("ALTER TABLE " + tableName + " ADD COLUMN b double"); + assertUpdate("INSERT INTO " + tableName + " SELECT 'third', 'yyy', 33.3E0", 1); assertQuery( - "SELECT x, a, b FROM test_add_column", + "SELECT x, a, b FROM " + tableName, "VALUES ('first', NULL, NULL), ('second', 'xxx', NULL), ('third', 'yyy', 33.3)"); - assertUpdate("DROP TABLE test_add_column"); + assertUpdate("DROP TABLE " + tableName); } @Test @@ -1056,31 +1067,33 @@ public void testViewColumnAccessControl() 
.setSchema(getSession().getSchema().get()) .build(); + String columnAccessViewName = "test_view_column_access_" + randomTableSuffix(); + // TEST COLUMN-LEVEL PRIVILEGES // view creation permissions are only checked at query time, not at creation assertAccessAllowed( viewOwnerSession, - "CREATE VIEW test_view_column_access AS SELECT * FROM orders", + "CREATE VIEW " + columnAccessViewName + " AS SELECT * FROM orders", privilege("orders", CREATE_VIEW_WITH_SELECT_COLUMNS)); // verify selecting from a view over a table requires the view owner to have special view creation privileges for the table assertAccessDenied( - "SELECT * FROM test_view_column_access", + "SELECT * FROM " + columnAccessViewName, "View owner 'test_view_access_owner' cannot create view that selects from .*.orders.*", privilege(viewOwnerSession.getUser(), "orders", CREATE_VIEW_WITH_SELECT_COLUMNS)); // verify the view owner can select from the view even without special view creation privileges assertAccessAllowed( viewOwnerSession, - "SELECT * FROM test_view_column_access", + "SELECT * FROM " + columnAccessViewName, privilege(viewOwnerSession.getUser(), "orders", CREATE_VIEW_WITH_SELECT_COLUMNS)); // verify selecting from a view over a table does not require the session user to have SELECT privileges on the underlying table assertAccessAllowed( - "SELECT * FROM test_view_column_access", + "SELECT * FROM " + columnAccessViewName, privilege(getSession().getUser(), "orders", CREATE_VIEW_WITH_SELECT_COLUMNS)); assertAccessAllowed( - "SELECT * FROM test_view_column_access", + "SELECT * FROM " + columnAccessViewName, privilege(getSession().getUser(), "orders", SELECT_COLUMN)); Session nestedViewOwnerSession = TestingSession.testSessionBuilder() @@ -1089,46 +1102,48 @@ public void testViewColumnAccessControl() .setSchema(getSession().getSchema().get()) .build(); + String nestedViewName = "test_nested_view_column_access_" + randomTableSuffix(); // view creation permissions are only checked at query time, not 
at creation assertAccessAllowed( nestedViewOwnerSession, - "CREATE VIEW test_nested_view_column_access AS SELECT * FROM test_view_column_access", - privilege("test_view_column_access", CREATE_VIEW_WITH_SELECT_COLUMNS)); + "CREATE VIEW " + nestedViewName + " AS SELECT * FROM " + columnAccessViewName, + privilege(columnAccessViewName, CREATE_VIEW_WITH_SELECT_COLUMNS)); // verify selecting from a view over a view requires the view owner of the outer view to have special view creation privileges for the inner view assertAccessDenied( - "SELECT * FROM test_nested_view_column_access", + "SELECT * FROM " + nestedViewName, "View owner 'test_nested_view_access_owner' cannot create view that selects from .*.test_view_column_access.*", - privilege(nestedViewOwnerSession.getUser(), "test_view_column_access", CREATE_VIEW_WITH_SELECT_COLUMNS)); + privilege(nestedViewOwnerSession.getUser(), columnAccessViewName, CREATE_VIEW_WITH_SELECT_COLUMNS)); // verify selecting from a view over a view does not require the session user to have SELECT privileges for the inner view assertAccessAllowed( - "SELECT * FROM test_nested_view_column_access", - privilege(getSession().getUser(), "test_view_column_access", CREATE_VIEW_WITH_SELECT_COLUMNS)); + "SELECT * FROM " + nestedViewName, + privilege(getSession().getUser(), columnAccessViewName, CREATE_VIEW_WITH_SELECT_COLUMNS)); assertAccessAllowed( - "SELECT * FROM test_nested_view_column_access", - privilege(getSession().getUser(), "test_view_column_access", SELECT_COLUMN)); + "SELECT * FROM " + nestedViewName, + privilege(getSession().getUser(), columnAccessViewName, SELECT_COLUMN)); // verify that INVOKER security runs as session user + String invokerViewName = "test_invoker_view_column_access_" + randomTableSuffix(); assertAccessAllowed( viewOwnerSession, - "CREATE VIEW test_invoker_view_column_access SECURITY INVOKER AS SELECT * FROM orders", + "CREATE VIEW " + invokerViewName + " SECURITY INVOKER AS SELECT * FROM orders", privilege("orders", 
CREATE_VIEW_WITH_SELECT_COLUMNS)); assertAccessAllowed( - "SELECT * FROM test_invoker_view_column_access", + "SELECT * FROM " + invokerViewName, privilege(viewOwnerSession.getUser(), "orders", SELECT_COLUMN)); assertAccessDenied( - "SELECT * FROM test_invoker_view_column_access", + "SELECT * FROM " + invokerViewName, "Cannot select from columns \\[.*\\] in table .*.orders.*", privilege(getSession().getUser(), "orders", SELECT_COLUMN)); // change access denied exception to view - assertAccessDenied("SHOW CREATE VIEW test_nested_view_column_access", "Cannot show create table for .*test_nested_view_column_access.*", privilege("test_nested_view_column_access", SHOW_CREATE_TABLE)); - assertAccessAllowed("SHOW CREATE VIEW test_nested_view_column_access", privilege("test_denied_access_view", SHOW_CREATE_TABLE)); + assertAccessDenied("SHOW CREATE VIEW " + nestedViewName, "Cannot show create table for .*test_nested_view_column_access.*", privilege(nestedViewName, SHOW_CREATE_TABLE)); + assertAccessAllowed("SHOW CREATE VIEW " + nestedViewName, privilege("test_denied_access_view", SHOW_CREATE_TABLE)); - assertAccessAllowed(nestedViewOwnerSession, "DROP VIEW test_nested_view_column_access"); - assertAccessAllowed(viewOwnerSession, "DROP VIEW test_view_column_access"); - assertAccessAllowed(viewOwnerSession, "DROP VIEW test_invoker_view_column_access"); + assertAccessAllowed(nestedViewOwnerSession, "DROP VIEW " + nestedViewName); + assertAccessAllowed(viewOwnerSession, "DROP VIEW " + columnAccessViewName); + assertAccessAllowed(viewOwnerSession, "DROP VIEW " + invokerViewName); } @Test @@ -1144,37 +1159,39 @@ public void testViewFunctionAccessControl() // TEST FUNCTION PRIVILEGES // view creation permissions are only checked at query time, not at creation + String functionAccessViewName = "test_view_function_access_" + randomTableSuffix(); assertAccessAllowed( viewOwnerSession, - "CREATE VIEW test_view_function_access AS SELECT abs(1) AS c", + "CREATE VIEW " + 
functionAccessViewName + " AS SELECT abs(1) AS c", privilege("abs", GRANT_EXECUTE_FUNCTION)); assertAccessDenied( - "SELECT * FROM test_view_function_access", + "SELECT * FROM " + functionAccessViewName, "'test_view_access_owner' cannot grant 'abs' execution to user '\\w*'", privilege(viewOwnerSession.getUser(), "abs", GRANT_EXECUTE_FUNCTION)); // verify executing from a view over a function does not require the session user to have execute privileges on the underlying function assertAccessAllowed( - "SELECT * FROM test_view_function_access", + "SELECT * FROM " + functionAccessViewName, privilege(getSession().getUser(), "abs", EXECUTE_FUNCTION)); // TEST SECURITY INVOKER // view creation permissions are only checked at query time, not at creation + String invokerFunctionAccessViewName = "test_invoker_view_function_access_" + randomTableSuffix(); assertAccessAllowed( viewOwnerSession, - "CREATE VIEW test_invoker_view_function_access SECURITY INVOKER AS SELECT abs(1) AS c", + "CREATE VIEW " + invokerFunctionAccessViewName + " SECURITY INVOKER AS SELECT abs(1) AS c", privilege("abs", GRANT_EXECUTE_FUNCTION)); assertAccessAllowed( - "SELECT * FROM test_invoker_view_function_access", + "SELECT * FROM " + invokerFunctionAccessViewName, privilege(viewOwnerSession.getUser(), "abs", EXECUTE_FUNCTION)); assertAccessDenied( - "SELECT * FROM test_invoker_view_function_access", + "SELECT * FROM " + invokerFunctionAccessViewName, "Cannot execute function abs", privilege(getSession().getUser(), "abs", EXECUTE_FUNCTION)); - assertAccessAllowed(viewOwnerSession, "DROP VIEW test_view_function_access"); - assertAccessAllowed(viewOwnerSession, "DROP VIEW test_invoker_view_function_access"); + assertAccessAllowed(viewOwnerSession, "DROP VIEW " + functionAccessViewName); + assertAccessAllowed(viewOwnerSession, "DROP VIEW " + invokerFunctionAccessViewName); } @Test @@ -1216,12 +1233,13 @@ public void testComplexCast() @Test public void testCreateSchema() { - 
assertThat(computeActual("SHOW SCHEMAS").getOnlyColumnAsSet()).doesNotContain("test_schema_create"); - assertUpdate("CREATE SCHEMA test_schema_create"); - assertThat(computeActual("SHOW SCHEMAS").getOnlyColumnAsSet()).contains("test_schema_create"); - assertQueryFails("CREATE SCHEMA test_schema_create", "line 1:1: Schema '.*\\.test_schema_create' already exists"); - assertUpdate("DROP SCHEMA test_schema_create"); - assertQueryFails("DROP SCHEMA test_schema_create", "line 1:1: Schema '.*\\.test_schema_create' does not exist"); + String schemaName = "test_schema_create_" + randomTableSuffix(); + assertThat(computeActual("SHOW SCHEMAS").getOnlyColumnAsSet()).doesNotContain(schemaName); + assertUpdate("CREATE SCHEMA " + schemaName); + assertThat(computeActual("SHOW SCHEMAS").getOnlyColumnAsSet()).contains(schemaName); + assertQueryFails("CREATE SCHEMA " + schemaName, format("line 1:1: Schema '.*\\.%s' already exists", schemaName)); + assertUpdate("DROP SCHEMA " + schemaName); + assertQueryFails("DROP SCHEMA " + schemaName, format("line 1:1: Schema '.*\\.%s' does not exist", schemaName)); } @Test From 69f8c05c6b2ae750d45bbcf0849557f440dd28a0 Mon Sep 17 00:00:00 2001 From: Martin Traverso Date: Sun, 19 Apr 2020 17:53:20 -0700 Subject: [PATCH 282/519] Clean up planning of unnest The decision of how unnest expressions map to output fields is now determined by the analyzer. Doing this in the planner duplicates effort and is brittle. 
--- .../io/prestosql/sql/analyzer/Analysis.java | 38 ++++ .../sql/analyzer/StatementAnalyzer.java | 25 ++- .../sql/planner/RelationPlanner.java | 162 +++++++----------- 3 files changed, 117 insertions(+), 108 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/sql/analyzer/Analysis.java b/presto-main/src/main/java/io/prestosql/sql/analyzer/Analysis.java index cddf666cdbee..0f21f044eee8 100644 --- a/presto-main/src/main/java/io/prestosql/sql/analyzer/Analysis.java +++ b/presto-main/src/main/java/io/prestosql/sql/analyzer/Analysis.java @@ -58,6 +58,7 @@ import io.prestosql.sql.tree.Statement; import io.prestosql.sql.tree.SubqueryExpression; import io.prestosql.sql.tree.Table; +import io.prestosql.sql.tree.Unnest; import io.prestosql.transaction.TransactionId; import javax.annotation.Nullable; @@ -80,6 +81,7 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; import static com.google.common.collect.ImmutableList.toImmutableList; +import static com.google.common.collect.ImmutableMap.toImmutableMap; import static java.lang.String.format; import static java.util.Collections.emptyList; import static java.util.Collections.unmodifiableList; @@ -151,6 +153,7 @@ public class Analysis private final Multiset columnMaskScopes = HashMultiset.create(); private final Map, Map>> columnMasks = new LinkedHashMap<>(); + private final Map, UnnestAnalysis> unnestAnalysis = new LinkedHashMap<>(); private Optional create = Optional.empty(); private Optional insert = Optional.empty(); private Optional analyzeTarget = Optional.empty(); @@ -676,6 +679,16 @@ public JoinUsingAnalysis getJoinUsing(Join node) return joinUsing.get(NodeRef.of(node)); } + public void setUnnest(Unnest node, UnnestAnalysis analysis) + { + unnestAnalysis.put(NodeRef.of(node), analysis); + } + + public UnnestAnalysis getUnnest(Unnest node) + { + return unnestAnalysis.get(NodeRef.of(node)); + } + public void 
addTableColumnReferences(AccessControl accessControl, Identity identity, Multimap tableColumnMap) { AccessControlInfo accessControlInfo = new AccessControlInfo(accessControl, identity); @@ -987,6 +1000,31 @@ public List getComplexExpressions() } } + public static class UnnestAnalysis + { + private final Map, List> mappings; + private final Optional ordinalityField; + + public UnnestAnalysis(Map, List> mappings, Optional ordinalityField) + { + requireNonNull(mappings, "mappings is null"); + this.mappings = mappings.entrySet().stream() + .collect(toImmutableMap(Map.Entry::getKey, entry -> ImmutableList.copyOf(entry.getValue()))); + + this.ordinalityField = requireNonNull(ordinalityField, "ordinalityField is null"); + } + + public Map, List> getMappings() + { + return mappings; + } + + public Optional getOrdinalityField() + { + return ordinalityField; + } + } + public static final class AccessControlInfo { private final AccessControl accessControl; diff --git a/presto-main/src/main/java/io/prestosql/sql/analyzer/StatementAnalyzer.java b/presto-main/src/main/java/io/prestosql/sql/analyzer/StatementAnalyzer.java index 20419e3dc7ef..de90d9388271 100644 --- a/presto-main/src/main/java/io/prestosql/sql/analyzer/StatementAnalyzer.java +++ b/presto-main/src/main/java/io/prestosql/sql/analyzer/StatementAnalyzer.java @@ -57,6 +57,7 @@ import io.prestosql.spi.type.VarcharType; import io.prestosql.sql.SqlPath; import io.prestosql.sql.analyzer.Analysis.SelectExpression; +import io.prestosql.sql.analyzer.Analysis.UnnestAnalysis; import io.prestosql.sql.analyzer.Scope.AsteriskedIdentifierChainBasis; import io.prestosql.sql.parser.ParsingException; import io.prestosql.sql.parser.SqlParser; @@ -945,8 +946,12 @@ protected Scope visitQuery(Query node, Optional scope) @Override protected Scope visitUnnest(Unnest node, Optional scope) { + ImmutableMap.Builder, List> mappings = ImmutableMap., List>builder(); + ImmutableList.Builder outputFields = ImmutableList.builder(); for (Expression 
expression : node.getExpressions()) { + List expressionOutputs = new ArrayList<>(); + ExpressionAnalysis expressionAnalysis = analyzeExpression(expression, createScope(scope)); Type expressionType = expressionAnalysis.getType(expression); if (expressionType instanceof ArrayType) { @@ -954,23 +959,33 @@ protected Scope visitUnnest(Unnest node, Optional scope) if (elementType instanceof RowType) { ((RowType) elementType).getFields().stream() .map(field -> Field.newUnqualified(field.getName(), field.getType())) - .forEach(outputFields::add); + .forEach(expressionOutputs::add); } else { - outputFields.add(Field.newUnqualified(Optional.empty(), elementType)); + expressionOutputs.add(Field.newUnqualified(Optional.empty(), elementType)); } } else if (expressionType instanceof MapType) { - outputFields.add(Field.newUnqualified(Optional.empty(), ((MapType) expressionType).getKeyType())); - outputFields.add(Field.newUnqualified(Optional.empty(), ((MapType) expressionType).getValueType())); + expressionOutputs.add(Field.newUnqualified(Optional.empty(), ((MapType) expressionType).getKeyType())); + expressionOutputs.add(Field.newUnqualified(Optional.empty(), ((MapType) expressionType).getValueType())); } else { throw new PrestoException(INVALID_FUNCTION_ARGUMENT, "Cannot unnest type: " + expressionType); } + + outputFields.addAll(expressionOutputs); + mappings.put(NodeRef.of(expression), expressionOutputs); } + + Optional ordinalityField = Optional.empty(); if (node.isWithOrdinality()) { - outputFields.add(Field.newUnqualified(Optional.empty(), BIGINT)); + ordinalityField = Optional.of(Field.newUnqualified(Optional.empty(), BIGINT)); } + + ordinalityField.ifPresent(outputFields::add); + + analysis.setUnnest(node, new UnnestAnalysis(mappings.build(), ordinalityField)); + return createAndAssignScope(node, scope, outputFields.build()); } diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/RelationPlanner.java 
b/presto-main/src/main/java/io/prestosql/sql/planner/RelationPlanner.java index e4c97f2aa667..863c7c06432b 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/RelationPlanner.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/RelationPlanner.java @@ -18,17 +18,14 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.ListMultimap; -import com.google.common.collect.UnmodifiableIterator; import io.prestosql.Session; import io.prestosql.metadata.Metadata; import io.prestosql.metadata.TableHandle; import io.prestosql.spi.connector.ColumnHandle; -import io.prestosql.spi.type.ArrayType; -import io.prestosql.spi.type.MapType; -import io.prestosql.spi.type.RowType; import io.prestosql.spi.type.Type; import io.prestosql.sql.ExpressionUtils; import io.prestosql.sql.analyzer.Analysis; +import io.prestosql.sql.analyzer.Analysis.UnnestAnalysis; import io.prestosql.sql.analyzer.Field; import io.prestosql.sql.analyzer.RelationId; import io.prestosql.sql.analyzer.RelationType; @@ -84,17 +81,17 @@ import java.util.ArrayList; import java.util.HashMap; -import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; +import java.util.function.Function; import static com.google.common.base.Preconditions.checkArgument; -import static com.google.common.base.Preconditions.checkState; import static com.google.common.base.Verify.verify; import static com.google.common.collect.ImmutableList.toImmutableList; +import static com.google.common.collect.ImmutableMap.toImmutableMap; import static com.google.common.collect.Iterables.getOnlyElement; import static io.prestosql.spi.StandardErrorCode.NOT_SUPPORTED; import static io.prestosql.sql.analyzer.SemanticExceptions.semanticException; @@ -657,50 +654,6 @@ private static boolean isEqualComparisonExpression(Expression conjunct) private RelationPlan 
planJoinUnnest(RelationPlan leftPlan, Join joinNode, Unnest node) { - RelationType unnestOutputDescriptor = analysis.getOutputDescriptor(node); - // Create symbols for the result of unnesting - ImmutableList.Builder unnestedSymbolsBuilder = ImmutableList.builder(); - for (Field field : unnestOutputDescriptor.getVisibleFields()) { - Symbol symbol = symbolAllocator.newSymbol(field); - unnestedSymbolsBuilder.add(symbol); - } - ImmutableList unnestedSymbols = unnestedSymbolsBuilder.build(); - - // TODO do these need translation - // Add a projection for all the unnest arguments - PlanBuilder planBuilder = initializePlanBuilder(leftPlan); - planBuilder = planBuilder.appendProjections(node.getExpressions(), symbolAllocator, idAllocator); - TranslationMap translations = planBuilder.getTranslations(); - ProjectNode projectNode = (ProjectNode) planBuilder.getRoot(); - - ImmutableMap.Builder> unnestSymbols = ImmutableMap.builder(); - UnmodifiableIterator unnestedSymbolsIterator = unnestedSymbols.iterator(); - for (Expression expression : node.getExpressions()) { - Type type = analysis.getType(expression); - Symbol inputSymbol = translations.get(expression); - if (type instanceof ArrayType) { - Type elementType = ((ArrayType) type).getElementType(); - if (elementType instanceof RowType) { - ImmutableList.Builder unnestSymbolBuilder = ImmutableList.builder(); - for (int i = 0; i < ((RowType) elementType).getFields().size(); i++) { - unnestSymbolBuilder.add(unnestedSymbolsIterator.next()); - } - unnestSymbols.put(inputSymbol, unnestSymbolBuilder.build()); - } - else { - unnestSymbols.put(inputSymbol, ImmutableList.of(unnestedSymbolsIterator.next())); - } - } - else if (type instanceof MapType) { - unnestSymbols.put(inputSymbol, ImmutableList.of(unnestedSymbolsIterator.next(), unnestedSymbolsIterator.next())); - } - else { - throw new IllegalArgumentException("Unsupported type for UNNEST: " + type); - } - } - Optional ordinalitySymbol = node.isWithOrdinality() ? 
Optional.of(unnestedSymbolsIterator.next()) : Optional.empty(); - checkState(!unnestedSymbolsIterator.hasNext(), "Not all output symbols were matched with input symbols"); - Optional filterExpression = Optional.empty(); if (joinNode.getCriteria().isPresent()) { JoinCriteria criteria = joinNode.getCriteria().get(); @@ -719,16 +672,47 @@ else if (type instanceof MapType) { } } - UnnestNode unnestNode = new UnnestNode( - idAllocator.getNextId(), - projectNode, + return planUnnest( + initializePlanBuilder(leftPlan), + node, leftPlan.getFieldMappings(), - unnestSymbols.build(), - ordinalitySymbol, - JoinNode.Type.typeConvert(joinNode.getType()), - filterExpression); + filterExpression, + joinNode.getType(), + analysis.getScope(joinNode)); + } + + private RelationPlan planUnnest(PlanBuilder subPlan, Unnest node, List replicatedColumns, Optional filter, Join.Type type, Scope outputScope) + { + subPlan = subPlan.appendProjections(node.getExpressions(), symbolAllocator, idAllocator); + + Map allocations = analysis.getOutputDescriptor(node) + .getVisibleFields().stream() + .collect(toImmutableMap(Function.identity(), symbolAllocator::newSymbol)); + + UnnestAnalysis unnestAnalysis = analysis.getUnnest(node); + ImmutableMap.Builder> mappings = ImmutableMap.builder(); + for (Expression expression : node.getExpressions()) { + Symbol input = subPlan.translate(expression); + List outputs = unnestAnalysis.getMappings().get(NodeRef.of(expression)).stream() + .map(allocations::get) + .collect(toImmutableList()); + + mappings.put(input, outputs); + } - return new RelationPlan(unnestNode, analysis.getScope(joinNode), unnestNode.getOutputSymbols()); + UnnestNode unnestNode = new UnnestNode( + idAllocator.getNextId(), + subPlan.getRoot(), + replicatedColumns, + mappings.build(), + unnestAnalysis.getOrdinalityField().map(allocations::get), + JoinNode.Type.typeConvert(type), + filter); + + // TODO: Technically, we should derive the field mappings from the layout of fields and how they 
relate to the output symbols of the Unnest node. + // That's tricky to do for a Join+Unnest because the allocations come from the Unnest, but the mappings need to be done based on the Join output fields. + // Currently, it works out because, by construction, the order of the output symbols in the UnnestNode will match the order of the fields in the Join node. + return new RelationPlan(unnestNode, outputScope, unnestNode.getOutputSymbols()); } @Override @@ -794,52 +778,24 @@ protected RelationPlan visitValues(Values node, Void context) protected RelationPlan visitUnnest(Unnest node, Void context) { Scope scope = analysis.getScope(node); - ImmutableList.Builder outputSymbolsBuilder = ImmutableList.builder(); - for (Field field : scope.getRelationType().getVisibleFields()) { - Symbol symbol = symbolAllocator.newSymbol(field); - outputSymbolsBuilder.add(symbol); - } - List unnestedSymbols = outputSymbolsBuilder.build(); - - // If we got here, then we must be unnesting a constant, and not be in a join (where there could be column references) - TranslationMap translationMap = initializeTranslationMap(node, unnestedSymbols); - ImmutableList.Builder argumentSymbols = ImmutableList.builder(); - ImmutableList.Builder values = ImmutableList.builder(); - ImmutableMap.Builder> unnestSymbols = ImmutableMap.builder(); - Iterator unnestedSymbolsIterator = unnestedSymbols.iterator(); - for (Expression expression : node.getExpressions()) { - Type type = analysis.getType(expression); - Expression rewritten = translationMap.rewrite(expression); - rewritten = ExpressionTreeRewriter.rewriteWith(new ParameterRewriter(analysis), rewritten); - values.add(rewritten); - Symbol inputSymbol = symbolAllocator.newSymbol(rewritten, type); - argumentSymbols.add(inputSymbol); - if (type instanceof ArrayType) { - Type elementType = ((ArrayType) type).getElementType(); - if (elementType instanceof RowType) { - ImmutableList.Builder unnestSymbolBuilder = ImmutableList.builder(); - for (int i = 0; i < 
((RowType) elementType).getFields().size(); i++) { - unnestSymbolBuilder.add(unnestedSymbolsIterator.next()); - } - unnestSymbols.put(inputSymbol, unnestSymbolBuilder.build()); - } - else { - unnestSymbols.put(inputSymbol, ImmutableList.of(unnestedSymbolsIterator.next())); - } - } - else if (type instanceof MapType) { - unnestSymbols.put(inputSymbol, ImmutableList.of(unnestedSymbolsIterator.next(), unnestedSymbolsIterator.next())); - } - else { - throw new IllegalArgumentException("Unsupported type for UNNEST: " + type); - } - } - Optional ordinalitySymbol = node.isWithOrdinality() ? Optional.of(unnestedSymbolsIterator.next()) : Optional.empty(); - checkState(!unnestedSymbolsIterator.hasNext(), "Not all output symbols were matched with input symbols"); - ValuesNode valuesNode = new ValuesNode(idAllocator.getNextId(), argumentSymbols.build(), ImmutableList.of(values.build())); - UnnestNode unnestNode = new UnnestNode(idAllocator.getNextId(), valuesNode, ImmutableList.of(), unnestSymbols.build(), ordinalitySymbol, JoinNode.Type.INNER, Optional.empty()); - return new RelationPlan(unnestNode, scope, unnestedSymbols); + return planUnnest( + planSingleEmptyRow(scope.getOuterQueryParent()), + node, + ImmutableList.of(), + Optional.empty(), + INNER, + scope); + } + + private PlanBuilder planSingleEmptyRow(Optional parent) + { + Scope.Builder scope = Scope.builder(); + parent.ifPresent(scope::withOuterQueryParent); + + PlanNode values = new ValuesNode(idAllocator.getNextId(), ImmutableList.of(), ImmutableList.of(ImmutableList.of())); + TranslationMap translations = new TranslationMap(new RelationPlan(values, scope.build(), ImmutableList.of()), analysis, lambdaDeclarationToSymbolMap); + return new PlanBuilder(translations, values); } private TranslationMap initializeTranslationMap(Node node, List outputSymbols) From 75eabcd279e7fead4447057c9f78fe3754181338 Mon Sep 17 00:00:00 2001 From: Karol Sobczak Date: Fri, 7 Feb 2020 23:22:59 +0100 Subject: [PATCH 283/519] Port join 
operator to WorkProcessor internally This simplifies join operator as: 1. Probe is spilled in just one place. Previously probe was spilled when page was added or output was requested 2. Join operator state is now split between two internal WorkProcessors which a) join probe b) manage final unspilling 3. Operator state is not managed across multiple classic in/out operator methods --- .../java/io/prestosql/operator/JoinProbe.java | 9 +- .../operator/LookupJoinOperator.java | 848 ++++++++---------- .../PartitionedLookupSourceFactory.java | 4 +- 3 files changed, 390 insertions(+), 471 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/operator/JoinProbe.java b/presto-main/src/main/java/io/prestosql/operator/JoinProbe.java index adc1d7c442f3..438bdf3d5c85 100644 --- a/presto-main/src/main/java/io/prestosql/operator/JoinProbe.java +++ b/presto-main/src/main/java/io/prestosql/operator/JoinProbe.java @@ -20,6 +20,7 @@ import java.util.Optional; import java.util.OptionalInt; +import static com.google.common.base.Verify.verify; import static io.prestosql.spi.type.BigintType.BIGINT; public class JoinProbe @@ -73,8 +74,14 @@ public int[] getOutputChannels() public boolean advanceNextPosition() { + verify(position < positionCount, "already finished"); position++; - return position < positionCount; + return !isFinished(); + } + + public boolean isFinished() + { + return position == positionCount; } public long getCurrentJoinPosition(LookupSource lookupSource) diff --git a/presto-main/src/main/java/io/prestosql/operator/LookupJoinOperator.java b/presto-main/src/main/java/io/prestosql/operator/LookupJoinOperator.java index e18309c11807..9327ed02b974 100644 --- a/presto-main/src/main/java/io/prestosql/operator/LookupJoinOperator.java +++ b/presto-main/src/main/java/io/prestosql/operator/LookupJoinOperator.java @@ -13,14 +13,15 @@ */ package io.prestosql.operator; -import com.google.common.collect.ImmutableList; import com.google.common.io.Closer; +import 
com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import io.prestosql.memory.context.MemoryTrackingContext; import io.prestosql.operator.JoinProbe.JoinProbeFactory; import io.prestosql.operator.LookupJoinOperators.JoinType; import io.prestosql.operator.LookupSourceProvider.LookupSourceLease; import io.prestosql.operator.PartitionedConsumption.Partition; +import io.prestosql.operator.WorkProcessor.ProcessState; import io.prestosql.operator.WorkProcessor.Transformation; import io.prestosql.operator.WorkProcessor.TransformationState; import io.prestosql.operator.WorkProcessorOperatorAdapter.AdapterWorkProcessorOperator; @@ -43,20 +44,26 @@ import java.util.function.IntPredicate; import java.util.function.Supplier; +import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; +import static com.google.common.base.Suppliers.memoize; import static com.google.common.base.Verify.verify; import static com.google.common.base.Verify.verifyNotNull; +import static com.google.common.collect.Iterators.singletonIterator; +import static com.google.common.util.concurrent.MoreExecutors.directExecutor; import static io.airlift.concurrent.MoreFutures.addSuccessCallback; import static io.airlift.concurrent.MoreFutures.checkSuccess; import static io.airlift.concurrent.MoreFutures.getDone; import static io.prestosql.operator.LookupJoinOperators.JoinType.FULL_OUTER; import static io.prestosql.operator.LookupJoinOperators.JoinType.PROBE_OUTER; import static io.prestosql.operator.Operator.NOT_BLOCKED; +import static io.prestosql.operator.PartitionedLookupSourceFactory.NO_SPILL_EPOCH; import static io.prestosql.operator.WorkProcessor.TransformationState.blocked; import static io.prestosql.operator.WorkProcessor.TransformationState.finished; import static io.prestosql.operator.WorkProcessor.TransformationState.needsMoreData; import static 
io.prestosql.operator.WorkProcessor.TransformationState.ofResult; import static io.prestosql.operator.WorkProcessor.TransformationState.yield; +import static io.prestosql.operator.WorkProcessor.flatten; import static java.lang.String.format; import static java.util.Collections.emptyIterator; import static java.util.Objects.requireNonNull; @@ -64,9 +71,10 @@ public class LookupJoinOperator implements AdapterWorkProcessorOperator { + private final ListenableFuture lookupSourceProviderFuture; private final PageBuffer pageBuffer; private final WorkProcessor pages; - private final JoinProcessor joinProcessor; + private final SpillingJoinProcessor joinProcessor; private final JoinStatisticsCounter statisticsCounter; LookupJoinOperator(List probeTypes, @@ -82,21 +90,23 @@ public class LookupJoinOperator Optional> sourcePages) { this.statisticsCounter = new JoinStatisticsCounter(joinType); - this.joinProcessor = new JoinProcessor(probeTypes, - buildOutputTypes, - joinType, - lookupSourceFactory, - joinProbeFactory, + lookupSourceProviderFuture = lookupSourceFactory.createLookupSourceProvider(); + pageBuffer = new PageBuffer(lookupSourceProviderFuture); + joinProcessor = new SpillingJoinProcessor( + processorContext, afterClose, lookupJoinsCount, + probeTypes, + buildOutputTypes, + joinType, hashGenerator, - partitioningSpillerFactory, + joinProbeFactory, + lookupSourceFactory, + lookupSourceProviderFuture, statisticsCounter, - processorContext.getDriverYieldSignal(), - processorContext.getSpillContext(), - processorContext.getMemoryTrackingContext()); - pageBuffer = new PageBuffer(joinProcessor.lookupSourceProviderFuture); - pages = sourcePages.orElse(pageBuffer.pages()).transform(joinProcessor); + partitioningSpillerFactory, + sourcePages.orElse(pageBuffer.pages())); + pages = flatten(WorkProcessor.create(joinProcessor)); } @Override @@ -108,7 +118,7 @@ public Optional getOperatorInfo() @Override public boolean needsInput() { - return 
joinProcessor.lookupSourceProviderFuture.isDone() && pageBuffer.isEmpty() && !pageBuffer.isFinished(); + return lookupSourceProviderFuture.isDone() && pageBuffer.isEmpty() && !pageBuffer.isFinished(); } @Override @@ -135,408 +145,159 @@ public void close() joinProcessor.close(); } - private static class JoinProcessor + private static class PageJoiner implements Transformation { private final List probeTypes; private final JoinProbeFactory joinProbeFactory; - private final Runnable afterClose; - private final OptionalInt lookupJoinsCount; - private final HashGenerator hashGenerator; - private final LookupSourceFactory lookupSourceFactory; - private final PartitioningSpillerFactory partitioningSpillerFactory; - + private final ListenableFuture lookupSourceProviderFuture; + private final Optional partitioningSpillerFactory; + private final SpillContext spillContext; + private final MemoryTrackingContext memoryTrackingContext; private final JoinStatisticsCounter statisticsCounter; - + private final DriverYieldSignal yieldSignal; + private final Iterator savedRows; + private final Supplier partitionGenerator; private final LookupJoinPageBuilder pageBuilder; - + private final Map spilledRows = new HashMap<>(); private final boolean probeOnOuterSide; - private final ListenableFuture lookupSourceProviderFuture; + @Nullable private LookupSourceProvider lookupSourceProvider; + @Nullable private JoinProbe probe; - private boolean addedInputPage; - - private Page outputPage; - - private Optional spiller = Optional.empty(); - private Optional partitionGenerator = Optional.empty(); - private ListenableFuture spillInProgress = NOT_BLOCKED; - private long inputPageSpillEpoch; - private boolean closed; - private boolean finishing; - private boolean unspilling; - private boolean finished; + private long spillEpoch = NO_SPILL_EPOCH; private long joinPosition = -1; private int joinSourcePositions; - private boolean currentProbePositionProducedRow; - private final Map savedRows = new 
HashMap<>(); - @Nullable - private ListenableFuture>> partitionedConsumption; - @Nullable - private Iterator>> lookupPartitions; - private Optional>> currentPartition = Optional.empty(); - private Optional>> unspilledLookupSource = Optional.empty(); - private Iterator unspilledInputPages = emptyIterator(); - private final DriverYieldSignal yieldSignal; - private final SpillContext spillContext; - private final MemoryTrackingContext memoryTrackingContext; + private Optional spiller = Optional.empty(); + private ListenableFuture spillInProgress = NOT_BLOCKED; - public JoinProcessor( + private PageJoiner( + ProcessorContext processorContext, List probeTypes, List buildOutputTypes, JoinType joinType, - LookupSourceFactory lookupSourceFactory, - JoinProbeFactory joinProbeFactory, - Runnable afterClose, - OptionalInt lookupJoinsCount, HashGenerator hashGenerator, - PartitioningSpillerFactory partitioningSpillerFactory, + JoinProbeFactory joinProbeFactory, + LookupSourceFactory lookupSourceFactory, + ListenableFuture lookupSourceProvider, + Optional partitioningSpillerFactory, JoinStatisticsCounter statisticsCounter, - DriverYieldSignal yieldSignal, - SpillContext spillContext, - MemoryTrackingContext memoryTrackingContext) + Iterator savedRows) { - this.probeTypes = ImmutableList.copyOf(requireNonNull(probeTypes, "probeTypes is null")); - - requireNonNull(joinType, "joinType is null"); - // Cannot use switch case here, because javac will synthesize an inner class and cause IllegalAccessError - probeOnOuterSide = joinType == PROBE_OUTER || joinType == FULL_OUTER; - + requireNonNull(processorContext, "processorContext is null"); + this.probeTypes = requireNonNull(probeTypes, "probeTypes is null"); this.joinProbeFactory = requireNonNull(joinProbeFactory, "joinProbeFactory is null"); - this.afterClose = requireNonNull(afterClose, "afterClose is null"); - this.lookupJoinsCount = requireNonNull(lookupJoinsCount, "lookupJoinsCount is null"); - this.hashGenerator = 
requireNonNull(hashGenerator, "hashGenerator is null"); - this.lookupSourceFactory = requireNonNull(lookupSourceFactory, "lookupSourceFactory is null"); + this.lookupSourceProviderFuture = requireNonNull(lookupSourceProvider, "lookupSourceProvider is null"); this.partitioningSpillerFactory = requireNonNull(partitioningSpillerFactory, "partitioningSpillerFactory is null"); - this.lookupSourceProviderFuture = lookupSourceFactory.createLookupSourceProvider(); - - this.statisticsCounter = statisticsCounter; - + this.spillContext = processorContext.getSpillContext(); + this.memoryTrackingContext = processorContext.getMemoryTrackingContext(); + this.statisticsCounter = requireNonNull(statisticsCounter, "statisticsCounter is null"); + this.yieldSignal = processorContext.getDriverYieldSignal(); + this.savedRows = requireNonNull(savedRows, "savedRows is null"); + this.partitionGenerator = memoize(() -> new LocalPartitionGenerator(hashGenerator, lookupSourceFactory.partitions())); this.pageBuilder = new LookupJoinPageBuilder(buildOutputTypes); - this.yieldSignal = requireNonNull(yieldSignal, "yieldSignal is null"); - this.spillContext = requireNonNull(spillContext, "spillContext is null"); - this.memoryTrackingContext = requireNonNull(memoryTrackingContext, "memoryTrackingContext is null"); - } - - private void finish() - { - if (finishing) { - return; - } - - if (!spillInProgress.isDone()) { - return; - } - - checkSuccess(spillInProgress, "spilling failed"); - finishing = true; - } - - private boolean isFinished() - { - boolean finished = this.finished && probe == null && pageBuilder.isEmpty() && outputPage == null; - // if finished drop references so memory is freed early - if (finished) { - close(); - } - return finished; - } - - private ListenableFuture isBlocked() - { - if (!spillInProgress.isDone()) { - // Input spilling can happen only after lookupSourceProviderFuture was done. 
- return spillInProgress; - } - if (unspilledLookupSource.isPresent()) { - // Unspilling can happen only after lookupSourceProviderFuture was done. - return unspilledLookupSource.get(); - } - - if (finishing) { - return NOT_BLOCKED; - } - - return lookupSourceProviderFuture; - } - - private boolean needsInput() - { - return !finishing - && lookupSourceProviderFuture.isDone() - && spillInProgress.isDone() - && probe == null - && outputPage == null; - } - - private void addInput(Page page) - { - requireNonNull(page, "page is null"); - checkState(probe == null, "Current page has not been completely processed yet"); - - checkState(tryFetchLookupSourceProvider(), "Not ready to handle input yet"); - - SpillInfoSnapshot spillInfoSnapshot = lookupSourceProvider.withLease(SpillInfoSnapshot::from); - addInput(page, spillInfoSnapshot); + // Cannot use switch case here, because javac will synthesize an inner class and cause IllegalAccessError + probeOnOuterSide = joinType == PROBE_OUTER || joinType == FULL_OUTER; } - private void addInput(Page page, SpillInfoSnapshot spillInfoSnapshot) + @Override + public TransformationState process(@Nullable Page probePage) { - requireNonNull(spillInfoSnapshot, "spillInfoSnapshot is null"); + boolean finishing = probePage == null; - if (spillInfoSnapshot.hasSpilled()) { - page = spillAndMaskSpilledPositions(page, spillInfoSnapshot.getSpillMask()); - if (page.getPositionCount() == 0) { - return; + if (probe == null) { + if (!finishing) { + // create new probe for next probe page + probe = joinProbeFactory.createJoinProbe(probePage); + // force spill state check for new probe + spillEpoch = NO_SPILL_EPOCH; + } + else if (savedRows.hasNext()) { + // create probe from next saved row + restoreProbe(savedRows.next()); + } + else if (!spillInProgress.isDone()) { + // block on remaining spill before finishing + return blocked(spillInProgress); + } + else { + checkSuccess(spillInProgress, "spilling failed"); + close(); + return finished(); } } + 
verify(probe != null, "no probe to work with"); - // create probe - inputPageSpillEpoch = spillInfoSnapshot.getSpillEpoch(); - probe = joinProbeFactory.createJoinProbe(page); - } - - private boolean tryFetchLookupSourceProvider() - { if (lookupSourceProvider == null) { if (!lookupSourceProviderFuture.isDone()) { - return false; + return blocked(lookupSourceProviderFuture); } - lookupSourceProvider = requireNonNull(getDone(lookupSourceProviderFuture)); - statisticsCounter.updateLookupSourcePositions(lookupSourceProvider.withLease(lookupSourceLease -> lookupSourceLease.getLookupSource().getJoinPositionCount())); - } - return true; - } - - private Page spillAndMaskSpilledPositions(Page page, IntPredicate spillMask) - { - checkState(spillInProgress.isDone(), "Previous spill still in progress"); - checkSuccess(spillInProgress, "spilling failed"); - - if (!spiller.isPresent()) { - spiller = Optional.of(partitioningSpillerFactory.create( - probeTypes, - getPartitionGenerator(), - spillContext.newLocalSpillContext(), - memoryTrackingContext.newAggregateSystemMemoryContext())); - } - - PartitioningSpillResult result = spiller.get().partitionAndSpill(page, spillMask); - spillInProgress = result.getSpillingFuture(); - return result.getRetained(); - } - private LocalPartitionGenerator getPartitionGenerator() - { - if (!partitionGenerator.isPresent()) { - partitionGenerator = Optional.of(new LocalPartitionGenerator(hashGenerator, lookupSourceFactory.partitions())); - } - return partitionGenerator.get(); - } - - private Page getOutput() - { - // TODO introduce explicit state (enum), like in HBO - - if (!spillInProgress.isDone()) { - /* - * We cannot produce output when there is some previous input spilling. This is because getOutput() may result in additional portion of input being spilled - * (when spilling state has changed in partitioned lookup source since last time) and spiller does not allow multiple concurrent spills. 
- */ - return null; + lookupSourceProvider = requireNonNull(getDone(lookupSourceProviderFuture)); + statisticsCounter.updateLookupSourcePositions(lookupSourceProvider.withLease( + lookupSourceLease -> lookupSourceLease.getLookupSource().getJoinPositionCount())); } - checkSuccess(spillInProgress, "spilling failed"); - if (probe == null && pageBuilder.isEmpty() && !finishing) { - return null; - } + // Process probe or detect spill state change. Since we update spillEpoch only later, spill + // state change detection is idempotent. + Optional spillInfoSnapshotIfSpillChanged = processProbe(); - if (!tryFetchLookupSourceProvider()) { - if (!finishing) { - return null; + if (spillInfoSnapshotIfSpillChanged.isPresent()) { + if (!spillInProgress.isDone()) { + // block on previous spill + return blocked(spillInProgress); } + checkSuccess(spillInProgress, "spilling failed"); - verify(finishing); - // We are no longer interested in the build side (the lookupSourceProviderFuture's value). - addSuccessCallback(lookupSourceProviderFuture, LookupSourceProvider::close); - lookupSourceProvider = new StaticLookupSourceProvider(new EmptyLookupSource()); - } - - if (probe == null && finishing && !unspilling) { - /* - * We do not have input probe and we won't have any, as we're finishing. - * Let LookupSourceFactory know LookupSources can be disposed as far as we're concerned. - */ - verify(partitionedConsumption == null, "partitioned consumption already started"); - partitionedConsumption = lookupSourceFactory.finishProbeOperator(lookupJoinsCount); - unspilling = true; - } - - if (probe == null && unspilling && !finished) { - /* - * If no current partition or it was exhausted, unspill next one. - * Add input there when it needs one, produce output. Be Happy. 
- */ - tryUnspillNext(); - } - - if (probe != null) { - processProbe(); - } - - if (outputPage != null) { - verify(pageBuilder.isEmpty()); - Page output = outputPage; - outputPage = null; - return output; - } - - // It is impossible to have probe == null && !pageBuilder.isEmpty(), - // because we will flush a page whenever we reach the probe end - verify(probe != null || pageBuilder.isEmpty()); - return null; - } - - private void tryUnspillNext() - { - verify(probe == null); - - if (!partitionedConsumption.isDone()) { - return; - } + // flush any remaining output page for current probe + if (!pageBuilder.isEmpty()) { + return ofResult(buildOutputPage(), false); + } - if (lookupPartitions == null) { - lookupPartitions = getDone(partitionedConsumption).beginConsumption(); + spillJoinProbe(spillInfoSnapshotIfSpillChanged.get()); } - if (unspilledInputPages.hasNext()) { - addInput(unspilledInputPages.next()); - return; - } + if (!probe.isFinished()) { + // processProbe() returns when pageBuilder is full or yield signal is triggered. - if (unspilledLookupSource.isPresent()) { - if (!unspilledLookupSource.get().isDone()) { - // Not unspilled yet - return; + if (pageBuilder.isFull()) { + return ofResult(buildOutputPage(), false); } - LookupSource lookupSource = getDone(unspilledLookupSource.get()).get(); - unspilledLookupSource = Optional.empty(); - - // Close previous lookupSourceProvider (either supplied initially or for the previous partition) - lookupSourceProvider.close(); - lookupSourceProvider = new StaticLookupSourceProvider(lookupSource); - // If the partition was spilled during processing, its position count will be considered twice. 
- statisticsCounter.updateLookupSourcePositions(lookupSource.getJoinPositionCount()); - - int partition = currentPartition.get().number(); - unspilledInputPages = spiller.map(spiller -> spiller.getSpilledPages(partition)) - .orElse(emptyIterator()); - - Optional.ofNullable(savedRows.remove(partition)).ifPresent(savedRow -> { - restoreProbe( - savedRow.row, - savedRow.joinPositionWithinPartition, - savedRow.currentProbePositionProducedRow, - savedRow.joinSourcePositions, - SpillInfoSnapshot.noSpill()); - }); - return; + return yield(); } - if (lookupPartitions.hasNext()) { - currentPartition.ifPresent(Partition::release); - currentPartition = Optional.of(lookupPartitions.next()); - unspilledLookupSource = Optional.of(currentPartition.get().load()); - - return; + if (!pageBuilder.isEmpty() || finishing) { + // flush the current page (possibly empty one) and reset probe + Page outputPage = buildOutputPage(); + probe = null; + return ofResult(outputPage, !finishing); } - currentPartition.ifPresent(Partition::release); - if (lookupSourceProvider != null) { - // There are no more partitions to process, so clean up everything - lookupSourceProvider.close(); - lookupSourceProvider = null; - } - spiller.ifPresent(PartitioningSpiller::verifyAllPartitionsRead); - finished = true; + probe = null; + return needsMoreData(); } - private void processProbe() + private Optional processProbe() { - verifyNotNull(probe); - - Optional spillInfoSnapshotIfSpillChanged = lookupSourceProvider.withLease(lookupSourceLease -> { - if (lookupSourceLease.spillEpoch() == inputPageSpillEpoch) { - // Spill state didn't change, so process as usual. 
- processProbe(lookupSourceLease.getLookupSource()); - return Optional.empty(); + return lookupSourceProvider.withLease(lookupSourceLease -> { + if (spillEpoch != lookupSourceLease.spillEpoch()) { + // Spill state changed + return Optional.of(SpillInfoSnapshot.from(lookupSourceLease)); } - return Optional.of(SpillInfoSnapshot.from(lookupSourceLease)); + // Spill state didn't change, so process as usual. + processProbe(lookupSourceLease.getLookupSource()); + return Optional.empty(); }); - - if (!spillInfoSnapshotIfSpillChanged.isPresent()) { - return; - } - SpillInfoSnapshot spillInfoSnapshot = spillInfoSnapshotIfSpillChanged.get(); - long joinPositionWithinPartition; - if (joinPosition >= 0) { - joinPositionWithinPartition = lookupSourceProvider.withLease(lookupSourceLease -> lookupSourceLease.getLookupSource().joinPositionWithinPartition(joinPosition)); - } - else { - joinPositionWithinPartition = -1; - } - - /* - * Spill state changed. All probe rows that were not processed yet should be treated as regular input (and be partially spilled). - * If current row maps to the now-spilled partition, it needs to be saved for later. If it maps to a partition still in memory, it - * should be added together with not-yet-processed rows. In either case we need to resume processing the row starting at its - * current position in the lookup source. - */ - verify(spillInfoSnapshot.hasSpilled()); - verify(spillInfoSnapshot.getSpillEpoch() > inputPageSpillEpoch); - - Page currentPage = probe.getPage(); - int currentPosition = probe.getPosition(); - long currentJoinPosition = this.joinPosition; - boolean currentProbePositionProducedRow = this.currentProbePositionProducedRow; - - clearProbe(); - - if (currentPosition < 0) { - // Processing of the page hasn't been started yet. 
- addInput(currentPage, spillInfoSnapshot); - } - else { - int currentRowPartition = getPartitionGenerator().getPartition(currentPage, currentPosition); - boolean currentRowSpilled = spillInfoSnapshot.getSpillMask().test(currentRowPartition); - - if (currentRowSpilled) { - savedRows.merge( - currentRowPartition, - new SavedRow(currentPage, currentPosition, joinPositionWithinPartition, currentProbePositionProducedRow, joinSourcePositions), - (oldValue, newValue) -> { - throw new IllegalStateException(format("Partition %s is already spilled", currentRowPartition)); - }); - Page unprocessed = pageTail(currentPage, currentPosition + 1); - addInput(unprocessed, spillInfoSnapshot); - } - else { - Page remaining = pageTail(currentPage, currentPosition); - restoreProbe(remaining, currentJoinPosition, currentProbePositionProducedRow, joinSourcePositions, spillInfoSnapshot); - } - } } private void processProbe(LookupSource lookupSource) { - verifyNotNull(probe); - do { if (probe.getPosition() >= 0) { if (!joinCurrentPosition(lookupSource, yieldSignal)) { @@ -557,45 +318,6 @@ private void processProbe(LookupSource lookupSource) while (!yieldSignal.isSet()); } - private void restoreProbe(Page probePage, long joinPosition, boolean currentProbePositionProducedRow, int joinSourcePositions, SpillInfoSnapshot spillInfoSnapshot) - { - verify(probe == null); - - addInput(probePage, spillInfoSnapshot); - verify(probe.advanceNextPosition()); - this.joinPosition = joinPosition; - this.currentProbePositionProducedRow = currentProbePositionProducedRow; - this.joinSourcePositions = joinSourcePositions; - } - - private Page pageTail(Page currentPage, int startAtPosition) - { - verify(currentPage.getPositionCount() - startAtPosition >= 0); - return currentPage.getRegion(startAtPosition, currentPage.getPositionCount() - startAtPosition); - } - - public void close() - { - if (closed) { - return; - } - closed = true; - probe = null; - - try (Closer closer = Closer.create()) { - // `afterClose` 
must be run last. - // Closer is documented to mimic try-with-resource, which implies close will happen in reverse order. - closer.register(afterClose::run); - - closer.register(pageBuilder::reset); - closer.register(() -> Optional.ofNullable(lookupSourceProvider).ifPresent(LookupSourceProvider::close)); - spiller.ifPresent(closer::register); - } - catch (IOException e) { - throw new RuntimeException(e); - } - } - /** * Produce rows matching join condition for the current probe position. If this method was called previously * for the current probe position, calling this again will produce rows that wasn't been produced in previous @@ -617,20 +339,33 @@ private boolean joinCurrentPosition(LookupSource lookupSource, DriverYieldSignal // get next position on lookup side for this probe row joinPosition = lookupSource.getNextJoinPosition(joinPosition, probe.getPosition(), probe.getPage()); - if (yieldSignal.isSet() || tryBuildPage()) { + if (yieldSignal.isSet() || pageBuilder.isFull()) { return false; } } return true; } + /** + * Produce a row for the current probe position, if it doesn't match any row on lookup side and this is an outer join. + * + * @return whether pageBuilder became full + */ + private boolean outerJoinCurrentPosition() + { + if (probeOnOuterSide && joinPosition < 0) { + pageBuilder.appendNullForBuild(probe); + return !pageBuilder.isFull(); + } + return true; + } + /** * @return whether there are more positions on probe side */ private boolean advanceProbePosition(LookupSource lookupSource) { if (!probe.advanceNextPosition()) { - clearProbe(); return false; } @@ -642,105 +377,291 @@ private boolean advanceProbePosition(LookupSource lookupSource) return true; } - /** - * Produce a row for the current probe position, if it doesn't match any row on lookup side and this is an outer join. 
- * - * @return whether pageBuilder became full - */ - private boolean outerJoinCurrentPosition() + private void spillJoinProbe(SpillInfoSnapshot spillInfoSnapshot) { - if (probeOnOuterSide && joinPosition < 0) { - pageBuilder.appendNullForBuild(probe); - if (tryBuildPage()) { - return false; + verifyNotNull(probe, "probe is null"); + verify(pageBuilder.isEmpty(), "pageBuilder must be flushed before spill"); + checkArgument(spillInfoSnapshot.getSpillEpoch() > NO_SPILL_EPOCH, "invalid spill epoch"); + + /* + * Spill state changed. All probe rows that were not processed yet should be treated as regular input (and be partially spilled). + * If current row maps to the now-spilled partition, it needs to be saved for later. If it maps to a partition still in memory, it + * should be added together with not-yet-processed rows. In either case we need to resume processing the row starting at its + * current position in the lookup source. + */ + if (probe.getPosition() < 0) { + // Processing of the page hasn't been started yet. 
+ probe = joinProbeFactory.createJoinProbe(spillAndMaskSpilledPositions(probe.getPage(), spillInfoSnapshot)); + } + else { + int currentRowPartition = partitionGenerator.get().getPartition(probe.getPage(), probe.getPosition()); + boolean currentRowSpilled = spillInfoSnapshot.getSpillMask().test(currentRowPartition); + + if (currentRowSpilled) { + spilledRows.merge( + currentRowPartition, + new SavedRow(probe.getPage(), probe.getPosition(), getJoinPositionWithinPartition(), currentProbePositionProducedRow, joinSourcePositions), + (oldValue, newValue) -> { + throw new IllegalStateException(format("Partition %s is already spilled", currentRowPartition)); + }); + Page remaining = pageTail(probe.getPage(), probe.getPosition() + 1); + + // create probe starting from next position + probe = joinProbeFactory.createJoinProbe(spillAndMaskSpilledPositions(remaining, spillInfoSnapshot)); + resetProbeRowState(); + } + else { + Page remaining = pageTail(probe.getPage(), probe.getPosition()); + // create probe starting from current position and keep current row join state + probe = joinProbeFactory.createJoinProbe(spillAndMaskSpilledPositions(remaining, spillInfoSnapshot)); + verify(probe.advanceNextPosition()); } } - return true; + + spillEpoch = spillInfoSnapshot.getSpillEpoch(); } - @Override - public TransformationState process(@Nullable Page inputPage) + private Page spillAndMaskSpilledPositions(Page page, SpillInfoSnapshot spillInfoSnapshot) { - boolean inputFinished = inputPage == null; - if (inputFinished) { - finish(); - } + checkSuccess(spillInProgress, "spilling failed"); - if (isFinished()) { - return finished(); + if (!spiller.isPresent()) { + checkState(partitioningSpillerFactory.isPresent(), "Spiller factory is not present"); + spiller = Optional.of(partitioningSpillerFactory.get().create( + probeTypes, + partitionGenerator.get(), + spillContext.newLocalSpillContext(), + memoryTrackingContext.newAggregateSystemMemoryContext())); } - ListenableFuture blocked = 
isBlocked(); - if (!blocked.isDone()) { - return blocked(blocked); + PartitioningSpillResult result = spiller.get().partitionAndSpill(page, spillInfoSnapshot.getSpillMask()); + spillInProgress = result.getSpillingFuture(); + return result.getRetained(); + } + + private long getJoinPositionWithinPartition() + { + if (joinPosition >= 0) { + return lookupSourceProvider.withLease(lookupSourceLease -> lookupSourceLease.getLookupSource().joinPositionWithinPartition(joinPosition)); } + else { + return -1; + } + } + + private Page buildOutputPage() + { + verifyNotNull(probe); + Page outputPage = pageBuilder.build(probe); + pageBuilder.reset(); + return outputPage; + } - // Make sure probe page is added at most once as join operator can yield - // or return multiple output pages for single probe page. - if (!addedInputPage && !inputFinished) { - addInput(inputPage); - addedInputPage = true; + private void resetProbeRowState() + { + joinPosition = -1; + joinSourcePositions = 0; + currentProbePositionProducedRow = false; + } + + private void restoreProbe(SavedRow savedRow) + { + probe = joinProbeFactory.createJoinProbe(savedRow.row); + verify(probe.advanceNextPosition()); + joinPosition = savedRow.joinPositionWithinPartition; + currentProbePositionProducedRow = savedRow.currentProbePositionProducedRow; + joinSourcePositions = savedRow.joinSourcePositions; + spillEpoch = NO_SPILL_EPOCH; // irrelevant + } + + private Page pageTail(Page currentPage, int startAtPosition) + { + verify(currentPage.getPositionCount() - startAtPosition >= 0); + return currentPage.getRegion(startAtPosition, currentPage.getPositionCount() - startAtPosition); + } + + private Map getSpilledRows() + { + return spilledRows; + } + + private Optional getSpiller() + { + return spiller; + } + + private void close() + { + pageBuilder.reset(); + addSuccessCallback(lookupSourceProviderFuture, LookupSourceProvider::close); + } + } + + private static class SpillingJoinProcessor + implements WorkProcessor.Process> + 
{ + private final ProcessorContext processorContext; + private final Runnable afterClose; + private final OptionalInt lookupJoinsCount; + private final List probeTypes; + private final List buildOutputTypes; + private final JoinType joinType; + private final HashGenerator hashGenerator; + private final JoinProbeFactory joinProbeFactory; + private final LookupSourceFactory lookupSourceFactory; + private final JoinStatisticsCounter statisticsCounter; + private final PageJoiner sourcePagesJoiner; + private final WorkProcessor joinedSourcePages; + + private boolean closed; + + @Nullable + private ListenableFuture>> partitionedConsumption; + @Nullable + private Iterator>> lookupPartitions; + @Nullable + private Partition> previousPartition; + @Nullable + private ListenableFuture> previousPartitionLookupSource; + + private SpillingJoinProcessor( + ProcessorContext processorContext, + Runnable afterClose, + OptionalInt lookupJoinsCount, + List probeTypes, + List buildOutputTypes, + JoinType joinType, + HashGenerator hashGenerator, + JoinProbeFactory joinProbeFactory, + LookupSourceFactory lookupSourceFactory, + ListenableFuture lookupSourceProvider, + JoinStatisticsCounter statisticsCounter, + PartitioningSpillerFactory partitioningSpillerFactory, + WorkProcessor sourcePages) + { + this.processorContext = requireNonNull(processorContext, "processorContext is null"); + this.afterClose = requireNonNull(afterClose, "afterClose is null"); + this.lookupJoinsCount = requireNonNull(lookupJoinsCount, "lookupJoinsCount is null"); + this.probeTypes = requireNonNull(probeTypes, "probeTypes is null"); + this.buildOutputTypes = requireNonNull(buildOutputTypes, "buildOutputTypes is null"); + this.joinType = requireNonNull(joinType, "joinType is null"); + this.hashGenerator = requireNonNull(hashGenerator, "hashGenerator is null"); + this.joinProbeFactory = requireNonNull(joinProbeFactory, "joinProbeFactory is null"); + this.lookupSourceFactory = requireNonNull(lookupSourceFactory, 
"lookupSourceFactory is null"); + this.statisticsCounter = requireNonNull(statisticsCounter, "statisticsCounter is null"); + sourcePagesJoiner = new PageJoiner( + processorContext, + probeTypes, + buildOutputTypes, + joinType, + hashGenerator, + joinProbeFactory, + lookupSourceFactory, + lookupSourceProvider, + Optional.of(partitioningSpillerFactory), + statisticsCounter, + emptyIterator()); + joinedSourcePages = sourcePages.transform(sourcePagesJoiner); + } + + @Override + public ProcessState> process() + { + if (!joinedSourcePages.isFinished()) { + return ProcessState.ofResult(joinedSourcePages); } - Page outputPage = getOutput(); + if (partitionedConsumption == null) { + partitionedConsumption = lookupSourceFactory.finishProbeOperator(lookupJoinsCount); + return ProcessState.blocked(partitionedConsumption); + } - boolean fetchNextInputPage = !inputFinished && needsInput(); - if (fetchNextInputPage) { - // reset addedInputPage for the sake of next inputPage - addedInputPage = false; + if (lookupPartitions == null) { + lookupPartitions = getDone(partitionedConsumption).beginConsumption(); } - if (outputPage != null) { - return ofResult(outputPage, fetchNextInputPage); + if (previousPartition != null) { + // If we had no rows for the previous spill partition, we would finish before it is unspilled. + // Partition must be loaded before it can be released. 
// TODO remove this constraint + if (!previousPartitionLookupSource.isDone()) { + return ProcessState.blocked(previousPartitionLookupSource); + } + + previousPartition.release(); + previousPartition = null; + previousPartitionLookupSource = null; } - if (fetchNextInputPage) { - return needsMoreData(); + if (!lookupPartitions.hasNext()) { + close(); + return ProcessState.finished(); } - return yield(); + Partition> partition = lookupPartitions.next(); + previousPartition = partition; + previousPartitionLookupSource = partition.load(); + + return ProcessState.ofResult(joinUnspilledPages(partition)); } - private boolean tryBuildPage() + private WorkProcessor joinUnspilledPages(Partition> partition) { - if (pageBuilder.isFull()) { - buildPage(); - return true; - } - return false; + int partitionNumber = partition.number(); + WorkProcessor unspilledInputPages = WorkProcessor.fromIterator(sourcePagesJoiner.getSpiller() + .map(spiller -> spiller.getSpilledPages(partitionNumber)) + .orElse(emptyIterator())); + Iterator savedRow = Optional.ofNullable(sourcePagesJoiner.getSpilledRows().remove(partitionNumber)) + .map(row -> (Iterator) singletonIterator(row)) + .orElse(emptyIterator()); + + ListenableFuture unspilledLookupSourceProvider = Futures.transform( + partition.load(), + supplier -> new StaticLookupSourceProvider(supplier.get()), + directExecutor()); + + return unspilledInputPages.transform(new PageJoiner( + processorContext, + probeTypes, + buildOutputTypes, + joinType, + hashGenerator, + joinProbeFactory, + lookupSourceFactory, + unspilledLookupSourceProvider, + Optional.empty(), + statisticsCounter, + savedRow)); } - private void buildPage() + private void close() { - verify(outputPage == null); - verify(probe != null); - - if (pageBuilder.isEmpty()) { + if (closed) { return; } + closed = true; - outputPage = pageBuilder.build(probe); - pageBuilder.reset(); - } + try (Closer closer = Closer.create()) { + // `afterClose` must be run last. 
+ // Closer is documented to mimic try-with-resource, which implies close will happen in reverse order. + closer.register(afterClose::run); - private void clearProbe() - { - // Before updating the probe flush the current page - buildPage(); - probe = null; + closer.register(sourcePagesJoiner::close); + sourcePagesJoiner.getSpiller().ifPresent(closer::register); + } + catch (IOException e) { + throw new RuntimeException(e); + } } } // This class must be public because LookupJoinOperator is isolated. public static class SpillInfoSnapshot { - private final boolean hasSpilled; private final long spillEpoch; private final IntPredicate spillMask; - public SpillInfoSnapshot(boolean hasSpilled, long spillEpoch, IntPredicate spillMask) + public SpillInfoSnapshot(long spillEpoch, IntPredicate spillMask) { - this.hasSpilled = hasSpilled; this.spillEpoch = spillEpoch; this.spillMask = requireNonNull(spillMask, "spillMask is null"); } @@ -748,21 +669,10 @@ public SpillInfoSnapshot(boolean hasSpilled, long spillEpoch, IntPredicate spill public static SpillInfoSnapshot from(LookupSourceLease lookupSourceLease) { return new SpillInfoSnapshot( - lookupSourceLease.hasSpilled(), lookupSourceLease.spillEpoch(), lookupSourceLease.getSpillMask()); } - public static SpillInfoSnapshot noSpill() - { - return new SpillInfoSnapshot(false, 0, i -> false); - } - - public boolean hasSpilled() - { - return hasSpilled; - } - public long getSpillEpoch() { return spillEpoch; @@ -783,18 +693,18 @@ public static class SavedRow public final Page row; /** - * A snapshot of {@link JoinProcessor#joinPosition} "de-partitioned", i.e. {@link JoinProcessor#joinPosition} is a join position + * A snapshot of {@link PageJoiner#joinPosition} "de-partitioned", i.e. {@link PageJoiner#joinPosition} is a join position * with respect to (potentially) partitioned lookup source, while this value is a join position with respect to containing partition. 
*/ public final long joinPositionWithinPartition; /** - * A snapshot of {@link JoinProcessor#currentProbePositionProducedRow} + * A snapshot of {@link PageJoiner#currentProbePositionProducedRow} */ public final boolean currentProbePositionProducedRow; /** - * A snapshot of {@link JoinProcessor#joinSourcePositions} + * A snapshot of {@link PageJoiner#joinSourcePositions} */ public final int joinSourcePositions; diff --git a/presto-main/src/main/java/io/prestosql/operator/PartitionedLookupSourceFactory.java b/presto-main/src/main/java/io/prestosql/operator/PartitionedLookupSourceFactory.java index d5716451aa1b..6eaedbfc03e6 100644 --- a/presto-main/src/main/java/io/prestosql/operator/PartitionedLookupSourceFactory.java +++ b/presto-main/src/main/java/io/prestosql/operator/PartitionedLookupSourceFactory.java @@ -54,6 +54,8 @@ public final class PartitionedLookupSourceFactory implements LookupSourceFactory { + public static final long NO_SPILL_EPOCH = 0; + private final List types; private final List outputTypes; private final List hashChannelTypes; @@ -74,7 +76,7 @@ public final class PartitionedLookupSourceFactory private int partitionsSet; @GuardedBy("lock") - private SpillingInfo spillingInfo = new SpillingInfo(0, ImmutableSet.of()); + private SpillingInfo spillingInfo = new SpillingInfo(NO_SPILL_EPOCH, ImmutableSet.of()); @GuardedBy("lock") private final Map spilledPartitions = new HashMap<>(); From 1ab39682eb8859ad523e26c705df3f5a8a13dbbf Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Tue, 28 Apr 2020 12:05:05 +0200 Subject: [PATCH 284/519] Fix javadoc --- .../src/main/java/io/prestosql/operator/LookupJoinOperator.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/presto-main/src/main/java/io/prestosql/operator/LookupJoinOperator.java b/presto-main/src/main/java/io/prestosql/operator/LookupJoinOperator.java index 9327ed02b974..76902e181eea 100644 --- a/presto-main/src/main/java/io/prestosql/operator/LookupJoinOperator.java +++ 
b/presto-main/src/main/java/io/prestosql/operator/LookupJoinOperator.java @@ -349,7 +349,7 @@ private boolean joinCurrentPosition(LookupSource lookupSource, DriverYieldSignal /** * Produce a row for the current probe position, if it doesn't match any row on lookup side and this is an outer join. * - * @return whether pageBuilder became full + * @return whether pageBuilder can still not fill */ private boolean outerJoinCurrentPosition() { From d5bcdf9d0925ab900d9c744e7b76d1bd841f5f62 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Tue, 28 Apr 2020 12:05:15 +0200 Subject: [PATCH 285/519] Remove unsound check It's caller responsibility to choose when we append outer row. --- .../src/main/java/io/prestosql/operator/LookupJoinOperator.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/presto-main/src/main/java/io/prestosql/operator/LookupJoinOperator.java b/presto-main/src/main/java/io/prestosql/operator/LookupJoinOperator.java index 76902e181eea..6c88380a614f 100644 --- a/presto-main/src/main/java/io/prestosql/operator/LookupJoinOperator.java +++ b/presto-main/src/main/java/io/prestosql/operator/LookupJoinOperator.java @@ -353,7 +353,7 @@ private boolean joinCurrentPosition(LookupSource lookupSource, DriverYieldSignal */ private boolean outerJoinCurrentPosition() { - if (probeOnOuterSide && joinPosition < 0) { + if (probeOnOuterSide) { pageBuilder.appendNullForBuild(probe); return !pageBuilder.isFull(); } From a246feb89f785c99f5619c378eacdb30c869e3f0 Mon Sep 17 00:00:00 2001 From: praveenkrishna Date: Fri, 6 Mar 2020 13:04:45 +0530 Subject: [PATCH 286/519] Add Like predicate to SHOW COLUMNS --- .../sql/rewrite/ShowQueriesRewrite.java | 16 +++- .../prestosql/sql/query/TestShowQueries.java | 79 ++++++++++++++++++- .../antlr4/io/prestosql/sql/parser/SqlBase.g4 | 3 +- .../java/io/prestosql/sql/SqlFormatter.java | 8 ++ .../io/prestosql/sql/parser/AstBuilder.java | 8 +- .../io/prestosql/sql/tree/ShowColumns.java | 32 ++++++-- 
.../prestosql/sql/parser/TestSqlParser.java | 24 +++++- 7 files changed, 153 insertions(+), 17 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/sql/rewrite/ShowQueriesRewrite.java b/presto-main/src/main/java/io/prestosql/sql/rewrite/ShowQueriesRewrite.java index 697693aa7962..0adf9f52d0f4 100644 --- a/presto-main/src/main/java/io/prestosql/sql/rewrite/ShowQueriesRewrite.java +++ b/presto-main/src/main/java/io/prestosql/sql/rewrite/ShowQueriesRewrite.java @@ -381,6 +381,18 @@ protected Node visitShowColumns(ShowColumns showColumns, Void context) accessControl.checkCanShowColumns(session.toSecurityContext(), tableName.asCatalogSchemaTableName()); + Expression predicate = logicalAnd( + equal(identifier("table_schema"), new StringLiteral(tableName.getSchemaName())), + equal(identifier("table_name"), new StringLiteral(tableName.getObjectName()))); + Optional likePattern = showColumns.getLikePattern(); + if (likePattern.isPresent()) { + Expression likePredicate = new LikePredicate( + identifier("column_name"), + new StringLiteral(likePattern.get()), + showColumns.getEscape().map(StringLiteral::new)); + predicate = logicalAnd(predicate, likePredicate); + } + return simpleQuery( selectList( aliasedName("column_name", "Column"), @@ -388,9 +400,7 @@ protected Node visitShowColumns(ShowColumns showColumns, Void context) aliasedNullToEmpty("extra_info", "Extra"), aliasedNullToEmpty("comment", "Comment")), from(tableName.getCatalogName(), COLUMNS.getSchemaTableName()), - logicalAnd( - equal(identifier("table_schema"), new StringLiteral(tableName.getSchemaName())), - equal(identifier("table_name"), new StringLiteral(tableName.getObjectName()))), + predicate, ordering(ascending("ordinal_position"))); } diff --git a/presto-main/src/test/java/io/prestosql/sql/query/TestShowQueries.java b/presto-main/src/test/java/io/prestosql/sql/query/TestShowQueries.java index 3f04d117b859..0db5e7ce8e11 100644 --- 
a/presto-main/src/test/java/io/prestosql/sql/query/TestShowQueries.java +++ b/presto-main/src/test/java/io/prestosql/sql/query/TestShowQueries.java @@ -13,10 +13,20 @@ */ package io.prestosql.sql.query; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import io.prestosql.connector.MockConnectorFactory; +import io.prestosql.spi.connector.ColumnMetadata; +import io.prestosql.spi.connector.SchemaTableName; +import io.prestosql.testing.LocalQueryRunner; import org.testng.annotations.AfterClass; import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; +import static io.prestosql.spi.type.BigintType.BIGINT; +import static io.prestosql.testing.TestingSession.testSessionBuilder; +import static org.testng.Assert.assertEquals; + public class TestShowQueries { private QueryAssertions assertions; @@ -24,7 +34,31 @@ public class TestShowQueries @BeforeClass public void init() { - assertions = new QueryAssertions(); + LocalQueryRunner queryRunner = LocalQueryRunner.create(testSessionBuilder() + .setCatalog("local") + .setSchema("default") + .build()); + queryRunner.createCatalog( + "mock", + MockConnectorFactory.builder() + .withGetColumns(schemaTableName -> + ImmutableList.of( + ColumnMetadata.builder() + .setName("colaa") + .setType(BIGINT) + .build(), + ColumnMetadata.builder() + .setName("cola_") + .setType(BIGINT) + .build(), + ColumnMetadata.builder() + .setName("colabc") + .setType(BIGINT) + .build())) + .withListTables((session, schemaName) -> ImmutableList.of(new SchemaTableName("mockSchema", "mockTable"))) + .build(), + ImmutableMap.of()); + assertions = new QueryAssertions(queryRunner); } @AfterClass(alwaysRun = true) @@ -91,4 +125,47 @@ public void testListingEmptyCatalogs() assertions.getQueryRunner().getAccessControl().reset(); }); } + + @Test + public void testShowColumns() + { + assertions.assertQuery("SHOW COLUMNS FROM mock.mockSchema.mockTable", + "VALUES " + + "(CAST('colaa' AS VARCHAR), 
CAST('bigint' AS VARCHAR) , CAST('' AS VARCHAR), CAST('' AS VARCHAR))," + + "(CAST('cola_' AS VARCHAR), CAST('bigint' AS VARCHAR) , CAST('' AS VARCHAR), CAST('' AS VARCHAR))," + + "(CAST('colabc' AS VARCHAR), CAST('bigint' AS VARCHAR) , CAST('' AS VARCHAR), CAST('' AS VARCHAR))"); + } + + @Test + public void testShowColumnsLike() + { + assertions.assertQuery("SHOW COLUMNS FROM mock.mockSchema.mockTable like 'colabc'", + "VALUES (CAST('colabc' AS VARCHAR), CAST('bigint' AS VARCHAR) , CAST('' AS VARCHAR), CAST('' AS VARCHAR))"); + assertions.assertQuery("SHOW COLUMNS FROM mock.mockSchema.mockTable like 'cola%'", + "VALUES " + + "(CAST('colaa' AS VARCHAR), CAST('bigint' AS VARCHAR) , CAST('' AS VARCHAR), CAST('' AS VARCHAR))," + + "(CAST('cola_' AS VARCHAR), CAST('bigint' AS VARCHAR) , CAST('' AS VARCHAR), CAST('' AS VARCHAR))," + + "(CAST('colabc' AS VARCHAR), CAST('bigint' AS VARCHAR) , CAST('' AS VARCHAR), CAST('' AS VARCHAR))"); + assertions.assertQuery("SHOW COLUMNS FROM mock.mockSchema.mockTable like 'cola_'", + "VALUES " + + "(CAST('colaa' AS VARCHAR), CAST('bigint' AS VARCHAR) , CAST('' AS VARCHAR), CAST('' AS VARCHAR))," + + "(CAST('cola_' AS VARCHAR), CAST('bigint' AS VARCHAR) , CAST('' AS VARCHAR), CAST('' AS VARCHAR))"); + + assertions.assertQuery("SHOW COLUMNS FROM system.runtime.nodes LIKE 'node%'", + "VALUES " + + "(CAST('node_id' AS VARCHAR), CAST('varchar' AS VARCHAR) , CAST('' AS VARCHAR), CAST('' AS VARCHAR))," + + "(CAST('node_version' AS VARCHAR), CAST('varchar' AS VARCHAR) , CAST('' AS VARCHAR), CAST('' AS VARCHAR))"); + assertions.assertQuery("SHOW COLUMNS FROM system.runtime.nodes LIKE 'node_id'", + "VALUES (CAST('node_id' AS VARCHAR), CAST('varchar' AS VARCHAR) , CAST('' AS VARCHAR), CAST('' AS VARCHAR))"); + assertEquals(assertions.execute("SHOW COLUMNS FROM system.runtime.nodes LIKE ''").getRowCount(), 0); + } + + @Test + public void testShowColumnsWithLikeWithEscape() + { + assertions.assertFails("SHOW COLUMNS FROM system.runtime.nodes LIKE 
't$_%' ESCAPE ''", "Escape string must be a single character"); + assertions.assertFails("SHOW COLUMNS FROM system.runtime.nodes LIKE 't$_%' ESCAPE '$$'", "Escape string must be a single character"); + assertions.assertQuery("SHOW COLUMNS FROM mock.mockSchema.mockTable LIKE 'cola$_' ESCAPE '$'", + "VALUES (CAST('cola_' AS VARCHAR), CAST('bigint' AS VARCHAR) , CAST('' AS VARCHAR), CAST('' AS VARCHAR))"); + } } diff --git a/presto-parser/src/main/antlr4/io/prestosql/sql/parser/SqlBase.g4 b/presto-parser/src/main/antlr4/io/prestosql/sql/parser/SqlBase.g4 index 2c8cac0da5bd..14decc3d6107 100644 --- a/presto-parser/src/main/antlr4/io/prestosql/sql/parser/SqlBase.g4 +++ b/presto-parser/src/main/antlr4/io/prestosql/sql/parser/SqlBase.g4 @@ -105,7 +105,8 @@ statement (LIKE pattern=string (ESCAPE escape=string)?)? #showSchemas | SHOW CATALOGS (LIKE pattern=string (ESCAPE escape=string)?)? #showCatalogs - | SHOW COLUMNS (FROM | IN) qualifiedName #showColumns + | SHOW COLUMNS (FROM | IN) qualifiedName? + (LIKE pattern=string (ESCAPE escape=string)?)? #showColumns | SHOW STATS FOR qualifiedName #showStats | SHOW STATS FOR '(' querySpecification ')' #showStatsForQuery | SHOW CURRENT? ROLES ((FROM | IN) identifier)? 
#showRoles diff --git a/presto-parser/src/main/java/io/prestosql/sql/SqlFormatter.java b/presto-parser/src/main/java/io/prestosql/sql/SqlFormatter.java index 85000e1a70d9..d0cbdb6d716f 100644 --- a/presto-parser/src/main/java/io/prestosql/sql/SqlFormatter.java +++ b/presto-parser/src/main/java/io/prestosql/sql/SqlFormatter.java @@ -756,6 +756,14 @@ protected Void visitShowColumns(ShowColumns node, Integer context) builder.append("SHOW COLUMNS FROM ") .append(formatName(node.getTable())); + node.getLikePattern().ifPresent(value -> + builder.append(" LIKE ") + .append(formatStringLiteral(value))); + + node.getEscape().ifPresent(value -> + builder.append(" ESCAPE ") + .append(formatStringLiteral(value))); + return null; } diff --git a/presto-parser/src/main/java/io/prestosql/sql/parser/AstBuilder.java b/presto-parser/src/main/java/io/prestosql/sql/parser/AstBuilder.java index 99bf34471792..7ef5e197f3ce 100644 --- a/presto-parser/src/main/java/io/prestosql/sql/parser/AstBuilder.java +++ b/presto-parser/src/main/java/io/prestosql/sql/parser/AstBuilder.java @@ -880,7 +880,13 @@ public Node visitShowCatalogs(SqlBaseParser.ShowCatalogsContext context) @Override public Node visitShowColumns(SqlBaseParser.ShowColumnsContext context) { - return new ShowColumns(getLocation(context), getQualifiedName(context.qualifiedName())); + return new ShowColumns( + getLocation(context), + getQualifiedName(context.qualifiedName()), + getTextIfPresent(context.pattern) + .map(AstBuilder::unquote), + getTextIfPresent(context.escape) + .map(AstBuilder::unquote)); } @Override diff --git a/presto-parser/src/main/java/io/prestosql/sql/tree/ShowColumns.java b/presto-parser/src/main/java/io/prestosql/sql/tree/ShowColumns.java index bfd5573ac94f..920dfc2d7c70 100644 --- a/presto-parser/src/main/java/io/prestosql/sql/tree/ShowColumns.java +++ b/presto-parser/src/main/java/io/prestosql/sql/tree/ShowColumns.java @@ -26,21 +26,25 @@ public class ShowColumns extends Statement { private final 
QualifiedName table; + private final Optional likePattern; + private final Optional escape; - public ShowColumns(QualifiedName table) + public ShowColumns(QualifiedName table, Optional likePattern, Optional escape) { - this(Optional.empty(), table); + this(Optional.empty(), table, likePattern, escape); } - public ShowColumns(NodeLocation location, QualifiedName table) + public ShowColumns(NodeLocation location, QualifiedName table, Optional likePattern, Optional escape) { - this(Optional.of(location), table); + this(Optional.of(location), table, likePattern, escape); } - private ShowColumns(Optional location, QualifiedName table) + private ShowColumns(Optional location, QualifiedName table, Optional likePattern, Optional escape) { super(location); this.table = requireNonNull(table, "table is null"); + this.likePattern = requireNonNull(likePattern, "likePattern is null"); + this.escape = requireNonNull(escape, "escape is null"); } public QualifiedName getTable() @@ -48,6 +52,16 @@ public QualifiedName getTable() return table; } + public Optional getLikePattern() + { + return likePattern; + } + + public Optional getEscape() + { + return escape; + } + @Override public R accept(AstVisitor visitor, C context) { @@ -63,7 +77,7 @@ public List getChildren() @Override public int hashCode() { - return Objects.hash(table); + return Objects.hash(table, likePattern, escape); } @Override @@ -76,7 +90,9 @@ public boolean equals(Object obj) return false; } ShowColumns o = (ShowColumns) obj; - return Objects.equals(table, o.table); + return Objects.equals(table, o.table) && + Objects.equals(likePattern, o.likePattern) && + Objects.equals(escape, o.escape); } @Override @@ -84,6 +100,8 @@ public String toString() { return toStringHelper(this) .add("table", table) + .add("likePattern", likePattern) + .add("escape", escape) .toString(); } } diff --git a/presto-parser/src/test/java/io/prestosql/sql/parser/TestSqlParser.java 
b/presto-parser/src/test/java/io/prestosql/sql/parser/TestSqlParser.java index b54e6a3b433e..21841fa431c7 100644 --- a/presto-parser/src/test/java/io/prestosql/sql/parser/TestSqlParser.java +++ b/presto-parser/src/test/java/io/prestosql/sql/parser/TestSqlParser.java @@ -791,10 +791,13 @@ public void testShowTables() @Test public void testShowColumns() { - assertStatement("SHOW COLUMNS FROM a", new ShowColumns(QualifiedName.of("a"))); - assertStatement("SHOW COLUMNS FROM a.b", new ShowColumns(QualifiedName.of("a", "b"))); - assertStatement("SHOW COLUMNS FROM \"awesome table\"", new ShowColumns(QualifiedName.of("awesome table"))); - assertStatement("SHOW COLUMNS FROM \"awesome schema\".\"awesome table\"", new ShowColumns(QualifiedName.of("awesome schema", "awesome table"))); + assertStatement("SHOW COLUMNS FROM a", new ShowColumns(QualifiedName.of("a"), Optional.empty(), Optional.empty())); + assertStatement("SHOW COLUMNS FROM a.b", new ShowColumns(QualifiedName.of("a", "b"), Optional.empty(), Optional.empty())); + assertStatement("SHOW COLUMNS FROM \"awesome table\"", new ShowColumns(QualifiedName.of("awesome table"), Optional.empty(), Optional.empty())); + assertStatement("SHOW COLUMNS FROM \"awesome schema\".\"awesome table\"", new ShowColumns(QualifiedName.of("awesome schema", "awesome table"), Optional.empty(), Optional.empty())); + assertStatement("SHOW COLUMNS FROM a.b LIKE '%$_%' ESCAPE '$'", new ShowColumns(QualifiedName.of("a", "b"), Optional.of("%$_%"), Optional.of("$"))); + assertInvalidStatement("SHOW COLUMNS FROM a.b LIKE null", "mismatched input 'null'. Expecting: "); + assertInvalidStatement("SHOW COLUMNS FROM a.b LIKE 'a' ESCAPE null'", "mismatched input 'null'. 
Expecting: "); } @Test @@ -2503,6 +2506,19 @@ private static void assertStatement(String query, Statement expected) assertFormattedSql(SQL_PARSER, expected); } + private static void assertInvalidStatement(String statement, String expectedErrorMessageRegex) + { + try { + Statement result = SQL_PARSER.createStatement(statement, new ParsingOptions()); + fail("Expected to throw ParsingException for input:[" + statement + "], but got: " + result); + } + catch (ParsingException e) { + if (!e.getErrorMessage().matches(expectedErrorMessageRegex)) { + fail(format("Expected error message to match '%s', but was: '%s'", expectedErrorMessageRegex, e.getErrorMessage())); + } + } + } + private static void assertExpression(String expression, Expression expected) { assertParsed(expression, expected, SQL_PARSER.createExpression(expression, new ParsingOptions(AS_DECIMAL))); From bc35fdb23ebc4180594a41a5690b130108bdc808 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=81ukasz=20Osipiuk?= Date: Tue, 28 Apr 2020 15:10:32 +0200 Subject: [PATCH 287/519] Make Hive caching and S3 security mapping mutually exclusive --- .../plugin/hive/TestHiveHadoop2Plugin.java | 44 +++++++++++++++++++ .../plugin/hive/s3/HiveS3Module.java | 2 + 2 files changed, 46 insertions(+) create mode 100644 presto-hive-hadoop2/src/test/java/io/prestosql/plugin/hive/TestHiveHadoop2Plugin.java diff --git a/presto-hive-hadoop2/src/test/java/io/prestosql/plugin/hive/TestHiveHadoop2Plugin.java b/presto-hive-hadoop2/src/test/java/io/prestosql/plugin/hive/TestHiveHadoop2Plugin.java new file mode 100644 index 000000000000..76cec1d60377 --- /dev/null +++ b/presto-hive-hadoop2/src/test/java/io/prestosql/plugin/hive/TestHiveHadoop2Plugin.java @@ -0,0 +1,44 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.plugin.hive; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Iterables; +import io.prestosql.spi.Plugin; +import io.prestosql.spi.connector.ConnectorFactory; +import io.prestosql.testing.TestingConnectorContext; +import org.testng.annotations.Test; + +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +public class TestHiveHadoop2Plugin +{ + @Test + public void testS3SecurityMappingAndHiveCachingMutuallyExclusive() + { + Plugin plugin = new HiveHadoop2Plugin(); + ConnectorFactory connectorFactory = Iterables.getOnlyElement(plugin.getConnectorFactories()); + + assertThatThrownBy(() -> { + connectorFactory.create( + "test", + ImmutableMap.builder() + .put("hive.s3.security-mapping.config-file", "/tmp/blah.txt") + .put("hive.cache.enabled", "true") + .build(), + new TestingConnectorContext()) + .shutdown(); + }).hasMessageContaining("S3 security mapping is not compatible with Hive caching"); + } +} diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/HiveS3Module.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/HiveS3Module.java index 68ee66dd6bce..6ad75426a329 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/HiveS3Module.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/s3/HiveS3Module.java @@ -21,6 +21,7 @@ import io.prestosql.plugin.hive.ConfigurationInitializer; import io.prestosql.plugin.hive.DynamicConfigurationProvider; import io.prestosql.plugin.hive.HiveConfig; +import 
io.prestosql.plugin.hive.rubix.RubixEnabledConfig; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.JavaUtils; @@ -65,6 +66,7 @@ private void bindSecurityMapping(Binder binder) { if (buildConfigObject(S3SecurityMappingConfig.class).getConfigFile().isPresent()) { checkArgument(!buildConfigObject(HiveConfig.class).isS3SelectPushdownEnabled(), "S3 security mapping is not compatible with S3 Select pushdown"); + checkArgument(!buildConfigObject(RubixEnabledConfig.class).isCacheEnabled(), "S3 security mapping is not compatible with Hive caching"); newSetBinder(binder, DynamicConfigurationProvider.class).addBinding() .to(S3SecurityMappingConfigurationProvider.class).in(Scopes.SINGLETON); From 6912112ed277c6c8bdb0963466586809743f6fab Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=81ukasz=20Osipiuk?= Date: Tue, 28 Apr 2020 15:13:04 +0200 Subject: [PATCH 288/519] Make Hive caching and GCS access token mutually exclusive --- .../plugin/hive/TestHiveHadoop2Plugin.java | 18 ++++++++++++++++++ .../plugin/hive/gcs/HiveGcsModule.java | 3 +++ 2 files changed, 21 insertions(+) diff --git a/presto-hive-hadoop2/src/test/java/io/prestosql/plugin/hive/TestHiveHadoop2Plugin.java b/presto-hive-hadoop2/src/test/java/io/prestosql/plugin/hive/TestHiveHadoop2Plugin.java index 76cec1d60377..8d7bf2dfc46b 100644 --- a/presto-hive-hadoop2/src/test/java/io/prestosql/plugin/hive/TestHiveHadoop2Plugin.java +++ b/presto-hive-hadoop2/src/test/java/io/prestosql/plugin/hive/TestHiveHadoop2Plugin.java @@ -41,4 +41,22 @@ public void testS3SecurityMappingAndHiveCachingMutuallyExclusive() .shutdown(); }).hasMessageContaining("S3 security mapping is not compatible with Hive caching"); } + + @Test + public void testGcsAccessTokenAndHiveCachingMutuallyExclusive() + { + Plugin plugin = new HiveHadoop2Plugin(); + ConnectorFactory connectorFactory = Iterables.getOnlyElement(plugin.getConnectorFactories()); + + assertThatThrownBy(() -> { + connectorFactory.create( + "test", + 
ImmutableMap.builder() + .put("hive.gcs.use-access-token", "true") + .put("hive.cache.enabled", "true") + .build(), + new TestingConnectorContext()) + .shutdown(); + }).hasMessageContaining("Use of GCS access token is not compatible with Hive caching"); + } } diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/gcs/HiveGcsModule.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/gcs/HiveGcsModule.java index ff7672ece511..3ee9ffb76bf6 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/gcs/HiveGcsModule.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/gcs/HiveGcsModule.java @@ -18,7 +18,9 @@ import io.airlift.configuration.AbstractConfigurationAwareModule; import io.prestosql.plugin.hive.ConfigurationInitializer; import io.prestosql.plugin.hive.DynamicConfigurationProvider; +import io.prestosql.plugin.hive.rubix.RubixEnabledConfig; +import static com.google.common.base.Preconditions.checkArgument; import static com.google.inject.multibindings.Multibinder.newSetBinder; import static io.airlift.configuration.ConfigBinder.configBinder; @@ -33,6 +35,7 @@ protected void setup(Binder binder) newSetBinder(binder, ConfigurationInitializer.class).addBinding().to(GoogleGcsConfigurationInitializer.class).in(Scopes.SINGLETON); if (buildConfigObject(HiveGcsConfig.class).isUseGcsAccessToken()) { + checkArgument(!buildConfigObject(RubixEnabledConfig.class).isCacheEnabled(), "Use of GCS access token is not compatible with Hive caching"); newSetBinder(binder, DynamicConfigurationProvider.class).addBinding().to(GcsConfigurationProvider.class).in(Scopes.SINGLETON); } } From 5972bd332a4042f8dcc27a2a1552620408956800 Mon Sep 17 00:00:00 2001 From: Martin Traverso Date: Mon, 27 Apr 2020 16:46:23 -0700 Subject: [PATCH 289/519] Verify support for correlation during analysis --- .../io/prestosql/sql/analyzer/Analyzer.java | 2 +- .../sql/analyzer/CorrelationSupport.java | 20 +++++ .../sql/analyzer/ExpressionAnalyzer.java | 38 +++++++-- 
.../sql/analyzer/StatementAnalyzer.java | 48 ++++++++--- .../sql/planner/RelationPlanner.java | 3 +- .../sql/planner/SubqueryPlanner.java | 82 +++++++------------ .../testing/AbstractTestJoinQueries.java | 2 +- .../testing/AbstractTestQueries.java | 8 +- 8 files changed, 123 insertions(+), 80 deletions(-) create mode 100644 presto-main/src/main/java/io/prestosql/sql/analyzer/CorrelationSupport.java diff --git a/presto-main/src/main/java/io/prestosql/sql/analyzer/Analyzer.java b/presto-main/src/main/java/io/prestosql/sql/analyzer/Analyzer.java index cf0e04d98587..71a7bfb339f8 100644 --- a/presto-main/src/main/java/io/prestosql/sql/analyzer/Analyzer.java +++ b/presto-main/src/main/java/io/prestosql/sql/analyzer/Analyzer.java @@ -79,7 +79,7 @@ public Analysis analyze(Statement statement, boolean isDescribe) { Statement rewrittenStatement = StatementRewrite.rewrite(session, metadata, sqlParser, queryExplainer, statement, parameters, parameterLookup, accessControl, warningCollector); Analysis analysis = new Analysis(rewrittenStatement, parameterLookup, isDescribe); - StatementAnalyzer analyzer = new StatementAnalyzer(analysis, metadata, sqlParser, accessControl, session, warningCollector); + StatementAnalyzer analyzer = new StatementAnalyzer(analysis, metadata, sqlParser, accessControl, session, warningCollector, CorrelationSupport.ALLOWED); analyzer.analyze(rewrittenStatement, Optional.empty()); // check column access permissions for each table diff --git a/presto-main/src/main/java/io/prestosql/sql/analyzer/CorrelationSupport.java b/presto-main/src/main/java/io/prestosql/sql/analyzer/CorrelationSupport.java new file mode 100644 index 000000000000..3f0e9c61d2a3 --- /dev/null +++ b/presto-main/src/main/java/io/prestosql/sql/analyzer/CorrelationSupport.java @@ -0,0 +1,20 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.sql.analyzer; + +public enum CorrelationSupport +{ + ALLOWED, + DISALLOWED +} diff --git a/presto-main/src/main/java/io/prestosql/sql/analyzer/ExpressionAnalyzer.java b/presto-main/src/main/java/io/prestosql/sql/analyzer/ExpressionAnalyzer.java index 64e48b6886f9..af8398dd953f 100644 --- a/presto-main/src/main/java/io/prestosql/sql/analyzer/ExpressionAnalyzer.java +++ b/presto-main/src/main/java/io/prestosql/sql/analyzer/ExpressionAnalyzer.java @@ -203,6 +203,7 @@ public class ExpressionAnalyzer private final Map, Expression> parameters; private final WarningCollector warningCollector; private final TypeCoercion typeCoercion; + private final CorrelationSupport correlationSupport; public ExpressionAnalyzer( Metadata metadata, @@ -212,7 +213,8 @@ public ExpressionAnalyzer( TypeProvider symbolTypes, Map, Expression> parameters, WarningCollector warningCollector, - boolean isDescribe) + boolean isDescribe, + CorrelationSupport correlationSupport) { this.metadata = requireNonNull(metadata, "metadata is null"); this.accessControl = requireNonNull(accessControl, "accessControl is null"); @@ -223,6 +225,7 @@ public ExpressionAnalyzer( this.isDescribe = isDescribe; this.warningCollector = requireNonNull(warningCollector, "warningCollector is null"); this.typeCoercion = new TypeCoercion(metadata::getType); + this.correlationSupport = requireNonNull(correlationSupport, "correlationSupport is null"); } public Map, ResolvedFunction> getResolvedFunctions() @@ -411,6 +414,10 @@ protected Type visitIdentifier(Identifier node, 
StackableAstVisitorContext context) { + if (!resolvedField.isLocal() && correlationSupport != CorrelationSupport.ALLOWED) { + throw semanticException(NOT_SUPPORTED, node, "Reference to column '%s' from outer scope not allowed in this context", node); + } + return handleResolvedField(node, FieldId.from(resolvedField), resolvedField.getField(), context); } @@ -991,7 +998,8 @@ public List getCallArgumentTypes(List argumen symbolTypes, parameters, warningCollector, - isDescribe); + isDescribe, + correlationSupport); if (context.getContext().isInLambda()) { for (LambdaArgumentDeclaration lambdaArgument : context.getContext().getFieldToLambdaArgumentDeclaration().values()) { innerExpressionAnalyzer.setExpressionType(lambdaArgument, getExpressionType(lambdaArgument)); @@ -1586,9 +1594,10 @@ public static ExpressionAnalysis analyzeExpression( Scope scope, Analysis analysis, Expression expression, - WarningCollector warningCollector) + WarningCollector warningCollector, + CorrelationSupport correlationSupport) { - ExpressionAnalyzer analyzer = create(analysis, session, metadata, sqlParser, accessControl, TypeProvider.empty(), warningCollector); + ExpressionAnalyzer analyzer = create(analysis, session, metadata, sqlParser, accessControl, TypeProvider.empty(), warningCollector, correlationSupport); analyzer.analyze(expression, scope); Map, Type> expressionTypes = analyzer.getExpressionTypes(); @@ -1628,16 +1637,30 @@ public static ExpressionAnalyzer create( AccessControl accessControl, TypeProvider types, WarningCollector warningCollector) + { + return create(analysis, session, metadata, sqlParser, accessControl, types, warningCollector, CorrelationSupport.ALLOWED); + } + + public static ExpressionAnalyzer create( + Analysis analysis, + Session session, + Metadata metadata, + SqlParser sqlParser, + AccessControl accessControl, + TypeProvider types, + WarningCollector warningCollector, + CorrelationSupport correlationSupport) { return new ExpressionAnalyzer( metadata, 
accessControl, - node -> new StatementAnalyzer(analysis, metadata, sqlParser, accessControl, session, warningCollector), + node -> new StatementAnalyzer(analysis, metadata, sqlParser, accessControl, session, warningCollector, correlationSupport), session, types, analysis.getParameters(), warningCollector, - analysis.isDescribe()); + analysis.isDescribe(), + correlationSupport); } public static ExpressionAnalyzer createConstantAnalyzer( @@ -1718,6 +1741,7 @@ public static ExpressionAnalyzer createWithoutSubqueries( symbolTypes, parameters, warningCollector, - isDescribe); + isDescribe, + CorrelationSupport.ALLOWED); } } diff --git a/presto-main/src/main/java/io/prestosql/sql/analyzer/StatementAnalyzer.java b/presto-main/src/main/java/io/prestosql/sql/analyzer/StatementAnalyzer.java index de90d9388271..61a43fb4a3f9 100644 --- a/presto-main/src/main/java/io/prestosql/sql/analyzer/StatementAnalyzer.java +++ b/presto-main/src/main/java/io/prestosql/sql/analyzer/StatementAnalyzer.java @@ -239,6 +239,7 @@ import static io.prestosql.sql.tree.FrameBound.Type.UNBOUNDED_FOLLOWING; import static io.prestosql.sql.tree.FrameBound.Type.UNBOUNDED_PRECEDING; import static io.prestosql.sql.tree.Join.Type.FULL; +import static io.prestosql.sql.tree.Join.Type.INNER; import static io.prestosql.sql.tree.Join.Type.RIGHT; import static io.prestosql.sql.tree.WindowFrame.Type.RANGE; import static io.prestosql.type.UnknownType.UNKNOWN; @@ -260,6 +261,7 @@ class StatementAnalyzer private final SqlParser sqlParser; private final AccessControl accessControl; private final WarningCollector warningCollector; + private final CorrelationSupport correlationSupport; public StatementAnalyzer( Analysis analysis, @@ -267,7 +269,8 @@ public StatementAnalyzer( SqlParser sqlParser, AccessControl accessControl, Session session, - WarningCollector warningCollector) + WarningCollector warningCollector, + CorrelationSupport correlationSupport) { this.analysis = requireNonNull(analysis, "analysis is null"); 
this.metadata = requireNonNull(metadata, "metadata is null"); @@ -276,6 +279,7 @@ public StatementAnalyzer( this.accessControl = requireNonNull(accessControl, "accessControl is null"); this.session = requireNonNull(session, "session is null"); this.warningCollector = requireNonNull(warningCollector, "warningCollector is null"); + this.correlationSupport = requireNonNull(correlationSupport, "correlationSupport is null"); } public Scope analyze(Node node, Scope outerQueryScope) @@ -494,7 +498,8 @@ protected Scope visitDelete(Delete node, Optional scope) sqlParser, new AllowAllAccessControl(), session, - warningCollector); + warningCollector, + CorrelationSupport.ALLOWED); Scope tableScope = analyzer.analyze(table, scope); node.getWhere().ifPresent(where -> analyzeWhere(node, tableScope, where)); @@ -664,7 +669,7 @@ protected Scope visitCreateView(CreateView node, Optional scope) analysis.setUpdateType("CREATE VIEW", viewName); // analyze the query that creates the view - StatementAnalyzer analyzer = new StatementAnalyzer(analysis, metadata, sqlParser, accessControl, session, warningCollector); + StatementAnalyzer analyzer = new StatementAnalyzer(analysis, metadata, sqlParser, accessControl, session, warningCollector, CorrelationSupport.ALLOWED); Scope queryScope = analyzer.analyze(node.getQuery(), scope); @@ -992,7 +997,7 @@ else if (expressionType instanceof MapType) { @Override protected Scope visitLateral(Lateral node, Optional scope) { - StatementAnalyzer analyzer = new StatementAnalyzer(analysis, metadata, sqlParser, accessControl, session, warningCollector); + StatementAnalyzer analyzer = new StatementAnalyzer(analysis, metadata, sqlParser, accessControl, session, warningCollector, CorrelationSupport.ALLOWED); Scope queryScope = analyzer.analyze(node.getQuery(), scope); return createAndAssignScope(node, scope, queryScope.getRelationType()); } @@ -1238,7 +1243,7 @@ protected Scope visitSampledRelation(SampledRelation relation, Optional s @Override protected 
Scope visitTableSubquery(TableSubquery node, Optional scope) { - StatementAnalyzer analyzer = new StatementAnalyzer(analysis, metadata, sqlParser, accessControl, session, warningCollector); + StatementAnalyzer analyzer = new StatementAnalyzer(analysis, metadata, sqlParser, accessControl, session, warningCollector, CorrelationSupport.ALLOWED); Scope queryScope = analyzer.analyze(node.getQuery(), scope); return createAndAssignScope(node, scope, queryScope.getRelationType()); } @@ -1464,8 +1469,9 @@ else if (node.getType() == FULL) { if (criteria instanceof JoinOn) { Expression expression = ((JoinOn) criteria).getExpression(); - // need to register coercions in case when join criteria requires coercion (e.g. join on char(1) = char(2)) - ExpressionAnalysis expressionAnalysis = analyzeExpression(expression, output); + // Need to register coercions in case when join criteria requires coercion (e.g. join on char(1) = char(2)) + // Correlations are only currently support in the join criteria for INNER joins + ExpressionAnalysis expressionAnalysis = analyzeExpression(expression, output, node.getType() == INNER ? CorrelationSupport.ALLOWED : CorrelationSupport.DISALLOWED); Type clauseType = expressionAnalysis.getType(expression); if (!clauseType.equals(BOOLEAN)) { if (!clauseType.equals(UNKNOWN)) { @@ -2368,7 +2374,7 @@ private RelationType analyzeView(Query query, QualifiedObjectName name, Optional // TODO: record path in view definition (?) 
(check spec) and feed it into the session object we use to evaluate the query defined by the view Session viewSession = createViewSession(catalog, schema, identity, session.getPath()); - StatementAnalyzer analyzer = new StatementAnalyzer(analysis, metadata, sqlParser, viewAccessControl, viewSession, warningCollector); + StatementAnalyzer analyzer = new StatementAnalyzer(analysis, metadata, sqlParser, viewAccessControl, viewSession, warningCollector, CorrelationSupport.ALLOWED); Scope queryScope = analyzer.analyze(query, Scope.create()); return queryScope.getRelationType().withAlias(name.getObjectName(), null); } @@ -2427,7 +2433,22 @@ private ExpressionAnalysis analyzeExpression(Expression expression, Scope scope) scope, analysis, expression, - warningCollector); + warningCollector, + correlationSupport); + } + + private ExpressionAnalysis analyzeExpression(Expression expression, Scope scope, CorrelationSupport correlationSupport) + { + return ExpressionAnalyzer.analyzeExpression( + session, + metadata, + accessControl, + sqlParser, + scope, + analysis, + expression, + warningCollector, + correlationSupport); } private void analyzeRowFilter(String currentIdentity, Table table, QualifiedObjectName name, Scope scope, ViewExpression filter) @@ -2455,7 +2476,8 @@ private void analyzeRowFilter(String currentIdentity, Table table, QualifiedObje scope, analysis, expression, - warningCollector); + warningCollector, + correlationSupport); } catch (PrestoException e) { throw new PrestoException(e::getErrorCode, extractLocation(table), format("Invalid row filter for '%s': %s", name, e.getRawMessage()), e); @@ -2508,7 +2530,8 @@ private void analyzeColumnMask(String currentIdentity, Table table, QualifiedObj scope, analysis, expression, - warningCollector); + warningCollector, + correlationSupport); } catch (PrestoException e) { throw new PrestoException(e::getErrorCode, extractLocation(table), format("Invalid column mask for '%s.%s': %s", tableName, column, 
e.getRawMessage()), e); @@ -2642,7 +2665,8 @@ private List analyzeOrderBy(Node node, List sortItems, Sco orderByScope, analysis, expression, - WarningCollector.NOOP); + WarningCollector.NOOP, + correlationSupport); analysis.recordSubqueries(node, expressionAnalysis); Type type = analysis.getType(expression); diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/RelationPlanner.java b/presto-main/src/main/java/io/prestosql/sql/planner/RelationPlanner.java index 863c7c06432b..51fb376e0a01 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/RelationPlanner.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/RelationPlanner.java @@ -421,7 +421,7 @@ else if (firstDependencies.stream().allMatch(right::canResolve) && secondDepende } // subqueries can be applied only to one side of join - left side is selected in arbitrary way - leftPlanBuilder = subqueryPlanner.handleUncorrelatedSubqueries(leftPlanBuilder, complexJoinExpressions, node); + leftPlanBuilder = subqueryPlanner.handleSubqueries(leftPlanBuilder, complexJoinExpressions, node); } TranslationMap translationMap = initializeTranslationMap(node, outputSymbols); translationMap.setFieldMappings(outputSymbols); @@ -640,7 +640,6 @@ private RelationPlan planCorrelatedJoin(Join join, RelationPlan leftPlan, Latera leftPlanBuilder, rightPlanBuilder, lateral.getQuery(), - true, CorrelatedJoinNode.Type.typeConvert(join.getType()), rewrittenFilterCondition); diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/SubqueryPlanner.java b/presto-main/src/main/java/io/prestosql/sql/planner/SubqueryPlanner.java index c2ef027cfa13..e520d028ac62 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/SubqueryPlanner.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/SubqueryPlanner.java @@ -57,9 +57,7 @@ import static com.google.common.collect.ImmutableList.toImmutableList; import static com.google.common.collect.ImmutableSet.toImmutableSet; import static 
com.google.common.collect.Iterables.getOnlyElement; -import static io.prestosql.spi.StandardErrorCode.NOT_SUPPORTED; import static io.prestosql.spi.type.BooleanType.BOOLEAN; -import static io.prestosql.sql.analyzer.SemanticExceptions.semanticException; import static io.prestosql.sql.planner.ReferenceAwareExpressionNodeInliner.replaceExpression; import static io.prestosql.sql.planner.optimizations.PlanNodeSearcher.searchFrom; import static io.prestosql.sql.tree.BooleanLiteral.TRUE_LITERAL; @@ -103,30 +101,17 @@ class SubqueryPlanner public PlanBuilder handleSubqueries(PlanBuilder builder, Collection expressions, Node node) { for (Expression expression : expressions) { - builder = handleSubqueries(builder, expression, node, true); - } - return builder; - } - - public PlanBuilder handleUncorrelatedSubqueries(PlanBuilder builder, Collection expressions, Node node) - { - for (Expression expression : expressions) { - builder = handleSubqueries(builder, expression, node, false); + builder = handleSubqueries(builder, expression, node); } return builder; } public PlanBuilder handleSubqueries(PlanBuilder builder, Expression expression, Node node) { - return handleSubqueries(builder, expression, node, true); - } - - private PlanBuilder handleSubqueries(PlanBuilder builder, Expression expression, Node node, boolean correlationAllowed) - { - builder = appendInPredicateApplyNodes(builder, collectInPredicateSubqueries(expression, node), correlationAllowed, node); - builder = appendScalarSubqueryCorrelatedJoins(builder, collectScalarSubqueries(expression, node), correlationAllowed); - builder = appendExistsSubqueryApplyNodes(builder, collectExistsSubqueries(expression, node), correlationAllowed); - builder = appendQuantifiedComparisonApplyNodes(builder, collectQuantifiedComparisonSubqueries(expression, node), correlationAllowed, node); + builder = appendInPredicateApplyNodes(builder, collectInPredicateSubqueries(expression, node), node); + builder = 
appendScalarSubqueryCorrelatedJoins(builder, collectScalarSubqueries(expression, node)); + builder = appendExistsSubqueryApplyNodes(builder, collectExistsSubqueries(expression, node)); + builder = appendQuantifiedComparisonApplyNodes(builder, collectQuantifiedComparisonSubqueries(expression, node), node); return builder; } @@ -162,15 +147,15 @@ public Set collectQuantifiedComparisonSubqueries .collect(toImmutableSet()); } - private PlanBuilder appendInPredicateApplyNodes(PlanBuilder subPlan, Set inPredicates, boolean correlationAllowed, Node node) + private PlanBuilder appendInPredicateApplyNodes(PlanBuilder subPlan, Set inPredicates, Node node) { for (InPredicate inPredicate : inPredicates) { - subPlan = appendInPredicateApplyNode(subPlan, inPredicate, correlationAllowed, node); + subPlan = appendInPredicateApplyNode(subPlan, inPredicate, node); } return subPlan; } - private PlanBuilder appendInPredicateApplyNode(PlanBuilder subPlan, InPredicate inPredicate, boolean correlationAllowed, Node node) + private PlanBuilder appendInPredicateApplyNode(PlanBuilder subPlan, InPredicate inPredicate, Node node) { if (subPlan.canTranslate(inPredicate)) { // given subquery is already appended @@ -195,18 +180,18 @@ private PlanBuilder appendInPredicateApplyNode(PlanBuilder subPlan, InPredicate subPlan.getTranslations().put(inPredicate, inPredicateSubquerySymbol); - return appendApplyNode(subPlan, inPredicate, subqueryPlan.getRoot(), Assignments.of(inPredicateSubquerySymbol, inPredicateSubqueryExpression), correlationAllowed); + return appendApplyNode(subPlan, inPredicate, subqueryPlan.getRoot(), Assignments.of(inPredicateSubquerySymbol, inPredicateSubqueryExpression)); } - private PlanBuilder appendScalarSubqueryCorrelatedJoins(PlanBuilder builder, Set scalarSubqueries, boolean correlationAllowed) + private PlanBuilder appendScalarSubqueryCorrelatedJoins(PlanBuilder builder, Set scalarSubqueries) { for (SubqueryExpression scalarSubquery : scalarSubqueries) { - builder = 
appendScalarSubqueryApplyNode(builder, scalarSubquery, correlationAllowed); + builder = appendScalarSubqueryApplyNode(builder, scalarSubquery); } return builder; } - private PlanBuilder appendScalarSubqueryApplyNode(PlanBuilder subPlan, SubqueryExpression scalarSubquery, boolean correlationAllowed) + private PlanBuilder appendScalarSubqueryApplyNode(PlanBuilder subPlan, SubqueryExpression scalarSubquery) { if (subPlan.canTranslate(scalarSubquery)) { // given subquery is already appended @@ -229,16 +214,13 @@ private PlanBuilder appendScalarSubqueryApplyNode(PlanBuilder subPlan, SubqueryE } // The subquery's EnforceSingleRowNode always produces a row, so the join is effectively INNER - return appendCorrelatedJoin(subPlan, subqueryPlan, scalarSubquery.getQuery(), correlationAllowed, CorrelatedJoinNode.Type.INNER, TRUE_LITERAL); + return appendCorrelatedJoin(subPlan, subqueryPlan, scalarSubquery.getQuery(), CorrelatedJoinNode.Type.INNER, TRUE_LITERAL); } - public PlanBuilder appendCorrelatedJoin(PlanBuilder subPlan, PlanBuilder subqueryPlan, Query query, boolean correlationAllowed, CorrelatedJoinNode.Type type, Expression filterCondition) + public PlanBuilder appendCorrelatedJoin(PlanBuilder subPlan, PlanBuilder subqueryPlan, Query query, CorrelatedJoinNode.Type type, Expression filterCondition) { PlanNode subqueryNode = subqueryPlan.getRoot(); Map, Expression> correlation = extractCorrelation(subPlan, subqueryNode); - if (!correlationAllowed && !correlation.isEmpty()) { - throw semanticException(NOT_SUPPORTED, query, "Correlated subquery in given context is not supported"); - } subqueryNode = replaceExpressionsWithSymbols(subqueryNode, correlation); return new PlanBuilder( @@ -253,10 +235,10 @@ public PlanBuilder appendCorrelatedJoin(PlanBuilder subPlan, PlanBuilder subquer query)); } - private PlanBuilder appendExistsSubqueryApplyNodes(PlanBuilder builder, Set existsPredicates, boolean correlationAllowed) + private PlanBuilder 
appendExistsSubqueryApplyNodes(PlanBuilder builder, Set existsPredicates) { for (ExistsPredicate existsPredicate : existsPredicates) { - builder = appendExistSubqueryApplyNode(builder, existsPredicate, correlationAllowed); + builder = appendExistSubqueryApplyNode(builder, existsPredicate); } return builder; } @@ -270,7 +252,7 @@ private PlanBuilder appendExistsSubqueryApplyNodes(PlanBuilder builder, Set */ - private PlanBuilder appendExistSubqueryApplyNode(PlanBuilder subPlan, ExistsPredicate existsPredicate, boolean correlationAllowed) + private PlanBuilder appendExistSubqueryApplyNode(PlanBuilder subPlan, ExistsPredicate existsPredicate) { if (subPlan.canTranslate(existsPredicate)) { // given subquery is already appended @@ -295,19 +277,18 @@ private PlanBuilder appendExistSubqueryApplyNode(PlanBuilder subPlan, ExistsPred subPlan, existsPredicate.getSubquery(), subqueryNode, - Assignments.of(exists, rewrittenExistsPredicate), - correlationAllowed); + Assignments.of(exists, rewrittenExistsPredicate)); } - private PlanBuilder appendQuantifiedComparisonApplyNodes(PlanBuilder subPlan, Set quantifiedComparisons, boolean correlationAllowed, Node node) + private PlanBuilder appendQuantifiedComparisonApplyNodes(PlanBuilder subPlan, Set quantifiedComparisons, Node node) { for (QuantifiedComparisonExpression quantifiedComparison : quantifiedComparisons) { - subPlan = appendQuantifiedComparisonApplyNode(subPlan, quantifiedComparison, correlationAllowed, node); + subPlan = appendQuantifiedComparisonApplyNode(subPlan, quantifiedComparison, node); } return subPlan; } - private PlanBuilder appendQuantifiedComparisonApplyNode(PlanBuilder subPlan, QuantifiedComparisonExpression quantifiedComparison, boolean correlationAllowed, Node node) + private PlanBuilder appendQuantifiedComparisonApplyNode(PlanBuilder subPlan, QuantifiedComparisonExpression quantifiedComparison, Node node) { if (subPlan.canTranslate(quantifiedComparison)) { // given subquery is already appended @@ -317,12 
+298,12 @@ private PlanBuilder appendQuantifiedComparisonApplyNode(PlanBuilder subPlan, Qua case EQUAL: switch (quantifiedComparison.getQuantifier()) { case ALL: - return planQuantifiedApplyNode(subPlan, quantifiedComparison, correlationAllowed); + return planQuantifiedApplyNode(subPlan, quantifiedComparison); case ANY: case SOME: // A = ANY B <=> A IN B InPredicate inPredicate = new InPredicate(quantifiedComparison.getValue(), quantifiedComparison.getSubquery()); - subPlan = appendInPredicateApplyNode(subPlan, inPredicate, correlationAllowed, node); + subPlan = appendInPredicateApplyNode(subPlan, inPredicate, node); subPlan.getTranslations().put(quantifiedComparison, subPlan.translate(inPredicate)); return subPlan; } @@ -341,7 +322,7 @@ private PlanBuilder appendQuantifiedComparisonApplyNode(PlanBuilder subPlan, Qua // "A <> ALL B" is equivalent to "NOT (A = ANY B)" so add a rewrite for the initial quantifiedComparison to notAny subPlan.getTranslations().put(quantifiedComparison, subPlan.getTranslations().rewrite(notAny)); // now plan "A = ANY B" part by calling ourselves for rewrittenAny - return appendQuantifiedComparisonApplyNode(subPlan, rewrittenAny, correlationAllowed, node); + return appendQuantifiedComparisonApplyNode(subPlan, rewrittenAny, node); case ANY: case SOME: // A <> ANY B <=> min B <> max B || A <> min B <=> !(min B = max B && A = min B) <=> !(A = ALL B) @@ -354,7 +335,7 @@ private PlanBuilder appendQuantifiedComparisonApplyNode(PlanBuilder subPlan, Qua // "A <> ANY B" is equivalent to "NOT (A = ALL B)" so add a rewrite for the initial quantifiedComparison to notAll subPlan.getTranslations().put(quantifiedComparison, subPlan.getTranslations().rewrite(notAll)); // now plan "A = ALL B" part by calling ourselves for rewrittenAll - return appendQuantifiedComparisonApplyNode(subPlan, rewrittenAll, correlationAllowed, node); + return appendQuantifiedComparisonApplyNode(subPlan, rewrittenAll, node); } break; @@ -362,14 +343,14 @@ private PlanBuilder 
appendQuantifiedComparisonApplyNode(PlanBuilder subPlan, Qua case LESS_THAN_OR_EQUAL: case GREATER_THAN: case GREATER_THAN_OR_EQUAL: - return planQuantifiedApplyNode(subPlan, quantifiedComparison, correlationAllowed); + return planQuantifiedApplyNode(subPlan, quantifiedComparison); } // all cases are checked, so this exception should never be thrown throw new IllegalArgumentException( format("Unexpected quantified comparison: '%s %s'", quantifiedComparison.getOperator().getValue(), quantifiedComparison.getQuantifier())); } - private PlanBuilder planQuantifiedApplyNode(PlanBuilder subPlan, QuantifiedComparisonExpression quantifiedComparison, boolean correlationAllowed) + private PlanBuilder planQuantifiedApplyNode(PlanBuilder subPlan, QuantifiedComparisonExpression quantifiedComparison) { subPlan = subPlan.appendProjections(ImmutableList.of(quantifiedComparison.getValue()), symbolAllocator, idAllocator); @@ -393,8 +374,7 @@ private PlanBuilder planQuantifiedApplyNode(PlanBuilder subPlan, QuantifiedCompa subPlan, quantifiedComparison.getSubquery(), subqueryPlan.getRoot(), - Assignments.of(coercedQuantifiedComparisonSymbol, coercedQuantifiedComparison), - correlationAllowed); + Assignments.of(coercedQuantifiedComparisonSymbol, coercedQuantifiedComparison)); } private static boolean isAggregationWithEmptyGroupBy(PlanNode planNode) @@ -430,13 +410,9 @@ private PlanBuilder appendApplyNode( PlanBuilder subPlan, Node subquery, PlanNode subqueryNode, - Assignments subqueryAssignments, - boolean correlationAllowed) + Assignments subqueryAssignments) { Map, Expression> correlation = extractCorrelation(subPlan, subqueryNode); - if (!correlationAllowed && !correlation.isEmpty()) { - throw semanticException(NOT_SUPPORTED, subquery, "Correlated subquery in given context is not supported"); - } subPlan = subPlan.appendProjections( correlation.keySet().stream().map(NodeRef::getNode).collect(toImmutableSet()), symbolAllocator, diff --git 
a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestJoinQueries.java b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestJoinQueries.java index 65a16e96cf50..e881691e2b4a 100644 --- a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestJoinQueries.java +++ b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestJoinQueries.java @@ -889,7 +889,7 @@ public void testOuterJoinWithComplexCorrelatedSubquery() condition); queryTemplate.replaceAll( - (query) -> assertQueryFails(query, "line .*: .* is not supported"), + (query) -> assertQueryFails(query, "line .*: Reference to column 'x' from outer scope not allowed in this context"), ImmutableList.of(type.of("left"), type.of("right"), type.of("full")), ImmutableList.of( condition.of("EXISTS(SELECT 1 WHERE x = y)"), diff --git a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestQueries.java b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestQueries.java index e628b140af24..eed0f91d28be 100644 --- a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestQueries.java +++ b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestQueries.java @@ -1278,7 +1278,7 @@ public void testCorrelatedScalarSubqueriesWithScalarAggregationAndEqualityPredic assertQueryFails( "SELECT count(*) FROM orders o1 LEFT JOIN orders o2 " + "ON NOT 1 = (SELECT count(*) WHERE o1.orderkey = o2.orderkey)", - "line .*: Correlated subquery in given context is not supported"); + "line 1:86: Reference to column 'o1.orderkey' from outer scope not allowed in this context"); // subrelation assertQuery( @@ -1339,7 +1339,7 @@ public void testCorrelatedScalarSubqueriesWithScalarAggregation() assertQueryFails( "SELECT count(*) FROM orders o1 LEFT JOIN orders o2 " + "ON NOT 1 = (SELECT avg(i.orderkey) FROM orders i WHERE o1.orderkey < o2.orderkey)", - "line .*: Correlated subquery in given context is not supported"); + "line 1:107: Reference to column 'o1.orderkey' from outer scope not 
allowed in this context"); // subrelation assertQuery( @@ -1498,7 +1498,7 @@ public void testCorrelatedExistsSubqueriesWithEqualityPredicatesInWhere() assertQueryFails( "SELECT count(*) FROM orders o1 LEFT JOIN orders o2 " + "ON NOT EXISTS(SELECT 1 WHERE o1.orderkey = o2.orderkey)", - "line .*: Correlated subquery in given context is not supported"); + "line 1:81: Reference to column 'o1.orderkey' from outer scope not allowed in this context"); // subrelation assertQuery( @@ -1579,7 +1579,7 @@ public void testCorrelatedExistsSubqueries() assertQueryFails( "SELECT count(*) FROM orders o1 LEFT JOIN orders o2 " + "ON NOT EXISTS(SELECT 1 FROM orders i WHERE o1.orderkey < o2.orderkey)", - "line .*: Correlated subquery in given context is not supported"); + "line 1:95: Reference to column 'o1.orderkey' from outer scope not allowed in this context"); // subrelation assertQuery( From c14d78f4ad83e5f6fce0b77b22bbec37aa8d1769 Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Wed, 29 Apr 2020 10:54:28 +0200 Subject: [PATCH 290/519] Check Limit with ties rewritten --- .../io/prestosql/sql/planner/LocalExecutionPlanner.java | 3 +++ .../sql/planner/sanity/ValidateDependenciesChecker.java | 8 ++++++++ 2 files changed, 11 insertions(+) diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/LocalExecutionPlanner.java b/presto-main/src/main/java/io/prestosql/sql/planner/LocalExecutionPlanner.java index cec258243491..82f18841f10b 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/LocalExecutionPlanner.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/LocalExecutionPlanner.java @@ -1035,6 +1035,9 @@ public PhysicalOperation visitSort(SortNode node, LocalExecutionPlanContext cont @Override public PhysicalOperation visitLimit(LimitNode node, LocalExecutionPlanContext context) { + // Limit with ties should be rewritten at this point + checkState(!node.getTiesResolvingScheme().isPresent(), "Limit with ties not 
supported"); + PhysicalOperation source = node.getSource().accept(this, context); OperatorFactory operatorFactory = new LimitOperatorFactory(context.getNextOperatorId(), node.getId(), node.getCount()); diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/sanity/ValidateDependenciesChecker.java b/presto-main/src/main/java/io/prestosql/sql/planner/sanity/ValidateDependenciesChecker.java index f401d34a2a48..6ff1da6bab7a 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/sanity/ValidateDependenciesChecker.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/sanity/ValidateDependenciesChecker.java @@ -319,6 +319,14 @@ public Void visitLimit(LimitNode node, Set boundSymbols) PlanNode source = node.getSource(); source.accept(this, boundSymbols); // visit child + if (node.getTiesResolvingScheme().isPresent()) { + checkDependencies( + createInputs(source, boundSymbols), + node.getTiesResolvingScheme().get().getOrderBy(), + "Invalid node. Ties resolving dependencies (%s) not in source plan output (%s)", + node.getTiesResolvingScheme().get().getOrderBy(), node.getSource().getOutputSymbols()); + } + return null; } From 052214ec4b6c71e8e23de09585077aa52c7bd345 Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Sat, 25 Apr 2020 18:34:48 +0200 Subject: [PATCH 291/519] Support order sensitivity in RowNumberNode --- .../iterative/rule/ImplementOffset.java | 1 + .../optimizations/AddLocalExchanges.java | 12 +++- .../HashGenerationOptimizer.java | 1 + .../optimizations/PlanNodeDecorrelator.java | 2 + .../PruneUnreferencedOutputs.java | 2 +- .../UnaliasSymbolReferences.java | 9 ++- .../optimizations/WindowFilterPushDown.java | 10 ++- .../sql/planner/plan/RowNumberNode.java | 20 +++++- .../iterative/rule/test/PlanBuilder.java | 1 + .../optimizations/TestAddExchangesPlans.java | 62 +++++++++++++++++++ 10 files changed, 114 insertions(+), 6 deletions(-) diff --git 
a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/ImplementOffset.java b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/ImplementOffset.java index 9e0a78b1389a..6f50bdc30444 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/ImplementOffset.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/ImplementOffset.java @@ -68,6 +68,7 @@ public Result apply(OffsetNode parent, Captures captures, Context context) context.getIdAllocator().getNextId(), parent.getSource(), ImmutableList.of(), + true, rowNumberSymbol, Optional.empty(), Optional.empty()); diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/AddLocalExchanges.java b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/AddLocalExchanges.java index 392d00cf40d2..25c568303bfa 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/AddLocalExchanges.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/AddLocalExchanges.java @@ -65,6 +65,7 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; +import static com.google.common.base.Verify.verify; import static com.google.common.collect.ImmutableList.toImmutableList; import static com.google.common.collect.ImmutableSet.toImmutableSet; import static io.prestosql.SystemSessionProperties.getTaskConcurrency; @@ -456,8 +457,15 @@ else if (!node.getDistinctSymbols().containsAll(property.getColumns())) { @Override public PlanWithProperties visitRowNumber(RowNumberNode node, StreamPreferredProperties parentPreferences) { - // row number requires that all data be partitioned - StreamPreferredProperties requiredProperties = parentPreferences.withDefaultParallelism(session).withPartitioning(node.getPartitionBy()); + StreamPreferredProperties requiredProperties; + if (node.isOrderSensitive()) { + // for an order sensitive RowNumberNode 
pass the orderSensitive context + verify(node.getPartitionBy().isEmpty(), "unexpected partitioning"); + requiredProperties = singleStream().withOrderSensitivity(); + } + else { + requiredProperties = parentPreferences.withDefaultParallelism(session).withPartitioning(node.getPartitionBy()); + } return planAndEnforceChildren(node, requiredProperties, requiredProperties); } diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/HashGenerationOptimizer.java b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/HashGenerationOptimizer.java index a87e219bf80f..630466fa409c 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/HashGenerationOptimizer.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/HashGenerationOptimizer.java @@ -270,6 +270,7 @@ public PlanWithProperties visitRowNumber(RowNumberNode node, HashComputationSet node.getId(), child.getNode(), node.getPartitionBy(), + node.isOrderSensitive(), node.getRowNumberSymbol(), node.getMaxRowCountPerPartition(), Optional.of(hashSymbol)), diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PlanNodeDecorrelator.java b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PlanNodeDecorrelator.java index 81c7c0de6cfa..743537f687d0 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PlanNodeDecorrelator.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PlanNodeDecorrelator.java @@ -255,6 +255,7 @@ private Optional rewriteLimitWithRowCountGreaterThanOne(Dec node.getId(), decorrelatedChildNode, ImmutableList.copyOf(childDecorrelationResult.symbolsToPropagate), + false, symbolAllocator.newSymbol("row_number", BIGINT), Optional.of(toIntExact(node.getCount())), Optional.empty()); @@ -338,6 +339,7 @@ public Optional visitTopN(TopNNode node, Void context) node.getId(), decorrelatedChildNode, ImmutableList.copyOf(childDecorrelationResult.symbolsToPropagate), 
+ false, symbolAllocator.newSymbol("row_number", BIGINT), Optional.of(toIntExact(node.getCount())), Optional.empty()); diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PruneUnreferencedOutputs.java b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PruneUnreferencedOutputs.java index bbf528850b5f..2e3a3add5095 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PruneUnreferencedOutputs.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PruneUnreferencedOutputs.java @@ -627,7 +627,7 @@ public PlanNode visitRowNumber(RowNumberNode node, RewriteContext> c } PlanNode source = context.rewrite(node.getSource(), expectedInputs.build()); - return new RowNumberNode(node.getId(), source, node.getPartitionBy(), node.getRowNumberSymbol(), node.getMaxRowCountPerPartition(), node.getHashSymbol()); + return new RowNumberNode(node.getId(), source, node.getPartitionBy(), node.isOrderSensitive(), node.getRowNumberSymbol(), node.getMaxRowCountPerPartition(), node.getHashSymbol()); } @Override diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/UnaliasSymbolReferences.java b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/UnaliasSymbolReferences.java index c7f0109379fd..ecf184da1052 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/UnaliasSymbolReferences.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/UnaliasSymbolReferences.java @@ -413,7 +413,14 @@ public PlanNode visitTableFinish(TableFinishNode node, RewriteContext cont @Override public PlanNode visitRowNumber(RowNumberNode node, RewriteContext context) { - return new RowNumberNode(node.getId(), context.rewrite(node.getSource()), canonicalizeAndDistinct(node.getPartitionBy()), canonicalize(node.getRowNumberSymbol()), node.getMaxRowCountPerPartition(), canonicalize(node.getHashSymbol())); + return new RowNumberNode( + node.getId(), + 
context.rewrite(node.getSource()), + canonicalizeAndDistinct(node.getPartitionBy()), + node.isOrderSensitive(), + canonicalize(node.getRowNumberSymbol()), + node.getMaxRowCountPerPartition(), + canonicalize(node.getHashSymbol())); } @Override diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/WindowFilterPushDown.java b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/WindowFilterPushDown.java index d301a3d3a83a..55c0ce4fed20 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/WindowFilterPushDown.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/WindowFilterPushDown.java @@ -110,6 +110,7 @@ public PlanNode visitWindow(WindowNode node, RewriteContext context) return new RowNumberNode(idAllocator.getNextId(), rewrittenSource, node.getPartitionBy(), + false, getOnlyElement(node.getWindowFunctions().keySet()), Optional.empty(), Optional.empty()); @@ -254,7 +255,14 @@ private static RowNumberNode mergeLimit(RowNumberNode node, int newRowCountPerPa if (node.getMaxRowCountPerPartition().isPresent()) { newRowCountPerPartition = Math.min(node.getMaxRowCountPerPartition().get(), newRowCountPerPartition); } - return new RowNumberNode(node.getId(), node.getSource(), node.getPartitionBy(), node.getRowNumberSymbol(), Optional.of(newRowCountPerPartition), node.getHashSymbol()); + return new RowNumberNode( + node.getId(), + node.getSource(), + node.getPartitionBy(), + node.isOrderSensitive(), + node.getRowNumberSymbol(), + Optional.of(newRowCountPerPartition), + node.getHashSymbol()); } private TopNRowNumberNode convertToTopNRowNumber(WindowNode windowNode, int limit) diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/plan/RowNumberNode.java b/presto-main/src/main/java/io/prestosql/sql/planner/plan/RowNumberNode.java index b6747893238b..c1f95fbc32d8 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/plan/RowNumberNode.java +++ 
b/presto-main/src/main/java/io/prestosql/sql/planner/plan/RowNumberNode.java @@ -24,6 +24,7 @@ import java.util.List; import java.util.Optional; +import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.collect.Iterables.concat; import static java.util.Objects.requireNonNull; @@ -33,6 +34,14 @@ public final class RowNumberNode { private final PlanNode source; private final List partitionBy; + /* + * This flag indicates that the node depends on the row order established by the subplan. + * It is taken into account while adding local exchanges to the plan, ensuring that sorted order + * of data will be respected. + * Note: if the subplan doesn't produce sorted output, this flag doesn't change the resulting plan. + * Note: this flag is used for planning of queries involving ORDER BY and OFFSET. + */ + private final boolean orderSensitive; private final Optional maxRowCountPerPartition; private final Symbol rowNumberSymbol; private final Optional hashSymbol; @@ -42,6 +51,7 @@ public RowNumberNode( @JsonProperty("id") PlanNodeId id, @JsonProperty("source") PlanNode source, @JsonProperty("partitionBy") List partitionBy, + @JsonProperty("orderSensitive") boolean orderSensitive, @JsonProperty("rowNumberSymbol") Symbol rowNumberSymbol, @JsonProperty("maxRowCountPerPartition") Optional maxRowCountPerPartition, @JsonProperty("hashSymbol") Optional hashSymbol) @@ -50,12 +60,14 @@ public RowNumberNode( requireNonNull(source, "source is null"); requireNonNull(partitionBy, "partitionBy is null"); + checkArgument(!orderSensitive || partitionBy.isEmpty(), "unexpected partitioning in order sensitive node"); requireNonNull(rowNumberSymbol, "rowNumberSymbol is null"); requireNonNull(maxRowCountPerPartition, "maxRowCountPerPartition is null"); requireNonNull(hashSymbol, "hashSymbol is null"); this.source = source; this.partitionBy = ImmutableList.copyOf(partitionBy); + this.orderSensitive = orderSensitive; this.rowNumberSymbol = rowNumberSymbol; 
this.maxRowCountPerPartition = maxRowCountPerPartition; this.hashSymbol = hashSymbol; @@ -85,6 +97,12 @@ public List getPartitionBy() return partitionBy; } + @JsonProperty + public boolean isOrderSensitive() + { + return orderSensitive; + } + @JsonProperty public Symbol getRowNumberSymbol() { @@ -112,6 +130,6 @@ public R accept(PlanVisitor visitor, C context) @Override public PlanNode replaceChildren(List newChildren) { - return new RowNumberNode(getId(), Iterables.getOnlyElement(newChildren), partitionBy, rowNumberSymbol, maxRowCountPerPartition, hashSymbol); + return new RowNumberNode(getId(), Iterables.getOnlyElement(newChildren), partitionBy, orderSensitive, rowNumberSymbol, maxRowCountPerPartition, hashSymbol); } } diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/test/PlanBuilder.java b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/test/PlanBuilder.java index d8ad592fb223..8612dba22e48 100644 --- a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/test/PlanBuilder.java +++ b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/test/PlanBuilder.java @@ -885,6 +885,7 @@ public RowNumberNode rowNumber(List partitionBy, Optional maxRo idAllocator.getNextId(), source, partitionBy, + false, rowNumberSymbol, maxRowCountPerPartition, hashSymbol); diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/optimizations/TestAddExchangesPlans.java b/presto-main/src/test/java/io/prestosql/sql/planner/optimizations/TestAddExchangesPlans.java index 035ff3677245..5e4ae2189551 100644 --- a/presto-main/src/test/java/io/prestosql/sql/planner/optimizations/TestAddExchangesPlans.java +++ b/presto-main/src/test/java/io/prestosql/sql/planner/optimizations/TestAddExchangesPlans.java @@ -23,6 +23,8 @@ import io.prestosql.sql.analyzer.FeaturesConfig.JoinDistributionType; import io.prestosql.sql.analyzer.FeaturesConfig.JoinReorderingStrategy; import io.prestosql.sql.planner.assertions.BasePlanTest; 
+import io.prestosql.sql.planner.assertions.PlanMatchPattern; +import io.prestosql.sql.planner.assertions.RowNumberSymbolMatcher; import io.prestosql.sql.planner.plan.ExchangeNode; import io.prestosql.sql.planner.plan.FilterNode; import io.prestosql.sql.planner.plan.JoinNode.DistributionType; @@ -41,15 +43,22 @@ import static io.prestosql.sql.analyzer.FeaturesConfig.JoinDistributionType.PARTITIONED; import static io.prestosql.sql.analyzer.FeaturesConfig.JoinReorderingStrategy.ELIMINATE_CROSS_JOINS; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.aggregation; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.any; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.anyNot; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.anyTree; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.equiJoinClause; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.exchange; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.expression; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.filter; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.join; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.limit; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.node; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.output; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.project; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.rowNumber; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.sort; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.tableScan; +import static io.prestosql.sql.planner.assertions.PlanMatchPattern.topN; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.values; import static io.prestosql.sql.planner.plan.ExchangeNode.Scope.LOCAL; import static 
io.prestosql.sql.planner.plan.ExchangeNode.Scope.REMOTE; @@ -57,6 +66,9 @@ import static io.prestosql.sql.planner.plan.ExchangeNode.Type.REPLICATE; import static io.prestosql.sql.planner.plan.JoinNode.DistributionType.REPLICATED; import static io.prestosql.sql.planner.plan.JoinNode.Type.INNER; +import static io.prestosql.sql.planner.plan.TopNNode.Step.FINAL; +import static io.prestosql.sql.tree.SortItem.NullOrdering.LAST; +import static io.prestosql.sql.tree.SortItem.Ordering.ASCENDING; import static io.prestosql.testing.TestingSession.testSessionBuilder; public class TestAddExchangesPlans @@ -218,6 +230,56 @@ public void testForcePartitioningMarkDistinctInput() node(ValuesNode.class))))))))); } + @Test + public void testImplementOffsetWithOrderedSource() + { + // no repartitioning exchange is added below row number, so the ordering established by topN is preserved. + // also, no repartitioning exchange is added above row number, so the order is respected at output. + assertPlan( + "SELECT name FROM nation ORDER BY regionkey, name OFFSET 5 LIMIT 2", + output( + project( + ImmutableMap.of("name", PlanMatchPattern.expression("name")), + filter( + "row_num > BIGINT '5'", + rowNumber( + pattern -> pattern + .partitionBy(ImmutableList.of()), + project( + ImmutableMap.of("name", PlanMatchPattern.expression("name")), + topN( + 7, + ImmutableList.of(sort("regionkey", ASCENDING, LAST), sort("name", ASCENDING, LAST)), + FINAL, + anyTree( + tableScan("nation", ImmutableMap.of("NAME", "name", "REGIONKEY", "regionkey")))))) + .withAlias("row_num", new RowNumberSymbolMatcher()))))); + } + + @Test + public void testImplementOffsetWithUnorderedSource() + { + // no ordering of output is expected; repartitioning exchange is present in the plan + assertPlan( + "SELECT name FROM nation OFFSET 5 LIMIT 2", + any( + project( + ImmutableMap.of("name", PlanMatchPattern.expression("name")), + filter( + "row_num > BIGINT '5'", + exchange( + LOCAL, + REPARTITION, + rowNumber( + pattern -> 
pattern + .partitionBy(ImmutableList.of()), + limit( + 7, + anyTree( + tableScan("nation", ImmutableMap.of("NAME", "name"))))) + .withAlias("row_num", new RowNumberSymbolMatcher())))))); + } + private Session spillEnabledWithJoinDistributionType(JoinDistributionType joinDistributionType) { return Session.builder(getQueryRunner().getDefaultSession()) From ed1d66cb584b618a01f047e3827a4ef3002691b9 Mon Sep 17 00:00:00 2001 From: Shubham Tagra Date: Wed, 15 Apr 2020 13:29:17 +0530 Subject: [PATCH 292/519] Allow reading ORC files which do not have row-group information --- .../main/java/io/prestosql/orc/OrcReader.java | 2 +- .../io/prestosql/orc/OrcRecordReader.java | 6 ++-- .../main/java/io/prestosql/orc/OrcWriter.java | 3 +- .../java/io/prestosql/orc/StripeReader.java | 10 +++++-- .../io/prestosql/orc/metadata/Footer.java | 9 ++++-- .../orc/metadata/OrcMetadataReader.java | 3 +- .../orc/metadata/OrcMetadataWriter.java | 2 +- .../test/java/io/prestosql/orc/OrcTester.java | 2 +- .../io/prestosql/orc/TestReadBloomFilter.java | 2 +- .../hive/TestHiveTransactionalTable.java | 29 ++++++++++++++++--- 10 files changed, 48 insertions(+), 20 deletions(-) diff --git a/presto-orc/src/main/java/io/prestosql/orc/OrcReader.java b/presto-orc/src/main/java/io/prestosql/orc/OrcReader.java index 963f0749e83c..1bc24529b194 100644 --- a/presto-orc/src/main/java/io/prestosql/orc/OrcReader.java +++ b/presto-orc/src/main/java/io/prestosql/orc/OrcReader.java @@ -189,7 +189,7 @@ private OrcReader( this.rootColumn = createOrcColumn("", "", new OrcColumnId(0), footer.getTypes(), orcDataSource.getId()); validateWrite(validation -> validation.getColumnNames().equals(getColumnNames()), "Unexpected column names"); - validateWrite(validation -> validation.getRowGroupMaxRowCount() == footer.getRowsInRowGroup(), "Unexpected rows in group"); + validateWrite(validation -> validation.getRowGroupMaxRowCount() == footer.getRowsInRowGroup().orElse(0), "Unexpected rows in group"); if 
(writeValidation.isPresent()) { writeValidation.get().validateMetadata(orcDataSource.getId(), footer.getUserMetadata()); writeValidation.get().validateFileStatistics(orcDataSource.getId(), footer.getFileStats()); diff --git a/presto-orc/src/main/java/io/prestosql/orc/OrcRecordReader.java b/presto-orc/src/main/java/io/prestosql/orc/OrcRecordReader.java index 7bfe116a7696..e9c065be53be 100644 --- a/presto-orc/src/main/java/io/prestosql/orc/OrcRecordReader.java +++ b/presto-orc/src/main/java/io/prestosql/orc/OrcRecordReader.java @@ -51,6 +51,7 @@ import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.OptionalInt; import java.util.function.Function; import java.util.function.Predicate; import java.util.stream.Collectors; @@ -130,7 +131,7 @@ public OrcRecordReader( long splitLength, ColumnMetadata orcTypes, Optional decompressor, - int rowsInRowGroup, + OptionalInt rowsInRowGroup, DateTimeZone hiveStorageTimeZone, HiveWriterVersion hiveWriterVersion, MetadataReader metadataReader, @@ -170,9 +171,6 @@ public OrcRecordReader( requireNonNull(options, "options is null"); this.maxBlockBytes = options.getMaxBlockSize().toBytes(); - // it is possible that old versions of orc use 0 to mean there are no row groups - checkArgument(rowsInRowGroup > 0, "rowsInRowGroup must be greater than zero"); - // sort stripes by file position List stripeInfos = new ArrayList<>(); for (int i = 0; i < fileStripes.size(); i++) { diff --git a/presto-orc/src/main/java/io/prestosql/orc/OrcWriter.java b/presto-orc/src/main/java/io/prestosql/orc/OrcWriter.java index 5315a8d4ec8f..b742e0db4f45 100644 --- a/presto-orc/src/main/java/io/prestosql/orc/OrcWriter.java +++ b/presto-orc/src/main/java/io/prestosql/orc/OrcWriter.java @@ -57,6 +57,7 @@ import java.util.Map; import java.util.Map.Entry; import java.util.Optional; +import java.util.OptionalInt; import java.util.function.Consumer; import java.util.stream.Collectors; @@ -481,7 +482,7 @@ private List 
bufferFileFooter() Footer footer = new Footer( numberOfRows, - rowGroupMaxRowCount, + rowGroupMaxRowCount == 0 ? OptionalInt.empty() : OptionalInt.of(rowGroupMaxRowCount), closedStripes.stream() .map(ClosedStripe::getStripeInformation) .collect(toImmutableList()), diff --git a/presto-orc/src/main/java/io/prestosql/orc/StripeReader.java b/presto-orc/src/main/java/io/prestosql/orc/StripeReader.java index 2d9568c6f48d..c8a8368eeece 100644 --- a/presto-orc/src/main/java/io/prestosql/orc/StripeReader.java +++ b/presto-orc/src/main/java/io/prestosql/orc/StripeReader.java @@ -56,6 +56,7 @@ import java.util.Map; import java.util.Map.Entry; import java.util.Optional; +import java.util.OptionalInt; import java.util.Set; import static com.google.common.base.Preconditions.checkArgument; @@ -83,7 +84,7 @@ public class StripeReader private final ColumnMetadata types; private final HiveWriterVersion hiveWriterVersion; private final Set includedOrcColumnIds; - private final int rowsInRowGroup; + private final OptionalInt rowsInRowGroup; private final OrcPredicate predicate; private final MetadataReader metadataReader; private final Optional writeValidation; @@ -94,7 +95,7 @@ public StripeReader( Optional decompressor, ColumnMetadata types, Set readColumns, - int rowsInRowGroup, + OptionalInt rowsInRowGroup, OrcPredicate predicate, HiveWriterVersion hiveWriterVersion, MetadataReader metadataReader, @@ -133,7 +134,7 @@ public Stripe readStripe(StripeInformation stripe, AggregatedMemoryContext syste // handle stripes with more than one row group boolean invalidCheckPoint = false; - if (stripe.getNumberOfRows() > rowsInRowGroup) { + if (rowsInRowGroup.isPresent() && stripe.getNumberOfRows() > rowsInRowGroup.getAsInt()) { // determine ranges of the stripe to read Map diskRanges = getDiskRanges(stripeFooter.getStreams()); diskRanges = Maps.filterKeys(diskRanges, Predicates.in(streams.keySet())); @@ -332,6 +333,7 @@ private List createRowGroups( ColumnMetadata encodings) throws 
InvalidCheckpointException { + int rowsInRowGroup = this.rowsInRowGroup.orElseThrow(() -> new IllegalStateException("Cannot create row groups if row group info is missing")); ImmutableList.Builder rowGroupBuilder = ImmutableList.builder(); for (int rowGroupId : selectedRowGroups) { @@ -438,6 +440,8 @@ private Map> readColumnIndexes(Map selectRowGroups(StripeInformation stripe, Map> columnIndexes) { + int rowsInRowGroup = this.rowsInRowGroup.orElseThrow(() -> new IllegalStateException("Cannot create row groups if row group info is missing")); + int rowsInStripe = stripe.getNumberOfRows(); int groupsInStripe = ceil(rowsInStripe, rowsInRowGroup); diff --git a/presto-orc/src/main/java/io/prestosql/orc/metadata/Footer.java b/presto-orc/src/main/java/io/prestosql/orc/metadata/Footer.java index 54c81c115343..f4399edf6457 100644 --- a/presto-orc/src/main/java/io/prestosql/orc/metadata/Footer.java +++ b/presto-orc/src/main/java/io/prestosql/orc/metadata/Footer.java @@ -22,15 +22,17 @@ import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.OptionalInt; import static com.google.common.base.MoreObjects.toStringHelper; +import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.collect.Maps.transformValues; import static java.util.Objects.requireNonNull; public class Footer { private final long numberOfRows; - private final int rowsInRowGroup; + private final OptionalInt rowsInRowGroup; private final List stripes; private final ColumnMetadata types; private final Optional> fileStats; @@ -38,13 +40,14 @@ public class Footer public Footer( long numberOfRows, - int rowsInRowGroup, + OptionalInt rowsInRowGroup, List stripes, ColumnMetadata types, Optional> fileStats, Map userMetadata) { this.numberOfRows = numberOfRows; + rowsInRowGroup.ifPresent(value -> checkArgument(value > 0, "rowsInRowGroup must be at least 1")); this.rowsInRowGroup = rowsInRowGroup; this.stripes = 
ImmutableList.copyOf(requireNonNull(stripes, "stripes is null")); this.types = requireNonNull(types, "types is null"); @@ -58,7 +61,7 @@ public long getNumberOfRows() return numberOfRows; } - public int getRowsInRowGroup() + public OptionalInt getRowsInRowGroup() { return rowsInRowGroup; } diff --git a/presto-orc/src/main/java/io/prestosql/orc/metadata/OrcMetadataReader.java b/presto-orc/src/main/java/io/prestosql/orc/metadata/OrcMetadataReader.java index df9d5b7b6234..5b7c05808b49 100644 --- a/presto-orc/src/main/java/io/prestosql/orc/metadata/OrcMetadataReader.java +++ b/presto-orc/src/main/java/io/prestosql/orc/metadata/OrcMetadataReader.java @@ -46,6 +46,7 @@ import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.OptionalInt; import java.util.TimeZone; import static com.google.common.base.Preconditions.checkArgument; @@ -134,7 +135,7 @@ public Footer readFooter(HiveWriterVersion hiveWriterVersion, InputStream inputS OrcProto.Footer footer = OrcProto.Footer.parseFrom(input); return new Footer( footer.getNumberOfRows(), - footer.getRowIndexStride(), + footer.getRowIndexStride() == 0 ? 
OptionalInt.empty() : OptionalInt.of(footer.getRowIndexStride()), toStripeInformation(footer.getStripesList()), toType(footer.getTypesList()), toColumnStatistics(hiveWriterVersion, footer.getStatisticsList(), false), diff --git a/presto-orc/src/main/java/io/prestosql/orc/metadata/OrcMetadataWriter.java b/presto-orc/src/main/java/io/prestosql/orc/metadata/OrcMetadataWriter.java index 56f28f399a5d..1e432ea8f157 100644 --- a/presto-orc/src/main/java/io/prestosql/orc/metadata/OrcMetadataWriter.java +++ b/presto-orc/src/main/java/io/prestosql/orc/metadata/OrcMetadataWriter.java @@ -114,7 +114,7 @@ public int writeFooter(SliceOutput output, Footer footer) { OrcProto.Footer.Builder builder = OrcProto.Footer.newBuilder() .setNumberOfRows(footer.getNumberOfRows()) - .setRowIndexStride(footer.getRowsInRowGroup()) + .setRowIndexStride(footer.getRowsInRowGroup().orElse(0)) .addAllStripes(footer.getStripes().stream() .map(OrcMetadataWriter::toStripeInformation) .collect(toList())) diff --git a/presto-orc/src/test/java/io/prestosql/orc/OrcTester.java b/presto-orc/src/test/java/io/prestosql/orc/OrcTester.java index 0d292f96d819..dcff9619eb95 100644 --- a/presto-orc/src/test/java/io/prestosql/orc/OrcTester.java +++ b/presto-orc/src/test/java/io/prestosql/orc/OrcTester.java @@ -573,7 +573,7 @@ static OrcRecordReader createCustomOrcRecordReader(TempFile tempFile, OrcPredica OrcReader orcReader = new OrcReader(orcDataSource, READER_OPTIONS); assertEquals(orcReader.getColumnNames(), ImmutableList.of("test")); - assertEquals(orcReader.getFooter().getRowsInRowGroup(), 10_000); + assertEquals(orcReader.getFooter().getRowsInRowGroup().orElse(0), 10_000); return orcReader.createRecordReader( orcReader.getRootColumn().getNestedColumns(), diff --git a/presto-orc/src/test/java/io/prestosql/orc/TestReadBloomFilter.java b/presto-orc/src/test/java/io/prestosql/orc/TestReadBloomFilter.java index 5791d73a76bb..8bd5c70e1c03 100644 --- 
a/presto-orc/src/test/java/io/prestosql/orc/TestReadBloomFilter.java +++ b/presto-orc/src/test/java/io/prestosql/orc/TestReadBloomFilter.java @@ -133,7 +133,7 @@ private static OrcRecordReader createCustomOrcRecordReader(TempFile tempFile, Or OrcReader orcReader = new OrcReader(orcDataSource, READER_OPTIONS); assertEquals(orcReader.getColumnNames(), ImmutableList.of("test")); - assertEquals(orcReader.getFooter().getRowsInRowGroup(), 10_000); + assertEquals(orcReader.getFooter().getRowsInRowGroup().orElse(0), 10_000); return orcReader.createRecordReader( orcReader.getRootColumn().getNestedColumns(), diff --git a/presto-product-tests/src/main/java/io/prestosql/tests/hive/TestHiveTransactionalTable.java b/presto-product-tests/src/main/java/io/prestosql/tests/hive/TestHiveTransactionalTable.java index 94fbe3907507..49e9e7ff40ec 100644 --- a/presto-product-tests/src/main/java/io/prestosql/tests/hive/TestHiveTransactionalTable.java +++ b/presto-product-tests/src/main/java/io/prestosql/tests/hive/TestHiveTransactionalTable.java @@ -17,6 +17,8 @@ import org.testng.annotations.DataProvider; import org.testng.annotations.Test; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeoutException; import java.util.stream.Stream; import static io.prestosql.tempto.assertions.QueryAssert.Row.row; @@ -32,8 +34,9 @@ public class TestHiveTransactionalTable extends HiveProductTest { - @Test(groups = {STORAGE_FORMATS, HIVE_TRANSACTIONAL}, dataProvider = "partitioningAndBucketingTypeDataProvider") + @Test(groups = {STORAGE_FORMATS, HIVE_TRANSACTIONAL}, dataProvider = "partitioningAndBucketingTypeDataProvider", timeOut = 5 * 60 * 1000) public void testReadFullAcid(boolean isPartitioned, BucketingType bucketingType) + throws InterruptedException, ExecutionException, TimeoutException { if (getHiveVersionMajor() < 3) { throw new SkipException("Presto Hive transactional tables are supported with Hive version 3 or above"); @@ -59,21 +62,30 @@ public void 
testReadFullAcid(boolean isPartitioned, BucketingType bucketingType) // test filtering assertThat(query("SELECT col, fcol FROM " + tableName + " WHERE fcol = 1 ORDER BY col")).containsOnly(row(21, 1)); + // test minor compacted data read + onHive().executeQuery("INSERT INTO TABLE " + tableName + hivePartitionString + " VALUES (20, 3)"); + onHive().executeQuery("ALTER TABLE " + tableName + " " + hivePartitionString + " COMPACT 'MINOR' AND WAIT"); + assertThat(query(selectFromOnePartitionsSql)).containsExactly(row(20, 3), row(21, 1), row(22, 2)); + // delete a row onHive().executeQuery("DELETE FROM " + tableName + " WHERE fcol=2"); - assertThat(query(selectFromOnePartitionsSql)).containsOnly(row(21, 1)); + assertThat(query(selectFromOnePartitionsSql)).containsExactly(row(20, 3), row(21, 1)); // update the existing row String predicate = "fcol = 1" + (isPartitioned ? " AND part_col = 2 " : ""); onHive().executeQuery("UPDATE " + tableName + " SET col = 23 WHERE " + predicate); - assertThat(query(selectFromOnePartitionsSql)).containsOnly(row(23, 1)); + assertThat(query(selectFromOnePartitionsSql)).containsExactly(row(20, 3), row(23, 1)); + + // test major compaction + onHive().executeQuery("ALTER TABLE " + tableName + " " + hivePartitionString + " COMPACT 'MAJOR' AND WAIT"); + assertThat(query(selectFromOnePartitionsSql)).containsExactly(row(20, 3), row(23, 1)); } finally { onHive().executeQuery("DROP TABLE " + tableName); } } - @Test(groups = {STORAGE_FORMATS, HIVE_TRANSACTIONAL}, dataProvider = "partitioningAndBucketingTypeDataProvider") + @Test(groups = {STORAGE_FORMATS, HIVE_TRANSACTIONAL}, dataProvider = "partitioningAndBucketingTypeDataProvider", timeOut = 5 * 60 * 1000) public void testReadInsertOnly(boolean isPartitioned, BucketingType bucketingType) { if (getHiveVersionMajor() < 3) { @@ -98,8 +110,17 @@ public void testReadInsertOnly(boolean isPartitioned, BucketingType bucketingTyp onHive().executeQuery("INSERT INTO TABLE " + tableName + hivePartitionString + 
" SELECT 2"); assertThat(query(selectFromOnePartitionsSql)).containsExactly(row(1), row(2)); + // test minor compacted data read + onHive().executeQuery("ALTER TABLE " + tableName + " " + hivePartitionString + " COMPACT 'MINOR' AND WAIT"); + assertThat(query(selectFromOnePartitionsSql)).containsExactly(row(1), row(2)); + onHive().executeQuery("INSERT OVERWRITE TABLE " + tableName + hivePartitionString + " SELECT 3"); assertThat(query(selectFromOnePartitionsSql)).containsOnly(row(3)); + + // test major compaction + onHive().executeQuery("INSERT INTO TABLE " + tableName + hivePartitionString + " SELECT 4"); + onHive().executeQuery("ALTER TABLE " + tableName + " " + hivePartitionString + " COMPACT 'MAJOR' AND WAIT"); + assertThat(query(selectFromOnePartitionsSql)).containsOnly(row(3), row(4)); } finally { onHive().executeQuery("DROP TABLE " + tableName); From eafbdcda25ed72416e12118e60b3061dc327d3a7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=81ukasz=20Osipiuk?= Date: Thu, 30 Apr 2020 12:20:22 +0200 Subject: [PATCH 293/519] Do not used Rubix parallel warmup by default Stress testing showed that Rubix caching is not stable with parallel warmup enabled. Temporarily diabling by default. 
--- .../java/io/prestosql/plugin/hive/rubix/RubixConfig.java | 3 ++- .../io/prestosql/plugin/hive/rubix/TestRubixConfig.java | 6 +++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/rubix/RubixConfig.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/rubix/RubixConfig.java index b46d3e7ca2eb..3c3f643686f1 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/rubix/RubixConfig.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/rubix/RubixConfig.java @@ -20,7 +20,8 @@ public class RubixConfig { - private boolean parallelWarmupEnabled = true; + // TODO enable by default again after https://github.com/prestosql/presto/issues/3494 is fixed + private boolean parallelWarmupEnabled; private String cacheLocation; private int bookKeeperServerPort = CacheConfig.DEFAULT_BOOKKEEPER_SERVER_PORT; private int dataTransferServerPort = CacheConfig.DEFAULT_DATA_TRANSFER_SERVER_PORT; diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/rubix/TestRubixConfig.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/rubix/TestRubixConfig.java index e01690e4d256..be6122960a59 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/rubix/TestRubixConfig.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/rubix/TestRubixConfig.java @@ -32,21 +32,21 @@ public void testDefaults() .setBookKeeperServerPort(CacheConfig.DEFAULT_BOOKKEEPER_SERVER_PORT) .setDataTransferServerPort(CacheConfig.DEFAULT_DATA_TRANSFER_SERVER_PORT) .setCacheLocation(null) - .setParallelWarmupEnabled(true)); + .setParallelWarmupEnabled(false)); } @Test public void testExplicitPropertyMappings() { Map properties = new ImmutableMap.Builder() - .put("hive.cache.parallel-warmup-enabled", "false") + .put("hive.cache.parallel-warmup-enabled", "true") .put("hive.cache.location", "/some-directory") .put("hive.cache.bookkeeper-port", "1234") .put("hive.cache.data-transfer-port", "1235") .build(); RubixConfig 
expected = new RubixConfig() - .setParallelWarmupEnabled(false) + .setParallelWarmupEnabled(true) .setCacheLocation("/some-directory") .setBookKeeperServerPort(1234) .setDataTransferServerPort(1235); From c970ae0bf08e225a355b4cb5c0b941f26dc7e957 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=81ukasz=20Osipiuk?= Date: Wed, 29 Apr 2020 13:53:46 +0200 Subject: [PATCH 294/519] Replace hive.cache.parallel-warmup-enabled with hive.cache.read-mode --- .../plugin/hive/rubix/RubixConfig.java | 49 ++++++++++++++++--- .../rubix/RubixConfigurationInitializer.java | 2 +- .../plugin/hive/rubix/TestRubixConfig.java | 14 ++++-- 3 files changed, 54 insertions(+), 11 deletions(-) diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/rubix/RubixConfig.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/rubix/RubixConfig.java index 3c3f643686f1..6f6bf0ecee86 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/rubix/RubixConfig.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/rubix/RubixConfig.java @@ -18,23 +18,58 @@ import javax.validation.constraints.NotNull; +import static java.lang.String.format; +import static java.util.Locale.ENGLISH; +import static java.util.Objects.requireNonNull; + public class RubixConfig { - // TODO enable by default again after https://github.com/prestosql/presto/issues/3494 is fixed - private boolean parallelWarmupEnabled; + public enum ReadMode + { + READ_THROUGH(false), + ASYNC(true); + + private final boolean parallelWarmupEnabled; + + ReadMode(boolean parallelWarmupEnabled) + { + this.parallelWarmupEnabled = parallelWarmupEnabled; + } + + public boolean isParallelWarmupEnabled() + { + return parallelWarmupEnabled; + } + + public static ReadMode fromString(String value) + { + switch (requireNonNull(value, "value is null").toLowerCase(ENGLISH)) { + case "async": + return ASYNC; + case "read-through": + return READ_THROUGH; + } + + throw new IllegalArgumentException(format("Unrecognized value: '%s'", value)); + } + 
} + + // TODO switch back to ASYNC when https://github.com/prestosql/presto/issues/3494 is fixed + private ReadMode readMode = ReadMode.READ_THROUGH; private String cacheLocation; private int bookKeeperServerPort = CacheConfig.DEFAULT_BOOKKEEPER_SERVER_PORT; private int dataTransferServerPort = CacheConfig.DEFAULT_DATA_TRANSFER_SERVER_PORT; - public boolean isParallelWarmupEnabled() + @NotNull + public ReadMode getReadMode() { - return parallelWarmupEnabled; + return readMode; } - @Config("hive.cache.parallel-warmup-enabled") - public RubixConfig setParallelWarmupEnabled(boolean value) + @Config("hive.cache.read-mode") + public RubixConfig setReadMode(ReadMode readMode) { - this.parallelWarmupEnabled = value; + this.readMode = readMode; return this; } diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/rubix/RubixConfigurationInitializer.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/rubix/RubixConfigurationInitializer.java index 9cda8cc2943a..f5668c1778d4 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/rubix/RubixConfigurationInitializer.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/rubix/RubixConfigurationInitializer.java @@ -61,7 +61,7 @@ public class RubixConfigurationInitializer @Inject public RubixConfigurationInitializer(RubixConfig config) { - this.parallelWarmupEnabled = config.isParallelWarmupEnabled(); + this.parallelWarmupEnabled = config.getReadMode().isParallelWarmupEnabled(); this.cacheLocation = config.getCacheLocation(); this.bookKeeperServerPort = config.getBookKeeperServerPort(); this.dataTransferServerPort = config.getDataTransferServerPort(); diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/rubix/TestRubixConfig.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/rubix/TestRubixConfig.java index be6122960a59..c862e864c61f 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/rubix/TestRubixConfig.java +++ 
b/presto-hive/src/test/java/io/prestosql/plugin/hive/rubix/TestRubixConfig.java @@ -22,6 +22,7 @@ import static io.airlift.configuration.testing.ConfigAssertions.assertFullMapping; import static io.airlift.configuration.testing.ConfigAssertions.assertRecordedDefaults; import static io.airlift.configuration.testing.ConfigAssertions.recordDefaults; +import static org.assertj.core.api.Assertions.assertThat; public class TestRubixConfig { @@ -32,25 +33,32 @@ public void testDefaults() .setBookKeeperServerPort(CacheConfig.DEFAULT_BOOKKEEPER_SERVER_PORT) .setDataTransferServerPort(CacheConfig.DEFAULT_DATA_TRANSFER_SERVER_PORT) .setCacheLocation(null) - .setParallelWarmupEnabled(false)); + .setReadMode(RubixConfig.ReadMode.READ_THROUGH)); } @Test public void testExplicitPropertyMappings() { Map properties = new ImmutableMap.Builder() - .put("hive.cache.parallel-warmup-enabled", "true") + .put("hive.cache.read-mode", "async") .put("hive.cache.location", "/some-directory") .put("hive.cache.bookkeeper-port", "1234") .put("hive.cache.data-transfer-port", "1235") .build(); RubixConfig expected = new RubixConfig() - .setParallelWarmupEnabled(true) + .setReadMode(RubixConfig.ReadMode.ASYNC) .setCacheLocation("/some-directory") .setBookKeeperServerPort(1234) .setDataTransferServerPort(1235); assertFullMapping(properties, expected); } + + @Test + public void testReadModeFromString() + { + assertThat(RubixConfig.ReadMode.fromString("async")).isEqualTo(RubixConfig.ReadMode.ASYNC); + assertThat(RubixConfig.ReadMode.fromString("read-through")).isEqualTo(RubixConfig.ReadMode.READ_THROUGH); + } } From cfd3350574f6aec32880ab5c5798f19c1f6d7f5a Mon Sep 17 00:00:00 2001 From: "byunghwa.yun" Date: Thu, 30 Apr 2020 13:56:52 +0900 Subject: [PATCH 295/519] Fix wrong return value in geospatial document --- presto-docs/src/main/sphinx/functions/geospatial.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/presto-docs/src/main/sphinx/functions/geospatial.rst 
b/presto-docs/src/main/sphinx/functions/geospatial.rst index bea8e7d70929..836ccc62aaaa 100644 --- a/presto-docs/src/main/sphinx/functions/geospatial.rst +++ b/presto-docs/src/main/sphinx/functions/geospatial.rst @@ -378,12 +378,12 @@ Accessors Returns the cardinality of the collection of interior rings of a polygon. -.. function:: line_interpolate_point(LineString, double) -> double +.. function:: line_interpolate_point(LineString, double) -> Geometry Returns a Point interpolated along a LineString at the fraction given. The fraction must be between 0 and 1, inclusive. -.. function:: line_interpolate_points(LineString, double, repeated) -> double +.. function:: line_interpolate_points(LineString, double, repeated) -> array(Geometry) Returns an array of Points interpolated along a LineString. The fraction must be between 0 and 1, inclusive. From a380c5d1bdc40056860f4b06f4dacd2dd8bbdff8 Mon Sep 17 00:00:00 2001 From: Alex Date: Wed, 29 Apr 2020 10:57:37 -0400 Subject: [PATCH 296/519] Add data mapping tests with padded chars to TestDistributedQueries --- .../io/prestosql/testing/AbstractTestDistributedQueries.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java index 36299205a5c9..9f44b9f576e5 100644 --- a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java +++ b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java @@ -1418,7 +1418,9 @@ private List testDataMappingSmokeTestData() .add(new DataMappingTestSetup("timestamp", "TIMESTAMP '2020-02-12 15:03:00'", "TIMESTAMP '2199-12-31 23:59:59.999'")) .add(new DataMappingTestSetup("timestamp with time zone", "TIMESTAMP '2020-02-12 15:03:00 +01:00'", "TIMESTAMP '9999-12-31 23:59:59.999 +12:00'")) .add(new DataMappingTestSetup("char(3)", "'ab'", "'zzz'")) + .add(new 
DataMappingTestSetup("char(3)", "'ab '", "'zzz'")) .add(new DataMappingTestSetup("varchar(3)", "'de'", "'zzz'")) + .add(new DataMappingTestSetup("varchar(3)", "'de '", "'zzz'")) .add(new DataMappingTestSetup("varchar", "'łąka for the win'", "'ŻŻŻŻŻŻŻŻŻŻ'")) .add(new DataMappingTestSetup("varbinary", "X'12ab3f'", "X'ffffffffffffffffffff'")) .build(); From 780e27288141e26029da1495100893126f13ab05 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Wed, 15 Apr 2020 11:13:34 +0200 Subject: [PATCH 297/519] Verify number of deserialized pages --- .../java/io/prestosql/operator/HttpPageBufferClient.java | 7 +++++++ .../java/io/prestosql/server/PagesResponseWriter.java | 4 ++++ .../prestosql/operator/MockExchangeRequestProcessor.java | 3 +++ .../operator/TestingExchangeHttpClientHandler.java | 8 +++++++- 4 files changed, 21 insertions(+), 1 deletion(-) diff --git a/presto-main/src/main/java/io/prestosql/operator/HttpPageBufferClient.java b/presto-main/src/main/java/io/prestosql/operator/HttpPageBufferClient.java index 513548f9599a..0124bc64b628 100644 --- a/presto-main/src/main/java/io/prestosql/operator/HttpPageBufferClient.java +++ b/presto-main/src/main/java/io/prestosql/operator/HttpPageBufferClient.java @@ -73,6 +73,7 @@ import static io.prestosql.execution.buffer.PagesSerdeUtil.readSerializedPages; import static io.prestosql.operator.HttpPageBufferClient.PagesResponse.createEmptyPagesResponse; import static io.prestosql.operator.HttpPageBufferClient.PagesResponse.createPagesResponse; +import static io.prestosql.server.PagesResponseWriter.SERIALIZED_PAGES_MAGIC; import static io.prestosql.spi.HostAddress.fromUri; import static io.prestosql.spi.StandardErrorCode.REMOTE_BUFFER_CLOSE_FAILED; import static io.prestosql.spi.StandardErrorCode.REMOTE_TASK_MISMATCH; @@ -593,7 +594,13 @@ public PagesResponse handle(Request request, Response response) boolean complete = getComplete(response); try (SliceInput input = new InputStreamSliceInput(response.getInputStream())) { + int 
magic = input.readInt(); + if (magic != SERIALIZED_PAGES_MAGIC) { + throw new IllegalStateException(format("Invalid stream header, expected 0x%08x, but was 0x%08x", SERIALIZED_PAGES_MAGIC, magic)); + } + int pagesCount = input.readInt(); List pages = ImmutableList.copyOf(readSerializedPages(input)); + checkState(pages.size() == pagesCount, "Wrong number of pages, expected %s, but read %s", pagesCount, pages.size()); return createPagesResponse(taskInstanceId, token, nextToken, pages, complete); } catch (IOException e) { diff --git a/presto-main/src/main/java/io/prestosql/server/PagesResponseWriter.java b/presto-main/src/main/java/io/prestosql/server/PagesResponseWriter.java index 0d334a8d0fa0..b6c23c0afc99 100644 --- a/presto-main/src/main/java/io/prestosql/server/PagesResponseWriter.java +++ b/presto-main/src/main/java/io/prestosql/server/PagesResponseWriter.java @@ -41,6 +41,8 @@ public class PagesResponseWriter implements MessageBodyWriter> { + public static final int SERIALIZED_PAGES_MAGIC = 0xfea4f001; + private static final MediaType PRESTO_PAGES_TYPE = MediaType.valueOf(PRESTO_PAGES); private static final Type LIST_GENERIC_TOKEN; @@ -79,6 +81,8 @@ public void writeTo(List serializedPages, { try { SliceOutput sliceOutput = new OutputStreamSliceOutput(output); + sliceOutput.writeInt(SERIALIZED_PAGES_MAGIC); + sliceOutput.writeInt(serializedPages.size()); writeSerializedPages(sliceOutput, serializedPages); // We use flush instead of close, because the underlying stream would be closed and that is not allowed. 
sliceOutput.flush(); diff --git a/presto-main/src/test/java/io/prestosql/operator/MockExchangeRequestProcessor.java b/presto-main/src/test/java/io/prestosql/operator/MockExchangeRequestProcessor.java index 55614d0d69b9..97de6c1a5d91 100644 --- a/presto-main/src/test/java/io/prestosql/operator/MockExchangeRequestProcessor.java +++ b/presto-main/src/test/java/io/prestosql/operator/MockExchangeRequestProcessor.java @@ -48,6 +48,7 @@ import static io.prestosql.client.PrestoHeaders.PRESTO_PAGE_TOKEN; import static io.prestosql.client.PrestoHeaders.PRESTO_TASK_INSTANCE_ID; import static io.prestosql.execution.buffer.TestingPagesSerdeFactory.testingPagesSerde; +import static io.prestosql.server.PagesResponseWriter.SERIALIZED_PAGES_MAGIC; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertTrue; @@ -97,6 +98,8 @@ public Response handle(Request request) HttpStatus status; if (!result.getSerializedPages().isEmpty()) { DynamicSliceOutput sliceOutput = new DynamicSliceOutput(64); + sliceOutput.writeInt(SERIALIZED_PAGES_MAGIC); + sliceOutput.writeInt(result.getSerializedPages().size()); PagesSerdeUtil.writeSerializedPages(sliceOutput, result.getSerializedPages()); bytes = sliceOutput.slice().getBytes(); status = HttpStatus.OK; diff --git a/presto-main/src/test/java/io/prestosql/operator/TestingExchangeHttpClientHandler.java b/presto-main/src/test/java/io/prestosql/operator/TestingExchangeHttpClientHandler.java index a2faec7a9254..3f105976b7c0 100644 --- a/presto-main/src/test/java/io/prestosql/operator/TestingExchangeHttpClientHandler.java +++ b/presto-main/src/test/java/io/prestosql/operator/TestingExchangeHttpClientHandler.java @@ -33,6 +33,7 @@ import static io.prestosql.client.PrestoHeaders.PRESTO_PAGE_TOKEN; import static io.prestosql.client.PrestoHeaders.PRESTO_TASK_INSTANCE_ID; import static io.prestosql.execution.buffer.TestingPagesSerdeFactory.testingPagesSerde; +import static 
io.prestosql.server.PagesResponseWriter.SERIALIZED_PAGES_MAGIC; import static java.util.Objects.requireNonNull; import static javax.ws.rs.core.HttpHeaders.CONTENT_TYPE; import static org.testng.Assert.assertEquals; @@ -73,13 +74,18 @@ public Response handle(Request request) headers.put(PRESTO_PAGE_NEXT_TOKEN, String.valueOf(pageToken + 1)); headers.put(PRESTO_BUFFER_COMPLETE, String.valueOf(false)); DynamicSliceOutput output = new DynamicSliceOutput(256); + output.writeInt(SERIALIZED_PAGES_MAGIC); + output.writeInt(1); PagesSerdeUtil.writePages(PAGES_SERDE, output, page); return new TestingResponse(HttpStatus.OK, headers.build(), output.slice().getInput()); } else if (taskBuffer.isFinished()) { headers.put(PRESTO_PAGE_NEXT_TOKEN, String.valueOf(pageToken)); headers.put(PRESTO_BUFFER_COMPLETE, String.valueOf(true)); - return new TestingResponse(HttpStatus.OK, headers.build(), new byte[0]); + DynamicSliceOutput output = new DynamicSliceOutput(8); + output.writeInt(SERIALIZED_PAGES_MAGIC); + output.writeInt(0); + return new TestingResponse(HttpStatus.OK, headers.build(), output.slice().getInput()); } else { headers.put(PRESTO_PAGE_NEXT_TOKEN, String.valueOf(pageToken)); From 4d80836fb88c7ae1ecfb7469412c505bed58b658 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Wed, 15 Apr 2020 11:13:35 +0200 Subject: [PATCH 298/519] Verify data integrity in exchanges In our testing, a cloud's network proved to be not reliable. We observed data corruption when transmitting data over TCP between Presto nodes (internal communication unsecured, no compression). Verify data integrity to prevent incorrect query results. Optionally retry when data corruption is detected. 
--- .../execution/buffer/PagesSerdeUtil.java | 35 +++++ .../io/prestosql/operator/ExchangeClient.java | 9 ++ .../operator/ExchangeClientFactory.java | 15 +++ .../operator/HttpPageBufferClient.java | 76 ++++++++++- .../prestosql/server/PagesResponseWriter.java | 16 +++ .../sql/analyzer/FeaturesConfig.java | 21 +++ .../MockExchangeRequestProcessor.java | 6 +- .../operator/TestExchangeClient.java | 124 ++++++++++++++++++ .../operator/TestExchangeOperator.java | 3 + .../operator/TestHttpPageBufferClient.java | 26 +++- .../prestosql/operator/TestMergeOperator.java | 4 +- .../TestingExchangeHttpClientHandler.java | 9 +- .../sql/analyzer/TestFeaturesConfig.java | 4 + 13 files changed, 336 insertions(+), 12 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/execution/buffer/PagesSerdeUtil.java b/presto-main/src/main/java/io/prestosql/execution/buffer/PagesSerdeUtil.java index 841d4ea4037d..d6c8735d5232 100644 --- a/presto-main/src/main/java/io/prestosql/execution/buffer/PagesSerdeUtil.java +++ b/presto-main/src/main/java/io/prestosql/execution/buffer/PagesSerdeUtil.java @@ -17,11 +17,14 @@ import io.airlift.slice.Slice; import io.airlift.slice.SliceInput; import io.airlift.slice.SliceOutput; +import io.airlift.slice.Slices; +import io.airlift.slice.XxHash64; import io.prestosql.spi.Page; import io.prestosql.spi.block.Block; import io.prestosql.spi.block.BlockEncodingSerde; import java.util.Iterator; +import java.util.List; import static io.prestosql.block.BlockSerdeUtil.readBlock; import static io.prestosql.block.BlockSerdeUtil.writeBlock; @@ -32,6 +35,13 @@ public final class PagesSerdeUtil { private PagesSerdeUtil() {} + /** + * Special checksum value used to verify configuration consistency across nodes (all nodes need to have data integrity configured the same way). + * + * @implNote It's not just 0, so that hypothetical zero-ed out data is not treated as valid payload with no checksum. 
+ */ + public static final long NO_CHECKSUM = 0x0123456789abcdefL; + static void writeRawPage(Page page, SliceOutput output, BlockEncodingSerde serde) { output.writeInt(page.getChannelCount()); @@ -53,6 +63,7 @@ static Page readRawPage(int positionCount, SliceInput input, BlockEncodingSerde public static void writeSerializedPage(SliceOutput output, SerializedPage page) { + // Every new field being written here must be added in updateChecksum() too. output.writeInt(page.getPositionCount()); output.writeByte(page.getPageCodecMarkers()); output.writeInt(page.getUncompressedSizeInBytes()); @@ -60,6 +71,16 @@ public static void writeSerializedPage(SliceOutput output, SerializedPage page) output.writeBytes(page.getSlice()); } + private static void updateChecksum(XxHash64 hash, SerializedPage page) + { + hash.update(Slices.wrappedIntArray( + page.getPositionCount(), + page.getPageCodecMarkers(), + page.getUncompressedSizeInBytes(), + page.getSizeInBytes())); + hash.update(page.getSlice()); + } + private static SerializedPage readSerializedPage(SliceInput sliceInput) { int positionCount = sliceInput.readInt(); @@ -82,6 +103,20 @@ public static long writeSerializedPages(SliceOutput sliceOutput, Iterable pages) + { + XxHash64 hash = new XxHash64(); + for (SerializedPage page : pages) { + updateChecksum(hash, page); + } + long checksum = hash.hash(); + // Since NO_CHECKSUM is assigned a special meaning, it is not a valid checksum. + if (checksum == NO_CHECKSUM) { + return checksum + 1; + } + return checksum; + } + public static long writePages(PagesSerde serde, SliceOutput sliceOutput, Page... 
pages) { return writePages(serde, sliceOutput, asList(pages).iterator()); diff --git a/presto-main/src/main/java/io/prestosql/operator/ExchangeClient.java b/presto-main/src/main/java/io/prestosql/operator/ExchangeClient.java index 14ea3d8d6e11..9ca9887781d2 100644 --- a/presto-main/src/main/java/io/prestosql/operator/ExchangeClient.java +++ b/presto-main/src/main/java/io/prestosql/operator/ExchangeClient.java @@ -25,6 +25,7 @@ import io.prestosql.memory.context.LocalMemoryContext; import io.prestosql.operator.HttpPageBufferClient.ClientCallback; import io.prestosql.operator.WorkProcessor.ProcessState; +import io.prestosql.sql.analyzer.FeaturesConfig.DataIntegrityVerification; import javax.annotation.Nullable; import javax.annotation.concurrent.GuardedBy; @@ -57,6 +58,8 @@ public class ExchangeClient { private static final SerializedPage NO_MORE_PAGES = new SerializedPage(EMPTY_SLICE, PageCodecMarker.MarkerSet.empty(), 0, 0); + private final String selfAddress; + private final DataIntegrityVerification dataIntegrityVerification; private final long bufferCapacity; private final DataSize maxResponseSize; private final int concurrentRequestMultiplier; @@ -97,6 +100,8 @@ public class ExchangeClient // ExchangeClientStatus.mergeWith assumes all clients have the same bufferCapacity. // Please change that method accordingly when this assumption becomes not true. 
public ExchangeClient( + String selfAddress, + DataIntegrityVerification dataIntegrityVerification, DataSize bufferCapacity, DataSize maxResponseSize, int concurrentRequestMultiplier, @@ -107,6 +112,8 @@ public ExchangeClient( LocalMemoryContext systemMemoryContext, Executor pageBufferClientCallbackExecutor) { + this.selfAddress = requireNonNull(selfAddress, "selfAddress is null"); + this.dataIntegrityVerification = requireNonNull(dataIntegrityVerification, "dataIntegrityVerification is null"); this.bufferCapacity = bufferCapacity.toBytes(); this.maxResponseSize = maxResponseSize; this.concurrentRequestMultiplier = concurrentRequestMultiplier; @@ -156,7 +163,9 @@ public synchronized void addLocation(URI location) checkState(!noMoreLocations, "No more locations already set"); HttpPageBufferClient client = new HttpPageBufferClient( + selfAddress, httpClient, + dataIntegrityVerification, maxResponseSize, maxErrorDuration, acknowledgePages, diff --git a/presto-main/src/main/java/io/prestosql/operator/ExchangeClientFactory.java b/presto-main/src/main/java/io/prestosql/operator/ExchangeClientFactory.java index 1774d9234844..6a43d3667a11 100644 --- a/presto-main/src/main/java/io/prestosql/operator/ExchangeClientFactory.java +++ b/presto-main/src/main/java/io/prestosql/operator/ExchangeClientFactory.java @@ -15,9 +15,12 @@ import io.airlift.concurrent.ThreadPoolExecutorMBean; import io.airlift.http.client.HttpClient; +import io.airlift.node.NodeInfo; import io.airlift.units.DataSize; import io.airlift.units.Duration; import io.prestosql.memory.context.LocalMemoryContext; +import io.prestosql.sql.analyzer.FeaturesConfig; +import io.prestosql.sql.analyzer.FeaturesConfig.DataIntegrityVerification; import org.weakref.jmx.Managed; import org.weakref.jmx.Nested; @@ -36,6 +39,8 @@ public class ExchangeClientFactory implements ExchangeClientSupplier { + private final NodeInfo nodeInfo; + private final DataIntegrityVerification dataIntegrityVerification; private final DataSize 
maxBufferedBytes; private final int concurrentRequestMultiplier; private final Duration maxErrorDuration; @@ -48,11 +53,15 @@ public class ExchangeClientFactory @Inject public ExchangeClientFactory( + NodeInfo nodeInfo, + FeaturesConfig featuresConfig, ExchangeClientConfig config, @ForExchange HttpClient httpClient, @ForExchange ScheduledExecutorService scheduler) { this( + nodeInfo, + featuresConfig.getExchangeDataIntegrityVerification(), config.getMaxBufferSize(), config.getMaxResponseSize(), config.getConcurrentRequestMultiplier(), @@ -64,6 +73,8 @@ public ExchangeClientFactory( } public ExchangeClientFactory( + NodeInfo nodeInfo, + DataIntegrityVerification dataIntegrityVerification, DataSize maxBufferedBytes, DataSize maxResponseSize, int concurrentRequestMultiplier, @@ -73,6 +84,8 @@ public ExchangeClientFactory( HttpClient httpClient, ScheduledExecutorService scheduler) { + this.nodeInfo = requireNonNull(nodeInfo, "nodeInfo is null"); + this.dataIntegrityVerification = requireNonNull(dataIntegrityVerification, "dataIntegrityVerification is null"); this.maxBufferedBytes = requireNonNull(maxBufferedBytes, "maxBufferedBytes is null"); this.concurrentRequestMultiplier = concurrentRequestMultiplier; this.maxErrorDuration = requireNonNull(maxErrorDuration, "maxErrorDuration is null"); @@ -112,6 +125,8 @@ public ThreadPoolExecutorMBean getExecutor() public ExchangeClient get(LocalMemoryContext systemMemoryContext) { return new ExchangeClient( + nodeInfo.getExternalAddress(), + dataIntegrityVerification, maxBufferedBytes, maxResponseSize, concurrentRequestMultiplier, diff --git a/presto-main/src/main/java/io/prestosql/operator/HttpPageBufferClient.java b/presto-main/src/main/java/io/prestosql/operator/HttpPageBufferClient.java index 0124bc64b628..15a88f0a539e 100644 --- a/presto-main/src/main/java/io/prestosql/operator/HttpPageBufferClient.java +++ b/presto-main/src/main/java/io/prestosql/operator/HttpPageBufferClient.java @@ -34,6 +34,7 @@ import 
io.prestosql.execution.buffer.SerializedPage; import io.prestosql.server.remotetask.Backoff; import io.prestosql.spi.PrestoException; +import io.prestosql.sql.analyzer.FeaturesConfig.DataIntegrityVerification; import org.joda.time.DateTime; import javax.annotation.Nullable; @@ -70,11 +71,14 @@ import static io.prestosql.client.PrestoHeaders.PRESTO_PAGE_NEXT_TOKEN; import static io.prestosql.client.PrestoHeaders.PRESTO_PAGE_TOKEN; import static io.prestosql.client.PrestoHeaders.PRESTO_TASK_INSTANCE_ID; +import static io.prestosql.execution.buffer.PagesSerdeUtil.NO_CHECKSUM; +import static io.prestosql.execution.buffer.PagesSerdeUtil.calculateChecksum; import static io.prestosql.execution.buffer.PagesSerdeUtil.readSerializedPages; import static io.prestosql.operator.HttpPageBufferClient.PagesResponse.createEmptyPagesResponse; import static io.prestosql.operator.HttpPageBufferClient.PagesResponse.createPagesResponse; import static io.prestosql.server.PagesResponseWriter.SERIALIZED_PAGES_MAGIC; import static io.prestosql.spi.HostAddress.fromUri; +import static io.prestosql.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR; import static io.prestosql.spi.StandardErrorCode.REMOTE_BUFFER_CLOSE_FAILED; import static io.prestosql.spi.StandardErrorCode.REMOTE_TASK_MISMATCH; import static io.prestosql.util.Failures.REMOTE_TASK_MISMATCH_ERROR; @@ -110,7 +114,9 @@ public interface ClientCallback void clientFailed(HttpPageBufferClient client, Throwable cause); } + private final String selfAddress; private final HttpClient httpClient; + private final DataIntegrityVerification dataIntegrityVerification; private final DataSize maxResponseSize; private final boolean acknowledgePages; private final URI location; @@ -146,7 +152,9 @@ public interface ClientCallback private final Executor pageBufferClientCallbackExecutor; public HttpPageBufferClient( + String selfAddress, HttpClient httpClient, + DataIntegrityVerification dataIntegrityVerification, DataSize maxResponseSize, Duration 
maxErrorDuration, boolean acknowledgePages, @@ -155,11 +163,24 @@ public HttpPageBufferClient( ScheduledExecutorService scheduler, Executor pageBufferClientCallbackExecutor) { - this(httpClient, maxResponseSize, maxErrorDuration, acknowledgePages, location, clientCallback, scheduler, Ticker.systemTicker(), pageBufferClientCallbackExecutor); + this( + selfAddress, + httpClient, + dataIntegrityVerification, + maxResponseSize, + maxErrorDuration, + acknowledgePages, + location, + clientCallback, + scheduler, + Ticker.systemTicker(), + pageBufferClientCallbackExecutor); } public HttpPageBufferClient( + String selfAddress, HttpClient httpClient, + DataIntegrityVerification dataIntegrityVerification, DataSize maxResponseSize, Duration maxErrorDuration, boolean acknowledgePages, @@ -169,7 +190,9 @@ public HttpPageBufferClient( Ticker ticker, Executor pageBufferClientCallbackExecutor) { + this.selfAddress = requireNonNull(selfAddress, "selfAddress is null"); this.httpClient = requireNonNull(httpClient, "httpClient is null"); + this.dataIntegrityVerification = requireNonNull(dataIntegrityVerification, "dataIntegrityVerification is null"); this.maxResponseSize = requireNonNull(maxResponseSize, "maxResponseSize is null"); this.acknowledgePages = acknowledgePages; this.location = requireNonNull(location, "location is null"); @@ -302,7 +325,7 @@ private synchronized void sendGetResults() prepareGet() .setHeader(PRESTO_MAX_SIZE, maxResponseSize.toString()) .setUri(uri).build(), - new PageResponseHandler()); + new PageResponseHandler(dataIntegrityVerification != DataIntegrityVerification.NONE)); future = resultFuture; Futures.addCallback(resultFuture, new FutureCallback() @@ -404,6 +427,22 @@ public void onFailure(Throwable t) log.debug("Request to %s failed %s", uri, t); checkNotHoldsLock(this); + if (t instanceof ChecksumVerificationException) { + switch (dataIntegrityVerification) { + case NONE: + // In case of NONE, failure is possible in case of inconsistent cluster 
configuration, so we should not retry. + case ABORT: + // PrestoException will not be retried + t = new PrestoException(GENERIC_INTERNAL_ERROR, format("Checksum verification failure on %s when reading from %s: %s", selfAddress, uri, t.getMessage()), t); + break; + case RETRY: + log.warn("Checksum verification failure on %s when reading from %s, may be retried: %s", selfAddress, uri, t.getMessage()); + break; + default: + throw new AssertionError("Unsupported option: " + dataIntegrityVerification); + } + } + t = rewriteException(t); if (!(t instanceof PrestoException) && backoff.failure()) { String message = format("%s (%s - %s failures, failure duration %s, total failed request time %s)", @@ -543,6 +582,13 @@ private static Throwable rewriteException(Throwable t) public static class PageResponseHandler implements ResponseHandler { + private final boolean dataIntegrityVerificationEnabled; + + private PageResponseHandler(boolean dataIntegrityVerificationEnabled) + { + this.dataIntegrityVerificationEnabled = dataIntegrityVerificationEnabled; + } + @Override public PagesResponse handleException(Request request, Exception exception) { @@ -598,8 +644,10 @@ public PagesResponse handle(Request request, Response response) if (magic != SERIALIZED_PAGES_MAGIC) { throw new IllegalStateException(format("Invalid stream header, expected 0x%08x, but was 0x%08x", SERIALIZED_PAGES_MAGIC, magic)); } + long checksum = input.readLong(); int pagesCount = input.readInt(); List pages = ImmutableList.copyOf(readSerializedPages(input)); + verifyChecksum(checksum, pages); checkState(pages.size() == pagesCount, "Wrong number of pages, expected %s, but read %s", pagesCount, pages.size()); return createPagesResponse(taskInstanceId, token, nextToken, pages, complete); } @@ -612,6 +660,21 @@ public PagesResponse handle(Request request, Response response) } } + private void verifyChecksum(long readChecksum, List pages) + { + if (dataIntegrityVerificationEnabled) { + long calculatedChecksum = 
calculateChecksum(pages); + if (readChecksum != calculatedChecksum) { + throw new ChecksumVerificationException(format("Data corruption, read checksum: 0x%08x, calculated checksum: 0x%08x", readChecksum, calculatedChecksum)); + } + } + else { + if (readChecksum != NO_CHECKSUM) { + throw new ChecksumVerificationException(format("Expected checksum to be NO_CHECKSUM (0x%08x) but is 0x%08x", NO_CHECKSUM, readChecksum)); + } + } + } + private static String getTaskInstanceId(Response response) { String taskInstanceId = response.getHeader(PRESTO_TASK_INSTANCE_ID); @@ -722,4 +785,13 @@ public String toString() .toString(); } } + + private static class ChecksumVerificationException + extends RuntimeException + { + public ChecksumVerificationException(String message) + { + super(requireNonNull(message, "message is null")); + } + } } diff --git a/presto-main/src/main/java/io/prestosql/server/PagesResponseWriter.java b/presto-main/src/main/java/io/prestosql/server/PagesResponseWriter.java index b6c23c0afc99..bd99974e2642 100644 --- a/presto-main/src/main/java/io/prestosql/server/PagesResponseWriter.java +++ b/presto-main/src/main/java/io/prestosql/server/PagesResponseWriter.java @@ -17,7 +17,10 @@ import io.airlift.slice.OutputStreamSliceOutput; import io.airlift.slice.SliceOutput; import io.prestosql.execution.buffer.SerializedPage; +import io.prestosql.sql.analyzer.FeaturesConfig; +import io.prestosql.sql.analyzer.FeaturesConfig.DataIntegrityVerification; +import javax.inject.Inject; import javax.ws.rs.Produces; import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.MediaType; @@ -34,7 +37,10 @@ import java.util.List; import static io.prestosql.PrestoMediaTypes.PRESTO_PAGES; +import static io.prestosql.execution.buffer.PagesSerdeUtil.NO_CHECKSUM; +import static io.prestosql.execution.buffer.PagesSerdeUtil.calculateChecksum; import static io.prestosql.execution.buffer.PagesSerdeUtil.writeSerializedPages; +import static java.util.Objects.requireNonNull; @Provider 
@Produces(PRESTO_PAGES) @@ -55,6 +61,15 @@ public class PagesResponseWriter } } + private final boolean dataIntegrityVerificationEnabled; + + @Inject + public PagesResponseWriter(FeaturesConfig featuresConfig) + { + requireNonNull(featuresConfig, "featuresConfig is null"); + this.dataIntegrityVerificationEnabled = featuresConfig.getExchangeDataIntegrityVerification() != DataIntegrityVerification.NONE; + } + @Override public boolean isWriteable(Class type, Type genericType, Annotation[] annotations, MediaType mediaType) { @@ -82,6 +97,7 @@ public void writeTo(List serializedPages, try { SliceOutput sliceOutput = new OutputStreamSliceOutput(output); sliceOutput.writeInt(SERIALIZED_PAGES_MAGIC); + sliceOutput.writeLong(dataIntegrityVerificationEnabled ? calculateChecksum(serializedPages) : NO_CHECKSUM); sliceOutput.writeInt(serializedPages.size()); writeSerializedPages(sliceOutput, serializedPages); // We use flush instead of close, because the underlying stream would be closed and that is not allowed. 
diff --git a/presto-main/src/main/java/io/prestosql/sql/analyzer/FeaturesConfig.java b/presto-main/src/main/java/io/prestosql/sql/analyzer/FeaturesConfig.java index 72e04e97b4c2..6cb42c0cc090 100644 --- a/presto-main/src/main/java/io/prestosql/sql/analyzer/FeaturesConfig.java +++ b/presto-main/src/main/java/io/prestosql/sql/analyzer/FeaturesConfig.java @@ -84,6 +84,7 @@ public class FeaturesConfig private boolean optimizeHashGeneration = true; private boolean enableIntermediateAggregations; private boolean pushTableWriteThroughUnion = true; + private DataIntegrityVerification exchangeDataIntegrityVerification = DataIntegrityVerification.ABORT; private boolean exchangeCompressionEnabled; private boolean legacyTimestamp = true; private boolean optimizeMixedDistinctAggregations; @@ -159,6 +160,14 @@ public boolean canReplicate() } } + public enum DataIntegrityVerification + { + NONE, + ABORT, + RETRY, + /**/; + } + public double getCpuCostWeight() { return cpuCostWeight; @@ -814,6 +823,18 @@ public FeaturesConfig setExchangeCompressionEnabled(boolean exchangeCompressionE return this; } + public DataIntegrityVerification getExchangeDataIntegrityVerification() + { + return exchangeDataIntegrityVerification; + } + + @Config("exchange.data-integrity-verification") + public FeaturesConfig setExchangeDataIntegrityVerification(DataIntegrityVerification exchangeDataIntegrityVerification) + { + this.exchangeDataIntegrityVerification = exchangeDataIntegrityVerification; + return this; + } + public boolean isEnableIntermediateAggregations() { return enableIntermediateAggregations; diff --git a/presto-main/src/test/java/io/prestosql/operator/MockExchangeRequestProcessor.java b/presto-main/src/test/java/io/prestosql/operator/MockExchangeRequestProcessor.java index 97de6c1a5d91..081fdd32a7a8 100644 --- a/presto-main/src/test/java/io/prestosql/operator/MockExchangeRequestProcessor.java +++ b/presto-main/src/test/java/io/prestosql/operator/MockExchangeRequestProcessor.java @@ -27,7 
+27,6 @@ import io.prestosql.client.PrestoHeaders; import io.prestosql.execution.buffer.BufferResult; import io.prestosql.execution.buffer.PagesSerde; -import io.prestosql.execution.buffer.PagesSerdeUtil; import io.prestosql.execution.buffer.SerializedPage; import io.prestosql.spi.Page; @@ -47,6 +46,8 @@ import static io.prestosql.client.PrestoHeaders.PRESTO_PAGE_NEXT_TOKEN; import static io.prestosql.client.PrestoHeaders.PRESTO_PAGE_TOKEN; import static io.prestosql.client.PrestoHeaders.PRESTO_TASK_INSTANCE_ID; +import static io.prestosql.execution.buffer.PagesSerdeUtil.calculateChecksum; +import static io.prestosql.execution.buffer.PagesSerdeUtil.writeSerializedPages; import static io.prestosql.execution.buffer.TestingPagesSerdeFactory.testingPagesSerde; import static io.prestosql.server.PagesResponseWriter.SERIALIZED_PAGES_MAGIC; import static org.testng.Assert.assertEquals; @@ -99,8 +100,9 @@ public Response handle(Request request) if (!result.getSerializedPages().isEmpty()) { DynamicSliceOutput sliceOutput = new DynamicSliceOutput(64); sliceOutput.writeInt(SERIALIZED_PAGES_MAGIC); + sliceOutput.writeLong(calculateChecksum(result.getSerializedPages())); sliceOutput.writeInt(result.getSerializedPages().size()); - PagesSerdeUtil.writeSerializedPages(sliceOutput, result.getSerializedPages()); + writeSerializedPages(sliceOutput, result.getSerializedPages()); bytes = sliceOutput.slice().getBytes(); status = HttpStatus.OK; } diff --git a/presto-main/src/test/java/io/prestosql/operator/TestExchangeClient.java b/presto-main/src/test/java/io/prestosql/operator/TestExchangeClient.java index a723af398653..d0ac81c48472 100644 --- a/presto-main/src/test/java/io/prestosql/operator/TestExchangeClient.java +++ b/presto-main/src/test/java/io/prestosql/operator/TestExchangeClient.java @@ -14,9 +14,14 @@ package io.prestosql.operator; import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ListMultimap; import com.google.common.util.concurrent.Futures; 
import com.google.common.util.concurrent.ListenableFuture; +import io.airlift.http.client.HttpStatus; +import io.airlift.http.client.Request; +import io.airlift.http.client.Response; import io.airlift.http.client.testing.TestingHttpClient; +import io.airlift.http.client.testing.TestingResponse; import io.airlift.units.DataSize; import io.airlift.units.DataSize.Unit; import io.airlift.units.Duration; @@ -25,17 +30,24 @@ import io.prestosql.execution.buffer.SerializedPage; import io.prestosql.memory.context.SimpleLocalMemoryContext; import io.prestosql.spi.Page; +import io.prestosql.spi.PrestoException; +import io.prestosql.sql.analyzer.FeaturesConfig.DataIntegrityVerification; import org.testng.annotations.AfterClass; import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; import java.net.URI; +import java.util.Map; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; +import static com.google.common.base.Preconditions.checkState; +import static com.google.common.base.Verify.verify; +import static com.google.common.collect.ImmutableListMultimap.toImmutableListMultimap; import static com.google.common.collect.Maps.uniqueIndex; +import static com.google.common.io.ByteStreams.toByteArray; import static com.google.common.util.concurrent.MoreExecutors.directExecutor; import static com.google.common.util.concurrent.Uninterruptibles.sleepUninterruptibly; import static io.airlift.concurrent.MoreFutures.tryGetFutureValue; @@ -46,6 +58,7 @@ import static java.util.concurrent.Executors.newCachedThreadPool; import static java.util.concurrent.Executors.newScheduledThreadPool; import static java.util.concurrent.TimeUnit.MILLISECONDS; +import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertFalse; import static org.testng.Assert.assertNotNull; 
@@ -94,6 +107,8 @@ public void testHappyPath() @SuppressWarnings("resource") ExchangeClient exchangeClient = new ExchangeClient( + "localhost", + DataIntegrityVerification.ABORT, DataSize.of(32, Unit.MEGABYTE), maxResponseSize, 1, @@ -133,6 +148,8 @@ public void testAddLocation() @SuppressWarnings("resource") ExchangeClient exchangeClient = new ExchangeClient( + "localhost", + DataIntegrityVerification.ABORT, DataSize.of(32, Unit.MEGABYTE), maxResponseSize, 1, @@ -205,6 +222,8 @@ public void testBufferLimit() @SuppressWarnings("resource") ExchangeClient exchangeClient = new ExchangeClient( + "localhost", + DataIntegrityVerification.ABORT, DataSize.ofBytes(1), maxResponseSize, 1, @@ -273,6 +292,109 @@ public void testBufferLimit() assertStatus(exchangeClient.getStatus().getPageBufferClientStatuses().get(0), location, "closed", 3, 5, 5, "not scheduled"); } + @Test + public void testAbortOnDataCorruption() + { + URI location = URI.create("http://localhost:8080"); + ExchangeClient exchangeClient = setUpDataCorruption(DataIntegrityVerification.ABORT, location); + + assertFalse(exchangeClient.isClosed()); + assertThatThrownBy(() -> getNextPage(exchangeClient)) + .isInstanceOf(PrestoException.class) + .hasMessageMatching("Checksum verification failure on localhost when reading from http://localhost:8080/0: Data corruption, read checksum: 0xf91cfe5d2bc6e1c2, calculated checksum: 0x3c51297c7b78052f"); + + assertThatThrownBy(exchangeClient::isFinished) + .isInstanceOf(PrestoException.class) + .hasMessageMatching("Checksum verification failure on localhost when reading from http://localhost:8080/0: Data corruption, read checksum: 0xf91cfe5d2bc6e1c2, calculated checksum: 0x3c51297c7b78052f"); + + exchangeClient.close(); + } + + @Test + public void testRetryDataCorruption() + { + URI location = URI.create("http://localhost:8080"); + ExchangeClient exchangeClient = setUpDataCorruption(DataIntegrityVerification.RETRY, location); + + assertFalse(exchangeClient.isClosed()); + 
assertPageEquals(getNextPage(exchangeClient), createPage(1)); + assertFalse(exchangeClient.isClosed()); + assertPageEquals(getNextPage(exchangeClient), createPage(2)); + assertNull(getNextPage(exchangeClient)); + assertTrue(exchangeClient.isClosed()); + + ExchangeClientStatus status = exchangeClient.getStatus(); + assertEquals(status.getBufferedPages(), 0); + assertEquals(status.getBufferedBytes(), 0); + + assertStatus(status.getPageBufferClientStatuses().get(0), location, "closed", 2, 4, 4, "not scheduled"); + } + + private ExchangeClient setUpDataCorruption(DataIntegrityVerification dataIntegrityVerification, URI location) + { + DataSize maxResponseSize = DataSize.of(10, Unit.MEGABYTE); + + MockExchangeRequestProcessor delegate = new MockExchangeRequestProcessor(maxResponseSize); + delegate.addPage(location, createPage(1)); + delegate.addPage(location, createPage(2)); + delegate.setComplete(location); + + TestingHttpClient.Processor processor = new TestingHttpClient.Processor() + { + private int completedRequests; + private TestingResponse savedResponse; + + @Override + public synchronized Response handle(Request request) + throws Exception + { + if (completedRequests == 0) { + verify(savedResponse == null); + TestingResponse response = (TestingResponse) delegate.handle(request); + checkState(response.getStatusCode() == HttpStatus.OK.code(), "Unexpected status code: %s", response.getStatusCode()); + ListMultimap headers = response.getHeaders().entries().stream() + .collect(toImmutableListMultimap(entry -> entry.getKey().toString(), Map.Entry::getValue)); + byte[] bytes = toByteArray(response.getInputStream()); + checkState(bytes.length > 42, "too short"); + savedResponse = new TestingResponse(HttpStatus.OK, headers, bytes.clone()); + // corrupt + bytes[42]++; + completedRequests++; + return new TestingResponse(HttpStatus.OK, headers, bytes); + } + + if (completedRequests == 1) { + verify(savedResponse != null); + Response response = savedResponse; + savedResponse 
= null; + completedRequests++; + return response; + } + + completedRequests++; + return delegate.handle(request); + } + }; + + ExchangeClient exchangeClient = new ExchangeClient( + "localhost", + dataIntegrityVerification, + DataSize.of(32, Unit.MEGABYTE), + maxResponseSize, + 1, + new Duration(1, TimeUnit.MINUTES), + true, + new TestingHttpClient(processor, scheduler), + scheduler, + new SimpleLocalMemoryContext(newSimpleAggregatedMemoryContext(), "test"), + pageBufferClientCallbackExecutor); + + exchangeClient.addLocation(location); + exchangeClient.noMoreLocations(); + + return exchangeClient; + } + @Test public void testClose() throws Exception @@ -287,6 +409,8 @@ public void testClose() @SuppressWarnings("resource") ExchangeClient exchangeClient = new ExchangeClient( + "localhost", + DataIntegrityVerification.ABORT, DataSize.ofBytes(1), maxResponseSize, 1, diff --git a/presto-main/src/test/java/io/prestosql/operator/TestExchangeOperator.java b/presto-main/src/test/java/io/prestosql/operator/TestExchangeOperator.java index c5eac5cf6716..dfbe44d54d26 100644 --- a/presto-main/src/test/java/io/prestosql/operator/TestExchangeOperator.java +++ b/presto-main/src/test/java/io/prestosql/operator/TestExchangeOperator.java @@ -29,6 +29,7 @@ import io.prestosql.spi.Page; import io.prestosql.spi.type.Type; import io.prestosql.split.RemoteSplit; +import io.prestosql.sql.analyzer.FeaturesConfig.DataIntegrityVerification; import io.prestosql.sql.planner.plan.PlanNodeId; import org.testng.annotations.AfterClass; import org.testng.annotations.BeforeClass; @@ -84,6 +85,8 @@ public void setUp() httpClient = new TestingHttpClient(new TestingExchangeHttpClientHandler(taskBuffers), scheduler); exchangeClientSupplier = (systemMemoryUsageListener) -> new ExchangeClient( + "localhost", + DataIntegrityVerification.ABORT, DataSize.of(32, MEGABYTE), DataSize.of(10, MEGABYTE), 3, diff --git a/presto-main/src/test/java/io/prestosql/operator/TestHttpPageBufferClient.java 
b/presto-main/src/test/java/io/prestosql/operator/TestHttpPageBufferClient.java index f1dea48e0296..85e3ad230f91 100644 --- a/presto-main/src/test/java/io/prestosql/operator/TestHttpPageBufferClient.java +++ b/presto-main/src/test/java/io/prestosql/operator/TestHttpPageBufferClient.java @@ -28,6 +28,7 @@ import io.prestosql.operator.HttpPageBufferClient.ClientCallback; import io.prestosql.spi.HostAddress; import io.prestosql.spi.Page; +import io.prestosql.sql.analyzer.FeaturesConfig.DataIntegrityVerification; import org.testng.annotations.AfterClass; import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; @@ -101,7 +102,10 @@ public void testHappyPath() TestingClientCallback callback = new TestingClientCallback(requestComplete); URI location = URI.create("http://localhost:8080"); - HttpPageBufferClient client = new HttpPageBufferClient(new TestingHttpClient(processor, scheduler), + HttpPageBufferClient client = new HttpPageBufferClient( + "localhost", + new TestingHttpClient(processor, scheduler), + DataIntegrityVerification.ABORT, expectedMaxSize, new Duration(1, TimeUnit.MINUTES), true, @@ -186,7 +190,10 @@ public void testLifecycle() TestingClientCallback callback = new TestingClientCallback(requestComplete); URI location = URI.create("http://localhost:8080"); - HttpPageBufferClient client = new HttpPageBufferClient(new TestingHttpClient(processor, scheduler), + HttpPageBufferClient client = new HttpPageBufferClient( + "localhost", + new TestingHttpClient(processor, scheduler), + DataIntegrityVerification.ABORT, DataSize.of(10, Unit.MEGABYTE), new Duration(1, TimeUnit.MINUTES), true, @@ -226,7 +233,10 @@ public void testInvalidResponses() TestingClientCallback callback = new TestingClientCallback(requestComplete); URI location = URI.create("http://localhost:8080"); - HttpPageBufferClient client = new HttpPageBufferClient(new TestingHttpClient(processor, scheduler), + HttpPageBufferClient client = new HttpPageBufferClient( + "localhost", + 
new TestingHttpClient(processor, scheduler), + DataIntegrityVerification.ABORT, DataSize.of(10, Unit.MEGABYTE), new Duration(1, TimeUnit.MINUTES), true, @@ -294,7 +304,10 @@ public void testCloseDuringPendingRequest() TestingClientCallback callback = new TestingClientCallback(requestComplete); URI location = URI.create("http://localhost:8080"); - HttpPageBufferClient client = new HttpPageBufferClient(new TestingHttpClient(processor, scheduler), + HttpPageBufferClient client = new HttpPageBufferClient( + "localhost", + new TestingHttpClient(processor, scheduler), + DataIntegrityVerification.ABORT, DataSize.of(10, Unit.MEGABYTE), new Duration(1, TimeUnit.MINUTES), true, @@ -348,7 +361,10 @@ public void testExceptionFromResponseHandler() TestingClientCallback callback = new TestingClientCallback(requestComplete); URI location = URI.create("http://localhost:8080"); - HttpPageBufferClient client = new HttpPageBufferClient(new TestingHttpClient(processor, scheduler), + HttpPageBufferClient client = new HttpPageBufferClient( + "localhost", + new TestingHttpClient(processor, scheduler), + DataIntegrityVerification.ABORT, DataSize.of(10, Unit.MEGABYTE), new Duration(30, TimeUnit.SECONDS), true, diff --git a/presto-main/src/test/java/io/prestosql/operator/TestMergeOperator.java b/presto-main/src/test/java/io/prestosql/operator/TestMergeOperator.java index 1c23c7fba8d1..1e6480369424 100644 --- a/presto-main/src/test/java/io/prestosql/operator/TestMergeOperator.java +++ b/presto-main/src/test/java/io/prestosql/operator/TestMergeOperator.java @@ -19,6 +19,7 @@ import com.google.common.collect.ImmutableList; import io.airlift.http.client.HttpClient; import io.airlift.http.client.testing.TestingHttpClient; +import io.airlift.node.NodeInfo; import io.prestosql.execution.Lifespan; import io.prestosql.execution.buffer.PagesSerdeFactory; import io.prestosql.execution.buffer.TestingPagesSerdeFactory; @@ -27,6 +28,7 @@ import io.prestosql.spi.block.SortOrder; import 
io.prestosql.spi.type.Type; import io.prestosql.split.RemoteSplit; +import io.prestosql.sql.analyzer.FeaturesConfig; import io.prestosql.sql.gen.OrderingCompiler; import io.prestosql.sql.planner.plan.PlanNodeId; import org.testng.annotations.AfterMethod; @@ -82,7 +84,7 @@ public void setUp() taskBuffers = CacheBuilder.newBuilder().build(CacheLoader.from(TestingTaskBuffer::new)); httpClient = new TestingHttpClient(new TestingExchangeHttpClientHandler(taskBuffers), executor); - exchangeClientFactory = new ExchangeClientFactory(new ExchangeClientConfig(), httpClient, executor); + exchangeClientFactory = new ExchangeClientFactory(new NodeInfo("test"), new FeaturesConfig(), new ExchangeClientConfig(), httpClient, executor); orderingCompiler = new OrderingCompiler(); } diff --git a/presto-main/src/test/java/io/prestosql/operator/TestingExchangeHttpClientHandler.java b/presto-main/src/test/java/io/prestosql/operator/TestingExchangeHttpClientHandler.java index 3f105976b7c0..20e78272d669 100644 --- a/presto-main/src/test/java/io/prestosql/operator/TestingExchangeHttpClientHandler.java +++ b/presto-main/src/test/java/io/prestosql/operator/TestingExchangeHttpClientHandler.java @@ -24,7 +24,7 @@ import io.airlift.http.client.testing.TestingResponse; import io.airlift.slice.DynamicSliceOutput; import io.prestosql.execution.buffer.PagesSerde; -import io.prestosql.execution.buffer.PagesSerdeUtil; +import io.prestosql.execution.buffer.SerializedPage; import io.prestosql.spi.Page; import static io.prestosql.PrestoMediaTypes.PRESTO_PAGES; @@ -32,6 +32,8 @@ import static io.prestosql.client.PrestoHeaders.PRESTO_PAGE_NEXT_TOKEN; import static io.prestosql.client.PrestoHeaders.PRESTO_PAGE_TOKEN; import static io.prestosql.client.PrestoHeaders.PRESTO_TASK_INSTANCE_ID; +import static io.prestosql.execution.buffer.PagesSerdeUtil.calculateChecksum; +import static io.prestosql.execution.buffer.PagesSerdeUtil.writeSerializedPage; import static 
io.prestosql.execution.buffer.TestingPagesSerdeFactory.testingPagesSerde; import static io.prestosql.server.PagesResponseWriter.SERIALIZED_PAGES_MAGIC; import static java.util.Objects.requireNonNull; @@ -73,10 +75,12 @@ public Response handle(Request request) if (page != null) { headers.put(PRESTO_PAGE_NEXT_TOKEN, String.valueOf(pageToken + 1)); headers.put(PRESTO_BUFFER_COMPLETE, String.valueOf(false)); + SerializedPage serializedPage = PAGES_SERDE.serialize(page); DynamicSliceOutput output = new DynamicSliceOutput(256); output.writeInt(SERIALIZED_PAGES_MAGIC); + output.writeLong(calculateChecksum(ImmutableList.of(serializedPage))); output.writeInt(1); - PagesSerdeUtil.writePages(PAGES_SERDE, output, page); + writeSerializedPage(output, serializedPage); return new TestingResponse(HttpStatus.OK, headers.build(), output.slice().getInput()); } else if (taskBuffer.isFinished()) { @@ -84,6 +88,7 @@ else if (taskBuffer.isFinished()) { headers.put(PRESTO_BUFFER_COMPLETE, String.valueOf(true)); DynamicSliceOutput output = new DynamicSliceOutput(8); output.writeInt(SERIALIZED_PAGES_MAGIC); + output.writeLong(calculateChecksum(ImmutableList.of())); output.writeInt(0); return new TestingResponse(HttpStatus.OK, headers.build(), output.slice().getInput()); } diff --git a/presto-main/src/test/java/io/prestosql/sql/analyzer/TestFeaturesConfig.java b/presto-main/src/test/java/io/prestosql/sql/analyzer/TestFeaturesConfig.java index b76cc844719b..f67000099c90 100644 --- a/presto-main/src/test/java/io/prestosql/sql/analyzer/TestFeaturesConfig.java +++ b/presto-main/src/test/java/io/prestosql/sql/analyzer/TestFeaturesConfig.java @@ -19,6 +19,7 @@ import io.prestosql.operator.aggregation.arrayagg.ArrayAggGroupImplementation; import io.prestosql.operator.aggregation.histogram.HistogramGroupImplementation; import io.prestosql.operator.aggregation.multimapagg.MultimapAggGroupImplementation; +import io.prestosql.sql.analyzer.FeaturesConfig.DataIntegrityVerification; import 
io.prestosql.sql.analyzer.FeaturesConfig.JoinDistributionType; import io.prestosql.sql.analyzer.FeaturesConfig.JoinReorderingStrategy; import org.testng.annotations.Test; @@ -88,6 +89,7 @@ public void testDefaults() .setDefaultFilterFactorEnabled(false) .setEnableForcedExchangeBelowGroupId(true) .setExchangeCompressionEnabled(false) + .setExchangeDataIntegrityVerification(DataIntegrityVerification.ABORT) .setLegacyTimestamp(true) .setEnableIntermediateAggregations(false) .setPushAggregationThroughOuterJoin(true) @@ -164,6 +166,7 @@ public void testExplicitPropertyMappings() .put("memory-revoking-threshold", "0.2") .put("memory-revoking-target", "0.8") .put("exchange.compression-enabled", "true") + .put("exchange.data-integrity-verification", "RETRY") .put("deprecated.legacy-timestamp", "false") .put("optimizer.enable-intermediate-aggregations", "true") .put("parse-decimal-literals-as-double", "true") @@ -234,6 +237,7 @@ public void testExplicitPropertyMappings() .setMemoryRevokingThreshold(0.2) .setMemoryRevokingTarget(0.8) .setExchangeCompressionEnabled(true) + .setExchangeDataIntegrityVerification(DataIntegrityVerification.RETRY) .setLegacyTimestamp(false) .setEnableIntermediateAggregations(true) .setParseDecimalLiteralsAsDouble(true) From b1a2969882e638a77dba5e2289bd5374297d2d27 Mon Sep 17 00:00:00 2001 From: Alex Jo Date: Fri, 1 May 2020 09:15:49 -0400 Subject: [PATCH 299/519] Skip whitespace padded char data mapping test for Kudu --- .../io/prestosql/plugin/kudu/TestKuduDistributedQueries.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/presto-kudu/src/test/java/io/prestosql/plugin/kudu/TestKuduDistributedQueries.java b/presto-kudu/src/test/java/io/prestosql/plugin/kudu/TestKuduDistributedQueries.java index 6ac9776f1a5d..39d592b85caa 100644 --- a/presto-kudu/src/test/java/io/prestosql/plugin/kudu/TestKuduDistributedQueries.java +++ b/presto-kudu/src/test/java/io/prestosql/plugin/kudu/TestKuduDistributedQueries.java @@ -150,7 +150,8 @@ 
protected Optional filterDataMappingSmokeTestData(DataMapp } if (typeName.equals("date") // date gets stored as varchar - || typeName.equals("varbinary")) { // TODO (https://github.com/prestosql/presto/issues/3416) + || typeName.equals("varbinary") // TODO (https://github.com/prestosql/presto/issues/3416) + || (typeName.startsWith("char") && dataMappingTestSetup.getSampleValueLiteral().contains(" "))) { // TODO: https://github.com/prestosql/presto/issues/3597 // TODO this should either work or fail cleanly return Optional.empty(); } From 19abcb7ddeacaf1ca7bad52f998b524dc83b6904 Mon Sep 17 00:00:00 2001 From: praveenkrishna Date: Wed, 8 Apr 2020 22:46:48 +0530 Subject: [PATCH 300/519] Quote special row field names containing special characters --- .../plugin/hive/TestHiveIntegrationSmokeTest.java | 13 +++++++++++++ .../sql/analyzer/TypeSignatureTranslator.java | 2 +- .../sql/analyzer/TestTypeSignatureTranslator.java | 1 + 3 files changed, 15 insertions(+), 1 deletion(-) diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java index 3cfff0b3a4d3..7c8be2b4d8cf 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java @@ -3233,6 +3233,19 @@ public void testShowCreateTable() assertUpdate(createTableSql); actualResult = computeActual("SHOW CREATE TABLE \"test_show_create_table'2\""); assertEquals(getOnlyElement(actualResult.getOnlyColumnAsSet()), createTableSql); + + createTableSql = format("" + + "CREATE TABLE %s.%s.%s (\n" + + " c1 ROW(\"$a\" bigint, \"$b\" varchar)\n)\n" + + "WITH (\n" + + " format = 'ORC'\n" + + ")", + getSession().getCatalog().get(), + getSession().getSchema().get(), + "test_show_create_table_with_special_characters"); + assertUpdate(createTableSql); + actualResult = computeActual("SHOW CREATE 
TABLE test_show_create_table_with_special_characters"); + assertEquals(getOnlyElement(actualResult.getOnlyColumnAsSet()), createTableSql); } @Test diff --git a/presto-main/src/main/java/io/prestosql/sql/analyzer/TypeSignatureTranslator.java b/presto-main/src/main/java/io/prestosql/sql/analyzer/TypeSignatureTranslator.java index ce9f822085b5..9290a8fe60f8 100644 --- a/presto-main/src/main/java/io/prestosql/sql/analyzer/TypeSignatureTranslator.java +++ b/presto-main/src/main/java/io/prestosql/sql/analyzer/TypeSignatureTranslator.java @@ -186,7 +186,7 @@ static DataType toDataType(TypeSignature typeSignature) typeSignature.getParameters().stream() .map(parameter -> new RowDataType.Field( Optional.empty(), - parameter.getNamedTypeSignature().getFieldName().map(fieldName -> new Identifier(fieldName.getName(), false)), + parameter.getNamedTypeSignature().getFieldName().map(fieldName -> new Identifier(fieldName.getName())), toDataType(parameter.getNamedTypeSignature().getTypeSignature()))) .collect(toImmutableList())); case StandardTypes.VARCHAR: diff --git a/presto-main/src/test/java/io/prestosql/sql/analyzer/TestTypeSignatureTranslator.java b/presto-main/src/test/java/io/prestosql/sql/analyzer/TestTypeSignatureTranslator.java index 8bebc94f1ed0..c06ff1089036 100644 --- a/presto-main/src/test/java/io/prestosql/sql/analyzer/TestTypeSignatureTranslator.java +++ b/presto-main/src/test/java/io/prestosql/sql/analyzer/TestTypeSignatureTranslator.java @@ -88,6 +88,7 @@ public void testRowType() assertRoundTrip("ROW(a BIGINT, b VARCHAR)"); assertRoundTrip("ROW(a BIGINT,b VARCHAR)"); assertRoundTrip("ROW(\"a\" BIGINT, \"from\" VARCHAR)"); + assertRoundTrip("ROW(\"$x\" BIGINT, \"$y\" VARCHAR)"); } @Test From 4da18146a65a709d8676712fdf5a494d9b456e46 Mon Sep 17 00:00:00 2001 From: Manfred Moser Date: Wed, 15 Apr 2020 13:33:49 -0700 Subject: [PATCH 301/519] Add link to book and paper pages --- presto-docs/src/main/sphinx/overview/concepts.rst | 8 ++++++++ 1 file changed, 8 
insertions(+) diff --git a/presto-docs/src/main/sphinx/overview/concepts.rst b/presto-docs/src/main/sphinx/overview/concepts.rst index 023e27354d1e..a676d70b4adb 100644 --- a/presto-docs/src/main/sphinx/overview/concepts.rst +++ b/presto-docs/src/main/sphinx/overview/concepts.rst @@ -24,6 +24,14 @@ This section provides a solid definition for the core concepts referenced throughout Presto, and these sections are sorted from most general to most specific. +.. note:: + + The book `Presto: The Definitive Guide + `_ and the research + paper `Presto: SQL on Everything `_ can + provide further information about Presto and the concepts in use. + + Server Types ------------ From 2169c41518f692a2c7615d901a425abf45fee264 Mon Sep 17 00:00:00 2001 From: Alex Jo Date: Fri, 10 Apr 2020 12:07:23 -0400 Subject: [PATCH 302/519] Remove duplicated CacheStatsMBean --- .../io/prestosql/sql/gen/CacheStatsMBean.java | 53 ------------------- .../prestosql/sql/gen/ExpressionCompiler.java | 1 + .../io/prestosql/sql/gen/JoinCompiler.java | 1 + .../sql/gen/JoinFilterFunctionCompiler.java | 1 + .../prestosql/sql/gen/OrderingCompiler.java | 1 + .../sql/gen/PageFunctionCompiler.java | 1 + 6 files changed, 5 insertions(+), 53 deletions(-) delete mode 100644 presto-main/src/main/java/io/prestosql/sql/gen/CacheStatsMBean.java diff --git a/presto-main/src/main/java/io/prestosql/sql/gen/CacheStatsMBean.java b/presto-main/src/main/java/io/prestosql/sql/gen/CacheStatsMBean.java deleted file mode 100644 index b79beaece76e..000000000000 --- a/presto-main/src/main/java/io/prestosql/sql/gen/CacheStatsMBean.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package io.prestosql.sql.gen; - -import com.google.common.cache.LoadingCache; -import org.weakref.jmx.Managed; - -import static java.util.Objects.requireNonNull; - -public class CacheStatsMBean -{ - private final LoadingCache loadingCache; - - public CacheStatsMBean(LoadingCache loadingCache) - { - this.loadingCache = requireNonNull(loadingCache, "loadingCache is null"); - } - - @Managed - public long size() - { - return loadingCache.size(); - } - - @Managed - public Double getHitRate() - { - return loadingCache.stats().hitRate(); - } - - @Managed - public Double getMissRate() - { - return loadingCache.stats().missRate(); - } - - @Managed - public long getRequestCount() - { - return loadingCache.stats().requestCount(); - } -} diff --git a/presto-main/src/main/java/io/prestosql/sql/gen/ExpressionCompiler.java b/presto-main/src/main/java/io/prestosql/sql/gen/ExpressionCompiler.java index 29c344437f13..2d95e6acd0af 100644 --- a/presto-main/src/main/java/io/prestosql/sql/gen/ExpressionCompiler.java +++ b/presto-main/src/main/java/io/prestosql/sql/gen/ExpressionCompiler.java @@ -20,6 +20,7 @@ import com.google.common.collect.ImmutableList; import io.airlift.bytecode.ClassDefinition; import io.airlift.bytecode.CompilationException; +import io.airlift.jmx.CacheStatsMBean; import io.prestosql.metadata.Metadata; import io.prestosql.operator.project.CursorProcessor; import io.prestosql.operator.project.PageFilter; diff --git a/presto-main/src/main/java/io/prestosql/sql/gen/JoinCompiler.java 
b/presto-main/src/main/java/io/prestosql/sql/gen/JoinCompiler.java index a3817d31f670..7343a7e0a8de 100644 --- a/presto-main/src/main/java/io/prestosql/sql/gen/JoinCompiler.java +++ b/presto-main/src/main/java/io/prestosql/sql/gen/JoinCompiler.java @@ -31,6 +31,7 @@ import io.airlift.bytecode.control.IfStatement; import io.airlift.bytecode.expression.BytecodeExpression; import io.airlift.bytecode.instruction.LabelNode; +import io.airlift.jmx.CacheStatsMBean; import io.airlift.slice.Slice; import io.prestosql.Session; import io.prestosql.metadata.Metadata; diff --git a/presto-main/src/main/java/io/prestosql/sql/gen/JoinFilterFunctionCompiler.java b/presto-main/src/main/java/io/prestosql/sql/gen/JoinFilterFunctionCompiler.java index 6ad43e505af2..53b0a64c1dda 100644 --- a/presto-main/src/main/java/io/prestosql/sql/gen/JoinFilterFunctionCompiler.java +++ b/presto-main/src/main/java/io/prestosql/sql/gen/JoinFilterFunctionCompiler.java @@ -29,6 +29,7 @@ import io.airlift.bytecode.Scope; import io.airlift.bytecode.Variable; import io.airlift.bytecode.control.IfStatement; +import io.airlift.jmx.CacheStatsMBean; import io.prestosql.metadata.Metadata; import io.prestosql.operator.InternalJoinFilterFunction; import io.prestosql.operator.JoinFilterFunction; diff --git a/presto-main/src/main/java/io/prestosql/sql/gen/OrderingCompiler.java b/presto-main/src/main/java/io/prestosql/sql/gen/OrderingCompiler.java index c7d27cf25db7..84e93adbd468 100644 --- a/presto-main/src/main/java/io/prestosql/sql/gen/OrderingCompiler.java +++ b/presto-main/src/main/java/io/prestosql/sql/gen/OrderingCompiler.java @@ -26,6 +26,7 @@ import io.airlift.bytecode.Variable; import io.airlift.bytecode.expression.BytecodeExpression; import io.airlift.bytecode.instruction.LabelNode; +import io.airlift.jmx.CacheStatsMBean; import io.airlift.log.Logger; import io.prestosql.operator.PageWithPositionComparator; import io.prestosql.operator.PagesIndex; diff --git 
a/presto-main/src/main/java/io/prestosql/sql/gen/PageFunctionCompiler.java b/presto-main/src/main/java/io/prestosql/sql/gen/PageFunctionCompiler.java index 558333bb3b16..3dad3fd3c87b 100644 --- a/presto-main/src/main/java/io/prestosql/sql/gen/PageFunctionCompiler.java +++ b/presto-main/src/main/java/io/prestosql/sql/gen/PageFunctionCompiler.java @@ -31,6 +31,7 @@ import io.airlift.bytecode.Variable; import io.airlift.bytecode.control.ForLoop; import io.airlift.bytecode.control.IfStatement; +import io.airlift.jmx.CacheStatsMBean; import io.prestosql.metadata.Metadata; import io.prestosql.operator.Work; import io.prestosql.operator.project.ConstantPageProjection; From 1694c17b44c49a88321bde57675b5ff333cf287d Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Mon, 27 Apr 2020 23:19:51 +0200 Subject: [PATCH 303/519] Remove unused method --- .../prestosql/plugin/accumulo/model/Row.java | 136 ------------------ .../plugin/accumulo/model/TestRow.java | 40 ------ 2 files changed, 176 deletions(-) diff --git a/presto-accumulo/src/main/java/io/prestosql/plugin/accumulo/model/Row.java b/presto-accumulo/src/main/java/io/prestosql/plugin/accumulo/model/Row.java index fe958e8c018a..34ca02309c0e 100644 --- a/presto-accumulo/src/main/java/io/prestosql/plugin/accumulo/model/Row.java +++ b/presto-accumulo/src/main/java/io/prestosql/plugin/accumulo/model/Row.java @@ -13,52 +13,18 @@ */ package io.prestosql.plugin.accumulo.model; -import com.google.common.base.Splitter; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import io.prestosql.plugin.accumulo.Types; -import io.prestosql.plugin.accumulo.serializers.AccumuloRowSerializer; -import io.prestosql.spi.PrestoException; import io.prestosql.spi.type.Type; -import io.prestosql.spi.type.VarcharType; -import org.apache.commons.lang.StringUtils; -import org.joda.time.format.DateTimeFormat; -import org.joda.time.format.DateTimeFormatter; -import org.joda.time.format.ISODateTimeFormat; 
-import java.sql.Date; -import java.sql.Time; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Objects; import java.util.stream.Collectors; -import static com.google.common.base.Preconditions.checkArgument; -import static io.prestosql.spi.StandardErrorCode.INVALID_FUNCTION_ARGUMENT; -import static io.prestosql.spi.StandardErrorCode.NOT_SUPPORTED; -import static io.prestosql.spi.type.BigintType.BIGINT; -import static io.prestosql.spi.type.BooleanType.BOOLEAN; -import static io.prestosql.spi.type.DateType.DATE; -import static io.prestosql.spi.type.DoubleType.DOUBLE; -import static io.prestosql.spi.type.IntegerType.INTEGER; -import static io.prestosql.spi.type.RealType.REAL; -import static io.prestosql.spi.type.SmallintType.SMALLINT; -import static io.prestosql.spi.type.TimeType.TIME; -import static io.prestosql.spi.type.TimestampType.TIMESTAMP; -import static io.prestosql.spi.type.TinyintType.TINYINT; -import static io.prestosql.spi.type.VarbinaryType.VARBINARY; -import static java.lang.String.format; -import static java.nio.charset.StandardCharsets.UTF_8; import static java.util.Objects.requireNonNull; public class Row { - private static final DateTimeFormatter DATE_PARSER = ISODateTimeFormat.date(); - private static final DateTimeFormatter TIME_PARSER = DateTimeFormat.forPattern("HH:mm:ss"); - private static final DateTimeFormatter TIMESTAMP_PARSER = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss.SSS"); - private final List fields = new ArrayList<>(); public Row() {} @@ -128,106 +94,4 @@ public String toString() builder.deleteCharAt(builder.length() - 1); return builder.append(')').toString(); } - - /** - * Creates a new {@link Row} from the given delimited string based on the given {@link RowSchema} - * - * @param schema Row's schema - * @param str String to parse - * @param delimiter Delimiter of the string - * @return A new Row - * @throws PrestoException If the length of the split string is 
not equal to the length of the schema - * @throws PrestoException If the schema contains an unsupported type - */ - public static Row fromString(RowSchema schema, String str, char delimiter) - { - Row row = new Row(); - - ImmutableList.Builder builder = ImmutableList.builder(); - List fields = builder.addAll(Splitter.on(delimiter).split(str)).build(); - - if (fields.size() != schema.getLength()) { - throw new PrestoException(INVALID_FUNCTION_ARGUMENT, format("Number of split tokens is not equal to schema length. Expected %s received %s. Schema: %s, fields {%s}, delimiter %s", schema.getLength(), fields.size(), schema, StringUtils.join(fields, ","), delimiter)); - } - - for (int i = 0; i < fields.size(); ++i) { - Type type = schema.getColumn(i).getType(); - row.addField(valueFromString(fields.get(i), type), type); - } - - return row; - } - - /** - * Converts the given String into a Java object based on the given Presto type - * - * @param str String to convert - * @param type Presto Type - * @return Java object - * @throws PrestoException If the type is not supported by this function - */ - public static Object valueFromString(String str, Type type) - { - if (str == null || str.isEmpty()) { - return null; - } - if (Types.isArrayType(type)) { - Type elementType = Types.getElementType(type); - ImmutableList.Builder listBuilder = ImmutableList.builder(); - for (String element : Splitter.on(',').split(str)) { - listBuilder.add(valueFromString(element, elementType)); - } - return AccumuloRowSerializer.getBlockFromArray(elementType, listBuilder.build()); - } - if (Types.isMapType(type)) { - Type keyType = Types.getKeyType(type); - Type valueType = Types.getValueType(type); - ImmutableMap.Builder mapBuilder = ImmutableMap.builder(); - for (String element : Splitter.on(',').split(str)) { - ImmutableList.Builder builder = ImmutableList.builder(); - List keyValue = builder.addAll(Splitter.on("->").split(element)).build(); - checkArgument(keyValue.size() == 2, "Map element %s 
has %s entries, not 2", element, keyValue.size()); - - mapBuilder.put(valueFromString(keyValue.get(0), keyType), valueFromString(keyValue.get(1), valueType)); - } - return AccumuloRowSerializer.getBlockFromMap(type, mapBuilder.build()); - } - if (type.equals(BIGINT)) { - return Long.parseLong(str); - } - if (type.equals(BOOLEAN)) { - return Boolean.parseBoolean(str); - } - if (type.equals(DATE)) { - return new Date(DATE_PARSER.parseDateTime(str).getMillis()); - } - if (type.equals(DOUBLE)) { - return Double.parseDouble(str); - } - if (type.equals(INTEGER)) { - return Integer.parseInt(str); - } - if (type.equals(REAL)) { - return Float.parseFloat(str); - } - if (type.equals(SMALLINT)) { - return Short.parseShort(str); - } - if (type.equals(TIME)) { - return new Time(TIME_PARSER.parseDateTime(str).getMillis()); - } - if (type.equals(TIMESTAMP)) { - return new Timestamp(TIMESTAMP_PARSER.parseDateTime(str).getMillis()); - } - if (type.equals(TINYINT)) { - return Byte.valueOf(str); - } - if (type.equals(VARBINARY)) { - return str.getBytes(UTF_8); - } - if (type instanceof VarcharType) { - return str; - } - throw new PrestoException(NOT_SUPPORTED, "Unsupported type " + type); - } } diff --git a/presto-accumulo/src/test/java/io/prestosql/plugin/accumulo/model/TestRow.java b/presto-accumulo/src/test/java/io/prestosql/plugin/accumulo/model/TestRow.java index 276c00a4fd63..23a0512e7178 100644 --- a/presto-accumulo/src/test/java/io/prestosql/plugin/accumulo/model/TestRow.java +++ b/presto-accumulo/src/test/java/io/prestosql/plugin/accumulo/model/TestRow.java @@ -21,7 +21,6 @@ import java.sql.Date; import java.sql.Timestamp; import java.util.GregorianCalendar; -import java.util.Optional; import static io.prestosql.spi.type.BigintType.BIGINT; import static io.prestosql.spi.type.BooleanType.BOOLEAN; @@ -72,43 +71,4 @@ public void testRowTypeIsNull() Row r1 = new Row(); r1.addField(VARCHAR, null); } - - @Test - public void testRowFromString() - { - Row expected = new Row(); - 
expected.addField(new Field(AccumuloRowSerializer.getBlockFromArray(VARCHAR, ImmutableList.of("a", "b", "c")), new ArrayType(VARCHAR))); - expected.addField(true, BOOLEAN); - expected.addField(new Field(new Date(new GregorianCalendar(1999, 0, 1).getTime().getTime()), DATE)); - expected.addField(123.45678, DOUBLE); - expected.addField(new Field(123.45678f, REAL)); - expected.addField(12345678, INTEGER); - expected.addField(new Field(12345678L, BIGINT)); - expected.addField(new Field((short) 12345, SMALLINT)); - expected.addField(new GregorianCalendar(1999, 0, 1, 12, 30, 0).getTime().getTime(), TIME); - expected.addField(new Field(new Timestamp(new GregorianCalendar(1999, 0, 1, 12, 30, 0).getTime().getTime()), TIMESTAMP)); - expected.addField((byte) 123, TINYINT); - expected.addField(new Field("O'Leary".getBytes(UTF_8), VARBINARY)); - expected.addField("O'Leary", VARCHAR); - expected.addField(null, VARCHAR); - - RowSchema schema = new RowSchema(); - schema.addRowId("a", new ArrayType(VARCHAR)); - schema.addColumn("b", Optional.of("b"), Optional.of("b"), BOOLEAN); - schema.addColumn("c", Optional.of("c"), Optional.of("c"), DATE); - schema.addColumn("d", Optional.of("d"), Optional.of("d"), DOUBLE); - schema.addColumn("e", Optional.of("e"), Optional.of("e"), REAL); - schema.addColumn("f", Optional.of("f"), Optional.of("f"), INTEGER); - schema.addColumn("g", Optional.of("g"), Optional.of("g"), BIGINT); - schema.addColumn("h", Optional.of("h"), Optional.of("h"), SMALLINT); - schema.addColumn("i", Optional.of("i"), Optional.of("i"), TIME); - schema.addColumn("j", Optional.of("j"), Optional.of("j"), TIMESTAMP); - schema.addColumn("k", Optional.of("k"), Optional.of("k"), TINYINT); - schema.addColumn("l", Optional.of("l"), Optional.of("l"), VARBINARY); - schema.addColumn("m", Optional.of("m"), Optional.of("m"), VARCHAR); - schema.addColumn("n", Optional.of("n"), Optional.of("n"), VARCHAR); - - Row actual = Row.fromString(schema, 
"a,b,c|true|1999-01-01|123.45678|123.45678|12345678|12345678|12345|12:30:00|1999-01-01 12:30:00.0|123|O'Leary|O'Leary|", '|'); - assertEquals(actual, expected); - } } From 6843bf9d7425fcba71462b04a7676df6f8c109ac Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Mon, 27 Apr 2020 23:19:52 +0200 Subject: [PATCH 304/519] Simplify Field#toString The `Field`'s `toString` is complex, as if it was playing some conversion role, but it does not. Simplify it and make more robust (avoid throwing exceptions), at the cost of not producing human-readable string representation of collections. --- .../plugin/accumulo/model/Field.java | 111 +----------------- .../plugin/accumulo/model/TestField.java | 15 --- .../plugin/accumulo/model/TestRow.java | 1 - 3 files changed, 5 insertions(+), 122 deletions(-) diff --git a/presto-accumulo/src/main/java/io/prestosql/plugin/accumulo/model/Field.java b/presto-accumulo/src/main/java/io/prestosql/plugin/accumulo/model/Field.java index 7513e287cc97..a0ea0006e4b0 100644 --- a/presto-accumulo/src/main/java/io/prestosql/plugin/accumulo/model/Field.java +++ b/presto-accumulo/src/main/java/io/prestosql/plugin/accumulo/model/Field.java @@ -15,7 +15,6 @@ import io.airlift.slice.Slice; import io.prestosql.plugin.accumulo.Types; -import io.prestosql.plugin.accumulo.serializers.AccumuloRowSerializer; import io.prestosql.spi.PrestoException; import io.prestosql.spi.block.ArrayBlock; import io.prestosql.spi.block.Block; @@ -27,11 +26,9 @@ import java.sql.Timestamp; import java.util.Arrays; import java.util.Calendar; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; import java.util.Objects; +import static com.google.common.base.MoreObjects.toStringHelper; import static io.prestosql.spi.StandardErrorCode.FUNCTION_IMPLEMENTATION_ERROR; import static io.prestosql.spi.StandardErrorCode.NOT_SUPPORTED; import static io.prestosql.spi.type.BigintType.BIGINT; @@ -266,108 +263,10 @@ private static boolean equals(Block block1, Block 
block2) @Override public String toString() { - if (value == null) { - return "null"; - } - - if (Types.isArrayType(type)) { - Type elementType = Types.getElementType(type); - StringBuilder builder = new StringBuilder("ARRAY ["); - for (Object element : AccumuloRowSerializer.getArrayFromBlock(elementType, this.getArray())) { - if (Types.isArrayType(elementType)) { - Type elementElementType = Types.getElementType(elementType); - builder.append( - new Field( - AccumuloRowSerializer.getBlockFromArray(elementElementType, (List) element), - elementType)) - .append(','); - } - else if (Types.isMapType(elementType)) { - builder.append( - new Field( - AccumuloRowSerializer.getBlockFromMap(elementType, (Map) element), - elementType)) - .append(','); - } - else { - builder.append(new Field(element, elementType)) - .append(','); - } - } - - return builder.deleteCharAt(builder.length() - 1).append("]").toString(); - } - - if (Types.isMapType(type)) { - StringBuilder builder = new StringBuilder("MAP("); - StringBuilder keys = new StringBuilder("ARRAY ["); - StringBuilder values = new StringBuilder("ARRAY ["); - for (Entry entry : AccumuloRowSerializer - .getMapFromBlock(type, this.getMap()).entrySet()) { - Type keyType = Types.getKeyType(type); - if (Types.isArrayType(keyType)) { - keys.append( - new Field( - AccumuloRowSerializer.getBlockFromArray(Types.getElementType(keyType), (List) entry.getKey()), - keyType)) - .append(','); - } - else if (Types.isMapType(keyType)) { - keys.append( - new Field( - AccumuloRowSerializer.getBlockFromMap(keyType, (Map) entry.getKey()), - keyType)) - .append(','); - } - else { - keys.append(new Field(entry.getKey(), keyType)) - .append(','); - } - - Type valueType = Types.getValueType(type); - if (Types.isArrayType(valueType)) { - values.append( - new Field(AccumuloRowSerializer.getBlockFromArray(Types.getElementType(valueType), - (List) entry.getValue()), valueType)) - .append(','); - } - else if (Types.isMapType(valueType)) { - values.append( 
- new Field( - AccumuloRowSerializer.getBlockFromMap(valueType, (Map) entry.getValue()), - valueType)) - .append(','); - } - else { - values.append(new Field(entry.getValue(), valueType)).append(','); - } - } - - keys.deleteCharAt(keys.length() - 1).append(']'); - values.deleteCharAt(values.length() - 1).append(']'); - return builder.append(keys).append(", ").append(values).append(")").toString(); - } - - // Validate the object is the given type - if (type.equals(BIGINT) || type.equals(BOOLEAN) || type.equals(DOUBLE) || type.equals(INTEGER) || type.equals(REAL) || type.equals(TINYINT) || type.equals(SMALLINT)) { - return value.toString(); - } - if (type.equals(DATE)) { - return "DATE '" + value.toString() + "'"; - } - if (type.equals(TIME)) { - return "TIME '" + value.toString() + "'"; - } - if (type.equals(TIMESTAMP)) { - return "TIMESTAMP '" + value.toString() + "'"; - } - if (type.equals(VARBINARY)) { - return "CAST('" + new String((byte[]) value, UTF_8).replaceAll("'", "''") + "' AS VARBINARY)"; - } - if (type instanceof VarcharType) { - return "'" + value.toString().replaceAll("'", "''") + "'"; - } - throw new PrestoException(NOT_SUPPORTED, "Unsupported PrestoType " + type); + return toStringHelper(this) + .add("value", value) + .add("type", type) + .toString(); } /** diff --git a/presto-accumulo/src/test/java/io/prestosql/plugin/accumulo/model/TestField.java b/presto-accumulo/src/test/java/io/prestosql/plugin/accumulo/model/TestField.java index 081e99d0132a..778e2c43a7d2 100644 --- a/presto-accumulo/src/test/java/io/prestosql/plugin/accumulo/model/TestField.java +++ b/presto-accumulo/src/test/java/io/prestosql/plugin/accumulo/model/TestField.java @@ -61,7 +61,6 @@ public void testArray() assertEquals(f1.getArray(), expected); assertEquals(f1.getObject(), expected); assertEquals(f1.getType(), type); - assertEquals(f1.toString(), "ARRAY ['a','b','c']"); Field f2 = new Field(f1); assertEquals(f2, f1); @@ -75,13 +74,11 @@ public void testBoolean() 
assertEquals(f1.getBoolean().booleanValue(), true); assertEquals(f1.getObject(), true); assertEquals(f1.getType(), type); - assertEquals(f1.toString(), "true"); f1 = new Field(false, type); assertEquals(f1.getBoolean().booleanValue(), false); assertEquals(f1.getObject(), false); assertEquals(f1.getType(), type); - assertEquals(f1.toString(), "false"); Field f2 = new Field(f1); assertEquals(f2, f1); @@ -96,7 +93,6 @@ public void testDate() assertEquals(f1.getDate(), expected); assertEquals(f1.getObject(), expected); assertEquals(f1.getType(), type); - assertEquals(f1.toString(), "DATE '1999-01-01'"); Field f2 = new Field(f1); assertEquals(f2, f1); @@ -111,7 +107,6 @@ public void testDouble() assertEquals(f1.getDouble(), expected); assertEquals(f1.getObject(), expected); assertEquals(f1.getType(), type); - assertEquals(f1.toString(), "123.45678"); Field f2 = new Field(f1); assertEquals(f2, f1); @@ -126,7 +121,6 @@ public void testFloat() assertEquals(f1.getFloat(), expected); assertEquals(f1.getObject(), expected); assertEquals(f1.getType(), type); - assertEquals(f1.toString(), "123.45678"); Field f2 = new Field(f1); assertEquals(f2, f1); @@ -141,7 +135,6 @@ public void testInt() assertEquals(f1.getInt(), expected); assertEquals(f1.getObject(), expected); assertEquals(f1.getType(), type); - assertEquals(f1.toString(), "12345678"); Field f2 = new Field(f1); assertEquals(f2, f1); @@ -156,7 +149,6 @@ public void testLong() assertEquals(f1.getLong(), expected); assertEquals(f1.getObject(), expected); assertEquals(f1.getType(), type); - assertEquals(f1.toString(), "12345678"); Field f2 = new Field(f1); assertEquals(f2, f1); @@ -173,7 +165,6 @@ public void testMap() assertEquals(f1.getMap(), expected); assertEquals(f1.getObject(), expected); assertEquals(f1.getType(), type); - assertEquals(f1.toString(), "MAP(ARRAY ['a','b','c'], ARRAY [1,2,3])"); } @Test @@ -185,7 +176,6 @@ public void testSmallInt() assertEquals(f1.getShort(), expected); assertEquals(f1.getObject(), 
expected); assertEquals(f1.getType(), type); - assertEquals(f1.toString(), "12345"); Field f2 = new Field(f1); assertEquals(f2, f1); @@ -200,7 +190,6 @@ public void testTime() assertEquals(f1.getTime(), expected); assertEquals(f1.getObject(), expected); assertEquals(f1.getType(), type); - assertEquals(f1.toString(), "TIME '12:30:00'"); Field f2 = new Field(f1); assertEquals(f2, f1); @@ -215,7 +204,6 @@ public void testTimestamp() assertEquals(f1.getTimestamp(), expected); assertEquals(f1.getObject(), expected); assertEquals(f1.getType(), type); - assertEquals(f1.toString(), "TIMESTAMP '1999-01-01 12:30:00.0'"); Field f2 = new Field(f1); assertEquals(f2, f1); @@ -230,7 +218,6 @@ public void testTinyInt() assertEquals(f1.getByte(), expected); assertEquals(f1.getObject(), expected); assertEquals(f1.getType(), type); - assertEquals(f1.toString(), "123"); Field f2 = new Field(f1); assertEquals(f2, f1); @@ -245,7 +232,6 @@ public void testVarbinary() assertEquals(f1.getVarbinary(), expected); assertEquals(f1.getObject(), expected); assertEquals(f1.getType(), type); - assertEquals(f1.toString(), "CAST('O''Leary' AS VARBINARY)"); Field f2 = new Field(f1); assertEquals(f2, f1); @@ -260,7 +246,6 @@ public void testVarchar() assertEquals(f1.getVarchar(), expected); assertEquals(f1.getObject(), expected); assertEquals(f1.getType(), type); - assertEquals(f1.toString(), "'O''Leary'"); Field f2 = new Field(f1); assertEquals(f2, f1); diff --git a/presto-accumulo/src/test/java/io/prestosql/plugin/accumulo/model/TestRow.java b/presto-accumulo/src/test/java/io/prestosql/plugin/accumulo/model/TestRow.java index 23a0512e7178..fb1259f83b75 100644 --- a/presto-accumulo/src/test/java/io/prestosql/plugin/accumulo/model/TestRow.java +++ b/presto-accumulo/src/test/java/io/prestosql/plugin/accumulo/model/TestRow.java @@ -59,7 +59,6 @@ public void testRow() r1.addField(null, VARCHAR); assertEquals(r1.length(), 14); - assertEquals(r1.toString(), "(ARRAY ['a','b','c'],true,DATE 
'1999-01-01',123.45678,123.45678,12345678,12345678,12345,TIME '12:30:00',TIMESTAMP '1999-01-01 12:30:00.0',123,CAST('O''Leary' AS VARBINARY),'O''Leary',null)"); Row r2 = new Row(r1); assertEquals(r2, r1); From ead1407b42921c0d26dc980e3746304f738702ce Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Mon, 27 Apr 2020 23:19:53 +0200 Subject: [PATCH 305/519] Correct constant to represent TIME --- .../test/java/io/prestosql/plugin/accumulo/model/TestField.java | 2 +- .../test/java/io/prestosql/plugin/accumulo/model/TestRow.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/presto-accumulo/src/test/java/io/prestosql/plugin/accumulo/model/TestField.java b/presto-accumulo/src/test/java/io/prestosql/plugin/accumulo/model/TestField.java index 778e2c43a7d2..25062cc3632a 100644 --- a/presto-accumulo/src/test/java/io/prestosql/plugin/accumulo/model/TestField.java +++ b/presto-accumulo/src/test/java/io/prestosql/plugin/accumulo/model/TestField.java @@ -185,7 +185,7 @@ public void testSmallInt() public void testTime() { Type type = TIME; - Time expected = new Time(new GregorianCalendar(1999, 0, 1, 12, 30, 0).getTime().getTime()); + Time expected = new Time(new GregorianCalendar(1970, 0, 1, 12, 30, 0).getTime().getTime()); Field f1 = new Field(expected, type); assertEquals(f1.getTime(), expected); assertEquals(f1.getObject(), expected); diff --git a/presto-accumulo/src/test/java/io/prestosql/plugin/accumulo/model/TestRow.java b/presto-accumulo/src/test/java/io/prestosql/plugin/accumulo/model/TestRow.java index fb1259f83b75..288f479bc52a 100644 --- a/presto-accumulo/src/test/java/io/prestosql/plugin/accumulo/model/TestRow.java +++ b/presto-accumulo/src/test/java/io/prestosql/plugin/accumulo/model/TestRow.java @@ -51,7 +51,7 @@ public void testRow() r1.addField(12345678, INTEGER); r1.addField(new Field(12345678L, BIGINT)); r1.addField(new Field((short) 12345, SMALLINT)); - r1.addField(new GregorianCalendar(1999, 0, 1, 12, 30, 0).getTime().getTime(), TIME); 
+ r1.addField(new GregorianCalendar(1970, 0, 1, 12, 30, 0).getTime().getTime(), TIME); r1.addField(new Field(new Timestamp(new GregorianCalendar(1999, 0, 1, 12, 30, 0).getTime().getTime()), TIMESTAMP)); r1.addField((byte) 123, TINYINT); r1.addField(new Field("O'Leary".getBytes(UTF_8), VARBINARY)); From ad48ae06bb0969ae147faca94c00f91df3da95ac Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Mon, 27 Apr 2020 23:19:55 +0200 Subject: [PATCH 306/519] Simplify type conversion code The conversion code was flexible, allowing different representations for given Presto Type. This is unnecessary, as the value passed is always a Presto Type's stack representation. --- .../plugin/accumulo/model/Field.java | 152 ++++-------------- .../prestosql/plugin/accumulo/model/Row.java | 4 +- .../plugin/accumulo/model/TestField.java | 21 +-- .../plugin/accumulo/model/TestRow.java | 21 +-- 4 files changed, 59 insertions(+), 139 deletions(-) diff --git a/presto-accumulo/src/main/java/io/prestosql/plugin/accumulo/model/Field.java b/presto-accumulo/src/main/java/io/prestosql/plugin/accumulo/model/Field.java index a0ea0006e4b0..af5764baccac 100644 --- a/presto-accumulo/src/main/java/io/prestosql/plugin/accumulo/model/Field.java +++ b/presto-accumulo/src/main/java/io/prestosql/plugin/accumulo/model/Field.java @@ -13,6 +13,9 @@ */ package io.prestosql.plugin.accumulo.model; +import com.google.common.primitives.Primitives; +import com.google.common.primitives.Shorts; +import com.google.common.primitives.SignedBytes; import io.airlift.slice.Slice; import io.prestosql.plugin.accumulo.Types; import io.prestosql.spi.PrestoException; @@ -24,12 +27,12 @@ import java.sql.Date; import java.sql.Time; import java.sql.Timestamp; +import java.time.LocalDate; import java.util.Arrays; -import java.util.Calendar; import java.util.Objects; import static com.google.common.base.MoreObjects.toStringHelper; -import static io.prestosql.spi.StandardErrorCode.FUNCTION_IMPLEMENTATION_ERROR; +import static 
com.google.common.base.Preconditions.checkArgument; import static io.prestosql.spi.StandardErrorCode.NOT_SUPPORTED; import static io.prestosql.spi.type.BigintType.BIGINT; import static io.prestosql.spi.type.BooleanType.BOOLEAN; @@ -43,9 +46,9 @@ import static io.prestosql.spi.type.TinyintType.TINYINT; import static io.prestosql.spi.type.VarbinaryType.VARBINARY; import static io.prestosql.spi.type.VarcharType.VARCHAR; -import static java.nio.charset.StandardCharsets.UTF_8; +import static java.lang.Float.intBitsToFloat; +import static java.lang.Math.toIntExact; import static java.util.Objects.requireNonNull; -import static java.util.concurrent.TimeUnit.DAYS; public class Field { @@ -53,14 +56,14 @@ public class Field private final Type type; private final boolean indexed; - public Field(Object value, Type type) + public Field(Object nativeValue, Type type) { - this(value, type, false); + this(nativeValue, type, false); } - public Field(Object value, Type type, boolean indexed) + public Field(Object nativeValue, Type type, boolean indexed) { - this.value = cleanObject(value, type); + this.value = convert(nativeValue, type); this.type = requireNonNull(type, "type is null"); this.indexed = indexed; } @@ -270,174 +273,85 @@ public String toString() } /** - * Does it's damnedest job to convert the given object to the given type. + * Convert Presto native value (stack representation) of given type to Accumulo equivalent. 
* * @param value Object to convert * @param type Destination Presto type - * @return Null if null, the converted type of it could convert it, or the same value if it is fine just the way it is :D - * @throws PrestoException If the given object is not any flavor of the given type */ - private static Object cleanObject(Object value, Type type) + private static Object convert(Object value, Type type) { if (value == null) { return null; } + checkArgument(Primitives.wrap(type.getJavaType()).isInstance(value), "Invalid representation for %s: %s [%s]", type, value, value.getClass().getName()); + // Array? Better be a block! if (Types.isArrayType(type)) { - if (!(value instanceof Block)) { - throw new PrestoException(FUNCTION_IMPLEMENTATION_ERROR, "Object is not a Block, but " + value.getClass()); - } + // Block return value; } // Map? Better be a block! if (Types.isMapType(type)) { - if (!(value instanceof Block)) { - throw new PrestoException(FUNCTION_IMPLEMENTATION_ERROR, "Object is not a Block, but " + value.getClass()); - } + // Block return value; } // And now for the plain types if (type.equals(BIGINT)) { - if (!(value instanceof Long)) { - throw new PrestoException(FUNCTION_IMPLEMENTATION_ERROR, "Object is not a Long, but " + value.getClass()); - } + // long return value; } if (type.equals(INTEGER)) { - if (value instanceof Long) { - return ((Long) value).intValue(); - } - - if (!(value instanceof Integer)) { - throw new PrestoException(FUNCTION_IMPLEMENTATION_ERROR, "Object is not a Long or Integer, but " + value.getClass()); - } - return value; + return toIntExact((long) value); } if (type.equals(BOOLEAN)) { - if (!(value instanceof Boolean)) { - throw new PrestoException(FUNCTION_IMPLEMENTATION_ERROR, "Object is not a Boolean, but " + value.getClass()); - } + // boolean return value; } if (type.equals(DATE)) { - if (value instanceof Long) { - return new Date(DAYS.toMillis((Long) value)); - } - - if (value instanceof Calendar) { - return new Date(((Calendar) 
value).getTime().getTime()); - } - - if (!(value instanceof Date)) { - throw new PrestoException(FUNCTION_IMPLEMENTATION_ERROR, "Object is not a Calendar, Date, or Long, but " + value.getClass()); - } - return value; + return Date.valueOf(LocalDate.ofEpochDay((long) value)); } if (type.equals(DOUBLE)) { - if (!(value instanceof Double)) { - throw new PrestoException(FUNCTION_IMPLEMENTATION_ERROR, "Object is not a Double, but " + value.getClass()); - } + // double return value; } if (type.equals(REAL)) { - if (value instanceof Long) { - return Float.intBitsToFloat(((Long) value).intValue()); - } - - if (value instanceof Integer) { - return Float.intBitsToFloat((Integer) value); - } - - if (!(value instanceof Float)) { - throw new PrestoException(FUNCTION_IMPLEMENTATION_ERROR, "Object is not a Float, but " + value.getClass()); - } - return value; + return intBitsToFloat(toIntExact((long) value)); } if (type.equals(SMALLINT)) { - if (value instanceof Long) { - return ((Long) value).shortValue(); - } - - if (value instanceof Integer) { - return ((Integer) value).shortValue(); - } - - if (!(value instanceof Short)) { - throw new PrestoException(FUNCTION_IMPLEMENTATION_ERROR, "Object is not a Short, but " + value.getClass()); - } - return value; + return Shorts.checkedCast((long) value); } if (type.equals(TIME)) { - if (value instanceof Long) { - return new Time((Long) value); - } - - if (!(value instanceof Time)) { - throw new PrestoException(FUNCTION_IMPLEMENTATION_ERROR, "Object is not a Long or Time, but " + value.getClass()); - } - return value; + // TODO this likely is incorrect, passing the millis value interpreted in UTC into millis value interpreted in JVM's zone + // TODO account for non-legacy timestamp + return new Time((long) value); } if (type.equals(TIMESTAMP)) { - if (value instanceof Long) { - return new Timestamp((Long) value); - } - - if (!(value instanceof Timestamp)) { - throw new PrestoException(FUNCTION_IMPLEMENTATION_ERROR, "Object is not a Long 
or Timestamp, but " + value.getClass()); - } - return value; + // TODO this likely is incorrect, passing the millis value interpreted in UTC into millis value interpreted in JVM's zone + // TODO account for non-legacy timestamp + return new Timestamp((long) value); } if (type.equals(TINYINT)) { - if (value instanceof Long) { - return ((Long) value).byteValue(); - } - - if (value instanceof Integer) { - return ((Integer) value).byteValue(); - } - - if (value instanceof Short) { - return ((Short) value).byteValue(); - } - - if (!(value instanceof Byte)) { - throw new PrestoException(FUNCTION_IMPLEMENTATION_ERROR, "Object is not a Byte, but " + value.getClass()); - } - return value; + return SignedBytes.checkedCast((long) value); } if (type.equals(VARBINARY)) { - if (value instanceof Slice) { - return ((Slice) value).getBytes(); - } - - if (!(value instanceof byte[])) { - throw new PrestoException(FUNCTION_IMPLEMENTATION_ERROR, "Object is not a Slice byte[], but " + value.getClass()); - } - return value; + return ((Slice) value).getBytes(); } if (type instanceof VarcharType) { - if (value instanceof Slice) { - return new String(((Slice) value).getBytes(), UTF_8); - } - - if (!(value instanceof String)) { - throw new PrestoException(FUNCTION_IMPLEMENTATION_ERROR, "Object is not a Slice or String, but " + value.getClass()); - } - return value; + return ((Slice) value).toStringUtf8(); } throw new PrestoException(NOT_SUPPORTED, "Unsupported PrestoType " + type); diff --git a/presto-accumulo/src/main/java/io/prestosql/plugin/accumulo/model/Row.java b/presto-accumulo/src/main/java/io/prestosql/plugin/accumulo/model/Row.java index 34ca02309c0e..6b2c87cdb7f6 100644 --- a/presto-accumulo/src/main/java/io/prestosql/plugin/accumulo/model/Row.java +++ b/presto-accumulo/src/main/java/io/prestosql/plugin/accumulo/model/Row.java @@ -42,10 +42,10 @@ public Row addField(Field field) return this; } - public Row addField(Object value, Type type) + public Row addField(Object nativeValue, 
Type type) { requireNonNull(type, "type is null"); - fields.add(new Field(value, type)); + fields.add(new Field(nativeValue, type)); return this; } diff --git a/presto-accumulo/src/test/java/io/prestosql/plugin/accumulo/model/TestField.java b/presto-accumulo/src/test/java/io/prestosql/plugin/accumulo/model/TestField.java index 25062cc3632a..e7e33b2da6dc 100644 --- a/presto-accumulo/src/test/java/io/prestosql/plugin/accumulo/model/TestField.java +++ b/presto-accumulo/src/test/java/io/prestosql/plugin/accumulo/model/TestField.java @@ -15,6 +15,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import io.airlift.slice.Slices; import io.prestosql.plugin.accumulo.serializers.AccumuloRowSerializer; import io.prestosql.spi.block.Block; import io.prestosql.spi.type.ArrayType; @@ -28,6 +29,7 @@ import java.sql.Timestamp; import java.util.GregorianCalendar; +import static io.airlift.slice.Slices.utf8Slice; import static io.prestosql.metadata.MetadataManager.createTestMetadataManager; import static io.prestosql.spi.type.BigintType.BIGINT; import static io.prestosql.spi.type.BooleanType.BOOLEAN; @@ -41,6 +43,7 @@ import static io.prestosql.spi.type.TinyintType.TINYINT; import static io.prestosql.spi.type.VarbinaryType.VARBINARY; import static io.prestosql.spi.type.VarcharType.VARCHAR; +import static java.lang.Float.floatToIntBits; import static java.nio.charset.StandardCharsets.UTF_8; import static org.testng.Assert.assertEquals; @@ -89,7 +92,7 @@ public void testDate() { Type type = DATE; Date expected = new Date(new GregorianCalendar(1999, 0, 1).getTime().getTime()); - Field f1 = new Field(expected, type); + Field f1 = new Field(10592L, type); assertEquals(f1.getDate(), expected); assertEquals(f1.getObject(), expected); assertEquals(f1.getType(), type); @@ -117,7 +120,7 @@ public void testFloat() { Type type = REAL; Float expected = 123.45678f; - Field f1 = new Field(expected, type); + Field f1 = new Field((long) 
floatToIntBits(expected), type); assertEquals(f1.getFloat(), expected); assertEquals(f1.getObject(), expected); assertEquals(f1.getType(), type); @@ -131,7 +134,7 @@ public void testInt() { Type type = INTEGER; Integer expected = 12345678; - Field f1 = new Field(expected, type); + Field f1 = new Field((long) expected, type); assertEquals(f1.getInt(), expected); assertEquals(f1.getObject(), expected); assertEquals(f1.getType(), type); @@ -172,7 +175,7 @@ public void testSmallInt() { Type type = SMALLINT; Short expected = 12345; - Field f1 = new Field(expected, type); + Field f1 = new Field((long) expected, type); assertEquals(f1.getShort(), expected); assertEquals(f1.getObject(), expected); assertEquals(f1.getType(), type); @@ -186,7 +189,7 @@ public void testTime() { Type type = TIME; Time expected = new Time(new GregorianCalendar(1970, 0, 1, 12, 30, 0).getTime().getTime()); - Field f1 = new Field(expected, type); + Field f1 = new Field(70200000L, type); assertEquals(f1.getTime(), expected); assertEquals(f1.getObject(), expected); assertEquals(f1.getType(), type); @@ -200,7 +203,7 @@ public void testTimestamp() { Type type = TIMESTAMP; Timestamp expected = new Timestamp(new GregorianCalendar(1999, 0, 1, 12, 30, 0).getTime().getTime()); - Field f1 = new Field(expected, type); + Field f1 = new Field(915219000000L, type); assertEquals(f1.getTimestamp(), expected); assertEquals(f1.getObject(), expected); assertEquals(f1.getType(), type); @@ -214,7 +217,7 @@ public void testTinyInt() { Type type = TINYINT; Byte expected = 123; - Field f1 = new Field(expected, type); + Field f1 = new Field((long) expected, type); assertEquals(f1.getByte(), expected); assertEquals(f1.getObject(), expected); assertEquals(f1.getType(), type); @@ -228,7 +231,7 @@ public void testVarbinary() { Type type = VARBINARY; byte[] expected = "O'Leary".getBytes(UTF_8); - Field f1 = new Field(expected, type); + Field f1 = new Field(Slices.wrappedBuffer(expected.clone()), type); 
assertEquals(f1.getVarbinary(), expected); assertEquals(f1.getObject(), expected); assertEquals(f1.getType(), type); @@ -242,7 +245,7 @@ public void testVarchar() { Type type = VARCHAR; String expected = "O'Leary"; - Field f1 = new Field(expected, type); + Field f1 = new Field(utf8Slice(expected), type); assertEquals(f1.getVarchar(), expected); assertEquals(f1.getObject(), expected); assertEquals(f1.getType(), type); diff --git a/presto-accumulo/src/test/java/io/prestosql/plugin/accumulo/model/TestRow.java b/presto-accumulo/src/test/java/io/prestosql/plugin/accumulo/model/TestRow.java index 288f479bc52a..87108e8469fa 100644 --- a/presto-accumulo/src/test/java/io/prestosql/plugin/accumulo/model/TestRow.java +++ b/presto-accumulo/src/test/java/io/prestosql/plugin/accumulo/model/TestRow.java @@ -14,14 +14,16 @@ package io.prestosql.plugin.accumulo.model; import com.google.common.collect.ImmutableList; +import io.airlift.slice.Slices; import io.prestosql.plugin.accumulo.serializers.AccumuloRowSerializer; import io.prestosql.spi.type.ArrayType; import org.testng.annotations.Test; -import java.sql.Date; import java.sql.Timestamp; +import java.time.LocalDateTime; import java.util.GregorianCalendar; +import static io.airlift.slice.Slices.utf8Slice; import static io.prestosql.spi.type.BigintType.BIGINT; import static io.prestosql.spi.type.BooleanType.BOOLEAN; import static io.prestosql.spi.type.DateType.DATE; @@ -34,6 +36,7 @@ import static io.prestosql.spi.type.TinyintType.TINYINT; import static io.prestosql.spi.type.VarbinaryType.VARBINARY; import static io.prestosql.spi.type.VarcharType.VARCHAR; +import static java.lang.Float.floatToIntBits; import static java.nio.charset.StandardCharsets.UTF_8; import static org.testng.Assert.assertEquals; @@ -45,17 +48,17 @@ public void testRow() Row r1 = new Row(); r1.addField(new Field(AccumuloRowSerializer.getBlockFromArray(VARCHAR, ImmutableList.of("a", "b", "c")), new ArrayType(VARCHAR))); r1.addField(true, BOOLEAN); - 
r1.addField(new Field(new Date(new GregorianCalendar(1999, 0, 1).getTime().getTime()), DATE)); + r1.addField(new Field(10592L, DATE)); r1.addField(123.45678, DOUBLE); - r1.addField(new Field(123.45678f, REAL)); - r1.addField(12345678, INTEGER); + r1.addField(new Field((long) floatToIntBits(123.45678f), REAL)); + r1.addField(12345678L, INTEGER); r1.addField(new Field(12345678L, BIGINT)); - r1.addField(new Field((short) 12345, SMALLINT)); + r1.addField(new Field(12345L, SMALLINT)); r1.addField(new GregorianCalendar(1970, 0, 1, 12, 30, 0).getTime().getTime(), TIME); - r1.addField(new Field(new Timestamp(new GregorianCalendar(1999, 0, 1, 12, 30, 0).getTime().getTime()), TIMESTAMP)); - r1.addField((byte) 123, TINYINT); - r1.addField(new Field("O'Leary".getBytes(UTF_8), VARBINARY)); - r1.addField("O'Leary", VARCHAR); + r1.addField(new Field(Timestamp.valueOf(LocalDateTime.of(1999, 1, 1, 12, 30, 0)).getTime(), TIMESTAMP)); + r1.addField((long) 123, TINYINT); + r1.addField(new Field(Slices.wrappedBuffer("O'Leary".getBytes(UTF_8)), VARBINARY)); + r1.addField(utf8Slice("O'Leary"), VARCHAR); r1.addField(null, VARCHAR); assertEquals(r1.length(), 14); From 3eef328d2981b6ae3fc5552d976942623a98a31b Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Sun, 3 May 2020 17:03:31 +0200 Subject: [PATCH 307/519] Remove redundant test case --- .../AbstractTestDistributedQueries.java | 19 ------------------- 1 file changed, 19 deletions(-) diff --git a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java index 9f44b9f576e5..50c9c6eb7767 100644 --- a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java +++ b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java @@ -275,25 +275,6 @@ public void testExplainAnalyze() "EXPLAIN ANALYZE SELECT x + y FROM (" + " SELECT orderdate, COUNT(*) x FROM orders GROUP BY 
orderdate) a JOIN (" + " SELECT orderdate, COUNT(*) y FROM orders GROUP BY orderdate) b ON a.orderdate = b.orderdate"); - assertExplainAnalyze("" + - "EXPLAIN ANALYZE SELECT *, o2.custkey\n" + - " IN (\n" + - " SELECT orderkey\n" + - " FROM lineitem\n" + - " WHERE orderkey % 5 = 0)\n" + - "FROM (SELECT * FROM orders WHERE custkey % 256 = 0) o1\n" + - "JOIN (SELECT * FROM orders WHERE custkey % 256 = 0) o2\n" + - " ON (o1.orderkey IN (SELECT orderkey FROM lineitem WHERE orderkey % 4 = 0)) = (o2.orderkey IN (SELECT orderkey FROM lineitem WHERE orderkey % 4 = 0))\n" + - "WHERE o1.orderkey\n" + - " IN (\n" + - " SELECT orderkey\n" + - " FROM lineitem\n" + - " WHERE orderkey % 4 = 0)\n" + - "ORDER BY o1.orderkey\n" + - " IN (\n" + - " SELECT orderkey\n" + - " FROM lineitem\n" + - " WHERE orderkey % 7 = 0)"); assertExplainAnalyze("EXPLAIN ANALYZE SELECT count(*), clerk FROM orders GROUP BY clerk UNION ALL SELECT sum(orderkey), clerk FROM orders GROUP BY clerk"); assertExplainAnalyze("EXPLAIN ANALYZE SHOW COLUMNS FROM orders"); From a51e625f2d0382e40e1cc8f23f99a043c15c118f Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Sun, 3 May 2020 17:04:15 +0200 Subject: [PATCH 308/519] Move some tests to appropriate place `AbstractTestDistributedQueries` is run for connectors than support read/write operations. 
- move read-only tests to `AbstractTestIntegrationSmokeTest` - move connector independent tests to `AbstractTestEngineOnlyQueries` --- .../AbstractTestDistributedQueries.java | 89 ++----------------- .../AbstractTestIntegrationSmokeTest.java | 72 +++++++++++++++ .../tests/AbstractTestEngineOnlyQueries.java | 26 ++++++ 3 files changed, 103 insertions(+), 84 deletions(-) diff --git a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java index 50c9c6eb7767..7d1f7da985a4 100644 --- a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java +++ b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java @@ -13,7 +13,6 @@ */ package io.prestosql.testing; -import com.google.common.base.Joiner; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -21,7 +20,6 @@ import io.airlift.testing.Assertions; import io.airlift.units.Duration; import io.prestosql.Session; -import io.prestosql.SystemSessionProperties; import io.prestosql.dispatcher.DispatchManager; import io.prestosql.execution.QueryInfo; import io.prestosql.execution.QueryManager; @@ -67,7 +65,6 @@ import static io.prestosql.testing.sql.TestTable.randomTableSuffix; import static java.lang.String.format; import static java.lang.Thread.currentThread; -import static java.util.Collections.nCopies; import static java.util.Locale.ENGLISH; import static java.util.Objects.requireNonNull; import static java.util.concurrent.TimeUnit.MILLISECONDS; @@ -79,6 +76,11 @@ import static org.testng.Assert.assertFalse; import static org.testng.Assert.assertTrue; +/** + * Generic test for connectors exercising connector's read and write capabilities. 
+ * + * @see AbstractTestIntegrationSmokeTest + */ public abstract class AbstractTestDistributedQueries extends AbstractTestQueries { @@ -266,41 +268,6 @@ public void testCreateTableAsSelect() assertUpdate("DROP TABLE " + tableName); } - @Test - public void testExplainAnalyze() - { - assertExplainAnalyze("EXPLAIN ANALYZE SELECT * FROM orders"); - assertExplainAnalyze("EXPLAIN ANALYZE SELECT count(*), clerk FROM orders GROUP BY clerk"); - assertExplainAnalyze( - "EXPLAIN ANALYZE SELECT x + y FROM (" + - " SELECT orderdate, COUNT(*) x FROM orders GROUP BY orderdate) a JOIN (" + - " SELECT orderdate, COUNT(*) y FROM orders GROUP BY orderdate) b ON a.orderdate = b.orderdate"); - assertExplainAnalyze("EXPLAIN ANALYZE SELECT count(*), clerk FROM orders GROUP BY clerk UNION ALL SELECT sum(orderkey), clerk FROM orders GROUP BY clerk"); - - assertExplainAnalyze("EXPLAIN ANALYZE SHOW COLUMNS FROM orders"); - assertExplainAnalyze("EXPLAIN ANALYZE EXPLAIN SELECT count(*) FROM orders"); - assertExplainAnalyze("EXPLAIN ANALYZE EXPLAIN ANALYZE SELECT count(*) FROM orders"); - assertExplainAnalyze("EXPLAIN ANALYZE SHOW FUNCTIONS"); - assertExplainAnalyze("EXPLAIN ANALYZE SHOW TABLES"); - assertExplainAnalyze("EXPLAIN ANALYZE SHOW SCHEMAS"); - assertExplainAnalyze("EXPLAIN ANALYZE SHOW CATALOGS"); - assertExplainAnalyze("EXPLAIN ANALYZE SHOW SESSION"); - } - - @Test - public void testExplainAnalyzeVerbose() - { - assertExplainAnalyze("EXPLAIN ANALYZE VERBOSE SELECT * FROM orders"); - assertExplainAnalyze("EXPLAIN ANALYZE VERBOSE SELECT rank() OVER (PARTITION BY orderkey ORDER BY clerk DESC) FROM orders"); - assertExplainAnalyze("EXPLAIN ANALYZE VERBOSE SELECT rank() OVER (PARTITION BY orderkey ORDER BY clerk DESC) FROM orders WHERE orderkey < 0"); - } - - @Test(expectedExceptions = RuntimeException.class, expectedExceptionsMessageRegExp = "EXPLAIN ANALYZE doesn't support statement type: DropTable") - public void testExplainAnalyzeDDL() - { - computeActual("EXPLAIN ANALYZE DROP 
TABLE orders"); - } - protected void assertExplainAnalyze(@Language("SQL") String query) { String value = (String) computeActual(query).getOnlyValue(); @@ -962,12 +929,6 @@ private static void assertUntilTimeout(Runnable assertion, Duration timeout) } } - @Test - public void testLargeQuerySuccess() - { - assertQuery("SELECT " + Joiner.on(" AND ").join(nCopies(500, "1 = 1")), "SELECT true"); - } - @Test public void testShowSchemasFromOther() { @@ -975,32 +936,6 @@ public void testShowSchemasFromOther() assertTrue(result.getOnlyColumnAsSet().containsAll(ImmutableSet.of(INFORMATION_SCHEMA, "tiny", "sf1"))); } - @Test - public void testTableSampleSystem() - { - MaterializedResult fullSample = computeActual("SELECT orderkey FROM orders TABLESAMPLE SYSTEM (100)"); - MaterializedResult emptySample = computeActual("SELECT orderkey FROM orders TABLESAMPLE SYSTEM (0)"); - MaterializedResult randomSample = computeActual("SELECT orderkey FROM orders TABLESAMPLE SYSTEM (50)"); - MaterializedResult all = computeActual("SELECT orderkey FROM orders"); - - assertContains(all, fullSample); - assertEquals(emptySample.getMaterializedRows().size(), 0); - assertTrue(all.getMaterializedRows().size() >= randomSample.getMaterializedRows().size()); - } - - @Test - public void testTableSampleWithFiltering() - { - MaterializedResult emptySample = computeActual("SELECT DISTINCT orderkey, orderdate FROM orders TABLESAMPLE SYSTEM (99) WHERE orderkey BETWEEN 0 AND 0"); - MaterializedResult halfSample = computeActual("SELECT DISTINCT orderkey, orderdate FROM orders TABLESAMPLE SYSTEM (50) WHERE orderkey BETWEEN 0 AND 9999999999"); - MaterializedResult all = computeActual("SELECT orderkey, orderdate FROM orders"); - - assertEquals(emptySample.getMaterializedRows().size(), 0); - // Assertions need to be loose here because SYSTEM sampling random selects data on split boundaries. In this case either all the data will be selected, or - // none of it. 
Sampling with a 100% ratio is ignored, so that also cannot be used to guarantee results. - assertTrue(all.getMaterializedRows().size() >= halfSample.getMaterializedRows().size()); - } - @Test public void testSymbolAliasing() { @@ -1199,18 +1134,6 @@ public void testWrittenStats() assertUpdate("DROP TABLE " + tableName); } - @Test - public void testComplexCast() - { - Session session = Session.builder(getSession()) - .setSystemProperty(SystemSessionProperties.OPTIMIZE_DISTINCT_AGGREGATIONS, "true") - .build(); - // This is optimized using CAST(null AS interval day to second) which may be problematic to deserialize on worker - assertQuery(session, "WITH t(a, b) AS (VALUES (1, INTERVAL '1' SECOND)) " + - "SELECT count(DISTINCT a), CAST(max(b) AS VARCHAR) FROM t", - "VALUES (1, '0 00:00:01.000')"); - } - @Test public void testCreateSchema() { @@ -1399,9 +1322,7 @@ private List testDataMappingSmokeTestData() .add(new DataMappingTestSetup("timestamp", "TIMESTAMP '2020-02-12 15:03:00'", "TIMESTAMP '2199-12-31 23:59:59.999'")) .add(new DataMappingTestSetup("timestamp with time zone", "TIMESTAMP '2020-02-12 15:03:00 +01:00'", "TIMESTAMP '9999-12-31 23:59:59.999 +12:00'")) .add(new DataMappingTestSetup("char(3)", "'ab'", "'zzz'")) - .add(new DataMappingTestSetup("char(3)", "'ab '", "'zzz'")) .add(new DataMappingTestSetup("varchar(3)", "'de'", "'zzz'")) - .add(new DataMappingTestSetup("varchar(3)", "'de '", "'zzz'")) .add(new DataMappingTestSetup("varchar", "'łąka for the win'", "'ŻŻŻŻŻŻŻŻŻŻ'")) .add(new DataMappingTestSetup("varbinary", "X'12ab3f'", "X'ffffffffffffffffffff'")) .build(); diff --git a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestIntegrationSmokeTest.java b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestIntegrationSmokeTest.java index 54d70ff9da02..b9c62c8715a3 100644 --- a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestIntegrationSmokeTest.java +++ 
b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestIntegrationSmokeTest.java @@ -19,10 +19,17 @@ import static io.prestosql.spi.type.VarcharType.VARCHAR; import static io.prestosql.testing.QueryAssertions.assertContains; import static io.prestosql.testing.assertions.Assert.assertEquals; +import static java.lang.String.format; import static java.lang.String.join; import static java.util.Collections.nCopies; import static org.assertj.core.api.Assertions.assertThat; +import static org.testng.Assert.assertTrue; +/** + * Generic test for connectors exercising connector's read capabilities. + * + * @see AbstractTestDistributedQueries + */ public abstract class AbstractTestIntegrationSmokeTest extends AbstractTestQueryFramework { @@ -153,6 +160,71 @@ public void testDescribeTable() assertEquals(actualColumns, expectedColumns); } + @Test + public void testExplainAnalyze() + { + assertExplainAnalyze("EXPLAIN ANALYZE SELECT * FROM orders"); + assertExplainAnalyze("EXPLAIN ANALYZE SELECT count(*), clerk FROM orders GROUP BY clerk"); + assertExplainAnalyze( + "EXPLAIN ANALYZE SELECT x + y FROM (" + + " SELECT orderdate, COUNT(*) x FROM orders GROUP BY orderdate) a JOIN (" + + " SELECT orderdate, COUNT(*) y FROM orders GROUP BY orderdate) b ON a.orderdate = b.orderdate"); + assertExplainAnalyze("EXPLAIN ANALYZE SELECT count(*), clerk FROM orders GROUP BY clerk UNION ALL SELECT sum(orderkey), clerk FROM orders GROUP BY clerk"); + + assertExplainAnalyze("EXPLAIN ANALYZE SHOW COLUMNS FROM orders"); + assertExplainAnalyze("EXPLAIN ANALYZE EXPLAIN SELECT count(*) FROM orders"); + assertExplainAnalyze("EXPLAIN ANALYZE EXPLAIN ANALYZE SELECT count(*) FROM orders"); + assertExplainAnalyze("EXPLAIN ANALYZE SHOW FUNCTIONS"); + assertExplainAnalyze("EXPLAIN ANALYZE SHOW TABLES"); + assertExplainAnalyze("EXPLAIN ANALYZE SHOW SCHEMAS"); + assertExplainAnalyze("EXPLAIN ANALYZE SHOW CATALOGS"); + assertExplainAnalyze("EXPLAIN ANALYZE SHOW SESSION"); + } + + @Test + public void 
testExplainAnalyzeVerbose() + { + assertExplainAnalyze("EXPLAIN ANALYZE VERBOSE SELECT * FROM orders"); + assertExplainAnalyze("EXPLAIN ANALYZE VERBOSE SELECT rank() OVER (PARTITION BY orderkey ORDER BY clerk DESC) FROM orders"); + assertExplainAnalyze("EXPLAIN ANALYZE VERBOSE SELECT rank() OVER (PARTITION BY orderkey ORDER BY clerk DESC) FROM orders WHERE orderkey < 0"); + } + + protected void assertExplainAnalyze(@Language("SQL") String query) + { + String value = (String) computeActual(query).getOnlyValue(); + + assertTrue(value.matches("(?s:.*)CPU:.*, Input:.*, Output(?s:.*)"), format("Expected output to contain \"CPU:.*, Input:.*, Output\", but it is %s", value)); + + // TODO: check that rendered plan is as expected, once stats are collected in a consistent way + // assertTrue(value.contains("Cost: "), format("Expected output to contain \"Cost: \", but it is %s", value)); + } + + @Test + public void testTableSampleSystem() + { + MaterializedResult fullSample = computeActual("SELECT orderkey FROM orders TABLESAMPLE SYSTEM (100)"); + MaterializedResult emptySample = computeActual("SELECT orderkey FROM orders TABLESAMPLE SYSTEM (0)"); + MaterializedResult randomSample = computeActual("SELECT orderkey FROM orders TABLESAMPLE SYSTEM (50)"); + MaterializedResult all = computeActual("SELECT orderkey FROM orders"); + + assertContains(all, fullSample); + assertEquals(emptySample.getMaterializedRows().size(), 0); + assertTrue(all.getMaterializedRows().size() >= randomSample.getMaterializedRows().size()); + } + + @Test + public void testTableSampleWithFiltering() + { + MaterializedResult emptySample = computeActual("SELECT DISTINCT orderkey, orderdate FROM orders TABLESAMPLE SYSTEM (99) WHERE orderkey BETWEEN 0 AND 0"); + MaterializedResult halfSample = computeActual("SELECT DISTINCT orderkey, orderdate FROM orders TABLESAMPLE SYSTEM (50) WHERE orderkey BETWEEN 0 AND 9999999999"); + MaterializedResult all = computeActual("SELECT orderkey, orderdate FROM orders"); + + 
assertEquals(emptySample.getMaterializedRows().size(), 0); + // Assertions need to be loose here because SYSTEM sampling random selects data on split boundaries. In this case either all the data will be selected, or + // none of it. Sampling with a 100% ratio is ignored, so that also cannot be used to guarantee results. + assertTrue(all.getMaterializedRows().size() >= halfSample.getMaterializedRows().size()); + } + @Test public void testShowCreateTable() { diff --git a/presto-tests/src/test/java/io/prestosql/tests/AbstractTestEngineOnlyQueries.java b/presto-tests/src/test/java/io/prestosql/tests/AbstractTestEngineOnlyQueries.java index 15b82f972a94..722ce136b6d0 100644 --- a/presto-tests/src/test/java/io/prestosql/tests/AbstractTestEngineOnlyQueries.java +++ b/presto-tests/src/test/java/io/prestosql/tests/AbstractTestEngineOnlyQueries.java @@ -13,6 +13,7 @@ */ package io.prestosql.tests; +import com.google.common.base.Joiner; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -72,6 +73,7 @@ import static io.prestosql.tests.QueryTemplate.queryTemplate; import static io.prestosql.type.UnknownType.UNKNOWN; import static java.lang.String.format; +import static java.util.Collections.nCopies; import static java.util.stream.Collectors.joining; import static java.util.stream.Collectors.toList; import static java.util.stream.IntStream.range; @@ -210,6 +212,12 @@ public void testIntersectAllFails() assertQueryFails("SELECT * FROM (VALUES 1, 2, 3, 4) INTERSECT ALL SELECT * FROM (VALUES 3, 4)", "line 1:35: INTERSECT ALL not yet implemented"); } + @Test + public void testLargeQuerySuccess() + { + assertQuery("SELECT " + Joiner.on(" AND ").join(nCopies(500, "1 = 1")), "SELECT true"); + } + @Test public void testLargeInArray() { @@ -531,6 +539,18 @@ public void testInvalidColumn() "line 1:39: Column 'orderkey_1' cannot be resolved"); } + @Test + public void testComplexCast() + { + 
Session session = Session.builder(getSession()) + .setSystemProperty(SystemSessionProperties.OPTIMIZE_DISTINCT_AGGREGATIONS, "true") + .build(); + // This is optimized using CAST(null AS interval day to second) which may be problematic to deserialize on worker + assertQuery(session, "WITH t(a, b) AS (VALUES (1, INTERVAL '1' SECOND)) " + + "SELECT count(DISTINCT a), CAST(max(b) AS VARCHAR) FROM t", + "VALUES (1, '0 00:00:01.000')"); + } + @Test public void testInvalidCast() { @@ -3454,6 +3474,12 @@ public void testExplainDdl() assertExplainDdl("ROLLBACK"); } + @Test + public void testExplainAnalyzeDDL() + { + assertQueryFails("EXPLAIN ANALYZE DROP TABLE orders", "EXPLAIN ANALYZE doesn't support statement type: DropTable"); + } + private void assertExplainDdl(String query) { assertExplainDdl(query, query); From 13a7cd4caca6cb5b5e42e9b342137df43c4699b8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20=C5=9Alizak?= Date: Sun, 3 May 2020 23:27:59 +0200 Subject: [PATCH 309/519] Add some missing types to TestingTypeManager --- .../test/java/io/prestosql/spi/type/TestingTypeManager.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/presto-spi/src/test/java/io/prestosql/spi/type/TestingTypeManager.java b/presto-spi/src/test/java/io/prestosql/spi/type/TestingTypeManager.java index d24d9af515ea..6f237e7c3c49 100644 --- a/presto-spi/src/test/java/io/prestosql/spi/type/TestingTypeManager.java +++ b/presto-spi/src/test/java/io/prestosql/spi/type/TestingTypeManager.java @@ -24,15 +24,17 @@ import static io.prestosql.spi.type.DateType.DATE; import static io.prestosql.spi.type.DoubleType.DOUBLE; import static io.prestosql.spi.type.HyperLogLogType.HYPER_LOG_LOG; +import static io.prestosql.spi.type.IntegerType.INTEGER; import static io.prestosql.spi.type.TestingIdType.ID; import static io.prestosql.spi.type.TimestampType.TIMESTAMP; +import static io.prestosql.spi.type.TimestampWithTimeZoneType.TIMESTAMP_WITH_TIME_ZONE; import static 
io.prestosql.spi.type.VarbinaryType.VARBINARY; import static io.prestosql.spi.type.VarcharType.VARCHAR; public class TestingTypeManager implements TypeManager { - private static final List TYPES = ImmutableList.of(BOOLEAN, BIGINT, DOUBLE, VARCHAR, VARBINARY, TIMESTAMP, DATE, ID, HYPER_LOG_LOG); + private static final List TYPES = ImmutableList.of(BOOLEAN, BIGINT, DOUBLE, INTEGER, VARCHAR, VARBINARY, TIMESTAMP, TIMESTAMP_WITH_TIME_ZONE, DATE, ID, HYPER_LOG_LOG); @Override public Type getType(TypeSignature signature) From 1df1c54379903efca43ec575a725980d004c499a Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Mon, 4 May 2020 08:37:05 +0200 Subject: [PATCH 310/519] Preserve first close exception --- .../server/testing/TestingPrestoServer.java | 22 +++++++++++-------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/server/testing/TestingPrestoServer.java b/presto-main/src/main/java/io/prestosql/server/testing/TestingPrestoServer.java index ad486e9f04d2..975c6180254c 100644 --- a/presto-main/src/main/java/io/prestosql/server/testing/TestingPrestoServer.java +++ b/presto-main/src/main/java/io/prestosql/server/testing/TestingPrestoServer.java @@ -18,6 +18,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; +import com.google.common.io.Closer; import com.google.common.net.HostAndPort; import com.google.inject.Injector; import com.google.inject.Key; @@ -324,15 +325,18 @@ private TestingPrestoServer( public void close() throws IOException { - try { - if (lifeCycleManager != null) { - lifeCycleManager.stop(); - } - } - finally { - if (isDirectory(baseDataDir) && !preserveData) { - deleteRecursively(baseDataDir, ALLOW_INSECURE); - } + try (Closer closer = Closer.create()) { + closer.register(() -> { + if (isDirectory(baseDataDir) && !preserveData) { + deleteRecursively(baseDataDir, ALLOW_INSECURE); + } + }); + + 
closer.register(() -> { + if (lifeCycleManager != null) { + lifeCycleManager.stop(); + } + }); } } From 0024db73500c460e6589af9f8806f69a606a8e49 Mon Sep 17 00:00:00 2001 From: Yuya Ebihara Date: Wed, 22 Apr 2020 22:49:17 +0900 Subject: [PATCH 311/519] Rename variable name and extract id column name in Cassandra --- .../plugin/cassandra/CassandraMetadata.java | 3 ++- .../plugin/cassandra/CassandraPageSink.java | 13 +++++++------ .../plugin/cassandra/util/CassandraCqlUtils.java | 1 + 3 files changed, 10 insertions(+), 7 deletions(-) diff --git a/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraMetadata.java b/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraMetadata.java index 37362b704f7d..8a1737aa8766 100644 --- a/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraMetadata.java +++ b/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraMetadata.java @@ -51,6 +51,7 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.collect.ImmutableList.toImmutableList; import static io.prestosql.plugin.cassandra.CassandraType.toCassandraType; +import static io.prestosql.plugin.cassandra.util.CassandraCqlUtils.ID_COLUMN_NAME; import static io.prestosql.plugin.cassandra.util.CassandraCqlUtils.cqlNameToSqlName; import static io.prestosql.plugin.cassandra.util.CassandraCqlUtils.validColumnName; import static io.prestosql.spi.StandardErrorCode.NOT_SUPPORTED; @@ -283,7 +284,7 @@ private CassandraOutputTableHandle createTable(ConnectorTableMetadata tableMetad ImmutableList.Builder columnNames = ImmutableList.builder(); ImmutableList.Builder columnTypes = ImmutableList.builder(); ImmutableList.Builder columnExtra = ImmutableList.builder(); - columnExtra.add(new ExtraColumnMetadata("id", true)); + columnExtra.add(new ExtraColumnMetadata(ID_COLUMN_NAME, true)); for (ColumnMetadata column : tableMetadata.getColumns()) { columnNames.add(column.getName()); 
columnTypes.add(column.getType()); diff --git a/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraPageSink.java b/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraPageSink.java index 78c3af5dafb2..0c27554909ba 100644 --- a/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraPageSink.java +++ b/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraPageSink.java @@ -41,6 +41,7 @@ import static com.datastax.driver.core.querybuilder.QueryBuilder.bindMarker; import static com.datastax.driver.core.querybuilder.QueryBuilder.insertInto; import static com.google.common.base.Preconditions.checkArgument; +import static io.prestosql.plugin.cassandra.util.CassandraCqlUtils.ID_COLUMN_NAME; import static io.prestosql.plugin.cassandra.util.CassandraCqlUtils.validColumnName; import static io.prestosql.plugin.cassandra.util.CassandraCqlUtils.validSchemaName; import static io.prestosql.plugin.cassandra.util.CassandraCqlUtils.validTableName; @@ -69,7 +70,7 @@ public class CassandraPageSink private final CassandraSession cassandraSession; private final PreparedStatement insert; private final List columnTypes; - private final boolean generateUUID; + private final boolean generateUuid; private final Function toCassandraDate; public CassandraPageSink( @@ -79,14 +80,14 @@ public CassandraPageSink( String tableName, List columnNames, List columnTypes, - boolean generateUUID) + boolean generateUuid) { this.cassandraSession = requireNonNull(cassandraSession, "cassandraSession"); requireNonNull(schemaName, "schemaName is null"); requireNonNull(tableName, "tableName is null"); requireNonNull(columnNames, "columnNames is null"); this.columnTypes = ImmutableList.copyOf(requireNonNull(columnTypes, "columnTypes is null")); - this.generateUUID = generateUUID; + this.generateUuid = generateUuid; if (protocolVersion.toInt() <= ProtocolVersion.V3.toInt()) { this.toCassandraDate = value -> 
DATE_FORMATTER.print(TimeUnit.DAYS.toMillis(value)); @@ -96,8 +97,8 @@ public CassandraPageSink( } Insert insert = insertInto(validSchemaName(schemaName), validTableName(tableName)); - if (generateUUID) { - insert.value("id", bindMarker()); + if (generateUuid) { + insert.value(ID_COLUMN_NAME, bindMarker()); } for (int i = 0; i < columnNames.size(); i++) { String columnName = columnNames.get(i); @@ -112,7 +113,7 @@ public CompletableFuture appendPage(Page page) { for (int position = 0; position < page.getPositionCount(); position++) { List values = new ArrayList<>(columnTypes.size() + 1); - if (generateUUID) { + if (generateUuid) { values.add(UUID.randomUUID()); } diff --git a/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/util/CassandraCqlUtils.java b/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/util/CassandraCqlUtils.java index ff7c4a1cdacc..1bb85f8cf247 100644 --- a/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/util/CassandraCqlUtils.java +++ b/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/util/CassandraCqlUtils.java @@ -29,6 +29,7 @@ public final class CassandraCqlUtils { private CassandraCqlUtils() {} + public static final String ID_COLUMN_NAME = "id"; public static final String EMPTY_COLUMN_NAME = "__empty__"; public static String validSchemaName(String identifier) From 8c9b40584ec5edb7bdde3adf0c8c97678e45a7c6 Mon Sep 17 00:00:00 2001 From: Yuya Ebihara Date: Wed, 22 Apr 2020 00:04:51 +0900 Subject: [PATCH 312/519] Allow INSERT statement for Cassandra table having hidden id column --- .../cassandra/CassandraInsertTableHandle.java | 11 ++++++++- .../plugin/cassandra/CassandraMetadata.java | 23 ++++++++++++++++--- .../cassandra/CassandraPageSinkProvider.java | 2 +- .../TestCassandraIntegrationSmokeTest.java | 10 ++++++++ 4 files changed, 41 insertions(+), 5 deletions(-) diff --git a/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraInsertTableHandle.java 
b/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraInsertTableHandle.java index ab981d92901e..dc817a119d84 100644 --- a/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraInsertTableHandle.java +++ b/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraInsertTableHandle.java @@ -31,13 +31,15 @@ public class CassandraInsertTableHandle private final String tableName; private final List columnNames; private final List columnTypes; + private final boolean generateUuid; @JsonCreator public CassandraInsertTableHandle( @JsonProperty("schemaName") String schemaName, @JsonProperty("tableName") String tableName, @JsonProperty("columnNames") List columnNames, - @JsonProperty("columnTypes") List columnTypes) + @JsonProperty("columnTypes") List columnTypes, + @JsonProperty("generateUuid") boolean generateUuid) { this.schemaName = requireNonNull(schemaName, "schemaName is null"); this.tableName = requireNonNull(tableName, "tableName is null"); @@ -47,6 +49,7 @@ public CassandraInsertTableHandle( checkArgument(columnNames.size() == columnTypes.size(), "columnNames and columnTypes sizes don't match"); this.columnNames = ImmutableList.copyOf(columnNames); this.columnTypes = ImmutableList.copyOf(columnTypes); + this.generateUuid = generateUuid; } @JsonProperty @@ -73,6 +76,12 @@ public List getColumnTypes() return columnTypes; } + @JsonProperty + public boolean isGenerateUuid() + { + return generateUuid; + } + @Override public String toString() { diff --git a/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraMetadata.java b/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraMetadata.java index 8a1737aa8766..cf050daf4609 100644 --- a/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraMetadata.java +++ b/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraMetadata.java @@ -50,6 +50,7 @@ import static com.google.common.base.Preconditions.checkArgument; 
import static com.google.common.collect.ImmutableList.toImmutableList; +import static com.google.common.collect.MoreCollectors.toOptional; import static io.prestosql.plugin.cassandra.CassandraType.toCassandraType; import static io.prestosql.plugin.cassandra.util.CassandraCqlUtils.ID_COLUMN_NAME; import static io.prestosql.plugin.cassandra.util.CassandraCqlUtils.cqlNameToSqlName; @@ -336,14 +337,25 @@ public ConnectorInsertTableHandle beginInsert(ConnectorSession session, Connecto SchemaTableName schemaTableName = new SchemaTableName(table.getSchemaName(), table.getTableName()); List columns = cassandraSession.getTable(schemaTableName).getColumns(); - List columnNames = columns.stream().map(CassandraColumnHandle::getName).collect(Collectors.toList()); - List columnTypes = columns.stream().map(CassandraColumnHandle::getType).collect(Collectors.toList()); + List columnNames = columns.stream() + .filter(columnHandle -> !isHiddenIdColumn(columnHandle)) + .map(CassandraColumnHandle::getName) + .collect(Collectors.toList()); + List columnTypes = columns.stream() + .filter(columnHandle -> !isHiddenIdColumn(columnHandle)) + .map(CassandraColumnHandle::getType) + .collect(Collectors.toList()); + boolean generateUuid = columns.stream() + .filter(CassandraMetadata::isHiddenIdColumn) + .collect(toOptional()) // must be at most one + .isPresent(); return new CassandraInsertTableHandle( table.getSchemaName(), table.getTableName(), columnNames, - columnTypes); + columnTypes, + generateUuid); } @Override @@ -351,4 +363,9 @@ public Optional finishInsert(ConnectorSession session, { return Optional.empty(); } + + private static boolean isHiddenIdColumn(CassandraColumnHandle columnHandle) + { + return columnHandle.isHidden() && ID_COLUMN_NAME.equals(columnHandle.getName()); + } } diff --git a/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraPageSinkProvider.java b/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraPageSinkProvider.java index 
a4abb6e74b84..a60c9265b570 100644 --- a/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraPageSinkProvider.java +++ b/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraPageSinkProvider.java @@ -67,6 +67,6 @@ public ConnectorPageSink createPageSink(ConnectorTransactionHandle transactionHa handle.getTableName(), handle.getColumnNames(), handle.getColumnTypes(), - false); + handle.isGenerateUuid()); } } diff --git a/presto-cassandra/src/test/java/io/prestosql/plugin/cassandra/TestCassandraIntegrationSmokeTest.java b/presto-cassandra/src/test/java/io/prestosql/plugin/cassandra/TestCassandraIntegrationSmokeTest.java index df28e8937ebd..209356ba35c5 100644 --- a/presto-cassandra/src/test/java/io/prestosql/plugin/cassandra/TestCassandraIntegrationSmokeTest.java +++ b/presto-cassandra/src/test/java/io/prestosql/plugin/cassandra/TestCassandraIntegrationSmokeTest.java @@ -165,6 +165,16 @@ public void testSelect() assertSelect(TABLE_ALL_TYPES_PARTITION_KEY, false); } + @Test + public void testInsertToTableWithHiddenId() + { + execute("DROP TABLE IF EXISTS test_create_table"); + execute("CREATE TABLE test_create_table (col1 integer)"); + execute("INSERT INTO test_create_table VALUES (12345)"); + assertQuery("SELECT * FROM smoke_test.test_create_table", "VALUES (12345)"); + execute("DROP TABLE test_create_table"); + } + @Test public void testCreateTableAs() { From be3118334fc2509023a31ce21ef826651e5cd955 Mon Sep 17 00:00:00 2001 From: David Phillips Date: Wed, 29 Apr 2020 10:47:27 -0700 Subject: [PATCH 313/519] Remove usage of deprecated immedateCheckedFuture --- .../java/io/prestosql/execution/executor/SimulationSplit.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/presto-main/src/test/java/io/prestosql/execution/executor/SimulationSplit.java b/presto-main/src/test/java/io/prestosql/execution/executor/SimulationSplit.java index 5c92ae52d9db..fc7aa82f2d4b 100644 --- 
a/presto-main/src/test/java/io/prestosql/execution/executor/SimulationSplit.java +++ b/presto-main/src/test/java/io/prestosql/execution/executor/SimulationSplit.java @@ -13,7 +13,6 @@ */ package io.prestosql.execution.executor; -import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.SettableFuture; import io.airlift.units.Duration; @@ -25,6 +24,7 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; +import static com.google.common.util.concurrent.Futures.immediateFuture; import static io.airlift.units.Duration.succinctNanos; import static io.prestosql.operator.Operator.NOT_BLOCKED; import static java.lang.String.format; @@ -154,7 +154,7 @@ public ListenableFuture processFor(Duration duration) task.splitComplete(this); } - return Futures.immediateCheckedFuture(null); + return immediateFuture(null); } ListenableFuture processResult = getProcessResult(); From 1bf8167ad73cd060d97811b512b87f918c706708 Mon Sep 17 00:00:00 2001 From: David Phillips Date: Sun, 3 May 2020 11:11:59 -0700 Subject: [PATCH 314/519] Fix error message for aggregation annotation parser --- .../operator/aggregation/AggregationImplementation.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/presto-main/src/main/java/io/prestosql/operator/aggregation/AggregationImplementation.java b/presto-main/src/main/java/io/prestosql/operator/aggregation/AggregationImplementation.java index 4ff03d3ec158..c6b990ad8c61 100644 --- a/presto-main/src/main/java/io/prestosql/operator/aggregation/AggregationImplementation.java +++ b/presto-main/src/main/java/io/prestosql/operator/aggregation/AggregationImplementation.java @@ -13,6 +13,7 @@ */ package io.prestosql.operator.aggregation; +import com.google.common.base.VerifyException; import com.google.common.collect.ImmutableList; import io.prestosql.metadata.BoundVariables; import 
io.prestosql.metadata.FunctionArgumentDefinition; @@ -423,7 +424,7 @@ else if (baseTypeAnnotation instanceof BlockIndex) { builder.add(BLOCK_INDEX); } else { - throw new IllegalArgumentException("Unsupported annotation: " + annotations[i]); + throw new VerifyException("Unhandled annotation: " + baseTypeAnnotation); } } return builder.build(); From 1166b5a7dbf0c58bac643460e7c673eba99d3e4e Mon Sep 17 00:00:00 2001 From: David Phillips Date: Tue, 28 Apr 2020 10:34:57 -0700 Subject: [PATCH 315/519] Update to Airbase 100 --- pom.xml | 10 ++-------- presto-bigquery/pom.xml | 31 ------------------------------- presto-jdbc/pom.xml | 16 ++++++++++++++-- 3 files changed, 16 insertions(+), 41 deletions(-) diff --git a/pom.xml b/pom.xml index 037d23970a69..3927de32bcbe 100644 --- a/pom.xml +++ b/pom.xml @@ -5,7 +5,7 @@ io.airlift airbase - 99 + 100 io.prestosql @@ -52,7 +52,7 @@ 1.14 174 2.0.0 - 2.3.3 + 2.3.4 0.3.6 - com.teradata - redlinerpm-maven-plugin-td - 2.1.5 + io.airlift.maven.plugins + redlinerpm-maven-plugin + 2.1.6 true @@ -180,6 +203,14 @@ + + /usr/lib/presto/shared + ${server.tar.package}/shared + + * + + + diff --git a/presto-server-rpm/src/main/script/symlink.groovy b/presto-server-rpm/src/main/script/symlink.groovy new file mode 100644 index 000000000000..a09f9992a5ba --- /dev/null +++ b/presto-server-rpm/src/main/script/symlink.groovy @@ -0,0 +1,44 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import java.nio.file.Files +import java.nio.file.LinkOption +import java.nio.file.Path +import java.nio.file.Paths + +// We expect this to run on top of the unpacked tarball, which contains +// hard links for any duplicate named JAR files. All of the JARs are +// moved to a top level shared directory and replaced with symlinks. + +Path root = Paths.get(properties['root']) +Path shared = root.resolve('shared') + +Files.createDirectory(shared) + +List files = new FileNameFinder().getFileNames(root.toString(), '**/*.jar') + +for (file in files) { + Path source = Paths.get(file) + Path target = shared.resolve(source.getFileName()) + if (!Files.exists(target, LinkOption.NOFOLLOW_LINKS)) { + Files.move(source, target) + } + else if (!Files.isSameFile(source, target)) { + throw new RuntimeException("Not same file: $source <> $target") + } + else { + Files.delete(source) + } + Files.createSymbolicLink(source, source.getParent().relativize(target)) +} From ae220a9efd413fcf24d77a422ba6ade8e0b88d9d Mon Sep 17 00:00:00 2001 From: David Phillips Date: Mon, 4 May 2020 12:17:41 -0700 Subject: [PATCH 318/519] Simplify user mapping rule matching --- .../java/io/prestosql/server/security/UserMapping.java | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/server/security/UserMapping.java b/presto-main/src/main/java/io/prestosql/server/security/UserMapping.java index 81b23b1cb2e1..22fa10dd9e1f 100644 --- a/presto-main/src/main/java/io/prestosql/server/security/UserMapping.java +++ b/presto-main/src/main/java/io/prestosql/server/security/UserMapping.java @@ -55,18 +55,14 @@ public static UserMapping createUserMapping(Optional userMappingPattern, public String mapUser(String principal) throws UserMappingException { - Optional user = Optional.empty(); for (Rule rule : rules) { - user = rule.mapUser(principal); + Optional user = rule.mapUser(principal); if (user.isPresent()) { - break; + return user.get(); } } - if 
(!user.isPresent()) { - throw new UserMappingException("No user mapping patterns match the principal"); - } - return user.get(); + throw new UserMappingException("No user mapping patterns match the principal"); } public static final class UserMappingRules From a6c7da4f1d7234c04491b8a36c8f2dc009e22c15 Mon Sep 17 00:00:00 2001 From: David Phillips Date: Mon, 4 May 2020 12:19:26 -0700 Subject: [PATCH 319/519] Validate that user mapping rules list is not empty --- .../main/java/io/prestosql/server/security/UserMapping.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/presto-main/src/main/java/io/prestosql/server/security/UserMapping.java b/presto-main/src/main/java/io/prestosql/server/security/UserMapping.java index 22fa10dd9e1f..256aa26463bd 100644 --- a/presto-main/src/main/java/io/prestosql/server/security/UserMapping.java +++ b/presto-main/src/main/java/io/prestosql/server/security/UserMapping.java @@ -49,7 +49,9 @@ public static UserMapping createUserMapping(Optional userMappingPattern, @VisibleForTesting UserMapping(List rules) { - this.rules = ImmutableList.copyOf(requireNonNull(rules, "rules is null")); + requireNonNull(rules, "rules is null"); + checkArgument(!rules.isEmpty(), "rules list is empty"); + this.rules = ImmutableList.copyOf(rules); } public String mapUser(String principal) From db7fcc9f6273d830fafe0df71afda67be09ffccd Mon Sep 17 00:00:00 2001 From: David Phillips Date: Mon, 4 May 2020 21:39:43 -0700 Subject: [PATCH 320/519] Update to Airlift 0.196 This fixes a size estimation error when updating a T-Digest. 
--- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 3927de32bcbe..776a8a50d087 100644 --- a/pom.xml +++ b/pom.xml @@ -44,7 +44,7 @@ 3.3.9 4.7.1 - 0.195 + 0.196 ${dep.airlift.version} 1.11.749 3.9.0 From d937cebe45696e51d19ef1325c71696bf62ef9c2 Mon Sep 17 00:00:00 2001 From: David Phillips Date: Mon, 4 May 2020 09:26:04 -0700 Subject: [PATCH 321/519] Add documentation for SHOW CREATE SCHEMA --- presto-docs/src/main/sphinx/sql.rst | 1 + .../main/sphinx/sql/show-create-schema.rst | 20 +++++++++++++++++++ 2 files changed, 21 insertions(+) create mode 100644 presto-docs/src/main/sphinx/sql/show-create-schema.rst diff --git a/presto-docs/src/main/sphinx/sql.rst b/presto-docs/src/main/sphinx/sql.rst index 56b7077f5525..27d1cbac6c81 100644 --- a/presto-docs/src/main/sphinx/sql.rst +++ b/presto-docs/src/main/sphinx/sql.rst @@ -50,6 +50,7 @@ functions and operators`. sql/set-session sql/show-catalogs sql/show-columns + sql/show-create-schema sql/show-create-table sql/show-create-view sql/show-functions diff --git a/presto-docs/src/main/sphinx/sql/show-create-schema.rst b/presto-docs/src/main/sphinx/sql/show-create-schema.rst new file mode 100644 index 000000000000..ea55fc926a0f --- /dev/null +++ b/presto-docs/src/main/sphinx/sql/show-create-schema.rst @@ -0,0 +1,20 @@ +================== +SHOW CREATE SCHEMA +================== + +Synopsis +-------- + +.. code-block:: none + + SHOW CREATE SCHEMA schema_name + +Description +----------- + +Show the SQL statement that creates the specified schema. 
+ +See Also +-------- + +:doc:`create-schema` From eee6045d8d4b69b288a2d8d43c56e628b5c7189b Mon Sep 17 00:00:00 2001 From: David Phillips Date: Mon, 4 May 2020 16:05:48 -0700 Subject: [PATCH 322/519] Document LIKE clause for SHOW COLUMNS --- presto-docs/src/main/sphinx/sql/show-columns.rst | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/presto-docs/src/main/sphinx/sql/show-columns.rst b/presto-docs/src/main/sphinx/sql/show-columns.rst index 898b233d796b..b2914aa6fcd0 100644 --- a/presto-docs/src/main/sphinx/sql/show-columns.rst +++ b/presto-docs/src/main/sphinx/sql/show-columns.rst @@ -7,9 +7,10 @@ Synopsis .. code-block:: none - SHOW COLUMNS FROM table + SHOW COLUMNS FROM table [ LIKE pattern ] Description ----------- List the columns in ``table`` along with their data type and other attributes. +The ``LIKE`` clause can be used to restrict the list of column names. From b09a619a2c701c08371c880044f7ade84da09958 Mon Sep 17 00:00:00 2001 From: David Phillips Date: Mon, 4 May 2020 15:50:30 -0700 Subject: [PATCH 323/519] Remove UseGCOverheadLimit which has no effect for G1 --- presto-docs/src/main/sphinx/installation/deployment.rst | 1 - 1 file changed, 1 deletion(-) diff --git a/presto-docs/src/main/sphinx/installation/deployment.rst b/presto-docs/src/main/sphinx/installation/deployment.rst index ae90fda0e9e8..d981c621ffaa 100644 --- a/presto-docs/src/main/sphinx/installation/deployment.rst +++ b/presto-docs/src/main/sphinx/installation/deployment.rst @@ -82,7 +82,6 @@ The following provides a good starting point for creating ``etc/jvm.config``: -XX:G1HeapRegionSize=32M -XX:+ExplicitGCInvokesConcurrent -XX:+ExitOnOutOfMemoryError - -XX:+UseGCOverheadLimit -XX:+HeapDumpOnOutOfMemoryError -XX:ReservedCodeCacheSize=512M -Djdk.attach.allowAttachSelf=true From 3c2ac9f9ca3d7c2b7da5b11027391c6c9b961b25 Mon Sep 17 00:00:00 2001 From: David Phillips Date: Mon, 4 May 2020 15:58:20 -0700 Subject: [PATCH 324/519] Remove legacy GC debug flags --- 
presto-docs/src/main/sphinx/admin/tuning.rst | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/presto-docs/src/main/sphinx/admin/tuning.rst b/presto-docs/src/main/sphinx/admin/tuning.rst index 6ad21cf39bf8..29b5530fc8a5 100644 --- a/presto-docs/src/main/sphinx/admin/tuning.rst +++ b/presto-docs/src/main/sphinx/admin/tuning.rst @@ -17,17 +17,4 @@ The following can be helpful for diagnosing garbage collection (GC) issues: .. code-block:: none - -XX:+PrintGCApplicationConcurrentTime - -XX:+PrintGCApplicationStoppedTime - -XX:+PrintGCCause - -XX:+PrintGCDateStamps - -XX:+PrintGCTimeStamps - -XX:+PrintGCDetails - -XX:+PrintReferenceGC - -XX:+PrintClassHistogramAfterFullGC - -XX:+PrintClassHistogramBeforeFullGC - -XX:PrintFLSStatistics=2 - -XX:+PrintAdaptiveSizePolicy - -XX:+PrintSafepointStatistics - -XX:PrintSafepointStatisticsCount=1 -Xlog:gc*,safepoint::time,level,tags,tid From f33ce931e637d7a71eab9537adb6745fccb2a0f0 Mon Sep 17 00:00:00 2001 From: David Phillips Date: Mon, 4 May 2020 17:11:08 -0700 Subject: [PATCH 325/519] Fix release note section naming and location --- presto-docs/src/main/sphinx/release/release-326.rst | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/presto-docs/src/main/sphinx/release/release-326.rst b/presto-docs/src/main/sphinx/release/release-326.rst index 4bf5aaf797f2..c991d44bdd02 100644 --- a/presto-docs/src/main/sphinx/release/release-326.rst +++ b/presto-docs/src/main/sphinx/release/release-326.rst @@ -13,6 +13,11 @@ General Changes ``ORDER BY`` clauses. (:issue:`2044`) * Improve performance when processing columns of ``map`` type. (:issue:`2015`) +Server RPM Changes +------------------ + +* Allow running Presto with Java 11 or newer. (:issue:`2057`) + Security Changes ---------------- @@ -27,11 +32,6 @@ Hive Changes * Improve performance for Glue metastore by fetching partitions in parallel. (:issue:`1465`) * Improve performance of ``sql-standard`` security. 
(:issue:`1922`, :issue:`1929`) -RPM Changes ------------ - -* Allow running Presto with Java 11 or newer. (:issue:`2057`) - Phoenix Connector Changes ------------------------- From 45bfd76147dbda2c4f7f51e0891f8fa894f2d6ba Mon Sep 17 00:00:00 2001 From: David Phillips Date: Mon, 4 May 2020 09:52:39 -0700 Subject: [PATCH 326/519] Add 333 release notes --- presto-docs/src/main/sphinx/release.rst | 1 + .../src/main/sphinx/release/release-333.rst | 96 +++++++++++++++++++ 2 files changed, 97 insertions(+) create mode 100644 presto-docs/src/main/sphinx/release/release-333.rst diff --git a/presto-docs/src/main/sphinx/release.rst b/presto-docs/src/main/sphinx/release.rst index 11ae43be1f1b..e5e62cbc0b8b 100644 --- a/presto-docs/src/main/sphinx/release.rst +++ b/presto-docs/src/main/sphinx/release.rst @@ -5,6 +5,7 @@ Release Notes .. toctree:: :maxdepth: 1 + release/release-333 release/release-332 release/release-331 release/release-330 diff --git a/presto-docs/src/main/sphinx/release/release-333.rst b/presto-docs/src/main/sphinx/release/release-333.rst new file mode 100644 index 000000000000..6e498cbaf5c2 --- /dev/null +++ b/presto-docs/src/main/sphinx/release/release-333.rst @@ -0,0 +1,96 @@ +========================= +Release 333 (04 May 2020) +========================= + +General Changes +--------------- + +* Fix planning failure when lambda expressions are repeated in a query. (:issue:`3218`) +* Fix failure when input to ``TRY`` is a constant ``NULL``. (:issue:`3408`) +* Fix failure for :doc:`/sql/show-create-table` for tables with + row types that contain special characters. (:issue:`3380`) +* Fix failure when using :func:`max_by` or :func:`min_by` + where the second argument is of type ``varchar``. (:issue:`3424`) +* Fix rare failure due to an invalid size estimation for T-Digests. (:issue:`3625`) +* Do not require coordinator to have spill paths setup when spill is enabled. (:issue:`3407`) +* Improve performance when dynamic filtering is enabled. 
(:issue:`3413`) +* Improve performance of queries involving constant scalar subqueries (:issue:`3432`) +* Allow overriding the count of available workers used for query cost + estimation via the ``cost_estimation_worker_count`` session property. (:issue:`2705`) +* Add data integrity verification for Presto internal communication. This can be configured + with the ``exchange.data-integrity-verification`` configuration property. (:issue:`3438`) +* Add support for ``LIKE`` predicate to :doc:`/sql/show-columns`. (:issue:`2997`) +* Add :doc:`/sql/show-create-schema`. (:issue:`3099`) +* Add :func:`starts_with` function. (:issue:`3392`) + +Server Changes +-------------- + +* Require running on Java 11 or above. (:issue:`2799`) + +Server RPM Changes +------------------ + +* Reduce size of RPM and disk usage after installation. (:issue:`3595`) + +Security Changes +---------------- + +* Allow configuring trust certificate for LDAP password authenticator. (:issue:`3523`) + +JDBC Driver Changes +------------------- + +* Fix hangs on JDK 8u252 when using secure connections. (:issue:`3444`) + +BigQuery Connector Changes +-------------------------- + +* Improve performance for queries that contain filters on table columns. (:issue:`3376`) +* Add support for partitioned tables. (:issue:`3376`) + +Cassandra Connector Changes +--------------------------- + +* Allow :doc:`/sql/insert` statement for table having hidden ``id`` column. (:issue:`3499`) +* Add support for :doc:`/sql/create-table` statement. (:issue:`3478`) + +Elasticsearch Connector Changes +------------------------------- + +* Fix failure when querying Elasticsearch 7.x clusters. (:issue:`3447`) + +Hive Connector Changes +---------------------- + +* Fix incorrect query results when reading Parquet data with a ``varchar`` column predicate + which is a comparison with a value containing non-ASCII characters. (:issue:`3517`) +* Ensure cleanup of resources (file descriptors, sockets, temporary files, etc.) 
+ when an error occurs while writing an ORC file. (:issue:`3390`) +* Generate multiple splits for files in bucketed tables. (:issue:`3455`) +* Make file system caching honor Hadoop properties from ``hive.config.resources``. (:issue:`3557`) +* Disallow enabling file system caching together with S3 security mapping or GCS access tokens. (:issue:`3571`) +* Disable file system caching parallel warmup by default. + It is currently broken and should not be enabled. (:issue:`3591`) +* Include metrics from S3 Select in the S3 JMX metrics. (:issue:`3429`) +* Report timings for request retries in S3 JMX metrics. + Previously, only the first request was reported. (:issue:`3429`) +* Add S3 JMX metric for client retry pause time (how long the thread was asleep + between request retries in the client itself). (:issue:`3429`) +* Add support for :doc:`/sql/show-create-schema`. (:issue:`3099`) +* Add ``hive.projection-pushdown-enabled`` configuration property and + ``projection_pushdown_enabled`` session property. (:issue:`3490`) +* Add support for connecting to the Thrift metastore using TLS. (:issue:`3440`) + +MongoDB Connector Changes +------------------------- + +* Skip unknown types in nested BSON object. (:issue:`2935`) +* Fix query failure when the user does not have access privileges for ``system.views``. (:issue:`3355`) + +Other Connector Changes +----------------------- + +These changes apply to the MemSQL, MySQL, PostgreSQL, Redshift, and SQL Server connectors. + +* Export JMX statistics for various connector operations. (:issue:`3479`). 
From f754f56d718de3ad1bc308cb1de1da93efc3d6b9 Mon Sep 17 00:00:00 2001 From: David Phillips Date: Mon, 4 May 2020 23:25:49 -0700 Subject: [PATCH 327/519] [maven-release-plugin] prepare release 333 --- pom.xml | 4 ++-- presto-accumulo/pom.xml | 2 +- presto-array/pom.xml | 2 +- presto-atop/pom.xml | 2 +- presto-base-jdbc/pom.xml | 2 +- presto-benchmark-driver/pom.xml | 2 +- presto-benchmark/pom.xml | 2 +- presto-benchto-benchmarks/pom.xml | 2 +- presto-bigquery/pom.xml | 2 +- presto-blackhole/pom.xml | 2 +- presto-cassandra/pom.xml | 2 +- presto-cli/pom.xml | 2 +- presto-client/pom.xml | 2 +- presto-docs/pom.xml | 2 +- presto-elasticsearch/pom.xml | 2 +- presto-example-http/pom.xml | 2 +- presto-geospatial-toolkit/pom.xml | 2 +- presto-geospatial/pom.xml | 2 +- presto-google-sheets/pom.xml | 2 +- presto-hive-hadoop2/pom.xml | 2 +- presto-hive/pom.xml | 2 +- presto-iceberg/pom.xml | 2 +- presto-jdbc/pom.xml | 2 +- presto-jmx/pom.xml | 2 +- presto-kafka/pom.xml | 2 +- presto-kinesis/pom.xml | 2 +- presto-kudu/pom.xml | 2 +- presto-local-file/pom.xml | 2 +- presto-main/pom.xml | 2 +- presto-matching/pom.xml | 2 +- presto-memory-context/pom.xml | 2 +- presto-memory/pom.xml | 2 +- presto-memsql/pom.xml | 2 +- presto-ml/pom.xml | 2 +- presto-mongodb/pom.xml | 2 +- presto-mysql/pom.xml | 2 +- presto-noop/pom.xml | 2 +- presto-orc/pom.xml | 2 +- presto-parquet/pom.xml | 2 +- presto-parser/pom.xml | 2 +- presto-password-authenticators/pom.xml | 2 +- presto-phoenix/pom.xml | 2 +- presto-plugin-toolkit/pom.xml | 2 +- presto-postgresql/pom.xml | 2 +- presto-product-tests-launcher/pom.xml | 2 +- presto-product-tests/pom.xml | 2 +- presto-proxy/pom.xml | 2 +- presto-raptor-legacy/pom.xml | 2 +- presto-rcfile/pom.xml | 2 +- presto-record-decoder/pom.xml | 2 +- presto-redis/pom.xml | 2 +- presto-redshift/pom.xml | 2 +- presto-resource-group-managers/pom.xml | 2 +- presto-server-rpm/pom.xml | 2 +- presto-server/pom.xml | 2 +- presto-session-property-managers/pom.xml | 2 +- 
presto-spi/pom.xml | 2 +- presto-sqlserver/pom.xml | 2 +- presto-teradata-functions/pom.xml | 2 +- presto-testing-server-launcher/pom.xml | 2 +- presto-testing/pom.xml | 2 +- presto-tests/pom.xml | 2 +- presto-thrift-api/pom.xml | 2 +- presto-thrift-testing-server/pom.xml | 2 +- presto-thrift/pom.xml | 2 +- presto-tpcds/pom.xml | 2 +- presto-tpch/pom.xml | 2 +- presto-verifier/pom.xml | 2 +- 68 files changed, 69 insertions(+), 69 deletions(-) diff --git a/pom.xml b/pom.xml index 776a8a50d087..a737c1db30e1 100644 --- a/pom.xml +++ b/pom.xml @@ -10,7 +10,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 pom presto-root @@ -30,7 +30,7 @@ scm:git:git://github.com/prestosql/presto.git https://github.com/prestosql/presto - HEAD + 333 diff --git a/presto-accumulo/pom.xml b/presto-accumulo/pom.xml index ffc539612319..86c9b6f41fa9 100644 --- a/presto-accumulo/pom.xml +++ b/presto-accumulo/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-accumulo diff --git a/presto-array/pom.xml b/presto-array/pom.xml index 97333c94addd..2d2942f23191 100644 --- a/presto-array/pom.xml +++ b/presto-array/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-array diff --git a/presto-atop/pom.xml b/presto-atop/pom.xml index 87b5d072886f..8d87d5af60c6 100644 --- a/presto-atop/pom.xml +++ b/presto-atop/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-atop diff --git a/presto-base-jdbc/pom.xml b/presto-base-jdbc/pom.xml index f6ade36ba376..41194cb38f5d 100644 --- a/presto-base-jdbc/pom.xml +++ b/presto-base-jdbc/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-base-jdbc diff --git a/presto-benchmark-driver/pom.xml b/presto-benchmark-driver/pom.xml index a1c87b16e427..0b5960ac93c8 100644 --- a/presto-benchmark-driver/pom.xml +++ b/presto-benchmark-driver/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-benchmark-driver diff --git a/presto-benchmark/pom.xml 
b/presto-benchmark/pom.xml index 3ac166609048..1ddbfdc15617 100644 --- a/presto-benchmark/pom.xml +++ b/presto-benchmark/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-benchmark diff --git a/presto-benchto-benchmarks/pom.xml b/presto-benchto-benchmarks/pom.xml index 8c987a2c8054..dfbc1b457b14 100644 --- a/presto-benchto-benchmarks/pom.xml +++ b/presto-benchto-benchmarks/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-benchto-benchmarks diff --git a/presto-bigquery/pom.xml b/presto-bigquery/pom.xml index 1269ebdba836..b0ac961bab79 100644 --- a/presto-bigquery/pom.xml +++ b/presto-bigquery/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-bigquery diff --git a/presto-blackhole/pom.xml b/presto-blackhole/pom.xml index 5017d4040f0c..44e7e648a269 100644 --- a/presto-blackhole/pom.xml +++ b/presto-blackhole/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-blackhole diff --git a/presto-cassandra/pom.xml b/presto-cassandra/pom.xml index eac005ef5eb4..969e9b805dae 100644 --- a/presto-cassandra/pom.xml +++ b/presto-cassandra/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-cassandra diff --git a/presto-cli/pom.xml b/presto-cli/pom.xml index 044e50fe4295..9a81735bd7a8 100644 --- a/presto-cli/pom.xml +++ b/presto-cli/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-cli diff --git a/presto-client/pom.xml b/presto-client/pom.xml index 2ba0cc7ee6dc..3e49712fcfce 100644 --- a/presto-client/pom.xml +++ b/presto-client/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-client diff --git a/presto-docs/pom.xml b/presto-docs/pom.xml index ea4a3fa4521e..652179f38b28 100644 --- a/presto-docs/pom.xml +++ b/presto-docs/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-docs diff --git a/presto-elasticsearch/pom.xml b/presto-elasticsearch/pom.xml index 
b592a90937c3..7507145ba503 100644 --- a/presto-elasticsearch/pom.xml +++ b/presto-elasticsearch/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-elasticsearch diff --git a/presto-example-http/pom.xml b/presto-example-http/pom.xml index 79115edc57ea..52be59853c35 100644 --- a/presto-example-http/pom.xml +++ b/presto-example-http/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-example-http diff --git a/presto-geospatial-toolkit/pom.xml b/presto-geospatial-toolkit/pom.xml index 0f6e9bcd0fe5..baee5127a6fd 100644 --- a/presto-geospatial-toolkit/pom.xml +++ b/presto-geospatial-toolkit/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-geospatial-toolkit diff --git a/presto-geospatial/pom.xml b/presto-geospatial/pom.xml index 197144d632aa..8f7131216726 100644 --- a/presto-geospatial/pom.xml +++ b/presto-geospatial/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-geospatial diff --git a/presto-google-sheets/pom.xml b/presto-google-sheets/pom.xml index 050e2c4b4df1..0fcf47c6cba2 100644 --- a/presto-google-sheets/pom.xml +++ b/presto-google-sheets/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-google-sheets diff --git a/presto-hive-hadoop2/pom.xml b/presto-hive-hadoop2/pom.xml index a306ce9d3b70..305ff3d74caf 100644 --- a/presto-hive-hadoop2/pom.xml +++ b/presto-hive-hadoop2/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-hive-hadoop2 diff --git a/presto-hive/pom.xml b/presto-hive/pom.xml index bd151d27b13c..8ed95c28740a 100644 --- a/presto-hive/pom.xml +++ b/presto-hive/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-hive diff --git a/presto-iceberg/pom.xml b/presto-iceberg/pom.xml index 3a87a043c491..657d1d2e2dbb 100644 --- a/presto-iceberg/pom.xml +++ b/presto-iceberg/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-iceberg diff --git 
a/presto-jdbc/pom.xml b/presto-jdbc/pom.xml index 57df0401916d..38627d78311c 100644 --- a/presto-jdbc/pom.xml +++ b/presto-jdbc/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-jdbc diff --git a/presto-jmx/pom.xml b/presto-jmx/pom.xml index 630f241187dc..69bc41f2aeca 100644 --- a/presto-jmx/pom.xml +++ b/presto-jmx/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-jmx diff --git a/presto-kafka/pom.xml b/presto-kafka/pom.xml index d3766a79de48..49960209ed93 100644 --- a/presto-kafka/pom.xml +++ b/presto-kafka/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-kafka diff --git a/presto-kinesis/pom.xml b/presto-kinesis/pom.xml index 7da27a5da949..78aa0e4cba66 100644 --- a/presto-kinesis/pom.xml +++ b/presto-kinesis/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-kinesis diff --git a/presto-kudu/pom.xml b/presto-kudu/pom.xml index 70e232a589bb..90bc0666b896 100644 --- a/presto-kudu/pom.xml +++ b/presto-kudu/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-kudu diff --git a/presto-local-file/pom.xml b/presto-local-file/pom.xml index 58412e6baf55..c72abfb8fe14 100644 --- a/presto-local-file/pom.xml +++ b/presto-local-file/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-local-file diff --git a/presto-main/pom.xml b/presto-main/pom.xml index 3c4d5839b7c6..43dcfa8cd1ea 100644 --- a/presto-main/pom.xml +++ b/presto-main/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-main diff --git a/presto-matching/pom.xml b/presto-matching/pom.xml index feb5d552f72f..cfc02618ab68 100644 --- a/presto-matching/pom.xml +++ b/presto-matching/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-matching diff --git a/presto-memory-context/pom.xml b/presto-memory-context/pom.xml index 0f5dd805df5e..a366afb99ec4 100644 --- a/presto-memory-context/pom.xml +++ 
b/presto-memory-context/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-memory-context diff --git a/presto-memory/pom.xml b/presto-memory/pom.xml index c4161556cbf2..8de3c8dee03d 100644 --- a/presto-memory/pom.xml +++ b/presto-memory/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-memory diff --git a/presto-memsql/pom.xml b/presto-memsql/pom.xml index 593b15089e52..30192a1b3c8c 100644 --- a/presto-memsql/pom.xml +++ b/presto-memsql/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-memsql diff --git a/presto-ml/pom.xml b/presto-ml/pom.xml index b66b8f4ddc74..ba140eb69240 100644 --- a/presto-ml/pom.xml +++ b/presto-ml/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-ml diff --git a/presto-mongodb/pom.xml b/presto-mongodb/pom.xml index f4ecf7d60ce2..8cad103d4f63 100644 --- a/presto-mongodb/pom.xml +++ b/presto-mongodb/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-mongodb diff --git a/presto-mysql/pom.xml b/presto-mysql/pom.xml index 30fca3f14aec..a7b53a30f215 100644 --- a/presto-mysql/pom.xml +++ b/presto-mysql/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-mysql diff --git a/presto-noop/pom.xml b/presto-noop/pom.xml index 181779fd9b79..1b146b32bc75 100644 --- a/presto-noop/pom.xml +++ b/presto-noop/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-noop diff --git a/presto-orc/pom.xml b/presto-orc/pom.xml index 0736a64d2229..02328be6845c 100644 --- a/presto-orc/pom.xml +++ b/presto-orc/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-orc diff --git a/presto-parquet/pom.xml b/presto-parquet/pom.xml index 8d6f4b2c8f15..802a8d007246 100644 --- a/presto-parquet/pom.xml +++ b/presto-parquet/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-parquet diff --git a/presto-parser/pom.xml b/presto-parser/pom.xml index 
1bdc59664d82..4d608dece356 100644 --- a/presto-parser/pom.xml +++ b/presto-parser/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-parser diff --git a/presto-password-authenticators/pom.xml b/presto-password-authenticators/pom.xml index a9d80217e7ee..7e027389da9e 100644 --- a/presto-password-authenticators/pom.xml +++ b/presto-password-authenticators/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-password-authenticators diff --git a/presto-phoenix/pom.xml b/presto-phoenix/pom.xml index 6f3dc9229e4d..72efbb83d333 100644 --- a/presto-phoenix/pom.xml +++ b/presto-phoenix/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-phoenix diff --git a/presto-plugin-toolkit/pom.xml b/presto-plugin-toolkit/pom.xml index a824badef735..46a2d318dd1a 100644 --- a/presto-plugin-toolkit/pom.xml +++ b/presto-plugin-toolkit/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-plugin-toolkit diff --git a/presto-postgresql/pom.xml b/presto-postgresql/pom.xml index 0fb2f70654e3..c9de22a18794 100644 --- a/presto-postgresql/pom.xml +++ b/presto-postgresql/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-postgresql diff --git a/presto-product-tests-launcher/pom.xml b/presto-product-tests-launcher/pom.xml index 9897bf936b09..cd25845e1900 100644 --- a/presto-product-tests-launcher/pom.xml +++ b/presto-product-tests-launcher/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-product-tests-launcher diff --git a/presto-product-tests/pom.xml b/presto-product-tests/pom.xml index 0f44eb1b00f6..d4320ee6376d 100644 --- a/presto-product-tests/pom.xml +++ b/presto-product-tests/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-product-tests diff --git a/presto-proxy/pom.xml b/presto-proxy/pom.xml index c2090e2b939e..60e55808bf2c 100644 --- a/presto-proxy/pom.xml +++ b/presto-proxy/pom.xml @@ -5,7 +5,7 @@ io.prestosql 
presto-root - 333-SNAPSHOT + 333 presto-proxy diff --git a/presto-raptor-legacy/pom.xml b/presto-raptor-legacy/pom.xml index daf7976b844f..8b0c99c0dd1d 100644 --- a/presto-raptor-legacy/pom.xml +++ b/presto-raptor-legacy/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-raptor-legacy diff --git a/presto-rcfile/pom.xml b/presto-rcfile/pom.xml index a4036438e062..cd5a148a897d 100644 --- a/presto-rcfile/pom.xml +++ b/presto-rcfile/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-rcfile diff --git a/presto-record-decoder/pom.xml b/presto-record-decoder/pom.xml index 5cce08d0a4e4..eac9582b6a15 100644 --- a/presto-record-decoder/pom.xml +++ b/presto-record-decoder/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-record-decoder diff --git a/presto-redis/pom.xml b/presto-redis/pom.xml index 14a05f135e03..4a60864838b1 100644 --- a/presto-redis/pom.xml +++ b/presto-redis/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-redis diff --git a/presto-redshift/pom.xml b/presto-redshift/pom.xml index 71106cbb9d21..604f0f73c79d 100644 --- a/presto-redshift/pom.xml +++ b/presto-redshift/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-redshift diff --git a/presto-resource-group-managers/pom.xml b/presto-resource-group-managers/pom.xml index d1f0240178a6..d344af99804f 100644 --- a/presto-resource-group-managers/pom.xml +++ b/presto-resource-group-managers/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-resource-group-managers diff --git a/presto-server-rpm/pom.xml b/presto-server-rpm/pom.xml index 9c4553220f0b..2a29301870c5 100644 --- a/presto-server-rpm/pom.xml +++ b/presto-server-rpm/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-server-rpm diff --git a/presto-server/pom.xml b/presto-server/pom.xml index b3c41fb09cb1..1b995e3bc4d5 100644 --- a/presto-server/pom.xml +++ 
b/presto-server/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-server diff --git a/presto-session-property-managers/pom.xml b/presto-session-property-managers/pom.xml index 2e47115baf33..087947656095 100644 --- a/presto-session-property-managers/pom.xml +++ b/presto-session-property-managers/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-session-property-managers diff --git a/presto-spi/pom.xml b/presto-spi/pom.xml index 51cbcfb7bd88..7358fc331dbd 100644 --- a/presto-spi/pom.xml +++ b/presto-spi/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-spi diff --git a/presto-sqlserver/pom.xml b/presto-sqlserver/pom.xml index dd2fdfce648d..72a4cf425581 100644 --- a/presto-sqlserver/pom.xml +++ b/presto-sqlserver/pom.xml @@ -3,7 +3,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 4.0.0 diff --git a/presto-teradata-functions/pom.xml b/presto-teradata-functions/pom.xml index cd926748588a..a6310fd5f1e5 100644 --- a/presto-teradata-functions/pom.xml +++ b/presto-teradata-functions/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-teradata-functions diff --git a/presto-testing-server-launcher/pom.xml b/presto-testing-server-launcher/pom.xml index f927f4e78bd7..6cbecfa31eb7 100644 --- a/presto-testing-server-launcher/pom.xml +++ b/presto-testing-server-launcher/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-testing-server-launcher diff --git a/presto-testing/pom.xml b/presto-testing/pom.xml index 765ed50c7cc9..178ecbaff970 100644 --- a/presto-testing/pom.xml +++ b/presto-testing/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-testing diff --git a/presto-tests/pom.xml b/presto-tests/pom.xml index 6b06053c9873..9d3d56b67318 100644 --- a/presto-tests/pom.xml +++ b/presto-tests/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-tests diff --git a/presto-thrift-api/pom.xml 
b/presto-thrift-api/pom.xml index 85da30b38cd3..5b3de85b44b1 100644 --- a/presto-thrift-api/pom.xml +++ b/presto-thrift-api/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-thrift-api diff --git a/presto-thrift-testing-server/pom.xml b/presto-thrift-testing-server/pom.xml index e9618bd6374e..131095f4b717 100644 --- a/presto-thrift-testing-server/pom.xml +++ b/presto-thrift-testing-server/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-thrift-testing-server diff --git a/presto-thrift/pom.xml b/presto-thrift/pom.xml index f28a1593afc2..e503e55758c1 100644 --- a/presto-thrift/pom.xml +++ b/presto-thrift/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-thrift diff --git a/presto-tpcds/pom.xml b/presto-tpcds/pom.xml index 68796cad6a46..5100c3f09c7a 100644 --- a/presto-tpcds/pom.xml +++ b/presto-tpcds/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-tpcds diff --git a/presto-tpch/pom.xml b/presto-tpch/pom.xml index 748176a6fa7b..c0762af7fa2f 100644 --- a/presto-tpch/pom.xml +++ b/presto-tpch/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-tpch diff --git a/presto-verifier/pom.xml b/presto-verifier/pom.xml index cf8c3f8345eb..ce7313c3a07a 100644 --- a/presto-verifier/pom.xml +++ b/presto-verifier/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333-SNAPSHOT + 333 presto-verifier From 40ce327b5dd068ba8590c1ffc45c980a1eafbb7c Mon Sep 17 00:00:00 2001 From: David Phillips Date: Mon, 4 May 2020 23:25:50 -0700 Subject: [PATCH 328/519] [maven-release-plugin] prepare for next development iteration --- pom.xml | 4 ++-- presto-accumulo/pom.xml | 2 +- presto-array/pom.xml | 2 +- presto-atop/pom.xml | 2 +- presto-base-jdbc/pom.xml | 2 +- presto-benchmark-driver/pom.xml | 2 +- presto-benchmark/pom.xml | 2 +- presto-benchto-benchmarks/pom.xml | 2 +- presto-bigquery/pom.xml | 2 +- presto-blackhole/pom.xml | 2 +- presto-cassandra/pom.xml | 2 +- 
presto-cli/pom.xml | 2 +- presto-client/pom.xml | 2 +- presto-docs/pom.xml | 2 +- presto-elasticsearch/pom.xml | 2 +- presto-example-http/pom.xml | 2 +- presto-geospatial-toolkit/pom.xml | 2 +- presto-geospatial/pom.xml | 2 +- presto-google-sheets/pom.xml | 2 +- presto-hive-hadoop2/pom.xml | 2 +- presto-hive/pom.xml | 2 +- presto-iceberg/pom.xml | 2 +- presto-jdbc/pom.xml | 2 +- presto-jmx/pom.xml | 2 +- presto-kafka/pom.xml | 2 +- presto-kinesis/pom.xml | 2 +- presto-kudu/pom.xml | 2 +- presto-local-file/pom.xml | 2 +- presto-main/pom.xml | 2 +- presto-matching/pom.xml | 2 +- presto-memory-context/pom.xml | 2 +- presto-memory/pom.xml | 2 +- presto-memsql/pom.xml | 2 +- presto-ml/pom.xml | 2 +- presto-mongodb/pom.xml | 2 +- presto-mysql/pom.xml | 2 +- presto-noop/pom.xml | 2 +- presto-orc/pom.xml | 2 +- presto-parquet/pom.xml | 2 +- presto-parser/pom.xml | 2 +- presto-password-authenticators/pom.xml | 2 +- presto-phoenix/pom.xml | 2 +- presto-plugin-toolkit/pom.xml | 2 +- presto-postgresql/pom.xml | 2 +- presto-product-tests-launcher/pom.xml | 2 +- presto-product-tests/pom.xml | 2 +- presto-proxy/pom.xml | 2 +- presto-raptor-legacy/pom.xml | 2 +- presto-rcfile/pom.xml | 2 +- presto-record-decoder/pom.xml | 2 +- presto-redis/pom.xml | 2 +- presto-redshift/pom.xml | 2 +- presto-resource-group-managers/pom.xml | 2 +- presto-server-rpm/pom.xml | 2 +- presto-server/pom.xml | 2 +- presto-session-property-managers/pom.xml | 2 +- presto-spi/pom.xml | 2 +- presto-sqlserver/pom.xml | 2 +- presto-teradata-functions/pom.xml | 2 +- presto-testing-server-launcher/pom.xml | 2 +- presto-testing/pom.xml | 2 +- presto-tests/pom.xml | 2 +- presto-thrift-api/pom.xml | 2 +- presto-thrift-testing-server/pom.xml | 2 +- presto-thrift/pom.xml | 2 +- presto-tpcds/pom.xml | 2 +- presto-tpch/pom.xml | 2 +- presto-verifier/pom.xml | 2 +- 68 files changed, 69 insertions(+), 69 deletions(-) diff --git a/pom.xml b/pom.xml index a737c1db30e1..b012ebd91ebb 100644 --- a/pom.xml +++ b/pom.xml @@ 
-10,7 +10,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT pom presto-root @@ -30,7 +30,7 @@ scm:git:git://github.com/prestosql/presto.git https://github.com/prestosql/presto - 333 + HEAD diff --git a/presto-accumulo/pom.xml b/presto-accumulo/pom.xml index 86c9b6f41fa9..7b6ed29f8772 100644 --- a/presto-accumulo/pom.xml +++ b/presto-accumulo/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-accumulo diff --git a/presto-array/pom.xml b/presto-array/pom.xml index 2d2942f23191..371ff4c7abab 100644 --- a/presto-array/pom.xml +++ b/presto-array/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-array diff --git a/presto-atop/pom.xml b/presto-atop/pom.xml index 8d87d5af60c6..12a03cdbcb92 100644 --- a/presto-atop/pom.xml +++ b/presto-atop/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-atop diff --git a/presto-base-jdbc/pom.xml b/presto-base-jdbc/pom.xml index 41194cb38f5d..46030480ac61 100644 --- a/presto-base-jdbc/pom.xml +++ b/presto-base-jdbc/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-base-jdbc diff --git a/presto-benchmark-driver/pom.xml b/presto-benchmark-driver/pom.xml index 0b5960ac93c8..3a522fef8742 100644 --- a/presto-benchmark-driver/pom.xml +++ b/presto-benchmark-driver/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-benchmark-driver diff --git a/presto-benchmark/pom.xml b/presto-benchmark/pom.xml index 1ddbfdc15617..ae9b30c101e1 100644 --- a/presto-benchmark/pom.xml +++ b/presto-benchmark/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-benchmark diff --git a/presto-benchto-benchmarks/pom.xml b/presto-benchto-benchmarks/pom.xml index dfbc1b457b14..31883a0ab578 100644 --- a/presto-benchto-benchmarks/pom.xml +++ b/presto-benchto-benchmarks/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-benchto-benchmarks diff --git a/presto-bigquery/pom.xml 
b/presto-bigquery/pom.xml index b0ac961bab79..06b0ae6b59ca 100644 --- a/presto-bigquery/pom.xml +++ b/presto-bigquery/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-bigquery diff --git a/presto-blackhole/pom.xml b/presto-blackhole/pom.xml index 44e7e648a269..0cd004aa3646 100644 --- a/presto-blackhole/pom.xml +++ b/presto-blackhole/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-blackhole diff --git a/presto-cassandra/pom.xml b/presto-cassandra/pom.xml index 969e9b805dae..b41d000d3963 100644 --- a/presto-cassandra/pom.xml +++ b/presto-cassandra/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-cassandra diff --git a/presto-cli/pom.xml b/presto-cli/pom.xml index 9a81735bd7a8..2bc73a5e7659 100644 --- a/presto-cli/pom.xml +++ b/presto-cli/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-cli diff --git a/presto-client/pom.xml b/presto-client/pom.xml index 3e49712fcfce..6c3dd358debf 100644 --- a/presto-client/pom.xml +++ b/presto-client/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-client diff --git a/presto-docs/pom.xml b/presto-docs/pom.xml index 652179f38b28..98cdd75b1a70 100644 --- a/presto-docs/pom.xml +++ b/presto-docs/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-docs diff --git a/presto-elasticsearch/pom.xml b/presto-elasticsearch/pom.xml index 7507145ba503..46d8720379e9 100644 --- a/presto-elasticsearch/pom.xml +++ b/presto-elasticsearch/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-elasticsearch diff --git a/presto-example-http/pom.xml b/presto-example-http/pom.xml index 52be59853c35..db43575c0f42 100644 --- a/presto-example-http/pom.xml +++ b/presto-example-http/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-example-http diff --git a/presto-geospatial-toolkit/pom.xml b/presto-geospatial-toolkit/pom.xml index 
baee5127a6fd..f639e502d756 100644 --- a/presto-geospatial-toolkit/pom.xml +++ b/presto-geospatial-toolkit/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-geospatial-toolkit diff --git a/presto-geospatial/pom.xml b/presto-geospatial/pom.xml index 8f7131216726..3cea3faf27a1 100644 --- a/presto-geospatial/pom.xml +++ b/presto-geospatial/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-geospatial diff --git a/presto-google-sheets/pom.xml b/presto-google-sheets/pom.xml index 0fcf47c6cba2..4c4924be9933 100644 --- a/presto-google-sheets/pom.xml +++ b/presto-google-sheets/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-google-sheets diff --git a/presto-hive-hadoop2/pom.xml b/presto-hive-hadoop2/pom.xml index 305ff3d74caf..1f7b046eaada 100644 --- a/presto-hive-hadoop2/pom.xml +++ b/presto-hive-hadoop2/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-hive-hadoop2 diff --git a/presto-hive/pom.xml b/presto-hive/pom.xml index 8ed95c28740a..c99f00b6f21f 100644 --- a/presto-hive/pom.xml +++ b/presto-hive/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-hive diff --git a/presto-iceberg/pom.xml b/presto-iceberg/pom.xml index 657d1d2e2dbb..2b6e2069b5db 100644 --- a/presto-iceberg/pom.xml +++ b/presto-iceberg/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-iceberg diff --git a/presto-jdbc/pom.xml b/presto-jdbc/pom.xml index 38627d78311c..1b4d54d958e7 100644 --- a/presto-jdbc/pom.xml +++ b/presto-jdbc/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-jdbc diff --git a/presto-jmx/pom.xml b/presto-jmx/pom.xml index 69bc41f2aeca..cb1bfa92b589 100644 --- a/presto-jmx/pom.xml +++ b/presto-jmx/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-jmx diff --git a/presto-kafka/pom.xml b/presto-kafka/pom.xml index 49960209ed93..148a6fe4ad4a 100644 --- a/presto-kafka/pom.xml 
+++ b/presto-kafka/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-kafka diff --git a/presto-kinesis/pom.xml b/presto-kinesis/pom.xml index 78aa0e4cba66..9995f7310a0a 100644 --- a/presto-kinesis/pom.xml +++ b/presto-kinesis/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-kinesis diff --git a/presto-kudu/pom.xml b/presto-kudu/pom.xml index 90bc0666b896..5f675321610e 100644 --- a/presto-kudu/pom.xml +++ b/presto-kudu/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-kudu diff --git a/presto-local-file/pom.xml b/presto-local-file/pom.xml index c72abfb8fe14..7509dda0ca7a 100644 --- a/presto-local-file/pom.xml +++ b/presto-local-file/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-local-file diff --git a/presto-main/pom.xml b/presto-main/pom.xml index 43dcfa8cd1ea..4e953a8ea5d9 100644 --- a/presto-main/pom.xml +++ b/presto-main/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-main diff --git a/presto-matching/pom.xml b/presto-matching/pom.xml index cfc02618ab68..db8db21129a3 100644 --- a/presto-matching/pom.xml +++ b/presto-matching/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-matching diff --git a/presto-memory-context/pom.xml b/presto-memory-context/pom.xml index a366afb99ec4..41f9fcca65a7 100644 --- a/presto-memory-context/pom.xml +++ b/presto-memory-context/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-memory-context diff --git a/presto-memory/pom.xml b/presto-memory/pom.xml index 8de3c8dee03d..f8ee3eda01f4 100644 --- a/presto-memory/pom.xml +++ b/presto-memory/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-memory diff --git a/presto-memsql/pom.xml b/presto-memsql/pom.xml index 30192a1b3c8c..e228610952b0 100644 --- a/presto-memsql/pom.xml +++ b/presto-memsql/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT 
presto-memsql diff --git a/presto-ml/pom.xml b/presto-ml/pom.xml index ba140eb69240..73053fe27e24 100644 --- a/presto-ml/pom.xml +++ b/presto-ml/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-ml diff --git a/presto-mongodb/pom.xml b/presto-mongodb/pom.xml index 8cad103d4f63..f1ddfd40d1a7 100644 --- a/presto-mongodb/pom.xml +++ b/presto-mongodb/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-mongodb diff --git a/presto-mysql/pom.xml b/presto-mysql/pom.xml index a7b53a30f215..cc3d20f5e3db 100644 --- a/presto-mysql/pom.xml +++ b/presto-mysql/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-mysql diff --git a/presto-noop/pom.xml b/presto-noop/pom.xml index 1b146b32bc75..ae8f6504bfe0 100644 --- a/presto-noop/pom.xml +++ b/presto-noop/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-noop diff --git a/presto-orc/pom.xml b/presto-orc/pom.xml index 02328be6845c..5d2d648717da 100644 --- a/presto-orc/pom.xml +++ b/presto-orc/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-orc diff --git a/presto-parquet/pom.xml b/presto-parquet/pom.xml index 802a8d007246..fa26f287b32e 100644 --- a/presto-parquet/pom.xml +++ b/presto-parquet/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-parquet diff --git a/presto-parser/pom.xml b/presto-parser/pom.xml index 4d608dece356..e691ae980155 100644 --- a/presto-parser/pom.xml +++ b/presto-parser/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-parser diff --git a/presto-password-authenticators/pom.xml b/presto-password-authenticators/pom.xml index 7e027389da9e..13ebb4670418 100644 --- a/presto-password-authenticators/pom.xml +++ b/presto-password-authenticators/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-password-authenticators diff --git a/presto-phoenix/pom.xml b/presto-phoenix/pom.xml index 
72efbb83d333..aaca6471306e 100644 --- a/presto-phoenix/pom.xml +++ b/presto-phoenix/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-phoenix diff --git a/presto-plugin-toolkit/pom.xml b/presto-plugin-toolkit/pom.xml index 46a2d318dd1a..672de4caff9e 100644 --- a/presto-plugin-toolkit/pom.xml +++ b/presto-plugin-toolkit/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-plugin-toolkit diff --git a/presto-postgresql/pom.xml b/presto-postgresql/pom.xml index c9de22a18794..7988f6d1aa65 100644 --- a/presto-postgresql/pom.xml +++ b/presto-postgresql/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-postgresql diff --git a/presto-product-tests-launcher/pom.xml b/presto-product-tests-launcher/pom.xml index cd25845e1900..cfc023d3457d 100644 --- a/presto-product-tests-launcher/pom.xml +++ b/presto-product-tests-launcher/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-product-tests-launcher diff --git a/presto-product-tests/pom.xml b/presto-product-tests/pom.xml index d4320ee6376d..24c5e565f5d0 100644 --- a/presto-product-tests/pom.xml +++ b/presto-product-tests/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-product-tests diff --git a/presto-proxy/pom.xml b/presto-proxy/pom.xml index 60e55808bf2c..5bf104a7d139 100644 --- a/presto-proxy/pom.xml +++ b/presto-proxy/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-proxy diff --git a/presto-raptor-legacy/pom.xml b/presto-raptor-legacy/pom.xml index 8b0c99c0dd1d..1d66d1a97d67 100644 --- a/presto-raptor-legacy/pom.xml +++ b/presto-raptor-legacy/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-raptor-legacy diff --git a/presto-rcfile/pom.xml b/presto-rcfile/pom.xml index cd5a148a897d..a5aae363e7e7 100644 --- a/presto-rcfile/pom.xml +++ b/presto-rcfile/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-rcfile diff 
--git a/presto-record-decoder/pom.xml b/presto-record-decoder/pom.xml index eac9582b6a15..9afd1aaaac5f 100644 --- a/presto-record-decoder/pom.xml +++ b/presto-record-decoder/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-record-decoder diff --git a/presto-redis/pom.xml b/presto-redis/pom.xml index 4a60864838b1..29ecd35b5446 100644 --- a/presto-redis/pom.xml +++ b/presto-redis/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-redis diff --git a/presto-redshift/pom.xml b/presto-redshift/pom.xml index 604f0f73c79d..52ae5faca804 100644 --- a/presto-redshift/pom.xml +++ b/presto-redshift/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-redshift diff --git a/presto-resource-group-managers/pom.xml b/presto-resource-group-managers/pom.xml index d344af99804f..6feb5d63d64f 100644 --- a/presto-resource-group-managers/pom.xml +++ b/presto-resource-group-managers/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-resource-group-managers diff --git a/presto-server-rpm/pom.xml b/presto-server-rpm/pom.xml index 2a29301870c5..8bbeabdebf5c 100644 --- a/presto-server-rpm/pom.xml +++ b/presto-server-rpm/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-server-rpm diff --git a/presto-server/pom.xml b/presto-server/pom.xml index 1b995e3bc4d5..f3c5a408b457 100644 --- a/presto-server/pom.xml +++ b/presto-server/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-server diff --git a/presto-session-property-managers/pom.xml b/presto-session-property-managers/pom.xml index 087947656095..16a6c00aaf2b 100644 --- a/presto-session-property-managers/pom.xml +++ b/presto-session-property-managers/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-session-property-managers diff --git a/presto-spi/pom.xml b/presto-spi/pom.xml index 7358fc331dbd..da0e43550ff5 100644 --- a/presto-spi/pom.xml +++ b/presto-spi/pom.xml 
@@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-spi diff --git a/presto-sqlserver/pom.xml b/presto-sqlserver/pom.xml index 72a4cf425581..de06910a9b02 100644 --- a/presto-sqlserver/pom.xml +++ b/presto-sqlserver/pom.xml @@ -3,7 +3,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT 4.0.0 diff --git a/presto-teradata-functions/pom.xml b/presto-teradata-functions/pom.xml index a6310fd5f1e5..78f116e0461f 100644 --- a/presto-teradata-functions/pom.xml +++ b/presto-teradata-functions/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-teradata-functions diff --git a/presto-testing-server-launcher/pom.xml b/presto-testing-server-launcher/pom.xml index 6cbecfa31eb7..ff8d342595bd 100644 --- a/presto-testing-server-launcher/pom.xml +++ b/presto-testing-server-launcher/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-testing-server-launcher diff --git a/presto-testing/pom.xml b/presto-testing/pom.xml index 178ecbaff970..531a152e47b3 100644 --- a/presto-testing/pom.xml +++ b/presto-testing/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-testing diff --git a/presto-tests/pom.xml b/presto-tests/pom.xml index 9d3d56b67318..59663b490141 100644 --- a/presto-tests/pom.xml +++ b/presto-tests/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-tests diff --git a/presto-thrift-api/pom.xml b/presto-thrift-api/pom.xml index 5b3de85b44b1..a8dd0389b494 100644 --- a/presto-thrift-api/pom.xml +++ b/presto-thrift-api/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-thrift-api diff --git a/presto-thrift-testing-server/pom.xml b/presto-thrift-testing-server/pom.xml index 131095f4b717..6ee0439296a3 100644 --- a/presto-thrift-testing-server/pom.xml +++ b/presto-thrift-testing-server/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-thrift-testing-server diff --git a/presto-thrift/pom.xml b/presto-thrift/pom.xml index 
e503e55758c1..19fb2ddf3f30 100644 --- a/presto-thrift/pom.xml +++ b/presto-thrift/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-thrift diff --git a/presto-tpcds/pom.xml b/presto-tpcds/pom.xml index 5100c3f09c7a..cc3d246064e1 100644 --- a/presto-tpcds/pom.xml +++ b/presto-tpcds/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-tpcds diff --git a/presto-tpch/pom.xml b/presto-tpch/pom.xml index c0762af7fa2f..7a5151882b9d 100644 --- a/presto-tpch/pom.xml +++ b/presto-tpch/pom.xml @@ -4,7 +4,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-tpch diff --git a/presto-verifier/pom.xml b/presto-verifier/pom.xml index ce7313c3a07a..81d52a12ea92 100644 --- a/presto-verifier/pom.xml +++ b/presto-verifier/pom.xml @@ -5,7 +5,7 @@ io.prestosql presto-root - 333 + 334-SNAPSHOT presto-verifier From e76f642e9535c8c2dbbb305dd4286421df6cd3bc Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Fri, 1 May 2020 00:47:05 +0200 Subject: [PATCH 329/519] Remove redundant declaration --- .../src/main/java/io/prestosql/operator/ExchangeOperator.java | 3 +-- .../src/main/java/io/prestosql/operator/MergeOperator.java | 3 +-- .../java/io/prestosql/operator/NestedLoopJoinOperator.java | 3 +-- .../main/java/io/prestosql/operator/PageSourceOperator.java | 3 +-- .../src/main/java/io/prestosql/operator/TableScanOperator.java | 3 +-- .../src/test/java/io/prestosql/operator/TestDriver.java | 3 +-- 6 files changed, 6 insertions(+), 12 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/operator/ExchangeOperator.java b/presto-main/src/main/java/io/prestosql/operator/ExchangeOperator.java index c302355c05de..238c659a5930 100644 --- a/presto-main/src/main/java/io/prestosql/operator/ExchangeOperator.java +++ b/presto-main/src/main/java/io/prestosql/operator/ExchangeOperator.java @@ -24,7 +24,6 @@ import io.prestosql.split.RemoteSplit; import io.prestosql.sql.planner.plan.PlanNodeId; -import java.io.Closeable; import java.net.URI; 
import java.util.Optional; import java.util.function.Supplier; @@ -34,7 +33,7 @@ import static java.util.Objects.requireNonNull; public class ExchangeOperator - implements SourceOperator, Closeable + implements SourceOperator { public static final CatalogName REMOTE_CONNECTOR_ID = new CatalogName("$remote"); diff --git a/presto-main/src/main/java/io/prestosql/operator/MergeOperator.java b/presto-main/src/main/java/io/prestosql/operator/MergeOperator.java index 401d5b40523c..9619c12f4d11 100644 --- a/presto-main/src/main/java/io/prestosql/operator/MergeOperator.java +++ b/presto-main/src/main/java/io/prestosql/operator/MergeOperator.java @@ -27,7 +27,6 @@ import io.prestosql.sql.gen.OrderingCompiler; import io.prestosql.sql.planner.plan.PlanNodeId; -import java.io.Closeable; import java.io.IOException; import java.io.UncheckedIOException; import java.net.URI; @@ -43,7 +42,7 @@ import static java.util.Objects.requireNonNull; public class MergeOperator - implements SourceOperator, Closeable + implements SourceOperator { public static class MergeOperatorFactory implements SourceOperatorFactory diff --git a/presto-main/src/main/java/io/prestosql/operator/NestedLoopJoinOperator.java b/presto-main/src/main/java/io/prestosql/operator/NestedLoopJoinOperator.java index fda88bc72191..c71fa24ff6c3 100644 --- a/presto-main/src/main/java/io/prestosql/operator/NestedLoopJoinOperator.java +++ b/presto-main/src/main/java/io/prestosql/operator/NestedLoopJoinOperator.java @@ -22,7 +22,6 @@ import io.prestosql.spi.block.RunLengthEncodedBlock; import io.prestosql.sql.planner.plan.PlanNodeId; -import java.io.Closeable; import java.util.Iterator; import java.util.List; import java.util.NoSuchElementException; @@ -35,7 +34,7 @@ import static java.util.Objects.requireNonNull; public class NestedLoopJoinOperator - implements Operator, Closeable + implements Operator { public static class NestedLoopJoinOperatorFactory implements OperatorFactory diff --git 
a/presto-main/src/main/java/io/prestosql/operator/PageSourceOperator.java b/presto-main/src/main/java/io/prestosql/operator/PageSourceOperator.java index 2abfc9b3d3b0..1d1f85770ec6 100644 --- a/presto-main/src/main/java/io/prestosql/operator/PageSourceOperator.java +++ b/presto-main/src/main/java/io/prestosql/operator/PageSourceOperator.java @@ -17,7 +17,6 @@ import io.prestosql.spi.Page; import io.prestosql.spi.connector.ConnectorPageSource; -import java.io.Closeable; import java.io.IOException; import java.io.UncheckedIOException; import java.util.concurrent.CompletableFuture; @@ -26,7 +25,7 @@ import static java.util.Objects.requireNonNull; public class PageSourceOperator - implements Operator, Closeable + implements Operator { private final ConnectorPageSource pageSource; private final OperatorContext operatorContext; diff --git a/presto-main/src/main/java/io/prestosql/operator/TableScanOperator.java b/presto-main/src/main/java/io/prestosql/operator/TableScanOperator.java index 8a8522ff5e83..a350e2e1c199 100644 --- a/presto-main/src/main/java/io/prestosql/operator/TableScanOperator.java +++ b/presto-main/src/main/java/io/prestosql/operator/TableScanOperator.java @@ -33,7 +33,6 @@ import javax.annotation.Nullable; -import java.io.Closeable; import java.io.IOException; import java.io.UncheckedIOException; import java.util.List; @@ -46,7 +45,7 @@ import static java.util.Objects.requireNonNull; public class TableScanOperator - implements SourceOperator, Closeable + implements SourceOperator { public static class TableScanOperatorFactory implements SourceOperatorFactory, WorkProcessorSourceOperatorFactory diff --git a/presto-main/src/test/java/io/prestosql/operator/TestDriver.java b/presto-main/src/test/java/io/prestosql/operator/TestDriver.java index a3d52275bff5..9e7db5742b42 100644 --- a/presto-main/src/test/java/io/prestosql/operator/TestDriver.java +++ b/presto-main/src/test/java/io/prestosql/operator/TestDriver.java @@ -42,7 +42,6 @@ import 
org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import java.io.Closeable; import java.util.List; import java.util.concurrent.Callable; import java.util.concurrent.CountDownLatch; @@ -351,7 +350,7 @@ private PageConsumerOperator createSinkOperator(List types) } private static class BrokenOperator - implements Operator, Closeable + implements Operator { private final OperatorContext operatorContext; private final ReentrantLock lock = new ReentrantLock(); From cd7c3da6147d11b47112554fd6b17e6fceb4ea9f Mon Sep 17 00:00:00 2001 From: Yuya Ebihara Date: Sat, 2 May 2020 23:34:53 +0900 Subject: [PATCH 330/519] Make table wider in MongoDB document --- .../src/main/sphinx/connector/mongodb.rst | 36 +++++++++---------- 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/presto-docs/src/main/sphinx/connector/mongodb.rst b/presto-docs/src/main/sphinx/connector/mongodb.rst index f35a8bc6d4d7..0687608aa7fd 100644 --- a/presto-docs/src/main/sphinx/connector/mongodb.rst +++ b/presto-docs/src/main/sphinx/connector/mongodb.rst @@ -34,24 +34,24 @@ Configuration Properties The following configuration properties are available: -===================================== ============================================================== -Property Name Description -===================================== ============================================================== -``mongodb.seeds`` List of all MongoDB servers -``mongodb.schema-collection`` A collection which contains schema information -``mongodb.credentials`` List of credentials -``mongodb.min-connections-per-host`` The minimum size of the connection pool per host -``mongodb.connections-per-host`` The maximum size of the connection pool per host -``mongodb.max-wait-time`` The maximum wait time -``mongodb.connection-timeout`` The socket connect timeout -``mongodb.socket-timeout`` The socket timeout -``mongodb.socket-keep-alive`` Whether keep-alive is enabled on each socket -``mongodb.ssl.enabled`` Use TLS/SSL 
for connections to mongod/mongos -``mongodb.read-preference`` The read preference -``mongodb.write-concern`` The write concern -``mongodb.required-replica-set`` The required replica set name -``mongodb.cursor-batch-size`` The number of elements to return in a batch -===================================== ============================================================== +========================================== ============================================================== +Property Name Description +========================================== ============================================================== +``mongodb.seeds`` List of all MongoDB servers +``mongodb.schema-collection`` A collection which contains schema information +``mongodb.credentials`` List of credentials +``mongodb.min-connections-per-host`` The minimum size of the connection pool per host +``mongodb.connections-per-host`` The maximum size of the connection pool per host +``mongodb.max-wait-time`` The maximum wait time +``mongodb.connection-timeout`` The socket connect timeout +``mongodb.socket-timeout`` The socket timeout +``mongodb.socket-keep-alive`` Whether keep-alive is enabled on each socket +``mongodb.ssl.enabled`` Use TLS/SSL for connections to mongod/mongos +``mongodb.read-preference`` The read preference +``mongodb.write-concern`` The write concern +``mongodb.required-replica-set`` The required replica set name +``mongodb.cursor-batch-size`` The number of elements to return in a batch +========================================== ============================================================== ``mongodb.seeds`` ^^^^^^^^^^^^^^^^^ From 43b55cba71bc7345977f292f91b1a082ada72fdf Mon Sep 17 00:00:00 2001 From: Yuya Ebihara Date: Sat, 2 May 2020 23:35:20 +0900 Subject: [PATCH 331/519] Support case insensitive identifiers in MongoDB --- .../src/main/sphinx/connector/mongodb.rst | 1 + .../plugin/mongodb/MongoClientConfig.java | 13 +++++ .../plugin/mongodb/MongoSession.java | 57 +++++++++++++++---- 
.../plugin/mongodb/MongoQueryRunner.java | 1 + .../plugin/mongodb/TestMongoClientConfig.java | 3 + .../TestMongoIntegrationSmokeTest.java | 21 +++++++ 6 files changed, 84 insertions(+), 12 deletions(-) diff --git a/presto-docs/src/main/sphinx/connector/mongodb.rst b/presto-docs/src/main/sphinx/connector/mongodb.rst index 0687608aa7fd..1bed45d57900 100644 --- a/presto-docs/src/main/sphinx/connector/mongodb.rst +++ b/presto-docs/src/main/sphinx/connector/mongodb.rst @@ -39,6 +39,7 @@ Property Name Description ========================================== ============================================================== ``mongodb.seeds`` List of all MongoDB servers ``mongodb.schema-collection`` A collection which contains schema information +``mongodb.case-insensitive-name-matching`` Match database and collection names case insensitively ``mongodb.credentials`` List of credentials ``mongodb.min-connections-per-host`` The minimum size of the connection pool per host ``mongodb.connections-per-host`` The maximum size of the connection pool per host diff --git a/presto-mongodb/src/main/java/io/prestosql/plugin/mongodb/MongoClientConfig.java b/presto-mongodb/src/main/java/io/prestosql/plugin/mongodb/MongoClientConfig.java index 5a4fc07deecf..24328421d086 100644 --- a/presto-mongodb/src/main/java/io/prestosql/plugin/mongodb/MongoClientConfig.java +++ b/presto-mongodb/src/main/java/io/prestosql/plugin/mongodb/MongoClientConfig.java @@ -37,6 +37,7 @@ public class MongoClientConfig private static final Splitter PORT_SPLITTER = Splitter.on(':').trimResults().omitEmptyStrings(); private String schemaCollection = "_schema"; + private boolean caseInsensitiveNameMatching; private List seeds = ImmutableList.of(); private List credentials = ImmutableList.of(); @@ -69,6 +70,18 @@ public MongoClientConfig setSchemaCollection(String schemaCollection) return this; } + public boolean isCaseInsensitiveNameMatching() + { + return caseInsensitiveNameMatching; + } + + 
@Config("mongodb.case-insensitive-name-matching") + public MongoClientConfig setCaseInsensitiveNameMatching(boolean caseInsensitiveNameMatching) + { + this.caseInsensitiveNameMatching = caseInsensitiveNameMatching; + return this; + } + @NotNull @Size(min = 1) public List getSeeds() diff --git a/presto-mongodb/src/main/java/io/prestosql/plugin/mongodb/MongoSession.java b/presto-mongodb/src/main/java/io/prestosql/plugin/mongodb/MongoSession.java index 9eb0b1f237fc..f37e2907ea89 100644 --- a/presto-mongodb/src/main/java/io/prestosql/plugin/mongodb/MongoSession.java +++ b/presto-mongodb/src/main/java/io/prestosql/plugin/mongodb/MongoSession.java @@ -79,6 +79,7 @@ import static io.prestosql.spi.type.VarcharType.createUnboundedVarcharType; import static java.lang.Math.toIntExact; import static java.lang.String.format; +import static java.util.Locale.ENGLISH; import static java.util.Objects.requireNonNull; import static java.util.concurrent.TimeUnit.HOURS; import static java.util.concurrent.TimeUnit.MINUTES; @@ -115,6 +116,7 @@ public class MongoSession private final MongoClient client; private final String schemaCollection; + private final boolean caseInsensitiveNameMatching; private final int cursorBatchSize; private final LoadingCache tableCache; @@ -125,6 +127,7 @@ public MongoSession(TypeManager typeManager, MongoClient client, MongoClientConf this.typeManager = requireNonNull(typeManager, "typeManager is null"); this.client = requireNonNull(client, "client is null"); this.schemaCollection = requireNonNull(config.getSchemaCollection(), "config.getSchemaCollection() is null"); + this.caseInsensitiveNameMatching = config.isCaseInsensitiveNameMatching(); this.cursorBatchSize = config.getCursorBatchSize(); this.implicitPrefix = requireNonNull(config.getImplicitRowFieldPrefix(), "config.getImplicitRowFieldPrefix() is null"); @@ -141,15 +144,18 @@ public void shutdown() public List getAllSchemas() { - return ImmutableList.copyOf(client.listDatabaseNames()); + return 
ImmutableList.copyOf(client.listDatabaseNames()).stream() + .map(name -> name.toLowerCase(ENGLISH)) + .collect(toImmutableList()); } public Set getAllTables(String schema) throws SchemaNotFoundException { + String schemaName = toRemoteSchemaName(schema); ImmutableSet.Builder builder = ImmutableSet.builder(); - builder.addAll(ImmutableList.copyOf(client.getDatabase(schema).listCollectionNames()).stream() + builder.addAll(ImmutableList.copyOf(client.getDatabase(schemaName).listCollectionNames()).stream() .filter(name -> !name.equals(schemaCollection)) .filter(name -> !SYSTEM_TABLES.contains(name)) .collect(toSet())); @@ -227,7 +233,9 @@ public MongoCollection getCollection(SchemaTableName tableName) private MongoCollection getCollection(String schema, String table) { - return client.getDatabase(schema).getCollection(table); + String schemaName = toRemoteSchemaName(schema); + String tableName = toRemoteTableName(schemaName, table); + return client.getDatabase(schemaName).getCollection(tableName); } public List getIndexes(SchemaTableName tableName) @@ -412,8 +420,8 @@ private static Document isNotNullPredicate() private Document getTableMetadata(SchemaTableName schemaTableName) throws TableNotFoundException { - String schemaName = schemaTableName.getSchemaName(); - String tableName = schemaTableName.getTableName(); + String schemaName = toRemoteSchemaName(schemaTableName.getSchemaName()); + String tableName = toRemoteTableName(schemaName, schemaTableName.getTableName()); MongoDatabase db = client.getDatabase(schemaName); MongoCollection schema = db.getCollection(schemaCollection); @@ -427,7 +435,7 @@ private Document getTableMetadata(SchemaTableName schemaTableName) } else { Document metadata = new Document(TABLE_NAME_KEY, tableName); - metadata.append(FIELDS_KEY, guessTableFields(schemaTableName)); + metadata.append(FIELDS_KEY, guessTableFields(schemaName, tableName)); schema.createIndex(new Document(TABLE_NAME_KEY, 1), new IndexOptions().unique(true)); 
schema.insertOne(metadata); @@ -491,8 +499,8 @@ private void createTableMetadata(SchemaTableName schemaTableName, List guessTableFields(SchemaTableName schemaTableName) + private List guessTableFields(String schemaName, String tableName) { - String schemaName = schemaTableName.getSchemaName(); - String tableName = schemaTableName.getTableName(); - MongoDatabase db = client.getDatabase(schemaName); Document doc = db.getCollection(tableName).find().first(); if (doc == null) { @@ -606,6 +611,34 @@ else if (value instanceof Document) { return Optional.ofNullable(typeSignature); } + private String toRemoteSchemaName(String schemaName) + { + verify(schemaName.equals(schemaName.toLowerCase(ENGLISH)), "schemaName not in lower-case: %s", schemaName); + if (!caseInsensitiveNameMatching) { + return schemaName; + } + for (String remoteSchemaName : client.listDatabaseNames()) { + if (schemaName.equals(remoteSchemaName.toLowerCase(ENGLISH))) { + return remoteSchemaName; + } + } + return schemaName; + } + + private String toRemoteTableName(String schemaName, String tableName) + { + verify(tableName.equals(tableName.toLowerCase(ENGLISH)), "tableName not in lower-case: %s", tableName); + if (!caseInsensitiveNameMatching) { + return tableName; + } + for (String remoteTableName : client.getDatabase(schemaName).listCollectionNames()) { + if (tableName.equals(remoteTableName.toLowerCase(ENGLISH))) { + return remoteTableName; + } + } + return tableName; + } + private boolean isView(SchemaTableName tableName) { Document listCollectionsCommand = new Document(new ImmutableMap.Builder() diff --git a/presto-mongodb/src/test/java/io/prestosql/plugin/mongodb/MongoQueryRunner.java b/presto-mongodb/src/test/java/io/prestosql/plugin/mongodb/MongoQueryRunner.java index a50d4bc12312..296be576cfd3 100644 --- a/presto-mongodb/src/test/java/io/prestosql/plugin/mongodb/MongoQueryRunner.java +++ b/presto-mongodb/src/test/java/io/prestosql/plugin/mongodb/MongoQueryRunner.java @@ -55,6 +55,7 @@ public 
static DistributedQueryRunner createMongoQueryRunner(MongoServer server, queryRunner.createCatalog("tpch", "tpch"); Map properties = ImmutableMap.of( + "mongodb.case-insensitive-name-matching", "true", "mongodb.seeds", server.getAddress().toString(), "mongodb.socket-keep-alive", "true"); diff --git a/presto-mongodb/src/test/java/io/prestosql/plugin/mongodb/TestMongoClientConfig.java b/presto-mongodb/src/test/java/io/prestosql/plugin/mongodb/TestMongoClientConfig.java index ef33fa706028..c2676a9566bd 100644 --- a/presto-mongodb/src/test/java/io/prestosql/plugin/mongodb/TestMongoClientConfig.java +++ b/presto-mongodb/src/test/java/io/prestosql/plugin/mongodb/TestMongoClientConfig.java @@ -31,6 +31,7 @@ public void testDefaults() { assertRecordedDefaults(recordDefaults(MongoClientConfig.class) .setSchemaCollection("_schema") + .setCaseInsensitiveNameMatching(false) .setSeeds("") .setCredentials("") .setMinConnectionsPerHost(0) @@ -52,6 +53,7 @@ public void testExplicitPropertyMappings() { Map properties = new ImmutableMap.Builder() .put("mongodb.schema-collection", "_my_schema") + .put("mongodb.case-insensitive-name-matching", "true") .put("mongodb.seeds", "host1,host2:27016") .put("mongodb.credentials", "username:password@collection") .put("mongodb.min-connections-per-host", "1") @@ -70,6 +72,7 @@ public void testExplicitPropertyMappings() MongoClientConfig expected = new MongoClientConfig() .setSchemaCollection("_my_schema") + .setCaseInsensitiveNameMatching(true) .setSeeds("host1", "host2:27016") .setCredentials("username:password@collection") .setMinConnectionsPerHost(1) diff --git a/presto-mongodb/src/test/java/io/prestosql/plugin/mongodb/TestMongoIntegrationSmokeTest.java b/presto-mongodb/src/test/java/io/prestosql/plugin/mongodb/TestMongoIntegrationSmokeTest.java index 0d77121a4aaf..6269314b8d47 100644 --- a/presto-mongodb/src/test/java/io/prestosql/plugin/mongodb/TestMongoIntegrationSmokeTest.java +++ 
b/presto-mongodb/src/test/java/io/prestosql/plugin/mongodb/TestMongoIntegrationSmokeTest.java @@ -16,6 +16,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.mongodb.MongoClient; +import com.mongodb.client.MongoCollection; import io.prestosql.testing.AbstractTestIntegrationSmokeTest; import io.prestosql.testing.MaterializedResult; import io.prestosql.testing.MaterializedRow; @@ -298,6 +299,26 @@ public void testObjectIds() assertUpdate("DROP TABLE tmp_objectid"); } + @Test + public void testCaseInsensitive() + throws Exception + { + MongoCollection collection = client.getDatabase("testCase").getCollection("testInsensitive"); + collection.insertOne(new Document(ImmutableMap.of("Name", "abc", "Value", 1))); + + assertQuery("SHOW SCHEMAS IN mongodb LIKE 'testcase'", "SELECT 'testcase'"); + assertQuery("SHOW TABLES IN testcase", "SELECT 'testinsensitive'"); + assertQuery( + "SHOW COLUMNS FROM testcase.testInsensitive", + "VALUES ('name', 'varchar', '', ''), ('value', 'bigint', '', '')"); + + assertQuery("SELECT name, value FROM testcase.testinsensitive", "SELECT 'abc', 1"); + assertUpdate("INSERT INTO testcase.testinsensitive VALUES('def', 2)", 1); + + assertQuery("SELECT value FROM testcase.testinsensitive WHERE name = 'def'", "SELECT 2"); + assertUpdate("DROP TABLE testcase.testinsensitive"); + } + @Test public void testSelectView() { From c9731f9be51f9255f8367068058f11e4f80d3d1f Mon Sep 17 00:00:00 2001 From: Yuya Ebihara Date: Tue, 5 May 2020 22:49:43 +0900 Subject: [PATCH 332/519] Enable tests related to DROP TABLE in MongoDB --- .../mongodb/TestMongoDistributedQueries.java | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/presto-mongodb/src/test/java/io/prestosql/plugin/mongodb/TestMongoDistributedQueries.java b/presto-mongodb/src/test/java/io/prestosql/plugin/mongodb/TestMongoDistributedQueries.java index 7947fbb05b7a..55dd9cddcd26 100644 --- 
a/presto-mongodb/src/test/java/io/prestosql/plugin/mongodb/TestMongoDistributedQueries.java +++ b/presto-mongodb/src/test/java/io/prestosql/plugin/mongodb/TestMongoDistributedQueries.java @@ -53,20 +53,6 @@ protected boolean supportsViews() return false; } - @Override - public void testCreateTable() - { - // TODO https://github.com/prestosql/presto/issues/3082 - throw new SkipException("Fix DROP TABLE"); - } - - @Override - public void testCreateTableAsSelect() - { - // TODO https://github.com/prestosql/presto/issues/3082 - throw new SkipException("Fix DROP TABLE"); - } - @Override public void testCreateSchema() { From 2ca9b5bfece354776fc7fe9494f80bfb2a9966ba Mon Sep 17 00:00:00 2001 From: Yuya Ebihara Date: Tue, 5 May 2020 11:59:15 +0900 Subject: [PATCH 333/519] Rename id column to key and shorten name in testColumnName The id column name is reserved in Cassandra connector. Replace 0startingwithdigit with 0startwithdigit to avoid column name length limitation (48) in Cassandra. --- .../prestosql/testing/AbstractTestDistributedQueries.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java index 7d1f7da985a4..f89752934ef3 100644 --- a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java +++ b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestDistributedQueries.java @@ -1180,7 +1180,7 @@ private void testColumnName(String columnName, boolean delimited) try { // TODO test with both CTAS *and* CREATE TABLE + INSERT, since they use different connector API methods. 
- assertUpdate("CREATE TABLE " + tableName + "(id varchar, " + nameInSql + " varchar)"); + assertUpdate("CREATE TABLE " + tableName + "(key varchar, " + nameInSql + " varchar)"); } catch (RuntimeException e) { if (isColumnNameRejected(e, columnName, delimited)) { @@ -1198,8 +1198,8 @@ private void testColumnName(String columnName, boolean delimited) assertQuery("SELECT " + nameInSql + " FROM " + tableName, "VALUES (NULL), ('abc'), ('xyz')"); // predicate - assertQuery("SELECT id FROM " + tableName + " WHERE " + nameInSql + " IS NULL", "VALUES ('null value')"); - assertQuery("SELECT id FROM " + tableName + " WHERE " + nameInSql + " = 'abc'", "VALUES ('sample value')"); + assertQuery("SELECT key FROM " + tableName + " WHERE " + nameInSql + " IS NULL", "VALUES ('null value')"); + assertQuery("SELECT key FROM " + tableName + " WHERE " + nameInSql + " = 'abc'", "VALUES ('sample value')"); assertUpdate("DROP TABLE " + tableName); } @@ -1237,7 +1237,7 @@ public Object[][] testColumnNameDataProvider() {"a/slash`"}, {"a\\backslash`"}, {"adigit0"}, - {"0startingwithdigit"}, + {"0startwithdigit"}, }; } From fdbeea304bcf3252e7cd1be718dcd148340cf764 Mon Sep 17 00:00:00 2001 From: Yuya Ebihara Date: Tue, 5 May 2020 11:38:48 +0900 Subject: [PATCH 334/519] Use Metadata.quote in CassandraCqlUtils To use escaping logic of Metadata.quote method. Additionally, escape single quotation for table comment and enable testColumnName in Cassandra. 
--- .../plugin/cassandra/CassandraMetadata.java | 3 ++- .../plugin/cassandra/util/CassandraCqlUtils.java | 12 ++++-------- .../cassandra/TestCassandraDistributedQueries.java | 10 ---------- 3 files changed, 6 insertions(+), 19 deletions(-) diff --git a/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraMetadata.java b/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraMetadata.java index cf050daf4609..c34e5b8a02d8 100644 --- a/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraMetadata.java +++ b/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/CassandraMetadata.java @@ -54,6 +54,7 @@ import static io.prestosql.plugin.cassandra.CassandraType.toCassandraType; import static io.prestosql.plugin.cassandra.util.CassandraCqlUtils.ID_COLUMN_NAME; import static io.prestosql.plugin.cassandra.util.CassandraCqlUtils.cqlNameToSqlName; +import static io.prestosql.plugin.cassandra.util.CassandraCqlUtils.quoteStringLiteral; import static io.prestosql.plugin.cassandra.util.CassandraCqlUtils.validColumnName; import static io.prestosql.spi.StandardErrorCode.NOT_SUPPORTED; import static io.prestosql.spi.StandardErrorCode.PERMISSION_DENIED; @@ -310,7 +311,7 @@ private CassandraOutputTableHandle createTable(ConnectorTableMetadata tableMetad // encode column ordering in the cassandra table comment field since there is no better place to store this String columnMetadata = extraColumnMetadataCodec.toJson(columnExtra.build()); - queryBuilder.append("WITH comment='").append(PRESTO_COMMENT_METADATA).append(" ").append(columnMetadata).append("'"); + queryBuilder.append("WITH comment=").append(quoteStringLiteral(PRESTO_COMMENT_METADATA + " " + columnMetadata)); // We need to create the Cassandra table before commit because the record needs to be written to the table. 
cassandraSession.execute(queryBuilder.toString()); diff --git a/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/util/CassandraCqlUtils.java b/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/util/CassandraCqlUtils.java index 1bb85f8cf247..afd34fd73047 100644 --- a/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/util/CassandraCqlUtils.java +++ b/presto-cassandra/src/main/java/io/prestosql/plugin/cassandra/util/CassandraCqlUtils.java @@ -23,6 +23,7 @@ import java.util.List; +import static com.datastax.driver.core.Metadata.quote; import static java.nio.charset.StandardCharsets.UTF_8; public final class CassandraCqlUtils @@ -34,12 +35,12 @@ private CassandraCqlUtils() {} public static String validSchemaName(String identifier) { - return quoteIdentifier(identifier); + return quote(identifier); } public static String validTableName(String identifier) { - return quoteIdentifier(identifier); + return quote(identifier); } public static String validColumnName(String identifier) @@ -48,12 +49,7 @@ public static String validColumnName(String identifier) return "\"\""; } - return quoteIdentifier(identifier); - } - - private static String quoteIdentifier(String identifier) - { - return '"' + identifier + '"'; + return quote(identifier); } public static String quoteStringLiteral(String string) diff --git a/presto-cassandra/src/test/java/io/prestosql/plugin/cassandra/TestCassandraDistributedQueries.java b/presto-cassandra/src/test/java/io/prestosql/plugin/cassandra/TestCassandraDistributedQueries.java index 7b40ab9ba1b5..dffdd7552e89 100644 --- a/presto-cassandra/src/test/java/io/prestosql/plugin/cassandra/TestCassandraDistributedQueries.java +++ b/presto-cassandra/src/test/java/io/prestosql/plugin/cassandra/TestCassandraDistributedQueries.java @@ -150,16 +150,6 @@ protected TestTable createTableWithDefaultColumns() throw new SkipException("Cassandra connector does not support column default values"); } - @Override - public void 
testColumnName(String columnName) - { - // TODO Enable after fixing the following error messages - // - Multiple definition of identifier id - // - Column family names shouldn't be more than 48 characters long - // - mismatched character '' - // - missing EOF at 'apostrophe' - } - @Override public void testDataMappingSmokeTest(DataMappingTestSetup dataMappingTestSetup) { From 652673e0f0e996fba217f8a8ad12ff83c63d6306 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Grzegorz=20Kokosi=C5=84ski?= Date: Mon, 4 May 2020 10:04:29 +0200 Subject: [PATCH 335/519] Skip valueIsNull to ShortArrayBlock if possible --- .../java/io/prestosql/spi/block/ShortArrayBlockBuilder.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/presto-spi/src/main/java/io/prestosql/spi/block/ShortArrayBlockBuilder.java b/presto-spi/src/main/java/io/prestosql/spi/block/ShortArrayBlockBuilder.java index 0295e1f8350f..9236959d3ee4 100644 --- a/presto-spi/src/main/java/io/prestosql/spi/block/ShortArrayBlockBuilder.java +++ b/presto-spi/src/main/java/io/prestosql/spi/block/ShortArrayBlockBuilder.java @@ -102,7 +102,7 @@ public Block build() if (!hasNonNullValue) { return new RunLengthEncodedBlock(NULL_VALUE_BLOCK, positionCount); } - return new ShortArrayBlock(0, positionCount, valueIsNull, values); + return new ShortArrayBlock(0, positionCount, hasNullValue ? 
valueIsNull : null, values); } @Override From 1b457a1188b44828d5aba7eb7e58d9aa45e1df28 Mon Sep 17 00:00:00 2001 From: Alex Date: Mon, 4 May 2020 15:54:42 -0400 Subject: [PATCH 336/519] Change Hive $file_modified_time column to timestamp with timezone type --- .../io/prestosql/plugin/hive/HiveColumnHandle.java | 6 ++++-- .../prestosql/plugin/hive/HivePageSourceProvider.java | 8 +++++--- .../java/io/prestosql/plugin/hive/util/HiveUtil.java | 11 +++++++++-- .../io/prestosql/plugin/hive/AbstractTestHive.java | 5 +++++ .../plugin/hive/TestHiveIntegrationSmokeTest.java | 9 +++++---- 5 files changed, 28 insertions(+), 11 deletions(-) diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveColumnHandle.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveColumnHandle.java index 69e8345e6de4..744acedba9ed 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveColumnHandle.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveColumnHandle.java @@ -30,6 +30,7 @@ import static io.prestosql.plugin.hive.HiveType.HIVE_STRING; import static io.prestosql.spi.type.BigintType.BIGINT; import static io.prestosql.spi.type.IntegerType.INTEGER; +import static io.prestosql.spi.type.TimestampWithTimeZoneType.TIMESTAMP_WITH_TIME_ZONE; import static io.prestosql.spi.type.VarcharType.VARCHAR; import static java.util.Objects.requireNonNull; @@ -58,8 +59,9 @@ public class HiveColumnHandle public static final int FILE_MODIFIED_TIME_COLUMN_INDEX = -14; public static final String FILE_MODIFIED_TIME_COLUMN_NAME = "$file_modified_time"; - public static final HiveType FILE_MODIFIED_TIME_TYPE = HIVE_LONG; - public static final Type FILE_MODIFIED_TIME_TYPE_SIGNATURE = BIGINT; + // TODO introduce HiveType.HIVE_TIMESTAMP_WITH_TIME_ZONE + public static final HiveType FILE_MODIFIED_TIME_TYPE = HiveType.HIVE_TIMESTAMP; + public static final Type FILE_MODIFIED_TIME_TYPE_SIGNATURE = TIMESTAMP_WITH_TIME_ZONE; private static final String UPDATE_ROW_ID_COLUMN_NAME = 
"$shard_row_id"; diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HivePageSourceProvider.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HivePageSourceProvider.java index 8f4f930d3c3f..464bb9a7322c 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/HivePageSourceProvider.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HivePageSourceProvider.java @@ -163,7 +163,8 @@ public static Optional createHivePageSource( path, bucketNumber, fileSize, - fileModifiedTime); + fileModifiedTime, + hiveStorageTimeZone); List regularAndInterimColumnMappings = ColumnMapping.extractRegularAndInterimColumnMappings(columnMappings); Optional bucketAdaptation = createBucketAdaptation(bucketConversion, bucketNumber, regularAndInterimColumnMappings); @@ -350,7 +351,8 @@ public static List buildColumnMappings( Path path, OptionalInt bucketNumber, long fileSize, - long fileModifiedTime) + long fileModifiedTime, + DateTimeZone hiveStorageTimeZone) { Map partitionKeysByName = uniqueIndex(partitionKeys, HivePartitionKey::getName); @@ -386,7 +388,7 @@ public static List buildColumnMappings( else { columnMappings.add(prefilled( column, - getPrefilledColumnValue(column, partitionKeysByName.get(column.getName()), path, bucketNumber, fileSize, fileModifiedTime), + getPrefilledColumnValue(column, partitionKeysByName.get(column.getName()), path, bucketNumber, fileSize, fileModifiedTime, hiveStorageTimeZone), baseTypeCoercionFrom)); } } diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/util/HiveUtil.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/util/HiveUtil.java index 157cc037dfe1..3eaa6d528392 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/util/HiveUtil.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/util/HiveUtil.java @@ -973,7 +973,14 @@ public static List toPartitionValues(String partitionName) return resultBuilder.build(); } - public static String getPrefilledColumnValue(HiveColumnHandle 
columnHandle, HivePartitionKey partitionKey, Path path, OptionalInt bucketNumber, long fileSize, long fileModifiedTime) + public static String getPrefilledColumnValue( + HiveColumnHandle columnHandle, + HivePartitionKey partitionKey, + Path path, + OptionalInt bucketNumber, + long fileSize, + long fileModifiedTime, + DateTimeZone hiveStorageTimeZone) { if (partitionKey != null) { return partitionKey.getValue(); @@ -988,7 +995,7 @@ public static String getPrefilledColumnValue(HiveColumnHandle columnHandle, Hive return String.valueOf(fileSize); } if (isFileModifiedTimeColumnHandle(columnHandle)) { - return String.valueOf(fileModifiedTime); + return HIVE_TIMESTAMP_PARSER.withZone(hiveStorageTimeZone).print(fileModifiedTime); } throw new PrestoException(NOT_SUPPORTED, "unsupported hidden column: " + columnHandle); } diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/AbstractTestHive.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/AbstractTestHive.java index 93806c6ef484..5b78916ab8b8 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/AbstractTestHive.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/AbstractTestHive.java @@ -109,6 +109,7 @@ import io.prestosql.spi.type.RowType; import io.prestosql.spi.type.SqlDate; import io.prestosql.spi.type.SqlTimestamp; +import io.prestosql.spi.type.SqlTimestampWithTimeZone; import io.prestosql.spi.type.SqlVarbinary; import io.prestosql.spi.type.Type; import io.prestosql.sql.gen.JoinCompiler; @@ -256,6 +257,7 @@ import static io.prestosql.spi.type.SmallintType.SMALLINT; import static io.prestosql.spi.type.TimeZoneKey.UTC_KEY; import static io.prestosql.spi.type.TimestampType.TIMESTAMP; +import static io.prestosql.spi.type.TimestampWithTimeZoneType.TIMESTAMP_WITH_TIME_ZONE; import static io.prestosql.spi.type.TinyintType.TINYINT; import static io.prestosql.spi.type.VarbinaryType.VARBINARY; import static io.prestosql.spi.type.VarcharType.VARCHAR; @@ -4517,6 +4519,9 @@ else if 
(VARBINARY.equals(column.getType())) { else if (TIMESTAMP.equals(column.getType())) { assertInstanceOf(value, SqlTimestamp.class); } + else if (TIMESTAMP_WITH_TIME_ZONE.equals(column.getType())) { + assertInstanceOf(value, SqlTimestampWithTimeZone.class); + } else if (DATE.equals(column.getType())) { assertInstanceOf(value, SqlDate.class); } diff --git a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java index 7c8be2b4d8cf..6dc458a2fd17 100644 --- a/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java +++ b/presto-hive/src/test/java/io/prestosql/plugin/hive/TestHiveIntegrationSmokeTest.java @@ -62,6 +62,7 @@ import java.time.Instant; import java.time.LocalDate; import java.time.LocalDateTime; +import java.time.ZonedDateTime; import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashMap; @@ -3812,17 +3813,17 @@ public void testFileModifiedTimeHiddenColumn() assertEquals(getPartitions("test_file_modified_time").size(), 3); MaterializedResult results = computeActual(format("SELECT *, \"%s\" FROM test_file_modified_time", FILE_MODIFIED_TIME_COLUMN_NAME)); - Map fileModifiedTimeMap = new HashMap<>(); + Map fileModifiedTimeMap = new HashMap<>(); for (int i = 0; i < results.getRowCount(); i++) { MaterializedRow row = results.getMaterializedRows().get(i); int col0 = (int) row.getField(0); int col1 = (int) row.getField(1); - long fileModifiedTime = (Long) row.getField(2); + Instant fileModifiedTime = ((ZonedDateTime) row.getField(2)).toInstant(); - assertTrue(fileModifiedTime > (testStartTime - 2_000)); + assertTrue(fileModifiedTime.toEpochMilli() > (testStartTime - 2_000)); assertEquals(col0 % 3, col1); if (fileModifiedTimeMap.containsKey(col1)) { - assertEquals(fileModifiedTimeMap.get(col1).longValue(), fileModifiedTime); + assertEquals(fileModifiedTimeMap.get(col1), fileModifiedTime); } else { 
fileModifiedTimeMap.put(col1, fileModifiedTime); From 2aa3de46a4e3f4b6f4363a398f5a25633f7c0432 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Thu, 13 Dec 2018 10:11:06 +0100 Subject: [PATCH 337/519] Add basic Domain, ValueSet, Range and Marker #toString While proper `toString` requires `ConnectorSession`, a basic `toString` is still helpful when debugging. --- .../spi/predicate/AllOrNoneValueSet.java | 8 ++++- .../io/prestosql/spi/predicate/Domain.java | 6 ++++ .../spi/predicate/EquatableValueSet.java | 9 ++++++ .../io/prestosql/spi/predicate/Marker.java | 16 ++++++++++ .../io/prestosql/spi/predicate/Range.java | 32 +++++++++++++------ .../spi/predicate/SortedRangeSet.java | 6 ++++ .../io/prestosql/spi/predicate/ValueSet.java | 3 ++ 7 files changed, 70 insertions(+), 10 deletions(-) diff --git a/presto-spi/src/main/java/io/prestosql/spi/predicate/AllOrNoneValueSet.java b/presto-spi/src/main/java/io/prestosql/spi/predicate/AllOrNoneValueSet.java index 22211b62a883..702767c956b1 100644 --- a/presto-spi/src/main/java/io/prestosql/spi/predicate/AllOrNoneValueSet.java +++ b/presto-spi/src/main/java/io/prestosql/spi/predicate/AllOrNoneValueSet.java @@ -152,11 +152,17 @@ public ValueSet complement() } @Override - public String toString(ConnectorSession session) + public String toString() { return "[" + (all ? "ALL" : "NONE") + "]"; } + @Override + public String toString(ConnectorSession session) + { + return toString(); + } + @Override public int hashCode() { diff --git a/presto-spi/src/main/java/io/prestosql/spi/predicate/Domain.java b/presto-spi/src/main/java/io/prestosql/spi/predicate/Domain.java index e98f07369280..787ab961c325 100644 --- a/presto-spi/src/main/java/io/prestosql/spi/predicate/Domain.java +++ b/presto-spi/src/main/java/io/prestosql/spi/predicate/Domain.java @@ -299,6 +299,12 @@ public Domain simplify(int threshold) return Domain.create(simplifiedValueSet, nullAllowed); } + @Override + public String toString() + { + return "[ " + (nullAllowed ? 
"NULL, " : "") + values.toString() + " ]"; + } + public String toString(ConnectorSession session) { return "[ " + (nullAllowed ? "NULL, " : "") + values.toString(session) + " ]"; diff --git a/presto-spi/src/main/java/io/prestosql/spi/predicate/EquatableValueSet.java b/presto-spi/src/main/java/io/prestosql/spi/predicate/EquatableValueSet.java index 625b9f28818c..d76916e20d59 100644 --- a/presto-spi/src/main/java/io/prestosql/spi/predicate/EquatableValueSet.java +++ b/presto-spi/src/main/java/io/prestosql/spi/predicate/EquatableValueSet.java @@ -268,6 +268,15 @@ public EquatableValueSet complement() return new EquatableValueSet(type, !whiteList, entries); } + @Override + public String toString() + { + return format( + "%s[... (%d elements) ...]", + whiteList ? "" : "EXCLUDES", + entries.size()); + } + @Override public String toString(ConnectorSession session) { diff --git a/presto-spi/src/main/java/io/prestosql/spi/predicate/Marker.java b/presto-spi/src/main/java/io/prestosql/spi/predicate/Marker.java index 2b4857976051..599b812828da 100644 --- a/presto-spi/src/main/java/io/prestosql/spi/predicate/Marker.java +++ b/presto-spi/src/main/java/io/prestosql/spi/predicate/Marker.java @@ -23,6 +23,7 @@ import java.util.Objects; import java.util.Optional; +import java.util.StringJoiner; import static io.prestosql.spi.predicate.Utils.blockToNativeValue; import static io.prestosql.spi.predicate.Utils.nativeValueToBlock; @@ -294,6 +295,21 @@ public boolean equals(Object obj) && (!this.valueBlock.isPresent() || type.equalTo(this.valueBlock.get(), 0, other.valueBlock.get(), 0)); } + @Override + public String toString() + { + StringJoiner stringJoiner = new StringJoiner(", ", Marker.class.getSimpleName() + "[", "]"); + if (isLowerUnbounded()) { + stringJoiner.add("lower unbounded"); + } + else if (isUpperUnbounded()) { + stringJoiner.add("upper unbounded"); + } + stringJoiner.add("bound=" + bound); + valueBlock.ifPresent(valueBlock -> stringJoiner.add("valueBlock=...")); + return 
stringJoiner.toString(); + } + public String toString(ConnectorSession session) { StringBuilder buffer = new StringBuilder("{"); diff --git a/presto-spi/src/main/java/io/prestosql/spi/predicate/Range.java b/presto-spi/src/main/java/io/prestosql/spi/predicate/Range.java index 7e28a987c47a..b7696666fe72 100644 --- a/presto-spi/src/main/java/io/prestosql/spi/predicate/Range.java +++ b/presto-spi/src/main/java/io/prestosql/spi/predicate/Range.java @@ -200,19 +200,33 @@ public boolean equals(Object obj) Objects.equals(this.high, other.high); } - public String toString(ConnectorSession session) + @Override + public String toString() { - StringBuilder buffer = new StringBuilder(); if (isSingleValue()) { - buffer.append('[').append(low.getPrintableValue(session)).append(']'); + return "?"; } - else { - buffer.append((low.getBound() == Marker.Bound.EXACTLY) ? '[' : '('); - buffer.append(low.isLowerUnbounded() ? "" : low.getPrintableValue(session)); - buffer.append(", "); - buffer.append(high.isUpperUnbounded() ? "" : high.getPrintableValue(session)); - buffer.append((high.getBound() == Marker.Bound.EXACTLY) ? ']' : ')'); + + StringBuilder buffer = new StringBuilder(); + buffer.append((low.getBound() == Marker.Bound.EXACTLY) ? '[' : '('); + buffer.append(low.isLowerUnbounded() ? "" : "?"); + buffer.append(", "); + buffer.append(high.isUpperUnbounded() ? "" : "?"); + buffer.append((high.getBound() == Marker.Bound.EXACTLY) ? ']' : ')'); + return buffer.toString(); + } + + public String toString(ConnectorSession session) + { + if (isSingleValue()) { + return "[" + low.getPrintableValue(session) + "]"; } + StringBuilder buffer = new StringBuilder(); + buffer.append((low.getBound() == Marker.Bound.EXACTLY) ? '[' : '('); + buffer.append(low.isLowerUnbounded() ? "" : low.getPrintableValue(session)); + buffer.append(", "); + buffer.append(high.isUpperUnbounded() ? "" : high.getPrintableValue(session)); + buffer.append((high.getBound() == Marker.Bound.EXACTLY) ? 
']' : ')'); return buffer.toString(); } } diff --git a/presto-spi/src/main/java/io/prestosql/spi/predicate/SortedRangeSet.java b/presto-spi/src/main/java/io/prestosql/spi/predicate/SortedRangeSet.java index c2b663299d72..353ebe907819 100644 --- a/presto-spi/src/main/java/io/prestosql/spi/predicate/SortedRangeSet.java +++ b/presto-spi/src/main/java/io/prestosql/spi/predicate/SortedRangeSet.java @@ -375,6 +375,12 @@ public boolean equals(Object obj) return Objects.equals(this.lowIndexedRanges, other.lowIndexedRanges); } + @Override + public String toString() + { + return lowIndexedRanges.values().toString(); + } + @Override public String toString(ConnectorSession session) { diff --git a/presto-spi/src/main/java/io/prestosql/spi/predicate/ValueSet.java b/presto-spi/src/main/java/io/prestosql/spi/predicate/ValueSet.java index 5ca40029ebdc..77d7c1ff2132 100644 --- a/presto-spi/src/main/java/io/prestosql/spi/predicate/ValueSet.java +++ b/presto-spi/src/main/java/io/prestosql/spi/predicate/ValueSet.java @@ -152,5 +152,8 @@ default boolean contains(ValueSet other) return this.union(other).equals(this); } + @Override + String toString(); + String toString(ConnectorSession session); } From c1bdd867b77f44f16eca21ef88c10bfd26d75e37 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Mon, 4 May 2020 23:02:51 +0200 Subject: [PATCH 338/519] Inline trivial method --- .../tests/TestInformationSchemaConnector.java | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/presto-tests/src/test/java/io/prestosql/tests/TestInformationSchemaConnector.java b/presto-tests/src/test/java/io/prestosql/tests/TestInformationSchemaConnector.java index 1e31e20a9f10..2e64ab90009d 100644 --- a/presto-tests/src/test/java/io/prestosql/tests/TestInformationSchemaConnector.java +++ b/presto-tests/src/test/java/io/prestosql/tests/TestInformationSchemaConnector.java @@ -171,7 +171,10 @@ public void testMetadataCalls() new MetadataCallsCount() .withListTablesCount(1) 
.withGetColumnsCount(1)); - assertNoMetadataCalls("SELECT count(*) from test_catalog.information_schema.columns WHERE table_catalog = 'wrong'", "VALUES 0"); + assertMetadataCalls( + "SELECT count(*) from test_catalog.information_schema.columns WHERE table_catalog = 'wrong'", + "VALUES 0", + new MetadataCallsCount()); assertMetadataCalls( "SELECT count(*) from test_catalog.information_schema.columns WHERE table_catalog = 'test_catalog' AND table_schema = 'table_schema1' AND table_name = 'test_table1'", "VALUES 0", @@ -196,7 +199,10 @@ public void testMetadataCalls() .withListSchemasCount(1) .withListTablesCount(2) .withGetColumnsCount(10000)); - assertNoMetadataCalls("SELECT count(*) from test_catalog.information_schema.tables WHERE table_schema = ''", "VALUES 0"); + assertMetadataCalls( + "SELECT count(*) from test_catalog.information_schema.tables WHERE table_schema = ''", + "VALUES 0", + new MetadataCallsCount()); } @Test @@ -261,11 +267,6 @@ public Iterable getConnectorFactories() } } - private void assertNoMetadataCalls(String actualSql, String expectedSql) - { - assertMetadataCalls(actualSql, expectedSql, new MetadataCallsCount()); - } - private void assertMetadataCalls(String actualSql, String expectedSql, MetadataCallsCount expectedMetadataCallsCount) { long listSchemasCallsCountBefore = LIST_SCHEMAS_CALLS_COUNTER.get(); From 6efc6c5f23c744c57db1345956e14a8f6b35652e Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Mon, 4 May 2020 23:02:52 +0200 Subject: [PATCH 339/519] Use better predicate in test table_schema1 does not exist (test_schema1 does). 
--- .../io/prestosql/tests/TestInformationSchemaConnector.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/presto-tests/src/test/java/io/prestosql/tests/TestInformationSchemaConnector.java b/presto-tests/src/test/java/io/prestosql/tests/TestInformationSchemaConnector.java index 2e64ab90009d..324d80c14b7d 100644 --- a/presto-tests/src/test/java/io/prestosql/tests/TestInformationSchemaConnector.java +++ b/presto-tests/src/test/java/io/prestosql/tests/TestInformationSchemaConnector.java @@ -176,12 +176,12 @@ public void testMetadataCalls() "VALUES 0", new MetadataCallsCount()); assertMetadataCalls( - "SELECT count(*) from test_catalog.information_schema.columns WHERE table_catalog = 'test_catalog' AND table_schema = 'table_schema1' AND table_name = 'test_table1'", + "SELECT count(*) from test_catalog.information_schema.columns WHERE table_catalog = 'test_catalog' AND table_schema = 'wrong_schema1' AND table_name = 'test_table1'", "VALUES 0", new MetadataCallsCount() .withListTablesCount(1)); assertMetadataCalls( - "SELECT count(*) from test_catalog.information_schema.columns WHERE table_catalog IN ('wrong', 'test_catalog') AND table_schema = 'table_schema1' AND table_name = 'test_table1'", + "SELECT count(*) from test_catalog.information_schema.columns WHERE table_catalog IN ('wrong', 'test_catalog') AND table_schema = 'wrong_schema1' AND table_name = 'test_table1'", "VALUES 0", new MetadataCallsCount() .withListTablesCount(1)); From 75fe578cb09eee91bc4e58659873fc359c00e788 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Mon, 4 May 2020 23:02:53 +0200 Subject: [PATCH 340/519] Use assertThat for better messages on failure --- .../jdbc/TestPrestoDatabaseMetaData.java | 113 ++++++++---------- 1 file changed, 52 insertions(+), 61 deletions(-) diff --git a/presto-jdbc/src/test/java/io/prestosql/jdbc/TestPrestoDatabaseMetaData.java b/presto-jdbc/src/test/java/io/prestosql/jdbc/TestPrestoDatabaseMetaData.java index 999aa049b932..abdf48bf2193 
100644 --- a/presto-jdbc/src/test/java/io/prestosql/jdbc/TestPrestoDatabaseMetaData.java +++ b/presto-jdbc/src/test/java/io/prestosql/jdbc/TestPrestoDatabaseMetaData.java @@ -225,7 +225,8 @@ public void testGetCatalogs() { try (Connection connection = createConnection()) { try (ResultSet rs = connection.getMetaData().getCatalogs()) { - assertEquals(readRows(rs), list(list("blackhole"), list("hive"), list("system"), list(TEST_CATALOG))); + assertThat(readRows(rs)) + .isEqualTo(list(list("blackhole"), list("hive"), list("system"), list(TEST_CATALOG))); ResultSetMetaData metadata = rs.getMetaData(); assertEquals(metadata.getColumnCount(), 1); @@ -330,9 +331,10 @@ private static void assertGetSchemasResult(ResultSet rs, List> expe { List> data = readRows(rs); - assertEquals(data.size(), expectedSchemas.size()); + assertThat(data).hasSize(expectedSchemas.size()); for (List row : data) { - assertTrue(expectedSchemas.contains(list((String) row.get(1), (String) row.get(0)))); + assertThat(list((String) row.get(1), (String) row.get(0))) + .isIn(expectedSchemas); } ResultSetMetaData metadata = rs.getMetaData(); @@ -352,20 +354,18 @@ public void testGetTables() try (Connection connection = createConnection()) { try (ResultSet rs = connection.getMetaData().getTables(null, null, null, null)) { assertTableMetadata(rs); - - Set> rows = ImmutableSet.copyOf(readRows(rs)); - assertTrue(rows.contains(getTablesRow("information_schema", "tables"))); - assertTrue(rows.contains(getTablesRow("information_schema", "schemata"))); + assertThat(readRows(rs)) + .contains(getTablesRow("information_schema", "tables")) + .contains(getTablesRow("information_schema", "schemata")); } } try (Connection connection = createConnection()) { try (ResultSet rs = connection.getMetaData().getTables(TEST_CATALOG, null, null, null)) { assertTableMetadata(rs); - - Set> rows = ImmutableSet.copyOf(readRows(rs)); - assertTrue(rows.contains(getTablesRow("information_schema", "tables"))); - 
assertTrue(rows.contains(getTablesRow("information_schema", "schemata"))); + assertThat(readRows(rs)) + .contains(getTablesRow("information_schema", "tables")) + .contains(getTablesRow("information_schema", "schemata")); } } @@ -373,17 +373,16 @@ public void testGetTables() try (Connection connection = createConnection()) { try (ResultSet rs = connection.getMetaData().getTables("", null, null, null)) { assertTableMetadata(rs); - assertEquals(readRows(rs).size(), 0); + assertThat(readRows(rs)).isEmpty(); } } try (Connection connection = createConnection()) { try (ResultSet rs = connection.getMetaData().getTables(TEST_CATALOG, "information_schema", null, null)) { assertTableMetadata(rs); - - Set> rows = ImmutableSet.copyOf(readRows(rs)); - assertTrue(rows.contains(getTablesRow("information_schema", "tables"))); - assertTrue(rows.contains(getTablesRow("information_schema", "schemata"))); + assertThat(readRows(rs)) + .contains(getTablesRow("information_schema", "tables")) + .contains(getTablesRow("information_schema", "schemata")); } } @@ -391,77 +390,70 @@ public void testGetTables() try (Connection connection = createConnection()) { try (ResultSet rs = connection.getMetaData().getTables(TEST_CATALOG, "", null, null)) { assertTableMetadata(rs); - assertEquals(readRows(rs).size(), 0); + assertThat(readRows(rs)).isEmpty(); } } try (Connection connection = createConnection()) { try (ResultSet rs = connection.getMetaData().getTables(TEST_CATALOG, "information_schema", "tables", null)) { assertTableMetadata(rs); - - Set> rows = ImmutableSet.copyOf(readRows(rs)); - assertTrue(rows.contains(getTablesRow("information_schema", "tables"))); - assertFalse(rows.contains(getTablesRow("information_schema", "schemata"))); + assertThat(readRows(rs)) + .contains(getTablesRow("information_schema", "tables")) + .doesNotContain(getTablesRow("information_schema", "schemata")); } } try (Connection connection = createConnection()) { try (ResultSet rs = 
connection.getMetaData().getTables(TEST_CATALOG, "information_schema", "tables", array("TABLE"))) { assertTableMetadata(rs); - - Set> rows = ImmutableSet.copyOf(readRows(rs)); - assertTrue(rows.contains(getTablesRow("information_schema", "tables"))); - assertFalse(rows.contains(getTablesRow("information_schema", "schemata"))); + assertThat(readRows(rs)) + .contains(getTablesRow("information_schema", "tables")) + .doesNotContain(getTablesRow("information_schema", "schemata")); } } try (Connection connection = createConnection()) { try (ResultSet rs = connection.getMetaData().getTables(null, "information_schema", null, null)) { assertTableMetadata(rs); - - Set> rows = ImmutableSet.copyOf(readRows(rs)); - assertTrue(rows.contains(getTablesRow("information_schema", "tables"))); - assertTrue(rows.contains(getTablesRow("information_schema", "schemata"))); + assertThat(readRows(rs)) + .contains(getTablesRow("information_schema", "tables")) + .contains(getTablesRow("information_schema", "schemata")); } } try (Connection connection = createConnection()) { try (ResultSet rs = connection.getMetaData().getTables(null, null, "tables", null)) { assertTableMetadata(rs); - - Set> rows = ImmutableSet.copyOf(readRows(rs)); - assertTrue(rows.contains(getTablesRow("information_schema", "tables"))); - assertFalse(rows.contains(getTablesRow("information_schema", "schemata"))); + assertThat(readRows(rs)) + .contains(getTablesRow("information_schema", "tables")) + .doesNotContain(getTablesRow("information_schema", "schemata")); } } try (Connection connection = createConnection()) { try (ResultSet rs = connection.getMetaData().getTables(null, null, null, array("TABLE"))) { assertTableMetadata(rs); - - Set> rows = ImmutableSet.copyOf(readRows(rs)); - assertTrue(rows.contains(getTablesRow("information_schema", "tables"))); - assertTrue(rows.contains(getTablesRow("information_schema", "schemata"))); + assertThat(readRows(rs)) + .contains(getTablesRow("information_schema", "tables")) + 
.contains(getTablesRow("information_schema", "schemata")); } } try (Connection connection = createConnection()) { try (ResultSet rs = connection.getMetaData().getTables(TEST_CATALOG, "inf%", "tables", null)) { assertTableMetadata(rs); - - Set> rows = ImmutableSet.copyOf(readRows(rs)); - assertTrue(rows.contains(getTablesRow("information_schema", "tables"))); - assertFalse(rows.contains(getTablesRow("information_schema", "schemata"))); + assertThat(readRows(rs)) + .contains(getTablesRow("information_schema", "tables")) + .doesNotContain(getTablesRow("information_schema", "schemata")); } } try (Connection connection = createConnection()) { try (ResultSet rs = connection.getMetaData().getTables(TEST_CATALOG, "information_schema", "tab%", null)) { assertTableMetadata(rs); - - Set> rows = ImmutableSet.copyOf(readRows(rs)); - assertTrue(rows.contains(getTablesRow("information_schema", "tables"))); - assertFalse(rows.contains(getTablesRow("information_schema", "schemata"))); + assertThat(readRows(rs)) + .contains(getTablesRow("information_schema", "tables")) + .doesNotContain(getTablesRow("information_schema", "schemata")); } } @@ -469,7 +461,7 @@ public void testGetTables() try (Connection connection = createConnection()) { try (ResultSet rs = connection.getMetaData().getTables("unknown", "information_schema", "tables", array("TABLE"))) { assertTableMetadata(rs); - assertEquals(readRows(rs).size(), 0); + assertThat(readRows(rs)).isEmpty(); } } @@ -477,7 +469,7 @@ public void testGetTables() try (Connection connection = createConnection()) { try (ResultSet rs = connection.getMetaData().getTables(TEST_CATALOG, "unknown", "tables", array("TABLE"))) { assertTableMetadata(rs); - assertEquals(readRows(rs).size(), 0); + assertThat(readRows(rs)).isEmpty(); } } @@ -485,7 +477,7 @@ public void testGetTables() try (Connection connection = createConnection()) { try (ResultSet rs = connection.getMetaData().getTables(TEST_CATALOG, "information_schema", "unknown", array("TABLE"))) { 
assertTableMetadata(rs); - assertEquals(readRows(rs).size(), 0); + assertThat(readRows(rs)).isEmpty(); } } @@ -493,17 +485,16 @@ public void testGetTables() try (Connection connection = createConnection()) { try (ResultSet rs = connection.getMetaData().getTables(TEST_CATALOG, "information_schema", "tables", array("unknown"))) { assertTableMetadata(rs); - assertEquals(readRows(rs).size(), 0); + assertThat(readRows(rs)).isEmpty(); } } try (Connection connection = createConnection()) { try (ResultSet rs = connection.getMetaData().getTables(TEST_CATALOG, "information_schema", "tables", array("unknown", "TABLE"))) { assertTableMetadata(rs); - - Set> rows = ImmutableSet.copyOf(readRows(rs)); - assertTrue(rows.contains(getTablesRow("information_schema", "tables"))); - assertFalse(rows.contains(getTablesRow("information_schema", "schemata"))); + assertThat(readRows(rs)) + .contains(getTablesRow("information_schema", "tables")) + .doesNotContain(getTablesRow("information_schema", "schemata")); } } @@ -511,7 +502,7 @@ public void testGetTables() try (Connection connection = createConnection()) { try (ResultSet rs = connection.getMetaData().getTables(TEST_CATALOG, "information_schema", "tables", array())) { assertTableMetadata(rs); - assertEquals(readRows(rs).size(), 0); + assertThat(readRows(rs)).isEmpty(); } } @@ -593,8 +584,8 @@ public void testGetTableTypes() { try (Connection connection = createConnection()) { try (ResultSet tableTypes = connection.getMetaData().getTableTypes()) { - List> data = readRows(tableTypes); - assertEquals(data, list(list("TABLE"), list("VIEW"))); + assertThat(readRows(tableTypes)) + .isEqualTo(list(list("TABLE"), list("VIEW"))); ResultSetMetaData metadata = tableTypes.getMetaData(); assertEquals(metadata.getColumnCount(), 1); @@ -637,35 +628,35 @@ public void testGetColumns() try (Connection connection = createConnection()) { try (ResultSet rs = connection.getMetaData().getColumns(TEST_CATALOG, null, "tables", "table_name")) { 
assertColumnMetadata(rs); - assertEquals(readRows(rs).size(), 1); + assertThat(readRows(rs)).hasSize(1); } } try (Connection connection = createConnection()) { try (ResultSet rs = connection.getMetaData().getColumns(null, "information_schema", "tables", "table_name")) { assertColumnMetadata(rs); - assertEquals(readRows(rs).size(), 4); + assertThat(readRows(rs)).hasSize(4); } } try (Connection connection = createConnection()) { try (ResultSet rs = connection.getMetaData().getColumns(TEST_CATALOG, "information_schema", "tables", "table_name")) { assertColumnMetadata(rs); - assertEquals(readRows(rs).size(), 1); + assertThat(readRows(rs)).hasSize(1); } } try (Connection connection = createConnection()) { try (ResultSet rs = connection.getMetaData().getColumns(TEST_CATALOG, "inf%", "tables", "table_name")) { assertColumnMetadata(rs); - assertEquals(readRows(rs).size(), 1); + assertThat(readRows(rs)).hasSize(1); } } try (Connection connection = createConnection()) { try (ResultSet rs = connection.getMetaData().getColumns(TEST_CATALOG, "information_schema", "tab%", "table_name")) { assertColumnMetadata(rs); - assertEquals(readRows(rs).size(), 2); + assertThat(readRows(rs)).hasSize(2); } } From 1027a8935ce2844a93c699afb8fee4850f091d5c Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Mon, 4 May 2020 23:02:54 +0200 Subject: [PATCH 341/519] Extract counting mock connector --- .../testing/CountingMockConnector.java | 165 ++++++++++++++++++ .../tests/TestInformationSchemaConnector.java | 131 +------------- 2 files changed, 173 insertions(+), 123 deletions(-) create mode 100644 presto-testing/src/main/java/io/prestosql/testing/CountingMockConnector.java diff --git a/presto-testing/src/main/java/io/prestosql/testing/CountingMockConnector.java b/presto-testing/src/main/java/io/prestosql/testing/CountingMockConnector.java new file mode 100644 index 000000000000..317dd132543f --- /dev/null +++ b/presto-testing/src/main/java/io/prestosql/testing/CountingMockConnector.java @@ -0,0 
+1,165 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.testing; + +import com.google.common.collect.ImmutableList; +import io.prestosql.connector.MockConnectorFactory; +import io.prestosql.spi.Plugin; +import io.prestosql.spi.connector.ConnectorFactory; +import io.prestosql.spi.connector.SchemaTableName; + +import java.util.List; +import java.util.Objects; +import java.util.concurrent.atomic.AtomicLong; +import java.util.stream.IntStream; + +import static com.google.common.base.MoreObjects.toStringHelper; +import static com.google.common.collect.ImmutableList.toImmutableList; +import static io.prestosql.connector.MockConnectorFactory.Builder.defaultGetColumns; + +public class CountingMockConnector +{ + private final Object lock = new Object(); + + private final List tablesTestSchema1 = IntStream.range(0, 10000) + .mapToObj(i -> new SchemaTableName("test_schema1", "test_table" + i)) + .collect(toImmutableList()); + + private final List tablesTestSchema2 = IntStream.range(0, 20000) + .mapToObj(i -> new SchemaTableName("test_schema2", "test_table" + i)) + .collect(toImmutableList()); + + private final AtomicLong listSchemasCallsCounter = new AtomicLong(); + private final AtomicLong listTablesCallsCounter = new AtomicLong(); + private final AtomicLong getColumnsCallsCounter = new AtomicLong(); + + public Plugin getPlugin() + { + return new Plugin() + { + @Override + public Iterable getConnectorFactories() + { + return 
ImmutableList.of(getConnectorFactory()); + } + }; + } + + public MetadataCallsCount runCounting(Runnable runnable) + { + synchronized (lock) { + listSchemasCallsCounter.set(0); + listTablesCallsCounter.set(0); + getColumnsCallsCounter.set(0); + + runnable.run(); + + return new MetadataCallsCount() + .withListSchemasCount(listSchemasCallsCounter.get()) + .withListTablesCount(listTablesCallsCounter.get()) + .withGetColumnsCount(getColumnsCallsCounter.get()); + } + } + + private ConnectorFactory getConnectorFactory() + { + MockConnectorFactory mockConnectorFactory = MockConnectorFactory.builder() + .withListSchemaNames(connectorSession -> { + listSchemasCallsCounter.incrementAndGet(); + return ImmutableList.of("test_schema1", "test_schema2"); + }) + .withListTables((connectorSession, schemaName) -> { + listTablesCallsCounter.incrementAndGet(); + if (schemaName.equals("test_schema1")) { + return tablesTestSchema1; + } + if (schemaName.equals("test_schema2")) { + return tablesTestSchema2; + } + return ImmutableList.of(); + }) + .withGetColumns(schemaTableName -> { + getColumnsCallsCounter.incrementAndGet(); + return defaultGetColumns().apply(schemaTableName); + }) + .build(); + + return mockConnectorFactory; + } + + public static final class MetadataCallsCount + { + private final long listSchemasCount; + private final long listTablesCount; + private final long getColumnsCount; + + public MetadataCallsCount() + { + this(0, 0, 0); + } + + public MetadataCallsCount(long listSchemasCount, long listTablesCount, long getColumnsCount) + { + this.listSchemasCount = listSchemasCount; + this.listTablesCount = listTablesCount; + this.getColumnsCount = getColumnsCount; + } + + public MetadataCallsCount withListSchemasCount(long listSchemasCount) + { + return new MetadataCallsCount(listSchemasCount, listTablesCount, getColumnsCount); + } + + public MetadataCallsCount withListTablesCount(long listTablesCount) + { + return new MetadataCallsCount(listSchemasCount, listTablesCount, 
getColumnsCount); + } + + public MetadataCallsCount withGetColumnsCount(long getColumnsCount) + { + return new MetadataCallsCount(listSchemasCount, listTablesCount, getColumnsCount); + } + + @Override + public boolean equals(Object o) + { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + MetadataCallsCount that = (MetadataCallsCount) o; + return listSchemasCount == that.listSchemasCount && + listTablesCount == that.listTablesCount && + getColumnsCount == that.getColumnsCount; + } + + @Override + public int hashCode() + { + return Objects.hash(listSchemasCount, listTablesCount, getColumnsCount); + } + + @Override + public String toString() + { + return toStringHelper(this) + .add("listSchemasCount", listSchemasCount) + .add("listTablesCount", listTablesCount) + .add("getColumnsCount", getColumnsCount) + .toString(); + } + } +} diff --git a/presto-tests/src/test/java/io/prestosql/tests/TestInformationSchemaConnector.java b/presto-tests/src/test/java/io/prestosql/tests/TestInformationSchemaConnector.java index 324d80c14b7d..91423a7a00c0 100644 --- a/presto-tests/src/test/java/io/prestosql/tests/TestInformationSchemaConnector.java +++ b/presto-tests/src/test/java/io/prestosql/tests/TestInformationSchemaConnector.java @@ -13,28 +13,17 @@ */ package io.prestosql.tests; -import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.prestosql.Session; -import io.prestosql.connector.MockConnectorFactory; import io.prestosql.execution.warnings.WarningCollector; import io.prestosql.plugin.tpch.TpchPlugin; -import io.prestosql.spi.Plugin; -import io.prestosql.spi.connector.ConnectorFactory; -import io.prestosql.spi.connector.SchemaTableName; import io.prestosql.sql.planner.plan.TableScanNode; import io.prestosql.testing.AbstractTestQueryFramework; +import io.prestosql.testing.CountingMockConnector; +import io.prestosql.testing.CountingMockConnector.MetadataCallsCount; import 
io.prestosql.testing.DistributedQueryRunner; import org.testng.annotations.Test; -import java.util.List; -import java.util.Objects; -import java.util.concurrent.atomic.AtomicLong; -import java.util.stream.IntStream; - -import static com.google.common.base.MoreObjects.toStringHelper; -import static com.google.common.collect.ImmutableList.toImmutableList; -import static io.prestosql.connector.MockConnectorFactory.Builder.defaultGetColumns; import static io.prestosql.sql.planner.optimizations.PlanNodeSearcher.searchFrom; import static io.prestosql.testing.TestingSession.testSessionBuilder; import static org.testng.Assert.assertEquals; @@ -44,9 +33,7 @@ public class TestInformationSchemaConnector extends AbstractTestQueryFramework { - private static final AtomicLong LIST_SCHEMAS_CALLS_COUNTER = new AtomicLong(); - private static final AtomicLong LIST_TABLES_CALLS_COUNTER = new AtomicLong(); - private static final AtomicLong GET_COLUMNS_CALLS_COUNTER = new AtomicLong(); + private final CountingMockConnector countingMockConnector = new CountingMockConnector(); @Test public void testBasic() @@ -224,40 +211,7 @@ protected DistributedQueryRunner createQueryRunner() queryRunner.installPlugin(new TpchPlugin()); queryRunner.createCatalog("tpch", "tpch"); - queryRunner.installPlugin(new Plugin() - { - @Override - public Iterable getConnectorFactories() - { - List tablesTestSchema1 = IntStream.range(0, 10000) - .mapToObj(i -> new SchemaTableName("test_schema1", "test_table" + i)) - .collect(toImmutableList()); - List tablesTestSchema2 = IntStream.range(0, 20000) - .mapToObj(i -> new SchemaTableName("test_schema2", "test_table" + i)) - .collect(toImmutableList()); - MockConnectorFactory mockConnectorFactory = MockConnectorFactory.builder() - .withListSchemaNames(connectorSession -> { - LIST_SCHEMAS_CALLS_COUNTER.incrementAndGet(); - return ImmutableList.of("test_schema1", "test_schema2"); - }) - .withListTables((connectorSession, schemaName) -> { - 
LIST_TABLES_CALLS_COUNTER.incrementAndGet(); - if (schemaName.equals("test_schema1")) { - return tablesTestSchema1; - } - if (schemaName.equals("test_schema2")) { - return tablesTestSchema2; - } - return ImmutableList.of(); - }) - .withGetColumns(schemaTableName -> { - GET_COLUMNS_CALLS_COUNTER.incrementAndGet(); - return defaultGetColumns().apply(schemaTableName); - }) - .build(); - return ImmutableList.of(mockConnectorFactory); - } - }); + queryRunner.installPlugin(countingMockConnector.getPlugin()); queryRunner.createCatalog("test_catalog", "mock", ImmutableMap.of()); return queryRunner; } @@ -269,14 +223,10 @@ public Iterable getConnectorFactories() private void assertMetadataCalls(String actualSql, String expectedSql, MetadataCallsCount expectedMetadataCallsCount) { - long listSchemasCallsCountBefore = LIST_SCHEMAS_CALLS_COUNTER.get(); - long listTablesCallsCountBefore = LIST_TABLES_CALLS_COUNTER.get(); - long getColumnsCallsCountBefore = GET_COLUMNS_CALLS_COUNTER.get(); - assertQuery(actualSql, expectedSql); - MetadataCallsCount actualMetadataCallsCount = new MetadataCallsCount() - .withListSchemasCount(LIST_SCHEMAS_CALLS_COUNTER.get() - listSchemasCallsCountBefore) - .withListTablesCount(LIST_TABLES_CALLS_COUNTER.get() - listTablesCallsCountBefore) - .withGetColumnsCount(GET_COLUMNS_CALLS_COUNTER.get() - getColumnsCallsCountBefore); + MetadataCallsCount actualMetadataCallsCount = countingMockConnector.runCounting(() -> { + // expectedSql is run on H2, so does not affect counts. 
+ assertQuery(actualSql, expectedSql); + }); assertEquals(actualMetadataCallsCount, expectedMetadataCallsCount); } @@ -289,69 +239,4 @@ private void assertNoTableScan(String query) .isPresent(), "TableScanNode was not expected"); } - - private static class MetadataCallsCount - { - private final long listSchemasCount; - private final long listTablesCount; - private final long getColumnsCount; - - private MetadataCallsCount() - { - this(0, 0, 0); - } - - private MetadataCallsCount(long listSchemasCount, long listTablesCount, long getColumnsCount) - { - this.listSchemasCount = listSchemasCount; - this.listTablesCount = listTablesCount; - this.getColumnsCount = getColumnsCount; - } - - public MetadataCallsCount withListSchemasCount(long listSchemasCount) - { - return new MetadataCallsCount(listSchemasCount, listTablesCount, getColumnsCount); - } - - public MetadataCallsCount withListTablesCount(long listTablesCount) - { - return new MetadataCallsCount(listSchemasCount, listTablesCount, getColumnsCount); - } - - public MetadataCallsCount withGetColumnsCount(long getColumnsCount) - { - return new MetadataCallsCount(listSchemasCount, listTablesCount, getColumnsCount); - } - - @Override - public boolean equals(Object o) - { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - MetadataCallsCount that = (MetadataCallsCount) o; - return listSchemasCount == that.listSchemasCount && - listTablesCount == that.listTablesCount && - getColumnsCount == that.getColumnsCount; - } - - @Override - public int hashCode() - { - return Objects.hash(listSchemasCount, listTablesCount, getColumnsCount); - } - - @Override - public String toString() - { - return toStringHelper(this) - .add("listSchemasCount", listSchemasCount) - .add("listTablesCount", listTablesCount) - .add("getColumnsCount", getColumnsCount) - .toString(); - } - } } From 058556e8b373c303b4dc8b03e165c4bcd58e4a94 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Mon, 4 
May 2020 23:02:55 +0200 Subject: [PATCH 342/519] Reduce number of objects in test `CountingMockConnector` is going to be used to JDBC metadata queries test. JDBC metadata queries sort their results, and the memory required for this is high. --- .../testing/CountingMockConnector.java | 4 ++-- .../tests/TestInformationSchemaConnector.java | 18 +++++++++--------- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/presto-testing/src/main/java/io/prestosql/testing/CountingMockConnector.java b/presto-testing/src/main/java/io/prestosql/testing/CountingMockConnector.java index 317dd132543f..4c8a848cb0b8 100644 --- a/presto-testing/src/main/java/io/prestosql/testing/CountingMockConnector.java +++ b/presto-testing/src/main/java/io/prestosql/testing/CountingMockConnector.java @@ -32,11 +32,11 @@ public class CountingMockConnector { private final Object lock = new Object(); - private final List tablesTestSchema1 = IntStream.range(0, 10000) + private final List tablesTestSchema1 = IntStream.range(0, 1000) .mapToObj(i -> new SchemaTableName("test_schema1", "test_table" + i)) .collect(toImmutableList()); - private final List tablesTestSchema2 = IntStream.range(0, 20000) + private final List tablesTestSchema2 = IntStream.range(0, 2000) .mapToObj(i -> new SchemaTableName("test_schema2", "test_table" + i)) .collect(toImmutableList()); diff --git a/presto-tests/src/test/java/io/prestosql/tests/TestInformationSchemaConnector.java b/presto-tests/src/test/java/io/prestosql/tests/TestInformationSchemaConnector.java index 91423a7a00c0..903d0fc9d58d 100644 --- a/presto-tests/src/test/java/io/prestosql/tests/TestInformationSchemaConnector.java +++ b/presto-tests/src/test/java/io/prestosql/tests/TestInformationSchemaConnector.java @@ -44,7 +44,7 @@ public void testBasic() assertQuery("SELECT * FROM tpch.information_schema.schemata ORDER BY 1 DESC, 2 DESC LIMIT 1", "VALUES ('tpch', 'tiny')"); assertQuery("SELECT * FROM tpch.information_schema.tables ORDER BY 1 DESC, 2 DESC, 3 
DESC, 4 DESC LIMIT 1", "VALUES ('tpch', 'tiny', 'supplier', 'BASE TABLE')"); assertQuery("SELECT * FROM tpch.information_schema.columns ORDER BY 1 DESC, 2 DESC, 3 DESC, 4 DESC LIMIT 1", "VALUES ('tpch', 'tiny', 'supplier', 'suppkey', 1, NULL, 'YES', 'bigint')"); - assertQuery("SELECT count(*) FROM test_catalog.information_schema.columns", "VALUES 3000034"); + assertQuery("SELECT count(*) FROM test_catalog.information_schema.columns", "VALUES 300034"); } @Test @@ -98,7 +98,7 @@ public void testLimit() { assertQuery("SELECT count(*) FROM (SELECT * from tpch.information_schema.columns LIMIT 1)", "VALUES 1"); assertQuery("SELECT count(*) FROM (SELECT * FROM tpch.information_schema.columns LIMIT 100)", "VALUES 100"); - assertQuery("SELECT count(*) FROM (SELECT * FROM test_catalog.information_schema.tables LIMIT 10000)", "VALUES 10000"); + assertQuery("SELECT count(*) FROM (SELECT * FROM test_catalog.information_schema.tables LIMIT 1000)", "VALUES 1000"); } @Test(timeOut = 60_000) @@ -116,24 +116,24 @@ public void testMetadataCalls() .withListSchemasCount(1)); assertMetadataCalls( "SELECT count(*) from test_catalog.information_schema.tables", - "VALUES 30008", + "VALUES 3008", new MetadataCallsCount() .withListSchemasCount(1) .withListTablesCount(2)); assertMetadataCalls( "SELECT count(*) from test_catalog.information_schema.tables WHERE table_schema = 'test_schema1'", - "VALUES 10000", + "VALUES 1000", new MetadataCallsCount() .withListTablesCount(1)); assertMetadataCalls( "SELECT count(*) from test_catalog.information_schema.tables WHERE table_schema LIKE 'test_sch_ma1'", - "VALUES 10000", + "VALUES 1000", new MetadataCallsCount() .withListSchemasCount(1) .withListTablesCount(1)); assertMetadataCalls( "SELECT count(*) from test_catalog.information_schema.tables WHERE table_schema LIKE 'test_sch_ma1' AND table_schema IN ('test_schema1', 'test_schema2')", - "VALUES 10000", + "VALUES 1000", new MetadataCallsCount() .withListTablesCount(2)); assertMetadataCalls( @@ -180,12 
+180,12 @@ public void testMetadataCalls() .withListTablesCount(1) .withGetColumnsCount(0)); assertMetadataCalls( - "SELECT count(*) FROM (SELECT * from test_catalog.information_schema.columns LIMIT 10000)", - "VALUES 10000", + "SELECT count(*) FROM (SELECT * from test_catalog.information_schema.columns LIMIT 1000)", + "VALUES 1000", new MetadataCallsCount() .withListSchemasCount(1) .withListTablesCount(2) - .withGetColumnsCount(10000)); + .withGetColumnsCount(1000)); assertMetadataCalls( "SELECT count(*) from test_catalog.information_schema.tables WHERE table_schema = ''", "VALUES 0", From 6620434cc4601a76975af270ff66f41b1764c603 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Mon, 4 May 2020 23:02:57 +0200 Subject: [PATCH 343/519] Add new catalog in TestPrestoDatabaseMetaData --- presto-jdbc/pom.xml | 6 +++++ .../jdbc/TestPrestoDatabaseMetaData.java | 23 +++++++++++++++++-- 2 files changed, 27 insertions(+), 2 deletions(-) diff --git a/presto-jdbc/pom.xml b/presto-jdbc/pom.xml index 1b4d54d958e7..ee429c62062f 100644 --- a/presto-jdbc/pom.xml +++ b/presto-jdbc/pom.xml @@ -106,6 +106,12 @@ test + + io.prestosql + presto-testing + test + + io.prestosql presto-tpch diff --git a/presto-jdbc/src/test/java/io/prestosql/jdbc/TestPrestoDatabaseMetaData.java b/presto-jdbc/src/test/java/io/prestosql/jdbc/TestPrestoDatabaseMetaData.java index abdf48bf2193..83bbf1f24a03 100644 --- a/presto-jdbc/src/test/java/io/prestosql/jdbc/TestPrestoDatabaseMetaData.java +++ b/presto-jdbc/src/test/java/io/prestosql/jdbc/TestPrestoDatabaseMetaData.java @@ -39,6 +39,7 @@ import io.prestosql.spi.type.TinyintType; import io.prestosql.spi.type.Type; import io.prestosql.spi.type.VarbinaryType; +import io.prestosql.testing.CountingMockConnector; import io.prestosql.type.ColorType; import org.testng.annotations.AfterClass; import org.testng.annotations.AfterMethod; @@ -80,7 +81,9 @@ public class TestPrestoDatabaseMetaData { private static final String TEST_CATALOG = "test_catalog"; + private 
static final String COUNTING_CATALOG = "mock_catalog"; + private CountingMockConnector countingMockConnector; private TestingPrestoServer server; private Connection connection; @@ -105,6 +108,10 @@ public void setupServer() .put("hive.security", "sql-standard") .build()); + countingMockConnector = new CountingMockConnector(); + server.installPlugin(countingMockConnector.getPlugin()); + server.createCatalog(COUNTING_CATALOG, "mock", ImmutableMap.of()); + waitForNodeRefresh(server); try (Connection connection = createConnection(); @@ -128,6 +135,8 @@ public void tearDownServer() throws Exception { server.close(); + server = null; + countingMockConnector = null; } @SuppressWarnings("JDBCResourceOpenedButNotSafelyClosed") @@ -226,7 +235,7 @@ public void testGetCatalogs() try (Connection connection = createConnection()) { try (ResultSet rs = connection.getMetaData().getCatalogs()) { assertThat(readRows(rs)) - .isEqualTo(list(list("blackhole"), list("hive"), list("system"), list(TEST_CATALOG))); + .isEqualTo(list(list("blackhole"), list("hive"), list(COUNTING_CATALOG), list("system"), list(TEST_CATALOG))); ResultSetMetaData metadata = rs.getMetaData(); assertEquals(metadata.getColumnCount(), 1); @@ -244,6 +253,11 @@ public void testGetSchemas() hive.add(list("hive", "information_schema")); hive.add(list("hive", "default")); + List> countingCatalog = new ArrayList<>(); + hive.add(list(COUNTING_CATALOG, "information_schema")); + hive.add(list(COUNTING_CATALOG, "test_schema1")); + hive.add(list(COUNTING_CATALOG, "test_schema2")); + List> system = new ArrayList<>(); system.add(list("system", "information_schema")); system.add(list("system", "jdbc")); @@ -263,6 +277,7 @@ public void testGetSchemas() List> all = new ArrayList<>(); all.addAll(hive); + all.addAll(countingCatalog); all.addAll(system); all.addAll(test); all.addAll(blackhole); @@ -292,6 +307,7 @@ public void testGetSchemas() try (ResultSet rs = connection.getMetaData().getSchemas(null, "information_schema")) { 
assertGetSchemasResult(rs, list( list(TEST_CATALOG, "information_schema"), + list(COUNTING_CATALOG, "information_schema"), list("blackhole", "information_schema"), list("hive", "information_schema"), list("system", "information_schema"))); @@ -613,6 +629,9 @@ public void testGetColumns() assertEquals(rs.getString("TABLE_CAT"), "hive"); assertEquals(rs.getString("TABLE_SCHEM"), "information_schema"); assertTrue(rs.next()); + assertEquals(rs.getString("TABLE_CAT"), COUNTING_CATALOG); + assertEquals(rs.getString("TABLE_SCHEM"), "information_schema"); + assertTrue(rs.next()); assertEquals(rs.getString("TABLE_CAT"), "system"); assertEquals(rs.getString("TABLE_SCHEM"), "information_schema"); assertTrue(rs.next()); @@ -635,7 +654,7 @@ public void testGetColumns() try (Connection connection = createConnection()) { try (ResultSet rs = connection.getMetaData().getColumns(null, "information_schema", "tables", "table_name")) { assertColumnMetadata(rs); - assertThat(readRows(rs)).hasSize(4); + assertThat(readRows(rs)).hasSize(5); } } From 550b8a6d2bc8800540e5d0836103c78cd92cf6e9 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Mon, 4 May 2020 23:44:06 +0200 Subject: [PATCH 344/519] Add metadata operation counting JDBC test --- .../jdbc/TestPrestoDatabaseMetaData.java | 424 +++++++++++++++++- .../testing/CountingMockConnector.java | 8 + 2 files changed, 429 insertions(+), 3 deletions(-) diff --git a/presto-jdbc/src/test/java/io/prestosql/jdbc/TestPrestoDatabaseMetaData.java b/presto-jdbc/src/test/java/io/prestosql/jdbc/TestPrestoDatabaseMetaData.java index 83bbf1f24a03..d4e4326df310 100644 --- a/presto-jdbc/src/test/java/io/prestosql/jdbc/TestPrestoDatabaseMetaData.java +++ b/presto-jdbc/src/test/java/io/prestosql/jdbc/TestPrestoDatabaseMetaData.java @@ -15,6 +15,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableMultiset; import com.google.common.collect.ImmutableSet; import 
io.airlift.log.Logging; import io.prestosql.plugin.blackhole.BlackHolePlugin; @@ -40,6 +41,7 @@ import io.prestosql.spi.type.Type; import io.prestosql.spi.type.VarbinaryType; import io.prestosql.testing.CountingMockConnector; +import io.prestosql.testing.CountingMockConnector.MetadataCallsCount; import io.prestosql.type.ColorType; import org.testng.annotations.AfterClass; import org.testng.annotations.AfterMethod; @@ -56,10 +58,15 @@ import java.sql.Statement; import java.sql.Types; import java.util.ArrayList; +import java.util.Collection; import java.util.List; import java.util.Set; import java.util.concurrent.Callable; +import java.util.function.Consumer; +import java.util.stream.IntStream; +import static com.google.common.base.Verify.verify; +import static com.google.common.collect.ImmutableList.toImmutableList; import static com.google.common.collect.ImmutableSet.toImmutableSet; import static com.google.common.collect.Iterables.getOnlyElement; import static io.airlift.testing.Assertions.assertContains; @@ -71,6 +78,7 @@ import static io.prestosql.spi.type.VarcharType.createVarcharType; import static java.lang.String.format; import static java.util.Arrays.asList; +import static java.util.Objects.requireNonNull; import static java.util.stream.Collectors.toList; import static org.assertj.core.api.Assertions.assertThat; import static org.testng.Assert.assertEquals; @@ -254,9 +262,9 @@ public void testGetSchemas() hive.add(list("hive", "default")); List> countingCatalog = new ArrayList<>(); - hive.add(list(COUNTING_CATALOG, "information_schema")); - hive.add(list(COUNTING_CATALOG, "test_schema1")); - hive.add(list(COUNTING_CATALOG, "test_schema2")); + countingCatalog.add(list(COUNTING_CATALOG, "information_schema")); + countingCatalog.add(list(COUNTING_CATALOG, "test_schema1")); + countingCatalog.add(list(COUNTING_CATALOG, "test_schema2")); List> system = new ArrayList<>(); system.add(list("system", "information_schema")); @@ -911,6 +919,347 @@ public void 
testGetSuperTypes() } } + @Test + @SuppressWarnings("resource") + public void testGetSchemasMetadataCalls() + throws Exception + { + verify(connection.getMetaData().getSearchStringEscape().equals("\\")); // this test uses escape inline for readability + + // No filter + assertMetadataCalls( + readMetaData( + databaseMetaData -> databaseMetaData.getSchemas(null, null), + list("TABLE_CATALOG", "TABLE_SCHEM")), + new MetadataCallsCount() + .withListSchemasCount(1)); + + // Equality predicate on catalog name + assertMetadataCalls( + readMetaData( + databaseMetaData -> databaseMetaData.getSchemas(COUNTING_CATALOG, null), + list("TABLE_CATALOG", "TABLE_SCHEM")), + list( + list(COUNTING_CATALOG, "information_schema"), + list(COUNTING_CATALOG, "test_schema1"), + list(COUNTING_CATALOG, "test_schema2")), + new MetadataCallsCount() + .withListSchemasCount(1)); + + // Equality predicate on schema name + assertMetadataCalls( + readMetaData( + databaseMetaData -> databaseMetaData.getSchemas(COUNTING_CATALOG, "test\\_schema%"), + list("TABLE_CATALOG", "TABLE_SCHEM")), + list( + list(COUNTING_CATALOG, "test_schema1"), + list(COUNTING_CATALOG, "test_schema2")), + new MetadataCallsCount() + .withListSchemasCount(1)); + + // LIKE predicate on schema name + assertMetadataCalls( + readMetaData( + databaseMetaData -> databaseMetaData.getSchemas(COUNTING_CATALOG, "test_sch_ma1"), + list("TABLE_CATALOG", "TABLE_SCHEM")), + list(list(COUNTING_CATALOG, "test_schema1")), + new MetadataCallsCount() + .withListSchemasCount(1)); + + // Empty schema name + assertMetadataCalls( + readMetaData( + databaseMetaData -> databaseMetaData.getSchemas(COUNTING_CATALOG, ""), + list("TABLE_CATALOG", "TABLE_SCHEM")), + list(), + new MetadataCallsCount() + .withListSchemasCount(1)); + + // catalog does not exist + assertMetadataCalls( + readMetaData( + databaseMetaData -> databaseMetaData.getSchemas("wrong", null), + list("TABLE_CATALOG", "TABLE_SCHEM")), + list(), + new MetadataCallsCount()); + } + + @Test + 
@SuppressWarnings("resource") + public void testGetTablesMetadataCalls() + throws Exception + { + verify(connection.getMetaData().getSearchStringEscape().equals("\\")); // this test uses escape inline for readability + + // No filter + assertMetadataCalls( + readMetaData( + databaseMetaData -> databaseMetaData.getTables(null, null, null, null), + list("TABLE_CAT", "TABLE_SCHEM", "TABLE_NAME", "TABLE_TYPE")), + new MetadataCallsCount() + .withListSchemasCount(1) + .withListTablesCount(2)); + + // Equality predicate on catalog name + assertMetadataCalls( + readMetaData( + databaseMetaData -> databaseMetaData.getTables(COUNTING_CATALOG, null, null, null), + list("TABLE_CAT", "TABLE_SCHEM", "TABLE_NAME", "TABLE_TYPE")), + new MetadataCallsCount() + .withListSchemasCount(1) + .withListTablesCount(2)); + + // Equality predicate on schema name + assertMetadataCalls( + readMetaData( + databaseMetaData -> databaseMetaData.getTables(COUNTING_CATALOG, "test\\_schema1", null, null), + list("TABLE_CAT", "TABLE_SCHEM", "TABLE_NAME", "TABLE_TYPE")), + countingMockConnector.getAllTables() + .filter(schemaTableName -> schemaTableName.getSchemaName().equals("test_schema1")) + .map(schemaTableName -> list(COUNTING_CATALOG, schemaTableName.getSchemaName(), schemaTableName.getTableName(), "TABLE")) + .collect(toImmutableList()), + new MetadataCallsCount() + .withListTablesCount(1)); + + // LIKE predicate on schema name + assertMetadataCalls( + readMetaData( + databaseMetaData -> databaseMetaData.getTables(COUNTING_CATALOG, "test_sch_ma1", null, null), + list("TABLE_CAT", "TABLE_SCHEM", "TABLE_NAME", "TABLE_TYPE")), + countingMockConnector.getAllTables() + .filter(schemaTableName -> schemaTableName.getSchemaName().equals("test_schema1")) + .map(schemaTableName -> list(COUNTING_CATALOG, schemaTableName.getSchemaName(), schemaTableName.getTableName(), "TABLE")) + .collect(toImmutableList()), + new MetadataCallsCount() + .withListSchemasCount(1) + .withListTablesCount(2)); + + // Equality 
predicate on table name + assertMetadataCalls( + readMetaData( + databaseMetaData -> databaseMetaData.getTables(COUNTING_CATALOG, null, "test\\_table1", null), + list("TABLE_CAT", "TABLE_SCHEM", "TABLE_NAME", "TABLE_TYPE")), + list( + list(COUNTING_CATALOG, "test_schema1", "test_table1", "TABLE"), + list(COUNTING_CATALOG, "test_schema2", "test_table1", "TABLE")), + new MetadataCallsCount() + .withListSchemasCount(1) + .withListTablesCount(2)); + + // LIKE predicate on table name + assertMetadataCalls( + readMetaData( + databaseMetaData -> databaseMetaData.getTables(COUNTING_CATALOG, null, "test_t_ble1", null), + list("TABLE_CAT", "TABLE_SCHEM", "TABLE_NAME", "TABLE_TYPE")), + list( + list(COUNTING_CATALOG, "test_schema1", "test_table1", "TABLE"), + list(COUNTING_CATALOG, "test_schema2", "test_table1", "TABLE")), + new MetadataCallsCount() + .withListSchemasCount(1) + .withListTablesCount(2)); + + // Equality predicate on schema name and table name + assertMetadataCalls( + readMetaData( + databaseMetaData -> databaseMetaData.getTables(COUNTING_CATALOG, "test\\_schema1", "test\\_table1", null), + list("TABLE_CAT", "TABLE_SCHEM", "TABLE_NAME", "TABLE_TYPE")), + list(list(COUNTING_CATALOG, "test_schema1", "test_table1", "TABLE")), + new MetadataCallsCount()); + + // LIKE predicate on schema name and table name + assertMetadataCalls( + readMetaData( + databaseMetaData -> databaseMetaData.getTables(COUNTING_CATALOG, "test_schema1", "test_table1", null), + list("TABLE_CAT", "TABLE_SCHEM", "TABLE_NAME", "TABLE_TYPE")), + list(list(COUNTING_CATALOG, "test_schema1", "test_table1", "TABLE")), + new MetadataCallsCount() + .withListSchemasCount(1) + .withListTablesCount(2)); + + // catalog does not exist + assertMetadataCalls( + readMetaData( + databaseMetaData -> databaseMetaData.getTables("wrong", null, null, null), + list("TABLE_CAT", "TABLE_SCHEM", "TABLE_NAME", "TABLE_TYPE")), + list(), + new MetadataCallsCount()); + + // empty schema name + assertMetadataCalls( + 
readMetaData( + databaseMetaData -> databaseMetaData.getTables(COUNTING_CATALOG, "", null, null), + list("TABLE_CAT", "TABLE_SCHEM", "TABLE_NAME", "TABLE_TYPE")), + list(), + new MetadataCallsCount() + .withListSchemasCount(1) + .withListTablesCount(2)); + + // empty table name + assertMetadataCalls( + readMetaData( + databaseMetaData -> databaseMetaData.getTables(COUNTING_CATALOG, null, "", null), + list("TABLE_CAT", "TABLE_SCHEM", "TABLE_NAME", "TABLE_TYPE")), + list(), + new MetadataCallsCount() + .withListSchemasCount(1) + .withListTablesCount(2)); + + // no table types selected + assertMetadataCalls( + readMetaData( + databaseMetaData -> databaseMetaData.getTables(COUNTING_CATALOG, null, null, new String[0]), + list("TABLE_CAT", "TABLE_SCHEM", "TABLE_NAME", "TABLE_TYPE")), + list(), + new MetadataCallsCount()); + } + + @Test + @SuppressWarnings("resource") + public void testGetColumnsMetadataCalls() + throws Exception + { + verify(connection.getMetaData().getSearchStringEscape().equals("\\")); // this test uses escape inline for readability + + // No filter + assertMetadataCalls( + readMetaData( + databaseMetaData -> databaseMetaData.getColumns(null, null, null, null), + list("TABLE_CAT", "TABLE_SCHEM", "TABLE_NAME", "COLUMN_NAME", "TYPE_NAME")), + new MetadataCallsCount() + .withListSchemasCount(1) + .withListTablesCount(2) + .withGetColumnsCount(3000)); + + // Equality predicate on catalog name + assertMetadataCalls( + readMetaData( + databaseMetaData -> databaseMetaData.getColumns(COUNTING_CATALOG, null, null, null), + list("TABLE_CAT", "TABLE_SCHEM", "TABLE_NAME", "COLUMN_NAME", "TYPE_NAME")), + new MetadataCallsCount() + .withListSchemasCount(1) + .withListTablesCount(2) + .withGetColumnsCount(3000)); + + // Equality predicate on catalog name, schema name and table name + assertMetadataCalls( + readMetaData( + databaseMetaData -> databaseMetaData.getColumns(COUNTING_CATALOG, "test\\_schema1", "test\\_table1", null), + list("TABLE_CAT", "TABLE_SCHEM", 
"TABLE_NAME", "COLUMN_NAME", "TYPE_NAME")), + IntStream.range(0, 100) + .mapToObj(i -> list(COUNTING_CATALOG, "test_schema1", "test_table1", "column_" + i, "varchar")) + .collect(toImmutableList()), + new MetadataCallsCount() + .withListTablesCount(1) + .withGetColumnsCount(1)); + + // Equality predicate on catalog name, schema name, table name and column name + assertMetadataCalls( + readMetaData( + databaseMetaData -> databaseMetaData.getColumns(COUNTING_CATALOG, "test\\_schema1", "test\\_table1", "column\\_17"), + list("TABLE_CAT", "TABLE_SCHEM", "TABLE_NAME", "COLUMN_NAME", "TYPE_NAME")), + list(list(COUNTING_CATALOG, "test_schema1", "test_table1", "column_17", "varchar")), + new MetadataCallsCount() + .withListTablesCount(1) + .withGetColumnsCount(1)); + + // Equality predicate on catalog name, LIKE predicate on schema name, table name and column name + assertMetadataCalls( + readMetaData( + databaseMetaData -> databaseMetaData.getColumns(COUNTING_CATALOG, "test_schema1", "test_table1", "column_17"), + list("TABLE_CAT", "TABLE_SCHEM", "TABLE_NAME", "COLUMN_NAME", "TYPE_NAME")), + list(list(COUNTING_CATALOG, "test_schema1", "test_table1", "column_17", "varchar")), + new MetadataCallsCount() + .withListSchemasCount(1) + .withListTablesCount(2) + .withGetColumnsCount(3000)); + + // LIKE predicate on schema name and table name, but no predicate on catalog name + assertMetadataCalls( + readMetaData( + databaseMetaData -> databaseMetaData.getColumns(null, "test_schema1", "test_table1", null), + list("TABLE_CAT", "TABLE_SCHEM", "TABLE_NAME", "COLUMN_NAME", "TYPE_NAME")), + IntStream.range(0, 100) + .mapToObj(i -> list(COUNTING_CATALOG, "test_schema1", "test_table1", "column_" + i, "varchar")) + .collect(toImmutableList()), + new MetadataCallsCount() + .withListSchemasCount(1) + .withListTablesCount(2) + .withGetColumnsCount(3000)); // TODO (https://github.com/prestosql/presto/issues/1620) + + // Equality predicate on schema name and table name, but no predicate on 
catalog name + assertMetadataCalls( + readMetaData( + databaseMetaData -> databaseMetaData.getColumns(null, "test\\_schema1", "test\\_table1", null), + list("TABLE_CAT", "TABLE_SCHEM", "TABLE_NAME", "COLUMN_NAME", "TYPE_NAME")), + IntStream.range(0, 100) + .mapToObj(i -> list(COUNTING_CATALOG, "test_schema1", "test_table1", "column_" + i, "varchar")) + .collect(toImmutableList()), + new MetadataCallsCount() + .withListTablesCount(1) + .withGetColumnsCount(1)); + + // catalog does not exist + assertMetadataCalls( + readMetaData( + databaseMetaData -> databaseMetaData.getColumns("wrong", null, null, null), + list("TABLE_CAT", "TABLE_SCHEM", "TABLE_NAME", "COLUMN_NAME", "TYPE_NAME")), + list(), + new MetadataCallsCount()); + + // schema does not exist + assertMetadataCalls( + readMetaData( + databaseMetaData -> databaseMetaData.getColumns(COUNTING_CATALOG, "wrong\\_schema1", "test\\_table1", null), + list("TABLE_CAT", "TABLE_SCHEM", "TABLE_NAME", "COLUMN_NAME", "TYPE_NAME")), + list(), + new MetadataCallsCount() + .withListTablesCount(1)); + + // schema does not exist + assertMetadataCalls( + readMetaData( + databaseMetaData -> databaseMetaData.getColumns(COUNTING_CATALOG, "wrong_schema1", "test_table1", null), + list("TABLE_CAT", "TABLE_SCHEM", "TABLE_NAME", "COLUMN_NAME", "TYPE_NAME")), + list(), + new MetadataCallsCount() + .withListSchemasCount(1) + .withListTablesCount(2) + .withGetColumnsCount(3000)); + + // empty schema name + assertMetadataCalls( + readMetaData( + databaseMetaData -> databaseMetaData.getColumns(COUNTING_CATALOG, "", null, null), + list("TABLE_CAT", "TABLE_SCHEM", "TABLE_NAME", "COLUMN_NAME", "TYPE_NAME")), + list(), + new MetadataCallsCount() + .withListSchemasCount(1) + .withListTablesCount(2) + .withGetColumnsCount(3000)); + + // empty table name + assertMetadataCalls( + readMetaData( + databaseMetaData -> databaseMetaData.getColumns(COUNTING_CATALOG, null, "", null), + list("TABLE_CAT", "TABLE_SCHEM", "TABLE_NAME", "COLUMN_NAME", 
"TYPE_NAME")), + list(), + new MetadataCallsCount() + .withListSchemasCount(1) + .withListTablesCount(2) + .withGetColumnsCount(3000)); + + // empty column name + assertMetadataCalls( + readMetaData( + databaseMetaData -> databaseMetaData.getColumns(COUNTING_CATALOG, null, null, ""), + list("TABLE_CAT", "TABLE_SCHEM", "TABLE_NAME", "COLUMN_NAME", "TYPE_NAME")), + list(), + new MetadataCallsCount() + .withListSchemasCount(1) + .withListTablesCount(2) + .withGetColumnsCount(3000)); + } + private static void assertColumnSpec(ResultSet rs, int dataType, Long precision, Long numPrecRadix, String typeName) throws SQLException { @@ -950,6 +1299,55 @@ private Set captureQueries(Callable action) .collect(toImmutableSet()); } + private void assertMetadataCalls(MetaDataCallback>> callback, MetadataCallsCount expectedMetadataCallsCount) + throws Exception + { + assertMetadataCalls( + callback, + actual -> {}, + expectedMetadataCallsCount); + } + + private void assertMetadataCalls(MetaDataCallback>> callback, Collection> expected, MetadataCallsCount expectedMetadataCallsCount) + throws Exception + { + assertMetadataCalls( + callback, + actual -> assertThat(ImmutableMultiset.copyOf(requireNonNull(actual, "actual is null"))) + .isEqualTo(ImmutableMultiset.copyOf(requireNonNull(expected, "expected is null"))), + expectedMetadataCallsCount); + } + + private void assertMetadataCalls( + MetaDataCallback>> callback, + Consumer>> resultsVerification, + MetadataCallsCount expectedMetadataCallsCount) + throws Exception + { + MetadataCallsCount actualMetadataCallsCount; + try (Connection connection = createConnection()) { + actualMetadataCallsCount = countingMockConnector.runCounting(() -> { + try { + Collection> actual = callback.apply(connection.getMetaData()); + resultsVerification.accept(actual); + } + catch (SQLException e) { + throw new RuntimeException(e); + } + }); + } + assertEquals(actualMetadataCallsCount, expectedMetadataCallsCount); + } + + private MetaDataCallback>> 
readMetaData(MetaDataCallback query, List columns) + { + return metaData -> { + try (ResultSet resultSet = query.apply(metaData)) { + return readRows(resultSet, columns); + } + }; + } + private Connection createConnection() throws SQLException { @@ -979,6 +1377,20 @@ private static List> readRows(ResultSet rs) return rows.build(); } + private static List> readRows(ResultSet rs, List columns) + throws SQLException + { + ImmutableList.Builder> rows = ImmutableList.builder(); + while (rs.next()) { + List row = new ArrayList<>(); + for (String column : columns) { + row.add(rs.getObject(column)); + } + rows.add(row); + } + return rows.build(); + } + @SafeVarargs private static List list(T... elements) { @@ -990,4 +1402,10 @@ private static T[] array(T... elements) { return elements; } + + private interface MetaDataCallback + { + T apply(DatabaseMetaData metaData) + throws SQLException; + } } diff --git a/presto-testing/src/main/java/io/prestosql/testing/CountingMockConnector.java b/presto-testing/src/main/java/io/prestosql/testing/CountingMockConnector.java index 4c8a848cb0b8..fd6ee76b7a8a 100644 --- a/presto-testing/src/main/java/io/prestosql/testing/CountingMockConnector.java +++ b/presto-testing/src/main/java/io/prestosql/testing/CountingMockConnector.java @@ -23,6 +23,7 @@ import java.util.Objects; import java.util.concurrent.atomic.AtomicLong; import java.util.stream.IntStream; +import java.util.stream.Stream; import static com.google.common.base.MoreObjects.toStringHelper; import static com.google.common.collect.ImmutableList.toImmutableList; @@ -56,6 +57,13 @@ public Iterable getConnectorFactories() }; } + public Stream getAllTables() + { + return Stream.concat( + tablesTestSchema1.stream(), + tablesTestSchema2.stream()); + } + public MetadataCallsCount runCounting(Runnable runnable) { synchronized (lock) { From 56dc6bb42950b4528d9157573afee869c81cc810 Mon Sep 17 00:00:00 2001 From: Martin Traverso Date: Sun, 19 Apr 2020 19:07:33 -0700 Subject: [PATCH 345/519] 
Fix failure when unnest inputs are repeated The mappings to unnest are keyed based on input symbol. When input expression are repeated, the deduplication logic replaces them with references to the same symbol. This causes a conflict that results in a planning failure. --- .../sql/planner/LocalExecutionPlanner.java | 12 +++- .../sql/planner/RelationPlanner.java | 5 +- .../sql/planner/SubqueryPlanner.java | 2 +- .../iterative/rule/ExtractSpatialJoins.java | 2 +- .../HashGenerationOptimizer.java | 2 +- .../optimizations/PredicatePushDown.java | 2 +- .../PruneUnreferencedOutputs.java | 21 ++++--- .../UnaliasSymbolReferences.java | 11 ++-- .../sql/planner/plan/UnnestNode.java | 58 ++++++++++++++----- .../sql/planner/planprinter/PlanPrinter.java | 7 ++- .../sanity/ValidateDependenciesChecker.java | 19 +++--- .../io/prestosql/util/GraphvizPrinter.java | 7 ++- .../io/prestosql/sql/query/TestUnnest.java | 8 +++ 13 files changed, 112 insertions(+), 44 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/LocalExecutionPlanner.java b/presto-main/src/main/java/io/prestosql/sql/planner/LocalExecutionPlanner.java index 82f18841f10b..2a9fd5bf74b4 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/LocalExecutionPlanner.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/LocalExecutionPlanner.java @@ -1393,7 +1393,11 @@ public PhysicalOperation visitUnnest(UnnestNode node, LocalExecutionPlanContext for (Symbol symbol : node.getReplicateSymbols()) { replicateTypes.add(context.getTypes().get(symbol)); } - List unnestSymbols = ImmutableList.copyOf(node.getUnnestSymbols().keySet()); + + List unnestSymbols = node.getMappings().stream() + .map(UnnestNode.Mapping::getInput) + .collect(toImmutableList()); + ImmutableList.Builder unnestTypes = ImmutableList.builder(); for (Symbol symbol : unnestSymbols) { unnestTypes.add(context.getTypes().get(symbol)); @@ -1412,12 +1416,14 @@ public PhysicalOperation visitUnnest(UnnestNode node, 
LocalExecutionPlanContext outputMappings.put(symbol, channel); channel++; } - for (Symbol symbol : unnestSymbols) { - for (Symbol unnestedSymbol : node.getUnnestSymbols().get(symbol)) { + + for (UnnestNode.Mapping mapping : node.getMappings()) { + for (Symbol unnestedSymbol : mapping.getOutputs()) { outputMappings.put(unnestedSymbol, channel); channel++; } } + if (ordinalitySymbol.isPresent()) { outputMappings.put(ordinalitySymbol.get(), channel); channel++; diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/RelationPlanner.java b/presto-main/src/main/java/io/prestosql/sql/planner/RelationPlanner.java index 51fb376e0a01..031aa15ac001 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/RelationPlanner.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/RelationPlanner.java @@ -689,14 +689,15 @@ private RelationPlan planUnnest(PlanBuilder subPlan, Unnest node, List r .collect(toImmutableMap(Function.identity(), symbolAllocator::newSymbol)); UnnestAnalysis unnestAnalysis = analysis.getUnnest(node); - ImmutableMap.Builder> mappings = ImmutableMap.builder(); + + ImmutableList.Builder mappings = ImmutableList.builder(); for (Expression expression : node.getExpressions()) { Symbol input = subPlan.translate(expression); List outputs = unnestAnalysis.getMappings().get(NodeRef.of(expression)).stream() .map(allocations::get) .collect(toImmutableList()); - mappings.put(input, outputs); + mappings.add(new UnnestNode.Mapping(input, outputs)); } UnnestNode unnestNode = new UnnestNode( diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/SubqueryPlanner.java b/presto-main/src/main/java/io/prestosql/sql/planner/SubqueryPlanner.java index e520d028ac62..0949faeb2f34 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/SubqueryPlanner.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/SubqueryPlanner.java @@ -574,7 +574,7 @@ public PlanNode visitUnnest(UnnestNode node, RewriteContext context) node.getId(), 
rewrittenNode.getSource(), rewrittenNode.getReplicateSymbols(), - rewrittenNode.getUnnestSymbols(), + rewrittenNode.getMappings(), rewrittenNode.getOrdinalitySymbol(), rewrittenNode.getJoinType(), rewrittenNode.getFilter().map(expression -> replaceExpression(expression, mapping))); diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/ExtractSpatialJoins.java b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/ExtractSpatialJoins.java index f3de73cbbea4..a537bb3190e7 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/ExtractSpatialJoins.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/ExtractSpatialJoins.java @@ -607,7 +607,7 @@ private static PlanNode addPartitioningNodes(Metadata metadata, Context context, context.getIdAllocator().getNextId(), new ProjectNode(context.getIdAllocator().getNextId(), node, projections.build()), node.getOutputSymbols(), - ImmutableMap.of(partitionsSymbol, ImmutableList.of(partitionSymbol)), + ImmutableList.of(new UnnestNode.Mapping(partitionsSymbol, ImmutableList.of(partitionSymbol))), Optional.empty(), INNER, Optional.empty()); diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/HashGenerationOptimizer.java b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/HashGenerationOptimizer.java index 630466fa409c..574ec24e03d8 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/HashGenerationOptimizer.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/HashGenerationOptimizer.java @@ -668,7 +668,7 @@ public PlanWithProperties visitUnnest(UnnestNode node, HashComputationSet parent .addAll(node.getReplicateSymbols()) .addAll(hashSymbols.values()) .build(), - node.getUnnestSymbols(), + node.getMappings(), node.getOrdinalitySymbol(), node.getJoinType(), node.getFilter()), diff --git 
a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PredicatePushDown.java b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PredicatePushDown.java index 2af13eebc14a..3b9dafb042e0 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PredicatePushDown.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PredicatePushDown.java @@ -1387,7 +1387,7 @@ public PlanNode visitUnnest(UnnestNode node, RewriteContext context) PlanNode output = node; if (rewrittenSource != node.getSource()) { - output = new UnnestNode(node.getId(), rewrittenSource, node.getReplicateSymbols(), node.getUnnestSymbols(), node.getOrdinalitySymbol(), node.getJoinType(), node.getFilter()); + output = new UnnestNode(node.getId(), rewrittenSource, node.getReplicateSymbols(), node.getMappings(), node.getOrdinalitySymbol(), node.getJoinType(), node.getFilter()); } if (!postUnnestConjuncts.isEmpty()) { output = new FilterNode(idAllocator.getNextId(), output, combineConjuncts(metadata, postUnnestConjuncts)); diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PruneUnreferencedOutputs.java b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PruneUnreferencedOutputs.java index 2e3a3add5095..f021b06004cb 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PruneUnreferencedOutputs.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PruneUnreferencedOutputs.java @@ -519,19 +519,26 @@ public PlanNode visitUnnest(UnnestNode node, RewriteContext> context if (ordinalitySymbol.isPresent() && !context.get().contains(ordinalitySymbol.get())) { ordinalitySymbol = Optional.empty(); } - Map> unnestSymbols = node.getUnnestSymbols(); + List mappings = node.getMappings(); ImmutableSet.Builder expectedInputs = ImmutableSet.builder() - .addAll(replicateSymbols) - .addAll(unnestSymbols.keySet()); + .addAll(replicateSymbols); + + mappings.stream() + 
.map(UnnestNode.Mapping::getInput) + .forEach(expectedInputs::add); + ImmutableSet.Builder unnestedSymbols = ImmutableSet.builder(); - for (List symbols : unnestSymbols.values()) { - unnestedSymbols.addAll(symbols); - } + + mappings.stream() + .map(UnnestNode.Mapping::getOutputs) + .flatMap(Collection::stream) + .forEach(unnestedSymbols::add); + Set expectedFilterSymbols = Sets.difference(SymbolsExtractor.extractUnique(node.getFilter().orElse(TRUE_LITERAL)), unnestedSymbols.build()); expectedInputs.addAll(expectedFilterSymbols); PlanNode source = context.rewrite(node.getSource(), expectedInputs.build()); - return new UnnestNode(node.getId(), source, replicateSymbols, unnestSymbols, ordinalitySymbol, node.getJoinType(), node.getFilter()); + return new UnnestNode(node.getId(), source, replicateSymbols, mappings, ordinalitySymbol, node.getJoinType(), node.getFilter()); } @Override diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/UnaliasSymbolReferences.java b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/UnaliasSymbolReferences.java index ecf184da1052..ff9ea141fc0e 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/UnaliasSymbolReferences.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/UnaliasSymbolReferences.java @@ -189,15 +189,18 @@ public PlanNode visitMarkDistinct(MarkDistinctNode node, RewriteContext co public PlanNode visitUnnest(UnnestNode node, RewriteContext context) { PlanNode source = context.rewrite(node.getSource()); - ImmutableMap.Builder> builder = ImmutableMap.builder(); - for (Map.Entry> entry : node.getUnnestSymbols().entrySet()) { - builder.put(canonicalize(entry.getKey()), entry.getValue()); + + ImmutableList.Builder mappings = ImmutableList.builder(); + + for (UnnestNode.Mapping mapping : node.getMappings()) { + mappings.add(new UnnestNode.Mapping(canonicalize(mapping.getInput()), mapping.getOutputs())); } + return new UnnestNode( node.getId(), 
source, canonicalizeAndDistinct(node.getReplicateSymbols()), - builder.build(), + mappings.build(), node.getOrdinalitySymbol(), node.getJoinType(), node.getFilter().map(this::canonicalize)); diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/plan/UnnestNode.java b/presto-main/src/main/java/io/prestosql/sql/planner/plan/UnnestNode.java index c77cfff18daf..d482b1e148f2 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/plan/UnnestNode.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/plan/UnnestNode.java @@ -16,7 +16,6 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import io.prestosql.sql.planner.Symbol; import io.prestosql.sql.planner.plan.JoinNode.Type; @@ -24,11 +23,12 @@ import javax.annotation.concurrent.Immutable; +import java.util.Collection; import java.util.List; -import java.util.Map; import java.util.Optional; import static com.google.common.base.Preconditions.checkArgument; +import static com.google.common.collect.ImmutableList.toImmutableList; import static java.util.Objects.requireNonNull; @Immutable @@ -37,7 +37,7 @@ public class UnnestNode { private final PlanNode source; private final List replicateSymbols; - private final Map> unnestSymbols; + private final List mappings; private final Optional ordinalitySymbol; private final Type joinType; private final Optional filter; @@ -47,7 +47,7 @@ public UnnestNode( @JsonProperty("id") PlanNodeId id, @JsonProperty("source") PlanNode source, @JsonProperty("replicateSymbols") List replicateSymbols, - @JsonProperty("unnestSymbols") Map> unnestSymbols, + @JsonProperty("mappings") List mappings, @JsonProperty("ordinalitySymbol") Optional ordinalitySymbol, @JsonProperty("joinType") Type joinType, @JsonProperty("filter") Optional filter) @@ -56,13 +56,9 @@ public UnnestNode( 
this.source = requireNonNull(source, "source is null"); this.replicateSymbols = ImmutableList.copyOf(requireNonNull(replicateSymbols, "replicateSymbols is null")); checkArgument(source.getOutputSymbols().containsAll(replicateSymbols), "Source does not contain all replicateSymbols"); - requireNonNull(unnestSymbols, "unnestSymbols is null"); - checkArgument(!unnestSymbols.isEmpty(), "unnestSymbols is empty"); - ImmutableMap.Builder> builder = ImmutableMap.builder(); - for (Map.Entry> entry : unnestSymbols.entrySet()) { - builder.put(entry.getKey(), ImmutableList.copyOf(entry.getValue())); - } - this.unnestSymbols = builder.build(); + requireNonNull(mappings, "mappings is null"); + checkArgument(!mappings.isEmpty(), "mappings is empty"); + this.mappings = ImmutableList.copyOf(mappings); this.ordinalitySymbol = requireNonNull(ordinalitySymbol, "ordinalitySymbol is null"); this.joinType = requireNonNull(joinType, "type is null"); this.filter = requireNonNull(filter, "filter is null"); @@ -73,7 +69,11 @@ public List getOutputSymbols() { ImmutableList.Builder outputSymbolsBuilder = ImmutableList.builder() .addAll(replicateSymbols) - .addAll(Iterables.concat(unnestSymbols.values())); + .addAll(mappings.stream() + .map(Mapping::getOutputs) + .flatMap(Collection::stream) + .collect(toImmutableList())); + ordinalitySymbol.ifPresent(outputSymbolsBuilder::add); return outputSymbolsBuilder.build(); } @@ -91,9 +91,9 @@ public List getReplicateSymbols() } @JsonProperty - public Map> getUnnestSymbols() + public List getMappings() { - return unnestSymbols; + return mappings; } @JsonProperty @@ -129,6 +129,34 @@ public R accept(PlanVisitor visitor, C context) @Override public PlanNode replaceChildren(List newChildren) { - return new UnnestNode(getId(), Iterables.getOnlyElement(newChildren), replicateSymbols, unnestSymbols, ordinalitySymbol, joinType, filter); + return new UnnestNode(getId(), Iterables.getOnlyElement(newChildren), replicateSymbols, mappings, ordinalitySymbol, 
joinType, filter); + } + + public static class Mapping + { + private final Symbol input; + private final List outputs; + + @JsonCreator + public Mapping( + @JsonProperty("input") Symbol input, + @JsonProperty("outputs") List outputs) + { + this.input = requireNonNull(input, "input is null"); + requireNonNull(outputs, "outputs is null"); + this.outputs = ImmutableList.copyOf(outputs); + } + + @JsonProperty + public Symbol getInput() + { + return input; + } + + @JsonProperty + public List getOutputs() + { + return outputs; + } } } diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/planprinter/PlanPrinter.java b/presto-main/src/main/java/io/prestosql/sql/planner/planprinter/PlanPrinter.java index 8c01610ac044..5630429fc2dd 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/planprinter/PlanPrinter.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/planprinter/PlanPrinter.java @@ -873,10 +873,15 @@ else if (!node.getReplicateSymbols().isEmpty()) { else { name = "Unnest"; } + + List unnestInputs = node.getMappings().stream() + .map(UnnestNode.Mapping::getInput) + .collect(toImmutableList()); + addNode( node, name, - format("[replicate=%s, unnest=%s", formatOutputs(types, node.getReplicateSymbols()), formatOutputs(types, node.getUnnestSymbols().keySet())) + format("[replicate=%s, unnest=%s", formatOutputs(types, node.getReplicateSymbols()), formatOutputs(types, unnestInputs)) + (node.getFilter().isPresent() ? 
format(", filter=%s]", node.getFilter().get().toString()) : "]")); return processChildren(node, context); } diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/sanity/ValidateDependenciesChecker.java b/presto-main/src/main/java/io/prestosql/sql/planner/sanity/ValidateDependenciesChecker.java index 6ff1da6bab7a..bbe8cce49728 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/sanity/ValidateDependenciesChecker.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/sanity/ValidateDependenciesChecker.java @@ -497,13 +497,18 @@ public Void visitUnnest(UnnestNode node, Set boundSymbols) source.accept(this, boundSymbols); ImmutableSet.Builder required = ImmutableSet.builder() - .addAll(node.getReplicateSymbols()) - .addAll(node.getUnnestSymbols().keySet()); - ImmutableSet.Builder unnestedSymbols = ImmutableSet.builder(); - for (List symbols : node.getUnnestSymbols().values()) { - unnestedSymbols.addAll(symbols); - } - Set expectedFilterSymbols = Sets.difference(SymbolsExtractor.extractUnique(node.getFilter().orElse(TRUE_LITERAL)), unnestedSymbols.build()); + .addAll(node.getReplicateSymbols()); + + node.getMappings().stream() + .map(UnnestNode.Mapping::getInput) + .forEach(required::add); + + Set unnestedSymbols = node.getMappings().stream() + .map(UnnestNode.Mapping::getOutputs) + .flatMap(Collection::stream) + .collect(toImmutableSet()); + + Set expectedFilterSymbols = Sets.difference(SymbolsExtractor.extractUnique(node.getFilter().orElse(TRUE_LITERAL)), unnestedSymbols); required.addAll(expectedFilterSymbols); checkDependencies(source.getOutputSymbols(), required.build(), "Invalid node. 
Dependencies (%s) not in source plan output (%s)", required, source.getOutputSymbols()); diff --git a/presto-main/src/main/java/io/prestosql/util/GraphvizPrinter.java b/presto-main/src/main/java/io/prestosql/util/GraphvizPrinter.java index 2058a68ef475..6a6b55e5f874 100644 --- a/presto-main/src/main/java/io/prestosql/util/GraphvizPrinter.java +++ b/presto-main/src/main/java/io/prestosql/util/GraphvizPrinter.java @@ -398,7 +398,12 @@ else if (!node.getReplicateSymbols().isEmpty()) { else { label.append("Unnest"); } - label.append(format(" [%s", node.getUnnestSymbols().keySet())) + + List unnestInputs = node.getMappings().stream() + .map(UnnestNode.Mapping::getInput) + .collect(toImmutableList()); + + label.append(format(" [%s", unnestInputs)) .append(node.getOrdinalitySymbol().isPresent() ? " (ordinality)]" : "]"); String details = node.getFilter().isPresent() ? " filter " + node.getFilter().get().toString() : ""; diff --git a/presto-main/src/test/java/io/prestosql/sql/query/TestUnnest.java b/presto-main/src/test/java/io/prestosql/sql/query/TestUnnest.java index bdfc346194ff..2997e505683e 100644 --- a/presto-main/src/test/java/io/prestosql/sql/query/TestUnnest.java +++ b/presto-main/src/test/java/io/prestosql/sql/query/TestUnnest.java @@ -188,4 +188,12 @@ public void testInnerJoinUnnest() "SELECT * FROM (VALUES ARRAY[1, null]) a(x) INNER JOIN UNNEST(x) b(y) ON b.y = 1", "line .*: INNER JOIN involving UNNEST is only supported with condition ON TRUE"); } + + @Test + public void testRepeatedExpressions() + { + assertions.assertQuery( + "SELECT * FROM (VALUES 1) t, UNNEST(ARRAY['a', 'b'], ARRAY['a', 'b']) u (x, y)", + "VALUES (1, 'a', 'a'), (1, 'b', 'b')"); + } } From 445aa5d605160481320f37268a6f09a0a71a8adc Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Tue, 5 May 2020 21:44:39 +0200 Subject: [PATCH 346/519] Remove unused RowNumberNode with non empty partitionBy In PruneUnreferencedOutputs and PruneRowNumberColumns rule, when 
rowNumberSymbol is unreferenced, the RowNumberNode can be removed from the plan if partitionBy columns are defined as long as maxRowCountPerPartition is not present. Before this change, RowNumberNode was removed only if partitionBy list was empty. --- .../iterative/rule/PruneRowNumberColumns.java | 9 +++++--- .../PruneUnreferencedOutputs.java | 8 ++++--- .../rule/TestPruneRowNumberColumns.java | 21 ++++++++++++++++--- 3 files changed, 29 insertions(+), 9 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneRowNumberColumns.java b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneRowNumberColumns.java index 7cd90c4b1e96..f0d651a574e7 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneRowNumberColumns.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/iterative/rule/PruneRowNumberColumns.java @@ -38,15 +38,18 @@ public PruneRowNumberColumns() @Override protected Optional pushDownProjectOff(Context context, RowNumberNode rowNumberNode, Set referencedOutputs) { - if (!referencedOutputs.contains(rowNumberNode.getRowNumberSymbol()) && rowNumberNode.getPartitionBy().isEmpty()) { - if (rowNumberNode.getMaxRowCountPerPartition().isPresent()) { + // Remove unused RowNumberNode + if (!referencedOutputs.contains(rowNumberNode.getRowNumberSymbol())) { + if (!rowNumberNode.getMaxRowCountPerPartition().isPresent()) { + return Optional.of(rowNumberNode.getSource()); + } + if (rowNumberNode.getPartitionBy().isEmpty()) { return Optional.of(new LimitNode( rowNumberNode.getId(), rowNumberNode.getSource(), rowNumberNode.getMaxRowCountPerPartition().get(), false)); } - return Optional.of(rowNumberNode.getSource()); } Set requiredInputs = Streams.concat( diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PruneUnreferencedOutputs.java b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PruneUnreferencedOutputs.java index 
f021b06004cb..e13075a857d1 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PruneUnreferencedOutputs.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/optimizations/PruneUnreferencedOutputs.java @@ -616,12 +616,14 @@ public PlanNode visitTopN(TopNNode node, RewriteContext> context) public PlanNode visitRowNumber(RowNumberNode node, RewriteContext> context) { // Remove unused RowNumberNode - if (!context.get().contains(node.getRowNumberSymbol()) && node.getPartitionBy().isEmpty()) { + if (!context.get().contains(node.getRowNumberSymbol())) { PlanNode source = context.rewrite(node.getSource(), context.get()); - if (node.getMaxRowCountPerPartition().isPresent()) { + if (!node.getMaxRowCountPerPartition().isPresent()) { + return source; + } + if (node.getPartitionBy().isEmpty()) { return new LimitNode(node.getId(), source, node.getMaxRowCountPerPartition().get(), false); } - return source; } ImmutableSet.Builder inputsBuilder = ImmutableSet.builder(); diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneRowNumberColumns.java b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneRowNumberColumns.java index e64550e23c91..d51c6daf672d 100644 --- a/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneRowNumberColumns.java +++ b/presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneRowNumberColumns.java @@ -49,6 +49,20 @@ public void testRowNumberSymbolNotReferenced() ImmutableMap.of("a", expression("a")), values(ImmutableList.of("a")))); + // partitioning is present, no limit per partition + tester().assertThat(new PruneRowNumberColumns()) + .on(p -> { + Symbol a = p.symbol("a"); + Symbol rowNumber = p.symbol("row_number"); + return p.project( + Assignments.identity(a), + p.rowNumber(ImmutableList.of(a), Optional.empty(), rowNumber, p.values(a))); + }) + .matches( + strictProject( + ImmutableMap.of("a", expression("a")), + 
values(ImmutableList.of("a")))); + // no partitioning, limit per partition is present tester().assertThat(new PruneRowNumberColumns()) .on(p -> { @@ -65,7 +79,7 @@ public void testRowNumberSymbolNotReferenced() 5, values(ImmutableList.of("a"))))); - // partitioning is present + // partitioning and limit per partition are present tester().assertThat(new PruneRowNumberColumns()) .on(p -> { Symbol a = p.symbol("a"); @@ -73,14 +87,15 @@ public void testRowNumberSymbolNotReferenced() Symbol rowNumber = p.symbol("row_number"); return p.project( Assignments.identity(a), - p.rowNumber(ImmutableList.of(a), Optional.empty(), rowNumber, p.values(a, b))); + p.rowNumber(ImmutableList.of(a), Optional.of(5), rowNumber, p.values(a, b))); }) .matches( strictProject( ImmutableMap.of("a", expression("a")), rowNumber( pattern -> pattern - .partitionBy(ImmutableList.of("a")), + .partitionBy(ImmutableList.of("a")) + .maxRowCountPerPartition(Optional.of(5)), strictProject( ImmutableMap.of("a", expression("a")), values(ImmutableList.of("a", "b")))))); From e333672a352885f139c45892649eb4fdb0d6402e Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Thu, 7 May 2020 12:33:33 +0200 Subject: [PATCH 347/519] Use test queries that select some data --- .../AbstractTestIntegrationSmokeTest.java | 25 ++++++++++++++++--- 1 file changed, 22 insertions(+), 3 deletions(-) diff --git a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestIntegrationSmokeTest.java b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestIntegrationSmokeTest.java index b9c62c8715a3..e5d8c9a075e3 100644 --- a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestIntegrationSmokeTest.java +++ b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestIntegrationSmokeTest.java @@ -67,19 +67,38 @@ public void testCountAll() @Test public void testExactPredicate() { - assertQuery("SELECT * FROM orders WHERE orderkey = 10"); + assertQueryReturnsEmptyResult("SELECT * FROM orders WHERE orderkey = 
10"); + + // filtered column is selected + assertQuery("SELECT custkey, orderkey FROM orders WHERE orderkey = 32", "VALUES (1301, 32)"); + + // filtered column is not selected + assertQuery("SELECT custkey FROM orders WHERE orderkey = 32", "VALUES (1301)"); } @Test public void testInListPredicate() { - assertQuery("SELECT * FROM orders WHERE orderkey IN (10, 11, 20, 21)"); + assertQueryReturnsEmptyResult("SELECT * FROM orders WHERE orderkey IN (10, 11, 20, 21)"); + + // filtered column is selected + assertQuery("SELECT custkey, orderkey FROM orders WHERE orderkey IN (7, 10, 32, 33)", "VALUES (392, 7), (1301, 32), (670, 33)"); + + // filtered column is not selected + assertQuery("SELECT custkey FROM orders WHERE orderkey IN (7, 10, 32, 33)", "VALUES (392), (1301), (670)"); } @Test public void testIsNullPredicate() { - assertQuery("SELECT * FROM orders WHERE orderkey = 10 OR orderkey IS NULL"); + assertQueryReturnsEmptyResult("SELECT * FROM orders WHERE orderkey IS NULL"); + assertQueryReturnsEmptyResult("SELECT * FROM orders WHERE orderkey = 10 OR orderkey IS NULL"); + + // filtered column is selected + assertQuery("SELECT custkey, orderkey FROM orders WHERE orderkey = 32 OR orderkey IS NULL", "VALUES (1301, 32)"); + + // filtered column is not selected + assertQuery("SELECT custkey FROM orders WHERE orderkey = 32 OR orderkey IS NULL", "VALUES (1301)"); } @Test From 41642c9bed1dedba9edf5d3d6b3fed7d89bb38cb Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Thu, 7 May 2020 12:33:35 +0200 Subject: [PATCH 348/519] Remove redundant condition The method is used to get `varchar` `Domain`'s value, so we know the value is a `Slice`. 
--- .../java/io/prestosql/connector/system/jdbc/FilterUtil.java | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/connector/system/jdbc/FilterUtil.java b/presto-main/src/main/java/io/prestosql/connector/system/jdbc/FilterUtil.java index f5ea17d21db2..18b29a35359b 100644 --- a/presto-main/src/main/java/io/prestosql/connector/system/jdbc/FilterUtil.java +++ b/presto-main/src/main/java/io/prestosql/connector/system/jdbc/FilterUtil.java @@ -38,10 +38,7 @@ public static Optional stringFilter(TupleDomain constraint, int } Object value = domain.getSingleValue(); - if (value instanceof Slice) { - return Optional.of(((Slice) value).toStringUtf8()); - } - return Optional.empty(); + return Optional.of(((Slice) value).toStringUtf8()); } public static QualifiedTablePrefix tablePrefix(String catalog, Optional schema, Optional table) From a299e3240d8b7c52496b7003fc3cc946f8f3301c Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Thu, 7 May 2020 00:11:01 +0200 Subject: [PATCH 349/519] Use Optional instead of @Nullable parameter --- .../sql/planner/ExpressionInterpreter.java | 4 +-- .../java/io/prestosql/type/LikeFunctions.java | 29 +++++++++++------ .../io/prestosql/sql/TestLikeFunctions.java | 32 ++++++++++--------- 3 files changed, 38 insertions(+), 27 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/ExpressionInterpreter.java b/presto-main/src/main/java/io/prestosql/sql/planner/ExpressionInterpreter.java index bd51c7e70a2f..c2ed67236153 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/ExpressionInterpreter.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/ExpressionInterpreter.java @@ -1038,8 +1038,8 @@ protected Object visitLikePredicate(LikePredicate node, Object context) } // if pattern is a constant without % or _ replace with a comparison - if (pattern instanceof Slice && (escape == null || escape instanceof Slice) && !isLikePattern((Slice) pattern, (Slice) 
escape)) { - Slice unescapedPattern = unescapeLiteralLikePattern((Slice) pattern, (Slice) escape); + if (pattern instanceof Slice && (escape == null || escape instanceof Slice) && !isLikePattern((Slice) pattern, Optional.ofNullable((Slice) escape))) { + Slice unescapedPattern = unescapeLiteralLikePattern((Slice) pattern, Optional.ofNullable((Slice) escape)); Type valueType = type(node.getValue()); Type patternType = createVarcharType(unescapedPattern.length()); Optional commonSuperType = typeCoercion.getCommonSuperType(valueType, patternType); diff --git a/presto-main/src/main/java/io/prestosql/type/LikeFunctions.java b/presto-main/src/main/java/io/prestosql/type/LikeFunctions.java index 0b44ad5a3151..bd197400eba2 100644 --- a/presto-main/src/main/java/io/prestosql/type/LikeFunctions.java +++ b/presto-main/src/main/java/io/prestosql/type/LikeFunctions.java @@ -27,6 +27,8 @@ import io.prestosql.spi.function.SqlType; import io.prestosql.spi.type.StandardTypes; +import java.util.Optional; + import static io.airlift.joni.constants.MetaChar.INEFFECTIVE_META_CHAR; import static io.airlift.joni.constants.SyntaxProperties.OP_ASTERISK_ZERO_INF; import static io.airlift.joni.constants.SyntaxProperties.OP_DOT_ANYCHAR; @@ -112,17 +114,14 @@ public static JoniRegexp likePattern(@SqlType("varchar(x)") Slice pattern, @SqlT return likePattern(pattern.toStringUtf8(), getEscapeChar(escape), true); } - public static boolean isLikePattern(Slice pattern, Slice escape) + public static boolean isLikePattern(Slice pattern, Optional escape) { String stringPattern = pattern.toStringUtf8(); - if (escape == null) { + if (!escape.isPresent()) { return stringPattern.contains("%") || stringPattern.contains("_"); } - String stringEscape = escape.toStringUtf8(); - checkCondition(stringEscape.length() == 1, INVALID_FUNCTION_ARGUMENT, "Escape string must be a single character"); - - char escapeChar = stringEscape.charAt(0); + char escapeChar = getEscapeCharacter(escape).get(); boolean escaped = 
false; boolean isLikePattern = false; for (int currentChar : stringPattern.codePoints().toArray()) { @@ -141,14 +140,13 @@ else if ((currentChar == '%') || (currentChar == '_')) { return isLikePattern; } - public static Slice unescapeLiteralLikePattern(Slice pattern, Slice escape) + public static Slice unescapeLiteralLikePattern(Slice pattern, Optional escape) { - if (escape == null) { + if (!escape.isPresent()) { return pattern; } - String stringEscape = escape.toStringUtf8(); - char escapeChar = stringEscape.charAt(0); + char escapeChar = getEscapeCharacter(escape).get(); String stringPattern = pattern.toStringUtf8(); StringBuilder unescapedPattern = new StringBuilder(stringPattern.length()); boolean escaped = false; @@ -164,6 +162,17 @@ public static Slice unescapeLiteralLikePattern(Slice pattern, Slice escape) return Slices.utf8Slice(unescapedPattern.toString()); } + private static Optional getEscapeCharacter(Optional escape) + { + if (!escape.isPresent()) { + return Optional.empty(); + } + String stringEscape = escape.get().toStringUtf8(); + // non-BMP escape is not supported + checkCondition(stringEscape.length() == 1, INVALID_FUNCTION_ARGUMENT, "Escape string must be a single character"); + return Optional.of(stringEscape.charAt(0)); + } + private static void checkEscape(boolean condition) { checkCondition(condition, INVALID_FUNCTION_ARGUMENT, "Escape character must be followed by '%%', '_' or the escape character itself"); diff --git a/presto-main/src/test/java/io/prestosql/sql/TestLikeFunctions.java b/presto-main/src/test/java/io/prestosql/sql/TestLikeFunctions.java index 87084eb0643e..0e390b6f6e4d 100644 --- a/presto-main/src/test/java/io/prestosql/sql/TestLikeFunctions.java +++ b/presto-main/src/test/java/io/prestosql/sql/TestLikeFunctions.java @@ -21,6 +21,8 @@ import io.prestosql.type.LikeFunctions; import org.testng.annotations.Test; +import java.util.Optional; + import static io.airlift.slice.Slices.utf8Slice; import static 
io.prestosql.spi.type.BooleanType.BOOLEAN; import static io.prestosql.type.LikeFunctions.isLikePattern; @@ -151,25 +153,25 @@ public void testInvalidLikePattern() @Test public void testIsLikePattern() { - assertFalse(isLikePattern(utf8Slice("abc"), null)); - assertFalse(isLikePattern(utf8Slice("abc#_def"), utf8Slice("#"))); - assertFalse(isLikePattern(utf8Slice("abc##def"), utf8Slice("#"))); - assertFalse(isLikePattern(utf8Slice("abc#%def"), utf8Slice("#"))); - assertTrue(isLikePattern(utf8Slice("abc%def"), null)); - assertTrue(isLikePattern(utf8Slice("abcdef_"), null)); - assertTrue(isLikePattern(utf8Slice("abcdef##_"), utf8Slice("#"))); - assertTrue(isLikePattern(utf8Slice("%abcdef#_"), utf8Slice("#"))); - assertThrows(PrestoException.class, () -> isLikePattern(utf8Slice("#"), utf8Slice("#"))); - assertThrows(PrestoException.class, () -> isLikePattern(utf8Slice("abc#abc"), utf8Slice("#"))); - assertThrows(PrestoException.class, () -> isLikePattern(utf8Slice("abc#"), utf8Slice("#"))); + assertFalse(isLikePattern(utf8Slice("abc"), Optional.empty())); + assertFalse(isLikePattern(utf8Slice("abc#_def"), Optional.of(utf8Slice("#")))); + assertFalse(isLikePattern(utf8Slice("abc##def"), Optional.of(utf8Slice("#")))); + assertFalse(isLikePattern(utf8Slice("abc#%def"), Optional.of(utf8Slice("#")))); + assertTrue(isLikePattern(utf8Slice("abc%def"), Optional.empty())); + assertTrue(isLikePattern(utf8Slice("abcdef_"), Optional.empty())); + assertTrue(isLikePattern(utf8Slice("abcdef##_"), Optional.of(utf8Slice("#")))); + assertTrue(isLikePattern(utf8Slice("%abcdef#_"), Optional.of(utf8Slice("#")))); + assertThrows(PrestoException.class, () -> isLikePattern(utf8Slice("#"), Optional.of(utf8Slice("#")))); + assertThrows(PrestoException.class, () -> isLikePattern(utf8Slice("abc#abc"), Optional.of(utf8Slice("#")))); + assertThrows(PrestoException.class, () -> isLikePattern(utf8Slice("abc#"), Optional.of(utf8Slice("#")))); } @Test public void testUnescapeValidLikePattern() { - 
assertEquals(unescapeLiteralLikePattern(utf8Slice("abc"), null), utf8Slice("abc")); - assertEquals(unescapeLiteralLikePattern(utf8Slice("abc#_"), utf8Slice("#")), utf8Slice("abc_")); - assertEquals(unescapeLiteralLikePattern(utf8Slice("a##bc#_"), utf8Slice("#")), utf8Slice("a#bc_")); - assertEquals(unescapeLiteralLikePattern(utf8Slice("a###_bc"), utf8Slice("#")), utf8Slice("a#_bc")); + assertEquals(unescapeLiteralLikePattern(utf8Slice("abc"), Optional.empty()), utf8Slice("abc")); + assertEquals(unescapeLiteralLikePattern(utf8Slice("abc#_"), Optional.of(utf8Slice("#"))), utf8Slice("abc_")); + assertEquals(unescapeLiteralLikePattern(utf8Slice("a##bc#_"), Optional.of(utf8Slice("#"))), utf8Slice("a#bc_")); + assertEquals(unescapeLiteralLikePattern(utf8Slice("a###_bc"), Optional.of(utf8Slice("#"))), utf8Slice("a#_bc")); } } From 8748ffe4a7161ff90b37ceb347895d2cc6ad9f48 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Thu, 7 May 2020 00:11:03 +0200 Subject: [PATCH 350/519] Operate on Slice directly --- .../java/io/prestosql/type/LikeFunctions.java | 41 ++++++++++++------- 1 file changed, 26 insertions(+), 15 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/type/LikeFunctions.java b/presto-main/src/main/java/io/prestosql/type/LikeFunctions.java index bd197400eba2..b701777a3bae 100644 --- a/presto-main/src/main/java/io/prestosql/type/LikeFunctions.java +++ b/presto-main/src/main/java/io/prestosql/type/LikeFunctions.java @@ -18,6 +18,7 @@ import io.airlift.joni.Option; import io.airlift.joni.Regex; import io.airlift.joni.Syntax; +import io.airlift.slice.DynamicSliceOutput; import io.airlift.slice.Slice; import io.airlift.slice.Slices; import io.prestosql.spi.PrestoException; @@ -33,6 +34,8 @@ import static io.airlift.joni.constants.SyntaxProperties.OP_ASTERISK_ZERO_INF; import static io.airlift.joni.constants.SyntaxProperties.OP_DOT_ANYCHAR; import static io.airlift.joni.constants.SyntaxProperties.OP_LINE_ANCHOR; +import static 
io.airlift.slice.SliceUtf8.getCodePointAt; +import static io.airlift.slice.SliceUtf8.lengthOfCodePoint; import static io.prestosql.spi.StandardErrorCode.INVALID_FUNCTION_ARGUMENT; import static io.prestosql.spi.type.Chars.padSpaces; import static io.prestosql.util.Failures.checkCondition; @@ -116,15 +119,14 @@ public static JoniRegexp likePattern(@SqlType("varchar(x)") Slice pattern, @SqlT public static boolean isLikePattern(Slice pattern, Optional escape) { - String stringPattern = pattern.toStringUtf8(); - if (!escape.isPresent()) { - return stringPattern.contains("%") || stringPattern.contains("_"); - } + int escapeChar = getEscapeCharacter(escape) + .map(c -> (int) c) + .orElse(-1); - char escapeChar = getEscapeCharacter(escape).get(); boolean escaped = false; - boolean isLikePattern = false; - for (int currentChar : stringPattern.codePoints().toArray()) { + int position = 0; + while (position < pattern.length()) { + int currentChar = getCodePointAt(pattern, position); if (!escaped && (currentChar == escapeChar)) { escaped = true; } @@ -133,11 +135,12 @@ else if (escaped) { escaped = false; } else if ((currentChar == '%') || (currentChar == '_')) { - isLikePattern = true; + return true; } + position += lengthOfCodePoint(currentChar); } checkEscape(!escaped); - return isLikePattern; + return position < pattern.length(); } public static Slice unescapeLiteralLikePattern(Slice pattern, Optional escape) @@ -146,20 +149,28 @@ public static Slice unescapeLiteralLikePattern(Slice pattern, Optional es return pattern; } - char escapeChar = getEscapeCharacter(escape).get(); - String stringPattern = pattern.toStringUtf8(); - StringBuilder unescapedPattern = new StringBuilder(stringPattern.length()); + int escapeChar = getEscapeCharacter(escape) + .map(c -> (int) c) + .orElse(-1); + + @SuppressWarnings("resource") + DynamicSliceOutput output = new DynamicSliceOutput(pattern.length()); boolean escaped = false; - for (int currentChar : stringPattern.codePoints().toArray()) { 
+ int position = 0; + while (position < pattern.length()) { + int currentChar = getCodePointAt(pattern, position); + int lengthOfCodePoint = lengthOfCodePoint(currentChar); if (!escaped && (currentChar == escapeChar)) { escaped = true; } else { - unescapedPattern.append(Character.toChars(currentChar)); + output.writeBytes(pattern, position, lengthOfCodePoint); escaped = false; } + position += lengthOfCodePoint; } - return Slices.utf8Slice(unescapedPattern.toString()); + checkEscape(!escaped); + return output.slice(); } private static Optional getEscapeCharacter(Optional escape) From 75db23a12dda4b56415bdc41602553e94d96bcc8 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Thu, 7 May 2020 00:11:04 +0200 Subject: [PATCH 351/519] Add patternConstantPrefixBytes --- .../java/io/prestosql/type/LikeFunctions.java | 9 +++-- .../io/prestosql/sql/TestLikeFunctions.java | 36 +++++++++++++++++-- 2 files changed, 40 insertions(+), 5 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/type/LikeFunctions.java b/presto-main/src/main/java/io/prestosql/type/LikeFunctions.java index b701777a3bae..fdb9d45bee00 100644 --- a/presto-main/src/main/java/io/prestosql/type/LikeFunctions.java +++ b/presto-main/src/main/java/io/prestosql/type/LikeFunctions.java @@ -118,6 +118,11 @@ public static JoniRegexp likePattern(@SqlType("varchar(x)") Slice pattern, @SqlT } public static boolean isLikePattern(Slice pattern, Optional escape) + { + return patternConstantPrefixBytes(pattern, escape) < pattern.length(); + } + + public static int patternConstantPrefixBytes(Slice pattern, Optional escape) { int escapeChar = getEscapeCharacter(escape) .map(c -> (int) c) @@ -135,12 +140,12 @@ else if (escaped) { escaped = false; } else if ((currentChar == '%') || (currentChar == '_')) { - return true; + return position; } position += lengthOfCodePoint(currentChar); } checkEscape(!escaped); - return position < pattern.length(); + return position; } public static Slice 
unescapeLiteralLikePattern(Slice pattern, Optional escape) diff --git a/presto-main/src/test/java/io/prestosql/sql/TestLikeFunctions.java b/presto-main/src/test/java/io/prestosql/sql/TestLikeFunctions.java index 0e390b6f6e4d..2071e091408a 100644 --- a/presto-main/src/test/java/io/prestosql/sql/TestLikeFunctions.java +++ b/presto-main/src/test/java/io/prestosql/sql/TestLikeFunctions.java @@ -29,7 +29,9 @@ import static io.prestosql.type.LikeFunctions.likeChar; import static io.prestosql.type.LikeFunctions.likePattern; import static io.prestosql.type.LikeFunctions.likeVarchar; +import static io.prestosql.type.LikeFunctions.patternConstantPrefixBytes; import static io.prestosql.type.LikeFunctions.unescapeLiteralLikePattern; +import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertFalse; import static org.testng.Assert.assertThrows; @@ -161,9 +163,37 @@ public void testIsLikePattern() assertTrue(isLikePattern(utf8Slice("abcdef_"), Optional.empty())); assertTrue(isLikePattern(utf8Slice("abcdef##_"), Optional.of(utf8Slice("#")))); assertTrue(isLikePattern(utf8Slice("%abcdef#_"), Optional.of(utf8Slice("#")))); - assertThrows(PrestoException.class, () -> isLikePattern(utf8Slice("#"), Optional.of(utf8Slice("#")))); - assertThrows(PrestoException.class, () -> isLikePattern(utf8Slice("abc#abc"), Optional.of(utf8Slice("#")))); - assertThrows(PrestoException.class, () -> isLikePattern(utf8Slice("abc#"), Optional.of(utf8Slice("#")))); + assertThatThrownBy(() -> isLikePattern(utf8Slice("#"), Optional.of(utf8Slice("#")))) + .isInstanceOf(PrestoException.class) + .hasMessage("Escape character must be followed by '%', '_' or the escape character itself"); + assertThatThrownBy(() -> isLikePattern(utf8Slice("abc#abc"), Optional.of(utf8Slice("#")))) + .isInstanceOf(PrestoException.class) + .hasMessage("Escape character must be followed by '%', '_' or the escape character itself"); + 
assertThatThrownBy(() -> isLikePattern(utf8Slice("abc#"), Optional.of(utf8Slice("#")))) + .isInstanceOf(PrestoException.class) + .hasMessage("Escape character must be followed by '%', '_' or the escape character itself"); + } + + @Test + public void testPatternConstantPrefixBytes() + { + assertEquals(patternConstantPrefixBytes(utf8Slice("abc"), Optional.empty()), 3); + assertEquals(patternConstantPrefixBytes(utf8Slice("abc#_def"), Optional.of(utf8Slice("#"))), 8); + assertEquals(patternConstantPrefixBytes(utf8Slice("abc##def"), Optional.of(utf8Slice("#"))), 8); + assertEquals(patternConstantPrefixBytes(utf8Slice("abc#%def"), Optional.of(utf8Slice("#"))), 8); + assertEquals(patternConstantPrefixBytes(utf8Slice("abc%def"), Optional.empty()), 3); + assertEquals(patternConstantPrefixBytes(utf8Slice("abcdef_"), Optional.empty()), 6); + assertEquals(patternConstantPrefixBytes(utf8Slice("abcdef##_"), Optional.of(utf8Slice("#"))), 8); + assertEquals(patternConstantPrefixBytes(utf8Slice("%abcdef#_"), Optional.of(utf8Slice("#"))), 0); + assertThatThrownBy(() -> patternConstantPrefixBytes(utf8Slice("#"), Optional.of(utf8Slice("#")))) + .isInstanceOf(PrestoException.class) + .hasMessage("Escape character must be followed by '%', '_' or the escape character itself"); + assertThatThrownBy(() -> patternConstantPrefixBytes(utf8Slice("abc#abc"), Optional.of(utf8Slice("#")))) + .isInstanceOf(PrestoException.class) + .hasMessage("Escape character must be followed by '%', '_' or the escape character itself"); + assertThatThrownBy(() -> patternConstantPrefixBytes(utf8Slice("abc#"), Optional.of(utf8Slice("#")))) + .isInstanceOf(PrestoException.class) + .hasMessage("Escape character must be followed by '%', '_' or the escape character itself"); } @Test From 059061d262d5c42e0e7b1ed0ac3a3dc495b73a30 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Thu, 7 May 2020 00:11:05 +0200 Subject: [PATCH 352/519] Use JDK Predicate interface --- 
.../prestosql/sql/planner/assertions/ColumnHandleMatcher.java | 2 +- .../sql/planner/assertions/ConnectorAwareTableScanMatcher.java | 3 ++- .../io/prestosql/sql/planner/assertions/PlanMatchPattern.java | 2 +- .../test/java/io/prestosql/sql/planner/assertions/Util.java | 2 +- 4 files changed, 5 insertions(+), 4 deletions(-) diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/assertions/ColumnHandleMatcher.java b/presto-main/src/test/java/io/prestosql/sql/planner/assertions/ColumnHandleMatcher.java index 860c6581ee19..8ac7818d9f0b 100644 --- a/presto-main/src/test/java/io/prestosql/sql/planner/assertions/ColumnHandleMatcher.java +++ b/presto-main/src/test/java/io/prestosql/sql/planner/assertions/ColumnHandleMatcher.java @@ -13,7 +13,6 @@ */ package io.prestosql.sql.planner.assertions; -import com.google.common.base.Predicate; import io.prestosql.Session; import io.prestosql.metadata.Metadata; import io.prestosql.spi.connector.ColumnHandle; @@ -23,6 +22,7 @@ import java.util.Map; import java.util.Optional; +import java.util.function.Predicate; import static java.util.Objects.requireNonNull; diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/assertions/ConnectorAwareTableScanMatcher.java b/presto-main/src/test/java/io/prestosql/sql/planner/assertions/ConnectorAwareTableScanMatcher.java index f1e104fa3988..2b51ff1cd6ce 100644 --- a/presto-main/src/test/java/io/prestosql/sql/planner/assertions/ConnectorAwareTableScanMatcher.java +++ b/presto-main/src/test/java/io/prestosql/sql/planner/assertions/ConnectorAwareTableScanMatcher.java @@ -13,7 +13,6 @@ */ package io.prestosql.sql.planner.assertions; -import com.google.common.base.Predicate; import io.prestosql.Session; import io.prestosql.cost.StatsProvider; import io.prestosql.metadata.Metadata; @@ -23,6 +22,8 @@ import io.prestosql.sql.planner.plan.PlanNode; import io.prestosql.sql.planner.plan.TableScanNode; +import java.util.function.Predicate; + import static 
com.google.common.base.Preconditions.checkState; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.node; import static io.prestosql.sql.planner.assertions.Util.domainsMatch; diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/assertions/PlanMatchPattern.java b/presto-main/src/test/java/io/prestosql/sql/planner/assertions/PlanMatchPattern.java index a1f586fb4842..26e5d2d84545 100644 --- a/presto-main/src/test/java/io/prestosql/sql/planner/assertions/PlanMatchPattern.java +++ b/presto-main/src/test/java/io/prestosql/sql/planner/assertions/PlanMatchPattern.java @@ -13,7 +13,6 @@ */ package io.prestosql.sql.planner.assertions; -import com.google.common.base.Predicate; import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -76,6 +75,7 @@ import java.util.Optional; import java.util.Set; import java.util.function.Consumer; +import java.util.function.Predicate; import java.util.stream.IntStream; import static com.google.common.base.MoreObjects.toStringHelper; diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/assertions/Util.java b/presto-main/src/test/java/io/prestosql/sql/planner/assertions/Util.java index 5b65eccb4052..c703cc6c74a5 100644 --- a/presto-main/src/test/java/io/prestosql/sql/planner/assertions/Util.java +++ b/presto-main/src/test/java/io/prestosql/sql/planner/assertions/Util.java @@ -13,7 +13,6 @@ */ package io.prestosql.sql.planner.assertions; -import com.google.common.base.Predicate; import com.google.common.collect.Iterables; import io.prestosql.Session; import io.prestosql.metadata.Metadata; @@ -28,6 +27,7 @@ import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.function.Predicate; import static com.google.common.collect.ImmutableList.toImmutableList; From f9dd802f5c1e7df51c03535c50dcd81fcea771c0 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Thu, 7 May 2020 00:11:06 +0200 Subject: [PATCH 
353/519] Fix indentation --- .../sql/planner/TestLogicalPlanner.java | 28 +++++++++---------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/TestLogicalPlanner.java b/presto-main/src/test/java/io/prestosql/sql/planner/TestLogicalPlanner.java index f4e11a5f7474..c683b5c4673c 100644 --- a/presto-main/src/test/java/io/prestosql/sql/planner/TestLogicalPlanner.java +++ b/presto-main/src/test/java/io/prestosql/sql/planner/TestLogicalPlanner.java @@ -177,20 +177,20 @@ public void testAllFieldsDereferenceOnSubquery() ImmutableMap.of( "output_1", expression("CAST(\"row\" AS ROW(f0 bigint,f1 varchar(25))).f0"), "output_2", expression("CAST(\"row\" AS ROW(f0 bigint,f1 varchar(25))).f1")), - project( - ImmutableMap.of("row", expression("ROW(min, max)")), - aggregation( - ImmutableMap.of( - "min", functionCall("min", ImmutableList.of("min_regionkey")), - "max", functionCall("max", ImmutableList.of("max_name"))), - FINAL, - any( - aggregation( - ImmutableMap.of( - "min_regionkey", functionCall("min", ImmutableList.of("REGIONKEY")), - "max_name", functionCall("max", ImmutableList.of("NAME"))), - PARTIAL, - tableScan("nation", ImmutableMap.of("NAME", "name", "REGIONKEY", "regionkey"))))))))); + project( + ImmutableMap.of("row", expression("ROW(min, max)")), + aggregation( + ImmutableMap.of( + "min", functionCall("min", ImmutableList.of("min_regionkey")), + "max", functionCall("max", ImmutableList.of("max_name"))), + FINAL, + any( + aggregation( + ImmutableMap.of( + "min_regionkey", functionCall("min", ImmutableList.of("REGIONKEY")), + "max_name", functionCall("max", ImmutableList.of("NAME"))), + PARTIAL, + tableScan("nation", ImmutableMap.of("NAME", "name", "REGIONKEY", "regionkey"))))))))); } @Test From 6aea881752ef9a83521a4a20bccf510bf9a31d2c Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Thu, 7 May 2020 00:11:07 +0200 Subject: [PATCH 354/519] Translate LIKE predicate to Domain --- 
.../sql/planner/DomainTranslator.java | 96 ++++++++++++ .../sql/planner/TestDomainTranslator.java | 148 +++++++++++++++++- .../sql/planner/TestLogicalPlanner.java | 43 +++++ .../AbstractTestIntegrationSmokeTest.java | 16 ++ 4 files changed, 299 insertions(+), 4 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/DomainTranslator.java b/presto-main/src/main/java/io/prestosql/sql/planner/DomainTranslator.java index 7f0d44b84890..9f8199c04151 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/DomainTranslator.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/DomainTranslator.java @@ -16,6 +16,8 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.PeekingIterator; +import io.airlift.slice.Slice; +import io.airlift.slice.Slices; import io.prestosql.Session; import io.prestosql.metadata.Metadata; import io.prestosql.metadata.OperatorNotFoundException; @@ -34,6 +36,7 @@ import io.prestosql.spi.type.DoubleType; import io.prestosql.spi.type.RealType; import io.prestosql.spi.type.Type; +import io.prestosql.spi.type.VarcharType; import io.prestosql.sql.ExpressionUtils; import io.prestosql.sql.InterpretedFunctionInvoker; import io.prestosql.sql.parser.SqlParser; @@ -47,11 +50,14 @@ import io.prestosql.sql.tree.InPredicate; import io.prestosql.sql.tree.IsNotNullPredicate; import io.prestosql.sql.tree.IsNullPredicate; +import io.prestosql.sql.tree.LikePredicate; import io.prestosql.sql.tree.LogicalBinaryExpression; import io.prestosql.sql.tree.NodeRef; import io.prestosql.sql.tree.NotExpression; import io.prestosql.sql.tree.NullLiteral; +import io.prestosql.sql.tree.StringLiteral; import io.prestosql.sql.tree.SymbolReference; +import io.prestosql.type.LikeFunctions; import io.prestosql.type.TypeCoercion; import javax.annotation.Nullable; @@ -66,6 +72,10 @@ import static com.google.common.collect.ImmutableList.toImmutableList; import static 
com.google.common.collect.Iterables.getOnlyElement; import static com.google.common.collect.Iterators.peekingIterator; +import static io.airlift.slice.SliceUtf8.countCodePoints; +import static io.airlift.slice.SliceUtf8.getCodePointAt; +import static io.airlift.slice.SliceUtf8.lengthOfCodePoint; +import static io.airlift.slice.SliceUtf8.setCodePointAt; import static io.prestosql.spi.function.OperatorType.SATURATED_FLOOR_CAST; import static io.prestosql.sql.ExpressionUtils.and; import static io.prestosql.sql.ExpressionUtils.combineConjuncts; @@ -857,6 +867,92 @@ protected ExtractionResult visitBetweenPredicate(BetweenPredicate node, Boolean new ComparisonExpression(LESS_THAN_OR_EQUAL, node.getValue(), node.getMax())), complement); } + @Override + protected ExtractionResult visitLikePredicate(LikePredicate node, Boolean complement) + { + Optional result = tryVisitLikePredicate(node, complement); + if (result.isPresent()) { + return result.get(); + } + return super.visitLikePredicate(node, complement); + } + + private Optional tryVisitLikePredicate(LikePredicate node, Boolean complement) + { + if (!(node.getValue() instanceof SymbolReference)) { + // LIKE not on a symbol + return Optional.empty(); + } + + if (!(node.getPattern() instanceof StringLiteral)) { + // dynamic pattern + return Optional.empty(); + } + + if (node.getEscape().isPresent() && !(node.getEscape().get() instanceof StringLiteral)) { + // dynamic escape + return Optional.empty(); + } + + Type type = typeAnalyzer.getType(session, types, node.getValue()); + if (!(type instanceof VarcharType)) { + // TODO support CharType + return Optional.empty(); + } + VarcharType varcharType = (VarcharType) type; + + Symbol symbol = Symbol.from(node.getValue()); + Slice pattern = ((StringLiteral) node.getPattern()).getSlice(); + Optional escape = node.getEscape() + .map(StringLiteral.class::cast) + .map(StringLiteral::getSlice); + + int patternConstantPrefixBytes = LikeFunctions.patternConstantPrefixBytes(pattern, 
escape); + if (patternConstantPrefixBytes == pattern.length()) { + // This should not actually happen, constant LIKE pattern should be converted to equality predicate before DomainTranslator is invoked. + + Slice literal = LikeFunctions.unescapeLiteralLikePattern(pattern, escape); + ValueSet valueSet; + if (varcharType.isUnbounded() || countCodePoints(literal) <= varcharType.getBoundedLength()) { + valueSet = ValueSet.of(type, literal); + } + else { + // impossible to satisfy + valueSet = ValueSet.none(type); + } + Domain domain = Domain.create(complementIfNecessary(valueSet, complement), false); + return Optional.of(new ExtractionResult(TupleDomain.withColumnDomains(ImmutableMap.of(symbol, domain)), TRUE_LITERAL)); + } + + if (complement || patternConstantPrefixBytes == 0) { + // TODO + return Optional.empty(); + } + + Slice constantPrefix = LikeFunctions.unescapeLiteralLikePattern(pattern.slice(0, patternConstantPrefixBytes), escape); + + int lastIncrementable = -1; + for (int position = 0; position < constantPrefix.length(); position += lengthOfCodePoint(constantPrefix, position)) { + // Get last ASCII character to increment, so that character length in bytes does not change. + // Also prefer not to produce non-ASCII if input is all-ASCII, to be on the safe side with connectors. 
+ // TODO remove those limitations + if (getCodePointAt(constantPrefix, position) < 127) { + lastIncrementable = position; + } + } + + if (lastIncrementable == -1) { + return Optional.empty(); + } + + Slice lowerBound = constantPrefix; + Slice upperBound = Slices.copyOf(constantPrefix.slice(0, lastIncrementable + lengthOfCodePoint(constantPrefix, lastIncrementable))); + setCodePointAt(getCodePointAt(constantPrefix, lastIncrementable) + 1, upperBound, lastIncrementable); + + Domain domain = Domain.create(ValueSet.ofRanges(Range.range(type, lowerBound, true, upperBound, false)), false); + return Optional.of(new ExtractionResult(TupleDomain.withColumnDomains(ImmutableMap.of(symbol, domain)), node)); + } + @Override protected ExtractionResult visitIsNullPredicate(IsNullPredicate node, Boolean complement) { diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/TestDomainTranslator.java b/presto-main/src/test/java/io/prestosql/sql/planner/TestDomainTranslator.java index 9c56361cf4f6..fffca4020a64 100644 --- a/presto-main/src/test/java/io/prestosql/sql/planner/TestDomainTranslator.java +++ b/presto-main/src/test/java/io/prestosql/sql/planner/TestDomainTranslator.java @@ -39,6 +39,7 @@ import io.prestosql.sql.tree.InListExpression; import io.prestosql.sql.tree.InPredicate; import io.prestosql.sql.tree.IsNullPredicate; +import io.prestosql.sql.tree.LikePredicate; import io.prestosql.sql.tree.Literal; import io.prestosql.sql.tree.LongLiteral; import io.prestosql.sql.tree.NotExpression; @@ -56,6 +57,7 @@ import java.math.BigDecimal; import java.util.Arrays; import java.util.List; +import java.util.Optional; import java.util.concurrent.TimeUnit; import static io.airlift.slice.Slices.utf8Slice; @@ -77,6 +79,7 @@ import static io.prestosql.spi.type.TinyintType.TINYINT; import static io.prestosql.spi.type.VarbinaryType.VARBINARY; import static io.prestosql.spi.type.VarcharType.VARCHAR; +import static io.prestosql.spi.type.VarcharType.createUnboundedVarcharType; import 
static io.prestosql.sql.ExpressionUtils.and; import static io.prestosql.sql.ExpressionUtils.or; import static io.prestosql.sql.analyzer.TypeSignatureTranslator.toSqlType; @@ -1457,6 +1460,128 @@ private void testNumericTypeTranslation(NumericValues columnValues, NumericVa } } + @Test + public void testLikePredicate() + { + Type varcharType = createUnboundedVarcharType(); + + // constant + testSimpleComparison( + like(C_VARCHAR, stringLiteral("abc")), + C_VARCHAR, + Domain.multipleValues(varcharType, ImmutableList.of(utf8Slice("abc")))); + + // starts with pattern + assertUnsupportedPredicate(like(C_VARCHAR, stringLiteral("_def"))); + assertUnsupportedPredicate(like(C_VARCHAR, stringLiteral("%def"))); + + // _ pattern (unless escaped) + testSimpleComparison( + like(C_VARCHAR, stringLiteral("abc_def")), + C_VARCHAR, + like(C_VARCHAR, stringLiteral("abc_def")), + Domain.create(ValueSet.ofRanges(Range.range(varcharType, utf8Slice("abc"), true, utf8Slice("abd"), false)), false)); + + testSimpleComparison( + like(C_VARCHAR, stringLiteral("abc\\_def")), + C_VARCHAR, + like(C_VARCHAR, stringLiteral("abc\\_def")), + Domain.create(ValueSet.ofRanges(Range.range(varcharType, utf8Slice("abc\\"), true, utf8Slice("abc]"), false)), false)); + + testSimpleComparison( + like(C_VARCHAR, stringLiteral("abc\\_def"), stringLiteral("\\")), + C_VARCHAR, + Domain.multipleValues(varcharType, ImmutableList.of(utf8Slice("abc_def")))); + + testSimpleComparison( + like(C_VARCHAR, stringLiteral("abc\\_def_"), stringLiteral("\\")), + C_VARCHAR, + like(C_VARCHAR, stringLiteral("abc\\_def_"), stringLiteral("\\")), + Domain.create(ValueSet.ofRanges(Range.range(varcharType, utf8Slice("abc_def"), true, utf8Slice("abc_deg"), false)), false)); + + testSimpleComparison( + like(C_VARCHAR, stringLiteral("abc^_def_"), stringLiteral("^")), + C_VARCHAR, + like(C_VARCHAR, stringLiteral("abc^_def_"), stringLiteral("^")), + Domain.create(ValueSet.ofRanges(Range.range(varcharType, utf8Slice("abc_def"), true, 
utf8Slice("abc_deg"), false)), false)); + + // % pattern (unless escaped) + testSimpleComparison( + like(C_VARCHAR, stringLiteral("abc%")), + C_VARCHAR, + like(C_VARCHAR, stringLiteral("abc%")), + Domain.create(ValueSet.ofRanges(Range.range(varcharType, utf8Slice("abc"), true, utf8Slice("abd"), false)), false)); + + testSimpleComparison( + like(C_VARCHAR, stringLiteral("abc%def")), + C_VARCHAR, + like(C_VARCHAR, stringLiteral("abc%def")), + Domain.create(ValueSet.ofRanges(Range.range(varcharType, utf8Slice("abc"), true, utf8Slice("abd"), false)), false)); + + testSimpleComparison( + like(C_VARCHAR, stringLiteral("abc\\%def")), + C_VARCHAR, + like(C_VARCHAR, stringLiteral("abc\\%def")), + Domain.create(ValueSet.ofRanges(Range.range(varcharType, utf8Slice("abc\\"), true, utf8Slice("abc]"), false)), false)); + + testSimpleComparison( + like(C_VARCHAR, stringLiteral("abc\\%def"), stringLiteral("\\")), + C_VARCHAR, + Domain.multipleValues(varcharType, ImmutableList.of(utf8Slice("abc%def")))); + + testSimpleComparison( + like(C_VARCHAR, stringLiteral("abc\\%def_"), stringLiteral("\\")), + C_VARCHAR, + like(C_VARCHAR, stringLiteral("abc\\%def_"), stringLiteral("\\")), + Domain.create(ValueSet.ofRanges(Range.range(varcharType, utf8Slice("abc%def"), true, utf8Slice("abc%deg"), false)), false)); + + testSimpleComparison( + like(C_VARCHAR, stringLiteral("abc^%def_"), stringLiteral("^")), + C_VARCHAR, + like(C_VARCHAR, stringLiteral("abc^%def_"), stringLiteral("^")), + Domain.create(ValueSet.ofRanges(Range.range(varcharType, utf8Slice("abc%def"), true, utf8Slice("abc%deg"), false)), false)); + + // non-ASCII literal + testSimpleComparison( + like(C_VARCHAR, stringLiteral("abc\u007f\u0123\udbfe")), + C_VARCHAR, + Domain.multipleValues(varcharType, ImmutableList.of(utf8Slice("abc\u007f\u0123\udbfe")))); + + // non-ASCII prefix + testSimpleComparison( + like(C_VARCHAR, stringLiteral("abc\u0123\ud83d\ude80def\u007e\u007f\u00ff\u0123\uccf0%")), + C_VARCHAR, + like(C_VARCHAR, 
stringLiteral("abc\u0123\ud83d\ude80def\u007e\u007f\u00ff\u0123\uccf0%")), + Domain.create( + ValueSet.ofRanges(Range.range(varcharType, + utf8Slice("abc\u0123\ud83d\ude80def\u007e\u007f\u00ff\u0123\uccf0"), true, + utf8Slice("abc\u0123\ud83d\ude80def\u007f"), false)), + false)); + + // dynamic escape + assertUnsupportedPredicate(like(C_VARCHAR, stringLiteral("abc\\_def"), C_VARCHAR_1.toSymbolReference())); + + // negation with literal + testSimpleComparison( + not(like(C_VARCHAR, stringLiteral("abcdef"))), + C_VARCHAR, + Domain.create(ValueSet.ofRanges( + Range.lessThan(varcharType, utf8Slice("abcdef")), + Range.greaterThan(varcharType, utf8Slice("abcdef"))), + false)); + + testSimpleComparison( + not(like(C_VARCHAR, stringLiteral("abc\\_def"), stringLiteral("\\"))), + C_VARCHAR, + Domain.create(ValueSet.ofRanges( + Range.lessThan(varcharType, utf8Slice("abc_def")), + Range.greaterThan(varcharType, utf8Slice("abc_def"))), + false)); + + // negation with pattern + assertUnsupportedPredicate(not(like(C_VARCHAR, stringLiteral("abc\\_def")))); + } + @Test public void testCharComparedToVarcharExpression() { @@ -1568,6 +1693,16 @@ private static ComparisonExpression isDistinctFrom(Symbol symbol, Expression exp return isDistinctFrom(symbol.toSymbolReference(), expression); } + private static LikePredicate like(Symbol symbol, Expression expression) + { + return new LikePredicate(symbol.toSymbolReference(), expression, Optional.empty()); + } + + private static LikePredicate like(Symbol symbol, Expression expression, Expression escape) + { + return new LikePredicate(symbol.toSymbolReference(), expression, Optional.of(escape)); + } + private static Expression isNotNull(Symbol symbol) { return isNotNull(symbol.toSymbolReference()); @@ -1733,14 +1868,19 @@ private void testSimpleComparison(Expression expression, Symbol symbol, Range ex testSimpleComparison(expression, symbol, Domain.create(ValueSet.ofRanges(expectedDomainRange), false)); } - private void 
testSimpleComparison(Expression expression, Symbol symbol, Domain domain) + private void testSimpleComparison(Expression expression, Symbol symbol, Domain expectedDomain) + { + testSimpleComparison(expression, symbol, TRUE_LITERAL, expectedDomain); + } + + private void testSimpleComparison(Expression expression, Symbol symbol, Expression expectedRemainingExpression, Domain expectedDomain) { ExtractionResult result = fromPredicate(expression); - assertEquals(result.getRemainingExpression(), TRUE_LITERAL); + assertEquals(result.getRemainingExpression(), expectedRemainingExpression); TupleDomain actual = result.getTupleDomain(); - TupleDomain expected = withColumnDomains(ImmutableMap.of(symbol, domain)); + TupleDomain expected = withColumnDomains(ImmutableMap.of(symbol, expectedDomain)); if (!actual.equals(expected)) { - fail(format("for comparison [%s] expected %s but found %s", expression.toString(), expected.toString(SESSION), actual.toString(SESSION))); + fail(format("for comparison [%s] expected [%s] but found [%s]", expression.toString(), expected.toString(SESSION), actual.toString(SESSION))); } } diff --git a/presto-main/src/test/java/io/prestosql/sql/planner/TestLogicalPlanner.java b/presto-main/src/test/java/io/prestosql/sql/planner/TestLogicalPlanner.java index c683b5c4673c..a9687a0e1707 100644 --- a/presto-main/src/test/java/io/prestosql/sql/planner/TestLogicalPlanner.java +++ b/presto-main/src/test/java/io/prestosql/sql/planner/TestLogicalPlanner.java @@ -15,8 +15,16 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import io.airlift.slice.Slices; import io.prestosql.Session; +import io.prestosql.plugin.tpch.TpchColumnHandle; +import io.prestosql.plugin.tpch.TpchTableHandle; import io.prestosql.spi.block.SortOrder; +import io.prestosql.spi.connector.ColumnHandle; +import io.prestosql.spi.predicate.Domain; +import io.prestosql.spi.predicate.Range; +import io.prestosql.spi.predicate.TupleDomain; +import 
io.prestosql.spi.predicate.ValueSet; import io.prestosql.sql.analyzer.FeaturesConfig.JoinDistributionType; import io.prestosql.sql.analyzer.FeaturesConfig.JoinReorderingStrategy; import io.prestosql.sql.planner.assertions.BasePlanTest; @@ -51,10 +59,14 @@ import org.testng.annotations.Test; import java.util.List; +import java.util.Map; +import java.util.Map.Entry; import java.util.Optional; import java.util.function.Consumer; import java.util.function.Predicate; +import static com.google.common.collect.ImmutableList.toImmutableList; +import static com.google.common.collect.MoreCollectors.toOptional; import static io.airlift.slice.Slices.utf8Slice; import static io.prestosql.SystemSessionProperties.DISTRIBUTED_SORT; import static io.prestosql.SystemSessionProperties.FORCE_SINGLE_NODE_OUTPUT; @@ -138,6 +150,37 @@ public void testAnalyze() tableScan("orders", ImmutableMap.of())))))))))); } + @Test + public void testLikePredicate() + { + assertPlan("SELECT type FROM part WHERE type LIKE 'LARGE PLATED %'", + anyTree( + tableScan( + tableHandle -> { + Map domains = ((TpchTableHandle) tableHandle).getConstraint().getDomains() + .orElseThrow(() -> new AssertionError("Unexpected none TupleDomain")); + + Domain domain = domains.entrySet().stream() + .filter(entry -> ((TpchColumnHandle) entry.getKey()).getColumnName().equals("type")) + .map(Entry::getValue) + .collect(toOptional()) + .orElseThrow(() -> new AssertionError("No domain for 'type'")); + + assertEquals(domain, Domain.multipleValues( + createVarcharType(25), + ImmutableList.of("LARGE PLATED BRASS", "LARGE PLATED COPPER", "LARGE PLATED NICKEL", "LARGE PLATED STEEL", "LARGE PLATED TIN").stream() + .map(Slices::utf8Slice) + .collect(toImmutableList()))); + return true; + }, + TupleDomain.withColumnDomains(ImmutableMap.of( + tableHandle -> ((TpchColumnHandle) tableHandle).getColumnName().equals("type"), + Domain.create( + ValueSet.ofRanges(Range.range(createVarcharType(25), utf8Slice("LARGE PLATED "), true, 
utf8Slice("LARGE PLATED!"), false)), + false))), + ImmutableMap.of()))); + } + @Test public void testAggregation() { diff --git a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestIntegrationSmokeTest.java b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestIntegrationSmokeTest.java index e5d8c9a075e3..39062d17faf6 100644 --- a/presto-testing/src/main/java/io/prestosql/testing/AbstractTestIntegrationSmokeTest.java +++ b/presto-testing/src/main/java/io/prestosql/testing/AbstractTestIntegrationSmokeTest.java @@ -101,6 +101,22 @@ public void testIsNullPredicate() assertQuery("SELECT custkey FROM orders WHERE orderkey = 32 OR orderkey IS NULL", "VALUES (1301)"); } + @Test + public void testLikePredicate() + { + // filtered column is not selected + assertQuery("SELECT orderkey FROM orders WHERE orderpriority LIKE '5-L%'"); + + // filtered column is selected + assertQuery("SELECT orderkey, orderpriority FROM orders WHERE orderpriority LIKE '5-L%'"); + + // filtered column is not selected + assertQuery("SELECT orderkey FROM orders WHERE orderpriority LIKE '5-L__'"); + + // filtered column is selected + assertQuery("SELECT orderkey, orderpriority FROM orders WHERE orderpriority LIKE '5-L__'"); + } + @Test public void testLimit() { From e776d98c6752e9fd45ef55a9ad2bb6a0e42b6907 Mon Sep 17 00:00:00 2001 From: Dain Sundstrom Date: Tue, 14 Apr 2020 15:01:09 -0700 Subject: [PATCH 355/519] MoveWebUiModule to UI package --- .../main/java/io/prestosql/server/CoordinatorModule.java | 1 + .../java/io/prestosql/server/{ => ui}/WebUiModule.java | 9 +-------- 2 files changed, 2 insertions(+), 8 deletions(-) rename presto-main/src/main/java/io/prestosql/server/{ => ui}/WebUiModule.java (81%) diff --git a/presto-main/src/main/java/io/prestosql/server/CoordinatorModule.java b/presto-main/src/main/java/io/prestosql/server/CoordinatorModule.java index 42820053aee9..dc97deb8a33f 100644 --- a/presto-main/src/main/java/io/prestosql/server/CoordinatorModule.java +++ 
b/presto-main/src/main/java/io/prestosql/server/CoordinatorModule.java @@ -109,6 +109,7 @@ import io.prestosql.operator.ForScheduler; import io.prestosql.server.protocol.ExecutingStatementResource; import io.prestosql.server.remotetask.RemoteTaskStats; +import io.prestosql.server.ui.WebUiModule; import io.prestosql.server.ui.WorkerResource; import io.prestosql.spi.memory.ClusterMemoryPoolManager; import io.prestosql.spi.resourcegroups.QueryType; diff --git a/presto-main/src/main/java/io/prestosql/server/WebUiModule.java b/presto-main/src/main/java/io/prestosql/server/ui/WebUiModule.java similarity index 81% rename from presto-main/src/main/java/io/prestosql/server/WebUiModule.java rename to presto-main/src/main/java/io/prestosql/server/ui/WebUiModule.java index b70f747ccf68..dfc811c6eafd 100644 --- a/presto-main/src/main/java/io/prestosql/server/WebUiModule.java +++ b/presto-main/src/main/java/io/prestosql/server/ui/WebUiModule.java @@ -11,18 +11,11 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package io.prestosql.server; +package io.prestosql.server.ui; import com.google.inject.Binder; import com.google.inject.Scopes; import io.airlift.configuration.AbstractConfigurationAwareModule; -import io.prestosql.server.ui.ClusterResource; -import io.prestosql.server.ui.ClusterStatsResource; -import io.prestosql.server.ui.DisabledWebUiAuthenticationManager; -import io.prestosql.server.ui.FormWebUiAuthenticationManager; -import io.prestosql.server.ui.UiQueryResource; -import io.prestosql.server.ui.WebUiAuthenticationManager; -import io.prestosql.server.ui.WebUiConfig; import static io.airlift.configuration.ConfigBinder.configBinder; import static io.airlift.http.server.HttpServerBinder.httpServerBinder; From 98173c2c2b5beaac5230c65c03e1cddfe6e80256 Mon Sep 17 00:00:00 2001 From: Dain Sundstrom Date: Tue, 14 Apr 2020 15:07:16 -0700 Subject: [PATCH 356/519] Add utility for servlet security handling --- .../server/ServletSecurityUtils.java | 107 ++++++++++++++++++ .../server/security/AuthenticationFilter.java | 80 +------------ 2 files changed, 112 insertions(+), 75 deletions(-) create mode 100644 presto-main/src/main/java/io/prestosql/server/ServletSecurityUtils.java diff --git a/presto-main/src/main/java/io/prestosql/server/ServletSecurityUtils.java b/presto-main/src/main/java/io/prestosql/server/ServletSecurityUtils.java new file mode 100644 index 000000000000..c6d09cc71b85 --- /dev/null +++ b/presto-main/src/main/java/io/prestosql/server/ServletSecurityUtils.java @@ -0,0 +1,107 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.server; + +import com.google.common.net.HttpHeaders; +import io.prestosql.spi.security.Identity; + +import javax.servlet.FilterChain; +import javax.servlet.ServletException; +import javax.servlet.ServletRequest; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletRequestWrapper; +import javax.servlet.http.HttpServletResponse; + +import java.io.IOException; +import java.io.InputStream; +import java.io.PrintWriter; +import java.security.Principal; +import java.util.Optional; + +import static com.google.common.io.ByteStreams.copy; +import static com.google.common.io.ByteStreams.nullOutputStream; +import static com.google.common.net.MediaType.PLAIN_TEXT_UTF_8; +import static io.prestosql.server.HttpRequestSessionContext.AUTHENTICATED_IDENTITY; +import static java.util.Objects.requireNonNull; + +public final class ServletSecurityUtils +{ + private ServletSecurityUtils() {} + + public static boolean isSecure(HttpServletRequest request, boolean httpsForwardingEnabled) + { + if (request.isSecure()) { + return true; + } + return httpsForwardingEnabled && "https".equalsIgnoreCase(request.getHeader(HttpHeaders.X_FORWARDED_PROTO)); + } + + public static void sendErrorMessage(HttpServletResponse response, int errorCode, String errorMessage) + throws IOException + { + // Clients should use the response body rather than the HTTP status + // message (which does not exist with HTTP/2), but the status message + // still needs to be sent for compatibility with existing clients. 
+ response.setStatus(errorCode, errorMessage); + response.setContentType(PLAIN_TEXT_UTF_8.toString()); + try (PrintWriter writer = response.getWriter()) { + writer.write(errorMessage); + } + } + + public static void withAuthenticatedIdentity(FilterChain nextFilter, HttpServletRequest request, HttpServletResponse response, Identity authenticatedIdentity) + throws IOException, ServletException + { + request.setAttribute(AUTHENTICATED_IDENTITY, authenticatedIdentity); + try { + nextFilter.doFilter(withPrincipal(request, authenticatedIdentity.getPrincipal()), response); + } + finally { + // destroy identity if identity is still attached to the request + Optional.ofNullable(request.getAttribute(AUTHENTICATED_IDENTITY)) + .map(Identity.class::cast) + .ifPresent(Identity::destroy); + } + } + + public static ServletRequest withPrincipal(HttpServletRequest request, Optional principal) + { + requireNonNull(principal, "principal is null"); + if (!principal.isPresent()) { + return request; + } + return new HttpServletRequestWrapper(request) + { + @Override + public Principal getUserPrincipal() + { + return principal.get(); + } + }; + } + + public static void skipRequestBody(HttpServletRequest request) + throws IOException + { + // If we send the challenge without consuming the body of the request, + // the server will close the connection after sending the response. + // The client may interpret this as a failed request and not resend the + // request with the authentication header. We can avoid this behavior + // in the client by reading and discarding the entire body of the + // unauthenticated request before sending the response. 
+ try (InputStream inputStream = request.getInputStream()) { + copy(inputStream, nullOutputStream()); + } + } +} diff --git a/presto-main/src/main/java/io/prestosql/server/security/AuthenticationFilter.java b/presto-main/src/main/java/io/prestosql/server/security/AuthenticationFilter.java index 1d0f341907ea..272f9dd3fd05 100644 --- a/presto-main/src/main/java/io/prestosql/server/security/AuthenticationFilter.java +++ b/presto-main/src/main/java/io/prestosql/server/security/AuthenticationFilter.java @@ -14,9 +14,7 @@ package io.prestosql.server.security; import com.google.common.base.Joiner; -import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; -import com.google.common.net.HttpHeaders; import io.prestosql.server.InternalAuthenticationManager; import io.prestosql.server.ui.WebUiAuthenticationManager; import io.prestosql.spi.security.Identity; @@ -29,23 +27,21 @@ import javax.servlet.ServletRequest; import javax.servlet.ServletResponse; import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletRequestWrapper; import javax.servlet.http.HttpServletResponse; import java.io.IOException; -import java.io.InputStream; -import java.io.PrintWriter; import java.security.Principal; import java.util.LinkedHashSet; import java.util.List; import java.util.Optional; import java.util.Set; -import static com.google.common.io.ByteStreams.copy; -import static com.google.common.io.ByteStreams.nullOutputStream; import static com.google.common.net.HttpHeaders.WWW_AUTHENTICATE; import static com.google.common.net.MediaType.PLAIN_TEXT_UTF_8; -import static io.prestosql.server.HttpRequestSessionContext.AUTHENTICATED_IDENTITY; +import static io.prestosql.server.ServletSecurityUtils.isSecure; +import static io.prestosql.server.ServletSecurityUtils.sendErrorMessage; +import static io.prestosql.server.ServletSecurityUtils.skipRequestBody; +import static io.prestosql.server.ServletSecurityUtils.withAuthenticatedIdentity; import static 
io.prestosql.server.security.BasicAuthCredentials.extractBasicAuthCredentials; import static java.util.Objects.requireNonNull; import static javax.servlet.http.HttpServletResponse.SC_FORBIDDEN; @@ -54,8 +50,6 @@ public class AuthenticationFilter implements Filter { - private static final String HTTPS_PROTOCOL = "https"; - private final List authenticators; private final boolean httpsForwardingEnabled; private final InternalAuthenticationManager internalAuthenticationManager; @@ -150,19 +144,6 @@ public void doFilter(ServletRequest servletRequest, ServletResponse servletRespo sendErrorMessage(response, SC_UNAUTHORIZED, error); } - private static void sendErrorMessage(HttpServletResponse response, int errorCode, String errorMessage) - throws IOException - { - // Clients should use the response body rather than the HTTP status - // message (which does not exist with HTTP/2), but the status message - // still needs to be sent for compatibility with existing clients. - response.setStatus(errorCode, errorMessage); - response.setContentType(PLAIN_TEXT_UTF_8.toString()); - try (PrintWriter writer = response.getWriter()) { - writer.write(errorMessage); - } - } - private static void handleInsecureRequest(FilterChain nextFilter, HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException { @@ -190,57 +171,6 @@ private static void handleInsecureRequest(FilterChain nextFilter, HttpServletReq private boolean doesRequestSupportAuthentication(HttpServletRequest request) { - if (authenticators.isEmpty()) { - return false; - } - if (request.isSecure()) { - return true; - } - return httpsForwardingEnabled && Strings.nullToEmpty(request.getHeader(HttpHeaders.X_FORWARDED_PROTO)).equalsIgnoreCase(HTTPS_PROTOCOL); - } - - private static void withAuthenticatedIdentity(FilterChain nextFilter, HttpServletRequest request, HttpServletResponse response, Identity authenticatedIdentity) - throws IOException, ServletException - { - 
request.setAttribute(AUTHENTICATED_IDENTITY, authenticatedIdentity); - try { - nextFilter.doFilter(withPrincipal(request, authenticatedIdentity.getPrincipal()), response); - } - finally { - // destroy identity if identity is still attached to the request - Optional.ofNullable(request.getAttribute(AUTHENTICATED_IDENTITY)) - .map(Identity.class::cast) - .ifPresent(Identity::destroy); - } - } - - private static ServletRequest withPrincipal(HttpServletRequest request, Optional principal) - { - requireNonNull(principal, "principal is null"); - if (!principal.isPresent()) { - return request; - } - return new HttpServletRequestWrapper(request) - { - @Override - public Principal getUserPrincipal() - { - return principal.get(); - } - }; - } - - private static void skipRequestBody(HttpServletRequest request) - throws IOException - { - // If we send the challenge without consuming the body of the request, - // the server will close the connection after sending the response. - // The client may interpret this as a failed request and not resend the - // request with the authentication header. We can avoid this behavior - // in the client by reading and discarding the entire body of the - // unauthenticated request before sending the response. 
- try (InputStream inputStream = request.getInputStream()) { - copy(inputStream, nullOutputStream()); - } + return !authenticators.isEmpty() && isSecure(request, httpsForwardingEnabled); } } From 62244417234a6700567ba64e3597cbcfe9b2cc6d Mon Sep 17 00:00:00 2001 From: Dain Sundstrom Date: Tue, 14 Apr 2020 15:13:01 -0700 Subject: [PATCH 357/519] Support forwarded https in web ui authentication --- .../ui/FormWebUiAuthenticationManager.java | 25 +++++++++++-------- 1 file changed, 14 insertions(+), 11 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/server/ui/FormWebUiAuthenticationManager.java b/presto-main/src/main/java/io/prestosql/server/ui/FormWebUiAuthenticationManager.java index cb77ebfc1a80..3ac20c6913f9 100644 --- a/presto-main/src/main/java/io/prestosql/server/ui/FormWebUiAuthenticationManager.java +++ b/presto-main/src/main/java/io/prestosql/server/ui/FormWebUiAuthenticationManager.java @@ -22,6 +22,7 @@ import io.jsonwebtoken.Jwts; import io.jsonwebtoken.SignatureAlgorithm; import io.prestosql.server.security.PasswordAuthenticatorManager; +import io.prestosql.server.security.SecurityConfig; import io.prestosql.spi.security.AccessDeniedException; import io.prestosql.spi.security.BasicPrincipal; import io.prestosql.spi.security.Identity; @@ -57,6 +58,7 @@ import static com.google.common.net.HttpHeaders.X_FORWARDED_PROTO; import static io.airlift.http.client.HttpUriBuilder.uriBuilder; import static io.prestosql.server.HttpRequestSessionContext.AUTHENTICATED_IDENTITY; +import static io.prestosql.server.ServletSecurityUtils.isSecure; import static java.nio.charset.StandardCharsets.UTF_8; import static java.util.Arrays.stream; import static java.util.Objects.requireNonNull; @@ -75,10 +77,14 @@ public class FormWebUiAuthenticationManager private final Function jwtParser; private final Function jwtGenerator; + private final boolean httpsForwardingEnabled; private final PasswordAuthenticatorManager passwordAuthenticatorManager; @Inject - public 
FormWebUiAuthenticationManager(WebUiConfig config, PasswordAuthenticatorManager passwordAuthenticatorManager) + public FormWebUiAuthenticationManager( + WebUiConfig config, + SecurityConfig securityConfig, + PasswordAuthenticatorManager passwordAuthenticatorManager) { byte[] hmac; if (config.getSharedSecret().isPresent()) { @@ -94,6 +100,8 @@ public FormWebUiAuthenticationManager(WebUiConfig config, PasswordAuthenticatorM long sessionTimeoutNanos = config.getSessionTimeout().roundTo(NANOSECONDS); this.jwtGenerator = username -> generateJwt(hmac, username, sessionTimeoutNanos); + this.httpsForwardingEnabled = requireNonNull(securityConfig, "securityConfig is null").getEnableForwardingHttps(); + this.passwordAuthenticatorManager = requireNonNull(passwordAuthenticatorManager, "passwordAuthenticatorManager is null"); } @@ -204,7 +212,7 @@ private Optional checkLoginCredentials(HttpServletRequest request) return Optional.empty(); } - if (!isHttps(request)) { + if (!isSecure(request, httpsForwardingEnabled)) { return Optional.of(username); } @@ -266,17 +274,17 @@ private Cookie createAuthenticationCookie(HttpServletRequest request, String use { String jwt = jwtGenerator.apply(userName); Cookie cookie = new Cookie(PRESTO_UI_COOKIE, jwt); - cookie.setSecure(isHttps(request)); + cookie.setSecure(isSecure(request, httpsForwardingEnabled)); cookie.setHttpOnly(true); cookie.setPath("/ui"); return cookie; } - private static Cookie getDeleteCookie(HttpServletRequest request) + private Cookie getDeleteCookie(HttpServletRequest request) { Cookie cookie = new Cookie(PRESTO_UI_COOKIE, "delete"); cookie.setMaxAge(0); - cookie.setSecure(isHttps(request)); + cookie.setSecure(isSecure(request, httpsForwardingEnabled)); cookie.setHttpOnly(true); return cookie; } @@ -328,14 +336,9 @@ static String getRedirectLocation(HttpServletRequest request, String path, Strin return builder.toString(); } - private static boolean isHttps(HttpServletRequest request) - { - return 
"https".equals(firstNonNull(emptyToNull(request.getHeader(X_FORWARDED_PROTO)), request.getScheme())); - } - private boolean isAuthenticationEnabled(HttpServletRequest request) { - return !isHttps(request) || passwordAuthenticatorManager.isLoaded(); + return !isSecure(request, httpsForwardingEnabled) || passwordAuthenticatorManager.isLoaded(); } private static String generateJwt(byte[] hmac, String username, long sessionTimeoutNanos) From 3c7a30da58eafb916d06fef328f481a45288ce57 Mon Sep 17 00:00:00 2001 From: Dain Sundstrom Date: Tue, 14 Apr 2020 15:22:29 -0700 Subject: [PATCH 358/519] Add extensible web ui authentication bindings --- .../server/ui/FormUiAuthenticatorModule.java | 33 +++++++ .../ui/FormWebUiAuthenticationManager.java | 2 +- .../prestosql/server/ui/FormWebUiConfig.java | 55 ++++++++++++ .../server/ui/WebUiAuthenticationConfig.java | 35 ++++++++ .../server/ui/WebUiAuthenticationModule.java | 89 +++++++++++++++++++ .../io/prestosql/server/ui/WebUiConfig.java | 35 -------- .../io/prestosql/server/ui/WebUiModule.java | 2 +- .../server/ui/TestFormWebUiConfig.java | 51 +++++++++++ .../prestosql/server/ui/TestWebUiConfig.java | 12 +-- 9 files changed, 267 insertions(+), 47 deletions(-) create mode 100644 presto-main/src/main/java/io/prestosql/server/ui/FormUiAuthenticatorModule.java create mode 100644 presto-main/src/main/java/io/prestosql/server/ui/FormWebUiConfig.java create mode 100644 presto-main/src/main/java/io/prestosql/server/ui/WebUiAuthenticationConfig.java create mode 100644 presto-main/src/main/java/io/prestosql/server/ui/WebUiAuthenticationModule.java create mode 100644 presto-main/src/test/java/io/prestosql/server/ui/TestFormWebUiConfig.java diff --git a/presto-main/src/main/java/io/prestosql/server/ui/FormUiAuthenticatorModule.java b/presto-main/src/main/java/io/prestosql/server/ui/FormUiAuthenticatorModule.java new file mode 100644 index 000000000000..0ae606f9f713 --- /dev/null +++ 
b/presto-main/src/main/java/io/prestosql/server/ui/FormUiAuthenticatorModule.java @@ -0,0 +1,33 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.server.ui; + +import com.google.inject.Binder; +import com.google.inject.Module; +import com.google.inject.Scopes; +import io.prestosql.server.security.PasswordAuthenticatorManager; + +import static io.airlift.configuration.ConfigBinder.configBinder; + +public class FormUiAuthenticatorModule + implements Module +{ + @Override + public void configure(Binder binder) + { + binder.bind(PasswordAuthenticatorManager.class).in(Scopes.SINGLETON); + binder.bind(WebUiAuthenticationManager.class).to(FormWebUiAuthenticationManager.class).in(Scopes.SINGLETON); + configBinder(binder).bindConfig(FormWebUiConfig.class); + } +} diff --git a/presto-main/src/main/java/io/prestosql/server/ui/FormWebUiAuthenticationManager.java b/presto-main/src/main/java/io/prestosql/server/ui/FormWebUiAuthenticationManager.java index 3ac20c6913f9..6b3d425a88e3 100644 --- a/presto-main/src/main/java/io/prestosql/server/ui/FormWebUiAuthenticationManager.java +++ b/presto-main/src/main/java/io/prestosql/server/ui/FormWebUiAuthenticationManager.java @@ -82,7 +82,7 @@ public class FormWebUiAuthenticationManager @Inject public FormWebUiAuthenticationManager( - WebUiConfig config, + FormWebUiConfig config, SecurityConfig securityConfig, PasswordAuthenticatorManager passwordAuthenticatorManager) { diff --git 
a/presto-main/src/main/java/io/prestosql/server/ui/FormWebUiConfig.java b/presto-main/src/main/java/io/prestosql/server/ui/FormWebUiConfig.java new file mode 100644 index 000000000000..3104977c221d --- /dev/null +++ b/presto-main/src/main/java/io/prestosql/server/ui/FormWebUiConfig.java @@ -0,0 +1,55 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.server.ui; + +import io.airlift.configuration.Config; +import io.airlift.units.Duration; + +import javax.validation.constraints.NotNull; + +import java.util.Optional; + +import static java.util.concurrent.TimeUnit.DAYS; + +public class FormWebUiConfig +{ + private Optional sharedSecret = Optional.empty(); + private Duration sessionTimeout = new Duration(1, DAYS); + + @NotNull + public Optional getSharedSecret() + { + return sharedSecret; + } + + @Config("web-ui.shared-secret") + public FormWebUiConfig setSharedSecret(String sharedSecret) + { + this.sharedSecret = Optional.ofNullable(sharedSecret); + return this; + } + + @NotNull + public Duration getSessionTimeout() + { + return sessionTimeout; + } + + @Config("web-ui.session-timeout") + public FormWebUiConfig setSessionTimeout(Duration sessionTimeout) + { + this.sessionTimeout = sessionTimeout; + return this; + } +} diff --git a/presto-main/src/main/java/io/prestosql/server/ui/WebUiAuthenticationConfig.java b/presto-main/src/main/java/io/prestosql/server/ui/WebUiAuthenticationConfig.java new file mode 100644 index 
000000000000..8a92b5e98e98 --- /dev/null +++ b/presto-main/src/main/java/io/prestosql/server/ui/WebUiAuthenticationConfig.java @@ -0,0 +1,35 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.server.ui; + +import io.airlift.configuration.Config; +import io.airlift.configuration.ConfigDescription; + +public class WebUiAuthenticationConfig +{ + private String authentication; + + public String getAuthentication() + { + return authentication; + } + + @Config("web-ui.authentication.type") + @ConfigDescription("Authentication type for the web ui") + public WebUiAuthenticationConfig setAuthentication(String authentication) + { + this.authentication = authentication; + return this; + } +} diff --git a/presto-main/src/main/java/io/prestosql/server/ui/WebUiAuthenticationModule.java b/presto-main/src/main/java/io/prestosql/server/ui/WebUiAuthenticationModule.java new file mode 100644 index 000000000000..1babbb40b142 --- /dev/null +++ b/presto-main/src/main/java/io/prestosql/server/ui/WebUiAuthenticationModule.java @@ -0,0 +1,89 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.server.ui; + +import com.google.inject.Binder; +import com.google.inject.Module; +import io.airlift.configuration.AbstractConfigurationAwareModule; +import io.prestosql.server.security.SecurityConfig; + +import java.util.List; + +import static com.google.common.collect.ImmutableList.toImmutableList; +import static io.airlift.configuration.ConfigBinder.configBinder; +import static java.util.Locale.ENGLISH; +import static java.util.Objects.requireNonNull; + +public class WebUiAuthenticationModule + extends AbstractConfigurationAwareModule +{ + @Override + protected void setup(Binder binder) + { + configBinder(binder).bindConfig(WebUiAuthenticationConfig.class); + + installWebUiAuthenticator("form", new FormUiAuthenticatorModule()); + } + + private void installWebUiAuthenticator(String type, Module module) + { + install(webUiAuthenticator(type, module)); + } + + public static Module webUiAuthenticator(String type, Module module) + { + return new ConditionalWebUiAuthenticationModule(type, module); + } + + private static class ConditionalWebUiAuthenticationModule + extends AbstractConfigurationAwareModule + { + private final String type; + private final Module module; + + public ConditionalWebUiAuthenticationModule(String type, Module module) + { + this.type = requireNonNull(type, "type is null"); + this.module = requireNonNull(module, "module is null"); + } + + @Override + protected void setup(Binder binder) + { + if (type.equals(getAuthenticationType())) { + install(module); + } + } + + private String getAuthenticationType() 
+ { + String authentication = buildConfigObject(WebUiAuthenticationConfig.class).getAuthentication(); + if (authentication != null) { + return authentication; + } + + // no authenticator explicitly set for the web ui, so choose a default: + // If there is a password authenticator, use that. + List authenticationTypes = buildConfigObject(SecurityConfig.class).getAuthenticationTypes().stream() + .map(type -> type.toLowerCase(ENGLISH)) + .collect(toImmutableList()); + if (authenticationTypes.contains("password")) { + return "form"; + } + // otherwise use the first authenticator, or if there are no authenticators + // configured, use form for the UI since it handles this case + return authenticationTypes.stream().findFirst().orElse("form"); + } + } +} diff --git a/presto-main/src/main/java/io/prestosql/server/ui/WebUiConfig.java b/presto-main/src/main/java/io/prestosql/server/ui/WebUiConfig.java index ef1d9799affe..52f8f5ffbbf2 100644 --- a/presto-main/src/main/java/io/prestosql/server/ui/WebUiConfig.java +++ b/presto-main/src/main/java/io/prestosql/server/ui/WebUiConfig.java @@ -14,19 +14,10 @@ package io.prestosql.server.ui; import io.airlift.configuration.Config; -import io.airlift.units.Duration; - -import javax.validation.constraints.NotNull; - -import java.util.Optional; - -import static java.util.concurrent.TimeUnit.DAYS; public class WebUiConfig { private boolean enabled = true; - private Optional sharedSecret = Optional.empty(); - private Duration sessionTimeout = new Duration(1, DAYS); public boolean isEnabled() { @@ -39,30 +30,4 @@ public WebUiConfig setEnabled(boolean enabled) this.enabled = enabled; return this; } - - @NotNull - public Optional getSharedSecret() - { - return sharedSecret; - } - - @Config("web-ui.shared-secret") - public WebUiConfig setSharedSecret(String sharedSecret) - { - this.sharedSecret = Optional.ofNullable(sharedSecret); - return this; - } - - @NotNull - public Duration getSessionTimeout() - { - return sessionTimeout; - } - - 
@Config("web-ui.session-timeout") - public WebUiConfig setSessionTimeout(Duration sessionTimeout) - { - this.sessionTimeout = sessionTimeout; - return this; - } } diff --git a/presto-main/src/main/java/io/prestosql/server/ui/WebUiModule.java b/presto-main/src/main/java/io/prestosql/server/ui/WebUiModule.java index dfc811c6eafd..1a431f38c0df 100644 --- a/presto-main/src/main/java/io/prestosql/server/ui/WebUiModule.java +++ b/presto-main/src/main/java/io/prestosql/server/ui/WebUiModule.java @@ -32,7 +32,7 @@ protected void setup(Binder binder) configBinder(binder).bindConfig(WebUiConfig.class); if (buildConfigObject(WebUiConfig.class).isEnabled()) { - binder.bind(WebUiAuthenticationManager.class).to(FormWebUiAuthenticationManager.class).in(Scopes.SINGLETON); + install(new WebUiAuthenticationModule()); jaxrsBinder(binder).bind(ClusterResource.class); jaxrsBinder(binder).bind(ClusterStatsResource.class); jaxrsBinder(binder).bind(UiQueryResource.class); diff --git a/presto-main/src/test/java/io/prestosql/server/ui/TestFormWebUiConfig.java b/presto-main/src/test/java/io/prestosql/server/ui/TestFormWebUiConfig.java new file mode 100644 index 000000000000..2d2f534823f5 --- /dev/null +++ b/presto-main/src/test/java/io/prestosql/server/ui/TestFormWebUiConfig.java @@ -0,0 +1,51 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.server.ui; + +import com.google.common.collect.ImmutableMap; +import io.airlift.units.Duration; +import org.testng.annotations.Test; + +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static io.airlift.configuration.testing.ConfigAssertions.assertFullMapping; +import static io.airlift.configuration.testing.ConfigAssertions.assertRecordedDefaults; +import static io.airlift.configuration.testing.ConfigAssertions.recordDefaults; + +public class TestFormWebUiConfig +{ + @Test + public void testDefaults() + { + assertRecordedDefaults(recordDefaults(FormWebUiConfig.class) + .setSessionTimeout(new Duration(1, TimeUnit.DAYS)) + .setSharedSecret(null)); + } + + @Test + public void testExplicitPropertyMappings() + { + Map properties = new ImmutableMap.Builder() + .put("web-ui.session-timeout", "33s") + .put("web-ui.shared-secret", "test-secret") + .build(); + + FormWebUiConfig expected = new FormWebUiConfig() + .setSessionTimeout(new Duration(33, TimeUnit.SECONDS)) + .setSharedSecret("test-secret"); + + assertFullMapping(properties, expected); + } +} diff --git a/presto-main/src/test/java/io/prestosql/server/ui/TestWebUiConfig.java b/presto-main/src/test/java/io/prestosql/server/ui/TestWebUiConfig.java index c794d381e903..75a56f9d6993 100644 --- a/presto-main/src/test/java/io/prestosql/server/ui/TestWebUiConfig.java +++ b/presto-main/src/test/java/io/prestosql/server/ui/TestWebUiConfig.java @@ -14,11 +14,9 @@ package io.prestosql.server.ui; import com.google.common.collect.ImmutableMap; -import io.airlift.units.Duration; import org.testng.annotations.Test; import java.util.Map; -import java.util.concurrent.TimeUnit; import static io.airlift.configuration.testing.ConfigAssertions.assertFullMapping; import static io.airlift.configuration.testing.ConfigAssertions.assertRecordedDefaults; @@ -30,9 +28,7 @@ public class TestWebUiConfig public void testDefaults() { assertRecordedDefaults(recordDefaults(WebUiConfig.class) - 
.setEnabled(true) - .setSessionTimeout(new Duration(1, TimeUnit.DAYS)) - .setSharedSecret(null)); + .setEnabled(true)); } @Test @@ -40,14 +36,10 @@ public void testExplicitPropertyMappings() { Map properties = new ImmutableMap.Builder() .put("web-ui.enabled", "false") - .put("web-ui.session-timeout", "33s") - .put("web-ui.shared-secret", "test-secret") .build(); WebUiConfig expected = new WebUiConfig() - .setEnabled(false) - .setSessionTimeout(new Duration(33, TimeUnit.SECONDS)) - .setSharedSecret("test-secret"); + .setEnabled(false); assertFullMapping(properties, expected); } From 489effa19b3c0791ad75723899bcab4966b50694 Mon Sep 17 00:00:00 2001 From: Dain Sundstrom Date: Tue, 14 Apr 2020 15:33:36 -0700 Subject: [PATCH 359/519] Add fixed user web ui authenticator --- .../server/ui/FixedUiAuthenticatorModule.java | 31 +++++++++ .../FixedUserWebUiAuthenticationManager.java | 65 +++++++++++++++++++ .../server/ui/FixedUserWebUiConfig.java | 36 ++++++++++ .../ui/FormWebUiAuthenticationManager.java | 10 +++ .../server/ui/WebUiAuthenticationModule.java | 1 + .../server/ui/TestFixedUserWebUiConfig.java | 46 +++++++++++++ .../io/prestosql/server/ui/TestWebUi.java | 51 ++++++++++++++- 7 files changed, 239 insertions(+), 1 deletion(-) create mode 100644 presto-main/src/main/java/io/prestosql/server/ui/FixedUiAuthenticatorModule.java create mode 100644 presto-main/src/main/java/io/prestosql/server/ui/FixedUserWebUiAuthenticationManager.java create mode 100644 presto-main/src/main/java/io/prestosql/server/ui/FixedUserWebUiConfig.java create mode 100644 presto-main/src/test/java/io/prestosql/server/ui/TestFixedUserWebUiConfig.java diff --git a/presto-main/src/main/java/io/prestosql/server/ui/FixedUiAuthenticatorModule.java b/presto-main/src/main/java/io/prestosql/server/ui/FixedUiAuthenticatorModule.java new file mode 100644 index 000000000000..d0acc0518946 --- /dev/null +++ b/presto-main/src/main/java/io/prestosql/server/ui/FixedUiAuthenticatorModule.java @@ -0,0 +1,31 @@ +/* 
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.server.ui; + +import com.google.inject.Binder; +import com.google.inject.Module; +import com.google.inject.Scopes; + +import static io.airlift.configuration.ConfigBinder.configBinder; + +public class FixedUiAuthenticatorModule + implements Module +{ + @Override + public void configure(Binder binder) + { + binder.bind(WebUiAuthenticationManager.class).to(FixedUserWebUiAuthenticationManager.class).in(Scopes.SINGLETON); + configBinder(binder).bindConfig(FixedUserWebUiConfig.class); + } +} diff --git a/presto-main/src/main/java/io/prestosql/server/ui/FixedUserWebUiAuthenticationManager.java b/presto-main/src/main/java/io/prestosql/server/ui/FixedUserWebUiAuthenticationManager.java new file mode 100644 index 000000000000..923ee1e9d915 --- /dev/null +++ b/presto-main/src/main/java/io/prestosql/server/ui/FixedUserWebUiAuthenticationManager.java @@ -0,0 +1,65 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.server.ui; + +import io.prestosql.spi.security.BasicPrincipal; +import io.prestosql.spi.security.Identity; + +import javax.inject.Inject; +import javax.servlet.FilterChain; +import javax.servlet.ServletException; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import java.io.IOException; + +import static io.prestosql.server.ServletSecurityUtils.withAuthenticatedIdentity; +import static io.prestosql.server.ui.FormWebUiAuthenticationManager.redirectAllFormLoginToUi; +import static java.util.Objects.requireNonNull; + +public class FixedUserWebUiAuthenticationManager + implements WebUiAuthenticationManager +{ + private final Identity webUiIdentity; + + @Inject + public FixedUserWebUiAuthenticationManager(FixedUserWebUiConfig config) + { + this(basicIdentity(requireNonNull(config, "config is null").getUsername())); + } + + public FixedUserWebUiAuthenticationManager(Identity webUiIdentity) + { + this.webUiIdentity = requireNonNull(webUiIdentity, "webUiIdentity is null"); + } + + @Override + public void handleUiRequest(HttpServletRequest request, HttpServletResponse response, FilterChain nextFilter) + throws IOException, ServletException + { + if (redirectAllFormLoginToUi(request, response)) { + return; + } + + withAuthenticatedIdentity(nextFilter, request, response, webUiIdentity); + } + + private static Identity basicIdentity(String username) + { + requireNonNull(username, "username is null"); + return Identity.forUser(username) + .withPrincipal(new BasicPrincipal(username)) + .build(); + } +} diff --git a/presto-main/src/main/java/io/prestosql/server/ui/FixedUserWebUiConfig.java b/presto-main/src/main/java/io/prestosql/server/ui/FixedUserWebUiConfig.java new file mode 100644 index 000000000000..360c6a56275f --- /dev/null +++ 
b/presto-main/src/main/java/io/prestosql/server/ui/FixedUserWebUiConfig.java @@ -0,0 +1,36 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.server.ui; + +import io.airlift.configuration.Config; + +import javax.validation.constraints.NotNull; + +public class FixedUserWebUiConfig +{ + private String username; + + @NotNull + public String getUsername() + { + return username; + } + + @Config("web-ui.user") + public FixedUserWebUiConfig setUsername(String username) + { + this.username = username; + return this; + } +} diff --git a/presto-main/src/main/java/io/prestosql/server/ui/FormWebUiAuthenticationManager.java b/presto-main/src/main/java/io/prestosql/server/ui/FormWebUiAuthenticationManager.java index 6b3d425a88e3..5cde3156a755 100644 --- a/presto-main/src/main/java/io/prestosql/server/ui/FormWebUiAuthenticationManager.java +++ b/presto-main/src/main/java/io/prestosql/server/ui/FormWebUiAuthenticationManager.java @@ -382,4 +382,14 @@ private static HttpUriBuilder toUriBuilderWithForwarding(HttpServletRequest requ } return builder; } + + public static boolean redirectAllFormLoginToUi(HttpServletRequest request, HttpServletResponse response) + { + // these paths should never be used with a protocol login, but the user might have this cached or linked, so redirect back to the main UI page.
+ if (request.getPathInfo().equals(LOGIN_FORM) || request.getPathInfo().equals("/ui/login") || request.getPathInfo().equals("/ui/logout")) { + sendRedirect(response, getRedirectLocation(request, UI_LOCATION)); + return true; + } + return false; + } } diff --git a/presto-main/src/main/java/io/prestosql/server/ui/WebUiAuthenticationModule.java b/presto-main/src/main/java/io/prestosql/server/ui/WebUiAuthenticationModule.java index 1babbb40b142..ecb0122b53b9 100644 --- a/presto-main/src/main/java/io/prestosql/server/ui/WebUiAuthenticationModule.java +++ b/presto-main/src/main/java/io/prestosql/server/ui/WebUiAuthenticationModule.java @@ -34,6 +34,7 @@ protected void setup(Binder binder) configBinder(binder).bindConfig(WebUiAuthenticationConfig.class); installWebUiAuthenticator("form", new FormUiAuthenticatorModule()); + installWebUiAuthenticator("fixed", new FixedUiAuthenticatorModule()); } private void installWebUiAuthenticator(String type, Module module) diff --git a/presto-main/src/test/java/io/prestosql/server/ui/TestFixedUserWebUiConfig.java b/presto-main/src/test/java/io/prestosql/server/ui/TestFixedUserWebUiConfig.java new file mode 100644 index 000000000000..63d458a8415b --- /dev/null +++ b/presto-main/src/test/java/io/prestosql/server/ui/TestFixedUserWebUiConfig.java @@ -0,0 +1,46 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.server.ui; + +import com.google.common.collect.ImmutableMap; +import org.testng.annotations.Test; + +import java.util.Map; + +import static io.airlift.configuration.testing.ConfigAssertions.assertFullMapping; +import static io.airlift.configuration.testing.ConfigAssertions.assertRecordedDefaults; +import static io.airlift.configuration.testing.ConfigAssertions.recordDefaults; + +public class TestFixedUserWebUiConfig +{ + @Test + public void testDefaults() + { + assertRecordedDefaults(recordDefaults(FixedUserWebUiConfig.class) + .setUsername(null)); + } + + @Test + public void testExplicitPropertyMappings() + { + Map properties = new ImmutableMap.Builder() + .put("web-ui.user", "some-user") + .build(); + + FixedUserWebUiConfig expected = new FixedUserWebUiConfig() + .setUsername("some-user"); + + assertFullMapping(properties, expected); + } +} diff --git a/presto-main/src/test/java/io/prestosql/server/ui/TestWebUi.java b/presto-main/src/test/java/io/prestosql/server/ui/TestWebUi.java index 72c9f8e991f6..a9c21b347ca9 100644 --- a/presto-main/src/test/java/io/prestosql/server/ui/TestWebUi.java +++ b/presto-main/src/test/java/io/prestosql/server/ui/TestWebUi.java @@ -62,7 +62,6 @@ public class TestWebUi .put("http-server.https.enabled", "true") .put("http-server.https.keystore.path", LOCALHOST_KEYSTORE) .put("http-server.https.keystore.key", "") - .put("http-server.authentication.type", "PASSWORD") .build(); private static final String TEST_USER = "test-user"; private static final String TEST_PASSWORD = "test-password"; @@ -323,6 +322,56 @@ private void testNoPasswordAuthenticator(URI baseUri) assertOk(client, getValidVendorLocation(baseUri)); } + @Test + public void testFixedAuthenticator() + throws Exception + { + try (TestingPrestoServer server = TestingPrestoServer.builder() + .setProperties(ImmutableMap.builder() + .putAll(SECURE_PROPERTIES) + .put("web-ui.authentication.type", "fixed") + .put("web-ui.user", "test-user") + .build()) + 
.build()) { + HttpServerInfo httpServerInfo = server.getInstance(Key.get(HttpServerInfo.class)); + testAlwaysAuthorized(httpServerInfo.getHttpUri(), client); + testAlwaysAuthorized(httpServerInfo.getHttpsUri(), client); + + testFixedAuthenticator(httpServerInfo.getHttpUri()); + testFixedAuthenticator(httpServerInfo.getHttpsUri()); + } + } + + private void testFixedAuthenticator(URI baseUri) + throws Exception + { + assertOk(client, getUiLocation(baseUri)); + + assertOk(client, getValidApiLocation(baseUri)); + + assertResponseCode(client, getLocation(baseUri, "/ui/unknown"), SC_NOT_FOUND); + + assertResponseCode(client, getLocation(baseUri, "/ui/api/unknown"), SC_NOT_FOUND); + } + + private static void testAlwaysAuthorized(URI baseUri, OkHttpClient authorizedClient) + throws IOException + { + assertOk(authorizedClient, getUiLocation(baseUri)); + + assertOk(authorizedClient, getValidApiLocation(baseUri)); + + assertRedirect(authorizedClient, getLoginHtmlLocation(baseUri), getUiLocation(baseUri)); + + assertRedirect(authorizedClient, getLoginLocation(baseUri), getUiLocation(baseUri)); + + assertRedirect(authorizedClient, getLogoutLocation(baseUri), getUiLocation(baseUri)); + + assertResponseCode(authorizedClient, getLocation(baseUri, "/ui/unknown"), SC_NOT_FOUND); + + assertResponseCode(authorizedClient, getLocation(baseUri, "/ui/api/unknown"), SC_NOT_FOUND); + } + private static Response assertOk(OkHttpClient client, String url) throws IOException { From ae6efb89b52ba5d80f1bb9171a987dd665f72394 Mon Sep 17 00:00:00 2001 From: Dain Sundstrom Date: Tue, 14 Apr 2020 15:43:48 -0700 Subject: [PATCH 360/519] Add HTTP protocol based authenticators to web ui Add support for certificate, JWT and Kerberos authentication --- .../java/io/prestosql/server/ui/ForWebUi.java | 29 +++++++ .../server/ui/FormUiAuthenticatorModule.java | 4 + .../ui/FormWebUiAuthenticationManager.java | 57 ++++++++++++- .../server/ui/WebUiAuthenticationModule.java | 31 +++++++ 
.../io/prestosql/server/ui/TestWebUi.java | 83 +++++++++++++++++++ presto-main/src/test/resources/hmac_key.txt | 1 + 6 files changed, 201 insertions(+), 4 deletions(-) create mode 100644 presto-main/src/main/java/io/prestosql/server/ui/ForWebUi.java create mode 100644 presto-main/src/test/resources/hmac_key.txt diff --git a/presto-main/src/main/java/io/prestosql/server/ui/ForWebUi.java b/presto-main/src/main/java/io/prestosql/server/ui/ForWebUi.java new file mode 100644 index 000000000000..9048a4149782 --- /dev/null +++ b/presto-main/src/main/java/io/prestosql/server/ui/ForWebUi.java @@ -0,0 +1,29 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.server.ui; + +import javax.inject.Qualifier; + +import java.lang.annotation.Retention; +import java.lang.annotation.Target; + +import static java.lang.annotation.ElementType.FIELD; +import static java.lang.annotation.ElementType.METHOD; +import static java.lang.annotation.ElementType.PARAMETER; +import static java.lang.annotation.RetentionPolicy.RUNTIME; + +@Retention(RUNTIME) +@Target({FIELD, PARAMETER, METHOD}) +@Qualifier +public @interface ForWebUi {} diff --git a/presto-main/src/main/java/io/prestosql/server/ui/FormUiAuthenticatorModule.java b/presto-main/src/main/java/io/prestosql/server/ui/FormUiAuthenticatorModule.java index 0ae606f9f713..434a62a7741d 100644 --- a/presto-main/src/main/java/io/prestosql/server/ui/FormUiAuthenticatorModule.java +++ b/presto-main/src/main/java/io/prestosql/server/ui/FormUiAuthenticatorModule.java @@ -14,10 +14,13 @@ package io.prestosql.server.ui; import com.google.inject.Binder; +import com.google.inject.Key; import com.google.inject.Module; import com.google.inject.Scopes; +import io.prestosql.server.security.Authenticator; import io.prestosql.server.security.PasswordAuthenticatorManager; +import static com.google.inject.multibindings.OptionalBinder.newOptionalBinder; import static io.airlift.configuration.ConfigBinder.configBinder; public class FormUiAuthenticatorModule @@ -29,5 +32,6 @@ public void configure(Binder binder) binder.bind(PasswordAuthenticatorManager.class).in(Scopes.SINGLETON); binder.bind(WebUiAuthenticationManager.class).to(FormWebUiAuthenticationManager.class).in(Scopes.SINGLETON); configBinder(binder).bindConfig(FormWebUiConfig.class); + newOptionalBinder(binder, Key.get(Authenticator.class, ForWebUi.class)); } } diff --git a/presto-main/src/main/java/io/prestosql/server/ui/FormWebUiAuthenticationManager.java b/presto-main/src/main/java/io/prestosql/server/ui/FormWebUiAuthenticationManager.java index 5cde3156a755..8cad5f46486b 100644 --- 
a/presto-main/src/main/java/io/prestosql/server/ui/FormWebUiAuthenticationManager.java +++ b/presto-main/src/main/java/io/prestosql/server/ui/FormWebUiAuthenticationManager.java @@ -21,6 +21,9 @@ import io.jsonwebtoken.JwtException; import io.jsonwebtoken.Jwts; import io.jsonwebtoken.SignatureAlgorithm; +import io.prestosql.server.ServletSecurityUtils; +import io.prestosql.server.security.AuthenticationException; +import io.prestosql.server.security.Authenticator; import io.prestosql.server.security.PasswordAuthenticatorManager; import io.prestosql.server.security.SecurityConfig; import io.prestosql.spi.security.AccessDeniedException; @@ -79,12 +82,14 @@ public class FormWebUiAuthenticationManager private final Function jwtGenerator; private final boolean httpsForwardingEnabled; private final PasswordAuthenticatorManager passwordAuthenticatorManager; + private final Optional authenticator; @Inject public FormWebUiAuthenticationManager( FormWebUiConfig config, SecurityConfig securityConfig, - PasswordAuthenticatorManager passwordAuthenticatorManager) + PasswordAuthenticatorManager passwordAuthenticatorManager, + @ForWebUi Optional authenticator) { byte[] hmac; if (config.getSharedSecret().isPresent()) { @@ -103,6 +108,7 @@ public FormWebUiAuthenticationManager( this.httpsForwardingEnabled = requireNonNull(securityConfig, "securityConfig is null").getEnableForwardingHttps(); this.passwordAuthenticatorManager = requireNonNull(passwordAuthenticatorManager, "passwordAuthenticatorManager is null"); + this.authenticator = requireNonNull(authenticator, "authenticator is null"); } @Override @@ -118,8 +124,15 @@ public void handleUiRequest(HttpServletRequest request, HttpServletResponse resp nextFilter.doFilter(request, response); return; } + + // authenticator over a secure connection bypasses the form login + if (authenticator.isPresent() && isSecure(request, httpsForwardingEnabled)) { + handleProtocolLoginRequest(authenticator.get(), request, response, nextFilter); + 
return; + } + if (request.getPathInfo().equals("/ui/login")) { - handleLoginRequest(request, response); + handleFormLoginRequest(request, response); return; } if (request.getPathInfo().equals("/ui/logout")) { @@ -179,7 +192,41 @@ private static String encodeCurrentLocationForLoginRedirect(HttpServletRequest r return path; } - private void handleLoginRequest(HttpServletRequest request, HttpServletResponse response) + private static void handleProtocolLoginRequest(Authenticator authenticator, HttpServletRequest request, HttpServletResponse response, FilterChain nextFilter) + throws IOException, ServletException + { + Identity authenticatedIdentity; + try { + authenticatedIdentity = authenticator.authenticate(request); + } + catch (AuthenticationException e) { + // authentication failed + ServletSecurityUtils.skipRequestBody(request); + + e.getAuthenticateHeader().ifPresent(value -> response.addHeader(WWW_AUTHENTICATE, value)); + + ServletSecurityUtils.sendErrorMessage(response, SC_UNAUTHORIZED, firstNonNull(e.getMessage(), "Unauthorized")); + return; + } + + if (redirectFormLoginToUi(request, response)) { + return; + } + + ServletSecurityUtils.withAuthenticatedIdentity(nextFilter, request, response, authenticatedIdentity); + } + + public static boolean redirectFormLoginToUi(HttpServletRequest request, HttpServletResponse response) + { + // these paths should never be used with a protocol login, but the user might have this cached or linked, so redirect back to the main UI page. 
+ if (request.getPathInfo().equals(LOGIN_FORM) || request.getPathInfo().equals("/ui/login") || request.getPathInfo().equals("/ui/logout")) { + sendRedirect(response, getRedirectLocation(request, UI_LOCATION)); + return true; + } + return false; + } + + private void handleFormLoginRequest(HttpServletRequest request, HttpServletResponse response) { if (!isAuthenticationEnabled(request)) { sendRedirect(response, getRedirectLocation(request, DISABLED_LOCATION)); @@ -338,7 +385,9 @@ static String getRedirectLocation(HttpServletRequest request, String path, Strin private boolean isAuthenticationEnabled(HttpServletRequest request) { - return !isSecure(request, httpsForwardingEnabled) || passwordAuthenticatorManager.isLoaded(); + // unsecured requests support username-only authentication (no password) + // secured requests require a password authenticator or a protocol level authenticator + return !isSecure(request, httpsForwardingEnabled) || passwordAuthenticatorManager.isLoaded() || authenticator.isPresent(); } private static String generateJwt(byte[] hmac, String username, long sessionTimeoutNanos) diff --git a/presto-main/src/main/java/io/prestosql/server/ui/WebUiAuthenticationModule.java b/presto-main/src/main/java/io/prestosql/server/ui/WebUiAuthenticationModule.java index ecb0122b53b9..ac75599eca9e 100644 --- a/presto-main/src/main/java/io/prestosql/server/ui/WebUiAuthenticationModule.java +++ b/presto-main/src/main/java/io/prestosql/server/ui/WebUiAuthenticationModule.java @@ -14,13 +14,25 @@ package io.prestosql.server.ui; import com.google.inject.Binder; +import com.google.inject.Key; import com.google.inject.Module; +import com.google.inject.util.Modules; import io.airlift.configuration.AbstractConfigurationAwareModule; +import io.prestosql.server.security.Authenticator; +import io.prestosql.server.security.CertificateAuthenticator; +import io.prestosql.server.security.CertificateConfig; +import io.prestosql.server.security.JsonWebTokenAuthenticator; +import 
io.prestosql.server.security.JsonWebTokenConfig; +import io.prestosql.server.security.KerberosAuthenticator; +import io.prestosql.server.security.KerberosConfig; import io.prestosql.server.security.SecurityConfig; import java.util.List; +import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.collect.ImmutableList.toImmutableList; +import static com.google.inject.Scopes.SINGLETON; +import static com.google.inject.multibindings.OptionalBinder.newOptionalBinder; import static io.airlift.configuration.ConfigBinder.configBinder; import static java.util.Locale.ENGLISH; import static java.util.Objects.requireNonNull; @@ -35,6 +47,10 @@ protected void setup(Binder binder) installWebUiAuthenticator("form", new FormUiAuthenticatorModule()); installWebUiAuthenticator("fixed", new FixedUiAuthenticatorModule()); + + installWebUiAuthenticator("certificate", CertificateAuthenticator.class, CertificateConfig.class); + installWebUiAuthenticator("kerberos", KerberosAuthenticator.class, KerberosConfig.class); + installWebUiAuthenticator("jwt", JsonWebTokenAuthenticator.class, JsonWebTokenConfig.class); } private void installWebUiAuthenticator(String type, Module module) @@ -42,11 +58,26 @@ private void installWebUiAuthenticator(String type, Module module) install(webUiAuthenticator(type, module)); } + private void installWebUiAuthenticator(String name, Class authenticator, Class config) + { + install(webUiAuthenticator(name, authenticator, binder -> configBinder(binder).bindConfig(config))); + } + public static Module webUiAuthenticator(String type, Module module) { return new ConditionalWebUiAuthenticationModule(type, module); } + public static Module webUiAuthenticator(String name, Class clazz, Module module) + { + checkArgument(name.toLowerCase(ENGLISH).equals(name), "name is not lower case: %s", name); + Module authModule = binder -> { + binder.install(new FormUiAuthenticatorModule()); + newOptionalBinder(binder, 
Key.get(Authenticator.class, ForWebUi.class)).setBinding().to(clazz).in(SINGLETON); + }; + return webUiAuthenticator(name, Modules.combine(module, authModule)); + } + private static class ConditionalWebUiAuthenticationModule extends AbstractConfigurationAwareModule { diff --git a/presto-main/src/test/java/io/prestosql/server/ui/TestWebUi.java b/presto-main/src/test/java/io/prestosql/server/ui/TestWebUi.java index a9c21b347ca9..06451dbf7a6f 100644 --- a/presto-main/src/test/java/io/prestosql/server/ui/TestWebUi.java +++ b/presto-main/src/test/java/io/prestosql/server/ui/TestWebUi.java @@ -18,6 +18,8 @@ import com.google.inject.Key; import io.airlift.http.server.HttpServerInfo; import io.airlift.node.NodeInfo; +import io.jsonwebtoken.Jwts; +import io.jsonwebtoken.SignatureAlgorithm; import io.prestosql.server.security.PasswordAuthenticatorManager; import io.prestosql.server.testing.TestingPrestoServer; import io.prestosql.spi.security.AccessDeniedException; @@ -35,10 +37,15 @@ import java.net.CookieManager; import java.net.HttpCookie; import java.net.URI; +import java.nio.file.Files; +import java.nio.file.Paths; import java.security.Principal; +import java.time.ZonedDateTime; +import java.util.Date; import java.util.Optional; import static com.google.common.collect.Iterables.getOnlyElement; +import static com.google.common.net.HttpHeaders.AUTHORIZATION; import static com.google.common.net.HttpHeaders.LOCATION; import static com.google.common.net.HttpHeaders.X_FORWARDED_HOST; import static com.google.common.net.HttpHeaders.X_FORWARDED_PORT; @@ -47,6 +54,7 @@ import static io.airlift.testing.Closeables.closeQuietly; import static io.prestosql.client.OkHttpUtil.setupSsl; import static io.prestosql.testing.assertions.Assert.assertEquals; +import static java.nio.charset.StandardCharsets.UTF_8; import static javax.servlet.http.HttpServletResponse.SC_NOT_FOUND; import static javax.servlet.http.HttpServletResponse.SC_OK; import static 
javax.servlet.http.HttpServletResponse.SC_SEE_OTHER; @@ -65,6 +73,7 @@ public class TestWebUi .build(); private static final String TEST_USER = "test-user"; private static final String TEST_PASSWORD = "test-password"; + private static final String HMAC_KEY = Resources.getResource("hmac_key.txt").getPath(); private TestingPrestoServer server; private OkHttpClient client; @@ -354,6 +363,69 @@ private void testFixedAuthenticator(URI baseUri) assertResponseCode(client, getLocation(baseUri, "/ui/api/unknown"), SC_NOT_FOUND); } + @Test + public void testCertAuthenticator() + throws Exception + { + try (TestingPrestoServer server = TestingPrestoServer.builder() + .setProperties(ImmutableMap.builder() + .putAll(SECURE_PROPERTIES) + .put("web-ui.authentication.type", "certificate") + .put("http-server.https.truststore.path", LOCALHOST_KEYSTORE) + .put("http-server.https.truststore.key", "") + .build()) + .build()) { + HttpServerInfo httpServerInfo = server.getInstance(Key.get(HttpServerInfo.class)); + + testLogIn(httpServerInfo.getHttpUri()); + + testNeverAuthorized(httpServerInfo.getHttpsUri(), client); + + OkHttpClient.Builder clientBuilder = client.newBuilder(); + setupSsl( + clientBuilder, + Optional.of(LOCALHOST_KEYSTORE), + Optional.empty(), + Optional.of(LOCALHOST_KEYSTORE), + Optional.empty()); + OkHttpClient clientWithCert = clientBuilder.build(); + testAlwaysAuthorized(httpServerInfo.getHttpsUri(), clientWithCert); + } + } + + @Test + public void testJwtAuthenticator() + throws Exception + { + try (TestingPrestoServer server = TestingPrestoServer.builder() + .setProperties(ImmutableMap.builder() + .putAll(SECURE_PROPERTIES) + .put("web-ui.authentication.type", "jwt") + .put("http-server.authentication.jwt.key-file", HMAC_KEY) + .build()) + .build()) { + HttpServerInfo httpServerInfo = server.getInstance(Key.get(HttpServerInfo.class)); + + testLogIn(httpServerInfo.getHttpUri()); + + testNeverAuthorized(httpServerInfo.getHttpsUri(), client); + + String hmac = new 
String(Files.readAllBytes(Paths.get(HMAC_KEY)), UTF_8); + String token = Jwts.builder() + .signWith(SignatureAlgorithm.HS256, hmac) + .setSubject("test-user") + .setExpiration(Date.from(ZonedDateTime.now().plusMinutes(5).toInstant())) + .compact(); + + OkHttpClient clientWithJwt = client.newBuilder() + .authenticator((route, response) -> response.request().newBuilder() + .header(AUTHORIZATION, "Bearer " + token) + .build()) + .build(); + testAlwaysAuthorized(httpServerInfo.getHttpsUri(), clientWithJwt); + } + } + private static void testAlwaysAuthorized(URI baseUri, OkHttpClient authorizedClient) throws IOException { @@ -372,6 +444,17 @@ private static void testAlwaysAuthorized(URI baseUri, OkHttpClient authorizedCli assertResponseCode(authorizedClient, getLocation(baseUri, "/ui/api/unknown"), SC_NOT_FOUND); } + private static void testNeverAuthorized(URI baseUri, OkHttpClient notAuthorizedClient) + throws IOException + { + assertResponseCode(notAuthorizedClient, getUiLocation(baseUri), SC_UNAUTHORIZED); + assertResponseCode(notAuthorizedClient, getValidApiLocation(baseUri), SC_UNAUTHORIZED); + assertResponseCode(notAuthorizedClient, getLoginLocation(baseUri), SC_UNAUTHORIZED); + assertResponseCode(notAuthorizedClient, getLogoutLocation(baseUri), SC_UNAUTHORIZED); + assertResponseCode(notAuthorizedClient, getLocation(baseUri, "/ui/unknown"), SC_UNAUTHORIZED); + assertResponseCode(notAuthorizedClient, getLocation(baseUri, "/ui/api/unknown"), SC_UNAUTHORIZED); + } + private static Response assertOk(OkHttpClient client, String url) throws IOException { diff --git a/presto-main/src/test/resources/hmac_key.txt b/presto-main/src/test/resources/hmac_key.txt new file mode 100644 index 000000000000..4019303d022f --- /dev/null +++ b/presto-main/src/test/resources/hmac_key.txt @@ -0,0 +1 @@ +0wRuzI3TJCrfYoAaM+8QBAA7AbTI9VjfcuY32jAyHM0= From 7c090491a3682640af4c932b9cffb813d5ad480a Mon Sep 17 00:00:00 2001 From: Martin Traverso Date: Wed, 6 May 2020 09:27:17 -0700 Subject: 
[PATCH 361/519] Move delete analysis to the analyzer The derivation of the row id field was being done during planning instead of analysis. This also resulted in having to special-case the planning of the source table plan. --- .../io/prestosql/sql/analyzer/Analysis.java | 15 ++++ .../sql/analyzer/StatementAnalyzer.java | 75 +++++++++++++------ .../prestosql/sql/planner/QueryPlanner.java | 35 +++------ .../sql/planner/RelationPlanner.java | 2 +- .../tests/AbstractTestEngineOnlyQueries.java | 1 - 5 files changed, 76 insertions(+), 52 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/sql/analyzer/Analysis.java b/presto-main/src/main/java/io/prestosql/sql/analyzer/Analysis.java index 0f21f044eee8..75f2e5140203 100644 --- a/presto-main/src/main/java/io/prestosql/sql/analyzer/Analysis.java +++ b/presto-main/src/main/java/io/prestosql/sql/analyzer/Analysis.java @@ -39,6 +39,7 @@ import io.prestosql.sql.tree.AllColumns; import io.prestosql.sql.tree.ExistsPredicate; import io.prestosql.sql.tree.Expression; +import io.prestosql.sql.tree.FieldReference; import io.prestosql.sql.tree.FunctionCall; import io.prestosql.sql.tree.GroupingOperation; import io.prestosql.sql.tree.Identifier; @@ -164,6 +165,9 @@ public class Analysis // for recursive view detection private final Deque tablesForView = new ArrayDeque<>(); + // row id field for update/delete queries + private final Map, FieldReference> rowIdField = new LinkedHashMap<>(); + public Analysis(@Nullable Statement root, Map, Expression> parameters, boolean isDescribe) { this.root = root; @@ -783,6 +787,7 @@ public List getReferencedTables() NodeRef
table = entry.getKey(); List columns = referencedFields.get(table).stream() + .filter(field -> field.getName().isPresent()) // For DELETE queries, the synthetic column for row id doesn't have a name .map(field -> { String fieldName = field.getName().get(); @@ -816,6 +821,16 @@ public List getRoutines() .collect(toImmutableList()); } + public void setRowIdField(Table table, FieldReference field) + { + rowIdField.put(NodeRef.of(table), field); + } + + public FieldReference getRowIdField(Table table) + { + return rowIdField.get(NodeRef.of(table)); + } + @Immutable public static final class SelectExpression { diff --git a/presto-main/src/main/java/io/prestosql/sql/analyzer/StatementAnalyzer.java b/presto-main/src/main/java/io/prestosql/sql/analyzer/StatementAnalyzer.java index 61a43fb4a3f9..ea9b6721820b 100644 --- a/presto-main/src/main/java/io/prestosql/sql/analyzer/StatementAnalyzer.java +++ b/presto-main/src/main/java/io/prestosql/sql/analyzer/StatementAnalyzer.java @@ -43,7 +43,6 @@ import io.prestosql.spi.connector.ConnectorTableMetadata; import io.prestosql.spi.connector.ConnectorViewDefinition; import io.prestosql.spi.connector.ConnectorViewDefinition.ViewColumn; -import io.prestosql.spi.connector.TableNotFoundException; import io.prestosql.spi.function.OperatorType; import io.prestosql.spi.security.AccessDeniedException; import io.prestosql.spi.security.Identity; @@ -289,7 +288,14 @@ public Scope analyze(Node node, Scope outerQueryScope) public Scope analyze(Node node, Optional outerQueryScope) { - return new Visitor(outerQueryScope, warningCollector).process(node, Optional.empty()); + return new Visitor(outerQueryScope, warningCollector, false) + .process(node, Optional.empty()); + } + + public Scope analyzeForUpdate(Table table, Optional outerQueryScope) + { + return new Visitor(outerQueryScope, warningCollector, true) + .process(table, Optional.empty()); } /** @@ -302,11 +308,13 @@ private class Visitor { private final Optional outerQueryScope; private final 
WarningCollector warningCollector; + private final boolean isUpdateQuery; - private Visitor(Optional outerQueryScope, WarningCollector warningCollector) + private Visitor(Optional outerQueryScope, WarningCollector warningCollector, boolean isUpdateQuery) { this.outerQueryScope = requireNonNull(outerQueryScope, "outerQueryScope is null"); this.warningCollector = requireNonNull(warningCollector, "warningCollector is null"); + this.isUpdateQuery = isUpdateQuery; } @Override @@ -490,6 +498,22 @@ protected Scope visitDelete(Delete node, Optional scope) throw semanticException(NOT_SUPPORTED, node, "Deleting from views is not supported"); } + TableHandle handle = metadata.getTableHandle(session, tableName) + .orElseThrow(() -> semanticException(TABLE_NOT_FOUND, table, "Table '%s' does not exist", tableName)); + + accessControl.checkCanDeleteFromTable(session.toSecurityContext(), tableName); + + if (!accessControl.getRowFilters(session.toSecurityContext(), tableName).isEmpty()) { + throw semanticException(NOT_SUPPORTED, node, "Delete from table with row filter"); + } + + TableMetadata tableMetadata = metadata.getTableMetadata(session, handle); + for (ColumnMetadata tableColumn : tableMetadata.getColumns()) { + if (!accessControl.getColumnMasks(session.toSecurityContext(), tableName, tableColumn.getName(), tableColumn.getType()).isEmpty()) { + throw semanticException(NOT_SUPPORTED, node, "Delete from table with column mask"); + } + } + // Analyzer checks for select permissions but DELETE has a separate permission, so disable access checks // TODO: we shouldn't need to create a new analyzer. 
The access control should be carried in the context object StatementAnalyzer analyzer = new StatementAnalyzer( @@ -501,27 +525,11 @@ protected Scope visitDelete(Delete node, Optional scope) warningCollector, CorrelationSupport.ALLOWED); - Scope tableScope = analyzer.analyze(table, scope); + Scope tableScope = analyzer.analyzeForUpdate(table, scope); node.getWhere().ifPresent(where -> analyzeWhere(node, tableScope, where)); analysis.setUpdateType("DELETE", tableName); - accessControl.checkCanDeleteFromTable(session.toSecurityContext(), tableName); - - if (!accessControl.getRowFilters(session.toSecurityContext(), tableName).isEmpty()) { - throw semanticException(NOT_SUPPORTED, node, "Delete from table with row filter"); - } - - TableHandle handle = metadata.getTableHandle(session, tableName) - .orElseThrow(() -> new TableNotFoundException(tableName.asSchemaTableName())); - - TableMetadata tableMetadata = metadata.getTableMetadata(session, handle); - for (ColumnMetadata tableColumn : tableMetadata.getColumns()) { - if (!accessControl.getColumnMasks(session.toSecurityContext(), tableName, tableColumn.getName(), tableColumn.getType()).isEmpty()) { - throw semanticException(NOT_SUPPORTED, node, "Delete from table with column mask"); - } - } - return createAndAssignScope(node, scope, Field.newUnqualified("rows", BIGINT)); } @@ -1052,11 +1060,28 @@ protected Scope visitTable(Table table, Optional scope) analysis.setColumn(field, columnHandle); } + if (isUpdateQuery) { + // Add the row id field + ColumnHandle column = metadata.getUpdateRowIdColumnHandle(session, tableHandle.get()); + Type type = metadata.getColumnMetadata(session, tableHandle.get(), column).getType(); + Field field = Field.newUnqualified(Optional.empty(), type); + fields.add(field); + analysis.setColumn(field, column); + } + List outputFields = fields.build(); analyzeFiltersAndMasks(table, name, tableHandle, outputFields, session.getIdentity().getUser()); - return createAndAssignScope(table, scope, 
outputFields); + Scope tableScope = createAndAssignScope(table, scope, outputFields); + + if (isUpdateQuery) { + FieldReference reference = new FieldReference(outputFields.size() - 1); + analyzeExpression(reference, tableScope); + analysis.setRowIdField(table, reference); + } + + return tableScope; } private void analyzeFiltersAndMasks(Table table, QualifiedObjectName name, Optional tableHandle, List fields, String authorization) @@ -1067,10 +1092,12 @@ private void analyzeFiltersAndMasks(Table table, QualifiedObjectName name, Optio ImmutableMap.Builder> columnMasks = ImmutableMap.builder(); for (Field field : fields) { - List masks = accessControl.getColumnMasks(session.toSecurityContext(), name, field.getName().get(), field.getType()); - columnMasks.put(field, masks); + if (field.getName().isPresent()) { + List masks = accessControl.getColumnMasks(session.toSecurityContext(), name, field.getName().get(), field.getType()); + columnMasks.put(field, masks); - masks.forEach(mask -> analyzeColumnMask(session.getIdentity().getUser(), table, name, field, accessControlScope, mask)); + masks.forEach(mask -> analyzeColumnMask(session.getIdentity().getUser(), table, name, field, accessControlScope, mask)); + } } List filters = accessControl.getRowFilters(session.toSecurityContext(), name); diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/QueryPlanner.java b/presto-main/src/main/java/io/prestosql/sql/planner/QueryPlanner.java index ed92bb871171..93ba52403669 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/QueryPlanner.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/QueryPlanner.java @@ -28,7 +28,6 @@ import io.prestosql.sql.analyzer.Analysis.SelectExpression; import io.prestosql.sql.analyzer.Field; import io.prestosql.sql.analyzer.FieldId; -import io.prestosql.sql.analyzer.RelationId; import io.prestosql.sql.analyzer.RelationType; import io.prestosql.sql.analyzer.Scope; import io.prestosql.sql.planner.plan.AggregationNode; @@ 
-39,10 +38,8 @@ import io.prestosql.sql.planner.plan.GroupIdNode; import io.prestosql.sql.planner.plan.LimitNode; import io.prestosql.sql.planner.plan.OffsetNode; -import io.prestosql.sql.planner.plan.PlanNode; import io.prestosql.sql.planner.plan.ProjectNode; import io.prestosql.sql.planner.plan.SortNode; -import io.prestosql.sql.planner.plan.TableScanNode; import io.prestosql.sql.planner.plan.TableWriterNode.DeleteTarget; import io.prestosql.sql.planner.plan.ValuesNode; import io.prestosql.sql.planner.plan.WindowNode; @@ -65,6 +62,7 @@ import io.prestosql.sql.tree.QuerySpecification; import io.prestosql.sql.tree.SortItem; import io.prestosql.sql.tree.SymbolReference; +import io.prestosql.sql.tree.Table; import io.prestosql.sql.tree.Window; import io.prestosql.sql.tree.WindowFrame; import io.prestosql.type.TypeCoercion; @@ -261,45 +259,30 @@ private List outputExpressions(List selectExpressi public DeleteNode plan(Delete node) { - RelationType descriptor = analysis.getOutputDescriptor(node.getTable()); - TableHandle handle = analysis.getTableHandle(node.getTable()); - ColumnHandle rowIdHandle = metadata.getUpdateRowIdColumnHandle(session, handle); - Type rowIdType = metadata.getColumnMetadata(session, handle, rowIdHandle).getType(); + Table table = node.getTable(); + RelationType descriptor = analysis.getOutputDescriptor(table); + TableHandle handle = analysis.getTableHandle(table); // add table columns - ImmutableList.Builder outputSymbols = ImmutableList.builder(); ImmutableMap.Builder columns = ImmutableMap.builder(); ImmutableList.Builder fields = ImmutableList.builder(); for (Field field : descriptor.getAllFields()) { - Symbol symbol = symbolAllocator.newSymbol(field.getName().get(), field.getType()); - outputSymbols.add(symbol); + Symbol symbol = symbolAllocator.newSymbol(field); columns.put(symbol, analysis.getColumn(field)); fields.add(field); } - // add rowId column - Field rowIdField = Field.newUnqualified(Optional.empty(), rowIdType); - Symbol 
rowIdSymbol = symbolAllocator.newSymbol("$rowId", rowIdField.getType()); - outputSymbols.add(rowIdSymbol); - columns.put(rowIdSymbol, rowIdHandle); - fields.add(rowIdField); - // create table scan - PlanNode tableScan = TableScanNode.newInstance(idAllocator.getNextId(), handle, outputSymbols.build(), columns.build()); - Scope scope = Scope.builder().withRelationType(RelationId.anonymous(), new RelationType(fields.build())).build(); - RelationPlan relationPlan = new RelationPlan(tableScan, scope, outputSymbols.build()); - - TranslationMap translations = new TranslationMap(relationPlan, analysis, lambdaDeclarationToSymbolMap); - translations.setFieldMappings(relationPlan.getFieldMappings()); - - PlanBuilder builder = new PlanBuilder(translations, relationPlan.getRoot()); + RelationPlan relationPlan = new RelationPlanner(analysis, symbolAllocator, idAllocator, lambdaDeclarationToSymbolMap, metadata, session) + .process(table, null); + PlanBuilder builder = planBuilderFor(relationPlan); if (node.getWhere().isPresent()) { builder = filter(builder, node.getWhere().get(), node); } // create delete node - Symbol rowId = builder.translate(new FieldReference(relationPlan.getDescriptor().indexOf(rowIdField))); + Symbol rowId = builder.translate(analysis.getRowIdField(table)); List outputs = ImmutableList.of( symbolAllocator.newSymbol("partialrows", BIGINT), symbolAllocator.newSymbol("fragment", VARBINARY)); diff --git a/presto-main/src/main/java/io/prestosql/sql/planner/RelationPlanner.java b/presto-main/src/main/java/io/prestosql/sql/planner/RelationPlanner.java index 031aa15ac001..36af4c198742 100644 --- a/presto-main/src/main/java/io/prestosql/sql/planner/RelationPlanner.java +++ b/presto-main/src/main/java/io/prestosql/sql/planner/RelationPlanner.java @@ -169,7 +169,7 @@ protected RelationPlan visitTable(Table node, Void context) ImmutableList.Builder outputSymbolsBuilder = ImmutableList.builder(); ImmutableMap.Builder columns = ImmutableMap.builder(); for (Field field : 
scope.getRelationType().getAllFields()) { - Symbol symbol = symbolAllocator.newSymbol(field.getName().get(), field.getType()); + Symbol symbol = symbolAllocator.newSymbol(field); outputSymbolsBuilder.add(symbol); columns.put(symbol, analysis.getColumn(field)); diff --git a/presto-tests/src/test/java/io/prestosql/tests/AbstractTestEngineOnlyQueries.java b/presto-tests/src/test/java/io/prestosql/tests/AbstractTestEngineOnlyQueries.java index 722ce136b6d0..d6040236a261 100644 --- a/presto-tests/src/test/java/io/prestosql/tests/AbstractTestEngineOnlyQueries.java +++ b/presto-tests/src/test/java/io/prestosql/tests/AbstractTestEngineOnlyQueries.java @@ -800,7 +800,6 @@ public void testDescribeOutputNamedAndUnnamed() public void testDescribeOutputNonSelect() { assertDescribeOutputRowCount("CREATE TABLE foo AS SELECT * FROM nation"); - assertDescribeOutputRowCount("DELETE FROM orders"); assertDescribeOutputEmpty("CALL foo()"); assertDescribeOutputEmpty("SET SESSION optimize_hash_generation=false"); From 971b32c9fa17931221f376d1697fca44cbf2c8c4 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Thu, 7 May 2020 12:33:34 +0200 Subject: [PATCH 362/519] Constraint cleanup * rename `columns` to `predicateColumns` to make the semantics clearer * deprecate constructors that should not be used anymore * add javadoc for `predicateColumns` accessor --- .../plugin/bigquery/BigQueryMetadata.java | 2 +- .../prestosql/plugin/hive/HiveMetadata.java | 2 +- .../prestosql/spi/connector/Constraint.java | 51 +++++++++++++------ 3 files changed, 38 insertions(+), 17 deletions(-) diff --git a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryMetadata.java b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryMetadata.java index 3c9197f14697..abb861730e31 100644 --- a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryMetadata.java +++ b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryMetadata.java @@ -294,7 +294,7 @@ public 
Optional> applyFilter( Constraint constraint) { log.debug("applyFilter(session=%s, handle=%s, summary=%s, predicate=%s, columns=%s)", - session, handle, constraint.getSummary(), constraint.predicate(), constraint.getColumns()); + session, handle, constraint.getSummary(), constraint.predicate(), constraint.getPredicateColumns()); BigQueryTableHandle bigQueryTableHandle = (BigQueryTableHandle) handle; TupleDomain oldDomain = bigQueryTableHandle.getConstraint(); diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveMetadata.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveMetadata.java index 98a043de38ea..ba6aa1723059 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveMetadata.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveMetadata.java @@ -1930,7 +1930,7 @@ public Optional> applyFilter(C checkArgument(!handle.getAnalyzePartitionValues().isPresent() || constraint.getSummary().isAll(), "Analyze should not have a constraint"); HivePartitionResult partitionResult = partitionManager.getPartitions(metastore, new HiveIdentity(session), handle, constraint); - HiveTableHandle newHandle = partitionManager.applyPartitionResult(handle, partitionResult, constraint.getColumns()); + HiveTableHandle newHandle = partitionManager.applyPartitionResult(handle, partitionResult, constraint.getPredicateColumns()); if (handle.getPartitions().equals(newHandle.getPartitions()) && handle.getCompactEffectivePredicate().equals(newHandle.getCompactEffectivePredicate()) && diff --git a/presto-spi/src/main/java/io/prestosql/spi/connector/Constraint.java b/presto-spi/src/main/java/io/prestosql/spi/connector/Constraint.java index 6f894f87f8c2..2a41795c3853 100644 --- a/presto-spi/src/main/java/io/prestosql/spi/connector/Constraint.java +++ b/presto-spi/src/main/java/io/prestosql/spi/connector/Constraint.java @@ -27,47 +27,56 @@ public class Constraint { private final TupleDomain summary; private final Optional>> predicate; - private final 
Optional> columns; + private final Optional> predicateColumns; public static Constraint alwaysTrue() { - return new Constraint(TupleDomain.all(), Optional.empty()); + return new Constraint(TupleDomain.all(), Optional.empty(), Optional.empty()); } public static Constraint alwaysFalse() { - return new Constraint(TupleDomain.none(), Optional.of(bindings -> false)); + return new Constraint(TupleDomain.none(), Optional.of(bindings -> false), Optional.empty()); } public Constraint(TupleDomain summary) { - this(summary, Optional.empty()); + this(summary, Optional.empty(), Optional.empty()); } + /** + * @deprecated Use {@link #Constraint(TupleDomain, Predicate, Set)} instead. + */ + @Deprecated public Constraint(TupleDomain summary, Predicate> predicate) { - this(summary, Optional.of(predicate)); + this(summary, Optional.of(predicate), Optional.empty()); } - public Constraint(TupleDomain summary, Predicate> predicate, Set columns) + public Constraint(TupleDomain summary, Predicate> predicate, Set predicateColumns) { - this(summary, Optional.of(predicate), Optional.of(columns)); + this(summary, Optional.of(predicate), Optional.of(predicateColumns)); } + /** + * @deprecated Use {@link #Constraint(TupleDomain, Optional, Optional)} instead. 
+ */ + @Deprecated public Constraint(TupleDomain summary, Optional>> predicate) { this(summary, predicate, Optional.empty()); } - public Constraint(TupleDomain summary, Optional>> predicate, Optional> columns) + public Constraint(TupleDomain summary, Optional>> predicate, Optional> predicateColumns) { - requireNonNull(summary, "summary is null"); - requireNonNull(predicate, "predicate is null"); - requireNonNull(columns, "columns is null"); + this.summary = requireNonNull(summary, "summary is null"); + this.predicate = requireNonNull(predicate, "predicate is null"); + this.predicateColumns = requireNonNull(predicateColumns, "predicateColumns is null"); - this.summary = summary; - this.predicate = predicate; - this.columns = columns; + // TODO remove deprecated constructors and validate that predicate is present *iff* predicateColumns is present + if (predicateColumns.isPresent() && !predicate.isPresent()) { + throw new IllegalArgumentException("predicateColumns cannot be present when predicate is not present"); + } } public TupleDomain getSummary() @@ -80,8 +89,20 @@ public Optional>> predicate() return predicate; } + /** + * @deprecated Use {@link #getPredicateColumns()} instead. + */ + @Deprecated public Optional> getColumns() { - return columns; + return getPredicateColumns(); + } + + /** + * Set of columns the {@link #predicate()} result depends on. 
+ */ + public Optional> getPredicateColumns() + { + return predicateColumns; } } From 77a59135207143c883404d05f6e03e4f37cfae6b Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Wed, 6 May 2020 17:37:15 +0200 Subject: [PATCH 363/519] Rename method and make more flexible --- .../connector/system/TableCommentSystemTable.java | 8 ++++---- .../connector/system/jdbc/ColumnJdbcTable.java | 8 ++++---- .../io/prestosql/connector/system/jdbc/FilterUtil.java | 2 +- .../connector/system/jdbc/SchemaJdbcTable.java | 3 ++- .../connector/system/jdbc/TableJdbcTable.java | 10 +++++----- 5 files changed, 16 insertions(+), 15 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/connector/system/TableCommentSystemTable.java b/presto-main/src/main/java/io/prestosql/connector/system/TableCommentSystemTable.java index b11a2c75138b..7ec570270168 100644 --- a/presto-main/src/main/java/io/prestosql/connector/system/TableCommentSystemTable.java +++ b/presto-main/src/main/java/io/prestosql/connector/system/TableCommentSystemTable.java @@ -38,8 +38,8 @@ import java.util.Set; import static io.prestosql.connector.system.jdbc.FilterUtil.filter; -import static io.prestosql.connector.system.jdbc.FilterUtil.stringFilter; import static io.prestosql.connector.system.jdbc.FilterUtil.tablePrefix; +import static io.prestosql.connector.system.jdbc.FilterUtil.tryGetSingleVarcharValue; import static io.prestosql.metadata.MetadataListing.listCatalogs; import static io.prestosql.metadata.MetadataListing.listTables; import static io.prestosql.metadata.MetadataUtil.TableMetadataBuilder.tableMetadataBuilder; @@ -86,9 +86,9 @@ public ConnectorTableMetadata getTableMetadata() @Override public RecordCursor cursor(ConnectorTransactionHandle transactionHandle, ConnectorSession connectorSession, TupleDomain constraint) { - Optional catalogFilter = stringFilter(constraint, 0); - Optional schemaFilter = stringFilter(constraint, 1); - Optional tableFilter = stringFilter(constraint, 2); + Optional 
catalogFilter = tryGetSingleVarcharValue(constraint, 0); + Optional schemaFilter = tryGetSingleVarcharValue(constraint, 1); + Optional tableFilter = tryGetSingleVarcharValue(constraint, 2); Session session = ((FullConnectorSession) connectorSession).getSession(); Builder table = InMemoryRecordSet.builder(COMMENT_TABLE); diff --git a/presto-main/src/main/java/io/prestosql/connector/system/jdbc/ColumnJdbcTable.java b/presto-main/src/main/java/io/prestosql/connector/system/jdbc/ColumnJdbcTable.java index 00f3b40526e4..1f33faee3dd6 100644 --- a/presto-main/src/main/java/io/prestosql/connector/system/jdbc/ColumnJdbcTable.java +++ b/presto-main/src/main/java/io/prestosql/connector/system/jdbc/ColumnJdbcTable.java @@ -42,7 +42,7 @@ import java.util.Optional; import static io.prestosql.connector.system.jdbc.FilterUtil.filter; -import static io.prestosql.connector.system.jdbc.FilterUtil.stringFilter; +import static io.prestosql.connector.system.jdbc.FilterUtil.tryGetSingleVarcharValue; import static io.prestosql.metadata.MetadataListing.listCatalogs; import static io.prestosql.metadata.MetadataListing.listTableColumns; import static io.prestosql.metadata.MetadataUtil.TableMetadataBuilder.tableMetadataBuilder; @@ -116,9 +116,9 @@ public ConnectorTableMetadata getTableMetadata() public RecordCursor cursor(ConnectorTransactionHandle transactionHandle, ConnectorSession connectorSession, TupleDomain constraint) { Session session = ((FullConnectorSession) connectorSession).getSession(); - Optional catalogFilter = stringFilter(constraint, 0); - Optional schemaFilter = stringFilter(constraint, 1); - Optional tableFilter = stringFilter(constraint, 2); + Optional catalogFilter = tryGetSingleVarcharValue(constraint, 0); + Optional schemaFilter = tryGetSingleVarcharValue(constraint, 1); + Optional tableFilter = tryGetSingleVarcharValue(constraint, 2); Builder table = InMemoryRecordSet.builder(METADATA); for (String catalog : filter(listCatalogs(session, metadata, 
accessControl).keySet(), catalogFilter)) { diff --git a/presto-main/src/main/java/io/prestosql/connector/system/jdbc/FilterUtil.java b/presto-main/src/main/java/io/prestosql/connector/system/jdbc/FilterUtil.java index 18b29a35359b..368df8c67163 100644 --- a/presto-main/src/main/java/io/prestosql/connector/system/jdbc/FilterUtil.java +++ b/presto-main/src/main/java/io/prestosql/connector/system/jdbc/FilterUtil.java @@ -26,7 +26,7 @@ public final class FilterUtil { private FilterUtil() {} - public static Optional stringFilter(TupleDomain constraint, int index) + public static Optional tryGetSingleVarcharValue(TupleDomain constraint, T index) { if (constraint.isNone()) { return Optional.empty(); diff --git a/presto-main/src/main/java/io/prestosql/connector/system/jdbc/SchemaJdbcTable.java b/presto-main/src/main/java/io/prestosql/connector/system/jdbc/SchemaJdbcTable.java index f1b579084a45..8d0576c9ae9e 100644 --- a/presto-main/src/main/java/io/prestosql/connector/system/jdbc/SchemaJdbcTable.java +++ b/presto-main/src/main/java/io/prestosql/connector/system/jdbc/SchemaJdbcTable.java @@ -31,6 +31,7 @@ import java.util.Optional; import static io.prestosql.connector.system.jdbc.FilterUtil.filter; +import static io.prestosql.connector.system.jdbc.FilterUtil.tryGetSingleVarcharValue; import static io.prestosql.metadata.MetadataListing.listCatalogs; import static io.prestosql.metadata.MetadataListing.listSchemas; import static io.prestosql.metadata.MetadataUtil.TableMetadataBuilder.tableMetadataBuilder; @@ -67,7 +68,7 @@ public ConnectorTableMetadata getTableMetadata() public RecordCursor cursor(ConnectorTransactionHandle transactionHandle, ConnectorSession connectorSession, TupleDomain constraint) { Session session = ((FullConnectorSession) connectorSession).getSession(); - Optional catalogFilter = FilterUtil.stringFilter(constraint, 1); + Optional catalogFilter = tryGetSingleVarcharValue(constraint, 1); Builder table = InMemoryRecordSet.builder(METADATA); for (String 
catalog : filter(listCatalogs(session, metadata, accessControl).keySet(), catalogFilter)) { diff --git a/presto-main/src/main/java/io/prestosql/connector/system/jdbc/TableJdbcTable.java b/presto-main/src/main/java/io/prestosql/connector/system/jdbc/TableJdbcTable.java index 7e85dd22a73e..20439a6deea9 100644 --- a/presto-main/src/main/java/io/prestosql/connector/system/jdbc/TableJdbcTable.java +++ b/presto-main/src/main/java/io/prestosql/connector/system/jdbc/TableJdbcTable.java @@ -34,8 +34,8 @@ import static io.prestosql.connector.system.jdbc.FilterUtil.emptyOrEquals; import static io.prestosql.connector.system.jdbc.FilterUtil.filter; -import static io.prestosql.connector.system.jdbc.FilterUtil.stringFilter; import static io.prestosql.connector.system.jdbc.FilterUtil.tablePrefix; +import static io.prestosql.connector.system.jdbc.FilterUtil.tryGetSingleVarcharValue; import static io.prestosql.metadata.MetadataListing.listCatalogs; import static io.prestosql.metadata.MetadataListing.listTables; import static io.prestosql.metadata.MetadataListing.listViews; @@ -81,10 +81,10 @@ public ConnectorTableMetadata getTableMetadata() public RecordCursor cursor(ConnectorTransactionHandle transactionHandle, ConnectorSession connectorSession, TupleDomain constraint) { Session session = ((FullConnectorSession) connectorSession).getSession(); - Optional catalogFilter = stringFilter(constraint, 0); - Optional schemaFilter = stringFilter(constraint, 1); - Optional tableFilter = stringFilter(constraint, 2); - Optional typeFilter = stringFilter(constraint, 3); + Optional catalogFilter = tryGetSingleVarcharValue(constraint, 0); + Optional schemaFilter = tryGetSingleVarcharValue(constraint, 1); + Optional tableFilter = tryGetSingleVarcharValue(constraint, 2); + Optional typeFilter = tryGetSingleVarcharValue(constraint, 3); boolean includeTables = emptyOrEquals(typeFilter, "TABLE"); boolean includeViews = emptyOrEquals(typeFilter, "VIEW"); From a3763b8d3839322722db09a8fccd424e3e5bca03 
Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Wed, 6 May 2020 17:37:16 +0200 Subject: [PATCH 364/519] Replace FilterUtil.filter method Replace the method with better API. --- .../system/TableCommentSystemTable.java | 3 +-- .../connector/system/jdbc/ColumnJdbcTable.java | 3 +-- .../connector/system/jdbc/FilterUtil.java | 10 ---------- .../connector/system/jdbc/SchemaJdbcTable.java | 3 +-- .../connector/system/jdbc/TableJdbcTable.java | 3 +-- .../io/prestosql/metadata/MetadataListing.java | 18 +++++++++++++++++- 6 files changed, 21 insertions(+), 19 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/connector/system/TableCommentSystemTable.java b/presto-main/src/main/java/io/prestosql/connector/system/TableCommentSystemTable.java index 7ec570270168..ed202f3eea2d 100644 --- a/presto-main/src/main/java/io/prestosql/connector/system/TableCommentSystemTable.java +++ b/presto-main/src/main/java/io/prestosql/connector/system/TableCommentSystemTable.java @@ -37,7 +37,6 @@ import java.util.Optional; import java.util.Set; -import static io.prestosql.connector.system.jdbc.FilterUtil.filter; import static io.prestosql.connector.system.jdbc.FilterUtil.tablePrefix; import static io.prestosql.connector.system.jdbc.FilterUtil.tryGetSingleVarcharValue; import static io.prestosql.metadata.MetadataListing.listCatalogs; @@ -93,7 +92,7 @@ public RecordCursor cursor(ConnectorTransactionHandle transactionHandle, Connect Session session = ((FullConnectorSession) connectorSession).getSession(); Builder table = InMemoryRecordSet.builder(COMMENT_TABLE); - for (String catalog : filter(listCatalogs(session, metadata, accessControl).keySet(), catalogFilter)) { + for (String catalog : listCatalogs(session, metadata, accessControl, catalogFilter).keySet()) { QualifiedTablePrefix prefix = tablePrefix(catalog, schemaFilter, tableFilter); Set names = ImmutableSet.of(); diff --git a/presto-main/src/main/java/io/prestosql/connector/system/jdbc/ColumnJdbcTable.java 
b/presto-main/src/main/java/io/prestosql/connector/system/jdbc/ColumnJdbcTable.java index 1f33faee3dd6..5c2afb7455b8 100644 --- a/presto-main/src/main/java/io/prestosql/connector/system/jdbc/ColumnJdbcTable.java +++ b/presto-main/src/main/java/io/prestosql/connector/system/jdbc/ColumnJdbcTable.java @@ -41,7 +41,6 @@ import java.util.Map.Entry; import java.util.Optional; -import static io.prestosql.connector.system.jdbc.FilterUtil.filter; import static io.prestosql.connector.system.jdbc.FilterUtil.tryGetSingleVarcharValue; import static io.prestosql.metadata.MetadataListing.listCatalogs; import static io.prestosql.metadata.MetadataListing.listTableColumns; @@ -121,7 +120,7 @@ public RecordCursor cursor(ConnectorTransactionHandle transactionHandle, Connect Optional tableFilter = tryGetSingleVarcharValue(constraint, 2); Builder table = InMemoryRecordSet.builder(METADATA); - for (String catalog : filter(listCatalogs(session, metadata, accessControl).keySet(), catalogFilter)) { + for (String catalog : listCatalogs(session, metadata, accessControl, catalogFilter).keySet()) { QualifiedTablePrefix prefix = FilterUtil.tablePrefix(catalog, schemaFilter, tableFilter); for (Entry> entry : listTableColumns(session, metadata, accessControl, prefix).entrySet()) { addColumnRows(table, catalog, entry.getKey(), entry.getValue()); diff --git a/presto-main/src/main/java/io/prestosql/connector/system/jdbc/FilterUtil.java b/presto-main/src/main/java/io/prestosql/connector/system/jdbc/FilterUtil.java index 368df8c67163..a97eec0357b1 100644 --- a/presto-main/src/main/java/io/prestosql/connector/system/jdbc/FilterUtil.java +++ b/presto-main/src/main/java/io/prestosql/connector/system/jdbc/FilterUtil.java @@ -13,8 +13,6 @@ */ package io.prestosql.connector.system.jdbc; -import com.google.common.base.Predicates; -import com.google.common.collect.Iterables; import io.airlift.slice.Slice; import io.prestosql.metadata.QualifiedTablePrefix; import io.prestosql.spi.predicate.Domain; @@ -53,14 
+51,6 @@ public static QualifiedTablePrefix tablePrefix(String catalog, Optional return prefix; } - public static Iterable filter(Iterable items, Optional filter) - { - if (!filter.isPresent()) { - return items; - } - return Iterables.filter(items, Predicates.equalTo(filter.get())); - } - public static boolean emptyOrEquals(Optional value, T other) { return !value.isPresent() || value.get().equals(other); diff --git a/presto-main/src/main/java/io/prestosql/connector/system/jdbc/SchemaJdbcTable.java b/presto-main/src/main/java/io/prestosql/connector/system/jdbc/SchemaJdbcTable.java index 8d0576c9ae9e..dfc9decdd3d0 100644 --- a/presto-main/src/main/java/io/prestosql/connector/system/jdbc/SchemaJdbcTable.java +++ b/presto-main/src/main/java/io/prestosql/connector/system/jdbc/SchemaJdbcTable.java @@ -30,7 +30,6 @@ import java.util.Optional; -import static io.prestosql.connector.system.jdbc.FilterUtil.filter; import static io.prestosql.connector.system.jdbc.FilterUtil.tryGetSingleVarcharValue; import static io.prestosql.metadata.MetadataListing.listCatalogs; import static io.prestosql.metadata.MetadataListing.listSchemas; @@ -71,7 +70,7 @@ public RecordCursor cursor(ConnectorTransactionHandle transactionHandle, Connect Optional catalogFilter = tryGetSingleVarcharValue(constraint, 1); Builder table = InMemoryRecordSet.builder(METADATA); - for (String catalog : filter(listCatalogs(session, metadata, accessControl).keySet(), catalogFilter)) { + for (String catalog : listCatalogs(session, metadata, accessControl, catalogFilter).keySet()) { for (String schema : listSchemas(session, metadata, accessControl, catalog)) { table.addRow(schema, catalog); } diff --git a/presto-main/src/main/java/io/prestosql/connector/system/jdbc/TableJdbcTable.java b/presto-main/src/main/java/io/prestosql/connector/system/jdbc/TableJdbcTable.java index 20439a6deea9..1c9fa3eb368c 100644 --- a/presto-main/src/main/java/io/prestosql/connector/system/jdbc/TableJdbcTable.java +++ 
b/presto-main/src/main/java/io/prestosql/connector/system/jdbc/TableJdbcTable.java @@ -33,7 +33,6 @@ import java.util.Set; import static io.prestosql.connector.system.jdbc.FilterUtil.emptyOrEquals; -import static io.prestosql.connector.system.jdbc.FilterUtil.filter; import static io.prestosql.connector.system.jdbc.FilterUtil.tablePrefix; import static io.prestosql.connector.system.jdbc.FilterUtil.tryGetSingleVarcharValue; import static io.prestosql.metadata.MetadataListing.listCatalogs; @@ -94,7 +93,7 @@ public RecordCursor cursor(ConnectorTransactionHandle transactionHandle, Connect return table.build().cursor(); } - for (String catalog : filter(listCatalogs(session, metadata, accessControl).keySet(), catalogFilter)) { + for (String catalog : listCatalogs(session, metadata, accessControl, catalogFilter).keySet()) { QualifiedTablePrefix prefix = tablePrefix(catalog, schemaFilter, tableFilter); Set views = listViews(session, metadata, accessControl, prefix); diff --git a/presto-main/src/main/java/io/prestosql/metadata/MetadataListing.java b/presto-main/src/main/java/io/prestosql/metadata/MetadataListing.java index f1c32ffc66d3..5821ecced808 100644 --- a/presto-main/src/main/java/io/prestosql/metadata/MetadataListing.java +++ b/presto-main/src/main/java/io/prestosql/metadata/MetadataListing.java @@ -29,6 +29,7 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; +import java.util.Optional; import java.util.Set; import java.util.SortedMap; import java.util.SortedSet; @@ -42,7 +43,22 @@ private MetadataListing() {} public static SortedMap listCatalogs(Session session, Metadata metadata, AccessControl accessControl) { - Map catalogNames = metadata.getCatalogNames(session); + return listCatalogs(session, metadata, accessControl, Optional.empty()); + } + + public static SortedMap listCatalogs(Session session, Metadata metadata, AccessControl accessControl, Optional catalogName) + { + Map catalogNames; + if (catalogName.isPresent()) { + Optional 
catalogHandle = metadata.getCatalogHandle(session, catalogName.get()); + if (!catalogHandle.isPresent()) { + return ImmutableSortedMap.of(); + } + catalogNames = ImmutableSortedMap.of(catalogName.get(), catalogHandle.get()); + } + else { + catalogNames = metadata.getCatalogNames(session); + } Set allowedCatalogs = accessControl.filterCatalogs(session.getIdentity(), catalogNames.keySet()); ImmutableSortedMap.Builder result = ImmutableSortedMap.naturalOrder(); From ce9abcde7ebab77b83a37bcc089057a796f1574e Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Wed, 6 May 2020 17:37:17 +0200 Subject: [PATCH 365/519] Simplify control flow --- .../prestosql/connector/system/jdbc/FilterUtil.java | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/connector/system/jdbc/FilterUtil.java b/presto-main/src/main/java/io/prestosql/connector/system/jdbc/FilterUtil.java index a97eec0357b1..e8133ffbe297 100644 --- a/presto-main/src/main/java/io/prestosql/connector/system/jdbc/FilterUtil.java +++ b/presto-main/src/main/java/io/prestosql/connector/system/jdbc/FilterUtil.java @@ -41,14 +41,13 @@ public static Optional tryGetSingleVarcharValue(TupleDomain const public static QualifiedTablePrefix tablePrefix(String catalog, Optional schema, Optional table) { - QualifiedTablePrefix prefix = new QualifiedTablePrefix(catalog); + if (schema.isPresent() && table.isPresent()) { + return new QualifiedTablePrefix(catalog, schema.get(), table.get()); + } if (schema.isPresent()) { - prefix = new QualifiedTablePrefix(catalog, schema.get()); - if (table.isPresent()) { - prefix = new QualifiedTablePrefix(catalog, schema.get(), table.get()); - } + return new QualifiedTablePrefix(catalog, schema.get()); } - return prefix; + return new QualifiedTablePrefix(catalog); } public static boolean emptyOrEquals(Optional value, T other) From 1d7cba21855072d1b6ae1ed2e59f9deffddc6788 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Thu, 7 May 2020 
17:49:16 +0200 Subject: [PATCH 366/519] Move empty table name check to MetadataManager `MetadataManager` does not correctly behave when asked for `new QualifiedObjectName("", "", "")` table or view. This was previously worked around in the `information_schema` connector, but since it applies to other metadata users, it should be fixed in `MetadataManager` directly. --- .../InformationSchemaMetadata.java | 13 ++----- .../prestosql/metadata/MetadataManager.java | 10 +++++ .../TestInformationSchemaMetadata.java | 37 ++++++++++--------- .../tests/TestInformationSchemaConnector.java | 36 ++++++++---------- 4 files changed, 48 insertions(+), 48 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/connector/informationschema/InformationSchemaMetadata.java b/presto-main/src/main/java/io/prestosql/connector/informationschema/InformationSchemaMetadata.java index d1c223d6a1f5..e6206882544c 100644 --- a/presto-main/src/main/java/io/prestosql/connector/informationschema/InformationSchemaMetadata.java +++ b/presto-main/src/main/java/io/prestosql/connector/informationschema/InformationSchemaMetadata.java @@ -217,7 +217,7 @@ private Set getPrefixes(ConnectorSession session, Informat return ImmutableSet.of(); } - Optional> catalogs = filterString(constraint.getSummary(), CATALOG_COLUMN_HANDLE).map(this::removeEmptyValues); + Optional> catalogs = filterString(constraint.getSummary(), CATALOG_COLUMN_HANDLE); if (catalogs.isPresent() && !catalogs.get().contains(table.getCatalogName())) { return ImmutableSet.of(); } @@ -247,7 +247,7 @@ private Set calculatePrefixesWithSchemaName( TupleDomain constraint, Optional>> predicate) { - Optional> schemas = filterString(constraint, SCHEMA_COLUMN_HANDLE).map(this::removeEmptyValues); + Optional> schemas = filterString(constraint, SCHEMA_COLUMN_HANDLE); if (schemas.isPresent()) { return schemas.get().stream() .filter(this::isLowerCase) @@ -275,7 +275,7 @@ private Set calculatePrefixesWithTableName( { Session session = 
((FullConnectorSession) connectorSession).getSession(); - Optional> tables = filterString(constraint, TABLE_NAME_COLUMN_HANDLE).map(this::removeEmptyValues); + Optional> tables = filterString(constraint, TABLE_NAME_COLUMN_HANDLE); if (tables.isPresent()) { return prefixes.stream() .peek(prefix -> verify(!prefix.asQualifiedObjectName().isPresent())) @@ -369,11 +369,4 @@ private boolean isLowerCase(String value) { return value.toLowerCase(ENGLISH).equals(value); } - - private Set removeEmptyValues(Set values) - { - return values.stream() - .filter(value -> !value.isEmpty()) - .collect(toImmutableSet()); - } } diff --git a/presto-main/src/main/java/io/prestosql/metadata/MetadataManager.java b/presto-main/src/main/java/io/prestosql/metadata/MetadataManager.java index 4247d588ff1c..e763b181dd9b 100644 --- a/presto-main/src/main/java/io/prestosql/metadata/MetadataManager.java +++ b/presto-main/src/main/java/io/prestosql/metadata/MetadataManager.java @@ -305,6 +305,11 @@ public Optional getTableHandle(Session session, QualifiedObjectName { requireNonNull(table, "table is null"); + if (table.getCatalogName().isEmpty() || table.getSchemaName().isEmpty() || table.getObjectName().isEmpty()) { + // Table cannot exist + return Optional.empty(); + } + Optional catalog = getOptionalCatalogMetadata(session, table.getCatalogName()); if (catalog.isPresent()) { CatalogMetadata catalogMetadata = catalog.get(); @@ -991,6 +996,11 @@ public Optional getSchemaOwner(Session session, CatalogSchemaNa @Override public Optional getView(Session session, QualifiedObjectName viewName) { + if (viewName.getCatalogName().isEmpty() || viewName.getSchemaName().isEmpty() || viewName.getObjectName().isEmpty()) { + // View cannot exist + return Optional.empty(); + } + Optional catalog = getOptionalCatalogMetadata(session, viewName.getCatalogName()); if (catalog.isPresent()) { CatalogMetadata catalogMetadata = catalog.get(); diff --git 
a/presto-main/src/test/java/io/prestosql/metadata/TestInformationSchemaMetadata.java b/presto-main/src/test/java/io/prestosql/metadata/TestInformationSchemaMetadata.java index f332b87e3163..b85123ff2f9f 100644 --- a/presto-main/src/test/java/io/prestosql/metadata/TestInformationSchemaMetadata.java +++ b/presto-main/src/test/java/io/prestosql/metadata/TestInformationSchemaMetadata.java @@ -57,7 +57,6 @@ import static java.util.Arrays.stream; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertFalse; -import static org.testng.Assert.assertTrue; public class TestInformationSchemaMetadata { @@ -221,31 +220,33 @@ public void testInformationSchemaPredicatePushdownOnCatalogWiseTables() @Test public void testInformationSchemaPredicatePushdownForEmptyNames() - { - assertApplyFilterReturnsEmptyPrefixes( - new SchemaTableName("information_schema", "tables"), - ImmutableMap.of( - new InformationSchemaColumnHandle("table_name"), new NullableValue(VARCHAR, Slices.utf8Slice("")))); - - assertApplyFilterReturnsEmptyPrefixes( - new SchemaTableName("information_schema", "tables"), - ImmutableMap.of( - new InformationSchemaColumnHandle("table_schema"), new NullableValue(VARCHAR, Slices.utf8Slice("")))); - } - - private void assertApplyFilterReturnsEmptyPrefixes(SchemaTableName schemaTableName, Map constraint) { TransactionId transactionId = transactionManager.beginTransaction(false); ConnectorSession session = createNewSession(transactionId); ConnectorMetadata metadata = new InformationSchemaMetadata("test_catalog", this.metadata); + InformationSchemaColumnHandle tableSchemaColumn = new InformationSchemaColumnHandle("table_schema"); + InformationSchemaColumnHandle tableNameColumn = new InformationSchemaColumnHandle("table_name"); + ConnectorTableHandle tableHandle = metadata.getTableHandle(session, new SchemaTableName("information_schema", "tables")); + + // Empty schema name + InformationSchemaTableHandle filtered = metadata.applyFilter(session, 
tableHandle, new Constraint(TupleDomain.withColumnDomains( + ImmutableMap.of(tableSchemaColumn, Domain.singleValue(VARCHAR, Slices.utf8Slice("")))))) + .map(ConstraintApplicationResult::getHandle) + .map(InformationSchemaTableHandle.class::cast) + .orElseThrow(AssertionError::new); - InformationSchemaTableHandle tableHandle = (InformationSchemaTableHandle) metadata.getTableHandle(session, schemaTableName); + // "" schema name is valid schema name, but is (currently) valid for QualifiedTablePrefix + assertEquals(filtered.getPrefixes(), ImmutableSet.of(new QualifiedTablePrefix("test_catalog", ""))); - assertTrue(metadata.applyFilter(session, tableHandle, new Constraint(TupleDomain.fromFixedValues(constraint))) + // Empty table name + filtered = metadata.applyFilter(session, tableHandle, new Constraint(TupleDomain.withColumnDomains( + ImmutableMap.of(tableNameColumn, Domain.singleValue(VARCHAR, Slices.utf8Slice("")))))) .map(ConstraintApplicationResult::getHandle) .map(InformationSchemaTableHandle.class::cast) - .orElseThrow(AssertionError::new) - .getPrefixes().isEmpty()); + .orElseThrow(AssertionError::new); + + // "" table name is valid schema name, but is (currently) valid for QualifiedTablePrefix + assertEquals(filtered.getPrefixes(), ImmutableSet.of(new QualifiedTablePrefix("test_catalog", "test_schema", ""))); } private static boolean testConstraint(Map bindings) diff --git a/presto-tests/src/test/java/io/prestosql/tests/TestInformationSchemaConnector.java b/presto-tests/src/test/java/io/prestosql/tests/TestInformationSchemaConnector.java index 903d0fc9d58d..1b9739556c8b 100644 --- a/presto-tests/src/test/java/io/prestosql/tests/TestInformationSchemaConnector.java +++ b/presto-tests/src/test/java/io/prestosql/tests/TestInformationSchemaConnector.java @@ -15,19 +15,15 @@ import com.google.common.collect.ImmutableMap; import io.prestosql.Session; -import io.prestosql.execution.warnings.WarningCollector; import io.prestosql.plugin.tpch.TpchPlugin; -import 
io.prestosql.sql.planner.plan.TableScanNode; import io.prestosql.testing.AbstractTestQueryFramework; import io.prestosql.testing.CountingMockConnector; import io.prestosql.testing.CountingMockConnector.MetadataCallsCount; import io.prestosql.testing.DistributedQueryRunner; import org.testng.annotations.Test; -import static io.prestosql.sql.planner.optimizations.PlanNodeSearcher.searchFrom; import static io.prestosql.testing.TestingSession.testSessionBuilder; import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertFalse; @Test(singleThreaded = true) public class TestInformationSchemaConnector @@ -186,17 +182,26 @@ public void testMetadataCalls() .withListSchemasCount(1) .withListTablesCount(2) .withGetColumnsCount(1000)); + + // Empty table schema and table name assertMetadataCalls( - "SELECT count(*) from test_catalog.information_schema.tables WHERE table_schema = ''", + "SELECT count(*) from test_catalog.information_schema.tables WHERE table_schema = '' AND table_name = ''", "VALUES 0", new MetadataCallsCount()); - } - @Test - public void testInformationForEmptyNames() - { - assertNoTableScan("SELECT count(*) from test_catalog.information_schema.tables WHERE table_schema = ''"); - assertNoTableScan("SELECT count(*) from test_catalog.information_schema.tables WHERE table_name = ''"); + // Empty table schema + assertMetadataCalls( + "SELECT count(*) from test_catalog.information_schema.tables WHERE table_schema = ''", + "VALUES 0", + new MetadataCallsCount() + .withListTablesCount(1)); + + // Empty table name + assertMetadataCalls( + "SELECT count(*) from test_catalog.information_schema.tables WHERE table_name = ''", + "VALUES 0", + new MetadataCallsCount() + .withListSchemasCount(1)); } @Override @@ -230,13 +235,4 @@ private void assertMetadataCalls(String actualSql, String expectedSql, MetadataC assertEquals(actualMetadataCallsCount, expectedMetadataCallsCount); } - - private void assertNoTableScan(String query) - { - 
assertFalse(searchFrom(getQueryRunner().createPlan(getSession(), query, WarningCollector.NOOP).getRoot()) - .where(TableScanNode.class::isInstance) - .findFirst() - .isPresent(), - "TableScanNode was not expected"); - } } From fe8fcf0c12d7155291d9abdc0618abf992fc70c1 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Thu, 7 May 2020 17:49:17 +0200 Subject: [PATCH 367/519] Optimize system.jdbc.columns metadata calls --- .../jdbc/TestPrestoDatabaseMetaData.java | 58 ++++-- .../system/SystemPageSourceProvider.java | 4 + .../system/SystemTablesMetadata.java | 15 +- .../system/jdbc/ColumnJdbcTable.java | 179 +++++++++++++++++- .../connector/system/jdbc/JdbcTable.java | 19 +- .../prestosql/metadata/MetadataListing.java | 12 ++ .../prestosql/spi/predicate/TupleDomain.java | 3 + 7 files changed, 270 insertions(+), 20 deletions(-) diff --git a/presto-jdbc/src/test/java/io/prestosql/jdbc/TestPrestoDatabaseMetaData.java b/presto-jdbc/src/test/java/io/prestosql/jdbc/TestPrestoDatabaseMetaData.java index d4e4326df310..e17c6580d44b 100644 --- a/presto-jdbc/src/test/java/io/prestosql/jdbc/TestPrestoDatabaseMetaData.java +++ b/presto-jdbc/src/test/java/io/prestosql/jdbc/TestPrestoDatabaseMetaData.java @@ -1169,9 +1169,9 @@ public void testGetColumnsMetadataCalls() list("TABLE_CAT", "TABLE_SCHEM", "TABLE_NAME", "COLUMN_NAME", "TYPE_NAME")), list(list(COUNTING_CATALOG, "test_schema1", "test_table1", "column_17", "varchar")), new MetadataCallsCount() - .withListSchemasCount(1) - .withListTablesCount(2) - .withGetColumnsCount(3000)); + .withListSchemasCount(2) + .withListTablesCount(3) + .withGetColumnsCount(1)); // LIKE predicate on schema name and table name, but no predicate on catalog name assertMetadataCalls( @@ -1179,12 +1179,42 @@ public void testGetColumnsMetadataCalls() databaseMetaData -> databaseMetaData.getColumns(null, "test_schema1", "test_table1", null), list("TABLE_CAT", "TABLE_SCHEM", "TABLE_NAME", "COLUMN_NAME", "TYPE_NAME")), IntStream.range(0, 100) - .mapToObj(i 
-> list(COUNTING_CATALOG, "test_schema1", "test_table1", "column_" + i, "varchar")) + .mapToObj(columnIndex -> list(COUNTING_CATALOG, "test_schema1", "test_table1", "column_" + columnIndex, "varchar")) .collect(toImmutableList()), new MetadataCallsCount() - .withListSchemasCount(1) - .withListTablesCount(2) - .withGetColumnsCount(3000)); // TODO (https://github.com/prestosql/presto/issues/1620) + .withListSchemasCount(2) + .withListTablesCount(3) + .withGetColumnsCount(1)); + + // LIKE predicate on schema name, but no predicate on catalog name and table name + assertMetadataCalls( + readMetaData( + databaseMetaData -> databaseMetaData.getColumns(null, "test_schema1", null, null), + list("TABLE_CAT", "TABLE_SCHEM", "TABLE_NAME", "COLUMN_NAME", "TYPE_NAME")), + IntStream.range(0, 1000).boxed() + .flatMap(tableIndex -> + IntStream.range(0, 100) + .mapToObj(columnIndex -> list(COUNTING_CATALOG, "test_schema1", "test_table" + tableIndex, "column_" + columnIndex, "varchar"))) + .collect(toImmutableList()), + new MetadataCallsCount() + .withListSchemasCount(3) + .withListTablesCount(1001) + .withGetColumnsCount(1000)); + + // LIKE predicate on table name, but no predicate on catalog name and schema name + assertMetadataCalls( + readMetaData( + databaseMetaData -> databaseMetaData.getColumns(null, null, "test_table1", null), + list("TABLE_CAT", "TABLE_SCHEM", "TABLE_NAME", "COLUMN_NAME", "TYPE_NAME")), + IntStream.rangeClosed(1, 2).boxed() + .flatMap(schemaIndex -> + IntStream.range(0, 100) + .mapToObj(columnIndex -> list(COUNTING_CATALOG, "test_schema" + schemaIndex, "test_table1", "column_" + columnIndex, "varchar"))) + .collect(toImmutableList()), + new MetadataCallsCount() + .withListSchemasCount(3) + .withListTablesCount(8) + .withGetColumnsCount(2)); // Equality predicate on schema name and table name, but no predicate on catalog name assertMetadataCalls( @@ -1222,9 +1252,9 @@ public void testGetColumnsMetadataCalls() list("TABLE_CAT", "TABLE_SCHEM", "TABLE_NAME", 
"COLUMN_NAME", "TYPE_NAME")), list(), new MetadataCallsCount() - .withListSchemasCount(1) - .withListTablesCount(2) - .withGetColumnsCount(3000)); + .withListSchemasCount(2) + .withListTablesCount(0) + .withGetColumnsCount(0)); // empty schema name assertMetadataCalls( @@ -1234,8 +1264,8 @@ public void testGetColumnsMetadataCalls() list(), new MetadataCallsCount() .withListSchemasCount(1) - .withListTablesCount(2) - .withGetColumnsCount(3000)); + .withListTablesCount(0) + .withGetColumnsCount(0)); // empty table name assertMetadataCalls( @@ -1245,8 +1275,8 @@ public void testGetColumnsMetadataCalls() list(), new MetadataCallsCount() .withListSchemasCount(1) - .withListTablesCount(2) - .withGetColumnsCount(3000)); + .withListTablesCount(0) + .withGetColumnsCount(0)); // empty column name assertMetadataCalls( diff --git a/presto-main/src/main/java/io/prestosql/connector/system/SystemPageSourceProvider.java b/presto-main/src/main/java/io/prestosql/connector/system/SystemPageSourceProvider.java index 5ea98cc5fcf9..41c3a580ad46 100644 --- a/presto-main/src/main/java/io/prestosql/connector/system/SystemPageSourceProvider.java +++ b/presto-main/src/main/java/io/prestosql/connector/system/SystemPageSourceProvider.java @@ -24,6 +24,7 @@ import io.prestosql.spi.connector.ConnectorSplit; import io.prestosql.spi.connector.ConnectorTableHandle; import io.prestosql.spi.connector.ConnectorTransactionHandle; +import io.prestosql.spi.connector.FixedPageSource; import io.prestosql.spi.connector.RecordCursor; import io.prestosql.spi.connector.RecordPageSource; import io.prestosql.spi.connector.RecordSet; @@ -96,6 +97,9 @@ public ConnectorPageSource createPageSource( } TupleDomain constraint = systemSplit.getConstraint(); + if (constraint.isNone()) { + return new FixedPageSource(ImmutableList.of()); + } ImmutableMap.Builder newConstraints = ImmutableMap.builder(); for (Map.Entry entry : constraint.getDomains().get().entrySet()) { String columnName = ((SystemColumnHandle) 
entry.getKey()).getColumnName(); diff --git a/presto-main/src/main/java/io/prestosql/connector/system/SystemTablesMetadata.java b/presto-main/src/main/java/io/prestosql/connector/system/SystemTablesMetadata.java index dd7fe29cb12d..d71b5328dbfa 100644 --- a/presto-main/src/main/java/io/prestosql/connector/system/SystemTablesMetadata.java +++ b/presto-main/src/main/java/io/prestosql/connector/system/SystemTablesMetadata.java @@ -14,6 +14,7 @@ package io.prestosql.connector.system; import com.google.common.collect.ImmutableMap; +import io.prestosql.connector.system.jdbc.JdbcTable; import io.prestosql.spi.PrestoException; import io.prestosql.spi.connector.ColumnHandle; import io.prestosql.spi.connector.ColumnMetadata; @@ -156,12 +157,24 @@ public Optional> applyFilter(C TupleDomain oldDomain = table.getConstraint(); TupleDomain newDomain = oldDomain.intersect(constraint.getSummary()); + if (oldDomain.equals(newDomain) && !constraint.predicate().isPresent()) { + return Optional.empty(); + } + + SystemTable systemTable = checkAndGetTable(session, table); + if (systemTable instanceof JdbcTable) { + TupleDomain filtered = ((JdbcTable) systemTable).applyFilter(session, new Constraint(newDomain, constraint.predicate(), constraint.getColumns())); + newDomain = newDomain.intersect(filtered); + } + if (oldDomain.equals(newDomain)) { return Optional.empty(); } + if (newDomain.isNone()) { + // TODO (https://github.com/prestosql/presto/issues/3647) indicate the table scan is empty + } table = new SystemTableHandle(table.getSchemaName(), table.getTableName(), newDomain); - return Optional.of(new ConstraintApplicationResult<>(table, constraint.getSummary())); } } diff --git a/presto-main/src/main/java/io/prestosql/connector/system/jdbc/ColumnJdbcTable.java b/presto-main/src/main/java/io/prestosql/connector/system/jdbc/ColumnJdbcTable.java index 5c2afb7455b8..fdd3f56fbc49 100644 --- a/presto-main/src/main/java/io/prestosql/connector/system/jdbc/ColumnJdbcTable.java +++ 
b/presto-main/src/main/java/io/prestosql/connector/system/jdbc/ColumnJdbcTable.java @@ -13,19 +13,29 @@ */ package io.prestosql.connector.system.jdbc; +import com.google.common.base.VerifyException; +import com.google.common.collect.ImmutableMap; +import io.airlift.slice.Slices; import io.prestosql.FullConnectorSession; import io.prestosql.Session; +import io.prestosql.connector.system.SystemColumnHandle; import io.prestosql.metadata.Metadata; import io.prestosql.metadata.QualifiedTablePrefix; import io.prestosql.security.AccessControl; +import io.prestosql.spi.connector.CatalogSchemaName; +import io.prestosql.spi.connector.CatalogSchemaTableName; +import io.prestosql.spi.connector.ColumnHandle; import io.prestosql.spi.connector.ColumnMetadata; import io.prestosql.spi.connector.ConnectorSession; import io.prestosql.spi.connector.ConnectorTableMetadata; import io.prestosql.spi.connector.ConnectorTransactionHandle; +import io.prestosql.spi.connector.Constraint; import io.prestosql.spi.connector.InMemoryRecordSet; import io.prestosql.spi.connector.InMemoryRecordSet.Builder; import io.prestosql.spi.connector.RecordCursor; import io.prestosql.spi.connector.SchemaTableName; +import io.prestosql.spi.predicate.Domain; +import io.prestosql.spi.predicate.NullableValue; import io.prestosql.spi.predicate.TupleDomain; import io.prestosql.spi.type.ArrayType; import io.prestosql.spi.type.CharType; @@ -37,13 +47,25 @@ import java.sql.DatabaseMetaData; import java.sql.Types; +import java.util.Collection; import java.util.List; +import java.util.Map; import java.util.Map.Entry; import java.util.Optional; +import java.util.Set; +import java.util.function.Predicate; +import java.util.stream.Collector; +import java.util.stream.Collectors; +import static com.google.common.collect.ImmutableList.toImmutableList; +import static com.google.common.collect.ImmutableSet.toImmutableSet; +import static io.airlift.slice.Slices.utf8Slice; +import static 
io.prestosql.connector.system.jdbc.FilterUtil.tablePrefix; import static io.prestosql.connector.system.jdbc.FilterUtil.tryGetSingleVarcharValue; import static io.prestosql.metadata.MetadataListing.listCatalogs; +import static io.prestosql.metadata.MetadataListing.listSchemas; import static io.prestosql.metadata.MetadataListing.listTableColumns; +import static io.prestosql.metadata.MetadataListing.listTables; import static io.prestosql.metadata.MetadataUtil.TableMetadataBuilder.tableMetadataBuilder; import static io.prestosql.spi.type.BigintType.BIGINT; import static io.prestosql.spi.type.BooleanType.BOOLEAN; @@ -68,6 +90,12 @@ public class ColumnJdbcTable { public static final SchemaTableName NAME = new SchemaTableName("jdbc", "columns"); + private static final int MAX_DOMAIN_SIZE = 100; + + private static final ColumnHandle TABLE_CATALOG_COLUMN = new SystemColumnHandle("table_cat"); + private static final ColumnHandle TABLE_SCHEMA_COLUMN = new SystemColumnHandle("table_schem"); + private static final ColumnHandle TABLE_NAME_COLUMN = new SystemColumnHandle("table_name"); + public static final ConnectorTableMetadata METADATA = tableMetadataBuilder(NAME) .column("table_cat", createUnboundedVarcharType()) .column("table_schem", createUnboundedVarcharType()) @@ -111,24 +139,150 @@ public ConnectorTableMetadata getTableMetadata() return METADATA; } + @Override + public TupleDomain applyFilter(ConnectorSession connectorSession, Constraint constraint) + { + TupleDomain tupleDomain = constraint.getSummary(); + if (tupleDomain.isNone() || !constraint.predicate().isPresent()) { + return tupleDomain; + } + Predicate> predicate = constraint.predicate().get(); + Set predicateColumns = constraint.getColumns().orElseThrow(() -> new VerifyException("columns not present for a predicate")); + + boolean hasSchemaPredicate = predicateColumns.contains(TABLE_SCHEMA_COLUMN); + boolean hasTablePredicate = predicateColumns.contains(TABLE_NAME_COLUMN); + if (!hasSchemaPredicate && 
!hasTablePredicate) { + // No filter on schema name and table name at all. + return tupleDomain; + } + + Session session = ((FullConnectorSession) connectorSession).getSession(); + + Optional catalogFilter = tryGetSingleVarcharValue(tupleDomain, TABLE_CATALOG_COLUMN); + Optional schemaFilter = tryGetSingleVarcharValue(tupleDomain, TABLE_SCHEMA_COLUMN); + Optional tableFilter = tryGetSingleVarcharValue(tupleDomain, TABLE_NAME_COLUMN); + + if (schemaFilter.isPresent() && tableFilter.isPresent()) { + // No need to narrow down the domain. + return tupleDomain; + } + + List catalogs = listCatalogs(session, metadata, accessControl, catalogFilter).keySet().stream() + .filter(catalogName -> predicate.test(ImmutableMap.of(TABLE_CATALOG_COLUMN, toNullableValue(catalogName)))) + .collect(toImmutableList()); + + List schemas = catalogs.stream() + .flatMap(catalogName -> + listSchemas(session, metadata, accessControl, catalogName, schemaFilter).stream() + .filter(schemaName -> !hasSchemaPredicate || predicate.test(ImmutableMap.of( + TABLE_CATALOG_COLUMN, toNullableValue(catalogName), + TABLE_SCHEMA_COLUMN, toNullableValue(schemaName)))) + .map(schemaName -> new CatalogSchemaName(catalogName, schemaName))) + .collect(toImmutableList()); + + if (!hasTablePredicate) { + return TupleDomain.withColumnDomains(ImmutableMap.builder() + .put(TABLE_CATALOG_COLUMN, schemas.stream() + .map(CatalogSchemaName::getCatalogName) + .collect(toVarcharDomain()) + .simplify(MAX_DOMAIN_SIZE)) + .put(TABLE_SCHEMA_COLUMN, schemas.stream() + .map(CatalogSchemaName::getSchemaName) + .collect(toVarcharDomain()) + .simplify(MAX_DOMAIN_SIZE)) + .build()); + } + + List tables = schemas.stream() + .flatMap(schema -> { + QualifiedTablePrefix tablePrefix = tableFilter.isPresent() + ? 
new QualifiedTablePrefix(schema.getCatalogName(), schema.getSchemaName(), tableFilter.get()) + : new QualifiedTablePrefix(schema.getCatalogName(), schema.getSchemaName()); + return listTables(session, metadata, accessControl, tablePrefix).stream() + .filter(schemaTableName -> predicate.test(ImmutableMap.of( + TABLE_CATALOG_COLUMN, toNullableValue(schema.getCatalogName()), + TABLE_SCHEMA_COLUMN, toNullableValue(schemaTableName.getSchemaName()), + TABLE_NAME_COLUMN, toNullableValue(schemaTableName.getTableName())))) + .map(schemaTableName -> new CatalogSchemaTableName(schema.getCatalogName(), schemaTableName.getSchemaName(), schemaTableName.getTableName())); + }) + .collect(toImmutableList()); + + return TupleDomain.withColumnDomains(ImmutableMap.builder() + .put(TABLE_CATALOG_COLUMN, tables.stream() + .map(CatalogSchemaTableName::getCatalogName) + .collect(toVarcharDomain()) + .simplify(MAX_DOMAIN_SIZE)) + .put(TABLE_SCHEMA_COLUMN, tables.stream() + .map(catalogSchemaTableName -> catalogSchemaTableName.getSchemaTableName().getSchemaName()) + .collect(toVarcharDomain()) + .simplify(MAX_DOMAIN_SIZE)) + .put(TABLE_NAME_COLUMN, tables.stream() + .map(catalogSchemaTableName -> catalogSchemaTableName.getSchemaTableName().getTableName()) + .collect(toVarcharDomain()) + .simplify(MAX_DOMAIN_SIZE)) + .build()); + } + @Override public RecordCursor cursor(ConnectorTransactionHandle transactionHandle, ConnectorSession connectorSession, TupleDomain constraint) { + Builder table = InMemoryRecordSet.builder(METADATA); + if (constraint.isNone()) { + return table.build().cursor(); + } + Session session = ((FullConnectorSession) connectorSession).getSession(); Optional catalogFilter = tryGetSingleVarcharValue(constraint, 0); Optional schemaFilter = tryGetSingleVarcharValue(constraint, 1); Optional tableFilter = tryGetSingleVarcharValue(constraint, 2); - Builder table = InMemoryRecordSet.builder(METADATA); + Domain catalogDomain = constraint.getDomains().get().getOrDefault(0, 
Domain.all(createUnboundedVarcharType())); + Domain schemaDomain = constraint.getDomains().get().getOrDefault(1, Domain.all(createUnboundedVarcharType())); + Domain tableDomain = constraint.getDomains().get().getOrDefault(2, Domain.all(createUnboundedVarcharType())); + for (String catalog : listCatalogs(session, metadata, accessControl, catalogFilter).keySet()) { - QualifiedTablePrefix prefix = FilterUtil.tablePrefix(catalog, schemaFilter, tableFilter); - for (Entry> entry : listTableColumns(session, metadata, accessControl, prefix).entrySet()) { - addColumnRows(table, catalog, entry.getKey(), entry.getValue()); + if (!catalogDomain.includesNullableValue(utf8Slice(catalog))) { + continue; + } + + if ((schemaDomain.isAll() && tableDomain.isAll()) || (schemaFilter.isPresent() && tableFilter.isPresent())) { + QualifiedTablePrefix tablePrefix = tablePrefix(catalog, schemaFilter, tableFilter); + Map> tableColumns = listTableColumns(session, metadata, accessControl, tablePrefix); + addColumnsRow(table, catalog, tableColumns); + } + else { + Collection schemas = listSchemas(session, metadata, accessControl, catalog, schemaFilter); + for (String schema : schemas) { + if (!schemaDomain.includesNullableValue(utf8Slice(schema))) { + continue; + } + + QualifiedTablePrefix tablePrefix = tableFilter.isPresent() + ? 
new QualifiedTablePrefix(catalog, schema, tableFilter.get()) + : new QualifiedTablePrefix(catalog, schema); + Set tables = listTables(session, metadata, accessControl, tablePrefix); + for (SchemaTableName schemaTableName : tables) { + String tableName = schemaTableName.getTableName(); + if (!tableDomain.includesNullableValue(utf8Slice(tableName))) { + continue; + } + + Map> tableColumns = listTableColumns(session, metadata, accessControl, new QualifiedTablePrefix(catalog, schema, tableName)); + addColumnsRow(table, catalog, tableColumns); + } + } } } return table.build().cursor(); } + private static void addColumnsRow(Builder builder, String catalog, Map> columns) + { + for (Entry> entry : columns.entrySet()) { + addColumnRows(builder, catalog, entry.getKey(), entry.getValue()); + } + } + private static void addColumnRows(Builder builder, String catalog, SchemaTableName tableName, List columns) { int ordinalPosition = 1; @@ -308,4 +462,21 @@ static Integer numPrecRadix(Type type) } return null; } + + private static NullableValue toNullableValue(String varcharValue) + { + return NullableValue.of(createUnboundedVarcharType(), utf8Slice(varcharValue)); + } + + private static Collector toVarcharDomain() + { + return Collectors.collectingAndThen(toImmutableSet(), set -> { + if (set.isEmpty()) { + return Domain.none(createUnboundedVarcharType()); + } + return Domain.multipleValues(createUnboundedVarcharType(), set.stream() + .map(Slices::utf8Slice) + .collect(toImmutableList())); + }); + } } diff --git a/presto-main/src/main/java/io/prestosql/connector/system/jdbc/JdbcTable.java b/presto-main/src/main/java/io/prestosql/connector/system/jdbc/JdbcTable.java index a8f574740132..c21fef2791c2 100644 --- a/presto-main/src/main/java/io/prestosql/connector/system/jdbc/JdbcTable.java +++ b/presto-main/src/main/java/io/prestosql/connector/system/jdbc/JdbcTable.java @@ -13,11 +13,15 @@ */ package io.prestosql.connector.system.jdbc; +import io.prestosql.spi.connector.ColumnHandle; 
+import io.prestosql.spi.connector.ConnectorSession; +import io.prestosql.spi.connector.Constraint; import io.prestosql.spi.connector.SystemTable; +import io.prestosql.spi.predicate.TupleDomain; import static io.prestosql.spi.connector.SystemTable.Distribution.SINGLE_COORDINATOR; -abstract class JdbcTable +public abstract class JdbcTable implements SystemTable { @Override @@ -25,4 +29,17 @@ public final Distribution getDistribution() { return SINGLE_COORDINATOR; } + + /** + * @param constraint a {@link Constraint} using {@link io.prestosql.connector.system.SystemColumnHandle} to identify columns + */ + /* + * This method is not part of the SystemTable interface, because system tables do not operate on column handles, + * and without column handles it's currently not possible to express Constraint or ConstraintApplicationResult. + * TODO provide equivalent API in the SystemTable interface + */ + public TupleDomain applyFilter(ConnectorSession session, Constraint constraint) + { + return constraint.getSummary(); + } } diff --git a/presto-main/src/main/java/io/prestosql/metadata/MetadataListing.java b/presto-main/src/main/java/io/prestosql/metadata/MetadataListing.java index 5821ecced808..efb7317e2e93 100644 --- a/presto-main/src/main/java/io/prestosql/metadata/MetadataListing.java +++ b/presto-main/src/main/java/io/prestosql/metadata/MetadataListing.java @@ -71,8 +71,20 @@ public static SortedMap listCatalogs(Session session, Metad } public static SortedSet listSchemas(Session session, Metadata metadata, AccessControl accessControl, String catalogName) + { + return listSchemas(session, metadata, accessControl, catalogName, Optional.empty()); + } + + public static SortedSet listSchemas(Session session, Metadata metadata, AccessControl accessControl, String catalogName, Optional schemaName) { Set schemaNames = ImmutableSet.copyOf(metadata.listSchemaNames(session, catalogName)); + if (schemaName.isPresent()) { + // we don't use metadata.schemaExists(), because this 
would change semantics of the method (all vs visible schemas) + if (!schemaNames.contains(schemaName.get())) { + return ImmutableSortedSet.of(); + } + schemaNames = ImmutableSet.of(schemaName.get()); + } return ImmutableSortedSet.copyOf(accessControl.filterSchemas(session.toSecurityContext(), catalogName, schemaNames)); } diff --git a/presto-spi/src/main/java/io/prestosql/spi/predicate/TupleDomain.java b/presto-spi/src/main/java/io/prestosql/spi/predicate/TupleDomain.java index e9bed96a1348..a809818f3830 100644 --- a/presto-spi/src/main/java/io/prestosql/spi/predicate/TupleDomain.java +++ b/presto-spi/src/main/java/io/prestosql/spi/predicate/TupleDomain.java @@ -217,6 +217,9 @@ public TupleDomain intersect(TupleDomain other) if (this.isNone() || other.isNone()) { return none(); } + if (this == other) { + return this; + } Map intersected = new LinkedHashMap<>(this.getDomains().get()); for (Map.Entry entry : other.getDomains().get().entrySet()) { From febbff64fe77a161f7321d707be77901b37d12ef Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Wed, 29 Apr 2020 12:53:21 +0200 Subject: [PATCH 368/519] Extract statistics conversion method --- .../TupleDomainParquetPredicate.java | 46 +++++++++++-------- 1 file changed, 27 insertions(+), 19 deletions(-) diff --git a/presto-parquet/src/main/java/io/prestosql/parquet/predicate/TupleDomainParquetPredicate.java b/presto-parquet/src/main/java/io/prestosql/parquet/predicate/TupleDomainParquetPredicate.java index 11769215190a..78be63831157 100644 --- a/presto-parquet/src/main/java/io/prestosql/parquet/predicate/TupleDomainParquetPredicate.java +++ b/presto-parquet/src/main/java/io/prestosql/parquet/predicate/TupleDomainParquetPredicate.java @@ -160,27 +160,11 @@ public static Domain getDomain(Type type, long rowCount, Statistics statistic } if ((type.equals(BIGINT) || type.equals(TINYINT) || type.equals(SMALLINT) || type.equals(INTEGER)) && (statistics instanceof LongStatistics || statistics instanceof IntStatistics)) { - 
ParquetIntegerStatistics parquetIntegerStatistics; - if (statistics instanceof LongStatistics) { - LongStatistics longStatistics = (LongStatistics) statistics; - if (longStatistics.genericGetMin() > longStatistics.genericGetMax()) { - failWithCorruptionException(failOnCorruptedParquetStatistics, column, id, longStatistics); - return Domain.create(ValueSet.all(type), hasNullValue); - } - parquetIntegerStatistics = new ParquetIntegerStatistics(longStatistics.genericGetMin(), longStatistics.genericGetMax()); - } - else { - IntStatistics intStatistics = (IntStatistics) statistics; - if (intStatistics.genericGetMin() > intStatistics.genericGetMax()) { - failWithCorruptionException(failOnCorruptedParquetStatistics, column, id, intStatistics); - return Domain.create(ValueSet.all(type), hasNullValue); - } - parquetIntegerStatistics = new ParquetIntegerStatistics((long) intStatistics.getMin(), (long) intStatistics.getMax()); - } - if (isStatisticsOverflow(type, parquetIntegerStatistics)) { + Optional parquetIntegerStatistics = toParquetIntegerStatistics(statistics, id, column, failOnCorruptedParquetStatistics); + if (!parquetIntegerStatistics.isPresent() || isStatisticsOverflow(type, parquetIntegerStatistics.get())) { return Domain.create(ValueSet.all(type), hasNullValue); } - return createDomain(type, hasNullValue, parquetIntegerStatistics); + return createDomain(type, hasNullValue, parquetIntegerStatistics.get()); } if (type.equals(REAL) && statistics instanceof FloatStatistics) { @@ -232,6 +216,30 @@ public static Domain getDomain(Type type, long rowCount, Statistics statistic return Domain.create(ValueSet.all(type), hasNullValue); } + private static Optional toParquetIntegerStatistics(Statistics statistics, ParquetDataSourceId id, String column, boolean failOnCorruptedParquetStatistics) + throws ParquetCorruptionException + { + if (statistics instanceof LongStatistics) { + LongStatistics longStatistics = (LongStatistics) statistics; + if (longStatistics.genericGetMin() 
> longStatistics.genericGetMax()) { + failWithCorruptionException(failOnCorruptedParquetStatistics, column, id, longStatistics); + return Optional.empty(); + } + return Optional.of(new ParquetIntegerStatistics(longStatistics.genericGetMin(), longStatistics.genericGetMax())); + } + + if (statistics instanceof IntStatistics) { + IntStatistics intStatistics = (IntStatistics) statistics; + if (intStatistics.genericGetMin() > intStatistics.genericGetMax()) { + failWithCorruptionException(failOnCorruptedParquetStatistics, column, id, intStatistics); + return Optional.empty(); + } + return Optional.of(new ParquetIntegerStatistics((long) intStatistics.getMin(), (long) intStatistics.getMax())); + } + + throw new IllegalArgumentException("Cannot convert statistics of type " + statistics.getClass().getName()); + } + @VisibleForTesting public static Domain getDomain(Type type, DictionaryDescriptor dictionaryDescriptor) { From bcf4e6f0d574042a7ba64f666552528e052ce011 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Wed, 29 Apr 2020 13:57:36 +0200 Subject: [PATCH 369/519] Reject unsupported type --- .../parquet/predicate/PredicateUtils.java | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/presto-parquet/src/main/java/io/prestosql/parquet/predicate/PredicateUtils.java b/presto-parquet/src/main/java/io/prestosql/parquet/predicate/PredicateUtils.java index 53b762e83bb8..9306ae9c8d1e 100644 --- a/presto-parquet/src/main/java/io/prestosql/parquet/predicate/PredicateUtils.java +++ b/presto-parquet/src/main/java/io/prestosql/parquet/predicate/PredicateUtils.java @@ -51,6 +51,7 @@ import static io.airlift.slice.Slices.wrappedBuffer; import static io.prestosql.parquet.ParquetCompressionUtils.decompress; import static io.prestosql.parquet.ParquetTypeUtils.getParquetEncoding; +import static io.prestosql.spi.type.BigintType.BIGINT; import static io.prestosql.spi.type.IntegerType.INTEGER; import static io.prestosql.spi.type.SmallintType.SMALLINT; 
import static io.prestosql.spi.type.TinyintType.TINYINT; @@ -67,9 +68,21 @@ public static boolean isStatisticsOverflow(Type type, ParquetIntegerStatistics p { long min = parquetIntegerStatistics.getMin(); long max = parquetIntegerStatistics.getMax(); - return (type.equals(TINYINT) && (min < Byte.MIN_VALUE || max > Byte.MAX_VALUE)) || - (type.equals(SMALLINT) && (min < Short.MIN_VALUE || max > Short.MAX_VALUE)) || - (type.equals(INTEGER) && (min < Integer.MIN_VALUE || max > Integer.MAX_VALUE)); + + if (type == TINYINT) { + return min < Byte.MIN_VALUE || max > Byte.MAX_VALUE; + } + if (type == SMALLINT) { + return min < Short.MIN_VALUE || max > Short.MAX_VALUE; + } + if (type == INTEGER) { + return min < Integer.MIN_VALUE || max > Integer.MAX_VALUE; + } + if (type == BIGINT) { + return false; + } + + throw new IllegalArgumentException("Unsupported type: " + type); } public static Predicate buildPredicate(MessageType requestedSchema, TupleDomain parquetTupleDomain, Map, RichColumnDescriptor> descriptorsByPath) From a71fa9b3b08d26a7e353be7d0ede8ee0034e5958 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Wed, 29 Apr 2020 13:58:23 +0200 Subject: [PATCH 370/519] Add isStatisticsOverflow test --- .../parquet/predicate/TestPredicateUtils.java | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/presto-parquet/src/test/java/io/prestosql/parquet/predicate/TestPredicateUtils.java b/presto-parquet/src/test/java/io/prestosql/parquet/predicate/TestPredicateUtils.java index efb605073460..edd312e05a9d 100644 --- a/presto-parquet/src/test/java/io/prestosql/parquet/predicate/TestPredicateUtils.java +++ b/presto-parquet/src/test/java/io/prestosql/parquet/predicate/TestPredicateUtils.java @@ -24,6 +24,10 @@ import static com.google.common.collect.Sets.union; import static io.prestosql.parquet.predicate.PredicateUtils.isOnlyDictionaryEncodingPages; +import static io.prestosql.parquet.predicate.PredicateUtils.isStatisticsOverflow; +import static 
io.prestosql.spi.type.IntegerType.INTEGER; +import static io.prestosql.spi.type.SmallintType.SMALLINT; +import static io.prestosql.spi.type.TinyintType.TINYINT; import static org.apache.parquet.column.Encoding.BIT_PACKED; import static org.apache.parquet.column.Encoding.PLAIN; import static org.apache.parquet.column.Encoding.PLAIN_DICTIONARY; @@ -37,6 +41,22 @@ public class TestPredicateUtils { + @Test + public void testIsStatisticsOverflow() + { + assertFalse(isStatisticsOverflow(TINYINT, new ParquetIntegerStatistics(-10L, 10L))); + assertTrue(isStatisticsOverflow(TINYINT, new ParquetIntegerStatistics(-129L, 10L))); + assertTrue(isStatisticsOverflow(TINYINT, new ParquetIntegerStatistics(-10L, 129L))); + + assertFalse(isStatisticsOverflow(SMALLINT, new ParquetIntegerStatistics(-32_000L, 32_000L))); + assertTrue(isStatisticsOverflow(SMALLINT, new ParquetIntegerStatistics(-100_000L, 32_000L))); + assertTrue(isStatisticsOverflow(SMALLINT, new ParquetIntegerStatistics(-32_000L, 100_000L))); + + assertFalse(isStatisticsOverflow(INTEGER, new ParquetIntegerStatistics(-2_000_000_000L, 2_000_000_000L))); + assertTrue(isStatisticsOverflow(INTEGER, new ParquetIntegerStatistics(-3_000_000_000L, 2_000_000_000L))); + assertTrue(isStatisticsOverflow(INTEGER, new ParquetIntegerStatistics(-2_000_000_000L, 3_000_000_000L))); + } + @Test @SuppressWarnings("deprecation") public void testDictionaryEncodingV1() From 7d2d9db8130f76a1d9f7814c1ce61d810fdc6864 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Wed, 29 Apr 2020 14:02:52 +0200 Subject: [PATCH 371/519] Use Parquet statistics for decimal with 0 scale --- .../parquet/predicate/PredicateUtils.java | 22 ++++++++++ .../TupleDomainParquetPredicate.java | 18 +++++++- .../TestTupleDomainParquetPredicate.java | 44 +++++++++++++++++++ .../parquet/predicate/TestPredicateUtils.java | 9 ++++ 4 files changed, 92 insertions(+), 1 deletion(-) diff --git a/presto-parquet/src/main/java/io/prestosql/parquet/predicate/PredicateUtils.java 
b/presto-parquet/src/main/java/io/prestosql/parquet/predicate/PredicateUtils.java index 9306ae9c8d1e..4577e49ceccf 100644 --- a/presto-parquet/src/main/java/io/prestosql/parquet/predicate/PredicateUtils.java +++ b/presto-parquet/src/main/java/io/prestosql/parquet/predicate/PredicateUtils.java @@ -25,6 +25,7 @@ import io.prestosql.parquet.ParquetEncoding; import io.prestosql.parquet.RichColumnDescriptor; import io.prestosql.spi.predicate.TupleDomain; +import io.prestosql.spi.type.DecimalType; import io.prestosql.spi.type.Type; import org.apache.parquet.column.ColumnDescriptor; import org.apache.parquet.column.Encoding; @@ -41,12 +42,14 @@ import java.io.ByteArrayInputStream; import java.io.IOException; +import java.math.BigDecimal; import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; +import static com.google.common.base.Strings.repeat; import static com.google.common.base.Verify.verify; import static io.airlift.slice.Slices.wrappedBuffer; import static io.prestosql.parquet.ParquetCompressionUtils.decompress; @@ -56,6 +59,7 @@ import static io.prestosql.spi.type.SmallintType.SMALLINT; import static io.prestosql.spi.type.TinyintType.TINYINT; import static java.lang.Math.toIntExact; +import static java.lang.String.format; import static org.apache.parquet.column.Encoding.BIT_PACKED; import static org.apache.parquet.column.Encoding.PLAIN_DICTIONARY; import static org.apache.parquet.column.Encoding.RLE; @@ -81,10 +85,28 @@ public static boolean isStatisticsOverflow(Type type, ParquetIntegerStatistics p if (type == BIGINT) { return false; } + if (type instanceof DecimalType && ((DecimalType) type).getScale() == 0) { + DecimalType decimalType = (DecimalType) type; + if (!decimalType.isShort()) { + // Smallest long decimal type with 0 scale has broader range than representable in long, as used in ParquetIntegerStatistics + return false; + } + return BigDecimal.valueOf(min).compareTo(minimalValue(decimalType)) 
< 0 || BigDecimal.valueOf(max).compareTo(maximalValue(decimalType)) > 0; + } throw new IllegalArgumentException("Unsupported type: " + type); } + private static BigDecimal minimalValue(DecimalType decimalType) + { + return new BigDecimal(format("-%s.%s", repeat("9", decimalType.getPrecision() - decimalType.getScale()), repeat("9", decimalType.getScale()))); + } + + private static BigDecimal maximalValue(DecimalType decimalType) + { + return new BigDecimal(format("+%s.%s", repeat("9", decimalType.getPrecision() - decimalType.getScale()), repeat("9", decimalType.getScale()))); + } + public static Predicate buildPredicate(MessageType requestedSchema, TupleDomain parquetTupleDomain, Map, RichColumnDescriptor> descriptorsByPath) { ImmutableList.Builder columnReferences = ImmutableList.builder(); diff --git a/presto-parquet/src/main/java/io/prestosql/parquet/predicate/TupleDomainParquetPredicate.java b/presto-parquet/src/main/java/io/prestosql/parquet/predicate/TupleDomainParquetPredicate.java index 78be63831157..619d27428cc3 100644 --- a/presto-parquet/src/main/java/io/prestosql/parquet/predicate/TupleDomainParquetPredicate.java +++ b/presto-parquet/src/main/java/io/prestosql/parquet/predicate/TupleDomainParquetPredicate.java @@ -27,6 +27,7 @@ import io.prestosql.spi.predicate.Range; import io.prestosql.spi.predicate.TupleDomain; import io.prestosql.spi.predicate.ValueSet; +import io.prestosql.spi.type.DecimalType; import io.prestosql.spi.type.Type; import org.apache.parquet.column.ColumnDescriptor; import org.apache.parquet.column.statistics.BinaryStatistics; @@ -38,6 +39,7 @@ import org.apache.parquet.column.statistics.Statistics; import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName; +import java.math.BigDecimal; import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -48,6 +50,7 @@ import static io.prestosql.spi.type.BigintType.BIGINT; import static io.prestosql.spi.type.BooleanType.BOOLEAN; import static 
io.prestosql.spi.type.DateType.DATE; +import static io.prestosql.spi.type.Decimals.encodeScaledValue; import static io.prestosql.spi.type.DoubleType.DOUBLE; import static io.prestosql.spi.type.IntegerType.INTEGER; import static io.prestosql.spi.type.RealType.REAL; @@ -167,6 +170,19 @@ public static Domain getDomain(Type type, long rowCount, Statistics statistic return createDomain(type, hasNullValue, parquetIntegerStatistics.get()); } + if (type instanceof DecimalType && ((DecimalType) type).getScale() == 0 && (statistics instanceof LongStatistics || statistics instanceof IntStatistics)) { + Optional parquetIntegerStatistics = toParquetIntegerStatistics(statistics, id, column, failOnCorruptedParquetStatistics); + if (!parquetIntegerStatistics.isPresent() || isStatisticsOverflow(type, parquetIntegerStatistics.get())) { + return Domain.create(ValueSet.all(type), hasNullValue); + } + return createDomain(type, hasNullValue, parquetIntegerStatistics.get(), statisticsValue -> { + if (((DecimalType) type).isShort()) { + return statisticsValue; + } + return encodeScaledValue(BigDecimal.valueOf(statisticsValue), 0 /* scale */); + }); + } + if (type.equals(REAL) && statistics instanceof FloatStatistics) { FloatStatistics floatStatistics = (FloatStatistics) statistics; if (floatStatistics.genericGetMin() > floatStatistics.genericGetMax()) { @@ -326,7 +342,7 @@ private static > Domain createDomain(Type type, boolean return createDomain(type, hasNullValue, rangeStatistics, value -> value); } - private static > Domain createDomain(Type type, boolean hasNullValue, ParquetRangeStatistics rangeStatistics, Function function) + private static Domain createDomain(Type type, boolean hasNullValue, ParquetRangeStatistics rangeStatistics, Function function) { F min = rangeStatistics.getMin(); F max = rangeStatistics.getMax(); diff --git a/presto-parquet/src/test/java/io/prestosql/parquet/TestTupleDomainParquetPredicate.java 
b/presto-parquet/src/test/java/io/prestosql/parquet/TestTupleDomainParquetPredicate.java index 36cfc15de3fe..c372e8bad36a 100644 --- a/presto-parquet/src/test/java/io/prestosql/parquet/TestTupleDomainParquetPredicate.java +++ b/presto-parquet/src/test/java/io/prestosql/parquet/TestTupleDomainParquetPredicate.java @@ -21,6 +21,7 @@ import io.prestosql.spi.predicate.Domain; import io.prestosql.spi.predicate.TupleDomain; import io.prestosql.spi.predicate.ValueSet; +import io.prestosql.spi.type.DecimalType; import io.prestosql.spi.type.Type; import io.prestosql.spi.type.VarcharType; import org.apache.parquet.column.ColumnDescriptor; @@ -36,6 +37,7 @@ import org.testng.annotations.DataProvider; import org.testng.annotations.Test; +import java.math.BigDecimal; import java.util.Map; import java.util.Optional; @@ -52,6 +54,8 @@ import static io.prestosql.spi.type.BigintType.BIGINT; import static io.prestosql.spi.type.BooleanType.BOOLEAN; import static io.prestosql.spi.type.DateType.DATE; +import static io.prestosql.spi.type.DecimalType.createDecimalType; +import static io.prestosql.spi.type.Decimals.encodeScaledValue; import static io.prestosql.spi.type.DoubleType.DOUBLE; import static io.prestosql.spi.type.IntegerType.INTEGER; import static io.prestosql.spi.type.RealType.REAL; @@ -176,6 +180,46 @@ public void testTinyint() .withMessage("Corrupted statistics for column \"TinyintColumn\" in Parquet file \"testFile\": [min: 2147483648, max: 10, num_nulls: 0]"); } + @Test + public void testShortDecimal() + throws Exception + { + String column = "ShortDecimalColumn"; + Type type = createDecimalType(5, 0); + assertEquals(getDomain(type, 0, null, ID, column, true), all(type)); + + assertEquals(getDomain(type, 10, longColumnStats(100L, 100L), ID, column, true), singleValue(type, 100L)); + + assertEquals(getDomain(type, 10, longColumnStats(0L, 100L), ID, column, true), create(ValueSet.ofRanges(range(type, 0L, true, 100L, true)), false)); + // ignore corrupted statistics + 
assertEquals(getDomain(type, 10, longColumnStats(100L, 0L), ID, column, false), create(ValueSet.all(type), false)); + // fail on corrupted statistics + assertThatExceptionOfType(ParquetCorruptionException.class) + .isThrownBy(() -> getDomain(type, 10, longColumnStats(100L, 10L), ID, column, true)) + .withMessage("Corrupted statistics for column \"ShortDecimalColumn\" in Parquet file \"testFile\": [min: 100, max: 10, num_nulls: 0]"); + } + + @Test + public void testLongDecimal() + throws Exception + { + String column = "LongDecimalColumn"; + DecimalType type = createDecimalType(20, 0); + Slice zero = encodeScaledValue(new BigDecimal("0"), type.getScale()); + Slice hundred = encodeScaledValue(new BigDecimal("100"), type.getScale()); + assertEquals(getDomain(type, 0, null, ID, column, true), all(type)); + + assertEquals(getDomain(type, 10, longColumnStats(100L, 100L), ID, column, true), singleValue(type, hundred)); + + assertEquals(getDomain(type, 10, longColumnStats(0L, 100L), ID, column, true), create(ValueSet.ofRanges(range(type, zero, true, hundred, true)), false)); + // ignore corrupted statistics + assertEquals(getDomain(type, 10, longColumnStats(100L, 0L), ID, column, false), create(ValueSet.all(type), false)); + // fail on corrupted statistics + assertThatExceptionOfType(ParquetCorruptionException.class) + .isThrownBy(() -> getDomain(type, 10, longColumnStats(100L, 10L), ID, column, true)) + .withMessage("Corrupted statistics for column \"LongDecimalColumn\" in Parquet file \"testFile\": [min: 100, max: 10, num_nulls: 0]"); + } + @Test public void testDouble() throws ParquetCorruptionException diff --git a/presto-parquet/src/test/java/io/prestosql/parquet/predicate/TestPredicateUtils.java b/presto-parquet/src/test/java/io/prestosql/parquet/predicate/TestPredicateUtils.java index edd312e05a9d..12fc886498fd 100644 --- a/presto-parquet/src/test/java/io/prestosql/parquet/predicate/TestPredicateUtils.java +++ 
b/presto-parquet/src/test/java/io/prestosql/parquet/predicate/TestPredicateUtils.java @@ -25,6 +25,7 @@ import static com.google.common.collect.Sets.union; import static io.prestosql.parquet.predicate.PredicateUtils.isOnlyDictionaryEncodingPages; import static io.prestosql.parquet.predicate.PredicateUtils.isStatisticsOverflow; +import static io.prestosql.spi.type.DecimalType.createDecimalType; import static io.prestosql.spi.type.IntegerType.INTEGER; import static io.prestosql.spi.type.SmallintType.SMALLINT; import static io.prestosql.spi.type.TinyintType.TINYINT; @@ -55,6 +56,14 @@ public void testIsStatisticsOverflow() assertFalse(isStatisticsOverflow(INTEGER, new ParquetIntegerStatistics(-2_000_000_000L, 2_000_000_000L))); assertTrue(isStatisticsOverflow(INTEGER, new ParquetIntegerStatistics(-3_000_000_000L, 2_000_000_000L))); assertTrue(isStatisticsOverflow(INTEGER, new ParquetIntegerStatistics(-2_000_000_000L, 3_000_000_000L))); + + // short decimal + assertFalse(isStatisticsOverflow(createDecimalType(5, 0), new ParquetIntegerStatistics(-10_000L, 10_000L))); + assertTrue(isStatisticsOverflow(createDecimalType(5, 0), new ParquetIntegerStatistics(-100_000L, 10_000L))); + assertTrue(isStatisticsOverflow(createDecimalType(5, 0), new ParquetIntegerStatistics(-10_000L, 100_000L))); + + // long decimal + assertFalse(isStatisticsOverflow(createDecimalType(19, 0), new ParquetIntegerStatistics(-1_000_000_000_000_000_000L, 1_000_000_000_000_000_000L))); } @Test From 82fb9a50f5c54ae9336f1a1d6dda37928af6697b Mon Sep 17 00:00:00 2001 From: Yuya Ebihara Date: Wed, 6 May 2020 10:44:35 +0900 Subject: [PATCH 372/519] Minor cleanup of BigQuery connector - Remove unused method and field - Add missing final and static - Add missing requireNonNull - Rename method and variable - Fix typo - Use Slice.toStringUtf8 method --- .../plugin/bigquery/BigQueryClient.java | 18 +++----- .../bigquery/BigQueryCredentialsSupplier.java | 8 ++-- .../plugin/bigquery/BigQueryMetadata.java | 18 
+++----- .../bigquery/BigQueryPageSourceProvider.java | 12 ++--- .../bigquery/BigQueryResultPageSource.java | 44 ++++++------------- .../plugin/bigquery/BigQuerySplitManager.java | 11 +++-- .../plugin/bigquery/BigQueryType.java | 31 +------------ .../plugin/bigquery/BigQueryUtil.java | 2 +- .../plugin/bigquery/Conversions.java | 2 +- .../plugin/bigquery/ReadRowsHelper.java | 8 ++-- .../plugin/bigquery/ReadSessionCreator.java | 41 +++++++++-------- .../plugin/bigquery/MockResponsesBatch.java | 2 +- .../plugin/bigquery/TestReadRowsHelper.java | 5 ++- 13 files changed, 72 insertions(+), 130 deletions(-) diff --git a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryClient.java b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryClient.java index 4766e92f93d3..34e3451be6c0 100644 --- a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryClient.java +++ b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryClient.java @@ -26,14 +26,13 @@ import com.google.cloud.bigquery.TableInfo; import com.google.cloud.bigquery.TableResult; import com.google.cloud.http.BaseHttpServiceException; -import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterators; import java.util.Iterator; +import java.util.List; import java.util.Optional; import java.util.Set; -import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.stream.StreamSupport; @@ -118,7 +117,6 @@ TableId createDestinationTable(TableId tableId) String project = viewMaterializationProject.orElse(tableId.getProject()); String dataset = viewMaterializationDataset.orElse(tableId.getDataset()); DatasetId datasetId = mapIfNeeded(project, dataset); - UUID uuid = randomUUID(); String name = format("_pbc_%s", randomUUID().toString().toLowerCase(ENGLISH).replace("-", "")); return TableId.of(datasetId.getProject(), 
datasetId.getDataset(), name); } @@ -150,20 +148,16 @@ TableResult query(String sql) } } - String createSql(TableId table, ImmutableList requiredColumns, String[] filters) + String selectSql(TableId table, List requiredColumns) { String columns = requiredColumns.isEmpty() ? "*" : requiredColumns.stream().map(column -> format("`%s`", column)).collect(joining(",")); - String whereClause = createWhereClause(filters) - .map(clause -> "WHERE " + clause) - .orElse(""); - - return createSql(table, columns, filters); + return selectSql(table, columns, new String[] {}); } // assuming the SELECT part is properly formatted, can be used to call functions such as COUNT and SUM - String createSql(TableId table, String formatedQuery, String[] filters) + String selectSql(TableId table, String formattedColumns, String[] filters) { String tableName = fullTableName(table); @@ -171,10 +165,10 @@ String createSql(TableId table, String formatedQuery, String[] filters) .map(clause -> "WHERE " + clause) .orElse(""); - return format("SELECT %s FROM `%s` %s", formatedQuery, tableName, whereClause); + return format("SELECT %s FROM `%s` %s", formattedColumns, tableName, whereClause); } - String fullTableName(TableId tableId) + private String fullTableName(TableId tableId) { tableId = tableIds.getOrDefault(tableId, tableId); return format("%s.%s.%s", tableId.getProject(), tableId.getDataset(), tableId.getTable()); diff --git a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryCredentialsSupplier.java b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryCredentialsSupplier.java index a0565b6bf5d2..bd2f26314041 100644 --- a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryCredentialsSupplier.java +++ b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryCredentialsSupplier.java @@ -27,16 +27,16 @@ import java.util.Optional; import java.util.stream.Stream; +import static java.util.Objects.requireNonNull; + class 
BigQueryCredentialsSupplier { - private final Optional credentialsKey; - private final Optional credentialsFile; private final Supplier> credentialsCreator; public BigQueryCredentialsSupplier(Optional credentialsKey, Optional credentialsFile) { - this.credentialsKey = credentialsKey; - this.credentialsFile = credentialsFile; + requireNonNull(credentialsKey, "credentialsKey is null"); + requireNonNull(credentialsFile, "credentialsFile is null"); // lazy creation, cache once it's created this.credentialsCreator = Suppliers.memoize(() -> { Optional credentialsFromKey = credentialsKey.map(BigQueryCredentialsSupplier::createCredentialsFromKey); diff --git a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryMetadata.java b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryMetadata.java index abb861730e31..ae78eabb75c8 100644 --- a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryMetadata.java +++ b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryMetadata.java @@ -13,7 +13,6 @@ */ package io.prestosql.plugin.bigquery; -import com.google.api.gax.paging.Page; import com.google.cloud.bigquery.DatasetId; import com.google.cloud.bigquery.Field; import com.google.cloud.bigquery.Schema; @@ -62,18 +61,20 @@ public class BigQueryMetadata implements ConnectorMetadata { + private static final Logger log = Logger.get(BigQueryMetadata.class); + static final int NUMERIC_DATA_TYPE_PRECISION = 38; static final int NUMERIC_DATA_TYPE_SCALE = 9; static final String INFORMATION_SCHEMA = "information_schema"; - private static final Logger log = Logger.get(BigQueryMetadata.class); - private BigQueryClient bigQueryClient; - private String projectId; + + private final BigQueryClient bigQueryClient; + private final String projectId; @Inject public BigQueryMetadata(BigQueryClient bigQueryClient, BigQueryConfig config) { - this.bigQueryClient = bigQueryClient; - this.projectId = 
config.getProjectId().orElse(bigQueryClient.getProjectId()); + this.bigQueryClient = requireNonNull(bigQueryClient, "bigQueryClient is null"); + this.projectId = requireNonNull(config, "config is null").getProjectId().orElse(bigQueryClient.getProjectId()); } @Override @@ -117,11 +118,6 @@ private List listTablesWithTypes(ConnectorSession session, Opti return tableNames.build(); } - ImmutableList collectAll(Page page) - { - return ImmutableList.copyOf(page.iterateAll()); - } - @Override public ConnectorTableHandle getTableHandle(ConnectorSession session, SchemaTableName tableName) { diff --git a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryPageSourceProvider.java b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryPageSourceProvider.java index 4e2823a2f9ff..de919f7ea26d 100644 --- a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryPageSourceProvider.java +++ b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryPageSourceProvider.java @@ -13,7 +13,6 @@ */ package io.prestosql.plugin.bigquery; -import com.google.common.collect.ImmutableList; import io.airlift.log.Logger; import io.prestosql.spi.connector.ColumnHandle; import io.prestosql.spi.connector.ConnectorPageSource; @@ -29,19 +28,21 @@ import java.util.List; import static com.google.common.collect.ImmutableList.toImmutableList; +import static java.util.Objects.requireNonNull; public class BigQueryPageSourceProvider implements ConnectorPageSourceProvider { private static final Logger log = Logger.get(BigQueryPageSourceProvider.class); + private final BigQueryStorageClientFactory bigQueryStorageClientFactory; private final int maxReadRowsRetries; @Inject public BigQueryPageSourceProvider(BigQueryStorageClientFactory bigQueryStorageClientFactory, BigQueryConfig config) { - this.bigQueryStorageClientFactory = bigQueryStorageClientFactory; - this.maxReadRowsRetries = config.getMaxReadRowsRetries(); + this.bigQueryStorageClientFactory = 
requireNonNull(bigQueryStorageClientFactory, "bigQueryStorageClientFactory is null"); + this.maxReadRowsRetries = requireNonNull(config, "config is null").getMaxReadRowsRetries(); } @Override @@ -60,11 +61,10 @@ public ConnectorPageSource createPageSource( } // not empty projection - BigQueryTableHandle bigQueryTableHandle = (BigQueryTableHandle) table; - ImmutableList bigQueryColumnHandles = columns.stream() + List bigQueryColumnHandles = columns.stream() .map(BigQueryColumnHandle.class::cast) .collect(toImmutableList()); - return new BigQueryResultPageSource(bigQueryStorageClientFactory, maxReadRowsRetries, bigQuerySplit, bigQueryTableHandle, bigQueryColumnHandles); + return new BigQueryResultPageSource(bigQueryStorageClientFactory, maxReadRowsRetries, bigQuerySplit, bigQueryColumnHandles); } } diff --git a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryResultPageSource.java b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryResultPageSource.java index 3574a15ca0e1..b2666ea4991b 100644 --- a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryResultPageSource.java +++ b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryResultPageSource.java @@ -15,7 +15,6 @@ import com.google.cloud.bigquery.storage.v1beta1.BigQueryStorageClient; import com.google.cloud.bigquery.storage.v1beta1.Storage; -import com.google.common.collect.ImmutableList; import io.airlift.log.Logger; import io.airlift.slice.Slice; import io.airlift.slice.Slices; @@ -51,8 +50,6 @@ import java.util.Iterator; import java.util.List; import java.util.concurrent.atomic.AtomicLong; -import java.util.stream.Stream; -import java.util.stream.StreamSupport; import static com.google.common.base.Preconditions.checkState; import static com.google.common.collect.ImmutableList.toImmutableList; @@ -68,37 +65,34 @@ import static io.prestosql.spi.type.TimestampType.TIMESTAMP; import static 
io.prestosql.spi.type.TimestampWithTimeZoneType.TIMESTAMP_WITH_TIME_ZONE; import static java.lang.String.format; +import static java.util.Objects.requireNonNull; public class BigQueryResultPageSource implements ConnectorPageSource { - static final AvroDecimalConverter DECIMAL_CONVERTER = new AvroDecimalConverter(); private static final Logger log = Logger.get(BigQueryResultPageSource.class); + + private static final AvroDecimalConverter DECIMAL_CONVERTER = new AvroDecimalConverter(); + private final BigQueryStorageClient bigQueryStorageClient; - private final int maxReadRowsRetries; private final BigQuerySplit split; - private final BigQueryTableHandle table; - private final ImmutableList columns; - private final ImmutableList columnTypes; + private final List columnTypes; private final AtomicLong readBytes; private final PageBuilder pageBuilder; - private Iterator responses; - private boolean closed; + private final Iterator responses; public BigQueryResultPageSource( BigQueryStorageClientFactory bigQueryStorageClientFactory, int maxReadRowsRetries, BigQuerySplit split, - BigQueryTableHandle table, - ImmutableList columns) + List columns) { - this.bigQueryStorageClient = bigQueryStorageClientFactory.createBigQueryStorageClient(); - this.maxReadRowsRetries = maxReadRowsRetries; - this.split = split; - this.table = table; - this.columns = columns; + this.bigQueryStorageClient = requireNonNull(bigQueryStorageClientFactory, "bigQueryStorageClientFactory is null").createBigQueryStorageClient(); + this.split = requireNonNull(split, "split is null"); this.readBytes = new AtomicLong(); - this.columnTypes = columns.stream().map(BigQueryColumnHandle::getPrestoType).collect(toImmutableList()); + this.columnTypes = requireNonNull(columns, "columns is null").stream() + .map(BigQueryColumnHandle::getPrestoType) + .collect(toImmutableList()); this.pageBuilder = new PageBuilder(columnTypes); log.debug("Starting to read from %s", split.getStreamName()); @@ -107,7 +101,6 @@ public 
BigQueryResultPageSource( .setStream(Storage.Stream.newBuilder() .setName(split.getStreamName()))); responses = new ReadRowsHelper(bigQueryStorageClient, readRowsRequest, maxReadRowsRetries).readRows(); - closed = false; } @Override @@ -201,7 +194,7 @@ else if (javaType == Block.class) { } } - private void writeSlice(BlockBuilder output, Type type, Object value) + private static void writeSlice(BlockBuilder output, Type type, Object value) { if (type instanceof VarcharType) { type.writeSlice(output, utf8Slice(((Utf8) value).toString())); @@ -264,7 +257,6 @@ public long getSystemMemoryUsage() public void close() { bigQueryStorageClient.close(); - closed = true; } Iterable parse(Storage.ReadRowsResponse response) @@ -276,16 +268,6 @@ Iterable parse(Storage.ReadRowsResponse response) return () -> new AvroBinaryIterator(avroSchema, buffer); } - Stream toRecords(Storage.ReadRowsResponse response) - { - byte[] buffer = response.getAvroRows().getSerializedBinaryRows().toByteArray(); - readBytes.addAndGet(buffer.length); - log.debug("Read %d bytes (total %d) from %s", buffer.length, readBytes.get(), split.getStreamName()); - Schema avroSchema = new Schema.Parser().parse(split.getAvroSchema()); - Iterable responseRecords = () -> new AvroBinaryIterator(avroSchema, buffer); - return StreamSupport.stream(responseRecords.spliterator(), false); - } - static class AvroBinaryIterator implements Iterator { diff --git a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQuerySplitManager.java b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQuerySplitManager.java index b2354a68027a..0317a948be71 100644 --- a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQuerySplitManager.java +++ b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQuerySplitManager.java @@ -38,7 +38,6 @@ import static com.google.common.collect.ImmutableList.toImmutableList; import static 
io.prestosql.plugin.bigquery.BigQueryErrorCode.BIGQUERY_FAILED_TO_EXECUTE_QUERY; -import static java.lang.String.format; import static java.util.Objects.requireNonNull; import static java.util.stream.Collectors.toList; import static java.util.stream.IntStream.range; @@ -90,16 +89,16 @@ public ConnectorSplitSource getSplits( return new FixedSplitSource(splits); } - private boolean emptyProjectionIsRequired(Optional> projectedColumns) + private static boolean emptyProjectionIsRequired(Optional> projectedColumns) { return projectedColumns.isPresent() && projectedColumns.get().isEmpty(); } - private ImmutableList readFromBigQuery(TableId tableId, Optional> projectedColumns, int actualParallelism, Optional filter) + private List readFromBigQuery(TableId tableId, Optional> projectedColumns, int actualParallelism, Optional filter) { log.debug("readFromBigQuery(tableId=%s, projectedColumns=%s, actualParallelism=%s, filter=[%s])", tableId, projectedColumns, actualParallelism, filter); List columns = projectedColumns.orElse(ImmutableList.of()); - ImmutableList projectedColumnsNames = columns.stream() + List projectedColumnsNames = columns.stream() .map(column -> ((BigQueryColumnHandle) column).getName()) .collect(toImmutableList()); @@ -118,7 +117,7 @@ private List createEmptyProjection(TableId tableId, int actualPar long numberOfRows; if (filter.isPresent()) { // count the rows based on the filter - String sql = bigQueryClient.createSql(tableId, "COUNT(*)", new String[] {filter.get()}); + String sql = bigQueryClient.selectSql(tableId, "COUNT(*)", new String[] {filter.get()}); TableResult result = bigQueryClient.query(sql); numberOfRows = result.iterateAll().iterator().next().get(0).getLongValue(); } @@ -136,7 +135,7 @@ private List createEmptyProjection(TableId tableId, int actualPar return splits; } catch (BigQueryException e) { - throw new PrestoException(BIGQUERY_FAILED_TO_EXECUTE_QUERY, format("Failed to compute empty projection"), e); + throw new 
PrestoException(BIGQUERY_FAILED_TO_EXECUTE_QUERY, "Failed to compute empty projection", e); } } } diff --git a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryType.java b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryType.java index 031dee70737c..c5c3c468f484 100644 --- a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryType.java +++ b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryType.java @@ -14,7 +14,6 @@ package io.prestosql.plugin.bigquery; import com.google.cloud.bigquery.Field; -import com.google.common.collect.ImmutableMap; import io.airlift.slice.Slice; import io.prestosql.spi.type.ArrayType; import io.prestosql.spi.type.BigintType; @@ -36,7 +35,6 @@ import java.time.LocalDate; import java.time.LocalDateTime; import java.time.LocalTime; -import java.time.Month; import java.time.ZoneId; import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; @@ -50,19 +48,6 @@ import static io.prestosql.spi.type.VarcharType.createUnboundedVarcharType; import static java.lang.Integer.parseInt; import static java.lang.String.format; -import static java.nio.charset.StandardCharsets.UTF_8; -import static java.time.Month.APRIL; -import static java.time.Month.AUGUST; -import static java.time.Month.DECEMBER; -import static java.time.Month.FEBRUARY; -import static java.time.Month.JANUARY; -import static java.time.Month.JULY; -import static java.time.Month.JUNE; -import static java.time.Month.MARCH; -import static java.time.Month.MAY; -import static java.time.Month.NOVEMBER; -import static java.time.Month.OCTOBER; -import static java.time.Month.SEPTEMBER; import static java.time.ZoneOffset.systemDefault; import static java.util.stream.Collectors.toList; @@ -93,20 +78,6 @@ public enum BigQueryType 10, // 8 digits after the dot 1, // 9 digits after the dot }; - private static final ImmutableMap MONTH = ImmutableMap.builder() - .put("01", JANUARY) - .put("02", FEBRUARY) - .put("03", MARCH) 
- .put("04", APRIL) - .put("05", MAY) - .put("06", JUNE) - .put("07", JULY) - .put("08", AUGUST) - .put("09", SEPTEMBER) - .put("10", OCTOBER) - .put("11", NOVEMBER) - .put("12", DECEMBER) - .build(); private static final DateTimeFormatter DATETIME_FORMATTER = DateTimeFormatter.ofPattern("''yyyy-MM-dd HH:mm:ss.SSS''"); private final Type nativeType; @@ -194,7 +165,7 @@ private static ZonedDateTime toZonedDateTime(long millisUtc, ZoneId zoneId) static String stringToStringConverter(Object value) { Slice slice = (Slice) value; - return quote(new String(slice.getBytes(), UTF_8)); + return quote(slice.toStringUtf8()); } static String numericToStringConverter(Object value) diff --git a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryUtil.java b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryUtil.java index 4006a74c800d..df960a39c0f0 100644 --- a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryUtil.java +++ b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/BigQueryUtil.java @@ -24,7 +24,7 @@ import static com.google.cloud.http.BaseHttpServiceException.UNKNOWN_CODE; import static com.google.common.base.Throwables.getCausalChain; -class BigQueryUtil +final class BigQueryUtil { private static final Set INTERNAL_ERROR_MESSAGES = ImmutableSet.of( "HTTP/2 error code: INTERNAL_ERROR", diff --git a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/Conversions.java b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/Conversions.java index 25f676f39edb..6a4004d1665d 100644 --- a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/Conversions.java +++ b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/Conversions.java @@ -26,7 +26,7 @@ import static com.google.common.base.MoreObjects.firstNonNull; import static com.google.common.collect.ImmutableMap.toImmutableMap; -class Conversions +final class Conversions { private Conversions() {} diff --git 
a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/ReadRowsHelper.java b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/ReadRowsHelper.java index 32486fc1d5c8..e9c01d6b8944 100644 --- a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/ReadRowsHelper.java +++ b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/ReadRowsHelper.java @@ -25,14 +25,14 @@ public class ReadRowsHelper { - private BigQueryStorageClient client; - private ReadRowsRequest.Builder request; - private int maxReadRowsRetries; + private final BigQueryStorageClient client; + private final ReadRowsRequest.Builder request; + private final int maxReadRowsRetries; public ReadRowsHelper(BigQueryStorageClient client, ReadRowsRequest.Builder request, int maxReadRowsRetries) { this.client = requireNonNull(client, "client cannot be null"); - this.request = requireNonNull(request, "client cannot be null"); + this.request = requireNonNull(request, "request cannot be null"); this.maxReadRowsRetries = maxReadRowsRetries; } diff --git a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/ReadSessionCreator.java b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/ReadSessionCreator.java index 31cc8bbf8f9f..e69aeec53b6d 100644 --- a/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/ReadSessionCreator.java +++ b/presto-bigquery/src/main/java/io/prestosql/plugin/bigquery/ReadSessionCreator.java @@ -28,7 +28,6 @@ import com.google.cloud.bigquery.storage.v1beta1.TableReferenceProto; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; -import com.google.common.collect.ImmutableList; import io.airlift.log.Logger; import io.prestosql.spi.PrestoException; @@ -42,6 +41,7 @@ import static io.prestosql.plugin.bigquery.BigQueryUtil.convertToBigQueryException; import static io.prestosql.spi.StandardErrorCode.NOT_SUPPORTED; import static java.lang.String.format; +import static java.util.Objects.requireNonNull; import static 
java.util.stream.Collectors.toList; // A helper class, also handles view materialization @@ -49,7 +49,7 @@ public class ReadSessionCreator { private static final Logger log = Logger.get(ReadSessionCreator.class); - private static Cache destinationTableCache = + private static final Cache destinationTableCache = CacheBuilder.newBuilder() .expireAfterWrite(15, TimeUnit.MINUTES) .maximumSize(1000) @@ -69,11 +69,11 @@ public ReadSessionCreator( this.bigQueryStorageClientFactory = bigQueryStorageClientFactory; } - public Storage.ReadSession create(TableId table, ImmutableList selectedFields, Optional filter, int parallelism) + public Storage.ReadSession create(TableId table, List selectedFields, Optional filter, int parallelism) { TableInfo tableDetails = bigQueryClient.getTable(table); - TableInfo actualTable = getActualTable(tableDetails, selectedFields, new String[] {}); + TableInfo actualTable = getActualTable(tableDetails, selectedFields); List filteredSelectedFields = selectedFields.stream() .filter(BigQueryUtil::validColumnName) @@ -111,10 +111,9 @@ TableReferenceProto.TableReference toTableReference(TableId tableId) .build(); } - TableInfo getActualTable( + private TableInfo getActualTable( TableInfo table, - ImmutableList requiredColumns, - String[] filters) + List requiredColumns) { TableDefinition tableDefinition = table.getDefinition(); TableDefinition.Type tableType = tableDefinition.getType(); @@ -128,10 +127,10 @@ TableInfo getActualTable( BigQueryConfig.VIEWS_ENABLED)); } // get it from the view - String querySql = bigQueryClient.createSql(table.getTableId(), requiredColumns, filters); - log.debug("querySql is %s", querySql); + String query = bigQueryClient.selectSql(table.getTableId(), requiredColumns); + log.debug("query is %s", query); try { - return destinationTableCache.get(querySql, new DestinationTableBuilder(bigQueryClient, config, querySql, table.getTableId())); + return destinationTableCache.get(query, new 
DestinationTableBuilder(bigQueryClient, config, query, table.getTableId())); } catch (ExecutionException e) { throw new PrestoException(BIGQUERY_VIEW_DESTINATION_TABLE_CREATION_FAILED, "Error creating destination table", e); @@ -144,20 +143,20 @@ TableInfo getActualTable( } } - static class DestinationTableBuilder + private static class DestinationTableBuilder implements Callable { - final BigQueryClient bigQueryClient; - final ReadSessionCreatorConfig config; - final String querySql; - final TableId table; + private final BigQueryClient bigQueryClient; + private final ReadSessionCreatorConfig config; + private final String query; + private final TableId table; - DestinationTableBuilder(BigQueryClient bigQueryClient, ReadSessionCreatorConfig config, String querySql, TableId table) + DestinationTableBuilder(BigQueryClient bigQueryClient, ReadSessionCreatorConfig config, String query, TableId table) { - this.bigQueryClient = bigQueryClient; - this.config = config; - this.querySql = querySql; - this.table = table; + this.bigQueryClient = requireNonNull(bigQueryClient, "bigQueryClient is null"); + this.config = requireNonNull(config, "config is null"); + this.query = requireNonNull(query, "query is null"); + this.table = requireNonNull(table, "table is null"); } @Override @@ -172,7 +171,7 @@ TableInfo createTableFromQuery() log.debug("destinationTable is %s", destinationTable); JobInfo jobInfo = JobInfo.of( QueryJobConfiguration - .newBuilder(querySql) + .newBuilder(query) .setDestinationTable(destinationTable) .build()); log.debug("running query %s", jobInfo); diff --git a/presto-bigquery/src/test/java/io/prestosql/plugin/bigquery/MockResponsesBatch.java b/presto-bigquery/src/test/java/io/prestosql/plugin/bigquery/MockResponsesBatch.java index 945673c147e3..1827d784f011 100644 --- a/presto-bigquery/src/test/java/io/prestosql/plugin/bigquery/MockResponsesBatch.java +++ b/presto-bigquery/src/test/java/io/prestosql/plugin/bigquery/MockResponsesBatch.java @@ -22,7 +22,7 
@@ class MockResponsesBatch implements Iterator { - private Queue responses = new LinkedList<>(); + private final Queue responses = new LinkedList<>(); void addResponse(ReadRowsResponse response) { diff --git a/presto-bigquery/src/test/java/io/prestosql/plugin/bigquery/TestReadRowsHelper.java b/presto-bigquery/src/test/java/io/prestosql/plugin/bigquery/TestReadRowsHelper.java index 341443af1f78..bc47d10e8056 100644 --- a/presto-bigquery/src/test/java/io/prestosql/plugin/bigquery/TestReadRowsHelper.java +++ b/presto-bigquery/src/test/java/io/prestosql/plugin/bigquery/TestReadRowsHelper.java @@ -24,6 +24,7 @@ import org.testng.annotations.Test; import java.util.Iterator; +import java.util.List; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.mock; @@ -41,7 +42,7 @@ void testNoFailures() batch1.addResponse(ReadRowsResponse.newBuilder().setRowCount(11).build()); // so we can run multiple tests - ImmutableList responses = ImmutableList.copyOf( + List responses = ImmutableList.copyOf( new MockReadRowsHelper(client, request, 3, ImmutableList.of(batch1)) .readRows()); @@ -62,7 +63,7 @@ void testRetryOfSingleFailure() MockResponsesBatch batch2 = new MockResponsesBatch(); batch2.addResponse(ReadRowsResponse.newBuilder().setRowCount(11).build()); - ImmutableList responses = ImmutableList.copyOf( + List responses = ImmutableList.copyOf( new MockReadRowsHelper(client, request, 3, ImmutableList.of(batch1, batch2)) .readRows()); From c7d969cbabfcda344b14ff3f7a5b8dad7a6b25ca Mon Sep 17 00:00:00 2001 From: Victor Azevedo Date: Wed, 23 Oct 2019 21:00:25 -0300 Subject: [PATCH 373/519] Add random functions with range --- .../src/main/sphinx/functions/math.rst | 4 +++ .../operator/scalar/MathFunctions.java | 36 +++++++++++++++++++ .../operator/scalar/TestMathFunctions.java | 28 +++++++++++++++ 3 files changed, 68 insertions(+) diff --git a/presto-docs/src/main/sphinx/functions/math.rst b/presto-docs/src/main/sphinx/functions/math.rst index 
ae00b4bc85f5..8048cf4e6767 100644 --- a/presto-docs/src/main/sphinx/functions/math.rst +++ b/presto-docs/src/main/sphinx/functions/math.rst @@ -135,6 +135,10 @@ Mathematical Functions Returns a pseudo-random number between 0 and n (exclusive). +.. function:: random(m, n) -> [same as input] + + Returns a pseudo-random number between m and n (exclusive). + .. function:: round(x) -> [same as input] Returns ``x`` rounded to the nearest integer. diff --git a/presto-main/src/main/java/io/prestosql/operator/scalar/MathFunctions.java b/presto-main/src/main/java/io/prestosql/operator/scalar/MathFunctions.java index a071a2a4f0c6..c013d2537c9f 100644 --- a/presto-main/src/main/java/io/prestosql/operator/scalar/MathFunctions.java +++ b/presto-main/src/main/java/io/prestosql/operator/scalar/MathFunctions.java @@ -607,6 +607,42 @@ public static long random(@SqlType(StandardTypes.BIGINT) long value) return ThreadLocalRandom.current().nextLong(value); } + @Description("A pseudo-random number between start and stop (exclusive)") + @ScalarFunction(value = "random", alias = "rand", deterministic = false) + @SqlType(StandardTypes.TINYINT) + public static long randomTinyint(@SqlType(StandardTypes.TINYINT) long start, @SqlType(StandardTypes.TINYINT) long stop) + { + checkCondition(start < stop, INVALID_FUNCTION_ARGUMENT, "start value must be less than stop value"); + return ThreadLocalRandom.current().nextLong(start, stop); + } + + @Description("A pseudo-random number between start and stop (exclusive)") + @ScalarFunction(value = "random", alias = "rand", deterministic = false) + @SqlType(StandardTypes.SMALLINT) + public static long randomSmallint(@SqlType(StandardTypes.SMALLINT) long start, @SqlType(StandardTypes.SMALLINT) long stop) + { + checkCondition(start < stop, INVALID_FUNCTION_ARGUMENT, "start value must be less than stop value"); + return ThreadLocalRandom.current().nextInt((int) start, (int) stop); + } + + @Description("A pseudo-random number between start and stop 
(exclusive)") + @ScalarFunction(value = "random", alias = "rand", deterministic = false) + @SqlType(StandardTypes.INTEGER) + public static long randomInteger(@SqlType(StandardTypes.INTEGER) long start, @SqlType(StandardTypes.INTEGER) long stop) + { + checkCondition(start < stop, INVALID_FUNCTION_ARGUMENT, "start value must be less than stop value"); + return ThreadLocalRandom.current().nextInt((int) start, (int) stop); + } + + @Description("A pseudo-random number between start and stop (exclusive)") + @ScalarFunction(value = "random", alias = "rand", deterministic = false) + @SqlType(StandardTypes.BIGINT) + public static long random(@SqlType(StandardTypes.BIGINT) long start, @SqlType(StandardTypes.BIGINT) long stop) + { + checkCondition(start < stop, INVALID_FUNCTION_ARGUMENT, "start value must be less than stop value"); + return ThreadLocalRandom.current().nextLong(start, stop); + } + @Description("Inverse of normal cdf given a mean, std, and probability") @ScalarFunction @SqlType(StandardTypes.DOUBLE) diff --git a/presto-main/src/test/java/io/prestosql/operator/scalar/TestMathFunctions.java b/presto-main/src/test/java/io/prestosql/operator/scalar/TestMathFunctions.java index 450c41e6473c..9306874a2a29 100644 --- a/presto-main/src/test/java/io/prestosql/operator/scalar/TestMathFunctions.java +++ b/presto-main/src/test/java/io/prestosql/operator/scalar/TestMathFunctions.java @@ -688,11 +688,39 @@ public void testRandom() functionAssertions.tryEvaluateWithAll("rand()", DOUBLE, TEST_SESSION); functionAssertions.tryEvaluateWithAll("random()", DOUBLE, TEST_SESSION); functionAssertions.tryEvaluateWithAll("rand(1000)", INTEGER, TEST_SESSION); + functionAssertions.tryEvaluateWithAll("random(TINYINT '3', TINYINT '5')", TINYINT, TEST_SESSION); + functionAssertions.tryEvaluateWithAll("random(TINYINT '-3', TINYINT '-1')", TINYINT, TEST_SESSION); + functionAssertions.tryEvaluateWithAll("random(TINYINT '-3', TINYINT '5')", TINYINT, TEST_SESSION); + 
functionAssertions.tryEvaluateWithAll("random(SMALLINT '20000', SMALLINT '30000')", SMALLINT, TEST_SESSION); + functionAssertions.tryEvaluateWithAll("random(SMALLINT '-20000', SMALLINT '-10000')", SMALLINT, TEST_SESSION); + functionAssertions.tryEvaluateWithAll("random(SMALLINT '-20000', SMALLINT '30000')", SMALLINT, TEST_SESSION); + functionAssertions.tryEvaluateWithAll("random(1000, 2000)", INTEGER, TEST_SESSION); + functionAssertions.tryEvaluateWithAll("random(-10, -5)", INTEGER, TEST_SESSION); + functionAssertions.tryEvaluateWithAll("random(-10, 10)", INTEGER, TEST_SESSION); functionAssertions.tryEvaluateWithAll("random(2000)", INTEGER, TEST_SESSION); functionAssertions.tryEvaluateWithAll("random(3000000000)", BIGINT, TEST_SESSION); + functionAssertions.tryEvaluateWithAll("random(3000000000, 5000000000)", BIGINT, TEST_SESSION); + functionAssertions.tryEvaluateWithAll("random(-3000000000, -2000000000)", BIGINT, TEST_SESSION); + functionAssertions.tryEvaluateWithAll("random(-3000000000, 5000000000)", BIGINT, TEST_SESSION); assertInvalidFunction("rand(-1)", "bound must be positive"); assertInvalidFunction("rand(-3000000000)", "bound must be positive"); + assertInvalidFunction("random(TINYINT '5', TINYINT '3')", "start value must be less than stop value"); + assertInvalidFunction("random(TINYINT '5', TINYINT '5')", "start value must be less than stop value"); + assertInvalidFunction("random(TINYINT '-5', TINYINT '-10')", "start value must be less than stop value"); + assertInvalidFunction("random(TINYINT '-5', TINYINT '-5')", "start value must be less than stop value"); + assertInvalidFunction("random(SMALLINT '30000', SMALLINT '10000')", "start value must be less than stop value"); + assertInvalidFunction("random(SMALLINT '30000', SMALLINT '30000')", "start value must be less than stop value"); + assertInvalidFunction("random(SMALLINT '-30000', SMALLINT '-31000')", "start value must be less than stop value"); + assertInvalidFunction("random(SMALLINT '-30000', 
SMALLINT '-30000')", "start value must be less than stop value"); + assertInvalidFunction("random(1000, 500)", "start value must be less than stop value"); + assertInvalidFunction("random(500, 500)", "start value must be less than stop value"); + assertInvalidFunction("random(-500, -600)", "start value must be less than stop value"); + assertInvalidFunction("random(-500, -500)", "start value must be less than stop value"); + assertInvalidFunction("random(3000000000, 1000000000)", "start value must be less than stop value"); + assertInvalidFunction("random(3000000000, 3000000000)", "start value must be less than stop value"); + assertInvalidFunction("random(-3000000000, -4000000000)", "start value must be less than stop value"); + assertInvalidFunction("random(-3000000000, -3000000000)", "start value must be less than stop value"); } @Test From c56920368205917f332e987b3e5e04000c4da25f Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Fri, 8 May 2020 11:14:17 +0200 Subject: [PATCH 374/519] Embed version in commit stacktrace --- .../src/main/java/io/prestosql/server/CoordinatorModule.java | 4 +++- .../io/prestosql/transaction/InMemoryTransactionManager.java | 3 +-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/presto-main/src/main/java/io/prestosql/server/CoordinatorModule.java b/presto-main/src/main/java/io/prestosql/server/CoordinatorModule.java index dc97deb8a33f..0fb812bdbe6b 100644 --- a/presto-main/src/main/java/io/prestosql/server/CoordinatorModule.java +++ b/presto-main/src/main/java/io/prestosql/server/CoordinatorModule.java @@ -153,6 +153,7 @@ import io.prestosql.transaction.InMemoryTransactionManager; import io.prestosql.transaction.TransactionManager; import io.prestosql.transaction.TransactionManagerConfig; +import io.prestosql.version.EmbedVersion; import javax.annotation.PreDestroy; import javax.inject.Inject; @@ -421,10 +422,11 @@ public static ExecutorService createTransactionFinishingExecutor() public static TransactionManager 
createTransactionManager( TransactionManagerConfig config, CatalogManager catalogManager, + EmbedVersion embedVersion, @ForTransactionManager ScheduledExecutorService idleCheckExecutor, @ForTransactionManager ExecutorService finishingExecutor) { - return InMemoryTransactionManager.create(config, idleCheckExecutor, catalogManager, finishingExecutor); + return InMemoryTransactionManager.create(config, idleCheckExecutor, catalogManager, embedVersion.embedVersion(finishingExecutor)); } private static void bindDataDefinitionTask( diff --git a/presto-main/src/main/java/io/prestosql/transaction/InMemoryTransactionManager.java b/presto-main/src/main/java/io/prestosql/transaction/InMemoryTransactionManager.java index 84d07ac0262a..5b900f9156b4 100644 --- a/presto-main/src/main/java/io/prestosql/transaction/InMemoryTransactionManager.java +++ b/presto-main/src/main/java/io/prestosql/transaction/InMemoryTransactionManager.java @@ -46,7 +46,6 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.Executor; -import java.util.concurrent.ExecutorService; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; @@ -101,7 +100,7 @@ public static TransactionManager create( TransactionManagerConfig config, ScheduledExecutorService idleCheckExecutor, CatalogManager catalogManager, - ExecutorService finishingExecutor) + Executor finishingExecutor) { InMemoryTransactionManager transactionManager = new InMemoryTransactionManager(config.getIdleTimeout(), config.getMaxFinishingConcurrency(), catalogManager, finishingExecutor); transactionManager.scheduleIdleChecks(config.getIdleCheckInterval(), idleCheckExecutor); From b33c4ffd9745af6d15e909e95d2be8a729cc5db1 Mon Sep 17 00:00:00 2001 From: Shashwat Arghode Date: Wed, 6 May 2020 15:12:27 -0700 Subject: [PATCH 375/519] Update jQuery to 3.5.1 and Bootstrap to 3.4.1 - Update jQuery to 3.5.1 to 
fix Presto Web UIs known security vulnerabilities - Update bootstrap to version 3.4.1 which is compatible with jQuery > 3.0 Resolves: #3534 --- .../src/main/resources/webapp/disabled.html | 2 +- .../src/main/resources/webapp/dist/index.js | 6 +- .../src/main/resources/webapp/dist/plan.js | 2 +- .../src/main/resources/webapp/dist/query.js | 4 +- .../src/main/resources/webapp/dist/stage.js | 4 +- .../main/resources/webapp/dist/timeline.js | 2 +- .../src/main/resources/webapp/dist/worker.js | 6 +- .../main/resources/webapp/embedded_plan.html | 2 +- .../src/main/resources/webapp/index.html | 2 +- .../src/main/resources/webapp/login.html | 2 +- .../src/main/resources/webapp/plan.html | 2 +- .../src/main/resources/webapp/query.html | 2 +- .../webapp/src/components/ClusterHUD.jsx | 2 +- .../webapp/src/components/PageTitle.jsx | 6 +- .../webapp/src/components/QueryDetail.jsx | 2 +- .../webapp/src/components/QueryList.jsx | 2 +- .../webapp/src/components/StageDetail.jsx | 2 +- .../webapp/src/components/WorkerStatus.jsx | 2 +- .../src/components/WorkerThreadList.jsx | 2 +- .../src/main/resources/webapp/stage.html | 2 +- .../src/main/resources/webapp/timeline.html | 2 +- .../vendor/bootstrap/css/bootstrap-theme.css | 63 +- .../bootstrap/css/bootstrap-theme.min.css | 15 +- .../webapp/vendor/bootstrap/css/bootstrap.css | 773 ++- .../vendor/bootstrap/css/bootstrap.min.css | 15 +- .../fonts/glyphicons-halflings-regular.eot | Bin .../fonts/glyphicons-halflings-regular.svg | 0 .../fonts/glyphicons-halflings-regular.ttf | Bin .../fonts/glyphicons-halflings-regular.woff | Bin .../fonts/glyphicons-halflings-regular.woff2 | Bin .../webapp/vendor/bootstrap/js/bootstrap.js | 437 +- .../vendor/bootstrap/js/bootstrap.min.js | 14 +- .../webapp/vendor/jquery/jquery-2.2.3.min.js | 4 - .../{jquery-2.2.3.js => jquery-3.5.1.js} | 5184 ++++++++++------- .../webapp/vendor/jquery/jquery-3.5.1.min.js | 2 + .../src/main/resources/webapp/worker.html | 2 +- 36 files changed, 3883 insertions(+), 2684 
deletions(-) mode change 100755 => 100644 presto-main/src/main/resources/webapp/vendor/bootstrap/css/bootstrap-theme.css mode change 100755 => 100644 presto-main/src/main/resources/webapp/vendor/bootstrap/css/bootstrap-theme.min.css mode change 100755 => 100644 presto-main/src/main/resources/webapp/vendor/bootstrap/css/bootstrap.css mode change 100755 => 100644 presto-main/src/main/resources/webapp/vendor/bootstrap/css/bootstrap.min.css mode change 100755 => 100644 presto-main/src/main/resources/webapp/vendor/bootstrap/fonts/glyphicons-halflings-regular.eot mode change 100755 => 100644 presto-main/src/main/resources/webapp/vendor/bootstrap/fonts/glyphicons-halflings-regular.svg mode change 100755 => 100644 presto-main/src/main/resources/webapp/vendor/bootstrap/fonts/glyphicons-halflings-regular.ttf mode change 100755 => 100644 presto-main/src/main/resources/webapp/vendor/bootstrap/fonts/glyphicons-halflings-regular.woff mode change 100755 => 100644 presto-main/src/main/resources/webapp/vendor/bootstrap/fonts/glyphicons-halflings-regular.woff2 mode change 100755 => 100644 presto-main/src/main/resources/webapp/vendor/bootstrap/js/bootstrap.js mode change 100755 => 100644 presto-main/src/main/resources/webapp/vendor/bootstrap/js/bootstrap.min.js delete mode 100644 presto-main/src/main/resources/webapp/vendor/jquery/jquery-2.2.3.min.js rename presto-main/src/main/resources/webapp/vendor/jquery/{jquery-2.2.3.js => jquery-3.5.1.js} (67%) create mode 100644 presto-main/src/main/resources/webapp/vendor/jquery/jquery-3.5.1.min.js diff --git a/presto-main/src/main/resources/webapp/disabled.html b/presto-main/src/main/resources/webapp/disabled.html index c82ff673b84d..afb580fa2b38 100644 --- a/presto-main/src/main/resources/webapp/disabled.html +++ b/presto-main/src/main/resources/webapp/disabled.html @@ -32,7 +32,7 @@ - + diff --git a/presto-main/src/main/resources/webapp/dist/index.js b/presto-main/src/main/resources/webapp/dist/index.js index 7a90ab95a543..0b08c5d83be1 
100644 --- a/presto-main/src/main/resources/webapp/dist/index.js +++ b/presto-main/src/main/resources/webapp/dist/index.js @@ -94,7 +94,7 @@ /***/ (function(module, exports, __webpack_require__) { "use strict"; -eval("\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.ClusterHUD = undefined;\n\nvar _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();\n\nvar _react = __webpack_require__(/*! react */ \"./node_modules/react/index.js\");\n\nvar _react2 = _interopRequireDefault(_react);\n\nvar _utils = __webpack_require__(/*! ../utils */ \"./utils.js\");\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return call && (typeof call === \"object\" || typeof call === \"function\") ? 
call : self; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function, not \" + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nvar SPARKLINE_PROPERTIES = {\n width: '100%',\n height: '75px',\n fillColor: '#3F4552',\n lineColor: '#747F96',\n spotColor: '#1EDCFF',\n tooltipClassname: 'sparkline-tooltip',\n disableHiddenCheck: true\n};\n\nvar ClusterHUD = exports.ClusterHUD = function (_React$Component) {\n _inherits(ClusterHUD, _React$Component);\n\n function ClusterHUD(props) {\n _classCallCheck(this, ClusterHUD);\n\n var _this = _possibleConstructorReturn(this, (ClusterHUD.__proto__ || Object.getPrototypeOf(ClusterHUD)).call(this, props));\n\n _this.state = {\n runningQueries: [],\n queuedQueries: [],\n blockedQueries: [],\n activeWorkers: [],\n runningDrivers: [],\n reservedMemory: [],\n rowInputRate: [],\n byteInputRate: [],\n perWorkerCpuTimeRate: [],\n\n lastRender: null,\n lastRefresh: null,\n\n lastInputRows: null,\n lastInputBytes: null,\n lastCpuTime: null,\n\n initialized: false\n };\n\n _this.refreshLoop = 
_this.refreshLoop.bind(_this);\n return _this;\n }\n\n _createClass(ClusterHUD, [{\n key: \"resetTimer\",\n value: function resetTimer() {\n clearTimeout(this.timeoutId);\n // stop refreshing when query finishes or fails\n if (this.state.query === null || !this.state.ended) {\n this.timeoutId = setTimeout(this.refreshLoop, 1000);\n }\n }\n }, {\n key: \"refreshLoop\",\n value: function refreshLoop() {\n clearTimeout(this.timeoutId); // to stop multiple series of refreshLoop from going on simultaneously\n $.get('/ui/api/stats', function (clusterState) {\n\n var newRowInputRate = [];\n var newByteInputRate = [];\n var newPerWorkerCpuTimeRate = [];\n if (this.state.lastRefresh !== null) {\n var rowsInputSinceRefresh = clusterState.totalInputRows - this.state.lastInputRows;\n var bytesInputSinceRefresh = clusterState.totalInputBytes - this.state.lastInputBytes;\n var cpuTimeSinceRefresh = clusterState.totalCpuTimeSecs - this.state.lastCpuTime;\n var secsSinceRefresh = (Date.now() - this.state.lastRefresh) / 1000.0;\n\n newRowInputRate = (0, _utils.addExponentiallyWeightedToHistory)(rowsInputSinceRefresh / secsSinceRefresh, this.state.rowInputRate);\n newByteInputRate = (0, _utils.addExponentiallyWeightedToHistory)(bytesInputSinceRefresh / secsSinceRefresh, this.state.byteInputRate);\n newPerWorkerCpuTimeRate = (0, _utils.addExponentiallyWeightedToHistory)(cpuTimeSinceRefresh / clusterState.activeWorkers / secsSinceRefresh, this.state.perWorkerCpuTimeRate);\n }\n\n this.setState({\n // instantaneous stats\n runningQueries: (0, _utils.addToHistory)(clusterState.runningQueries, this.state.runningQueries),\n queuedQueries: (0, _utils.addToHistory)(clusterState.queuedQueries, this.state.queuedQueries),\n blockedQueries: (0, _utils.addToHistory)(clusterState.blockedQueries, this.state.blockedQueries),\n activeWorkers: (0, _utils.addToHistory)(clusterState.activeWorkers, this.state.activeWorkers),\n\n // moving averages\n runningDrivers: (0, 
_utils.addExponentiallyWeightedToHistory)(clusterState.runningDrivers, this.state.runningDrivers),\n reservedMemory: (0, _utils.addExponentiallyWeightedToHistory)(clusterState.reservedMemory, this.state.reservedMemory),\n\n // moving averages for diffs\n rowInputRate: newRowInputRate,\n byteInputRate: newByteInputRate,\n perWorkerCpuTimeRate: newPerWorkerCpuTimeRate,\n\n lastInputRows: clusterState.totalInputRows,\n lastInputBytes: clusterState.totalInputBytes,\n lastCpuTime: clusterState.totalCpuTimeSecs,\n\n initialized: true,\n\n lastRefresh: Date.now()\n });\n this.resetTimer();\n }.bind(this)).error(function () {\n this.resetTimer();\n }.bind(this));\n }\n }, {\n key: \"componentDidMount\",\n value: function componentDidMount() {\n this.refreshLoop();\n }\n }, {\n key: \"componentDidUpdate\",\n value: function componentDidUpdate() {\n // prevent multiple calls to componentDidUpdate (resulting from calls to setState or otherwise) within the refresh interval from re-rendering sparklines/charts\n if (this.state.lastRender === null || Date.now() - this.state.lastRender >= 1000) {\n var renderTimestamp = Date.now();\n $('#running-queries-sparkline').sparkline(this.state.runningQueries, $.extend({}, SPARKLINE_PROPERTIES, { chartRangeMin: 0 }));\n $('#blocked-queries-sparkline').sparkline(this.state.blockedQueries, $.extend({}, SPARKLINE_PROPERTIES, { chartRangeMin: 0 }));\n $('#queued-queries-sparkline').sparkline(this.state.queuedQueries, $.extend({}, SPARKLINE_PROPERTIES, { chartRangeMin: 0 }));\n\n $('#active-workers-sparkline').sparkline(this.state.activeWorkers, $.extend({}, SPARKLINE_PROPERTIES, { chartRangeMin: 0 }));\n $('#running-drivers-sparkline').sparkline(this.state.runningDrivers, $.extend({}, SPARKLINE_PROPERTIES, { numberFormatter: _utils.precisionRound }));\n $('#reserved-memory-sparkline').sparkline(this.state.reservedMemory, $.extend({}, SPARKLINE_PROPERTIES, { numberFormatter: _utils.formatDataSizeBytes }));\n\n 
$('#row-input-rate-sparkline').sparkline(this.state.rowInputRate, $.extend({}, SPARKLINE_PROPERTIES, { numberFormatter: _utils.formatCount }));\n $('#byte-input-rate-sparkline').sparkline(this.state.byteInputRate, $.extend({}, SPARKLINE_PROPERTIES, { numberFormatter: _utils.formatDataSizeBytes }));\n $('#cpu-time-rate-sparkline').sparkline(this.state.perWorkerCpuTimeRate, $.extend({}, SPARKLINE_PROPERTIES, { numberFormatter: _utils.precisionRound }));\n\n this.setState({\n lastRender: renderTimestamp\n });\n }\n\n $('[data-toggle=\"tooltip\"]').tooltip();\n }\n }, {\n key: \"render\",\n value: function render() {\n return _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-4\" },\n _react2.default.createElement(\n \"div\",\n { className: \"stat-title\" },\n _react2.default.createElement(\n \"span\",\n { className: \"text\", \"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"Total number of queries currently running\" },\n \"Running queries\"\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-4\" },\n _react2.default.createElement(\n \"div\",\n { className: \"stat-title\" },\n _react2.default.createElement(\n \"span\",\n { className: \"text\", \"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"Total number of active worker nodes\" },\n \"Active workers\"\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-4\" },\n _react2.default.createElement(\n \"div\",\n { className: \"stat-title\" },\n _react2.default.createElement(\n \"span\",\n { className: \"text\", \"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"Moving average of input rows processed per second\" },\n \"Rows/sec\"\n )\n )\n )\n ),\n _react2.default.createElement(\n 
\"div\",\n { className: \"row stat-line-end\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-4\" },\n _react2.default.createElement(\n \"div\",\n { className: \"stat stat-large\" },\n _react2.default.createElement(\n \"span\",\n { className: \"stat-text\" },\n this.state.runningQueries[this.state.runningQueries.length - 1]\n ),\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"running-queries-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-4\" },\n _react2.default.createElement(\n \"div\",\n { className: \"stat stat-large\" },\n _react2.default.createElement(\n \"span\",\n { className: \"stat-text\" },\n this.state.activeWorkers[this.state.activeWorkers.length - 1]\n ),\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"active-workers-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-4\" },\n _react2.default.createElement(\n \"div\",\n { className: \"stat stat-large\" },\n _react2.default.createElement(\n \"span\",\n { className: \"stat-text\" },\n (0, _utils.formatCount)(this.state.rowInputRate[this.state.rowInputRate.length - 1])\n ),\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"row-input-rate-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-4\" },\n _react2.default.createElement(\n \"div\",\n { className: \"stat-title\" },\n _react2.default.createElement(\n \"span\",\n { className: \"text\", \"data-toggle\": \"tooltip\", \"data-placement\": \"right\", 
title: \"Total number of queries currently queued and awaiting execution\" },\n \"Queued queries\"\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-4\" },\n _react2.default.createElement(\n \"div\",\n { className: \"stat-title\" },\n _react2.default.createElement(\n \"span\",\n { className: \"text\", \"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"Moving average of total running drivers\" },\n \"Runnable drivers\"\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-4\" },\n _react2.default.createElement(\n \"div\",\n { className: \"stat-title\" },\n _react2.default.createElement(\n \"span\",\n { className: \"text\", \"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"Moving average of input bytes processed per second\" },\n \"Bytes/sec\"\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row stat-line-end\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-4\" },\n _react2.default.createElement(\n \"div\",\n { className: \"stat stat-large\" },\n _react2.default.createElement(\n \"span\",\n { className: \"stat-text\" },\n this.state.queuedQueries[this.state.queuedQueries.length - 1]\n ),\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"queued-queries-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-4\" },\n _react2.default.createElement(\n \"div\",\n { className: \"stat stat-large\" },\n _react2.default.createElement(\n \"span\",\n { className: \"stat-text\" },\n (0, _utils.formatCount)(this.state.runningDrivers[this.state.runningDrivers.length - 1])\n ),\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"running-drivers-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading 
...\"\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-4\" },\n _react2.default.createElement(\n \"div\",\n { className: \"stat stat-large\" },\n _react2.default.createElement(\n \"span\",\n { className: \"stat-text\" },\n (0, _utils.formatDataSizeBytes)(this.state.byteInputRate[this.state.byteInputRate.length - 1])\n ),\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"byte-input-rate-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-4\" },\n _react2.default.createElement(\n \"div\",\n { className: \"stat-title\" },\n _react2.default.createElement(\n \"span\",\n { className: \"text\", \"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"Total number of queries currently blocked and unable to make progress\" },\n \"Blocked Queries\"\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-4\" },\n _react2.default.createElement(\n \"div\",\n { className: \"stat-title\" },\n _react2.default.createElement(\n \"span\",\n { className: \"text\", \"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"Total amount of memory reserved by all running queries\" },\n \"Reserved Memory (B)\"\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-4\" },\n _react2.default.createElement(\n \"div\",\n { className: \"stat-title\" },\n _react2.default.createElement(\n \"span\",\n { className: \"text\", \"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"Moving average of CPU time utilized per second per worker\" },\n \"Worker Parallelism\"\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row stat-line-end\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-4\" 
},\n _react2.default.createElement(\n \"div\",\n { className: \"stat stat-large\" },\n _react2.default.createElement(\n \"span\",\n { className: \"stat-text\" },\n this.state.blockedQueries[this.state.blockedQueries.length - 1]\n ),\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"blocked-queries-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-4\" },\n _react2.default.createElement(\n \"div\",\n { className: \"stat stat-large\" },\n _react2.default.createElement(\n \"span\",\n { className: \"stat-text\" },\n (0, _utils.formatDataSizeBytes)(this.state.reservedMemory[this.state.reservedMemory.length - 1])\n ),\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"reserved-memory-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-4\" },\n _react2.default.createElement(\n \"div\",\n { className: \"stat stat-large\" },\n _react2.default.createElement(\n \"span\",\n { className: \"stat-text\" },\n (0, _utils.formatCount)(this.state.perWorkerCpuTimeRate[this.state.perWorkerCpuTimeRate.length - 1])\n ),\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"cpu-time-rate-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n )\n )\n )\n );\n }\n }]);\n\n return ClusterHUD;\n}(_react2.default.Component);\n\n//# sourceURL=webpack:///./components/ClusterHUD.jsx?"); +eval("\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.ClusterHUD = undefined;\n\nvar _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = 
descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();\n\nvar _react = __webpack_require__(/*! react */ \"./node_modules/react/index.js\");\n\nvar _react2 = _interopRequireDefault(_react);\n\nvar _utils = __webpack_require__(/*! ../utils */ \"./utils.js\");\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return call && (typeof call === \"object\" || typeof call === \"function\") ? call : self; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function, not \" + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? 
Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nvar SPARKLINE_PROPERTIES = {\n width: '100%',\n height: '75px',\n fillColor: '#3F4552',\n lineColor: '#747F96',\n spotColor: '#1EDCFF',\n tooltipClassname: 'sparkline-tooltip',\n disableHiddenCheck: true\n};\n\nvar ClusterHUD = exports.ClusterHUD = function (_React$Component) {\n _inherits(ClusterHUD, _React$Component);\n\n function ClusterHUD(props) {\n _classCallCheck(this, ClusterHUD);\n\n var _this = _possibleConstructorReturn(this, (ClusterHUD.__proto__ || Object.getPrototypeOf(ClusterHUD)).call(this, props));\n\n _this.state = {\n runningQueries: [],\n queuedQueries: [],\n blockedQueries: [],\n activeWorkers: [],\n runningDrivers: [],\n reservedMemory: [],\n rowInputRate: [],\n byteInputRate: [],\n perWorkerCpuTimeRate: [],\n\n lastRender: null,\n lastRefresh: null,\n\n lastInputRows: null,\n lastInputBytes: null,\n lastCpuTime: null,\n\n initialized: false\n };\n\n _this.refreshLoop = _this.refreshLoop.bind(_this);\n return _this;\n }\n\n _createClass(ClusterHUD, [{\n key: \"resetTimer\",\n value: function resetTimer() {\n clearTimeout(this.timeoutId);\n // stop refreshing when query finishes or fails\n if (this.state.query === null || !this.state.ended) {\n this.timeoutId = setTimeout(this.refreshLoop, 1000);\n }\n }\n }, {\n key: \"refreshLoop\",\n value: function refreshLoop() {\n clearTimeout(this.timeoutId); 
// to stop multiple series of refreshLoop from going on simultaneously\n $.get('/ui/api/stats', function (clusterState) {\n\n var newRowInputRate = [];\n var newByteInputRate = [];\n var newPerWorkerCpuTimeRate = [];\n if (this.state.lastRefresh !== null) {\n var rowsInputSinceRefresh = clusterState.totalInputRows - this.state.lastInputRows;\n var bytesInputSinceRefresh = clusterState.totalInputBytes - this.state.lastInputBytes;\n var cpuTimeSinceRefresh = clusterState.totalCpuTimeSecs - this.state.lastCpuTime;\n var secsSinceRefresh = (Date.now() - this.state.lastRefresh) / 1000.0;\n\n newRowInputRate = (0, _utils.addExponentiallyWeightedToHistory)(rowsInputSinceRefresh / secsSinceRefresh, this.state.rowInputRate);\n newByteInputRate = (0, _utils.addExponentiallyWeightedToHistory)(bytesInputSinceRefresh / secsSinceRefresh, this.state.byteInputRate);\n newPerWorkerCpuTimeRate = (0, _utils.addExponentiallyWeightedToHistory)(cpuTimeSinceRefresh / clusterState.activeWorkers / secsSinceRefresh, this.state.perWorkerCpuTimeRate);\n }\n\n this.setState({\n // instantaneous stats\n runningQueries: (0, _utils.addToHistory)(clusterState.runningQueries, this.state.runningQueries),\n queuedQueries: (0, _utils.addToHistory)(clusterState.queuedQueries, this.state.queuedQueries),\n blockedQueries: (0, _utils.addToHistory)(clusterState.blockedQueries, this.state.blockedQueries),\n activeWorkers: (0, _utils.addToHistory)(clusterState.activeWorkers, this.state.activeWorkers),\n\n // moving averages\n runningDrivers: (0, _utils.addExponentiallyWeightedToHistory)(clusterState.runningDrivers, this.state.runningDrivers),\n reservedMemory: (0, _utils.addExponentiallyWeightedToHistory)(clusterState.reservedMemory, this.state.reservedMemory),\n\n // moving averages for diffs\n rowInputRate: newRowInputRate,\n byteInputRate: newByteInputRate,\n perWorkerCpuTimeRate: newPerWorkerCpuTimeRate,\n\n lastInputRows: clusterState.totalInputRows,\n lastInputBytes: clusterState.totalInputBytes,\n 
lastCpuTime: clusterState.totalCpuTimeSecs,\n\n initialized: true,\n\n lastRefresh: Date.now()\n });\n this.resetTimer();\n }.bind(this)).fail(function () {\n this.resetTimer();\n }.bind(this));\n }\n }, {\n key: \"componentDidMount\",\n value: function componentDidMount() {\n this.refreshLoop();\n }\n }, {\n key: \"componentDidUpdate\",\n value: function componentDidUpdate() {\n // prevent multiple calls to componentDidUpdate (resulting from calls to setState or otherwise) within the refresh interval from re-rendering sparklines/charts\n if (this.state.lastRender === null || Date.now() - this.state.lastRender >= 1000) {\n var renderTimestamp = Date.now();\n $('#running-queries-sparkline').sparkline(this.state.runningQueries, $.extend({}, SPARKLINE_PROPERTIES, { chartRangeMin: 0 }));\n $('#blocked-queries-sparkline').sparkline(this.state.blockedQueries, $.extend({}, SPARKLINE_PROPERTIES, { chartRangeMin: 0 }));\n $('#queued-queries-sparkline').sparkline(this.state.queuedQueries, $.extend({}, SPARKLINE_PROPERTIES, { chartRangeMin: 0 }));\n\n $('#active-workers-sparkline').sparkline(this.state.activeWorkers, $.extend({}, SPARKLINE_PROPERTIES, { chartRangeMin: 0 }));\n $('#running-drivers-sparkline').sparkline(this.state.runningDrivers, $.extend({}, SPARKLINE_PROPERTIES, { numberFormatter: _utils.precisionRound }));\n $('#reserved-memory-sparkline').sparkline(this.state.reservedMemory, $.extend({}, SPARKLINE_PROPERTIES, { numberFormatter: _utils.formatDataSizeBytes }));\n\n $('#row-input-rate-sparkline').sparkline(this.state.rowInputRate, $.extend({}, SPARKLINE_PROPERTIES, { numberFormatter: _utils.formatCount }));\n $('#byte-input-rate-sparkline').sparkline(this.state.byteInputRate, $.extend({}, SPARKLINE_PROPERTIES, { numberFormatter: _utils.formatDataSizeBytes }));\n $('#cpu-time-rate-sparkline').sparkline(this.state.perWorkerCpuTimeRate, $.extend({}, SPARKLINE_PROPERTIES, { numberFormatter: _utils.precisionRound }));\n\n this.setState({\n lastRender: 
renderTimestamp\n });\n }\n\n $('[data-toggle=\"tooltip\"]').tooltip();\n }\n }, {\n key: \"render\",\n value: function render() {\n return _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-4\" },\n _react2.default.createElement(\n \"div\",\n { className: \"stat-title\" },\n _react2.default.createElement(\n \"span\",\n { className: \"text\", \"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"Total number of queries currently running\" },\n \"Running queries\"\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-4\" },\n _react2.default.createElement(\n \"div\",\n { className: \"stat-title\" },\n _react2.default.createElement(\n \"span\",\n { className: \"text\", \"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"Total number of active worker nodes\" },\n \"Active workers\"\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-4\" },\n _react2.default.createElement(\n \"div\",\n { className: \"stat-title\" },\n _react2.default.createElement(\n \"span\",\n { className: \"text\", \"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"Moving average of input rows processed per second\" },\n \"Rows/sec\"\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row stat-line-end\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-4\" },\n _react2.default.createElement(\n \"div\",\n { className: \"stat stat-large\" },\n _react2.default.createElement(\n \"span\",\n { className: \"stat-text\" },\n this.state.runningQueries[this.state.runningQueries.length - 1]\n ),\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"running-queries-sparkline\" },\n 
_react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-4\" },\n _react2.default.createElement(\n \"div\",\n { className: \"stat stat-large\" },\n _react2.default.createElement(\n \"span\",\n { className: \"stat-text\" },\n this.state.activeWorkers[this.state.activeWorkers.length - 1]\n ),\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"active-workers-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-4\" },\n _react2.default.createElement(\n \"div\",\n { className: \"stat stat-large\" },\n _react2.default.createElement(\n \"span\",\n { className: \"stat-text\" },\n (0, _utils.formatCount)(this.state.rowInputRate[this.state.rowInputRate.length - 1])\n ),\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"row-input-rate-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-4\" },\n _react2.default.createElement(\n \"div\",\n { className: \"stat-title\" },\n _react2.default.createElement(\n \"span\",\n { className: \"text\", \"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"Total number of queries currently queued and awaiting execution\" },\n \"Queued queries\"\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-4\" },\n _react2.default.createElement(\n \"div\",\n { className: \"stat-title\" },\n _react2.default.createElement(\n \"span\",\n { className: \"text\", \"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"Moving average of total running drivers\" },\n \"Runnable 
drivers\"\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-4\" },\n _react2.default.createElement(\n \"div\",\n { className: \"stat-title\" },\n _react2.default.createElement(\n \"span\",\n { className: \"text\", \"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"Moving average of input bytes processed per second\" },\n \"Bytes/sec\"\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row stat-line-end\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-4\" },\n _react2.default.createElement(\n \"div\",\n { className: \"stat stat-large\" },\n _react2.default.createElement(\n \"span\",\n { className: \"stat-text\" },\n this.state.queuedQueries[this.state.queuedQueries.length - 1]\n ),\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"queued-queries-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-4\" },\n _react2.default.createElement(\n \"div\",\n { className: \"stat stat-large\" },\n _react2.default.createElement(\n \"span\",\n { className: \"stat-text\" },\n (0, _utils.formatCount)(this.state.runningDrivers[this.state.runningDrivers.length - 1])\n ),\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"running-drivers-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-4\" },\n _react2.default.createElement(\n \"div\",\n { className: \"stat stat-large\" },\n _react2.default.createElement(\n \"span\",\n { className: \"stat-text\" },\n (0, _utils.formatDataSizeBytes)(this.state.byteInputRate[this.state.byteInputRate.length - 1])\n ),\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: 
\"byte-input-rate-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-4\" },\n _react2.default.createElement(\n \"div\",\n { className: \"stat-title\" },\n _react2.default.createElement(\n \"span\",\n { className: \"text\", \"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"Total number of queries currently blocked and unable to make progress\" },\n \"Blocked Queries\"\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-4\" },\n _react2.default.createElement(\n \"div\",\n { className: \"stat-title\" },\n _react2.default.createElement(\n \"span\",\n { className: \"text\", \"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"Total amount of memory reserved by all running queries\" },\n \"Reserved Memory (B)\"\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-4\" },\n _react2.default.createElement(\n \"div\",\n { className: \"stat-title\" },\n _react2.default.createElement(\n \"span\",\n { className: \"text\", \"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"Moving average of CPU time utilized per second per worker\" },\n \"Worker Parallelism\"\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row stat-line-end\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-4\" },\n _react2.default.createElement(\n \"div\",\n { className: \"stat stat-large\" },\n _react2.default.createElement(\n \"span\",\n { className: \"stat-text\" },\n this.state.blockedQueries[this.state.blockedQueries.length - 1]\n ),\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"blocked-queries-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n 
)\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-4\" },\n _react2.default.createElement(\n \"div\",\n { className: \"stat stat-large\" },\n _react2.default.createElement(\n \"span\",\n { className: \"stat-text\" },\n (0, _utils.formatDataSizeBytes)(this.state.reservedMemory[this.state.reservedMemory.length - 1])\n ),\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"reserved-memory-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-4\" },\n _react2.default.createElement(\n \"div\",\n { className: \"stat stat-large\" },\n _react2.default.createElement(\n \"span\",\n { className: \"stat-text\" },\n (0, _utils.formatCount)(this.state.perWorkerCpuTimeRate[this.state.perWorkerCpuTimeRate.length - 1])\n ),\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"cpu-time-rate-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n )\n )\n )\n );\n }\n }]);\n\n return ClusterHUD;\n}(_react2.default.Component);\n\n//# sourceURL=webpack:///./components/ClusterHUD.jsx?"); /***/ }), @@ -106,7 +106,7 @@ eval("\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n}); /***/ (function(module, exports, __webpack_require__) { "use strict"; -eval("\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.PageTitle = undefined;\n\nvar _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) 
defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();\n\nvar _react = __webpack_require__(/*! react */ \"./node_modules/react/index.js\");\n\nvar _react2 = _interopRequireDefault(_react);\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return call && (typeof call === \"object\" || typeof call === \"function\") ? call : self; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function, not \" + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? 
Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n\nvar PageTitle = exports.PageTitle = function (_React$Component) {\n _inherits(PageTitle, _React$Component);\n\n function PageTitle(props) {\n _classCallCheck(this, PageTitle);\n\n var _this = _possibleConstructorReturn(this, (PageTitle.__proto__ || Object.getPrototypeOf(PageTitle)).call(this, props));\n\n _this.state = {\n noConnection: false,\n lightShown: false,\n info: null,\n lastSuccess: Date.now(),\n modalShown: false,\n errorText: null\n };\n return _this;\n }\n\n _createClass(PageTitle, [{\n key: \"refreshLoop\",\n value: function refreshLoop() {\n var _this2 = this;\n\n clearTimeout(this.timeoutId);\n fetch(\"/ui/api/cluster\").then(function (response) {\n if (response.status === 401) {\n location.reload();\n }\n return response.json();\n }).then(function (info) {\n _this2.setState({\n info: info,\n noConnection: false,\n lastSuccess: Date.now(),\n modalShown: false\n });\n //$FlowFixMe$ Bootstrap 3 plugin\n $('#no-connection-modal').modal('hide');\n _this2.resetTimer();\n }).catch(function (error) {\n _this2.setState({\n noConnection: true,\n lightShown: !_this2.state.lightShown,\n errorText: error\n });\n _this2.resetTimer();\n\n if (!_this2.state.modalShown && (error || Date.now() - _this2.state.lastSuccess > 30 * 1000)) {\n //$FlowFixMe$ Bootstrap 3 plugin\n $('#no-connection-modal').modal();\n 
_this2.setState({ modalShown: true });\n }\n });\n }\n }, {\n key: \"resetTimer\",\n value: function resetTimer() {\n clearTimeout(this.timeoutId);\n this.timeoutId = setTimeout(this.refreshLoop.bind(this), 1000);\n }\n }, {\n key: \"componentDidMount\",\n value: function componentDidMount() {\n this.refreshLoop.bind(this)();\n }\n }, {\n key: \"renderStatusLight\",\n value: function renderStatusLight() {\n if (this.state.noConnection) {\n if (this.state.lightShown) {\n return _react2.default.createElement(\"span\", { className: \"status-light status-light-red\", id: \"status-indicator\" });\n } else {\n return _react2.default.createElement(\"span\", { className: \"status-light\", id: \"status-indicator\" });\n }\n }\n return _react2.default.createElement(\"span\", { className: \"status-light status-light-green\", id: \"status-indicator\" });\n }\n }, {\n key: \"render\",\n value: function render() {\n var info = this.state.info;\n if (!info) {\n return null;\n }\n\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(\n \"nav\",\n { className: \"navbar\" },\n _react2.default.createElement(\n \"div\",\n { className: \"container-fluid\" },\n _react2.default.createElement(\n \"div\",\n { className: \"navbar-header\" },\n _react2.default.createElement(\n \"table\",\n null,\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"a\",\n { href: \"/ui/\" },\n _react2.default.createElement(\"img\", { src: \"assets/logo.png\" })\n )\n ),\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-brand\" },\n this.props.title\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { id: \"navbar\", className: \"navbar-collapse collapse\" },\n _react2.default.createElement(\n \"ul\",\n { className: \"nav navbar-nav navbar-right\" 
},\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"uppercase\" },\n \"Version\"\n ),\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"span\",\n { className: \"text uppercase\", id: \"version-number\" },\n info.nodeVersion.version\n )\n )\n ),\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"uppercase\" },\n \"Environment\"\n ),\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"span\",\n { className: \"text uppercase\", id: \"environment\" },\n info.environment\n )\n )\n ),\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"uppercase\" },\n \"Uptime\"\n ),\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"span\",\n { \"data-toggle\": \"tooltip\", \"data-placement\": \"bottom\", title: \"Connection status\" },\n this.renderStatusLight()\n ),\n \"\\xA0\",\n _react2.default.createElement(\n \"span\",\n { className: \"text\", id: \"uptime\" },\n info.uptime\n )\n )\n ),\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"text\", id: \"logout\" },\n _react2.default.createElement(\n \"a\",\n { className: \"btn btn-logout\", href: \"logout\" },\n \"Log Out\"\n )\n )\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { id: \"no-connection-modal\", className: \"modal\", tabIndex: \"-1\", role: \"dialog\" },\n _react2.default.createElement(\n \"div\",\n { className: 
\"modal-dialog modal-sm\", role: \"document\" },\n _react2.default.createElement(\n \"div\",\n { className: \"modal-content\" },\n _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"h4\",\n null,\n \"Unable to connect to server\"\n ),\n _react2.default.createElement(\n \"p\",\n null,\n this.state.errorText ? \"Error: \" + this.state.errorText : null\n )\n )\n )\n )\n )\n )\n );\n }\n }]);\n\n return PageTitle;\n}(_react2.default.Component);\n\n//# sourceURL=webpack:///./components/PageTitle.jsx?"); +eval("\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.PageTitle = undefined;\n\nvar _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();\n\nvar _react = __webpack_require__(/*! react */ \"./node_modules/react/index.js\");\n\nvar _react2 = _interopRequireDefault(_react);\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return call && (typeof call === \"object\" || typeof call === \"function\") ? call : self; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function, not \" + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n\nvar PageTitle = exports.PageTitle = function (_React$Component) {\n _inherits(PageTitle, _React$Component);\n\n function PageTitle(props) {\n _classCallCheck(this, PageTitle);\n\n var _this = _possibleConstructorReturn(this, (PageTitle.__proto__ || Object.getPrototypeOf(PageTitle)).call(this, props));\n\n _this.state = {\n noConnection: false,\n lightShown: false,\n info: null,\n lastSuccess: Date.now(),\n modalShown: false,\n errorText: null\n };\n return _this;\n }\n\n 
_createClass(PageTitle, [{\n key: \"refreshLoop\",\n value: function refreshLoop() {\n var _this2 = this;\n\n clearTimeout(this.timeoutId);\n fetch(\"/ui/api/cluster\").then(function (response) {\n if (response.status === 401) {\n location.reload();\n }\n return response.json();\n }).then(function (info) {\n _this2.setState({\n info: info,\n noConnection: false,\n lastSuccess: Date.now(),\n modalShown: false\n });\n //$FlowFixMe$ Bootstrap 3 plugin\n $('#no-connection-modal').modal('hide');\n _this2.resetTimer();\n }).catch(function (fail) {\n _this2.setState({\n noConnection: true,\n lightShown: !_this2.state.lightShown,\n errorText: fail\n });\n _this2.resetTimer();\n\n if (!_this2.state.modalShown && (fail || Date.now() - _this2.state.lastSuccess > 30 * 1000)) {\n //$FlowFixMe$ Bootstrap 3 plugin\n $('#no-connection-modal').modal();\n _this2.setState({ modalShown: true });\n }\n });\n }\n }, {\n key: \"resetTimer\",\n value: function resetTimer() {\n clearTimeout(this.timeoutId);\n this.timeoutId = setTimeout(this.refreshLoop.bind(this), 1000);\n }\n }, {\n key: \"componentDidMount\",\n value: function componentDidMount() {\n this.refreshLoop.bind(this)();\n }\n }, {\n key: \"renderStatusLight\",\n value: function renderStatusLight() {\n if (this.state.noConnection) {\n if (this.state.lightShown) {\n return _react2.default.createElement(\"span\", { className: \"status-light status-light-red\", id: \"status-indicator\" });\n } else {\n return _react2.default.createElement(\"span\", { className: \"status-light\", id: \"status-indicator\" });\n }\n }\n return _react2.default.createElement(\"span\", { className: \"status-light status-light-green\", id: \"status-indicator\" });\n }\n }, {\n key: \"render\",\n value: function render() {\n var info = this.state.info;\n if (!info) {\n return null;\n }\n\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(\n \"nav\",\n { className: \"navbar\" },\n _react2.default.createElement(\n 
\"div\",\n { className: \"container-fluid\" },\n _react2.default.createElement(\n \"div\",\n { className: \"navbar-header\" },\n _react2.default.createElement(\n \"table\",\n null,\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"a\",\n { href: \"/ui/\" },\n _react2.default.createElement(\"img\", { src: \"assets/logo.png\" })\n )\n ),\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-brand\" },\n this.props.title\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { id: \"navbar\", className: \"navbar-collapse collapse\" },\n _react2.default.createElement(\n \"ul\",\n { className: \"nav navbar-nav navbar-right\" },\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"uppercase\" },\n \"Version\"\n ),\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"span\",\n { className: \"text uppercase\", id: \"version-number\" },\n info.nodeVersion.version\n )\n )\n ),\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"uppercase\" },\n \"Environment\"\n ),\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"span\",\n { className: \"text uppercase\", id: \"environment\" },\n info.environment\n )\n )\n ),\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"uppercase\" },\n \"Uptime\"\n ),\n _react2.default.createElement(\"br\", null),\n 
_react2.default.createElement(\n \"span\",\n { \"data-toggle\": \"tooltip\", \"data-placement\": \"bottom\", title: \"Connection status\" },\n this.renderStatusLight()\n ),\n \"\\xA0\",\n _react2.default.createElement(\n \"span\",\n { className: \"text\", id: \"uptime\" },\n info.uptime\n )\n )\n ),\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"text\", id: \"logout\" },\n _react2.default.createElement(\n \"a\",\n { className: \"btn btn-logout\", href: \"logout\" },\n \"Log Out\"\n )\n )\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { id: \"no-connection-modal\", className: \"modal\", tabIndex: \"-1\", role: \"dialog\" },\n _react2.default.createElement(\n \"div\",\n { className: \"modal-dialog modal-sm\", role: \"document\" },\n _react2.default.createElement(\n \"div\",\n { className: \"modal-content\" },\n _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"h4\",\n null,\n \"Unable to connect to server\"\n ),\n _react2.default.createElement(\n \"p\",\n null,\n this.state.errorText ? 
\"Error: \" + this.state.errorText : null\n )\n )\n )\n )\n )\n )\n );\n }\n }]);\n\n return PageTitle;\n}(_react2.default.Component);\n\n//# sourceURL=webpack:///./components/PageTitle.jsx?"); /***/ }), @@ -118,7 +118,7 @@ eval("\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n}); /***/ (function(module, exports, __webpack_require__) { "use strict"; -eval("\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.QueryList = exports.QueryListItem = undefined;\n\nvar _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();\n\nvar _react = __webpack_require__(/*! react */ \"./node_modules/react/index.js\");\n\nvar _react2 = _interopRequireDefault(_react);\n\nvar _utils = __webpack_require__(/*! ../utils */ \"./utils.js\");\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return call && (typeof call === \"object\" || typeof call === \"function\") ? 
call : self; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function, not \" + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nvar QueryListItem = exports.QueryListItem = function (_React$Component) {\n _inherits(QueryListItem, _React$Component);\n\n function QueryListItem() {\n _classCallCheck(this, QueryListItem);\n\n return _possibleConstructorReturn(this, (QueryListItem.__proto__ || Object.getPrototypeOf(QueryListItem)).apply(this, arguments));\n }\n\n _createClass(QueryListItem, [{\n key: \"render\",\n value: function render() {\n var query = this.props.query;\n var progressBarStyle = { width: (0, _utils.getProgressBarPercentage)(query) + \"%\", backgroundColor: (0, _utils.getQueryStateColor)(query) };\n\n var splitDetails = _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12 tinystat-row\" },\n _react2.default.createElement(\n \"span\",\n { className: \"tinystat\", \"data-toggle\": \"tooltip\", \"data-placement\": \"top\", title: \"Completed splits\" },\n _react2.default.createElement(\"span\", 
{ className: \"glyphicon glyphicon-ok\", style: _utils.GLYPHICON_HIGHLIGHT }),\n \"\\xA0\\xA0\",\n query.queryStats.completedDrivers\n ),\n _react2.default.createElement(\n \"span\",\n { className: \"tinystat\", \"data-toggle\": \"tooltip\", \"data-placement\": \"top\", title: \"Running splits\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-play\", style: _utils.GLYPHICON_HIGHLIGHT }),\n \"\\xA0\\xA0\",\n query.state === \"FINISHED\" || query.state === \"FAILED\" ? 0 : query.queryStats.runningDrivers\n ),\n _react2.default.createElement(\n \"span\",\n { className: \"tinystat\", \"data-toggle\": \"tooltip\", \"data-placement\": \"top\", title: \"Queued splits\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-pause\", style: _utils.GLYPHICON_HIGHLIGHT }),\n \"\\xA0\\xA0\",\n query.state === \"FINISHED\" || query.state === \"FAILED\" ? 0 : query.queryStats.queuedDrivers\n )\n );\n\n var timingDetails = _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12 tinystat-row\" },\n _react2.default.createElement(\n \"span\",\n { className: \"tinystat\", \"data-toggle\": \"tooltip\", \"data-placement\": \"top\", title: \"Wall time spent executing the query (not including queued time)\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-hourglass\", style: _utils.GLYPHICON_HIGHLIGHT }),\n \"\\xA0\\xA0\",\n query.queryStats.executionTime\n ),\n _react2.default.createElement(\n \"span\",\n { className: \"tinystat\", \"data-toggle\": \"tooltip\", \"data-placement\": \"top\", title: \"Total query wall time\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-time\", style: _utils.GLYPHICON_HIGHLIGHT }),\n \"\\xA0\\xA0\",\n query.queryStats.elapsedTime\n ),\n _react2.default.createElement(\n \"span\",\n { className: \"tinystat\", \"data-toggle\": \"tooltip\", \"data-placement\": \"top\", title: \"CPU time spent by this query\" },\n 
_react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-dashboard\", style: _utils.GLYPHICON_HIGHLIGHT }),\n \"\\xA0\\xA0\",\n query.queryStats.totalCpuTime\n )\n );\n\n var memoryDetails = _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12 tinystat-row\" },\n _react2.default.createElement(\n \"span\",\n { className: \"tinystat\", \"data-toggle\": \"tooltip\", \"data-placement\": \"top\", title: \"Current total reserved memory\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-scale\", style: _utils.GLYPHICON_HIGHLIGHT }),\n \"\\xA0\\xA0\",\n (0, _utils.parseAndFormatDataSize)(query.queryStats.totalMemoryReservation)\n ),\n _react2.default.createElement(\n \"span\",\n { className: \"tinystat\", \"data-toggle\": \"tooltip\", \"data-placement\": \"top\", title: \"Peak total memory\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-fire\", style: _utils.GLYPHICON_HIGHLIGHT }),\n \"\\xA0\\xA0\",\n (0, _utils.parseAndFormatDataSize)(query.queryStats.peakTotalMemoryReservation)\n ),\n _react2.default.createElement(\n \"span\",\n { className: \"tinystat\", \"data-toggle\": \"tooltip\", \"data-placement\": \"top\", title: \"Cumulative user memory\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-equalizer\", style: _utils.GLYPHICON_HIGHLIGHT }),\n \"\\xA0\\xA0\",\n (0, _utils.formatDataSizeBytes)(query.queryStats.cumulativeUserMemory / 1000.0)\n )\n );\n\n var user = _react2.default.createElement(\n \"span\",\n null,\n query.session.user\n );\n if (query.session.principal) {\n user = _react2.default.createElement(\n \"span\",\n null,\n query.session.user,\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-lock-inverse\", style: _utils.GLYPHICON_DEFAULT })\n );\n }\n\n return _react2.default.createElement(\n \"div\",\n { className: \"query\" },\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n 
_react2.default.createElement(\n \"div\",\n { className: \"col-xs-4\" },\n _react2.default.createElement(\n \"div\",\n { className: \"row stat-row query-header query-header-queryid\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-9\", \"data-toggle\": \"tooltip\", \"data-placement\": \"bottom\", \"data-trigger\": \"hover\", title: \"Query ID\" },\n _react2.default.createElement(\n \"a\",\n { href: \"query.html?\" + query.queryId, target: \"_blank\" },\n query.queryId\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-3 query-header-timestamp\", \"data-toggle\": \"tooltip\", \"data-placement\": \"bottom\", title: \"Submit time\" },\n _react2.default.createElement(\n \"span\",\n null,\n (0, _utils.formatShortTime)(new Date(Date.parse(query.queryStats.createTime)))\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row stat-row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"span\",\n { \"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"User\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-user\", style: _utils.GLYPHICON_DEFAULT }),\n \"\\xA0\\xA0\",\n _react2.default.createElement(\n \"span\",\n null,\n (0, _utils.truncateString)(user, 35)\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row stat-row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"span\",\n { \"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"Source\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-log-in\", style: _utils.GLYPHICON_DEFAULT }),\n \"\\xA0\\xA0\",\n _react2.default.createElement(\n \"span\",\n null,\n (0, _utils.truncateString)(query.session.source, 35)\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row stat-row\" 
},\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"span\",\n { \"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"Resource Group\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-road\", style: _utils.GLYPHICON_DEFAULT }),\n \"\\xA0\\xA0\",\n _react2.default.createElement(\n \"span\",\n null,\n (0, _utils.truncateString)(query.resourceGroupId ? query.resourceGroupId.join(\".\") : \"n/a\", 35)\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row stat-row\" },\n splitDetails\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row stat-row\" },\n timingDetails\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row stat-row\" },\n memoryDetails\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-8\" },\n _react2.default.createElement(\n \"div\",\n { className: \"row query-header\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12 query-progress-container\" },\n _react2.default.createElement(\n \"div\",\n { className: \"progress\" },\n _react2.default.createElement(\n \"div\",\n { className: \"progress-bar progress-bar-info\", role: \"progressbar\", \"aria-valuenow\": (0, _utils.getProgressBarPercentage)(query), \"aria-valuemin\": \"0\",\n \"aria-valuemax\": \"100\", style: progressBarStyle },\n (0, _utils.getProgressBarTitle)(query)\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row query-row-bottom\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"pre\",\n { className: \"query-snippet\" },\n _react2.default.createElement(\n \"code\",\n { className: \"sql\" },\n QueryListItem.stripQueryTextWhitespace(query.query)\n )\n )\n )\n )\n )\n )\n );\n }\n }], [{\n key: \"stripQueryTextWhitespace\",\n value: function stripQueryTextWhitespace(queryText) {\n var 
lines = queryText.split(\"\\n\");\n var minLeadingWhitespace = -1;\n for (var i = 0; i < lines.length; i++) {\n if (minLeadingWhitespace === 0) {\n break;\n }\n\n if (lines[i].trim().length === 0) {\n continue;\n }\n\n var leadingWhitespace = lines[i].search(/\\S/);\n\n if (leadingWhitespace > -1 && (leadingWhitespace < minLeadingWhitespace || minLeadingWhitespace === -1)) {\n minLeadingWhitespace = leadingWhitespace;\n }\n }\n\n var formattedQueryText = \"\";\n\n for (var _i = 0; _i < lines.length; _i++) {\n var trimmedLine = lines[_i].substring(minLeadingWhitespace).replace(/\\s+$/g, '');\n\n if (trimmedLine.length > 0) {\n formattedQueryText += trimmedLine;\n\n if (_i < lines.length - 1) {\n formattedQueryText += \"\\n\";\n }\n }\n }\n\n return (0, _utils.truncateString)(formattedQueryText, 300);\n }\n }]);\n\n return QueryListItem;\n}(_react2.default.Component);\n\nvar DisplayedQueriesList = function (_React$Component2) {\n _inherits(DisplayedQueriesList, _React$Component2);\n\n function DisplayedQueriesList() {\n _classCallCheck(this, DisplayedQueriesList);\n\n return _possibleConstructorReturn(this, (DisplayedQueriesList.__proto__ || Object.getPrototypeOf(DisplayedQueriesList)).apply(this, arguments));\n }\n\n _createClass(DisplayedQueriesList, [{\n key: \"render\",\n value: function render() {\n var queryNodes = this.props.queries.map(function (query) {\n return _react2.default.createElement(QueryListItem, { key: query.queryId, query: query });\n }.bind(this));\n return _react2.default.createElement(\n \"div\",\n null,\n queryNodes\n );\n }\n }]);\n\n return DisplayedQueriesList;\n}(_react2.default.Component);\n\nvar FILTER_TYPE = {\n RUNNING: function RUNNING(query) {\n return !(query.state === \"QUEUED\" || query.state === \"FINISHED\" || query.state === \"FAILED\");\n },\n QUEUED: function QUEUED(query) {\n return query.state === \"QUEUED\";\n },\n FINISHED: function FINISHED(query) {\n return query.state === \"FINISHED\";\n }\n};\n\nvar SORT_TYPE = {\n 
CREATED: function CREATED(query) {\n return Date.parse(query.queryStats.createTime);\n },\n ELAPSED: function ELAPSED(query) {\n return (0, _utils.parseDuration)(query.queryStats.elapsedTime);\n },\n EXECUTION: function EXECUTION(query) {\n return (0, _utils.parseDuration)(query.queryStats.executionTime);\n },\n CPU: function CPU(query) {\n return (0, _utils.parseDuration)(query.queryStats.totalCpuTime);\n },\n CUMULATIVE_MEMORY: function CUMULATIVE_MEMORY(query) {\n return query.queryStats.cumulativeUserMemory;\n },\n CURRENT_MEMORY: function CURRENT_MEMORY(query) {\n return (0, _utils.parseDataSize)(query.queryStats.userMemoryReservation);\n }\n};\n\nvar ERROR_TYPE = {\n USER_ERROR: function USER_ERROR(query) {\n return query.state === \"FAILED\" && query.errorType === \"USER_ERROR\";\n },\n INTERNAL_ERROR: function INTERNAL_ERROR(query) {\n return query.state === \"FAILED\" && query.errorType === \"INTERNAL_ERROR\";\n },\n INSUFFICIENT_RESOURCES: function INSUFFICIENT_RESOURCES(query) {\n return query.state === \"FAILED\" && query.errorType === \"INSUFFICIENT_RESOURCES\";\n },\n EXTERNAL: function EXTERNAL(query) {\n return query.state === \"FAILED\" && query.errorType === \"EXTERNAL\";\n }\n};\n\nvar SORT_ORDER = {\n ASCENDING: function ASCENDING(value) {\n return value;\n },\n DESCENDING: function DESCENDING(value) {\n return -value;\n }\n};\n\nvar QueryList = exports.QueryList = function (_React$Component3) {\n _inherits(QueryList, _React$Component3);\n\n function QueryList(props) {\n _classCallCheck(this, QueryList);\n\n var _this3 = _possibleConstructorReturn(this, (QueryList.__proto__ || Object.getPrototypeOf(QueryList)).call(this, props));\n\n _this3.state = {\n allQueries: [],\n displayedQueries: [],\n reorderInterval: 5000,\n currentSortType: SORT_TYPE.CREATED,\n currentSortOrder: SORT_ORDER.DESCENDING,\n stateFilters: [FILTER_TYPE.RUNNING, FILTER_TYPE.QUEUED],\n errorTypeFilters: [ERROR_TYPE.INTERNAL_ERROR, ERROR_TYPE.INSUFFICIENT_RESOURCES, 
ERROR_TYPE.EXTERNAL],\n searchString: '',\n maxQueries: 100,\n lastRefresh: Date.now(),\n lastReorder: Date.now(),\n initialized: false\n };\n\n _this3.refreshLoop = _this3.refreshLoop.bind(_this3);\n _this3.handleSearchStringChange = _this3.handleSearchStringChange.bind(_this3);\n _this3.executeSearch = _this3.executeSearch.bind(_this3);\n _this3.handleSortClick = _this3.handleSortClick.bind(_this3);\n return _this3;\n }\n\n _createClass(QueryList, [{\n key: \"sortAndLimitQueries\",\n value: function sortAndLimitQueries(queries, sortType, sortOrder, maxQueries) {\n queries.sort(function (queryA, queryB) {\n return sortOrder(sortType(queryA) - sortType(queryB));\n }, this);\n\n if (maxQueries !== 0 && queries.length > maxQueries) {\n queries.splice(maxQueries, queries.length - maxQueries);\n }\n }\n }, {\n key: \"filterQueries\",\n value: function filterQueries(queries, stateFilters, errorTypeFilters, searchString) {\n var stateFilteredQueries = queries.filter(function (query) {\n for (var i = 0; i < stateFilters.length; i++) {\n if (stateFilters[i](query)) {\n return true;\n }\n }\n for (var _i2 = 0; _i2 < errorTypeFilters.length; _i2++) {\n if (errorTypeFilters[_i2](query)) {\n return true;\n }\n }\n return false;\n });\n\n if (searchString === '') {\n return stateFilteredQueries;\n } else {\n return stateFilteredQueries.filter(function (query) {\n var term = searchString.toLowerCase();\n if (query.queryId.toLowerCase().indexOf(term) !== -1 || (0, _utils.getHumanReadableState)(query).toLowerCase().indexOf(term) !== -1 || query.query.toLowerCase().indexOf(term) !== -1) {\n return true;\n }\n\n if (query.session.user && query.session.user.toLowerCase().indexOf(term) !== -1) {\n return true;\n }\n\n if (query.session.source && query.session.source.toLowerCase().indexOf(term) !== -1) {\n return true;\n }\n\n if (query.resourceGroupId && query.resourceGroupId.join(\".\").toLowerCase().indexOf(term) !== -1) {\n return true;\n }\n }, this);\n }\n }\n }, {\n key: 
\"resetTimer\",\n value: function resetTimer() {\n clearTimeout(this.timeoutId);\n // stop refreshing when query finishes or fails\n if (this.state.query === null || !this.state.ended) {\n this.timeoutId = setTimeout(this.refreshLoop, 1000);\n }\n }\n }, {\n key: \"refreshLoop\",\n value: function refreshLoop() {\n clearTimeout(this.timeoutId); // to stop multiple series of refreshLoop from going on simultaneously\n clearTimeout(this.searchTimeoutId);\n\n $.get('/ui/api/query', function (queryList) {\n var queryMap = queryList.reduce(function (map, query) {\n map[query.queryId] = query;\n return map;\n }, {});\n\n var updatedQueries = [];\n this.state.displayedQueries.forEach(function (oldQuery) {\n if (oldQuery.queryId in queryMap) {\n updatedQueries.push(queryMap[oldQuery.queryId]);\n queryMap[oldQuery.queryId] = false;\n }\n });\n\n var newQueries = [];\n for (var queryId in queryMap) {\n if (queryMap[queryId]) {\n newQueries.push(queryMap[queryId]);\n }\n }\n newQueries = this.filterQueries(newQueries, this.state.stateFilters, this.state.errorTypeFilters, this.state.searchString);\n\n var lastRefresh = Date.now();\n var lastReorder = this.state.lastReorder;\n\n if (this.state.reorderInterval !== 0 && lastRefresh - lastReorder >= this.state.reorderInterval) {\n updatedQueries = this.filterQueries(updatedQueries, this.state.stateFilters, this.state.errorTypeFilters, this.state.searchString);\n updatedQueries = updatedQueries.concat(newQueries);\n this.sortAndLimitQueries(updatedQueries, this.state.currentSortType, this.state.currentSortOrder, 0);\n lastReorder = Date.now();\n } else {\n this.sortAndLimitQueries(newQueries, this.state.currentSortType, this.state.currentSortOrder, 0);\n updatedQueries = updatedQueries.concat(newQueries);\n }\n\n if (this.state.maxQueries !== 0 && updatedQueries.length > this.state.maxQueries) {\n updatedQueries.splice(this.state.maxQueries, updatedQueries.length - this.state.maxQueries);\n }\n\n this.setState({\n allQueries: 
queryList,\n displayedQueries: updatedQueries,\n lastRefresh: lastRefresh,\n lastReorder: lastReorder,\n initialized: true\n });\n this.resetTimer();\n }.bind(this)).error(function () {\n this.setState({\n initialized: true\n });\n this.resetTimer();\n }.bind(this));\n }\n }, {\n key: \"componentDidMount\",\n value: function componentDidMount() {\n this.refreshLoop();\n }\n }, {\n key: \"handleSearchStringChange\",\n value: function handleSearchStringChange(event) {\n var newSearchString = event.target.value;\n clearTimeout(this.searchTimeoutId);\n\n this.setState({\n searchString: newSearchString\n });\n\n this.searchTimeoutId = setTimeout(this.executeSearch, 200);\n }\n }, {\n key: \"executeSearch\",\n value: function executeSearch() {\n clearTimeout(this.searchTimeoutId);\n\n var newDisplayedQueries = this.filterQueries(this.state.allQueries, this.state.stateFilters, this.state.errorTypeFilters, this.state.searchString);\n this.sortAndLimitQueries(newDisplayedQueries, this.state.currentSortType, this.state.currentSortOrder, this.state.maxQueries);\n\n this.setState({\n displayedQueries: newDisplayedQueries\n });\n }\n }, {\n key: \"renderMaxQueriesListItem\",\n value: function renderMaxQueriesListItem(maxQueries, maxQueriesText) {\n return _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"a\",\n { href: \"#\", className: this.state.maxQueries === maxQueries ? 
\"selected\" : \"\", onClick: this.handleMaxQueriesClick.bind(this, maxQueries) },\n maxQueriesText\n )\n );\n }\n }, {\n key: \"handleMaxQueriesClick\",\n value: function handleMaxQueriesClick(newMaxQueries) {\n var filteredQueries = this.filterQueries(this.state.allQueries, this.state.stateFilters, this.state.errorTypeFilters, this.state.searchString);\n this.sortAndLimitQueries(filteredQueries, this.state.currentSortType, this.state.currentSortOrder, newMaxQueries);\n\n this.setState({\n maxQueries: newMaxQueries,\n displayedQueries: filteredQueries\n });\n }\n }, {\n key: \"renderReorderListItem\",\n value: function renderReorderListItem(interval, intervalText) {\n return _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"a\",\n { href: \"#\", className: this.state.reorderInterval === interval ? \"selected\" : \"\", onClick: this.handleReorderClick.bind(this, interval) },\n intervalText\n )\n );\n }\n }, {\n key: \"handleReorderClick\",\n value: function handleReorderClick(interval) {\n if (this.state.reorderInterval !== interval) {\n this.setState({\n reorderInterval: interval\n });\n }\n }\n }, {\n key: \"renderSortListItem\",\n value: function renderSortListItem(sortType, sortText) {\n if (this.state.currentSortType === sortType) {\n var directionArrow = this.state.currentSortOrder === SORT_ORDER.ASCENDING ? 
_react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-triangle-top\" }) : _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-triangle-bottom\" });\n return _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"a\",\n { href: \"#\", className: \"selected\", onClick: this.handleSortClick.bind(this, sortType) },\n sortText,\n \" \",\n directionArrow\n )\n );\n } else {\n return _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"a\",\n { href: \"#\", onClick: this.handleSortClick.bind(this, sortType) },\n sortText\n )\n );\n }\n }\n }, {\n key: \"handleSortClick\",\n value: function handleSortClick(sortType) {\n var newSortType = sortType;\n var newSortOrder = SORT_ORDER.DESCENDING;\n\n if (this.state.currentSortType === sortType && this.state.currentSortOrder === SORT_ORDER.DESCENDING) {\n newSortOrder = SORT_ORDER.ASCENDING;\n }\n\n var newDisplayedQueries = this.filterQueries(this.state.allQueries, this.state.stateFilters, this.state.errorTypeFilters, this.state.searchString);\n this.sortAndLimitQueries(newDisplayedQueries, newSortType, newSortOrder, this.state.maxQueries);\n\n this.setState({\n displayedQueries: newDisplayedQueries,\n currentSortType: newSortType,\n currentSortOrder: newSortOrder\n });\n }\n }, {\n key: \"renderFilterButton\",\n value: function renderFilterButton(filterType, filterText) {\n var checkmarkStyle = { color: '#57aac7' };\n var classNames = \"btn btn-sm btn-info style-check\";\n if (this.state.stateFilters.indexOf(filterType) > -1) {\n classNames += \" active\";\n checkmarkStyle = { color: '#ffffff' };\n }\n\n return _react2.default.createElement(\n \"button\",\n { type: \"button\", className: classNames, onClick: this.handleStateFilterClick.bind(this, filterType) },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-ok\", style: checkmarkStyle }),\n \"\\xA0\",\n filterText\n );\n }\n }, {\n key: 
\"handleStateFilterClick\",\n value: function handleStateFilterClick(filter) {\n var newFilters = this.state.stateFilters.slice();\n if (this.state.stateFilters.indexOf(filter) > -1) {\n newFilters.splice(newFilters.indexOf(filter), 1);\n } else {\n newFilters.push(filter);\n }\n\n var filteredQueries = this.filterQueries(this.state.allQueries, newFilters, this.state.errorTypeFilters, this.state.searchString);\n this.sortAndLimitQueries(filteredQueries, this.state.currentSortType, this.state.currentSortOrder);\n\n this.setState({\n stateFilters: newFilters,\n displayedQueries: filteredQueries\n });\n }\n }, {\n key: \"renderErrorTypeListItem\",\n value: function renderErrorTypeListItem(errorType, errorTypeText) {\n var checkmarkStyle = { color: '#ffffff' };\n if (this.state.errorTypeFilters.indexOf(errorType) > -1) {\n checkmarkStyle = _utils.GLYPHICON_HIGHLIGHT;\n }\n return _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"a\",\n { href: \"#\", onClick: this.handleErrorTypeFilterClick.bind(this, errorType) },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-ok\", style: checkmarkStyle }),\n \"\\xA0\",\n errorTypeText\n )\n );\n }\n }, {\n key: \"handleErrorTypeFilterClick\",\n value: function handleErrorTypeFilterClick(errorType) {\n var newFilters = this.state.errorTypeFilters.slice();\n if (this.state.errorTypeFilters.indexOf(errorType) > -1) {\n newFilters.splice(newFilters.indexOf(errorType), 1);\n } else {\n newFilters.push(errorType);\n }\n\n var filteredQueries = this.filterQueries(this.state.allQueries, this.state.stateFilters, newFilters, this.state.searchString);\n this.sortAndLimitQueries(filteredQueries, this.state.currentSortType, this.state.currentSortOrder);\n\n this.setState({\n errorTypeFilters: newFilters,\n displayedQueries: filteredQueries\n });\n }\n }, {\n key: \"render\",\n value: function render() {\n var queryList = _react2.default.createElement(DisplayedQueriesList, { 
queries: this.state.displayedQueries });\n if (this.state.displayedQueries === null || this.state.displayedQueries.length === 0) {\n var label = _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading...\"\n );\n if (this.state.initialized) {\n if (this.state.allQueries === null || this.state.allQueries.length === 0) {\n label = \"No queries\";\n } else {\n label = \"No queries matched filters\";\n }\n }\n queryList = _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h4\",\n null,\n label\n )\n )\n );\n }\n\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(\n \"div\",\n { className: \"row toolbar-row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12 toolbar-col\" },\n _react2.default.createElement(\n \"div\",\n { className: \"input-group input-group-sm\" },\n _react2.default.createElement(\"input\", { type: \"text\", className: \"form-control form-control-small search-bar\", placeholder: \"User, source, query ID, query state, resource group, or query text\",\n onChange: this.handleSearchStringChange, value: this.state.searchString }),\n _react2.default.createElement(\n \"span\",\n { className: \"input-group-addon filter-addon\" },\n \"State:\"\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"input-group-btn\" },\n this.renderFilterButton(FILTER_TYPE.RUNNING, \"Running\"),\n this.renderFilterButton(FILTER_TYPE.QUEUED, \"Queued\"),\n this.renderFilterButton(FILTER_TYPE.FINISHED, \"Finished\"),\n _react2.default.createElement(\n \"button\",\n { type: \"button\", id: \"error-type-dropdown\", className: \"btn btn-default dropdown-toggle\", \"data-toggle\": \"dropdown\", \"aria-haspopup\": \"true\", \"aria-expanded\": \"false\" },\n \"Failed \",\n _react2.default.createElement(\"span\", { className: \"caret\" })\n ),\n 
_react2.default.createElement(\n \"ul\",\n { className: \"dropdown-menu error-type-dropdown-menu\" },\n this.renderErrorTypeListItem(ERROR_TYPE.INTERNAL_ERROR, \"Internal Error\"),\n this.renderErrorTypeListItem(ERROR_TYPE.EXTERNAL, \"External Error\"),\n this.renderErrorTypeListItem(ERROR_TYPE.INSUFFICIENT_RESOURCES, \"Resources Error\"),\n this.renderErrorTypeListItem(ERROR_TYPE.USER_ERROR, \"User Error\")\n )\n ),\n \"\\xA0\",\n _react2.default.createElement(\n \"div\",\n { className: \"input-group-btn\" },\n _react2.default.createElement(\n \"button\",\n { type: \"button\", className: \"btn btn-default dropdown-toggle\", \"data-toggle\": \"dropdown\", \"aria-haspopup\": \"true\", \"aria-expanded\": \"false\" },\n \"Sort \",\n _react2.default.createElement(\"span\", { className: \"caret\" })\n ),\n _react2.default.createElement(\n \"ul\",\n { className: \"dropdown-menu\" },\n this.renderSortListItem(SORT_TYPE.CREATED, \"Creation Time\"),\n this.renderSortListItem(SORT_TYPE.ELAPSED, \"Elapsed Time\"),\n this.renderSortListItem(SORT_TYPE.CPU, \"CPU Time\"),\n this.renderSortListItem(SORT_TYPE.EXECUTION, \"Execution Time\"),\n this.renderSortListItem(SORT_TYPE.CURRENT_MEMORY, \"Current Memory\"),\n this.renderSortListItem(SORT_TYPE.CUMULATIVE_MEMORY, \"Cumulative User Memory\")\n )\n ),\n \"\\xA0\",\n _react2.default.createElement(\n \"div\",\n { className: \"input-group-btn\" },\n _react2.default.createElement(\n \"button\",\n { type: \"button\", className: \"btn btn-default dropdown-toggle\", \"data-toggle\": \"dropdown\", \"aria-haspopup\": \"true\", \"aria-expanded\": \"false\" },\n \"Reorder Interval \",\n _react2.default.createElement(\"span\", { className: \"caret\" })\n ),\n _react2.default.createElement(\n \"ul\",\n { className: \"dropdown-menu\" },\n this.renderReorderListItem(1000, \"1s\"),\n this.renderReorderListItem(5000, \"5s\"),\n this.renderReorderListItem(10000, \"10s\"),\n this.renderReorderListItem(30000, \"30s\"),\n 
_react2.default.createElement(\"li\", { role: \"separator\", className: \"divider\" }),\n this.renderReorderListItem(0, \"Off\")\n )\n ),\n \"\\xA0\",\n _react2.default.createElement(\n \"div\",\n { className: \"input-group-btn\" },\n _react2.default.createElement(\n \"button\",\n { type: \"button\", className: \"btn btn-default dropdown-toggle\", \"data-toggle\": \"dropdown\", \"aria-haspopup\": \"true\", \"aria-expanded\": \"false\" },\n \"Show \",\n _react2.default.createElement(\"span\", { className: \"caret\" })\n ),\n _react2.default.createElement(\n \"ul\",\n { className: \"dropdown-menu\" },\n this.renderMaxQueriesListItem(20, \"20 queries\"),\n this.renderMaxQueriesListItem(50, \"50 queries\"),\n this.renderMaxQueriesListItem(100, \"100 queries\"),\n _react2.default.createElement(\"li\", { role: \"separator\", className: \"divider\" }),\n this.renderMaxQueriesListItem(0, \"All queries\")\n )\n )\n )\n )\n ),\n queryList\n );\n }\n }]);\n\n return QueryList;\n}(_react2.default.Component);\n\n//# sourceURL=webpack:///./components/QueryList.jsx?"); +eval("\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.QueryList = exports.QueryListItem = undefined;\n\nvar _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();\n\nvar _react = __webpack_require__(/*! react */ \"./node_modules/react/index.js\");\n\nvar _react2 = _interopRequireDefault(_react);\n\nvar _utils = __webpack_require__(/*! 
../utils */ \"./utils.js\");\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return call && (typeof call === \"object\" || typeof call === \"function\") ? call : self; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function, not \" + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nvar QueryListItem = exports.QueryListItem = function (_React$Component) {\n _inherits(QueryListItem, _React$Component);\n\n function QueryListItem() {\n _classCallCheck(this, QueryListItem);\n\n return _possibleConstructorReturn(this, (QueryListItem.__proto__ || Object.getPrototypeOf(QueryListItem)).apply(this, arguments));\n }\n\n _createClass(QueryListItem, [{\n key: 
\"render\",\n value: function render() {\n var query = this.props.query;\n var progressBarStyle = { width: (0, _utils.getProgressBarPercentage)(query) + \"%\", backgroundColor: (0, _utils.getQueryStateColor)(query) };\n\n var splitDetails = _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12 tinystat-row\" },\n _react2.default.createElement(\n \"span\",\n { className: \"tinystat\", \"data-toggle\": \"tooltip\", \"data-placement\": \"top\", title: \"Completed splits\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-ok\", style: _utils.GLYPHICON_HIGHLIGHT }),\n \"\\xA0\\xA0\",\n query.queryStats.completedDrivers\n ),\n _react2.default.createElement(\n \"span\",\n { className: \"tinystat\", \"data-toggle\": \"tooltip\", \"data-placement\": \"top\", title: \"Running splits\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-play\", style: _utils.GLYPHICON_HIGHLIGHT }),\n \"\\xA0\\xA0\",\n query.state === \"FINISHED\" || query.state === \"FAILED\" ? 0 : query.queryStats.runningDrivers\n ),\n _react2.default.createElement(\n \"span\",\n { className: \"tinystat\", \"data-toggle\": \"tooltip\", \"data-placement\": \"top\", title: \"Queued splits\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-pause\", style: _utils.GLYPHICON_HIGHLIGHT }),\n \"\\xA0\\xA0\",\n query.state === \"FINISHED\" || query.state === \"FAILED\" ? 
0 : query.queryStats.queuedDrivers\n )\n );\n\n var timingDetails = _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12 tinystat-row\" },\n _react2.default.createElement(\n \"span\",\n { className: \"tinystat\", \"data-toggle\": \"tooltip\", \"data-placement\": \"top\", title: \"Wall time spent executing the query (not including queued time)\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-hourglass\", style: _utils.GLYPHICON_HIGHLIGHT }),\n \"\\xA0\\xA0\",\n query.queryStats.executionTime\n ),\n _react2.default.createElement(\n \"span\",\n { className: \"tinystat\", \"data-toggle\": \"tooltip\", \"data-placement\": \"top\", title: \"Total query wall time\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-time\", style: _utils.GLYPHICON_HIGHLIGHT }),\n \"\\xA0\\xA0\",\n query.queryStats.elapsedTime\n ),\n _react2.default.createElement(\n \"span\",\n { className: \"tinystat\", \"data-toggle\": \"tooltip\", \"data-placement\": \"top\", title: \"CPU time spent by this query\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-dashboard\", style: _utils.GLYPHICON_HIGHLIGHT }),\n \"\\xA0\\xA0\",\n query.queryStats.totalCpuTime\n )\n );\n\n var memoryDetails = _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12 tinystat-row\" },\n _react2.default.createElement(\n \"span\",\n { className: \"tinystat\", \"data-toggle\": \"tooltip\", \"data-placement\": \"top\", title: \"Current total reserved memory\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-scale\", style: _utils.GLYPHICON_HIGHLIGHT }),\n \"\\xA0\\xA0\",\n (0, _utils.parseAndFormatDataSize)(query.queryStats.totalMemoryReservation)\n ),\n _react2.default.createElement(\n \"span\",\n { className: \"tinystat\", \"data-toggle\": \"tooltip\", \"data-placement\": \"top\", title: \"Peak total memory\" },\n _react2.default.createElement(\"span\", { className: 
\"glyphicon glyphicon-fire\", style: _utils.GLYPHICON_HIGHLIGHT }),\n \"\\xA0\\xA0\",\n (0, _utils.parseAndFormatDataSize)(query.queryStats.peakTotalMemoryReservation)\n ),\n _react2.default.createElement(\n \"span\",\n { className: \"tinystat\", \"data-toggle\": \"tooltip\", \"data-placement\": \"top\", title: \"Cumulative user memory\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-equalizer\", style: _utils.GLYPHICON_HIGHLIGHT }),\n \"\\xA0\\xA0\",\n (0, _utils.formatDataSizeBytes)(query.queryStats.cumulativeUserMemory / 1000.0)\n )\n );\n\n var user = _react2.default.createElement(\n \"span\",\n null,\n query.session.user\n );\n if (query.session.principal) {\n user = _react2.default.createElement(\n \"span\",\n null,\n query.session.user,\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-lock-inverse\", style: _utils.GLYPHICON_DEFAULT })\n );\n }\n\n return _react2.default.createElement(\n \"div\",\n { className: \"query\" },\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-4\" },\n _react2.default.createElement(\n \"div\",\n { className: \"row stat-row query-header query-header-queryid\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-9\", \"data-toggle\": \"tooltip\", \"data-placement\": \"bottom\", \"data-trigger\": \"hover\", title: \"Query ID\" },\n _react2.default.createElement(\n \"a\",\n { href: \"query.html?\" + query.queryId, target: \"_blank\" },\n query.queryId\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-3 query-header-timestamp\", \"data-toggle\": \"tooltip\", \"data-placement\": \"bottom\", title: \"Submit time\" },\n _react2.default.createElement(\n \"span\",\n null,\n (0, _utils.formatShortTime)(new Date(Date.parse(query.queryStats.createTime)))\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row stat-row\" },\n 
_react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"span\",\n { \"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"User\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-user\", style: _utils.GLYPHICON_DEFAULT }),\n \"\\xA0\\xA0\",\n _react2.default.createElement(\n \"span\",\n null,\n (0, _utils.truncateString)(user, 35)\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row stat-row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"span\",\n { \"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"Source\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-log-in\", style: _utils.GLYPHICON_DEFAULT }),\n \"\\xA0\\xA0\",\n _react2.default.createElement(\n \"span\",\n null,\n (0, _utils.truncateString)(query.session.source, 35)\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row stat-row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"span\",\n { \"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"Resource Group\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-road\", style: _utils.GLYPHICON_DEFAULT }),\n \"\\xA0\\xA0\",\n _react2.default.createElement(\n \"span\",\n null,\n (0, _utils.truncateString)(query.resourceGroupId ? 
query.resourceGroupId.join(\".\") : \"n/a\", 35)\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row stat-row\" },\n splitDetails\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row stat-row\" },\n timingDetails\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row stat-row\" },\n memoryDetails\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-8\" },\n _react2.default.createElement(\n \"div\",\n { className: \"row query-header\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12 query-progress-container\" },\n _react2.default.createElement(\n \"div\",\n { className: \"progress\" },\n _react2.default.createElement(\n \"div\",\n { className: \"progress-bar progress-bar-info\", role: \"progressbar\", \"aria-valuenow\": (0, _utils.getProgressBarPercentage)(query), \"aria-valuemin\": \"0\",\n \"aria-valuemax\": \"100\", style: progressBarStyle },\n (0, _utils.getProgressBarTitle)(query)\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row query-row-bottom\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"pre\",\n { className: \"query-snippet\" },\n _react2.default.createElement(\n \"code\",\n { className: \"sql\" },\n QueryListItem.stripQueryTextWhitespace(query.query)\n )\n )\n )\n )\n )\n )\n );\n }\n }], [{\n key: \"stripQueryTextWhitespace\",\n value: function stripQueryTextWhitespace(queryText) {\n var lines = queryText.split(\"\\n\");\n var minLeadingWhitespace = -1;\n for (var i = 0; i < lines.length; i++) {\n if (minLeadingWhitespace === 0) {\n break;\n }\n\n if (lines[i].trim().length === 0) {\n continue;\n }\n\n var leadingWhitespace = lines[i].search(/\\S/);\n\n if (leadingWhitespace > -1 && (leadingWhitespace < minLeadingWhitespace || minLeadingWhitespace === -1)) {\n minLeadingWhitespace = leadingWhitespace;\n }\n }\n\n var formattedQueryText = 
\"\";\n\n for (var _i = 0; _i < lines.length; _i++) {\n var trimmedLine = lines[_i].substring(minLeadingWhitespace).replace(/\\s+$/g, '');\n\n if (trimmedLine.length > 0) {\n formattedQueryText += trimmedLine;\n\n if (_i < lines.length - 1) {\n formattedQueryText += \"\\n\";\n }\n }\n }\n\n return (0, _utils.truncateString)(formattedQueryText, 300);\n }\n }]);\n\n return QueryListItem;\n}(_react2.default.Component);\n\nvar DisplayedQueriesList = function (_React$Component2) {\n _inherits(DisplayedQueriesList, _React$Component2);\n\n function DisplayedQueriesList() {\n _classCallCheck(this, DisplayedQueriesList);\n\n return _possibleConstructorReturn(this, (DisplayedQueriesList.__proto__ || Object.getPrototypeOf(DisplayedQueriesList)).apply(this, arguments));\n }\n\n _createClass(DisplayedQueriesList, [{\n key: \"render\",\n value: function render() {\n var queryNodes = this.props.queries.map(function (query) {\n return _react2.default.createElement(QueryListItem, { key: query.queryId, query: query });\n }.bind(this));\n return _react2.default.createElement(\n \"div\",\n null,\n queryNodes\n );\n }\n }]);\n\n return DisplayedQueriesList;\n}(_react2.default.Component);\n\nvar FILTER_TYPE = {\n RUNNING: function RUNNING(query) {\n return !(query.state === \"QUEUED\" || query.state === \"FINISHED\" || query.state === \"FAILED\");\n },\n QUEUED: function QUEUED(query) {\n return query.state === \"QUEUED\";\n },\n FINISHED: function FINISHED(query) {\n return query.state === \"FINISHED\";\n }\n};\n\nvar SORT_TYPE = {\n CREATED: function CREATED(query) {\n return Date.parse(query.queryStats.createTime);\n },\n ELAPSED: function ELAPSED(query) {\n return (0, _utils.parseDuration)(query.queryStats.elapsedTime);\n },\n EXECUTION: function EXECUTION(query) {\n return (0, _utils.parseDuration)(query.queryStats.executionTime);\n },\n CPU: function CPU(query) {\n return (0, _utils.parseDuration)(query.queryStats.totalCpuTime);\n },\n CUMULATIVE_MEMORY: function 
CUMULATIVE_MEMORY(query) {\n return query.queryStats.cumulativeUserMemory;\n },\n CURRENT_MEMORY: function CURRENT_MEMORY(query) {\n return (0, _utils.parseDataSize)(query.queryStats.userMemoryReservation);\n }\n};\n\nvar ERROR_TYPE = {\n USER_ERROR: function USER_ERROR(query) {\n return query.state === \"FAILED\" && query.errorType === \"USER_ERROR\";\n },\n INTERNAL_ERROR: function INTERNAL_ERROR(query) {\n return query.state === \"FAILED\" && query.errorType === \"INTERNAL_ERROR\";\n },\n INSUFFICIENT_RESOURCES: function INSUFFICIENT_RESOURCES(query) {\n return query.state === \"FAILED\" && query.errorType === \"INSUFFICIENT_RESOURCES\";\n },\n EXTERNAL: function EXTERNAL(query) {\n return query.state === \"FAILED\" && query.errorType === \"EXTERNAL\";\n }\n};\n\nvar SORT_ORDER = {\n ASCENDING: function ASCENDING(value) {\n return value;\n },\n DESCENDING: function DESCENDING(value) {\n return -value;\n }\n};\n\nvar QueryList = exports.QueryList = function (_React$Component3) {\n _inherits(QueryList, _React$Component3);\n\n function QueryList(props) {\n _classCallCheck(this, QueryList);\n\n var _this3 = _possibleConstructorReturn(this, (QueryList.__proto__ || Object.getPrototypeOf(QueryList)).call(this, props));\n\n _this3.state = {\n allQueries: [],\n displayedQueries: [],\n reorderInterval: 5000,\n currentSortType: SORT_TYPE.CREATED,\n currentSortOrder: SORT_ORDER.DESCENDING,\n stateFilters: [FILTER_TYPE.RUNNING, FILTER_TYPE.QUEUED],\n errorTypeFilters: [ERROR_TYPE.INTERNAL_ERROR, ERROR_TYPE.INSUFFICIENT_RESOURCES, ERROR_TYPE.EXTERNAL],\n searchString: '',\n maxQueries: 100,\n lastRefresh: Date.now(),\n lastReorder: Date.now(),\n initialized: false\n };\n\n _this3.refreshLoop = _this3.refreshLoop.bind(_this3);\n _this3.handleSearchStringChange = _this3.handleSearchStringChange.bind(_this3);\n _this3.executeSearch = _this3.executeSearch.bind(_this3);\n _this3.handleSortClick = _this3.handleSortClick.bind(_this3);\n return _this3;\n }\n\n _createClass(QueryList, 
[{\n key: \"sortAndLimitQueries\",\n value: function sortAndLimitQueries(queries, sortType, sortOrder, maxQueries) {\n queries.sort(function (queryA, queryB) {\n return sortOrder(sortType(queryA) - sortType(queryB));\n }, this);\n\n if (maxQueries !== 0 && queries.length > maxQueries) {\n queries.splice(maxQueries, queries.length - maxQueries);\n }\n }\n }, {\n key: \"filterQueries\",\n value: function filterQueries(queries, stateFilters, errorTypeFilters, searchString) {\n var stateFilteredQueries = queries.filter(function (query) {\n for (var i = 0; i < stateFilters.length; i++) {\n if (stateFilters[i](query)) {\n return true;\n }\n }\n for (var _i2 = 0; _i2 < errorTypeFilters.length; _i2++) {\n if (errorTypeFilters[_i2](query)) {\n return true;\n }\n }\n return false;\n });\n\n if (searchString === '') {\n return stateFilteredQueries;\n } else {\n return stateFilteredQueries.filter(function (query) {\n var term = searchString.toLowerCase();\n if (query.queryId.toLowerCase().indexOf(term) !== -1 || (0, _utils.getHumanReadableState)(query).toLowerCase().indexOf(term) !== -1 || query.query.toLowerCase().indexOf(term) !== -1) {\n return true;\n }\n\n if (query.session.user && query.session.user.toLowerCase().indexOf(term) !== -1) {\n return true;\n }\n\n if (query.session.source && query.session.source.toLowerCase().indexOf(term) !== -1) {\n return true;\n }\n\n if (query.resourceGroupId && query.resourceGroupId.join(\".\").toLowerCase().indexOf(term) !== -1) {\n return true;\n }\n }, this);\n }\n }\n }, {\n key: \"resetTimer\",\n value: function resetTimer() {\n clearTimeout(this.timeoutId);\n // stop refreshing when query finishes or fails\n if (this.state.query === null || !this.state.ended) {\n this.timeoutId = setTimeout(this.refreshLoop, 1000);\n }\n }\n }, {\n key: \"refreshLoop\",\n value: function refreshLoop() {\n clearTimeout(this.timeoutId); // to stop multiple series of refreshLoop from going on simultaneously\n clearTimeout(this.searchTimeoutId);\n\n 
$.get('/ui/api/query', function (queryList) {\n var queryMap = queryList.reduce(function (map, query) {\n map[query.queryId] = query;\n return map;\n }, {});\n\n var updatedQueries = [];\n this.state.displayedQueries.forEach(function (oldQuery) {\n if (oldQuery.queryId in queryMap) {\n updatedQueries.push(queryMap[oldQuery.queryId]);\n queryMap[oldQuery.queryId] = false;\n }\n });\n\n var newQueries = [];\n for (var queryId in queryMap) {\n if (queryMap[queryId]) {\n newQueries.push(queryMap[queryId]);\n }\n }\n newQueries = this.filterQueries(newQueries, this.state.stateFilters, this.state.errorTypeFilters, this.state.searchString);\n\n var lastRefresh = Date.now();\n var lastReorder = this.state.lastReorder;\n\n if (this.state.reorderInterval !== 0 && lastRefresh - lastReorder >= this.state.reorderInterval) {\n updatedQueries = this.filterQueries(updatedQueries, this.state.stateFilters, this.state.errorTypeFilters, this.state.searchString);\n updatedQueries = updatedQueries.concat(newQueries);\n this.sortAndLimitQueries(updatedQueries, this.state.currentSortType, this.state.currentSortOrder, 0);\n lastReorder = Date.now();\n } else {\n this.sortAndLimitQueries(newQueries, this.state.currentSortType, this.state.currentSortOrder, 0);\n updatedQueries = updatedQueries.concat(newQueries);\n }\n\n if (this.state.maxQueries !== 0 && updatedQueries.length > this.state.maxQueries) {\n updatedQueries.splice(this.state.maxQueries, updatedQueries.length - this.state.maxQueries);\n }\n\n this.setState({\n allQueries: queryList,\n displayedQueries: updatedQueries,\n lastRefresh: lastRefresh,\n lastReorder: lastReorder,\n initialized: true\n });\n this.resetTimer();\n }.bind(this)).fail(function () {\n this.setState({\n initialized: true\n });\n this.resetTimer();\n }.bind(this));\n }\n }, {\n key: \"componentDidMount\",\n value: function componentDidMount() {\n this.refreshLoop();\n }\n }, {\n key: \"handleSearchStringChange\",\n value: function 
handleSearchStringChange(event) {\n var newSearchString = event.target.value;\n clearTimeout(this.searchTimeoutId);\n\n this.setState({\n searchString: newSearchString\n });\n\n this.searchTimeoutId = setTimeout(this.executeSearch, 200);\n }\n }, {\n key: \"executeSearch\",\n value: function executeSearch() {\n clearTimeout(this.searchTimeoutId);\n\n var newDisplayedQueries = this.filterQueries(this.state.allQueries, this.state.stateFilters, this.state.errorTypeFilters, this.state.searchString);\n this.sortAndLimitQueries(newDisplayedQueries, this.state.currentSortType, this.state.currentSortOrder, this.state.maxQueries);\n\n this.setState({\n displayedQueries: newDisplayedQueries\n });\n }\n }, {\n key: \"renderMaxQueriesListItem\",\n value: function renderMaxQueriesListItem(maxQueries, maxQueriesText) {\n return _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"a\",\n { href: \"#\", className: this.state.maxQueries === maxQueries ? \"selected\" : \"\", onClick: this.handleMaxQueriesClick.bind(this, maxQueries) },\n maxQueriesText\n )\n );\n }\n }, {\n key: \"handleMaxQueriesClick\",\n value: function handleMaxQueriesClick(newMaxQueries) {\n var filteredQueries = this.filterQueries(this.state.allQueries, this.state.stateFilters, this.state.errorTypeFilters, this.state.searchString);\n this.sortAndLimitQueries(filteredQueries, this.state.currentSortType, this.state.currentSortOrder, newMaxQueries);\n\n this.setState({\n maxQueries: newMaxQueries,\n displayedQueries: filteredQueries\n });\n }\n }, {\n key: \"renderReorderListItem\",\n value: function renderReorderListItem(interval, intervalText) {\n return _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"a\",\n { href: \"#\", className: this.state.reorderInterval === interval ? 
\"selected\" : \"\", onClick: this.handleReorderClick.bind(this, interval) },\n intervalText\n )\n );\n }\n }, {\n key: \"handleReorderClick\",\n value: function handleReorderClick(interval) {\n if (this.state.reorderInterval !== interval) {\n this.setState({\n reorderInterval: interval\n });\n }\n }\n }, {\n key: \"renderSortListItem\",\n value: function renderSortListItem(sortType, sortText) {\n if (this.state.currentSortType === sortType) {\n var directionArrow = this.state.currentSortOrder === SORT_ORDER.ASCENDING ? _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-triangle-top\" }) : _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-triangle-bottom\" });\n return _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"a\",\n { href: \"#\", className: \"selected\", onClick: this.handleSortClick.bind(this, sortType) },\n sortText,\n \" \",\n directionArrow\n )\n );\n } else {\n return _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"a\",\n { href: \"#\", onClick: this.handleSortClick.bind(this, sortType) },\n sortText\n )\n );\n }\n }\n }, {\n key: \"handleSortClick\",\n value: function handleSortClick(sortType) {\n var newSortType = sortType;\n var newSortOrder = SORT_ORDER.DESCENDING;\n\n if (this.state.currentSortType === sortType && this.state.currentSortOrder === SORT_ORDER.DESCENDING) {\n newSortOrder = SORT_ORDER.ASCENDING;\n }\n\n var newDisplayedQueries = this.filterQueries(this.state.allQueries, this.state.stateFilters, this.state.errorTypeFilters, this.state.searchString);\n this.sortAndLimitQueries(newDisplayedQueries, newSortType, newSortOrder, this.state.maxQueries);\n\n this.setState({\n displayedQueries: newDisplayedQueries,\n currentSortType: newSortType,\n currentSortOrder: newSortOrder\n });\n }\n }, {\n key: \"renderFilterButton\",\n value: function renderFilterButton(filterType, filterText) {\n var checkmarkStyle = { color: 
'#57aac7' };\n var classNames = \"btn btn-sm btn-info style-check\";\n if (this.state.stateFilters.indexOf(filterType) > -1) {\n classNames += \" active\";\n checkmarkStyle = { color: '#ffffff' };\n }\n\n return _react2.default.createElement(\n \"button\",\n { type: \"button\", className: classNames, onClick: this.handleStateFilterClick.bind(this, filterType) },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-ok\", style: checkmarkStyle }),\n \"\\xA0\",\n filterText\n );\n }\n }, {\n key: \"handleStateFilterClick\",\n value: function handleStateFilterClick(filter) {\n var newFilters = this.state.stateFilters.slice();\n if (this.state.stateFilters.indexOf(filter) > -1) {\n newFilters.splice(newFilters.indexOf(filter), 1);\n } else {\n newFilters.push(filter);\n }\n\n var filteredQueries = this.filterQueries(this.state.allQueries, newFilters, this.state.errorTypeFilters, this.state.searchString);\n this.sortAndLimitQueries(filteredQueries, this.state.currentSortType, this.state.currentSortOrder);\n\n this.setState({\n stateFilters: newFilters,\n displayedQueries: filteredQueries\n });\n }\n }, {\n key: \"renderErrorTypeListItem\",\n value: function renderErrorTypeListItem(errorType, errorTypeText) {\n var checkmarkStyle = { color: '#ffffff' };\n if (this.state.errorTypeFilters.indexOf(errorType) > -1) {\n checkmarkStyle = _utils.GLYPHICON_HIGHLIGHT;\n }\n return _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"a\",\n { href: \"#\", onClick: this.handleErrorTypeFilterClick.bind(this, errorType) },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-ok\", style: checkmarkStyle }),\n \"\\xA0\",\n errorTypeText\n )\n );\n }\n }, {\n key: \"handleErrorTypeFilterClick\",\n value: function handleErrorTypeFilterClick(errorType) {\n var newFilters = this.state.errorTypeFilters.slice();\n if (this.state.errorTypeFilters.indexOf(errorType) > -1) {\n 
newFilters.splice(newFilters.indexOf(errorType), 1);\n } else {\n newFilters.push(errorType);\n }\n\n var filteredQueries = this.filterQueries(this.state.allQueries, this.state.stateFilters, newFilters, this.state.searchString);\n this.sortAndLimitQueries(filteredQueries, this.state.currentSortType, this.state.currentSortOrder);\n\n this.setState({\n errorTypeFilters: newFilters,\n displayedQueries: filteredQueries\n });\n }\n }, {\n key: \"render\",\n value: function render() {\n var queryList = _react2.default.createElement(DisplayedQueriesList, { queries: this.state.displayedQueries });\n if (this.state.displayedQueries === null || this.state.displayedQueries.length === 0) {\n var label = _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading...\"\n );\n if (this.state.initialized) {\n if (this.state.allQueries === null || this.state.allQueries.length === 0) {\n label = \"No queries\";\n } else {\n label = \"No queries matched filters\";\n }\n }\n queryList = _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h4\",\n null,\n label\n )\n )\n );\n }\n\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(\n \"div\",\n { className: \"row toolbar-row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12 toolbar-col\" },\n _react2.default.createElement(\n \"div\",\n { className: \"input-group input-group-sm\" },\n _react2.default.createElement(\"input\", { type: \"text\", className: \"form-control form-control-small search-bar\", placeholder: \"User, source, query ID, query state, resource group, or query text\",\n onChange: this.handleSearchStringChange, value: this.state.searchString }),\n _react2.default.createElement(\n \"span\",\n { className: \"input-group-addon filter-addon\" },\n \"State:\"\n ),\n _react2.default.createElement(\n 
\"div\",\n { className: \"input-group-btn\" },\n this.renderFilterButton(FILTER_TYPE.RUNNING, \"Running\"),\n this.renderFilterButton(FILTER_TYPE.QUEUED, \"Queued\"),\n this.renderFilterButton(FILTER_TYPE.FINISHED, \"Finished\"),\n _react2.default.createElement(\n \"button\",\n { type: \"button\", id: \"error-type-dropdown\", className: \"btn btn-default dropdown-toggle\", \"data-toggle\": \"dropdown\", \"aria-haspopup\": \"true\", \"aria-expanded\": \"false\" },\n \"Failed \",\n _react2.default.createElement(\"span\", { className: \"caret\" })\n ),\n _react2.default.createElement(\n \"ul\",\n { className: \"dropdown-menu error-type-dropdown-menu\" },\n this.renderErrorTypeListItem(ERROR_TYPE.INTERNAL_ERROR, \"Internal Error\"),\n this.renderErrorTypeListItem(ERROR_TYPE.EXTERNAL, \"External Error\"),\n this.renderErrorTypeListItem(ERROR_TYPE.INSUFFICIENT_RESOURCES, \"Resources Error\"),\n this.renderErrorTypeListItem(ERROR_TYPE.USER_ERROR, \"User Error\")\n )\n ),\n \"\\xA0\",\n _react2.default.createElement(\n \"div\",\n { className: \"input-group-btn\" },\n _react2.default.createElement(\n \"button\",\n { type: \"button\", className: \"btn btn-default dropdown-toggle\", \"data-toggle\": \"dropdown\", \"aria-haspopup\": \"true\", \"aria-expanded\": \"false\" },\n \"Sort \",\n _react2.default.createElement(\"span\", { className: \"caret\" })\n ),\n _react2.default.createElement(\n \"ul\",\n { className: \"dropdown-menu\" },\n this.renderSortListItem(SORT_TYPE.CREATED, \"Creation Time\"),\n this.renderSortListItem(SORT_TYPE.ELAPSED, \"Elapsed Time\"),\n this.renderSortListItem(SORT_TYPE.CPU, \"CPU Time\"),\n this.renderSortListItem(SORT_TYPE.EXECUTION, \"Execution Time\"),\n this.renderSortListItem(SORT_TYPE.CURRENT_MEMORY, \"Current Memory\"),\n this.renderSortListItem(SORT_TYPE.CUMULATIVE_MEMORY, \"Cumulative User Memory\")\n )\n ),\n \"\\xA0\",\n _react2.default.createElement(\n \"div\",\n { className: \"input-group-btn\" },\n _react2.default.createElement(\n 
\"button\",\n { type: \"button\", className: \"btn btn-default dropdown-toggle\", \"data-toggle\": \"dropdown\", \"aria-haspopup\": \"true\", \"aria-expanded\": \"false\" },\n \"Reorder Interval \",\n _react2.default.createElement(\"span\", { className: \"caret\" })\n ),\n _react2.default.createElement(\n \"ul\",\n { className: \"dropdown-menu\" },\n this.renderReorderListItem(1000, \"1s\"),\n this.renderReorderListItem(5000, \"5s\"),\n this.renderReorderListItem(10000, \"10s\"),\n this.renderReorderListItem(30000, \"30s\"),\n _react2.default.createElement(\"li\", { role: \"separator\", className: \"divider\" }),\n this.renderReorderListItem(0, \"Off\")\n )\n ),\n \"\\xA0\",\n _react2.default.createElement(\n \"div\",\n { className: \"input-group-btn\" },\n _react2.default.createElement(\n \"button\",\n { type: \"button\", className: \"btn btn-default dropdown-toggle\", \"data-toggle\": \"dropdown\", \"aria-haspopup\": \"true\", \"aria-expanded\": \"false\" },\n \"Show \",\n _react2.default.createElement(\"span\", { className: \"caret\" })\n ),\n _react2.default.createElement(\n \"ul\",\n { className: \"dropdown-menu\" },\n this.renderMaxQueriesListItem(20, \"20 queries\"),\n this.renderMaxQueriesListItem(50, \"50 queries\"),\n this.renderMaxQueriesListItem(100, \"100 queries\"),\n _react2.default.createElement(\"li\", { role: \"separator\", className: \"divider\" }),\n this.renderMaxQueriesListItem(0, \"All queries\")\n )\n )\n )\n )\n ),\n queryList\n );\n }\n }]);\n\n return QueryList;\n}(_react2.default.Component);\n\n//# sourceURL=webpack:///./components/QueryList.jsx?"); /***/ }), diff --git a/presto-main/src/main/resources/webapp/dist/plan.js b/presto-main/src/main/resources/webapp/dist/plan.js index ebf767266526..f6b384ab7952 100644 --- a/presto-main/src/main/resources/webapp/dist/plan.js +++ b/presto-main/src/main/resources/webapp/dist/plan.js @@ -106,7 +106,7 @@ eval("\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n}); /***/ 
(function(module, exports, __webpack_require__) { "use strict"; -eval("\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.PageTitle = undefined;\n\nvar _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();\n\nvar _react = __webpack_require__(/*! react */ \"./node_modules/react/index.js\");\n\nvar _react2 = _interopRequireDefault(_react);\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return call && (typeof call === \"object\" || typeof call === \"function\") ? call : self; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function, not \" + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? 
Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n\nvar PageTitle = exports.PageTitle = function (_React$Component) {\n _inherits(PageTitle, _React$Component);\n\n function PageTitle(props) {\n _classCallCheck(this, PageTitle);\n\n var _this = _possibleConstructorReturn(this, (PageTitle.__proto__ || Object.getPrototypeOf(PageTitle)).call(this, props));\n\n _this.state = {\n noConnection: false,\n lightShown: false,\n info: null,\n lastSuccess: Date.now(),\n modalShown: false,\n errorText: null\n };\n return _this;\n }\n\n _createClass(PageTitle, [{\n key: \"refreshLoop\",\n value: function refreshLoop() {\n var _this2 = this;\n\n clearTimeout(this.timeoutId);\n fetch(\"/ui/api/cluster\").then(function (response) {\n if (response.status === 401) {\n location.reload();\n }\n return response.json();\n }).then(function (info) {\n _this2.setState({\n info: info,\n noConnection: false,\n lastSuccess: Date.now(),\n modalShown: false\n });\n //$FlowFixMe$ Bootstrap 3 plugin\n $('#no-connection-modal').modal('hide');\n _this2.resetTimer();\n }).catch(function (error) {\n _this2.setState({\n noConnection: true,\n lightShown: !_this2.state.lightShown,\n errorText: error\n });\n _this2.resetTimer();\n\n if (!_this2.state.modalShown && (error || Date.now() - _this2.state.lastSuccess > 30 * 1000)) {\n //$FlowFixMe$ Bootstrap 3 plugin\n $('#no-connection-modal').modal();\n 
_this2.setState({ modalShown: true });\n }\n });\n }\n }, {\n key: \"resetTimer\",\n value: function resetTimer() {\n clearTimeout(this.timeoutId);\n this.timeoutId = setTimeout(this.refreshLoop.bind(this), 1000);\n }\n }, {\n key: \"componentDidMount\",\n value: function componentDidMount() {\n this.refreshLoop.bind(this)();\n }\n }, {\n key: \"renderStatusLight\",\n value: function renderStatusLight() {\n if (this.state.noConnection) {\n if (this.state.lightShown) {\n return _react2.default.createElement(\"span\", { className: \"status-light status-light-red\", id: \"status-indicator\" });\n } else {\n return _react2.default.createElement(\"span\", { className: \"status-light\", id: \"status-indicator\" });\n }\n }\n return _react2.default.createElement(\"span\", { className: \"status-light status-light-green\", id: \"status-indicator\" });\n }\n }, {\n key: \"render\",\n value: function render() {\n var info = this.state.info;\n if (!info) {\n return null;\n }\n\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(\n \"nav\",\n { className: \"navbar\" },\n _react2.default.createElement(\n \"div\",\n { className: \"container-fluid\" },\n _react2.default.createElement(\n \"div\",\n { className: \"navbar-header\" },\n _react2.default.createElement(\n \"table\",\n null,\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"a\",\n { href: \"/ui/\" },\n _react2.default.createElement(\"img\", { src: \"assets/logo.png\" })\n )\n ),\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-brand\" },\n this.props.title\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { id: \"navbar\", className: \"navbar-collapse collapse\" },\n _react2.default.createElement(\n \"ul\",\n { className: \"nav navbar-nav navbar-right\" 
},\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"uppercase\" },\n \"Version\"\n ),\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"span\",\n { className: \"text uppercase\", id: \"version-number\" },\n info.nodeVersion.version\n )\n )\n ),\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"uppercase\" },\n \"Environment\"\n ),\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"span\",\n { className: \"text uppercase\", id: \"environment\" },\n info.environment\n )\n )\n ),\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"uppercase\" },\n \"Uptime\"\n ),\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"span\",\n { \"data-toggle\": \"tooltip\", \"data-placement\": \"bottom\", title: \"Connection status\" },\n this.renderStatusLight()\n ),\n \"\\xA0\",\n _react2.default.createElement(\n \"span\",\n { className: \"text\", id: \"uptime\" },\n info.uptime\n )\n )\n ),\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"text\", id: \"logout\" },\n _react2.default.createElement(\n \"a\",\n { className: \"btn btn-logout\", href: \"logout\" },\n \"Log Out\"\n )\n )\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { id: \"no-connection-modal\", className: \"modal\", tabIndex: \"-1\", role: \"dialog\" },\n _react2.default.createElement(\n \"div\",\n { className: 
\"modal-dialog modal-sm\", role: \"document\" },\n _react2.default.createElement(\n \"div\",\n { className: \"modal-content\" },\n _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"h4\",\n null,\n \"Unable to connect to server\"\n ),\n _react2.default.createElement(\n \"p\",\n null,\n this.state.errorText ? \"Error: \" + this.state.errorText : null\n )\n )\n )\n )\n )\n )\n );\n }\n }]);\n\n return PageTitle;\n}(_react2.default.Component);\n\n//# sourceURL=webpack:///./components/PageTitle.jsx?"); +eval("\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.PageTitle = undefined;\n\nvar _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();\n\nvar _react = __webpack_require__(/*! react */ \"./node_modules/react/index.js\");\n\nvar _react2 = _interopRequireDefault(_react);\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return call && (typeof call === \"object\" || typeof call === \"function\") ? call : self; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function, not \" + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n\nvar PageTitle = exports.PageTitle = function (_React$Component) {\n _inherits(PageTitle, _React$Component);\n\n function PageTitle(props) {\n _classCallCheck(this, PageTitle);\n\n var _this = _possibleConstructorReturn(this, (PageTitle.__proto__ || Object.getPrototypeOf(PageTitle)).call(this, props));\n\n _this.state = {\n noConnection: false,\n lightShown: false,\n info: null,\n lastSuccess: Date.now(),\n modalShown: false,\n errorText: null\n };\n return _this;\n }\n\n 
_createClass(PageTitle, [{\n key: \"refreshLoop\",\n value: function refreshLoop() {\n var _this2 = this;\n\n clearTimeout(this.timeoutId);\n fetch(\"/ui/api/cluster\").then(function (response) {\n if (response.status === 401) {\n location.reload();\n }\n return response.json();\n }).then(function (info) {\n _this2.setState({\n info: info,\n noConnection: false,\n lastSuccess: Date.now(),\n modalShown: false\n });\n //$FlowFixMe$ Bootstrap 3 plugin\n $('#no-connection-modal').modal('hide');\n _this2.resetTimer();\n }).catch(function (fail) {\n _this2.setState({\n noConnection: true,\n lightShown: !_this2.state.lightShown,\n errorText: fail\n });\n _this2.resetTimer();\n\n if (!_this2.state.modalShown && (fail || Date.now() - _this2.state.lastSuccess > 30 * 1000)) {\n //$FlowFixMe$ Bootstrap 3 plugin\n $('#no-connection-modal').modal();\n _this2.setState({ modalShown: true });\n }\n });\n }\n }, {\n key: \"resetTimer\",\n value: function resetTimer() {\n clearTimeout(this.timeoutId);\n this.timeoutId = setTimeout(this.refreshLoop.bind(this), 1000);\n }\n }, {\n key: \"componentDidMount\",\n value: function componentDidMount() {\n this.refreshLoop.bind(this)();\n }\n }, {\n key: \"renderStatusLight\",\n value: function renderStatusLight() {\n if (this.state.noConnection) {\n if (this.state.lightShown) {\n return _react2.default.createElement(\"span\", { className: \"status-light status-light-red\", id: \"status-indicator\" });\n } else {\n return _react2.default.createElement(\"span\", { className: \"status-light\", id: \"status-indicator\" });\n }\n }\n return _react2.default.createElement(\"span\", { className: \"status-light status-light-green\", id: \"status-indicator\" });\n }\n }, {\n key: \"render\",\n value: function render() {\n var info = this.state.info;\n if (!info) {\n return null;\n }\n\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(\n \"nav\",\n { className: \"navbar\" },\n _react2.default.createElement(\n 
\"div\",\n { className: \"container-fluid\" },\n _react2.default.createElement(\n \"div\",\n { className: \"navbar-header\" },\n _react2.default.createElement(\n \"table\",\n null,\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"a\",\n { href: \"/ui/\" },\n _react2.default.createElement(\"img\", { src: \"assets/logo.png\" })\n )\n ),\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-brand\" },\n this.props.title\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { id: \"navbar\", className: \"navbar-collapse collapse\" },\n _react2.default.createElement(\n \"ul\",\n { className: \"nav navbar-nav navbar-right\" },\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"uppercase\" },\n \"Version\"\n ),\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"span\",\n { className: \"text uppercase\", id: \"version-number\" },\n info.nodeVersion.version\n )\n )\n ),\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"uppercase\" },\n \"Environment\"\n ),\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"span\",\n { className: \"text uppercase\", id: \"environment\" },\n info.environment\n )\n )\n ),\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"uppercase\" },\n \"Uptime\"\n ),\n _react2.default.createElement(\"br\", null),\n 
_react2.default.createElement(\n \"span\",\n { \"data-toggle\": \"tooltip\", \"data-placement\": \"bottom\", title: \"Connection status\" },\n this.renderStatusLight()\n ),\n \"\\xA0\",\n _react2.default.createElement(\n \"span\",\n { className: \"text\", id: \"uptime\" },\n info.uptime\n )\n )\n ),\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"text\", id: \"logout\" },\n _react2.default.createElement(\n \"a\",\n { className: \"btn btn-logout\", href: \"logout\" },\n \"Log Out\"\n )\n )\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { id: \"no-connection-modal\", className: \"modal\", tabIndex: \"-1\", role: \"dialog\" },\n _react2.default.createElement(\n \"div\",\n { className: \"modal-dialog modal-sm\", role: \"document\" },\n _react2.default.createElement(\n \"div\",\n { className: \"modal-content\" },\n _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"h4\",\n null,\n \"Unable to connect to server\"\n ),\n _react2.default.createElement(\n \"p\",\n null,\n this.state.errorText ? 
\"Error: \" + this.state.errorText : null\n )\n )\n )\n )\n )\n )\n );\n }\n }]);\n\n return PageTitle;\n}(_react2.default.Component);\n\n//# sourceURL=webpack:///./components/PageTitle.jsx?"); /***/ }), diff --git a/presto-main/src/main/resources/webapp/dist/query.js b/presto-main/src/main/resources/webapp/dist/query.js index e9ed79b40d8c..81103b48fdfe 100644 --- a/presto-main/src/main/resources/webapp/dist/query.js +++ b/presto-main/src/main/resources/webapp/dist/query.js @@ -94,7 +94,7 @@ /***/ (function(module, exports, __webpack_require__) { "use strict"; -eval("\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.PageTitle = undefined;\n\nvar _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();\n\nvar _react = __webpack_require__(/*! react */ \"./node_modules/react/index.js\");\n\nvar _react2 = _interopRequireDefault(_react);\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return call && (typeof call === \"object\" || typeof call === \"function\") ? 
call : self; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function, not \" + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n\nvar PageTitle = exports.PageTitle = function (_React$Component) {\n _inherits(PageTitle, _React$Component);\n\n function PageTitle(props) {\n _classCallCheck(this, PageTitle);\n\n var _this = _possibleConstructorReturn(this, (PageTitle.__proto__ || Object.getPrototypeOf(PageTitle)).call(this, props));\n\n _this.state = {\n noConnection: false,\n lightShown: false,\n info: null,\n lastSuccess: Date.now(),\n modalShown: false,\n errorText: null\n };\n return _this;\n }\n\n _createClass(PageTitle, [{\n key: \"refreshLoop\",\n value: function refreshLoop() {\n var _this2 = this;\n\n clearTimeout(this.timeoutId);\n fetch(\"/ui/api/cluster\").then(function (response) {\n if (response.status === 401) {\n location.reload();\n }\n return response.json();\n }).then(function (info) {\n _this2.setState({\n info: info,\n noConnection: false,\n lastSuccess: Date.now(),\n modalShown: false\n });\n 
//$FlowFixMe$ Bootstrap 3 plugin\n $('#no-connection-modal').modal('hide');\n _this2.resetTimer();\n }).catch(function (error) {\n _this2.setState({\n noConnection: true,\n lightShown: !_this2.state.lightShown,\n errorText: error\n });\n _this2.resetTimer();\n\n if (!_this2.state.modalShown && (error || Date.now() - _this2.state.lastSuccess > 30 * 1000)) {\n //$FlowFixMe$ Bootstrap 3 plugin\n $('#no-connection-modal').modal();\n _this2.setState({ modalShown: true });\n }\n });\n }\n }, {\n key: \"resetTimer\",\n value: function resetTimer() {\n clearTimeout(this.timeoutId);\n this.timeoutId = setTimeout(this.refreshLoop.bind(this), 1000);\n }\n }, {\n key: \"componentDidMount\",\n value: function componentDidMount() {\n this.refreshLoop.bind(this)();\n }\n }, {\n key: \"renderStatusLight\",\n value: function renderStatusLight() {\n if (this.state.noConnection) {\n if (this.state.lightShown) {\n return _react2.default.createElement(\"span\", { className: \"status-light status-light-red\", id: \"status-indicator\" });\n } else {\n return _react2.default.createElement(\"span\", { className: \"status-light\", id: \"status-indicator\" });\n }\n }\n return _react2.default.createElement(\"span\", { className: \"status-light status-light-green\", id: \"status-indicator\" });\n }\n }, {\n key: \"render\",\n value: function render() {\n var info = this.state.info;\n if (!info) {\n return null;\n }\n\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(\n \"nav\",\n { className: \"navbar\" },\n _react2.default.createElement(\n \"div\",\n { className: \"container-fluid\" },\n _react2.default.createElement(\n \"div\",\n { className: \"navbar-header\" },\n _react2.default.createElement(\n \"table\",\n null,\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"a\",\n { href: \"/ui/\" },\n 
_react2.default.createElement(\"img\", { src: \"assets/logo.png\" })\n )\n ),\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-brand\" },\n this.props.title\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { id: \"navbar\", className: \"navbar-collapse collapse\" },\n _react2.default.createElement(\n \"ul\",\n { className: \"nav navbar-nav navbar-right\" },\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"uppercase\" },\n \"Version\"\n ),\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"span\",\n { className: \"text uppercase\", id: \"version-number\" },\n info.nodeVersion.version\n )\n )\n ),\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"uppercase\" },\n \"Environment\"\n ),\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"span\",\n { className: \"text uppercase\", id: \"environment\" },\n info.environment\n )\n )\n ),\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"uppercase\" },\n \"Uptime\"\n ),\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"span\",\n { \"data-toggle\": \"tooltip\", \"data-placement\": \"bottom\", title: \"Connection status\" },\n this.renderStatusLight()\n ),\n \"\\xA0\",\n _react2.default.createElement(\n \"span\",\n { className: \"text\", id: \"uptime\" },\n info.uptime\n )\n )\n ),\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: 
\"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"text\", id: \"logout\" },\n _react2.default.createElement(\n \"a\",\n { className: \"btn btn-logout\", href: \"logout\" },\n \"Log Out\"\n )\n )\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { id: \"no-connection-modal\", className: \"modal\", tabIndex: \"-1\", role: \"dialog\" },\n _react2.default.createElement(\n \"div\",\n { className: \"modal-dialog modal-sm\", role: \"document\" },\n _react2.default.createElement(\n \"div\",\n { className: \"modal-content\" },\n _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"h4\",\n null,\n \"Unable to connect to server\"\n ),\n _react2.default.createElement(\n \"p\",\n null,\n this.state.errorText ? \"Error: \" + this.state.errorText : null\n )\n )\n )\n )\n )\n )\n );\n }\n }]);\n\n return PageTitle;\n}(_react2.default.Component);\n\n//# sourceURL=webpack:///./components/PageTitle.jsx?"); +eval("\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.PageTitle = undefined;\n\nvar _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();\n\nvar _react = __webpack_require__(/*! 
react */ \"./node_modules/react/index.js\");\n\nvar _react2 = _interopRequireDefault(_react);\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return call && (typeof call === \"object\" || typeof call === \"function\") ? call : self; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function, not \" + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? 
Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n\nvar PageTitle = exports.PageTitle = function (_React$Component) {\n _inherits(PageTitle, _React$Component);\n\n function PageTitle(props) {\n _classCallCheck(this, PageTitle);\n\n var _this = _possibleConstructorReturn(this, (PageTitle.__proto__ || Object.getPrototypeOf(PageTitle)).call(this, props));\n\n _this.state = {\n noConnection: false,\n lightShown: false,\n info: null,\n lastSuccess: Date.now(),\n modalShown: false,\n errorText: null\n };\n return _this;\n }\n\n _createClass(PageTitle, [{\n key: \"refreshLoop\",\n value: function refreshLoop() {\n var _this2 = this;\n\n clearTimeout(this.timeoutId);\n fetch(\"/ui/api/cluster\").then(function (response) {\n if (response.status === 401) {\n location.reload();\n }\n return response.json();\n }).then(function (info) {\n _this2.setState({\n info: info,\n noConnection: false,\n lastSuccess: Date.now(),\n modalShown: false\n });\n //$FlowFixMe$ Bootstrap 3 plugin\n $('#no-connection-modal').modal('hide');\n _this2.resetTimer();\n }).catch(function (fail) {\n _this2.setState({\n noConnection: true,\n lightShown: !_this2.state.lightShown,\n errorText: fail\n });\n _this2.resetTimer();\n\n if (!_this2.state.modalShown && (fail || Date.now() - _this2.state.lastSuccess > 30 * 1000)) {\n //$FlowFixMe$ Bootstrap 3 plugin\n $('#no-connection-modal').modal();\n 
_this2.setState({ modalShown: true });\n }\n });\n }\n }, {\n key: \"resetTimer\",\n value: function resetTimer() {\n clearTimeout(this.timeoutId);\n this.timeoutId = setTimeout(this.refreshLoop.bind(this), 1000);\n }\n }, {\n key: \"componentDidMount\",\n value: function componentDidMount() {\n this.refreshLoop.bind(this)();\n }\n }, {\n key: \"renderStatusLight\",\n value: function renderStatusLight() {\n if (this.state.noConnection) {\n if (this.state.lightShown) {\n return _react2.default.createElement(\"span\", { className: \"status-light status-light-red\", id: \"status-indicator\" });\n } else {\n return _react2.default.createElement(\"span\", { className: \"status-light\", id: \"status-indicator\" });\n }\n }\n return _react2.default.createElement(\"span\", { className: \"status-light status-light-green\", id: \"status-indicator\" });\n }\n }, {\n key: \"render\",\n value: function render() {\n var info = this.state.info;\n if (!info) {\n return null;\n }\n\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(\n \"nav\",\n { className: \"navbar\" },\n _react2.default.createElement(\n \"div\",\n { className: \"container-fluid\" },\n _react2.default.createElement(\n \"div\",\n { className: \"navbar-header\" },\n _react2.default.createElement(\n \"table\",\n null,\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"a\",\n { href: \"/ui/\" },\n _react2.default.createElement(\"img\", { src: \"assets/logo.png\" })\n )\n ),\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-brand\" },\n this.props.title\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { id: \"navbar\", className: \"navbar-collapse collapse\" },\n _react2.default.createElement(\n \"ul\",\n { className: \"nav navbar-nav navbar-right\" 
},\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"uppercase\" },\n \"Version\"\n ),\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"span\",\n { className: \"text uppercase\", id: \"version-number\" },\n info.nodeVersion.version\n )\n )\n ),\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"uppercase\" },\n \"Environment\"\n ),\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"span\",\n { className: \"text uppercase\", id: \"environment\" },\n info.environment\n )\n )\n ),\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"uppercase\" },\n \"Uptime\"\n ),\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"span\",\n { \"data-toggle\": \"tooltip\", \"data-placement\": \"bottom\", title: \"Connection status\" },\n this.renderStatusLight()\n ),\n \"\\xA0\",\n _react2.default.createElement(\n \"span\",\n { className: \"text\", id: \"uptime\" },\n info.uptime\n )\n )\n ),\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"text\", id: \"logout\" },\n _react2.default.createElement(\n \"a\",\n { className: \"btn btn-logout\", href: \"logout\" },\n \"Log Out\"\n )\n )\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { id: \"no-connection-modal\", className: \"modal\", tabIndex: \"-1\", role: \"dialog\" },\n _react2.default.createElement(\n \"div\",\n { className: 
\"modal-dialog modal-sm\", role: \"document\" },\n _react2.default.createElement(\n \"div\",\n { className: \"modal-content\" },\n _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"h4\",\n null,\n \"Unable to connect to server\"\n ),\n _react2.default.createElement(\n \"p\",\n null,\n this.state.errorText ? \"Error: \" + this.state.errorText : null\n )\n )\n )\n )\n )\n )\n );\n }\n }]);\n\n return PageTitle;\n}(_react2.default.Component);\n\n//# sourceURL=webpack:///./components/PageTitle.jsx?"); /***/ }), @@ -106,7 +106,7 @@ eval("\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n}); /***/ (function(module, exports, __webpack_require__) { "use strict"; -eval("\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.QueryDetail = undefined;\n\nvar _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();\n\nvar _react = __webpack_require__(/*! react */ \"./node_modules/react/index.js\");\n\nvar _react2 = _interopRequireDefault(_react);\n\nvar _reactable = __webpack_require__(/*! reactable */ \"./node_modules/reactable/lib/reactable.js\");\n\nvar _reactable2 = _interopRequireDefault(_reactable);\n\nvar _utils = __webpack_require__(/*! ../utils */ \"./utils.js\");\n\nvar _QueryHeader = __webpack_require__(/*! 
./QueryHeader */ \"./components/QueryHeader.jsx\");\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return call && (typeof call === \"object\" || typeof call === \"function\") ? call : self; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function, not \" + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nvar Table = _reactable2.default.Table,\n Thead = _reactable2.default.Thead,\n Th = _reactable2.default.Th,\n Tr = _reactable2.default.Tr,\n Td = _reactable2.default.Td;\n\nvar TaskList = function (_React$Component) {\n _inherits(TaskList, _React$Component);\n\n function TaskList() {\n _classCallCheck(this, TaskList);\n\n return 
_possibleConstructorReturn(this, (TaskList.__proto__ || Object.getPrototypeOf(TaskList)).apply(this, arguments));\n }\n\n _createClass(TaskList, [{\n key: \"render\",\n value: function render() {\n var tasks = this.props.tasks;\n\n if (tasks === undefined || tasks.length === 0) {\n return _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h4\",\n null,\n \"No threads in the selected group\"\n )\n )\n );\n }\n\n var showPortNumbers = TaskList.showPortNumbers(tasks);\n\n var renderedTasks = tasks.map(function (task) {\n var elapsedTime = (0, _utils.parseDuration)(task.stats.elapsedTime);\n if (elapsedTime === 0) {\n elapsedTime = Date.now() - Date.parse(task.stats.createTime);\n }\n\n return _react2.default.createElement(\n Tr,\n { key: task.taskStatus.taskId },\n _react2.default.createElement(\n Td,\n { column: \"id\", value: task.taskStatus.taskId },\n _react2.default.createElement(\n \"a\",\n { href: \"/ui/api/worker/\" + task.taskStatus.nodeId + \"/task/\" + task.taskStatus.taskId + \"?pretty\" },\n (0, _utils.getTaskIdSuffix)(task.taskStatus.taskId)\n )\n ),\n _react2.default.createElement(\n Td,\n { column: \"host\", value: (0, _utils.getHostname)(task.taskStatus.self) },\n _react2.default.createElement(\n \"a\",\n { href: \"worker.html?\" + task.taskStatus.nodeId, className: \"font-light\", target: \"_blank\" },\n showPortNumbers ? 
(0, _utils.getHostAndPort)(task.taskStatus.self) : (0, _utils.getHostname)(task.taskStatus.self)\n )\n ),\n _react2.default.createElement(\n Td,\n { column: \"state\", value: TaskList.formatState(task.taskStatus.state, task.stats.fullyBlocked) },\n TaskList.formatState(task.taskStatus.state, task.stats.fullyBlocked)\n ),\n _react2.default.createElement(\n Td,\n { column: \"rows\", value: task.stats.rawInputPositions },\n (0, _utils.formatCount)(task.stats.rawInputPositions)\n ),\n _react2.default.createElement(\n Td,\n { column: \"rowsSec\", value: (0, _utils.computeRate)(task.stats.rawInputPositions, elapsedTime) },\n (0, _utils.formatCount)((0, _utils.computeRate)(task.stats.rawInputPositions, elapsedTime))\n ),\n _react2.default.createElement(\n Td,\n { column: \"bytes\", value: (0, _utils.parseDataSize)(task.stats.rawInputDataSize) },\n (0, _utils.formatDataSizeBytes)((0, _utils.parseDataSize)(task.stats.rawInputDataSize))\n ),\n _react2.default.createElement(\n Td,\n { column: \"bytesSec\", value: (0, _utils.computeRate)((0, _utils.parseDataSize)(task.stats.rawInputDataSize), elapsedTime) },\n (0, _utils.formatDataSizeBytes)((0, _utils.computeRate)((0, _utils.parseDataSize)(task.stats.rawInputDataSize), elapsedTime))\n ),\n _react2.default.createElement(\n Td,\n { column: \"splitsPending\", value: task.stats.queuedDrivers },\n task.stats.queuedDrivers\n ),\n _react2.default.createElement(\n Td,\n { column: \"splitsRunning\", value: task.stats.runningDrivers },\n task.stats.runningDrivers\n ),\n _react2.default.createElement(\n Td,\n { column: \"splitsBlocked\", value: task.stats.blockedDrivers },\n task.stats.blockedDrivers\n ),\n _react2.default.createElement(\n Td,\n { column: \"splitsDone\", value: task.stats.completedDrivers },\n task.stats.completedDrivers\n ),\n _react2.default.createElement(\n Td,\n { column: \"elapsedTime\", value: (0, _utils.parseDuration)(task.stats.elapsedTime) },\n task.stats.elapsedTime\n ),\n _react2.default.createElement(\n 
Td,\n { column: \"cpuTime\", value: (0, _utils.parseDuration)(task.stats.totalCpuTime) },\n task.stats.totalCpuTime\n ),\n _react2.default.createElement(\n Td,\n { column: \"bufferedBytes\", value: task.outputBuffers.totalBufferedBytes },\n (0, _utils.formatDataSizeBytes)(task.outputBuffers.totalBufferedBytes)\n )\n );\n });\n\n return _react2.default.createElement(\n Table,\n { id: \"tasks\", className: \"table table-striped sortable\", sortable: [{\n column: 'id',\n sortFunction: TaskList.compareTaskId\n }, 'host', 'state', 'splitsPending', 'splitsRunning', 'splitsBlocked', 'splitsDone', 'rows', 'rowsSec', 'bytes', 'bytesSec', 'elapsedTime', 'cpuTime', 'bufferedBytes'],\n defaultSort: { column: 'id', direction: 'asc' } },\n _react2.default.createElement(\n Thead,\n null,\n _react2.default.createElement(\n Th,\n { column: \"id\" },\n \"ID\"\n ),\n _react2.default.createElement(\n Th,\n { column: \"host\" },\n \"Host\"\n ),\n _react2.default.createElement(\n Th,\n { column: \"state\" },\n \"State\"\n ),\n _react2.default.createElement(\n Th,\n { column: \"splitsPending\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-pause\", style: _utils.GLYPHICON_HIGHLIGHT, \"data-toggle\": \"tooltip\", \"data-placement\": \"top\",\n title: \"Pending splits\" })\n ),\n _react2.default.createElement(\n Th,\n { column: \"splitsRunning\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-play\", style: _utils.GLYPHICON_HIGHLIGHT, \"data-toggle\": \"tooltip\", \"data-placement\": \"top\",\n title: \"Running splits\" })\n ),\n _react2.default.createElement(\n Th,\n { column: \"splitsBlocked\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-bookmark\", style: _utils.GLYPHICON_HIGHLIGHT, \"data-toggle\": \"tooltip\", \"data-placement\": \"top\",\n title: \"Blocked splits\" })\n ),\n _react2.default.createElement(\n Th,\n { column: \"splitsDone\" },\n _react2.default.createElement(\"span\", { 
className: \"glyphicon glyphicon-ok\", style: _utils.GLYPHICON_HIGHLIGHT, \"data-toggle\": \"tooltip\", \"data-placement\": \"top\",\n title: \"Completed splits\" })\n ),\n _react2.default.createElement(\n Th,\n { column: \"rows\" },\n \"Rows\"\n ),\n _react2.default.createElement(\n Th,\n { column: \"rowsSec\" },\n \"Rows/s\"\n ),\n _react2.default.createElement(\n Th,\n { column: \"bytes\" },\n \"Bytes\"\n ),\n _react2.default.createElement(\n Th,\n { column: \"bytesSec\" },\n \"Bytes/s\"\n ),\n _react2.default.createElement(\n Th,\n { column: \"elapsedTime\" },\n \"Elapsed\"\n ),\n _react2.default.createElement(\n Th,\n { column: \"cpuTime\" },\n \"CPU Time\"\n ),\n _react2.default.createElement(\n Th,\n { column: \"bufferedBytes\" },\n \"Buffered\"\n )\n ),\n renderedTasks\n );\n }\n }], [{\n key: \"removeQueryId\",\n value: function removeQueryId(id) {\n var pos = id.indexOf('.');\n if (pos !== -1) {\n return id.substring(pos + 1);\n }\n return id;\n }\n }, {\n key: \"compareTaskId\",\n value: function compareTaskId(taskA, taskB) {\n var taskIdArrA = TaskList.removeQueryId(taskA).split(\".\");\n var taskIdArrB = TaskList.removeQueryId(taskB).split(\".\");\n\n if (taskIdArrA.length > taskIdArrB.length) {\n return 1;\n }\n for (var i = 0; i < taskIdArrA.length; i++) {\n var anum = Number.parseInt(taskIdArrA[i]);\n var bnum = Number.parseInt(taskIdArrB[i]);\n if (anum !== bnum) {\n return anum > bnum ? 
1 : -1;\n }\n }\n\n return 0;\n }\n }, {\n key: \"showPortNumbers\",\n value: function showPortNumbers(tasks) {\n // check if any host has multiple port numbers\n var hostToPortNumber = {};\n for (var i = 0; i < tasks.length; i++) {\n var taskUri = tasks[i].taskStatus.self;\n var hostname = (0, _utils.getHostname)(taskUri);\n var port = (0, _utils.getPort)(taskUri);\n if (hostname in hostToPortNumber && hostToPortNumber[hostname] !== port) {\n return true;\n }\n hostToPortNumber[hostname] = port;\n }\n\n return false;\n }\n }, {\n key: \"formatState\",\n value: function formatState(state, fullyBlocked) {\n if (fullyBlocked && state === \"RUNNING\") {\n return \"BLOCKED\";\n } else {\n return state;\n }\n }\n }]);\n\n return TaskList;\n}(_react2.default.Component);\n\nvar BAR_CHART_WIDTH = 800;\n\nvar BAR_CHART_PROPERTIES = {\n type: 'bar',\n barSpacing: '0',\n height: '80px',\n barColor: '#747F96',\n zeroColor: '#8997B3',\n chartRangeMin: 0,\n tooltipClassname: 'sparkline-tooltip',\n tooltipFormat: 'Task {{offset:offset}} - {{value}}',\n disableHiddenCheck: true\n};\n\nvar HISTOGRAM_WIDTH = 175;\n\nvar HISTOGRAM_PROPERTIES = {\n type: 'bar',\n barSpacing: '0',\n height: '80px',\n barColor: '#747F96',\n zeroColor: '#747F96',\n zeroAxis: true,\n chartRangeMin: 0,\n tooltipClassname: 'sparkline-tooltip',\n tooltipFormat: '{{offset:offset}} -- {{value}} tasks',\n disableHiddenCheck: true\n};\n\nvar StageSummary = function (_React$Component2) {\n _inherits(StageSummary, _React$Component2);\n\n function StageSummary(props) {\n _classCallCheck(this, StageSummary);\n\n var _this2 = _possibleConstructorReturn(this, (StageSummary.__proto__ || Object.getPrototypeOf(StageSummary)).call(this, props));\n\n _this2.state = {\n expanded: false,\n lastRender: null\n };\n return _this2;\n }\n\n _createClass(StageSummary, [{\n key: \"getExpandedIcon\",\n value: function getExpandedIcon() {\n return this.state.expanded ? 
\"glyphicon-chevron-up\" : \"glyphicon-chevron-down\";\n }\n }, {\n key: \"getExpandedStyle\",\n value: function getExpandedStyle() {\n return this.state.expanded ? {} : { display: \"none\" };\n }\n }, {\n key: \"toggleExpanded\",\n value: function toggleExpanded() {\n this.setState({\n expanded: !this.state.expanded\n });\n }\n }, {\n key: \"componentDidUpdate\",\n value: function componentDidUpdate() {\n var stage = this.props.stage;\n var numTasks = stage.tasks.length;\n\n // sort the x-axis\n stage.tasks.sort(function (taskA, taskB) {\n return (0, _utils.getTaskNumber)(taskA.taskStatus.taskId) - (0, _utils.getTaskNumber)(taskB.taskStatus.taskId);\n });\n\n var scheduledTimes = stage.tasks.map(function (task) {\n return (0, _utils.parseDuration)(task.stats.totalScheduledTime);\n });\n var cpuTimes = stage.tasks.map(function (task) {\n return (0, _utils.parseDuration)(task.stats.totalCpuTime);\n });\n\n // prevent multiple calls to componentDidUpdate (resulting from calls to setState or otherwise) within the refresh interval from re-rendering sparklines/charts\n if (this.state.lastRender === null || Date.now() - this.state.lastRender >= 1000) {\n var renderTimestamp = Date.now();\n var stageId = (0, _utils.getStageNumber)(stage.stageId);\n\n StageSummary.renderHistogram('#scheduled-time-histogram-' + stageId, scheduledTimes, _utils.formatDuration);\n StageSummary.renderHistogram('#cpu-time-histogram-' + stageId, cpuTimes, _utils.formatDuration);\n\n if (this.state.expanded) {\n // this needs to be a string otherwise it will also be passed to numberFormatter\n var tooltipValueLookups = { 'offset': {} };\n for (var i = 0; i < numTasks; i++) {\n tooltipValueLookups['offset'][i] = (0, _utils.getStageNumber)(stage.stageId) + \".\" + i;\n }\n\n var stageBarChartProperties = $.extend({}, BAR_CHART_PROPERTIES, { barWidth: BAR_CHART_WIDTH / numTasks, tooltipValueLookups: tooltipValueLookups });\n\n $('#scheduled-time-bar-chart-' + stageId).sparkline(scheduledTimes, 
$.extend({}, stageBarChartProperties, { numberFormatter: _utils.formatDuration }));\n $('#cpu-time-bar-chart-' + stageId).sparkline(cpuTimes, $.extend({}, stageBarChartProperties, { numberFormatter: _utils.formatDuration }));\n }\n\n this.setState({\n lastRender: renderTimestamp\n });\n }\n }\n }, {\n key: \"render\",\n value: function render() {\n var stage = this.props.stage;\n if (stage === undefined || !stage.hasOwnProperty('plan')) {\n return _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n \"Information about this stage is unavailable.\"\n )\n );\n }\n\n var totalBufferedBytes = stage.tasks.map(function (task) {\n return task.outputBuffers.totalBufferedBytes;\n }).reduce(function (a, b) {\n return a + b;\n }, 0);\n\n var stageId = (0, _utils.getStageNumber)(stage.stageId);\n\n return _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-id\" },\n _react2.default.createElement(\n \"div\",\n { className: \"stage-state-color\", style: { borderLeftColor: (0, _utils.getStageStateColor)(stage) } },\n stageId\n )\n ),\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"table\",\n { className: \"table single-stage-table\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"table\",\n { className: \"stage-table stage-table-time\" },\n _react2.default.createElement(\n \"thead\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"th\",\n { className: \"stage-table-stat-title stage-table-stat-header\" },\n \"Time\"\n ),\n _react2.default.createElement(\"th\", null)\n )\n ),\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: 
\"stage-table-stat-title\" },\n \"Scheduled\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-text\" },\n stage.stageStats.totalScheduledTime\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title\" },\n \"Blocked\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-text\" },\n stage.stageStats.totalBlockedTime\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title\" },\n \"CPU\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-text\" },\n stage.stageStats.totalCpuTime\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"table\",\n { className: \"stage-table stage-table-memory\" },\n _react2.default.createElement(\n \"thead\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"th\",\n { className: \"stage-table-stat-title stage-table-stat-header\" },\n \"Memory\"\n ),\n _react2.default.createElement(\"th\", null)\n )\n ),\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title\" },\n \"Cumulative\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-text\" },\n (0, _utils.formatDataSizeBytes)(stage.stageStats.cumulativeUserMemory / 1000)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title\" },\n \"Current\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-text\" },\n (0, _utils.parseAndFormatDataSize)(stage.stageStats.userMemoryReservation)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n 
_react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title\" },\n \"Buffers\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-text\" },\n (0, _utils.formatDataSize)(totalBufferedBytes)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title\" },\n \"Peak\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-text\" },\n (0, _utils.parseAndFormatDataSize)(stage.stageStats.peakUserMemoryReservation)\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"table\",\n { className: \"stage-table stage-table-tasks\" },\n _react2.default.createElement(\n \"thead\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"th\",\n { className: \"stage-table-stat-title stage-table-stat-header\" },\n \"Tasks\"\n ),\n _react2.default.createElement(\"th\", null)\n )\n ),\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title\" },\n \"Pending\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-text\" },\n stage.tasks.filter(function (task) {\n return task.taskStatus.state === \"PLANNED\";\n }).length\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title\" },\n \"Running\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-text\" },\n stage.tasks.filter(function (task) {\n return task.taskStatus.state === \"RUNNING\";\n }).length\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title\" },\n \"Blocked\"\n ),\n _react2.default.createElement(\n \"td\",\n { 
className: \"stage-table-stat-text\" },\n stage.tasks.filter(function (task) {\n return task.stats.fullyBlocked;\n }).length\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title\" },\n \"Total\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-text\" },\n stage.tasks.length\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"table\",\n { className: \"stage-table histogram-table\" },\n _react2.default.createElement(\n \"thead\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"th\",\n { className: \"stage-table-stat-title stage-table-chart-header\" },\n \"Scheduled Time Skew\"\n )\n )\n ),\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"histogram-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"histogram\", id: \"scheduled-time-histogram-\" + stageId },\n _react2.default.createElement(\"div\", { className: \"loader\" })\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"table\",\n { className: \"stage-table histogram-table\" },\n _react2.default.createElement(\n \"thead\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"th\",\n { className: \"stage-table-stat-title stage-table-chart-header\" },\n \"CPU Time Skew\"\n )\n )\n ),\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"histogram-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"histogram\", id: \"cpu-time-histogram-\" + stageId },\n _react2.default.createElement(\"div\", { className: \"loader\" 
})\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"expand-charts-container\" },\n _react2.default.createElement(\n \"a\",\n { onClick: this.toggleExpanded.bind(this), className: \"expand-charts-button\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon \" + this.getExpandedIcon(), style: _utils.GLYPHICON_HIGHLIGHT, \"data-toggle\": \"tooltip\", \"data-placement\": \"top\", title: \"More\" })\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n { style: this.getExpandedStyle() },\n _react2.default.createElement(\n \"td\",\n { colSpan: \"6\" },\n _react2.default.createElement(\n \"table\",\n { className: \"expanded-chart\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title expanded-chart-title\" },\n \"Task Scheduled Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"bar-chart-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"bar-chart\", id: \"scheduled-time-bar-chart-\" + stageId },\n _react2.default.createElement(\"div\", { className: \"loader\" })\n )\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n { style: this.getExpandedStyle() },\n _react2.default.createElement(\n \"td\",\n { colSpan: \"6\" },\n _react2.default.createElement(\n \"table\",\n { className: \"expanded-chart\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title expanded-chart-title\" },\n \"Task CPU Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"bar-chart-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"bar-chart\", id: \"cpu-time-bar-chart-\" + stageId },\n _react2.default.createElement(\"div\", { className: \"loader\" })\n )\n )\n )\n )\n 
)\n )\n )\n )\n )\n )\n );\n }\n }], [{\n key: \"renderHistogram\",\n value: function renderHistogram(histogramId, inputData, numberFormatter) {\n var numBuckets = Math.min(HISTOGRAM_WIDTH, Math.sqrt(inputData.length));\n var dataMin = Math.min.apply(null, inputData);\n var dataMax = Math.max.apply(null, inputData);\n var bucketSize = (dataMax - dataMin) / numBuckets;\n\n var histogramData = [];\n if (bucketSize === 0) {\n histogramData = [inputData.length];\n } else {\n for (var i = 0; i < numBuckets + 1; i++) {\n histogramData.push(0);\n }\n\n for (var _i in inputData) {\n var dataPoint = inputData[_i];\n var bucket = Math.floor((dataPoint - dataMin) / bucketSize);\n histogramData[bucket] = histogramData[bucket] + 1;\n }\n }\n\n var tooltipValueLookups = { 'offset': {} };\n for (var _i2 = 0; _i2 < histogramData.length; _i2++) {\n tooltipValueLookups['offset'][_i2] = numberFormatter(dataMin + _i2 * bucketSize) + \"-\" + numberFormatter(dataMin + (_i2 + 1) * bucketSize);\n }\n\n var stageHistogramProperties = $.extend({}, HISTOGRAM_PROPERTIES, { barWidth: HISTOGRAM_WIDTH / histogramData.length, tooltipValueLookups: tooltipValueLookups });\n $(histogramId).sparkline(histogramData, stageHistogramProperties);\n }\n }]);\n\n return StageSummary;\n}(_react2.default.Component);\n\nvar StageList = function (_React$Component3) {\n _inherits(StageList, _React$Component3);\n\n function StageList() {\n _classCallCheck(this, StageList);\n\n return _possibleConstructorReturn(this, (StageList.__proto__ || Object.getPrototypeOf(StageList)).apply(this, arguments));\n }\n\n _createClass(StageList, [{\n key: \"getStages\",\n value: function getStages(stage) {\n if (stage === undefined || !stage.hasOwnProperty('subStages')) {\n return [];\n }\n\n return [].concat.apply(stage, stage.subStages.map(this.getStages, this));\n }\n }, {\n key: \"render\",\n value: function render() {\n var stages = this.getStages(this.props.outputStage);\n\n if (stages === undefined || stages.length === 0) 
{\n return _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n \"No stage information available.\"\n )\n );\n }\n\n var renderedStages = stages.map(function (stage) {\n return _react2.default.createElement(StageSummary, { key: stage.stageId, stage: stage });\n });\n\n return _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"table\",\n { className: \"table\", id: \"stage-list\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n renderedStages\n )\n )\n )\n );\n }\n }]);\n\n return StageList;\n}(_react2.default.Component);\n\nvar SMALL_SPARKLINE_PROPERTIES = {\n width: '100%',\n height: '57px',\n fillColor: '#3F4552',\n lineColor: '#747F96',\n spotColor: '#1EDCFF',\n tooltipClassname: 'sparkline-tooltip',\n disableHiddenCheck: true\n};\n\nvar TASK_FILTER = {\n ALL: function ALL() {\n return true;\n },\n PLANNED: function PLANNED(state) {\n return state === 'PLANNED';\n },\n RUNNING: function RUNNING(state) {\n return state === 'RUNNING';\n },\n FINISHED: function FINISHED(state) {\n return state === 'FINISHED';\n },\n FAILED: function FAILED(state) {\n return state === 'FAILED' || state === 'ABORTED' || state === 'CANCELED';\n }\n};\n\nvar QueryDetail = exports.QueryDetail = function (_React$Component4) {\n _inherits(QueryDetail, _React$Component4);\n\n function QueryDetail(props) {\n _classCallCheck(this, QueryDetail);\n\n var _this4 = _possibleConstructorReturn(this, (QueryDetail.__proto__ || Object.getPrototypeOf(QueryDetail)).call(this, props));\n\n _this4.state = {\n query: null,\n lastSnapshotStages: null,\n lastSnapshotTasks: null,\n\n lastScheduledTime: 0,\n lastCpuTime: 0,\n lastRowInput: 0,\n lastByteInput: 0,\n lastPhysicalInput: 0,\n lastPhysicalTime: 0,\n\n scheduledTimeRate: [],\n cpuTimeRate: [],\n rowInputRate: [],\n 
byteInputRate: [],\n physicalInputRate: [],\n\n reservedMemory: [],\n\n initialized: false,\n queryEnded: false,\n renderingEnded: false,\n\n lastRefresh: null,\n lastRender: null,\n\n stageRefresh: true,\n taskRefresh: true,\n\n taskFilter: TASK_FILTER.ALL\n };\n\n _this4.refreshLoop = _this4.refreshLoop.bind(_this4);\n return _this4;\n }\n\n _createClass(QueryDetail, [{\n key: \"resetTimer\",\n value: function resetTimer() {\n clearTimeout(this.timeoutId);\n // stop refreshing when query finishes or fails\n if (this.state.query === null || !this.state.queryEnded) {\n // task.info-update-interval is set to 3 seconds by default\n this.timeoutId = setTimeout(this.refreshLoop, 3000);\n }\n }\n }, {\n key: \"refreshLoop\",\n value: function refreshLoop() {\n var _this5 = this;\n\n clearTimeout(this.timeoutId); // to stop multiple series of refreshLoop from going on simultaneously\n var queryId = (0, _utils.getFirstParameter)(window.location.search);\n $.get('/ui/api/query/' + queryId, function (query) {\n var lastSnapshotStages = this.state.lastSnapshotStage;\n if (this.state.stageRefresh) {\n lastSnapshotStages = query.outputStage;\n }\n var lastSnapshotTasks = this.state.lastSnapshotTasks;\n if (this.state.taskRefresh) {\n lastSnapshotTasks = query.outputStage;\n }\n\n var lastRefresh = this.state.lastRefresh;\n var lastScheduledTime = this.state.lastScheduledTime;\n var lastCpuTime = this.state.lastCpuTime;\n var lastRowInput = this.state.lastRowInput;\n var lastByteInput = this.state.lastByteInput;\n var lastPhysicalInput = this.state.lastPhysicalInput;\n var lastPhysicalTime = this.state.lastPhysicalTime;\n var alreadyEnded = this.state.queryEnded;\n var nowMillis = Date.now();\n\n this.setState({\n query: query,\n lastSnapshotStage: lastSnapshotStages,\n lastSnapshotTasks: lastSnapshotTasks,\n\n lastPhysicalTime: (0, _utils.parseDuration)(query.queryStats.physicalInputReadTime),\n lastScheduledTime: (0, 
_utils.parseDuration)(query.queryStats.totalScheduledTime),\n lastCpuTime: (0, _utils.parseDuration)(query.queryStats.totalCpuTime),\n lastRowInput: query.queryStats.processedInputPositions,\n lastByteInput: (0, _utils.parseDataSize)(query.queryStats.processedInputDataSize),\n lastPhysicalInput: (0, _utils.parseDataSize)(query.queryStats.physicalInputDataSize),\n\n initialized: true,\n queryEnded: !!query.finalQueryInfo,\n\n lastRefresh: nowMillis\n });\n\n // i.e. don't show sparklines if we've already decided not to update or if we don't have one previous measurement\n if (alreadyEnded || lastRefresh === null && query.state === \"RUNNING\") {\n this.resetTimer();\n return;\n }\n\n if (lastRefresh === null) {\n lastRefresh = nowMillis - (0, _utils.parseDuration)(query.queryStats.elapsedTime);\n }\n\n var elapsedSecsSinceLastRefresh = (nowMillis - lastRefresh) / 1000.0;\n if (elapsedSecsSinceLastRefresh >= 0) {\n var currentScheduledTimeRate = ((0, _utils.parseDuration)(query.queryStats.totalScheduledTime) - lastScheduledTime) / (elapsedSecsSinceLastRefresh * 1000);\n var currentCpuTimeRate = ((0, _utils.parseDuration)(query.queryStats.totalCpuTime) - lastCpuTime) / (elapsedSecsSinceLastRefresh * 1000);\n var currentPhysicalReadTime = ((0, _utils.parseDuration)(query.queryStats.physicalInputReadTime) - lastPhysicalTime) / 1000;\n var currentRowInputRate = (query.queryStats.processedInputPositions - lastRowInput) / elapsedSecsSinceLastRefresh;\n var currentByteInputRate = ((0, _utils.parseDataSize)(query.queryStats.processedInputDataSize) - lastByteInput) / elapsedSecsSinceLastRefresh;\n var currentPhysicalInputRate = currentPhysicalReadTime > 0 ? 
((0, _utils.parseDataSize)(query.queryStats.physicalInputDataSize) - lastPhysicalInput) / currentPhysicalReadTime : 0;\n\n this.setState({\n scheduledTimeRate: (0, _utils.addToHistory)(currentScheduledTimeRate, this.state.scheduledTimeRate),\n cpuTimeRate: (0, _utils.addToHistory)(currentCpuTimeRate, this.state.cpuTimeRate),\n rowInputRate: (0, _utils.addToHistory)(currentRowInputRate, this.state.rowInputRate),\n byteInputRate: (0, _utils.addToHistory)(currentByteInputRate, this.state.byteInputRate),\n reservedMemory: (0, _utils.addToHistory)((0, _utils.parseDataSize)(query.queryStats.totalMemoryReservation), this.state.reservedMemory),\n physicalInputRate: (0, _utils.addToHistory)(currentPhysicalInputRate, this.state.physicalInputRate)\n });\n }\n this.resetTimer();\n }.bind(this)).error(function () {\n _this5.setState({\n initialized: true\n });\n _this5.resetTimer();\n });\n }\n }, {\n key: \"handleTaskRefreshClick\",\n value: function handleTaskRefreshClick() {\n if (this.state.taskRefresh) {\n this.setState({\n taskRefresh: false,\n lastSnapshotTasks: this.state.query.outputStage\n });\n } else {\n this.setState({\n taskRefresh: true\n });\n }\n }\n }, {\n key: \"renderTaskRefreshButton\",\n value: function renderTaskRefreshButton() {\n if (this.state.taskRefresh) {\n return _react2.default.createElement(\n \"button\",\n { className: \"btn btn-info live-button\", onClick: this.handleTaskRefreshClick.bind(this) },\n \"Auto-Refresh: On\"\n );\n } else {\n return _react2.default.createElement(\n \"button\",\n { className: \"btn btn-info live-button\", onClick: this.handleTaskRefreshClick.bind(this) },\n \"Auto-Refresh: Off\"\n );\n }\n }\n }, {\n key: \"handleStageRefreshClick\",\n value: function handleStageRefreshClick() {\n if (this.state.stageRefresh) {\n this.setState({\n stageRefresh: false,\n lastSnapshotStages: this.state.query.outputStage\n });\n } else {\n this.setState({\n stageRefresh: true\n });\n }\n }\n }, {\n key: \"renderStageRefreshButton\",\n 
value: function renderStageRefreshButton() {\n if (this.state.stageRefresh) {\n return _react2.default.createElement(\n \"button\",\n { className: \"btn btn-info live-button\", onClick: this.handleStageRefreshClick.bind(this) },\n \"Auto-Refresh: On\"\n );\n } else {\n return _react2.default.createElement(\n \"button\",\n { className: \"btn btn-info live-button\", onClick: this.handleStageRefreshClick.bind(this) },\n \"Auto-Refresh: Off\"\n );\n }\n }\n }, {\n key: \"renderTaskFilterListItem\",\n value: function renderTaskFilterListItem(taskFilter, taskFilterText) {\n return _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"a\",\n { href: \"#\", className: this.state.taskFilter === taskFilter ? \"selected\" : \"\", onClick: this.handleTaskFilterClick.bind(this, taskFilter) },\n taskFilterText\n )\n );\n }\n }, {\n key: \"handleTaskFilterClick\",\n value: function handleTaskFilterClick(filter, event) {\n this.setState({\n taskFilter: filter\n });\n event.preventDefault();\n }\n }, {\n key: \"getTasksFromStage\",\n value: function getTasksFromStage(stage) {\n if (stage === undefined || !stage.hasOwnProperty('subStages') || !stage.hasOwnProperty('tasks')) {\n return [];\n }\n\n return [].concat.apply(stage.tasks, stage.subStages.map(this.getTasksFromStage, this));\n }\n }, {\n key: \"componentDidMount\",\n value: function componentDidMount() {\n this.refreshLoop();\n }\n }, {\n key: \"componentDidUpdate\",\n value: function componentDidUpdate() {\n // prevent multiple calls to componentDidUpdate (resulting from calls to setState or otherwise) within the refresh interval from re-rendering sparklines/charts\n if (this.state.lastRender === null || Date.now() - this.state.lastRender >= 1000 || this.state.ended && !this.state.renderingEnded) {\n var renderTimestamp = Date.now();\n $('#scheduled-time-rate-sparkline').sparkline(this.state.scheduledTimeRate, $.extend({}, SMALL_SPARKLINE_PROPERTIES, {\n chartRangeMin: 0,\n numberFormatter: 
_utils.precisionRound\n }));\n $('#cpu-time-rate-sparkline').sparkline(this.state.cpuTimeRate, $.extend({}, SMALL_SPARKLINE_PROPERTIES, { chartRangeMin: 0, numberFormatter: _utils.precisionRound }));\n $('#row-input-rate-sparkline').sparkline(this.state.rowInputRate, $.extend({}, SMALL_SPARKLINE_PROPERTIES, { numberFormatter: _utils.formatCount }));\n $('#byte-input-rate-sparkline').sparkline(this.state.byteInputRate, $.extend({}, SMALL_SPARKLINE_PROPERTIES, { numberFormatter: _utils.formatDataSize }));\n $('#reserved-memory-sparkline').sparkline(this.state.reservedMemory, $.extend({}, SMALL_SPARKLINE_PROPERTIES, { numberFormatter: _utils.formatDataSize }));\n $('#physical-input-rate-sparkline').sparkline(this.state.physicalInputRate, $.extend({}, SMALL_SPARKLINE_PROPERTIES, { numberFormatter: _utils.formatDataSize }));\n\n if (this.state.lastRender === null) {\n $('#query').each(function (i, block) {\n hljs.highlightBlock(block);\n });\n\n $('#prepared-query').each(function (i, block) {\n hljs.highlightBlock(block);\n });\n }\n\n this.setState({\n renderingEnded: this.state.ended,\n lastRender: renderTimestamp\n });\n }\n\n $('[data-toggle=\"tooltip\"]').tooltip();\n new window.ClipboardJS('.copy-button');\n }\n }, {\n key: \"renderTasks\",\n value: function renderTasks() {\n var _this6 = this;\n\n if (this.state.lastSnapshotTasks === null) {\n return;\n }\n\n var tasks = this.getTasksFromStage(this.state.lastSnapshotTasks).filter(function (task) {\n return _this6.state.taskFilter(task.taskStatus.state);\n }, this);\n\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-9\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Tasks\"\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-3\" },\n _react2.default.createElement(\n \"table\",\n { className: \"header-inline-links\" },\n 
_react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"div\",\n { className: \"input-group-btn text-right\" },\n _react2.default.createElement(\n \"button\",\n { type: \"button\", className: \"btn btn-default dropdown-toggle pull-right text-right\", \"data-toggle\": \"dropdown\", \"aria-haspopup\": \"true\",\n \"aria-expanded\": \"false\" },\n \"Show \",\n _react2.default.createElement(\"span\", { className: \"caret\" })\n ),\n _react2.default.createElement(\n \"ul\",\n { className: \"dropdown-menu\" },\n this.renderTaskFilterListItem(TASK_FILTER.ALL, \"All\"),\n this.renderTaskFilterListItem(TASK_FILTER.PLANNED, \"Planned\"),\n this.renderTaskFilterListItem(TASK_FILTER.RUNNING, \"Running\"),\n this.renderTaskFilterListItem(TASK_FILTER.FINISHED, \"Finished\"),\n this.renderTaskFilterListItem(TASK_FILTER.FAILED, \"Aborted/Canceled/Failed\")\n )\n )\n ),\n _react2.default.createElement(\n \"td\",\n null,\n \"\\xA0\\xA0\",\n this.renderTaskRefreshButton()\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(TaskList, { key: this.state.query.queryId, tasks: tasks })\n )\n )\n );\n }\n }, {\n key: \"renderStages\",\n value: function renderStages() {\n if (this.state.lastSnapshotStage === null) {\n return;\n }\n\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-9\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Stages\"\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-3\" },\n _react2.default.createElement(\n \"table\",\n { className: \"header-inline-links\" },\n _react2.default.createElement(\n \"tbody\",\n 
null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n this.renderStageRefreshButton()\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(StageList, { key: this.state.query.queryId, outputStage: this.state.lastSnapshotStage })\n )\n )\n );\n }\n }, {\n key: \"renderPreparedQuery\",\n value: function renderPreparedQuery() {\n var query = this.state.query;\n if (!query.hasOwnProperty('preparedQuery') || query.preparedQuery === null) {\n return;\n }\n\n return _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Prepared Query\",\n _react2.default.createElement(\n \"a\",\n { className: \"btn copy-button\", \"data-clipboard-target\": \"#prepared-query-text\", \"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"Copy to clipboard\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-copy\", \"aria-hidden\": \"true\", alt: \"Copy to clipboard\" })\n )\n ),\n _react2.default.createElement(\n \"pre\",\n { id: \"prepared-query\" },\n _react2.default.createElement(\n \"code\",\n { className: \"lang-sql\", id: \"prepared-query-text\" },\n query.preparedQuery\n )\n )\n );\n }\n }, {\n key: \"renderSessionProperties\",\n value: function renderSessionProperties() {\n var query = this.state.query;\n\n var properties = [];\n for (var property in query.session.systemProperties) {\n if (query.session.systemProperties.hasOwnProperty(property)) {\n properties.push(_react2.default.createElement(\n \"span\",\n null,\n \"- \",\n property + \"=\" + query.session.systemProperties[property],\n \" \",\n _react2.default.createElement(\"br\", null)\n ));\n }\n }\n\n for (var catalog in query.session.catalogProperties) {\n if 
(query.session.catalogProperties.hasOwnProperty(catalog)) {\n for (var _property in query.session.catalogProperties[catalog]) {\n if (query.session.catalogProperties[catalog].hasOwnProperty(_property)) {\n properties.push(_react2.default.createElement(\n \"span\",\n null,\n \"- \",\n catalog + \".\" + _property + \"=\" + query.session.catalogProperties[catalog][_property],\n \" \",\n _react2.default.createElement(\"br\", null)\n ));\n }\n }\n }\n }\n\n return properties;\n }\n }, {\n key: \"renderResourceEstimates\",\n value: function renderResourceEstimates() {\n var query = this.state.query;\n var estimates = query.session.resourceEstimates;\n var renderedEstimates = [];\n\n for (var resource in estimates) {\n if (estimates.hasOwnProperty(resource)) {\n var upperChars = resource.match(/([A-Z])/g) || [];\n var snakeCased = resource;\n for (var i = 0, n = upperChars.length; i < n; i++) {\n snakeCased = snakeCased.replace(new RegExp(upperChars[i]), '_' + upperChars[i].toLowerCase());\n }\n\n renderedEstimates.push(_react2.default.createElement(\n \"span\",\n null,\n \"- \",\n snakeCased + \"=\" + query.session.resourceEstimates[resource],\n \" \",\n _react2.default.createElement(\"br\", null)\n ));\n }\n }\n\n return renderedEstimates;\n }\n }, {\n key: \"renderWarningInfo\",\n value: function renderWarningInfo() {\n var query = this.state.query;\n if (query.warnings.length > 0) {\n return _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Warnings\"\n ),\n _react2.default.createElement(\"hr\", { className: \"h3-hr\" }),\n _react2.default.createElement(\n \"table\",\n { className: \"table\", id: \"warnings-table\" },\n query.warnings.map(function (warning) {\n return _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n warning.warningCode.name\n ),\n 
_react2.default.createElement(\n \"td\",\n null,\n warning.message\n )\n );\n })\n )\n )\n );\n } else {\n return null;\n }\n }\n }, {\n key: \"renderFailureInfo\",\n value: function renderFailureInfo() {\n var query = this.state.query;\n if (query.failureInfo) {\n return _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Error Information\"\n ),\n _react2.default.createElement(\"hr\", { className: \"h3-hr\" }),\n _react2.default.createElement(\n \"table\",\n { className: \"table\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Error Type\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.errorType\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Error Code\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.errorCode.name + \" (\" + this.state.query.errorCode.code + \")\"\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Stack Trace\",\n _react2.default.createElement(\n \"a\",\n { className: \"btn copy-button\", \"data-clipboard-target\": \"#stack-trace\", \"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"Copy to clipboard\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-copy\", \"aria-hidden\": \"true\", alt: \"Copy to clipboard\" })\n )\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n _react2.default.createElement(\n \"pre\",\n { id: \"stack-trace\" },\n QueryDetail.formatStackTrace(query.failureInfo)\n )\n )\n )\n )\n )\n )\n );\n } else 
{\n return \"\";\n }\n }\n }, {\n key: \"render\",\n value: function render() {\n var query = this.state.query;\n\n if (query === null || this.state.initialized === false) {\n var label = _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading...\"\n );\n if (this.state.initialized) {\n label = \"Query not found\";\n }\n return _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h4\",\n null,\n label\n )\n )\n );\n }\n\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(_QueryHeader.QueryHeader, { query: query }),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-6\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Session\"\n ),\n _react2.default.createElement(\"hr\", { className: \"h3-hr\" }),\n _react2.default.createElement(\n \"table\",\n { className: \"table\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"User\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text wrap-text\" },\n _react2.default.createElement(\n \"span\",\n { id: \"query-user\" },\n query.session.user\n ),\n \"\\xA0\\xA0\",\n _react2.default.createElement(\n \"a\",\n { href: \"#\", className: \"copy-button\", \"data-clipboard-target\": \"#query-user\", \"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"Copy to clipboard\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-copy\", \"aria-hidden\": \"true\", alt: \"Copy to clipboard\" })\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n 
\"Principal\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text wrap-text\" },\n query.session.principal\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Source\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text wrap-text\" },\n query.session.source\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Catalog\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.session.catalog\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Schema\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.session.schema\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Client Address\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.session.remoteUserAddress\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Client Tags\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.session.clientTags.join(\", \")\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Session Properties\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text wrap-text\" },\n this.renderSessionProperties()\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Resource Estimates\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text wrap-text\" },\n 
this.renderResourceEstimates()\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-6\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Execution\"\n ),\n _react2.default.createElement(\"hr\", { className: \"h3-hr\" }),\n _react2.default.createElement(\n \"table\",\n { className: \"table\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Resource Group\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text wrap-text\" },\n query.resourceGroupId ? query.resourceGroupId.join(\".\") : \"n/a\"\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Submission Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.formatShortDateTime)(new Date(query.queryStats.createTime))\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Completion Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.queryStats.endTime ? 
(0, _utils.formatShortDateTime)(new Date(query.queryStats.endTime)) : \"\"\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Elapsed Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.queryStats.elapsedTime\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Queued Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.queryStats.queuedTime\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Analysis Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.queryStats.analysisTime\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Planning Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.queryStats.planningTime\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Execution Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.queryStats.executionTime\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-6\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Resource Utilization Summary\"\n ),\n _react2.default.createElement(\"hr\", { className: \"h3-hr\" }),\n _react2.default.createElement(\n \"table\",\n { className: \"table\" },\n _react2.default.createElement(\n 
\"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"CPU Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.queryStats.totalCpuTime\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Scheduled Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.queryStats.totalScheduledTime\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Input Rows\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.formatCount)(query.queryStats.processedInputPositions)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Input Data\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.processedInputDataSize)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Physical Input Rows\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.formatCount)(query.queryStats.physicalInputPositions)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Physical Input Data\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.physicalInputDataSize)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Physical Input Read Time\"\n ),\n _react2.default.createElement(\n 
\"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.physicalInputReadTime)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Internal Network Rows\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.formatCount)(query.queryStats.internalNetworkInputPositions)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Internal Network Data\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.internalNetworkInputDataSize)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Peak User Memory\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.peakUserMemoryReservation)\n )\n ),\n (0, _utils.parseDataSize)(query.queryStats.peakRevocableMemoryReservation) > 0 && _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Peak Revocable Memory\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.peakRevocableMemoryReservation)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Peak Total Memory\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.peakTotalMemoryReservation)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Memory Pool\"\n ),\n 
_react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.memoryPool\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Cumulative User Memory\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.formatDataSizeBytes)(query.queryStats.cumulativeUserMemory / 1000.0) + \" seconds\"\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Output Rows\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.formatCount)(query.queryStats.outputPositions)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Output Data\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.outputDataSize)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Written Rows\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.formatCount)(query.queryStats.writtenPositions)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Logical Written Data\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.logicalWrittenDataSize)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Physical Written Data\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.physicalWrittenDataSize)\n )\n ),\n (0, 
_utils.parseDataSize)(query.queryStats.spilledDataSize) > 0 && _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Spilled Data\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.spilledDataSize)\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-6\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Timeline\"\n ),\n _react2.default.createElement(\"hr\", { className: \"h3-hr\" }),\n _react2.default.createElement(\n \"table\",\n { className: \"table\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Parallelism\"\n ),\n _react2.default.createElement(\n \"td\",\n { rowSpan: \"2\" },\n _react2.default.createElement(\n \"div\",\n { className: \"query-stats-sparkline-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"cpu-time-rate-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n { className: \"tr-noborder\" },\n _react2.default.createElement(\n \"td\",\n { className: \"info-sparkline-text\" },\n (0, _utils.formatCount)(this.state.cpuTimeRate[this.state.cpuTimeRate.length - 1])\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Scheduled Time/s\"\n ),\n _react2.default.createElement(\n \"td\",\n { rowSpan: \"2\" },\n _react2.default.createElement(\n \"div\",\n { className: \"query-stats-sparkline-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"scheduled-time-rate-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { 
className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n { className: \"tr-noborder\" },\n _react2.default.createElement(\n \"td\",\n { className: \"info-sparkline-text\" },\n (0, _utils.formatCount)(this.state.scheduledTimeRate[this.state.scheduledTimeRate.length - 1])\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Input Rows/s\"\n ),\n _react2.default.createElement(\n \"td\",\n { rowSpan: \"2\" },\n _react2.default.createElement(\n \"div\",\n { className: \"query-stats-sparkline-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"row-input-rate-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n { className: \"tr-noborder\" },\n _react2.default.createElement(\n \"td\",\n { className: \"info-sparkline-text\" },\n (0, _utils.formatCount)(this.state.rowInputRate[this.state.rowInputRate.length - 1])\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Input Bytes/s\"\n ),\n _react2.default.createElement(\n \"td\",\n { rowSpan: \"2\" },\n _react2.default.createElement(\n \"div\",\n { className: \"query-stats-sparkline-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"byte-input-rate-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n { className: \"tr-noborder\" },\n _react2.default.createElement(\n \"td\",\n { className: \"info-sparkline-text\" },\n (0, _utils.formatDataSize)(this.state.byteInputRate[this.state.byteInputRate.length - 1])\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n 
_react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Physical Input Bytes/s\"\n ),\n _react2.default.createElement(\n \"td\",\n { rowSpan: \"2\" },\n _react2.default.createElement(\n \"div\",\n { className: \"query-stats-sparkline-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"physical-input-rate-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n { className: \"tr-noborder\" },\n _react2.default.createElement(\n \"td\",\n { className: \"info-sparkline-text\" },\n (0, _utils.formatDataSize)(this.state.physicalInputRate[this.state.physicalInputRate.length - 1])\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Memory Utilization\"\n ),\n _react2.default.createElement(\n \"td\",\n { rowSpan: \"2\" },\n _react2.default.createElement(\n \"div\",\n { className: \"query-stats-sparkline-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"reserved-memory-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n { className: \"tr-noborder\" },\n _react2.default.createElement(\n \"td\",\n { className: \"info-sparkline-text\" },\n (0, _utils.formatDataSize)(this.state.reservedMemory[this.state.reservedMemory.length - 1])\n )\n )\n )\n )\n )\n )\n )\n ),\n this.renderWarningInfo(),\n this.renderFailureInfo(),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Query\",\n _react2.default.createElement(\n \"a\",\n { className: \"btn copy-button\", \"data-clipboard-target\": \"#query-text\", 
\"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"Copy to clipboard\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-copy\", \"aria-hidden\": \"true\", alt: \"Copy to clipboard\" })\n )\n ),\n _react2.default.createElement(\n \"pre\",\n { id: \"query\" },\n _react2.default.createElement(\n \"code\",\n { className: \"lang-sql\", id: \"query-text\" },\n query.query\n )\n )\n ),\n this.renderPreparedQuery()\n ),\n this.renderStages(),\n this.renderTasks()\n );\n }\n }], [{\n key: \"formatStackTrace\",\n value: function formatStackTrace(info) {\n return QueryDetail.formatStackTraceHelper(info, [], \"\", \"\");\n }\n }, {\n key: \"formatStackTraceHelper\",\n value: function formatStackTraceHelper(info, parentStack, prefix, linePrefix) {\n var s = linePrefix + prefix + QueryDetail.failureInfoToString(info) + \"\\n\";\n\n if (info.stack) {\n var sharedStackFrames = 0;\n if (parentStack !== null) {\n sharedStackFrames = QueryDetail.countSharedStackFrames(info.stack, parentStack);\n }\n\n for (var i = 0; i < info.stack.length - sharedStackFrames; i++) {\n s += linePrefix + \"\\tat \" + info.stack[i] + \"\\n\";\n }\n if (sharedStackFrames !== 0) {\n s += linePrefix + \"\\t... 
\" + sharedStackFrames + \" more\" + \"\\n\";\n }\n }\n\n if (info.suppressed) {\n for (var _i3 = 0; _i3 < info.suppressed.length; _i3++) {\n s += QueryDetail.formatStackTraceHelper(info.suppressed[_i3], info.stack, \"Suppressed: \", linePrefix + \"\\t\");\n }\n }\n\n if (info.cause) {\n s += QueryDetail.formatStackTraceHelper(info.cause, info.stack, \"Caused by: \", linePrefix);\n }\n\n return s;\n }\n }, {\n key: \"countSharedStackFrames\",\n value: function countSharedStackFrames(stack, parentStack) {\n var n = 0;\n var minStackLength = Math.min(stack.length, parentStack.length);\n while (n < minStackLength && stack[stack.length - 1 - n] === parentStack[parentStack.length - 1 - n]) {\n n++;\n }\n return n;\n }\n }, {\n key: \"failureInfoToString\",\n value: function failureInfoToString(t) {\n return t.message !== null ? t.type + \": \" + t.message : t.type;\n }\n }]);\n\n return QueryDetail;\n}(_react2.default.Component);\n\n//# sourceURL=webpack:///./components/QueryDetail.jsx?"); +eval("\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.QueryDetail = undefined;\n\nvar _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();\n\nvar _react = __webpack_require__(/*! react */ \"./node_modules/react/index.js\");\n\nvar _react2 = _interopRequireDefault(_react);\n\nvar _reactable = __webpack_require__(/*! 
reactable */ \"./node_modules/reactable/lib/reactable.js\");\n\nvar _reactable2 = _interopRequireDefault(_reactable);\n\nvar _utils = __webpack_require__(/*! ../utils */ \"./utils.js\");\n\nvar _QueryHeader = __webpack_require__(/*! ./QueryHeader */ \"./components/QueryHeader.jsx\");\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return call && (typeof call === \"object\" || typeof call === \"function\") ? call : self; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function, not \" + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? 
Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nvar Table = _reactable2.default.Table,\n Thead = _reactable2.default.Thead,\n Th = _reactable2.default.Th,\n Tr = _reactable2.default.Tr,\n Td = _reactable2.default.Td;\n\nvar TaskList = function (_React$Component) {\n _inherits(TaskList, _React$Component);\n\n function TaskList() {\n _classCallCheck(this, TaskList);\n\n return _possibleConstructorReturn(this, (TaskList.__proto__ || Object.getPrototypeOf(TaskList)).apply(this, arguments));\n }\n\n _createClass(TaskList, [{\n key: \"render\",\n value: function render() {\n var tasks = this.props.tasks;\n\n if (tasks === undefined || tasks.length === 0) {\n return _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h4\",\n null,\n \"No threads in the selected group\"\n )\n )\n );\n }\n\n var showPortNumbers = TaskList.showPortNumbers(tasks);\n\n var renderedTasks = tasks.map(function (task) {\n var elapsedTime = (0, _utils.parseDuration)(task.stats.elapsedTime);\n if (elapsedTime === 0) {\n elapsedTime = Date.now() - Date.parse(task.stats.createTime);\n }\n\n return _react2.default.createElement(\n Tr,\n { key: task.taskStatus.taskId },\n _react2.default.createElement(\n Td,\n { column: \"id\", value: task.taskStatus.taskId },\n 
_react2.default.createElement(\n \"a\",\n { href: \"/ui/api/worker/\" + task.taskStatus.nodeId + \"/task/\" + task.taskStatus.taskId + \"?pretty\" },\n (0, _utils.getTaskIdSuffix)(task.taskStatus.taskId)\n )\n ),\n _react2.default.createElement(\n Td,\n { column: \"host\", value: (0, _utils.getHostname)(task.taskStatus.self) },\n _react2.default.createElement(\n \"a\",\n { href: \"worker.html?\" + task.taskStatus.nodeId, className: \"font-light\", target: \"_blank\" },\n showPortNumbers ? (0, _utils.getHostAndPort)(task.taskStatus.self) : (0, _utils.getHostname)(task.taskStatus.self)\n )\n ),\n _react2.default.createElement(\n Td,\n { column: \"state\", value: TaskList.formatState(task.taskStatus.state, task.stats.fullyBlocked) },\n TaskList.formatState(task.taskStatus.state, task.stats.fullyBlocked)\n ),\n _react2.default.createElement(\n Td,\n { column: \"rows\", value: task.stats.rawInputPositions },\n (0, _utils.formatCount)(task.stats.rawInputPositions)\n ),\n _react2.default.createElement(\n Td,\n { column: \"rowsSec\", value: (0, _utils.computeRate)(task.stats.rawInputPositions, elapsedTime) },\n (0, _utils.formatCount)((0, _utils.computeRate)(task.stats.rawInputPositions, elapsedTime))\n ),\n _react2.default.createElement(\n Td,\n { column: \"bytes\", value: (0, _utils.parseDataSize)(task.stats.rawInputDataSize) },\n (0, _utils.formatDataSizeBytes)((0, _utils.parseDataSize)(task.stats.rawInputDataSize))\n ),\n _react2.default.createElement(\n Td,\n { column: \"bytesSec\", value: (0, _utils.computeRate)((0, _utils.parseDataSize)(task.stats.rawInputDataSize), elapsedTime) },\n (0, _utils.formatDataSizeBytes)((0, _utils.computeRate)((0, _utils.parseDataSize)(task.stats.rawInputDataSize), elapsedTime))\n ),\n _react2.default.createElement(\n Td,\n { column: \"splitsPending\", value: task.stats.queuedDrivers },\n task.stats.queuedDrivers\n ),\n _react2.default.createElement(\n Td,\n { column: \"splitsRunning\", value: task.stats.runningDrivers },\n 
task.stats.runningDrivers\n ),\n _react2.default.createElement(\n Td,\n { column: \"splitsBlocked\", value: task.stats.blockedDrivers },\n task.stats.blockedDrivers\n ),\n _react2.default.createElement(\n Td,\n { column: \"splitsDone\", value: task.stats.completedDrivers },\n task.stats.completedDrivers\n ),\n _react2.default.createElement(\n Td,\n { column: \"elapsedTime\", value: (0, _utils.parseDuration)(task.stats.elapsedTime) },\n task.stats.elapsedTime\n ),\n _react2.default.createElement(\n Td,\n { column: \"cpuTime\", value: (0, _utils.parseDuration)(task.stats.totalCpuTime) },\n task.stats.totalCpuTime\n ),\n _react2.default.createElement(\n Td,\n { column: \"bufferedBytes\", value: task.outputBuffers.totalBufferedBytes },\n (0, _utils.formatDataSizeBytes)(task.outputBuffers.totalBufferedBytes)\n )\n );\n });\n\n return _react2.default.createElement(\n Table,\n { id: \"tasks\", className: \"table table-striped sortable\", sortable: [{\n column: 'id',\n sortFunction: TaskList.compareTaskId\n }, 'host', 'state', 'splitsPending', 'splitsRunning', 'splitsBlocked', 'splitsDone', 'rows', 'rowsSec', 'bytes', 'bytesSec', 'elapsedTime', 'cpuTime', 'bufferedBytes'],\n defaultSort: { column: 'id', direction: 'asc' } },\n _react2.default.createElement(\n Thead,\n null,\n _react2.default.createElement(\n Th,\n { column: \"id\" },\n \"ID\"\n ),\n _react2.default.createElement(\n Th,\n { column: \"host\" },\n \"Host\"\n ),\n _react2.default.createElement(\n Th,\n { column: \"state\" },\n \"State\"\n ),\n _react2.default.createElement(\n Th,\n { column: \"splitsPending\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-pause\", style: _utils.GLYPHICON_HIGHLIGHT, \"data-toggle\": \"tooltip\", \"data-placement\": \"top\",\n title: \"Pending splits\" })\n ),\n _react2.default.createElement(\n Th,\n { column: \"splitsRunning\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-play\", style: 
_utils.GLYPHICON_HIGHLIGHT, \"data-toggle\": \"tooltip\", \"data-placement\": \"top\",\n title: \"Running splits\" })\n ),\n _react2.default.createElement(\n Th,\n { column: \"splitsBlocked\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-bookmark\", style: _utils.GLYPHICON_HIGHLIGHT, \"data-toggle\": \"tooltip\", \"data-placement\": \"top\",\n title: \"Blocked splits\" })\n ),\n _react2.default.createElement(\n Th,\n { column: \"splitsDone\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-ok\", style: _utils.GLYPHICON_HIGHLIGHT, \"data-toggle\": \"tooltip\", \"data-placement\": \"top\",\n title: \"Completed splits\" })\n ),\n _react2.default.createElement(\n Th,\n { column: \"rows\" },\n \"Rows\"\n ),\n _react2.default.createElement(\n Th,\n { column: \"rowsSec\" },\n \"Rows/s\"\n ),\n _react2.default.createElement(\n Th,\n { column: \"bytes\" },\n \"Bytes\"\n ),\n _react2.default.createElement(\n Th,\n { column: \"bytesSec\" },\n \"Bytes/s\"\n ),\n _react2.default.createElement(\n Th,\n { column: \"elapsedTime\" },\n \"Elapsed\"\n ),\n _react2.default.createElement(\n Th,\n { column: \"cpuTime\" },\n \"CPU Time\"\n ),\n _react2.default.createElement(\n Th,\n { column: \"bufferedBytes\" },\n \"Buffered\"\n )\n ),\n renderedTasks\n );\n }\n }], [{\n key: \"removeQueryId\",\n value: function removeQueryId(id) {\n var pos = id.indexOf('.');\n if (pos !== -1) {\n return id.substring(pos + 1);\n }\n return id;\n }\n }, {\n key: \"compareTaskId\",\n value: function compareTaskId(taskA, taskB) {\n var taskIdArrA = TaskList.removeQueryId(taskA).split(\".\");\n var taskIdArrB = TaskList.removeQueryId(taskB).split(\".\");\n\n if (taskIdArrA.length > taskIdArrB.length) {\n return 1;\n }\n for (var i = 0; i < taskIdArrA.length; i++) {\n var anum = Number.parseInt(taskIdArrA[i]);\n var bnum = Number.parseInt(taskIdArrB[i]);\n if (anum !== bnum) {\n return anum > bnum ? 
1 : -1;\n }\n }\n\n return 0;\n }\n }, {\n key: \"showPortNumbers\",\n value: function showPortNumbers(tasks) {\n // check if any host has multiple port numbers\n var hostToPortNumber = {};\n for (var i = 0; i < tasks.length; i++) {\n var taskUri = tasks[i].taskStatus.self;\n var hostname = (0, _utils.getHostname)(taskUri);\n var port = (0, _utils.getPort)(taskUri);\n if (hostname in hostToPortNumber && hostToPortNumber[hostname] !== port) {\n return true;\n }\n hostToPortNumber[hostname] = port;\n }\n\n return false;\n }\n }, {\n key: \"formatState\",\n value: function formatState(state, fullyBlocked) {\n if (fullyBlocked && state === \"RUNNING\") {\n return \"BLOCKED\";\n } else {\n return state;\n }\n }\n }]);\n\n return TaskList;\n}(_react2.default.Component);\n\nvar BAR_CHART_WIDTH = 800;\n\nvar BAR_CHART_PROPERTIES = {\n type: 'bar',\n barSpacing: '0',\n height: '80px',\n barColor: '#747F96',\n zeroColor: '#8997B3',\n chartRangeMin: 0,\n tooltipClassname: 'sparkline-tooltip',\n tooltipFormat: 'Task {{offset:offset}} - {{value}}',\n disableHiddenCheck: true\n};\n\nvar HISTOGRAM_WIDTH = 175;\n\nvar HISTOGRAM_PROPERTIES = {\n type: 'bar',\n barSpacing: '0',\n height: '80px',\n barColor: '#747F96',\n zeroColor: '#747F96',\n zeroAxis: true,\n chartRangeMin: 0,\n tooltipClassname: 'sparkline-tooltip',\n tooltipFormat: '{{offset:offset}} -- {{value}} tasks',\n disableHiddenCheck: true\n};\n\nvar StageSummary = function (_React$Component2) {\n _inherits(StageSummary, _React$Component2);\n\n function StageSummary(props) {\n _classCallCheck(this, StageSummary);\n\n var _this2 = _possibleConstructorReturn(this, (StageSummary.__proto__ || Object.getPrototypeOf(StageSummary)).call(this, props));\n\n _this2.state = {\n expanded: false,\n lastRender: null\n };\n return _this2;\n }\n\n _createClass(StageSummary, [{\n key: \"getExpandedIcon\",\n value: function getExpandedIcon() {\n return this.state.expanded ? 
\"glyphicon-chevron-up\" : \"glyphicon-chevron-down\";\n }\n }, {\n key: \"getExpandedStyle\",\n value: function getExpandedStyle() {\n return this.state.expanded ? {} : { display: \"none\" };\n }\n }, {\n key: \"toggleExpanded\",\n value: function toggleExpanded() {\n this.setState({\n expanded: !this.state.expanded\n });\n }\n }, {\n key: \"componentDidUpdate\",\n value: function componentDidUpdate() {\n var stage = this.props.stage;\n var numTasks = stage.tasks.length;\n\n // sort the x-axis\n stage.tasks.sort(function (taskA, taskB) {\n return (0, _utils.getTaskNumber)(taskA.taskStatus.taskId) - (0, _utils.getTaskNumber)(taskB.taskStatus.taskId);\n });\n\n var scheduledTimes = stage.tasks.map(function (task) {\n return (0, _utils.parseDuration)(task.stats.totalScheduledTime);\n });\n var cpuTimes = stage.tasks.map(function (task) {\n return (0, _utils.parseDuration)(task.stats.totalCpuTime);\n });\n\n // prevent multiple calls to componentDidUpdate (resulting from calls to setState or otherwise) within the refresh interval from re-rendering sparklines/charts\n if (this.state.lastRender === null || Date.now() - this.state.lastRender >= 1000) {\n var renderTimestamp = Date.now();\n var stageId = (0, _utils.getStageNumber)(stage.stageId);\n\n StageSummary.renderHistogram('#scheduled-time-histogram-' + stageId, scheduledTimes, _utils.formatDuration);\n StageSummary.renderHistogram('#cpu-time-histogram-' + stageId, cpuTimes, _utils.formatDuration);\n\n if (this.state.expanded) {\n // this needs to be a string otherwise it will also be passed to numberFormatter\n var tooltipValueLookups = { 'offset': {} };\n for (var i = 0; i < numTasks; i++) {\n tooltipValueLookups['offset'][i] = (0, _utils.getStageNumber)(stage.stageId) + \".\" + i;\n }\n\n var stageBarChartProperties = $.extend({}, BAR_CHART_PROPERTIES, { barWidth: BAR_CHART_WIDTH / numTasks, tooltipValueLookups: tooltipValueLookups });\n\n $('#scheduled-time-bar-chart-' + stageId).sparkline(scheduledTimes, 
$.extend({}, stageBarChartProperties, { numberFormatter: _utils.formatDuration }));\n $('#cpu-time-bar-chart-' + stageId).sparkline(cpuTimes, $.extend({}, stageBarChartProperties, { numberFormatter: _utils.formatDuration }));\n }\n\n this.setState({\n lastRender: renderTimestamp\n });\n }\n }\n }, {\n key: \"render\",\n value: function render() {\n var stage = this.props.stage;\n if (stage === undefined || !stage.hasOwnProperty('plan')) {\n return _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n \"Information about this stage is unavailable.\"\n )\n );\n }\n\n var totalBufferedBytes = stage.tasks.map(function (task) {\n return task.outputBuffers.totalBufferedBytes;\n }).reduce(function (a, b) {\n return a + b;\n }, 0);\n\n var stageId = (0, _utils.getStageNumber)(stage.stageId);\n\n return _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-id\" },\n _react2.default.createElement(\n \"div\",\n { className: \"stage-state-color\", style: { borderLeftColor: (0, _utils.getStageStateColor)(stage) } },\n stageId\n )\n ),\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"table\",\n { className: \"table single-stage-table\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"table\",\n { className: \"stage-table stage-table-time\" },\n _react2.default.createElement(\n \"thead\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"th\",\n { className: \"stage-table-stat-title stage-table-stat-header\" },\n \"Time\"\n ),\n _react2.default.createElement(\"th\", null)\n )\n ),\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: 
\"stage-table-stat-title\" },\n \"Scheduled\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-text\" },\n stage.stageStats.totalScheduledTime\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title\" },\n \"Blocked\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-text\" },\n stage.stageStats.totalBlockedTime\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title\" },\n \"CPU\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-text\" },\n stage.stageStats.totalCpuTime\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"table\",\n { className: \"stage-table stage-table-memory\" },\n _react2.default.createElement(\n \"thead\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"th\",\n { className: \"stage-table-stat-title stage-table-stat-header\" },\n \"Memory\"\n ),\n _react2.default.createElement(\"th\", null)\n )\n ),\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title\" },\n \"Cumulative\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-text\" },\n (0, _utils.formatDataSizeBytes)(stage.stageStats.cumulativeUserMemory / 1000)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title\" },\n \"Current\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-text\" },\n (0, _utils.parseAndFormatDataSize)(stage.stageStats.userMemoryReservation)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n 
_react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title\" },\n \"Buffers\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-text\" },\n (0, _utils.formatDataSize)(totalBufferedBytes)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title\" },\n \"Peak\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-text\" },\n (0, _utils.parseAndFormatDataSize)(stage.stageStats.peakUserMemoryReservation)\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"table\",\n { className: \"stage-table stage-table-tasks\" },\n _react2.default.createElement(\n \"thead\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"th\",\n { className: \"stage-table-stat-title stage-table-stat-header\" },\n \"Tasks\"\n ),\n _react2.default.createElement(\"th\", null)\n )\n ),\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title\" },\n \"Pending\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-text\" },\n stage.tasks.filter(function (task) {\n return task.taskStatus.state === \"PLANNED\";\n }).length\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title\" },\n \"Running\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-text\" },\n stage.tasks.filter(function (task) {\n return task.taskStatus.state === \"RUNNING\";\n }).length\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title\" },\n \"Blocked\"\n ),\n _react2.default.createElement(\n \"td\",\n { 
className: \"stage-table-stat-text\" },\n stage.tasks.filter(function (task) {\n return task.stats.fullyBlocked;\n }).length\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title\" },\n \"Total\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-text\" },\n stage.tasks.length\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"table\",\n { className: \"stage-table histogram-table\" },\n _react2.default.createElement(\n \"thead\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"th\",\n { className: \"stage-table-stat-title stage-table-chart-header\" },\n \"Scheduled Time Skew\"\n )\n )\n ),\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"histogram-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"histogram\", id: \"scheduled-time-histogram-\" + stageId },\n _react2.default.createElement(\"div\", { className: \"loader\" })\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"table\",\n { className: \"stage-table histogram-table\" },\n _react2.default.createElement(\n \"thead\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"th\",\n { className: \"stage-table-stat-title stage-table-chart-header\" },\n \"CPU Time Skew\"\n )\n )\n ),\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"histogram-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"histogram\", id: \"cpu-time-histogram-\" + stageId },\n _react2.default.createElement(\"div\", { className: \"loader\" 
})\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"expand-charts-container\" },\n _react2.default.createElement(\n \"a\",\n { onClick: this.toggleExpanded.bind(this), className: \"expand-charts-button\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon \" + this.getExpandedIcon(), style: _utils.GLYPHICON_HIGHLIGHT, \"data-toggle\": \"tooltip\", \"data-placement\": \"top\", title: \"More\" })\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n { style: this.getExpandedStyle() },\n _react2.default.createElement(\n \"td\",\n { colSpan: \"6\" },\n _react2.default.createElement(\n \"table\",\n { className: \"expanded-chart\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title expanded-chart-title\" },\n \"Task Scheduled Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"bar-chart-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"bar-chart\", id: \"scheduled-time-bar-chart-\" + stageId },\n _react2.default.createElement(\"div\", { className: \"loader\" })\n )\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n { style: this.getExpandedStyle() },\n _react2.default.createElement(\n \"td\",\n { colSpan: \"6\" },\n _react2.default.createElement(\n \"table\",\n { className: \"expanded-chart\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"stage-table-stat-title expanded-chart-title\" },\n \"Task CPU Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"bar-chart-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"bar-chart\", id: \"cpu-time-bar-chart-\" + stageId },\n _react2.default.createElement(\"div\", { className: \"loader\" })\n )\n )\n )\n )\n 
)\n )\n )\n )\n )\n )\n );\n }\n }], [{\n key: \"renderHistogram\",\n value: function renderHistogram(histogramId, inputData, numberFormatter) {\n var numBuckets = Math.min(HISTOGRAM_WIDTH, Math.sqrt(inputData.length));\n var dataMin = Math.min.apply(null, inputData);\n var dataMax = Math.max.apply(null, inputData);\n var bucketSize = (dataMax - dataMin) / numBuckets;\n\n var histogramData = [];\n if (bucketSize === 0) {\n histogramData = [inputData.length];\n } else {\n for (var i = 0; i < numBuckets + 1; i++) {\n histogramData.push(0);\n }\n\n for (var _i in inputData) {\n var dataPoint = inputData[_i];\n var bucket = Math.floor((dataPoint - dataMin) / bucketSize);\n histogramData[bucket] = histogramData[bucket] + 1;\n }\n }\n\n var tooltipValueLookups = { 'offset': {} };\n for (var _i2 = 0; _i2 < histogramData.length; _i2++) {\n tooltipValueLookups['offset'][_i2] = numberFormatter(dataMin + _i2 * bucketSize) + \"-\" + numberFormatter(dataMin + (_i2 + 1) * bucketSize);\n }\n\n var stageHistogramProperties = $.extend({}, HISTOGRAM_PROPERTIES, { barWidth: HISTOGRAM_WIDTH / histogramData.length, tooltipValueLookups: tooltipValueLookups });\n $(histogramId).sparkline(histogramData, stageHistogramProperties);\n }\n }]);\n\n return StageSummary;\n}(_react2.default.Component);\n\nvar StageList = function (_React$Component3) {\n _inherits(StageList, _React$Component3);\n\n function StageList() {\n _classCallCheck(this, StageList);\n\n return _possibleConstructorReturn(this, (StageList.__proto__ || Object.getPrototypeOf(StageList)).apply(this, arguments));\n }\n\n _createClass(StageList, [{\n key: \"getStages\",\n value: function getStages(stage) {\n if (stage === undefined || !stage.hasOwnProperty('subStages')) {\n return [];\n }\n\n return [].concat.apply(stage, stage.subStages.map(this.getStages, this));\n }\n }, {\n key: \"render\",\n value: function render() {\n var stages = this.getStages(this.props.outputStage);\n\n if (stages === undefined || stages.length === 0) 
{\n return _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n \"No stage information available.\"\n )\n );\n }\n\n var renderedStages = stages.map(function (stage) {\n return _react2.default.createElement(StageSummary, { key: stage.stageId, stage: stage });\n });\n\n return _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"table\",\n { className: \"table\", id: \"stage-list\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n renderedStages\n )\n )\n )\n );\n }\n }]);\n\n return StageList;\n}(_react2.default.Component);\n\nvar SMALL_SPARKLINE_PROPERTIES = {\n width: '100%',\n height: '57px',\n fillColor: '#3F4552',\n lineColor: '#747F96',\n spotColor: '#1EDCFF',\n tooltipClassname: 'sparkline-tooltip',\n disableHiddenCheck: true\n};\n\nvar TASK_FILTER = {\n ALL: function ALL() {\n return true;\n },\n PLANNED: function PLANNED(state) {\n return state === 'PLANNED';\n },\n RUNNING: function RUNNING(state) {\n return state === 'RUNNING';\n },\n FINISHED: function FINISHED(state) {\n return state === 'FINISHED';\n },\n FAILED: function FAILED(state) {\n return state === 'FAILED' || state === 'ABORTED' || state === 'CANCELED';\n }\n};\n\nvar QueryDetail = exports.QueryDetail = function (_React$Component4) {\n _inherits(QueryDetail, _React$Component4);\n\n function QueryDetail(props) {\n _classCallCheck(this, QueryDetail);\n\n var _this4 = _possibleConstructorReturn(this, (QueryDetail.__proto__ || Object.getPrototypeOf(QueryDetail)).call(this, props));\n\n _this4.state = {\n query: null,\n lastSnapshotStages: null,\n lastSnapshotTasks: null,\n\n lastScheduledTime: 0,\n lastCpuTime: 0,\n lastRowInput: 0,\n lastByteInput: 0,\n lastPhysicalInput: 0,\n lastPhysicalTime: 0,\n\n scheduledTimeRate: [],\n cpuTimeRate: [],\n rowInputRate: [],\n 
byteInputRate: [],\n physicalInputRate: [],\n\n reservedMemory: [],\n\n initialized: false,\n queryEnded: false,\n renderingEnded: false,\n\n lastRefresh: null,\n lastRender: null,\n\n stageRefresh: true,\n taskRefresh: true,\n\n taskFilter: TASK_FILTER.ALL\n };\n\n _this4.refreshLoop = _this4.refreshLoop.bind(_this4);\n return _this4;\n }\n\n _createClass(QueryDetail, [{\n key: \"resetTimer\",\n value: function resetTimer() {\n clearTimeout(this.timeoutId);\n // stop refreshing when query finishes or fails\n if (this.state.query === null || !this.state.queryEnded) {\n // task.info-update-interval is set to 3 seconds by default\n this.timeoutId = setTimeout(this.refreshLoop, 3000);\n }\n }\n }, {\n key: \"refreshLoop\",\n value: function refreshLoop() {\n var _this5 = this;\n\n clearTimeout(this.timeoutId); // to stop multiple series of refreshLoop from going on simultaneously\n var queryId = (0, _utils.getFirstParameter)(window.location.search);\n $.get('/ui/api/query/' + queryId, function (query) {\n var lastSnapshotStages = this.state.lastSnapshotStage;\n if (this.state.stageRefresh) {\n lastSnapshotStages = query.outputStage;\n }\n var lastSnapshotTasks = this.state.lastSnapshotTasks;\n if (this.state.taskRefresh) {\n lastSnapshotTasks = query.outputStage;\n }\n\n var lastRefresh = this.state.lastRefresh;\n var lastScheduledTime = this.state.lastScheduledTime;\n var lastCpuTime = this.state.lastCpuTime;\n var lastRowInput = this.state.lastRowInput;\n var lastByteInput = this.state.lastByteInput;\n var lastPhysicalInput = this.state.lastPhysicalInput;\n var lastPhysicalTime = this.state.lastPhysicalTime;\n var alreadyEnded = this.state.queryEnded;\n var nowMillis = Date.now();\n\n this.setState({\n query: query,\n lastSnapshotStage: lastSnapshotStages,\n lastSnapshotTasks: lastSnapshotTasks,\n\n lastPhysicalTime: (0, _utils.parseDuration)(query.queryStats.physicalInputReadTime),\n lastScheduledTime: (0, 
_utils.parseDuration)(query.queryStats.totalScheduledTime),\n lastCpuTime: (0, _utils.parseDuration)(query.queryStats.totalCpuTime),\n lastRowInput: query.queryStats.processedInputPositions,\n lastByteInput: (0, _utils.parseDataSize)(query.queryStats.processedInputDataSize),\n lastPhysicalInput: (0, _utils.parseDataSize)(query.queryStats.physicalInputDataSize),\n\n initialized: true,\n queryEnded: !!query.finalQueryInfo,\n\n lastRefresh: nowMillis\n });\n\n // i.e. don't show sparklines if we've already decided not to update or if we don't have one previous measurement\n if (alreadyEnded || lastRefresh === null && query.state === \"RUNNING\") {\n this.resetTimer();\n return;\n }\n\n if (lastRefresh === null) {\n lastRefresh = nowMillis - (0, _utils.parseDuration)(query.queryStats.elapsedTime);\n }\n\n var elapsedSecsSinceLastRefresh = (nowMillis - lastRefresh) / 1000.0;\n if (elapsedSecsSinceLastRefresh >= 0) {\n var currentScheduledTimeRate = ((0, _utils.parseDuration)(query.queryStats.totalScheduledTime) - lastScheduledTime) / (elapsedSecsSinceLastRefresh * 1000);\n var currentCpuTimeRate = ((0, _utils.parseDuration)(query.queryStats.totalCpuTime) - lastCpuTime) / (elapsedSecsSinceLastRefresh * 1000);\n var currentPhysicalReadTime = ((0, _utils.parseDuration)(query.queryStats.physicalInputReadTime) - lastPhysicalTime) / 1000;\n var currentRowInputRate = (query.queryStats.processedInputPositions - lastRowInput) / elapsedSecsSinceLastRefresh;\n var currentByteInputRate = ((0, _utils.parseDataSize)(query.queryStats.processedInputDataSize) - lastByteInput) / elapsedSecsSinceLastRefresh;\n var currentPhysicalInputRate = currentPhysicalReadTime > 0 ? 
((0, _utils.parseDataSize)(query.queryStats.physicalInputDataSize) - lastPhysicalInput) / currentPhysicalReadTime : 0;\n\n this.setState({\n scheduledTimeRate: (0, _utils.addToHistory)(currentScheduledTimeRate, this.state.scheduledTimeRate),\n cpuTimeRate: (0, _utils.addToHistory)(currentCpuTimeRate, this.state.cpuTimeRate),\n rowInputRate: (0, _utils.addToHistory)(currentRowInputRate, this.state.rowInputRate),\n byteInputRate: (0, _utils.addToHistory)(currentByteInputRate, this.state.byteInputRate),\n reservedMemory: (0, _utils.addToHistory)((0, _utils.parseDataSize)(query.queryStats.totalMemoryReservation), this.state.reservedMemory),\n physicalInputRate: (0, _utils.addToHistory)(currentPhysicalInputRate, this.state.physicalInputRate)\n });\n }\n this.resetTimer();\n }.bind(this)).fail(function () {\n _this5.setState({\n initialized: true\n });\n _this5.resetTimer();\n });\n }\n }, {\n key: \"handleTaskRefreshClick\",\n value: function handleTaskRefreshClick() {\n if (this.state.taskRefresh) {\n this.setState({\n taskRefresh: false,\n lastSnapshotTasks: this.state.query.outputStage\n });\n } else {\n this.setState({\n taskRefresh: true\n });\n }\n }\n }, {\n key: \"renderTaskRefreshButton\",\n value: function renderTaskRefreshButton() {\n if (this.state.taskRefresh) {\n return _react2.default.createElement(\n \"button\",\n { className: \"btn btn-info live-button\", onClick: this.handleTaskRefreshClick.bind(this) },\n \"Auto-Refresh: On\"\n );\n } else {\n return _react2.default.createElement(\n \"button\",\n { className: \"btn btn-info live-button\", onClick: this.handleTaskRefreshClick.bind(this) },\n \"Auto-Refresh: Off\"\n );\n }\n }\n }, {\n key: \"handleStageRefreshClick\",\n value: function handleStageRefreshClick() {\n if (this.state.stageRefresh) {\n this.setState({\n stageRefresh: false,\n lastSnapshotStages: this.state.query.outputStage\n });\n } else {\n this.setState({\n stageRefresh: true\n });\n }\n }\n }, {\n key: \"renderStageRefreshButton\",\n 
value: function renderStageRefreshButton() {\n if (this.state.stageRefresh) {\n return _react2.default.createElement(\n \"button\",\n { className: \"btn btn-info live-button\", onClick: this.handleStageRefreshClick.bind(this) },\n \"Auto-Refresh: On\"\n );\n } else {\n return _react2.default.createElement(\n \"button\",\n { className: \"btn btn-info live-button\", onClick: this.handleStageRefreshClick.bind(this) },\n \"Auto-Refresh: Off\"\n );\n }\n }\n }, {\n key: \"renderTaskFilterListItem\",\n value: function renderTaskFilterListItem(taskFilter, taskFilterText) {\n return _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"a\",\n { href: \"#\", className: this.state.taskFilter === taskFilter ? \"selected\" : \"\", onClick: this.handleTaskFilterClick.bind(this, taskFilter) },\n taskFilterText\n )\n );\n }\n }, {\n key: \"handleTaskFilterClick\",\n value: function handleTaskFilterClick(filter, event) {\n this.setState({\n taskFilter: filter\n });\n event.preventDefault();\n }\n }, {\n key: \"getTasksFromStage\",\n value: function getTasksFromStage(stage) {\n if (stage === undefined || !stage.hasOwnProperty('subStages') || !stage.hasOwnProperty('tasks')) {\n return [];\n }\n\n return [].concat.apply(stage.tasks, stage.subStages.map(this.getTasksFromStage, this));\n }\n }, {\n key: \"componentDidMount\",\n value: function componentDidMount() {\n this.refreshLoop();\n }\n }, {\n key: \"componentDidUpdate\",\n value: function componentDidUpdate() {\n // prevent multiple calls to componentDidUpdate (resulting from calls to setState or otherwise) within the refresh interval from re-rendering sparklines/charts\n if (this.state.lastRender === null || Date.now() - this.state.lastRender >= 1000 || this.state.ended && !this.state.renderingEnded) {\n var renderTimestamp = Date.now();\n $('#scheduled-time-rate-sparkline').sparkline(this.state.scheduledTimeRate, $.extend({}, SMALL_SPARKLINE_PROPERTIES, {\n chartRangeMin: 0,\n numberFormatter: 
_utils.precisionRound\n }));\n $('#cpu-time-rate-sparkline').sparkline(this.state.cpuTimeRate, $.extend({}, SMALL_SPARKLINE_PROPERTIES, { chartRangeMin: 0, numberFormatter: _utils.precisionRound }));\n $('#row-input-rate-sparkline').sparkline(this.state.rowInputRate, $.extend({}, SMALL_SPARKLINE_PROPERTIES, { numberFormatter: _utils.formatCount }));\n $('#byte-input-rate-sparkline').sparkline(this.state.byteInputRate, $.extend({}, SMALL_SPARKLINE_PROPERTIES, { numberFormatter: _utils.formatDataSize }));\n $('#reserved-memory-sparkline').sparkline(this.state.reservedMemory, $.extend({}, SMALL_SPARKLINE_PROPERTIES, { numberFormatter: _utils.formatDataSize }));\n $('#physical-input-rate-sparkline').sparkline(this.state.physicalInputRate, $.extend({}, SMALL_SPARKLINE_PROPERTIES, { numberFormatter: _utils.formatDataSize }));\n\n if (this.state.lastRender === null) {\n $('#query').each(function (i, block) {\n hljs.highlightBlock(block);\n });\n\n $('#prepared-query').each(function (i, block) {\n hljs.highlightBlock(block);\n });\n }\n\n this.setState({\n renderingEnded: this.state.ended,\n lastRender: renderTimestamp\n });\n }\n\n $('[data-toggle=\"tooltip\"]').tooltip();\n new window.ClipboardJS('.copy-button');\n }\n }, {\n key: \"renderTasks\",\n value: function renderTasks() {\n var _this6 = this;\n\n if (this.state.lastSnapshotTasks === null) {\n return;\n }\n\n var tasks = this.getTasksFromStage(this.state.lastSnapshotTasks).filter(function (task) {\n return _this6.state.taskFilter(task.taskStatus.state);\n }, this);\n\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-9\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Tasks\"\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-3\" },\n _react2.default.createElement(\n \"table\",\n { className: \"header-inline-links\" },\n 
_react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"div\",\n { className: \"input-group-btn text-right\" },\n _react2.default.createElement(\n \"button\",\n { type: \"button\", className: \"btn btn-default dropdown-toggle pull-right text-right\", \"data-toggle\": \"dropdown\", \"aria-haspopup\": \"true\",\n \"aria-expanded\": \"false\" },\n \"Show \",\n _react2.default.createElement(\"span\", { className: \"caret\" })\n ),\n _react2.default.createElement(\n \"ul\",\n { className: \"dropdown-menu\" },\n this.renderTaskFilterListItem(TASK_FILTER.ALL, \"All\"),\n this.renderTaskFilterListItem(TASK_FILTER.PLANNED, \"Planned\"),\n this.renderTaskFilterListItem(TASK_FILTER.RUNNING, \"Running\"),\n this.renderTaskFilterListItem(TASK_FILTER.FINISHED, \"Finished\"),\n this.renderTaskFilterListItem(TASK_FILTER.FAILED, \"Aborted/Canceled/Failed\")\n )\n )\n ),\n _react2.default.createElement(\n \"td\",\n null,\n \"\\xA0\\xA0\",\n this.renderTaskRefreshButton()\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(TaskList, { key: this.state.query.queryId, tasks: tasks })\n )\n )\n );\n }\n }, {\n key: \"renderStages\",\n value: function renderStages() {\n if (this.state.lastSnapshotStage === null) {\n return;\n }\n\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-9\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Stages\"\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-3\" },\n _react2.default.createElement(\n \"table\",\n { className: \"header-inline-links\" },\n _react2.default.createElement(\n \"tbody\",\n 
null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n this.renderStageRefreshButton()\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(StageList, { key: this.state.query.queryId, outputStage: this.state.lastSnapshotStage })\n )\n )\n );\n }\n }, {\n key: \"renderPreparedQuery\",\n value: function renderPreparedQuery() {\n var query = this.state.query;\n if (!query.hasOwnProperty('preparedQuery') || query.preparedQuery === null) {\n return;\n }\n\n return _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Prepared Query\",\n _react2.default.createElement(\n \"a\",\n { className: \"btn copy-button\", \"data-clipboard-target\": \"#prepared-query-text\", \"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"Copy to clipboard\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-copy\", \"aria-hidden\": \"true\", alt: \"Copy to clipboard\" })\n )\n ),\n _react2.default.createElement(\n \"pre\",\n { id: \"prepared-query\" },\n _react2.default.createElement(\n \"code\",\n { className: \"lang-sql\", id: \"prepared-query-text\" },\n query.preparedQuery\n )\n )\n );\n }\n }, {\n key: \"renderSessionProperties\",\n value: function renderSessionProperties() {\n var query = this.state.query;\n\n var properties = [];\n for (var property in query.session.systemProperties) {\n if (query.session.systemProperties.hasOwnProperty(property)) {\n properties.push(_react2.default.createElement(\n \"span\",\n null,\n \"- \",\n property + \"=\" + query.session.systemProperties[property],\n \" \",\n _react2.default.createElement(\"br\", null)\n ));\n }\n }\n\n for (var catalog in query.session.catalogProperties) {\n if 
(query.session.catalogProperties.hasOwnProperty(catalog)) {\n for (var _property in query.session.catalogProperties[catalog]) {\n if (query.session.catalogProperties[catalog].hasOwnProperty(_property)) {\n properties.push(_react2.default.createElement(\n \"span\",\n null,\n \"- \",\n catalog + \".\" + _property + \"=\" + query.session.catalogProperties[catalog][_property],\n \" \",\n _react2.default.createElement(\"br\", null)\n ));\n }\n }\n }\n }\n\n return properties;\n }\n }, {\n key: \"renderResourceEstimates\",\n value: function renderResourceEstimates() {\n var query = this.state.query;\n var estimates = query.session.resourceEstimates;\n var renderedEstimates = [];\n\n for (var resource in estimates) {\n if (estimates.hasOwnProperty(resource)) {\n var upperChars = resource.match(/([A-Z])/g) || [];\n var snakeCased = resource;\n for (var i = 0, n = upperChars.length; i < n; i++) {\n snakeCased = snakeCased.replace(new RegExp(upperChars[i]), '_' + upperChars[i].toLowerCase());\n }\n\n renderedEstimates.push(_react2.default.createElement(\n \"span\",\n null,\n \"- \",\n snakeCased + \"=\" + query.session.resourceEstimates[resource],\n \" \",\n _react2.default.createElement(\"br\", null)\n ));\n }\n }\n\n return renderedEstimates;\n }\n }, {\n key: \"renderWarningInfo\",\n value: function renderWarningInfo() {\n var query = this.state.query;\n if (query.warnings.length > 0) {\n return _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Warnings\"\n ),\n _react2.default.createElement(\"hr\", { className: \"h3-hr\" }),\n _react2.default.createElement(\n \"table\",\n { className: \"table\", id: \"warnings-table\" },\n query.warnings.map(function (warning) {\n return _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n warning.warningCode.name\n ),\n 
_react2.default.createElement(\n \"td\",\n null,\n warning.message\n )\n );\n })\n )\n )\n );\n } else {\n return null;\n }\n }\n }, {\n key: \"renderFailureInfo\",\n value: function renderFailureInfo() {\n var query = this.state.query;\n if (query.failureInfo) {\n return _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Error Information\"\n ),\n _react2.default.createElement(\"hr\", { className: \"h3-hr\" }),\n _react2.default.createElement(\n \"table\",\n { className: \"table\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Error Type\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.errorType\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Error Code\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.errorCode.name + \" (\" + this.state.query.errorCode.code + \")\"\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Stack Trace\",\n _react2.default.createElement(\n \"a\",\n { className: \"btn copy-button\", \"data-clipboard-target\": \"#stack-trace\", \"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"Copy to clipboard\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-copy\", \"aria-hidden\": \"true\", alt: \"Copy to clipboard\" })\n )\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n _react2.default.createElement(\n \"pre\",\n { id: \"stack-trace\" },\n QueryDetail.formatStackTrace(query.failureInfo)\n )\n )\n )\n )\n )\n )\n );\n } else 
{\n return \"\";\n }\n }\n }, {\n key: \"render\",\n value: function render() {\n var query = this.state.query;\n\n if (query === null || this.state.initialized === false) {\n var label = _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading...\"\n );\n if (this.state.initialized) {\n label = \"Query not found\";\n }\n return _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h4\",\n null,\n label\n )\n )\n );\n }\n\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(_QueryHeader.QueryHeader, { query: query }),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-6\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Session\"\n ),\n _react2.default.createElement(\"hr\", { className: \"h3-hr\" }),\n _react2.default.createElement(\n \"table\",\n { className: \"table\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"User\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text wrap-text\" },\n _react2.default.createElement(\n \"span\",\n { id: \"query-user\" },\n query.session.user\n ),\n \"\\xA0\\xA0\",\n _react2.default.createElement(\n \"a\",\n { href: \"#\", className: \"copy-button\", \"data-clipboard-target\": \"#query-user\", \"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"Copy to clipboard\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-copy\", \"aria-hidden\": \"true\", alt: \"Copy to clipboard\" })\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n 
\"Principal\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text wrap-text\" },\n query.session.principal\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Source\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text wrap-text\" },\n query.session.source\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Catalog\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.session.catalog\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Schema\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.session.schema\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Client Address\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.session.remoteUserAddress\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Client Tags\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.session.clientTags.join(\", \")\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Session Properties\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text wrap-text\" },\n this.renderSessionProperties()\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Resource Estimates\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text wrap-text\" },\n 
this.renderResourceEstimates()\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-6\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Execution\"\n ),\n _react2.default.createElement(\"hr\", { className: \"h3-hr\" }),\n _react2.default.createElement(\n \"table\",\n { className: \"table\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Resource Group\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text wrap-text\" },\n query.resourceGroupId ? query.resourceGroupId.join(\".\") : \"n/a\"\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Submission Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.formatShortDateTime)(new Date(query.queryStats.createTime))\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Completion Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.queryStats.endTime ? 
(0, _utils.formatShortDateTime)(new Date(query.queryStats.endTime)) : \"\"\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Elapsed Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.queryStats.elapsedTime\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Queued Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.queryStats.queuedTime\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Analysis Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.queryStats.analysisTime\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Planning Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.queryStats.planningTime\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Execution Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.queryStats.executionTime\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-6\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Resource Utilization Summary\"\n ),\n _react2.default.createElement(\"hr\", { className: \"h3-hr\" }),\n _react2.default.createElement(\n \"table\",\n { className: \"table\" },\n _react2.default.createElement(\n 
\"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"CPU Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.queryStats.totalCpuTime\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Scheduled Time\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.queryStats.totalScheduledTime\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Input Rows\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.formatCount)(query.queryStats.processedInputPositions)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Input Data\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.processedInputDataSize)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Physical Input Rows\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.formatCount)(query.queryStats.physicalInputPositions)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Physical Input Data\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.physicalInputDataSize)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Physical Input Read Time\"\n ),\n _react2.default.createElement(\n 
\"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.physicalInputReadTime)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Internal Network Rows\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.formatCount)(query.queryStats.internalNetworkInputPositions)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Internal Network Data\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.internalNetworkInputDataSize)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Peak User Memory\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.peakUserMemoryReservation)\n )\n ),\n (0, _utils.parseDataSize)(query.queryStats.peakRevocableMemoryReservation) > 0 && _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Peak Revocable Memory\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.peakRevocableMemoryReservation)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Peak Total Memory\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.peakTotalMemoryReservation)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Memory Pool\"\n ),\n 
_react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n query.memoryPool\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Cumulative User Memory\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.formatDataSizeBytes)(query.queryStats.cumulativeUserMemory / 1000.0) + \" seconds\"\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Output Rows\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.formatCount)(query.queryStats.outputPositions)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Output Data\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.outputDataSize)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Written Rows\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.formatCount)(query.queryStats.writtenPositions)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Logical Written Data\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.logicalWrittenDataSize)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Physical Written Data\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.physicalWrittenDataSize)\n )\n ),\n (0, 
_utils.parseDataSize)(query.queryStats.spilledDataSize) > 0 && _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Spilled Data\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text\" },\n (0, _utils.parseAndFormatDataSize)(query.queryStats.spilledDataSize)\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-6\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Timeline\"\n ),\n _react2.default.createElement(\"hr\", { className: \"h3-hr\" }),\n _react2.default.createElement(\n \"table\",\n { className: \"table\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Parallelism\"\n ),\n _react2.default.createElement(\n \"td\",\n { rowSpan: \"2\" },\n _react2.default.createElement(\n \"div\",\n { className: \"query-stats-sparkline-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"cpu-time-rate-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n { className: \"tr-noborder\" },\n _react2.default.createElement(\n \"td\",\n { className: \"info-sparkline-text\" },\n (0, _utils.formatCount)(this.state.cpuTimeRate[this.state.cpuTimeRate.length - 1])\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Scheduled Time/s\"\n ),\n _react2.default.createElement(\n \"td\",\n { rowSpan: \"2\" },\n _react2.default.createElement(\n \"div\",\n { className: \"query-stats-sparkline-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"scheduled-time-rate-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { 
className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n { className: \"tr-noborder\" },\n _react2.default.createElement(\n \"td\",\n { className: \"info-sparkline-text\" },\n (0, _utils.formatCount)(this.state.scheduledTimeRate[this.state.scheduledTimeRate.length - 1])\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Input Rows/s\"\n ),\n _react2.default.createElement(\n \"td\",\n { rowSpan: \"2\" },\n _react2.default.createElement(\n \"div\",\n { className: \"query-stats-sparkline-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"row-input-rate-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n { className: \"tr-noborder\" },\n _react2.default.createElement(\n \"td\",\n { className: \"info-sparkline-text\" },\n (0, _utils.formatCount)(this.state.rowInputRate[this.state.rowInputRate.length - 1])\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Input Bytes/s\"\n ),\n _react2.default.createElement(\n \"td\",\n { rowSpan: \"2\" },\n _react2.default.createElement(\n \"div\",\n { className: \"query-stats-sparkline-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"byte-input-rate-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n { className: \"tr-noborder\" },\n _react2.default.createElement(\n \"td\",\n { className: \"info-sparkline-text\" },\n (0, _utils.formatDataSize)(this.state.byteInputRate[this.state.byteInputRate.length - 1])\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n 
_react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Physical Input Bytes/s\"\n ),\n _react2.default.createElement(\n \"td\",\n { rowSpan: \"2\" },\n _react2.default.createElement(\n \"div\",\n { className: \"query-stats-sparkline-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"physical-input-rate-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n { className: \"tr-noborder\" },\n _react2.default.createElement(\n \"td\",\n { className: \"info-sparkline-text\" },\n (0, _utils.formatDataSize)(this.state.physicalInputRate[this.state.physicalInputRate.length - 1])\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Memory Utilization\"\n ),\n _react2.default.createElement(\n \"td\",\n { rowSpan: \"2\" },\n _react2.default.createElement(\n \"div\",\n { className: \"query-stats-sparkline-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"reserved-memory-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n { className: \"tr-noborder\" },\n _react2.default.createElement(\n \"td\",\n { className: \"info-sparkline-text\" },\n (0, _utils.formatDataSize)(this.state.reservedMemory[this.state.reservedMemory.length - 1])\n )\n )\n )\n )\n )\n )\n )\n ),\n this.renderWarningInfo(),\n this.renderFailureInfo(),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Query\",\n _react2.default.createElement(\n \"a\",\n { className: \"btn copy-button\", \"data-clipboard-target\": \"#query-text\", 
\"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"Copy to clipboard\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-copy\", \"aria-hidden\": \"true\", alt: \"Copy to clipboard\" })\n )\n ),\n _react2.default.createElement(\n \"pre\",\n { id: \"query\" },\n _react2.default.createElement(\n \"code\",\n { className: \"lang-sql\", id: \"query-text\" },\n query.query\n )\n )\n ),\n this.renderPreparedQuery()\n ),\n this.renderStages(),\n this.renderTasks()\n );\n }\n }], [{\n key: \"formatStackTrace\",\n value: function formatStackTrace(info) {\n return QueryDetail.formatStackTraceHelper(info, [], \"\", \"\");\n }\n }, {\n key: \"formatStackTraceHelper\",\n value: function formatStackTraceHelper(info, parentStack, prefix, linePrefix) {\n var s = linePrefix + prefix + QueryDetail.failureInfoToString(info) + \"\\n\";\n\n if (info.stack) {\n var sharedStackFrames = 0;\n if (parentStack !== null) {\n sharedStackFrames = QueryDetail.countSharedStackFrames(info.stack, parentStack);\n }\n\n for (var i = 0; i < info.stack.length - sharedStackFrames; i++) {\n s += linePrefix + \"\\tat \" + info.stack[i] + \"\\n\";\n }\n if (sharedStackFrames !== 0) {\n s += linePrefix + \"\\t... 
\" + sharedStackFrames + \" more\" + \"\\n\";\n }\n }\n\n if (info.suppressed) {\n for (var _i3 = 0; _i3 < info.suppressed.length; _i3++) {\n s += QueryDetail.formatStackTraceHelper(info.suppressed[_i3], info.stack, \"Suppressed: \", linePrefix + \"\\t\");\n }\n }\n\n if (info.cause) {\n s += QueryDetail.formatStackTraceHelper(info.cause, info.stack, \"Caused by: \", linePrefix);\n }\n\n return s;\n }\n }, {\n key: \"countSharedStackFrames\",\n value: function countSharedStackFrames(stack, parentStack) {\n var n = 0;\n var minStackLength = Math.min(stack.length, parentStack.length);\n while (n < minStackLength && stack[stack.length - 1 - n] === parentStack[parentStack.length - 1 - n]) {\n n++;\n }\n return n;\n }\n }, {\n key: \"failureInfoToString\",\n value: function failureInfoToString(t) {\n return t.message !== null ? t.type + \": \" + t.message : t.type;\n }\n }]);\n\n return QueryDetail;\n}(_react2.default.Component);\n\n//# sourceURL=webpack:///./components/QueryDetail.jsx?"); /***/ }), diff --git a/presto-main/src/main/resources/webapp/dist/stage.js b/presto-main/src/main/resources/webapp/dist/stage.js index a92e1c3e4ac8..34f4662f94cb 100644 --- a/presto-main/src/main/resources/webapp/dist/stage.js +++ b/presto-main/src/main/resources/webapp/dist/stage.js @@ -94,7 +94,7 @@ /***/ (function(module, exports, __webpack_require__) { "use strict"; -eval("\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.PageTitle = undefined;\n\nvar _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) 
defineProperties(Constructor, staticProps); return Constructor; }; }();\n\nvar _react = __webpack_require__(/*! react */ \"./node_modules/react/index.js\");\n\nvar _react2 = _interopRequireDefault(_react);\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return call && (typeof call === \"object\" || typeof call === \"function\") ? call : self; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function, not \" + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? 
Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n\nvar PageTitle = exports.PageTitle = function (_React$Component) {\n _inherits(PageTitle, _React$Component);\n\n function PageTitle(props) {\n _classCallCheck(this, PageTitle);\n\n var _this = _possibleConstructorReturn(this, (PageTitle.__proto__ || Object.getPrototypeOf(PageTitle)).call(this, props));\n\n _this.state = {\n noConnection: false,\n lightShown: false,\n info: null,\n lastSuccess: Date.now(),\n modalShown: false,\n errorText: null\n };\n return _this;\n }\n\n _createClass(PageTitle, [{\n key: \"refreshLoop\",\n value: function refreshLoop() {\n var _this2 = this;\n\n clearTimeout(this.timeoutId);\n fetch(\"/ui/api/cluster\").then(function (response) {\n if (response.status === 401) {\n location.reload();\n }\n return response.json();\n }).then(function (info) {\n _this2.setState({\n info: info,\n noConnection: false,\n lastSuccess: Date.now(),\n modalShown: false\n });\n //$FlowFixMe$ Bootstrap 3 plugin\n $('#no-connection-modal').modal('hide');\n _this2.resetTimer();\n }).catch(function (error) {\n _this2.setState({\n noConnection: true,\n lightShown: !_this2.state.lightShown,\n errorText: error\n });\n _this2.resetTimer();\n\n if (!_this2.state.modalShown && (error || Date.now() - _this2.state.lastSuccess > 30 * 1000)) {\n //$FlowFixMe$ Bootstrap 3 plugin\n $('#no-connection-modal').modal();\n 
_this2.setState({ modalShown: true });\n }\n });\n }\n }, {\n key: \"resetTimer\",\n value: function resetTimer() {\n clearTimeout(this.timeoutId);\n this.timeoutId = setTimeout(this.refreshLoop.bind(this), 1000);\n }\n }, {\n key: \"componentDidMount\",\n value: function componentDidMount() {\n this.refreshLoop.bind(this)();\n }\n }, {\n key: \"renderStatusLight\",\n value: function renderStatusLight() {\n if (this.state.noConnection) {\n if (this.state.lightShown) {\n return _react2.default.createElement(\"span\", { className: \"status-light status-light-red\", id: \"status-indicator\" });\n } else {\n return _react2.default.createElement(\"span\", { className: \"status-light\", id: \"status-indicator\" });\n }\n }\n return _react2.default.createElement(\"span\", { className: \"status-light status-light-green\", id: \"status-indicator\" });\n }\n }, {\n key: \"render\",\n value: function render() {\n var info = this.state.info;\n if (!info) {\n return null;\n }\n\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(\n \"nav\",\n { className: \"navbar\" },\n _react2.default.createElement(\n \"div\",\n { className: \"container-fluid\" },\n _react2.default.createElement(\n \"div\",\n { className: \"navbar-header\" },\n _react2.default.createElement(\n \"table\",\n null,\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"a\",\n { href: \"/ui/\" },\n _react2.default.createElement(\"img\", { src: \"assets/logo.png\" })\n )\n ),\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-brand\" },\n this.props.title\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { id: \"navbar\", className: \"navbar-collapse collapse\" },\n _react2.default.createElement(\n \"ul\",\n { className: \"nav navbar-nav navbar-right\" 
},\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"uppercase\" },\n \"Version\"\n ),\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"span\",\n { className: \"text uppercase\", id: \"version-number\" },\n info.nodeVersion.version\n )\n )\n ),\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"uppercase\" },\n \"Environment\"\n ),\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"span\",\n { className: \"text uppercase\", id: \"environment\" },\n info.environment\n )\n )\n ),\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"uppercase\" },\n \"Uptime\"\n ),\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"span\",\n { \"data-toggle\": \"tooltip\", \"data-placement\": \"bottom\", title: \"Connection status\" },\n this.renderStatusLight()\n ),\n \"\\xA0\",\n _react2.default.createElement(\n \"span\",\n { className: \"text\", id: \"uptime\" },\n info.uptime\n )\n )\n ),\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"text\", id: \"logout\" },\n _react2.default.createElement(\n \"a\",\n { className: \"btn btn-logout\", href: \"logout\" },\n \"Log Out\"\n )\n )\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { id: \"no-connection-modal\", className: \"modal\", tabIndex: \"-1\", role: \"dialog\" },\n _react2.default.createElement(\n \"div\",\n { className: 
\"modal-dialog modal-sm\", role: \"document\" },\n _react2.default.createElement(\n \"div\",\n { className: \"modal-content\" },\n _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"h4\",\n null,\n \"Unable to connect to server\"\n ),\n _react2.default.createElement(\n \"p\",\n null,\n this.state.errorText ? \"Error: \" + this.state.errorText : null\n )\n )\n )\n )\n )\n )\n );\n }\n }]);\n\n return PageTitle;\n}(_react2.default.Component);\n\n//# sourceURL=webpack:///./components/PageTitle.jsx?"); +eval("\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.PageTitle = undefined;\n\nvar _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();\n\nvar _react = __webpack_require__(/*! react */ \"./node_modules/react/index.js\");\n\nvar _react2 = _interopRequireDefault(_react);\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return call && (typeof call === \"object\" || typeof call === \"function\") ? call : self; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function, not \" + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n\nvar PageTitle = exports.PageTitle = function (_React$Component) {\n _inherits(PageTitle, _React$Component);\n\n function PageTitle(props) {\n _classCallCheck(this, PageTitle);\n\n var _this = _possibleConstructorReturn(this, (PageTitle.__proto__ || Object.getPrototypeOf(PageTitle)).call(this, props));\n\n _this.state = {\n noConnection: false,\n lightShown: false,\n info: null,\n lastSuccess: Date.now(),\n modalShown: false,\n errorText: null\n };\n return _this;\n }\n\n 
_createClass(PageTitle, [{\n key: \"refreshLoop\",\n value: function refreshLoop() {\n var _this2 = this;\n\n clearTimeout(this.timeoutId);\n fetch(\"/ui/api/cluster\").then(function (response) {\n if (response.status === 401) {\n location.reload();\n }\n return response.json();\n }).then(function (info) {\n _this2.setState({\n info: info,\n noConnection: false,\n lastSuccess: Date.now(),\n modalShown: false\n });\n //$FlowFixMe$ Bootstrap 3 plugin\n $('#no-connection-modal').modal('hide');\n _this2.resetTimer();\n }).catch(function (fail) {\n _this2.setState({\n noConnection: true,\n lightShown: !_this2.state.lightShown,\n errorText: fail\n });\n _this2.resetTimer();\n\n if (!_this2.state.modalShown && (fail || Date.now() - _this2.state.lastSuccess > 30 * 1000)) {\n //$FlowFixMe$ Bootstrap 3 plugin\n $('#no-connection-modal').modal();\n _this2.setState({ modalShown: true });\n }\n });\n }\n }, {\n key: \"resetTimer\",\n value: function resetTimer() {\n clearTimeout(this.timeoutId);\n this.timeoutId = setTimeout(this.refreshLoop.bind(this), 1000);\n }\n }, {\n key: \"componentDidMount\",\n value: function componentDidMount() {\n this.refreshLoop.bind(this)();\n }\n }, {\n key: \"renderStatusLight\",\n value: function renderStatusLight() {\n if (this.state.noConnection) {\n if (this.state.lightShown) {\n return _react2.default.createElement(\"span\", { className: \"status-light status-light-red\", id: \"status-indicator\" });\n } else {\n return _react2.default.createElement(\"span\", { className: \"status-light\", id: \"status-indicator\" });\n }\n }\n return _react2.default.createElement(\"span\", { className: \"status-light status-light-green\", id: \"status-indicator\" });\n }\n }, {\n key: \"render\",\n value: function render() {\n var info = this.state.info;\n if (!info) {\n return null;\n }\n\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(\n \"nav\",\n { className: \"navbar\" },\n _react2.default.createElement(\n 
\"div\",\n { className: \"container-fluid\" },\n _react2.default.createElement(\n \"div\",\n { className: \"navbar-header\" },\n _react2.default.createElement(\n \"table\",\n null,\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"a\",\n { href: \"/ui/\" },\n _react2.default.createElement(\"img\", { src: \"assets/logo.png\" })\n )\n ),\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-brand\" },\n this.props.title\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { id: \"navbar\", className: \"navbar-collapse collapse\" },\n _react2.default.createElement(\n \"ul\",\n { className: \"nav navbar-nav navbar-right\" },\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"uppercase\" },\n \"Version\"\n ),\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"span\",\n { className: \"text uppercase\", id: \"version-number\" },\n info.nodeVersion.version\n )\n )\n ),\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"uppercase\" },\n \"Environment\"\n ),\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"span\",\n { className: \"text uppercase\", id: \"environment\" },\n info.environment\n )\n )\n ),\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"uppercase\" },\n \"Uptime\"\n ),\n _react2.default.createElement(\"br\", null),\n 
_react2.default.createElement(\n \"span\",\n { \"data-toggle\": \"tooltip\", \"data-placement\": \"bottom\", title: \"Connection status\" },\n this.renderStatusLight()\n ),\n \"\\xA0\",\n _react2.default.createElement(\n \"span\",\n { className: \"text\", id: \"uptime\" },\n info.uptime\n )\n )\n ),\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"text\", id: \"logout\" },\n _react2.default.createElement(\n \"a\",\n { className: \"btn btn-logout\", href: \"logout\" },\n \"Log Out\"\n )\n )\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { id: \"no-connection-modal\", className: \"modal\", tabIndex: \"-1\", role: \"dialog\" },\n _react2.default.createElement(\n \"div\",\n { className: \"modal-dialog modal-sm\", role: \"document\" },\n _react2.default.createElement(\n \"div\",\n { className: \"modal-content\" },\n _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"h4\",\n null,\n \"Unable to connect to server\"\n ),\n _react2.default.createElement(\n \"p\",\n null,\n this.state.errorText ? 
\"Error: \" + this.state.errorText : null\n )\n )\n )\n )\n )\n )\n );\n }\n }]);\n\n return PageTitle;\n}(_react2.default.Component);\n\n//# sourceURL=webpack:///./components/PageTitle.jsx?"); /***/ }), @@ -118,7 +118,7 @@ eval("\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n}); /***/ (function(module, exports, __webpack_require__) { "use strict"; -eval("\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.StageDetail = undefined;\n\nvar _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();\n\nvar _react = __webpack_require__(/*! react */ \"./node_modules/react/index.js\");\n\nvar _react2 = _interopRequireDefault(_react);\n\nvar _reactDom = __webpack_require__(/*! react-dom */ \"./node_modules/react-dom/index.js\");\n\nvar _reactDom2 = _interopRequireDefault(_reactDom);\n\nvar _server = __webpack_require__(/*! react-dom/server */ \"./node_modules/react-dom/server.browser.js\");\n\nvar _server2 = _interopRequireDefault(_server);\n\nvar _dagreD = __webpack_require__(/*! dagre-d3 */ \"./node_modules/dagre-d3/index.js\");\n\nvar dagreD3 = _interopRequireWildcard(_dagreD);\n\nvar _d = __webpack_require__(/*! d3 */ \"./node_modules/d3/index.js\");\n\nvar d3 = _interopRequireWildcard(_d);\n\nvar _utils = __webpack_require__(/*! ../utils */ \"./utils.js\");\n\nvar _QueryHeader = __webpack_require__(/*! 
./QueryHeader */ \"./components/QueryHeader.jsx\");\n\nfunction _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) newObj[key] = obj[key]; } } newObj.default = obj; return newObj; } }\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return call && (typeof call === \"object\" || typeof call === \"function\") ? call : self; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function, not \" + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? 
Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nfunction getTotalWallTime(operator) {\n return (0, _utils.parseDuration)(operator.addInputWall) + (0, _utils.parseDuration)(operator.getOutputWall) + (0, _utils.parseDuration)(operator.finishWall) + (0, _utils.parseDuration)(operator.blockedWall);\n}\n\nvar OperatorSummary = function (_React$Component) {\n _inherits(OperatorSummary, _React$Component);\n\n function OperatorSummary() {\n _classCallCheck(this, OperatorSummary);\n\n return _possibleConstructorReturn(this, (OperatorSummary.__proto__ || Object.getPrototypeOf(OperatorSummary)).apply(this, arguments));\n }\n\n _createClass(OperatorSummary, [{\n key: \"render\",\n value: function render() {\n var operator = this.props.operator;\n\n var totalWallTime = (0, _utils.parseDuration)(operator.addInputWall) + (0, _utils.parseDuration)(operator.getOutputWall) + (0, _utils.parseDuration)(operator.finishWall) + (0, _utils.parseDuration)(operator.blockedWall);\n\n var rowInputRate = totalWallTime === 0 ? 0 : 1.0 * operator.inputPositions / (totalWallTime / 1000.0);\n var byteInputRate = totalWallTime === 0 ? 
0 : 1.0 * (0, _utils.parseDataSize)(operator.inputDataSize) / (totalWallTime / 1000.0);\n\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(\n \"div\",\n { className: \"highlight-row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"header-row\" },\n operator.operatorType\n ),\n _react2.default.createElement(\n \"div\",\n null,\n (0, _utils.formatCount)(rowInputRate) + \" rows/s (\" + (0, _utils.formatDataSize)(byteInputRate) + \"/s)\"\n )\n ),\n _react2.default.createElement(\n \"table\",\n { className: \"table\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n \"Output\"\n ),\n _react2.default.createElement(\n \"td\",\n null,\n (0, _utils.formatCount)(operator.outputPositions) + \" rows (\" + (0, _utils.parseAndFormatDataSize)(operator.outputDataSize) + \")\"\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n \"Drivers\"\n ),\n _react2.default.createElement(\n \"td\",\n null,\n operator.totalDrivers\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n \"Wall Time\"\n ),\n _react2.default.createElement(\n \"td\",\n null,\n (0, _utils.formatDuration)(totalWallTime)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n \"Blocked\"\n ),\n _react2.default.createElement(\n \"td\",\n null,\n (0, _utils.formatDuration)((0, _utils.parseDuration)(operator.blockedWall))\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n \"Input\"\n ),\n _react2.default.createElement(\n \"td\",\n null,\n (0, _utils.formatCount)(operator.inputPositions) + \" rows (\" + (0, _utils.parseAndFormatDataSize)(operator.inputDataSize) + \")\"\n )\n )\n )\n )\n );\n }\n }]);\n\n return 
OperatorSummary;\n}(_react2.default.Component);\n\nvar BAR_CHART_PROPERTIES = {\n type: 'bar',\n barSpacing: '0',\n height: '80px',\n barColor: '#747F96',\n zeroColor: '#8997B3',\n tooltipClassname: 'sparkline-tooltip',\n tooltipFormat: 'Task {{offset:offset}} - {{value}}',\n disableHiddenCheck: true\n};\n\nvar OperatorStatistic = function (_React$Component2) {\n _inherits(OperatorStatistic, _React$Component2);\n\n function OperatorStatistic() {\n _classCallCheck(this, OperatorStatistic);\n\n return _possibleConstructorReturn(this, (OperatorStatistic.__proto__ || Object.getPrototypeOf(OperatorStatistic)).apply(this, arguments));\n }\n\n _createClass(OperatorStatistic, [{\n key: \"componentDidMount\",\n value: function componentDidMount() {\n var operators = this.props.operators;\n var statistic = operators.map(this.props.supplier);\n var numTasks = operators.length;\n\n var tooltipValueLookups = { 'offset': {} };\n for (var i = 0; i < numTasks; i++) {\n tooltipValueLookups['offset'][i] = \"\" + i;\n }\n\n var stageBarChartProperties = $.extend({}, BAR_CHART_PROPERTIES, { barWidth: 800 / numTasks, tooltipValueLookups: tooltipValueLookups });\n $('#' + this.props.id).sparkline(statistic, $.extend({}, stageBarChartProperties, { numberFormatter: this.props.renderer }));\n }\n }, {\n key: \"render\",\n value: function render() {\n return _react2.default.createElement(\n \"div\",\n { className: \"row operator-statistic\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-2 italic-uppercase operator-statistic-title\" },\n this.props.name\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-10\" },\n _react2.default.createElement(\"span\", { className: \"bar-chart\", id: this.props.id })\n )\n );\n }\n }]);\n\n return OperatorStatistic;\n}(_react2.default.Component);\n\nvar OperatorDetail = function (_React$Component3) {\n _inherits(OperatorDetail, _React$Component3);\n\n function OperatorDetail(props) {\n _classCallCheck(this, 
OperatorDetail);\n\n var _this3 = _possibleConstructorReturn(this, (OperatorDetail.__proto__ || Object.getPrototypeOf(OperatorDetail)).call(this, props));\n\n _this3.state = {\n selectedStatistics: _this3.getInitialStatistics()\n };\n return _this3;\n }\n\n _createClass(OperatorDetail, [{\n key: \"getInitialStatistics\",\n value: function getInitialStatistics() {\n return [{\n name: \"Total Wall Time\",\n id: \"totalWallTime\",\n supplier: getTotalWallTime,\n renderer: _utils.formatDuration\n }, {\n name: \"Input Rows\",\n id: \"inputPositions\",\n supplier: function supplier(operator) {\n return operator.inputPositions;\n },\n renderer: _utils.formatCount\n }, {\n name: \"Input Data Size\",\n id: \"inputDataSize\",\n supplier: function supplier(operator) {\n return (0, _utils.parseDataSize)(operator.inputDataSize);\n },\n renderer: _utils.formatDataSize\n }, {\n name: \"Output Rows\",\n id: \"outputPositions\",\n supplier: function supplier(operator) {\n return operator.outputPositions;\n },\n renderer: _utils.formatCount\n }, {\n name: \"Output Data Size\",\n id: \"outputDataSize\",\n supplier: function supplier(operator) {\n return (0, _utils.parseDataSize)(operator.outputDataSize);\n },\n renderer: _utils.formatDataSize\n }];\n }\n }, {\n key: \"getOperatorTasks\",\n value: function getOperatorTasks() {\n // sort the x-axis\n var tasks = this.props.tasks.sort(function (taskA, taskB) {\n return (0, _utils.getTaskNumber)(taskA.taskStatus.taskId) - (0, _utils.getTaskNumber)(taskB.taskStatus.taskId);\n });\n\n var operatorSummary = this.props.operator;\n\n var operatorTasks = [];\n tasks.forEach(function (task) {\n task.stats.pipelines.forEach(function (pipeline) {\n if (pipeline.pipelineId === operatorSummary.pipelineId) {\n pipeline.operatorSummaries.forEach(function (operator) {\n if (operatorSummary.operatorId === operator.operatorId) {\n operatorTasks.push(operator);\n }\n });\n }\n });\n });\n\n return operatorTasks;\n }\n }, {\n key: \"render\",\n value: 
function render() {\n var operator = this.props.operator;\n var operatorTasks = this.getOperatorTasks();\n var totalWallTime = getTotalWallTime(operator);\n\n var rowInputRate = totalWallTime === 0 ? 0 : 1.0 * operator.inputPositions / totalWallTime;\n var byteInputRate = totalWallTime === 0 ? 0 : 1.0 * (0, _utils.parseDataSize)(operator.inputDataSize) / (totalWallTime / 1000.0);\n\n var rowOutputRate = totalWallTime === 0 ? 0 : 1.0 * operator.outputPositions / totalWallTime;\n var byteOutputRate = totalWallTime === 0 ? 0 : 1.0 * (0, _utils.parseDataSize)(operator.outputDataSize) / (totalWallTime / 1000.0);\n\n return _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"div\",\n { className: \"modal-header\" },\n _react2.default.createElement(\n \"button\",\n { type: \"button\", className: \"close\", \"data-dismiss\": \"modal\", \"aria-label\": \"Close\" },\n _react2.default.createElement(\n \"span\",\n { \"aria-hidden\": \"true\" },\n \"\\xD7\"\n )\n ),\n _react2.default.createElement(\n \"h3\",\n null,\n _react2.default.createElement(\n \"small\",\n null,\n \"Pipeline \",\n operator.pipelineId\n ),\n _react2.default.createElement(\"br\", null),\n operator.operatorType\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-6\" },\n _react2.default.createElement(\n \"table\",\n { className: \"table\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n \"Input\"\n ),\n _react2.default.createElement(\n \"td\",\n null,\n (0, _utils.formatCount)(operator.inputPositions) + \" rows (\" + (0, _utils.parseAndFormatDataSize)(operator.inputDataSize) + \")\"\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n 
\"td\",\n null,\n \"Input Rate\"\n ),\n _react2.default.createElement(\n \"td\",\n null,\n (0, _utils.formatCount)(rowInputRate) + \" rows/s (\" + (0, _utils.formatDataSize)(byteInputRate) + \"/s)\"\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n \"Output\"\n ),\n _react2.default.createElement(\n \"td\",\n null,\n (0, _utils.formatCount)(operator.outputPositions) + \" rows (\" + (0, _utils.parseAndFormatDataSize)(operator.outputDataSize) + \")\"\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n \"Output Rate\"\n ),\n _react2.default.createElement(\n \"td\",\n null,\n (0, _utils.formatCount)(rowOutputRate) + \" rows/s (\" + (0, _utils.formatDataSize)(byteOutputRate) + \"/s)\"\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-6\" },\n _react2.default.createElement(\n \"table\",\n { className: \"table\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n \"Wall Time\"\n ),\n _react2.default.createElement(\n \"td\",\n null,\n (0, _utils.formatDuration)(totalWallTime)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n \"Blocked\"\n ),\n _react2.default.createElement(\n \"td\",\n null,\n (0, _utils.formatDuration)((0, _utils.parseDuration)(operator.blockedWall))\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n \"Drivers\"\n ),\n _react2.default.createElement(\n \"td\",\n null,\n operator.totalDrivers\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n \"Tasks\"\n ),\n _react2.default.createElement(\n \"td\",\n null,\n operatorTasks.length\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { 
className: \"row font-white\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-2 italic-uppercase\" },\n _react2.default.createElement(\n \"strong\",\n null,\n \"Statistic\"\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-10 italic-uppercase\" },\n _react2.default.createElement(\n \"strong\",\n null,\n \"Tasks\"\n )\n )\n ),\n this.state.selectedStatistics.map(function (statistic) {\n return _react2.default.createElement(OperatorStatistic, {\n key: statistic.id,\n id: statistic.id,\n name: statistic.name,\n supplier: statistic.supplier,\n renderer: statistic.renderer,\n operators: operatorTasks });\n }.bind(this)),\n _react2.default.createElement(\"p\", null),\n _react2.default.createElement(\"p\", null)\n )\n );\n }\n }]);\n\n return OperatorDetail;\n}(_react2.default.Component);\n\nvar StageOperatorGraph = function (_React$Component4) {\n _inherits(StageOperatorGraph, _React$Component4);\n\n function StageOperatorGraph() {\n _classCallCheck(this, StageOperatorGraph);\n\n return _possibleConstructorReturn(this, (StageOperatorGraph.__proto__ || Object.getPrototypeOf(StageOperatorGraph)).apply(this, arguments));\n }\n\n _createClass(StageOperatorGraph, [{\n key: \"componentDidMount\",\n value: function componentDidMount() {\n this.updateD3Graph();\n }\n }, {\n key: \"componentDidUpdate\",\n value: function componentDidUpdate() {\n this.updateD3Graph();\n }\n }, {\n key: \"handleOperatorClick\",\n value: function handleOperatorClick(operatorCssId) {\n $('#operator-detail-modal').modal();\n\n var pipelineId = parseInt(operatorCssId.split('-')[1]);\n var operatorId = parseInt(operatorCssId.split('-')[2]);\n var stage = this.props.stage;\n\n var operatorStageSummary = null;\n var operatorSummaries = stage.stageStats.operatorSummaries;\n for (var i = 0; i < operatorSummaries.length; i++) {\n if (operatorSummaries[i].pipelineId === pipelineId && operatorSummaries[i].operatorId === operatorId) {\n operatorStageSummary = 
operatorSummaries[i];\n }\n }\n\n _reactDom2.default.render(_react2.default.createElement(OperatorDetail, { key: operatorCssId, operator: operatorStageSummary, tasks: stage.tasks }), document.getElementById('operator-detail'));\n }\n }, {\n key: \"computeOperatorGraphs\",\n value: function computeOperatorGraphs(planNode, operatorMap) {\n var _this5 = this;\n\n var sources = (0, _utils.getChildren)(planNode);\n\n var sourceResults = new Map();\n sources.forEach(function (source) {\n var sourceResult = _this5.computeOperatorGraphs(source, operatorMap);\n sourceResult.forEach(function (operator, pipelineId) {\n if (sourceResults.has(pipelineId)) {\n console.error(\"Multiple sources for \", planNode['@type'], \" had the same pipeline ID\");\n return sourceResults;\n }\n sourceResults.set(pipelineId, operator);\n });\n });\n\n var nodeOperators = operatorMap.get(planNode.id);\n if (!nodeOperators || nodeOperators.length === 0) {\n return sourceResults;\n }\n\n var pipelineOperators = new Map();\n nodeOperators.forEach(function (operator) {\n if (!pipelineOperators.has(operator.pipelineId)) {\n pipelineOperators.set(operator.pipelineId, []);\n }\n pipelineOperators.get(operator.pipelineId).push(operator);\n });\n\n var result = new Map();\n pipelineOperators.forEach(function (pipelineOperators, pipelineId) {\n // sort deep-copied operators in this pipeline from source to sink\n var linkedOperators = pipelineOperators.map(function (a) {\n return Object.assign({}, a);\n }).sort(function (a, b) {\n return a.operatorId - b.operatorId;\n });\n var sinkOperator = linkedOperators[linkedOperators.length - 1];\n var sourceOperator = linkedOperators[0];\n\n if (sourceResults.has(pipelineId)) {\n var pipelineChildResult = sourceResults.get(pipelineId);\n if (pipelineChildResult) {\n sourceOperator.child = pipelineChildResult;\n }\n }\n\n // chain operators at this level\n var currentOperator = sourceOperator;\n linkedOperators.slice(1).forEach(function (source) {\n source.child = 
currentOperator;\n currentOperator = source;\n });\n\n result.set(pipelineId, sinkOperator);\n });\n\n sourceResults.forEach(function (operator, pipelineId) {\n if (!result.has(pipelineId)) {\n result.set(pipelineId, operator);\n }\n });\n\n return result;\n }\n }, {\n key: \"computeOperatorMap\",\n value: function computeOperatorMap() {\n var operatorMap = new Map();\n this.props.stage.stageStats.operatorSummaries.forEach(function (operator) {\n if (!operatorMap.has(operator.planNodeId)) {\n operatorMap.set(operator.planNodeId, []);\n }\n\n operatorMap.get(operator.planNodeId).push(operator);\n });\n\n return operatorMap;\n }\n }, {\n key: \"computeD3StageOperatorGraph\",\n value: function computeD3StageOperatorGraph(graph, operator, sink, pipelineNode) {\n var operatorNodeId = \"operator-\" + operator.pipelineId + \"-\" + operator.operatorId;\n\n // this is a non-standard use of ReactDOMServer, but it's the cleanest way to unify DagreD3 with React\n var html = _server2.default.renderToString(_react2.default.createElement(OperatorSummary, { key: operator.pipelineId + \"-\" + operator.operatorId, operator: operator }));\n graph.setNode(operatorNodeId, { class: \"operator-stats\", label: html, labelType: \"html\" });\n\n if (operator.hasOwnProperty(\"child\")) {\n this.computeD3StageOperatorGraph(graph, operator.child, operatorNodeId, pipelineNode);\n }\n\n if (sink !== null) {\n graph.setEdge(operatorNodeId, sink, { class: \"plan-edge\", arrowheadClass: \"plan-arrowhead\" });\n }\n\n graph.setParent(operatorNodeId, pipelineNode);\n }\n }, {\n key: \"updateD3Graph\",\n value: function updateD3Graph() {\n var _this6 = this;\n\n if (!this.props.stage) {\n return;\n }\n\n var stage = this.props.stage;\n var operatorMap = this.computeOperatorMap();\n var operatorGraphs = this.computeOperatorGraphs(stage.plan.root, operatorMap);\n\n var graph = (0, _utils.initializeGraph)();\n operatorGraphs.forEach(function (operator, pipelineId) {\n var pipelineNodeId = \"pipeline-\" + 
pipelineId;\n graph.setNode(pipelineNodeId, { label: \"Pipeline \" + pipelineId + \" \", clusterLabelPos: 'top', style: 'fill: #2b2b2b', labelStyle: 'fill: #fff' });\n _this6.computeD3StageOperatorGraph(graph, operator, null, pipelineNodeId);\n });\n\n $(\"#operator-canvas\").html(\"\");\n\n if (operatorGraphs.size > 0) {\n $(\".graph-container\").css(\"display\", \"block\");\n var svg = (0, _utils.initializeSvg)(\"#operator-canvas\");\n var render = new dagreD3.render();\n render(d3.select(\"#operator-canvas g\"), graph);\n\n svg.selectAll(\"g.operator-stats\").on(\"click\", this.handleOperatorClick.bind(this));\n svg.attr(\"height\", graph.graph().height);\n svg.attr(\"width\", graph.graph().width);\n } else {\n $(\".graph-container\").css(\"display\", \"none\");\n }\n }\n }, {\n key: \"render\",\n value: function render() {\n var stage = this.props.stage;\n\n if (!stage.hasOwnProperty('plan')) {\n return _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h4\",\n null,\n \"Stage does not have a plan\"\n )\n )\n );\n }\n\n if (!stage.hasOwnProperty('stageStats') || !stage.stageStats.hasOwnProperty(\"operatorSummaries\") || stage.stageStats.operatorSummaries.length === 0) {\n return _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h4\",\n null,\n \"Operator data not available for \",\n stage.stageId\n )\n )\n );\n }\n\n return null;\n }\n }]);\n\n return StageOperatorGraph;\n}(_react2.default.Component);\n\nvar StageDetail = exports.StageDetail = function (_React$Component5) {\n _inherits(StageDetail, _React$Component5);\n\n function StageDetail(props) {\n _classCallCheck(this, StageDetail);\n\n var _this7 = _possibleConstructorReturn(this, (StageDetail.__proto__ || 
Object.getPrototypeOf(StageDetail)).call(this, props));\n\n _this7.state = {\n initialized: false,\n ended: false,\n\n selectedStageId: null,\n query: null,\n\n lastRefresh: null,\n lastRender: null\n };\n\n _this7.refreshLoop = _this7.refreshLoop.bind(_this7);\n return _this7;\n }\n\n _createClass(StageDetail, [{\n key: \"resetTimer\",\n value: function resetTimer() {\n clearTimeout(this.timeoutId);\n // stop refreshing when query finishes or fails\n if (this.state.query === null || !this.state.ended) {\n this.timeoutId = setTimeout(this.refreshLoop, 1000);\n }\n }\n }, {\n key: \"refreshLoop\",\n value: function refreshLoop() {\n var _this8 = this;\n\n clearTimeout(this.timeoutId); // to stop multiple series of refreshLoop from going on simultaneously\n var queryString = (0, _utils.getFirstParameter)(window.location.search).split('.');\n var queryId = queryString[0];\n\n var selectedStageId = this.state.selectedStageId;\n if (selectedStageId === null) {\n selectedStageId = 0;\n if (queryString.length > 1) {\n selectedStageId = parseInt(queryString[1]);\n }\n }\n\n $.get('/ui/api/query/' + queryId, function (query) {\n _this8.setState({\n initialized: true,\n ended: query.finalQueryInfo,\n\n selectedStageId: selectedStageId,\n query: query\n });\n _this8.resetTimer();\n }).error(function () {\n _this8.setState({\n initialized: true\n });\n _this8.resetTimer();\n });\n }\n }, {\n key: \"componentDidMount\",\n value: function componentDidMount() {\n this.refreshLoop();\n new window.ClipboardJS('.copy-button');\n }\n }, {\n key: \"findStage\",\n value: function findStage(stageId, currentStage) {\n if (stageId === null) {\n return null;\n }\n\n if (currentStage.stageId === stageId) {\n return currentStage;\n }\n\n for (var i = 0; i < currentStage.subStages.length; i++) {\n var stage = this.findStage(stageId, currentStage.subStages[i]);\n if (stage !== null) {\n return stage;\n }\n }\n\n return null;\n }\n }, {\n key: \"getAllStageIds\",\n value: function 
getAllStageIds(result, currentStage) {\n var _this9 = this;\n\n result.push(currentStage.plan.id);\n currentStage.subStages.forEach(function (stage) {\n _this9.getAllStageIds(result, stage);\n });\n }\n }, {\n key: \"render\",\n value: function render() {\n var _this10 = this;\n\n if (!this.state.query) {\n var label = _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading...\"\n );\n if (this.state.initialized) {\n label = \"Query not found\";\n }\n return _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h4\",\n null,\n label\n )\n )\n );\n }\n\n if (!this.state.query.outputStage) {\n return _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h4\",\n null,\n \"Query does not have an output stage\"\n )\n )\n );\n }\n\n var query = this.state.query;\n var allStages = [];\n this.getAllStageIds(allStages, query.outputStage);\n\n var stage = this.findStage(query.queryId + \".\" + this.state.selectedStageId, query.outputStage);\n if (stage === null) {\n return _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h4\",\n null,\n \"Stage not found\"\n )\n )\n );\n }\n\n var stageOperatorGraph = null;\n if (!(0, _utils.isQueryEnded)(query)) {\n stageOperatorGraph = _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h4\",\n null,\n \"Operator graph will appear automatically when query completes.\"\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n 
\"Loading...\"\n )\n )\n );\n } else {\n stageOperatorGraph = _react2.default.createElement(StageOperatorGraph, { id: stage.stageId, stage: stage });\n }\n\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(_QueryHeader.QueryHeader, { query: query }),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-2\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Stage \",\n stage.plan.id\n )\n ),\n _react2.default.createElement(\"div\", { className: \"col-xs-8\" }),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-2 stage-dropdown\" },\n _react2.default.createElement(\n \"div\",\n { className: \"input-group-btn\" },\n _react2.default.createElement(\n \"button\",\n { type: \"button\", className: \"btn btn-default dropdown-toggle\", \"data-toggle\": \"dropdown\", \"aria-haspopup\": \"true\", \"aria-expanded\": \"false\" },\n \"Select Stage \",\n _react2.default.createElement(\"span\", { className: \"caret\" })\n ),\n _react2.default.createElement(\n \"ul\",\n { className: \"dropdown-menu\" },\n allStages.map(function (stageId) {\n return _react2.default.createElement(\n \"li\",\n { key: stageId },\n _react2.default.createElement(\n \"a\",\n { onClick: function onClick() {\n return _this10.setState({ selectedStageId: stageId });\n } },\n stageId\n )\n );\n })\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\"hr\", { className: \"h3-hr\" }),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n stageOperatorGraph\n )\n )\n );\n }\n }]);\n\n return StageDetail;\n}(_react2.default.Component);\n\n//# sourceURL=webpack:///./components/StageDetail.jsx?"); 
+eval("\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.StageDetail = undefined;\n\nvar _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();\n\nvar _react = __webpack_require__(/*! react */ \"./node_modules/react/index.js\");\n\nvar _react2 = _interopRequireDefault(_react);\n\nvar _reactDom = __webpack_require__(/*! react-dom */ \"./node_modules/react-dom/index.js\");\n\nvar _reactDom2 = _interopRequireDefault(_reactDom);\n\nvar _server = __webpack_require__(/*! react-dom/server */ \"./node_modules/react-dom/server.browser.js\");\n\nvar _server2 = _interopRequireDefault(_server);\n\nvar _dagreD = __webpack_require__(/*! dagre-d3 */ \"./node_modules/dagre-d3/index.js\");\n\nvar dagreD3 = _interopRequireWildcard(_dagreD);\n\nvar _d = __webpack_require__(/*! d3 */ \"./node_modules/d3/index.js\");\n\nvar d3 = _interopRequireWildcard(_d);\n\nvar _utils = __webpack_require__(/*! ../utils */ \"./utils.js\");\n\nvar _QueryHeader = __webpack_require__(/*! ./QueryHeader */ \"./components/QueryHeader.jsx\");\n\nfunction _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) newObj[key] = obj[key]; } } newObj.default = obj; return newObj; } }\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return call && (typeof call === \"object\" || typeof call === \"function\") ? call : self; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function, not \" + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nfunction getTotalWallTime(operator) {\n return (0, _utils.parseDuration)(operator.addInputWall) + (0, _utils.parseDuration)(operator.getOutputWall) + (0, _utils.parseDuration)(operator.finishWall) + (0, _utils.parseDuration)(operator.blockedWall);\n}\n\nvar OperatorSummary = function (_React$Component) {\n _inherits(OperatorSummary, _React$Component);\n\n function OperatorSummary() {\n _classCallCheck(this, OperatorSummary);\n\n return _possibleConstructorReturn(this, 
(OperatorSummary.__proto__ || Object.getPrototypeOf(OperatorSummary)).apply(this, arguments));\n }\n\n _createClass(OperatorSummary, [{\n key: \"render\",\n value: function render() {\n var operator = this.props.operator;\n\n var totalWallTime = (0, _utils.parseDuration)(operator.addInputWall) + (0, _utils.parseDuration)(operator.getOutputWall) + (0, _utils.parseDuration)(operator.finishWall) + (0, _utils.parseDuration)(operator.blockedWall);\n\n var rowInputRate = totalWallTime === 0 ? 0 : 1.0 * operator.inputPositions / (totalWallTime / 1000.0);\n var byteInputRate = totalWallTime === 0 ? 0 : 1.0 * (0, _utils.parseDataSize)(operator.inputDataSize) / (totalWallTime / 1000.0);\n\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(\n \"div\",\n { className: \"highlight-row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"header-row\" },\n operator.operatorType\n ),\n _react2.default.createElement(\n \"div\",\n null,\n (0, _utils.formatCount)(rowInputRate) + \" rows/s (\" + (0, _utils.formatDataSize)(byteInputRate) + \"/s)\"\n )\n ),\n _react2.default.createElement(\n \"table\",\n { className: \"table\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n \"Output\"\n ),\n _react2.default.createElement(\n \"td\",\n null,\n (0, _utils.formatCount)(operator.outputPositions) + \" rows (\" + (0, _utils.parseAndFormatDataSize)(operator.outputDataSize) + \")\"\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n \"Drivers\"\n ),\n _react2.default.createElement(\n \"td\",\n null,\n operator.totalDrivers\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n \"Wall Time\"\n ),\n _react2.default.createElement(\n \"td\",\n null,\n (0, _utils.formatDuration)(totalWallTime)\n )\n ),\n 
_react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n \"Blocked\"\n ),\n _react2.default.createElement(\n \"td\",\n null,\n (0, _utils.formatDuration)((0, _utils.parseDuration)(operator.blockedWall))\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n \"Input\"\n ),\n _react2.default.createElement(\n \"td\",\n null,\n (0, _utils.formatCount)(operator.inputPositions) + \" rows (\" + (0, _utils.parseAndFormatDataSize)(operator.inputDataSize) + \")\"\n )\n )\n )\n )\n );\n }\n }]);\n\n return OperatorSummary;\n}(_react2.default.Component);\n\nvar BAR_CHART_PROPERTIES = {\n type: 'bar',\n barSpacing: '0',\n height: '80px',\n barColor: '#747F96',\n zeroColor: '#8997B3',\n tooltipClassname: 'sparkline-tooltip',\n tooltipFormat: 'Task {{offset:offset}} - {{value}}',\n disableHiddenCheck: true\n};\n\nvar OperatorStatistic = function (_React$Component2) {\n _inherits(OperatorStatistic, _React$Component2);\n\n function OperatorStatistic() {\n _classCallCheck(this, OperatorStatistic);\n\n return _possibleConstructorReturn(this, (OperatorStatistic.__proto__ || Object.getPrototypeOf(OperatorStatistic)).apply(this, arguments));\n }\n\n _createClass(OperatorStatistic, [{\n key: \"componentDidMount\",\n value: function componentDidMount() {\n var operators = this.props.operators;\n var statistic = operators.map(this.props.supplier);\n var numTasks = operators.length;\n\n var tooltipValueLookups = { 'offset': {} };\n for (var i = 0; i < numTasks; i++) {\n tooltipValueLookups['offset'][i] = \"\" + i;\n }\n\n var stageBarChartProperties = $.extend({}, BAR_CHART_PROPERTIES, { barWidth: 800 / numTasks, tooltipValueLookups: tooltipValueLookups });\n $('#' + this.props.id).sparkline(statistic, $.extend({}, stageBarChartProperties, { numberFormatter: this.props.renderer }));\n }\n }, {\n key: \"render\",\n value: function render() {\n return _react2.default.createElement(\n 
\"div\",\n { className: \"row operator-statistic\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-2 italic-uppercase operator-statistic-title\" },\n this.props.name\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-10\" },\n _react2.default.createElement(\"span\", { className: \"bar-chart\", id: this.props.id })\n )\n );\n }\n }]);\n\n return OperatorStatistic;\n}(_react2.default.Component);\n\nvar OperatorDetail = function (_React$Component3) {\n _inherits(OperatorDetail, _React$Component3);\n\n function OperatorDetail(props) {\n _classCallCheck(this, OperatorDetail);\n\n var _this3 = _possibleConstructorReturn(this, (OperatorDetail.__proto__ || Object.getPrototypeOf(OperatorDetail)).call(this, props));\n\n _this3.state = {\n selectedStatistics: _this3.getInitialStatistics()\n };\n return _this3;\n }\n\n _createClass(OperatorDetail, [{\n key: \"getInitialStatistics\",\n value: function getInitialStatistics() {\n return [{\n name: \"Total Wall Time\",\n id: \"totalWallTime\",\n supplier: getTotalWallTime,\n renderer: _utils.formatDuration\n }, {\n name: \"Input Rows\",\n id: \"inputPositions\",\n supplier: function supplier(operator) {\n return operator.inputPositions;\n },\n renderer: _utils.formatCount\n }, {\n name: \"Input Data Size\",\n id: \"inputDataSize\",\n supplier: function supplier(operator) {\n return (0, _utils.parseDataSize)(operator.inputDataSize);\n },\n renderer: _utils.formatDataSize\n }, {\n name: \"Output Rows\",\n id: \"outputPositions\",\n supplier: function supplier(operator) {\n return operator.outputPositions;\n },\n renderer: _utils.formatCount\n }, {\n name: \"Output Data Size\",\n id: \"outputDataSize\",\n supplier: function supplier(operator) {\n return (0, _utils.parseDataSize)(operator.outputDataSize);\n },\n renderer: _utils.formatDataSize\n }];\n }\n }, {\n key: \"getOperatorTasks\",\n value: function getOperatorTasks() {\n // sort the x-axis\n var tasks = 
this.props.tasks.sort(function (taskA, taskB) {\n return (0, _utils.getTaskNumber)(taskA.taskStatus.taskId) - (0, _utils.getTaskNumber)(taskB.taskStatus.taskId);\n });\n\n var operatorSummary = this.props.operator;\n\n var operatorTasks = [];\n tasks.forEach(function (task) {\n task.stats.pipelines.forEach(function (pipeline) {\n if (pipeline.pipelineId === operatorSummary.pipelineId) {\n pipeline.operatorSummaries.forEach(function (operator) {\n if (operatorSummary.operatorId === operator.operatorId) {\n operatorTasks.push(operator);\n }\n });\n }\n });\n });\n\n return operatorTasks;\n }\n }, {\n key: \"render\",\n value: function render() {\n var operator = this.props.operator;\n var operatorTasks = this.getOperatorTasks();\n var totalWallTime = getTotalWallTime(operator);\n\n var rowInputRate = totalWallTime === 0 ? 0 : 1.0 * operator.inputPositions / totalWallTime;\n var byteInputRate = totalWallTime === 0 ? 0 : 1.0 * (0, _utils.parseDataSize)(operator.inputDataSize) / (totalWallTime / 1000.0);\n\n var rowOutputRate = totalWallTime === 0 ? 0 : 1.0 * operator.outputPositions / totalWallTime;\n var byteOutputRate = totalWallTime === 0 ? 
0 : 1.0 * (0, _utils.parseDataSize)(operator.outputDataSize) / (totalWallTime / 1000.0);\n\n return _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"div\",\n { className: \"modal-header\" },\n _react2.default.createElement(\n \"button\",\n { type: \"button\", className: \"close\", \"data-dismiss\": \"modal\", \"aria-label\": \"Close\" },\n _react2.default.createElement(\n \"span\",\n { \"aria-hidden\": \"true\" },\n \"\\xD7\"\n )\n ),\n _react2.default.createElement(\n \"h3\",\n null,\n _react2.default.createElement(\n \"small\",\n null,\n \"Pipeline \",\n operator.pipelineId\n ),\n _react2.default.createElement(\"br\", null),\n operator.operatorType\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-6\" },\n _react2.default.createElement(\n \"table\",\n { className: \"table\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n \"Input\"\n ),\n _react2.default.createElement(\n \"td\",\n null,\n (0, _utils.formatCount)(operator.inputPositions) + \" rows (\" + (0, _utils.parseAndFormatDataSize)(operator.inputDataSize) + \")\"\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n \"Input Rate\"\n ),\n _react2.default.createElement(\n \"td\",\n null,\n (0, _utils.formatCount)(rowInputRate) + \" rows/s (\" + (0, _utils.formatDataSize)(byteInputRate) + \"/s)\"\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n \"Output\"\n ),\n _react2.default.createElement(\n \"td\",\n null,\n (0, _utils.formatCount)(operator.outputPositions) + \" rows (\" + (0, _utils.parseAndFormatDataSize)(operator.outputDataSize) + \")\"\n )\n ),\n 
_react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n \"Output Rate\"\n ),\n _react2.default.createElement(\n \"td\",\n null,\n (0, _utils.formatCount)(rowOutputRate) + \" rows/s (\" + (0, _utils.formatDataSize)(byteOutputRate) + \"/s)\"\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-6\" },\n _react2.default.createElement(\n \"table\",\n { className: \"table\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n \"Wall Time\"\n ),\n _react2.default.createElement(\n \"td\",\n null,\n (0, _utils.formatDuration)(totalWallTime)\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n \"Blocked\"\n ),\n _react2.default.createElement(\n \"td\",\n null,\n (0, _utils.formatDuration)((0, _utils.parseDuration)(operator.blockedWall))\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n \"Drivers\"\n ),\n _react2.default.createElement(\n \"td\",\n null,\n operator.totalDrivers\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n \"Tasks\"\n ),\n _react2.default.createElement(\n \"td\",\n null,\n operatorTasks.length\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row font-white\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-2 italic-uppercase\" },\n _react2.default.createElement(\n \"strong\",\n null,\n \"Statistic\"\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-10 italic-uppercase\" },\n _react2.default.createElement(\n \"strong\",\n null,\n \"Tasks\"\n )\n )\n ),\n this.state.selectedStatistics.map(function (statistic) {\n return _react2.default.createElement(OperatorStatistic, {\n key: statistic.id,\n id: 
statistic.id,\n name: statistic.name,\n supplier: statistic.supplier,\n renderer: statistic.renderer,\n operators: operatorTasks });\n }.bind(this)),\n _react2.default.createElement(\"p\", null),\n _react2.default.createElement(\"p\", null)\n )\n );\n }\n }]);\n\n return OperatorDetail;\n}(_react2.default.Component);\n\nvar StageOperatorGraph = function (_React$Component4) {\n _inherits(StageOperatorGraph, _React$Component4);\n\n function StageOperatorGraph() {\n _classCallCheck(this, StageOperatorGraph);\n\n return _possibleConstructorReturn(this, (StageOperatorGraph.__proto__ || Object.getPrototypeOf(StageOperatorGraph)).apply(this, arguments));\n }\n\n _createClass(StageOperatorGraph, [{\n key: \"componentDidMount\",\n value: function componentDidMount() {\n this.updateD3Graph();\n }\n }, {\n key: \"componentDidUpdate\",\n value: function componentDidUpdate() {\n this.updateD3Graph();\n }\n }, {\n key: \"handleOperatorClick\",\n value: function handleOperatorClick(operatorCssId) {\n $('#operator-detail-modal').modal();\n\n var pipelineId = parseInt(operatorCssId.split('-')[1]);\n var operatorId = parseInt(operatorCssId.split('-')[2]);\n var stage = this.props.stage;\n\n var operatorStageSummary = null;\n var operatorSummaries = stage.stageStats.operatorSummaries;\n for (var i = 0; i < operatorSummaries.length; i++) {\n if (operatorSummaries[i].pipelineId === pipelineId && operatorSummaries[i].operatorId === operatorId) {\n operatorStageSummary = operatorSummaries[i];\n }\n }\n\n _reactDom2.default.render(_react2.default.createElement(OperatorDetail, { key: operatorCssId, operator: operatorStageSummary, tasks: stage.tasks }), document.getElementById('operator-detail'));\n }\n }, {\n key: \"computeOperatorGraphs\",\n value: function computeOperatorGraphs(planNode, operatorMap) {\n var _this5 = this;\n\n var sources = (0, _utils.getChildren)(planNode);\n\n var sourceResults = new Map();\n sources.forEach(function (source) {\n var sourceResult = 
_this5.computeOperatorGraphs(source, operatorMap);\n sourceResult.forEach(function (operator, pipelineId) {\n if (sourceResults.has(pipelineId)) {\n console.error(\"Multiple sources for \", planNode['@type'], \" had the same pipeline ID\");\n return sourceResults;\n }\n sourceResults.set(pipelineId, operator);\n });\n });\n\n var nodeOperators = operatorMap.get(planNode.id);\n if (!nodeOperators || nodeOperators.length === 0) {\n return sourceResults;\n }\n\n var pipelineOperators = new Map();\n nodeOperators.forEach(function (operator) {\n if (!pipelineOperators.has(operator.pipelineId)) {\n pipelineOperators.set(operator.pipelineId, []);\n }\n pipelineOperators.get(operator.pipelineId).push(operator);\n });\n\n var result = new Map();\n pipelineOperators.forEach(function (pipelineOperators, pipelineId) {\n // sort deep-copied operators in this pipeline from source to sink\n var linkedOperators = pipelineOperators.map(function (a) {\n return Object.assign({}, a);\n }).sort(function (a, b) {\n return a.operatorId - b.operatorId;\n });\n var sinkOperator = linkedOperators[linkedOperators.length - 1];\n var sourceOperator = linkedOperators[0];\n\n if (sourceResults.has(pipelineId)) {\n var pipelineChildResult = sourceResults.get(pipelineId);\n if (pipelineChildResult) {\n sourceOperator.child = pipelineChildResult;\n }\n }\n\n // chain operators at this level\n var currentOperator = sourceOperator;\n linkedOperators.slice(1).forEach(function (source) {\n source.child = currentOperator;\n currentOperator = source;\n });\n\n result.set(pipelineId, sinkOperator);\n });\n\n sourceResults.forEach(function (operator, pipelineId) {\n if (!result.has(pipelineId)) {\n result.set(pipelineId, operator);\n }\n });\n\n return result;\n }\n }, {\n key: \"computeOperatorMap\",\n value: function computeOperatorMap() {\n var operatorMap = new Map();\n this.props.stage.stageStats.operatorSummaries.forEach(function (operator) {\n if (!operatorMap.has(operator.planNodeId)) {\n 
operatorMap.set(operator.planNodeId, []);\n }\n\n operatorMap.get(operator.planNodeId).push(operator);\n });\n\n return operatorMap;\n }\n }, {\n key: \"computeD3StageOperatorGraph\",\n value: function computeD3StageOperatorGraph(graph, operator, sink, pipelineNode) {\n var operatorNodeId = \"operator-\" + operator.pipelineId + \"-\" + operator.operatorId;\n\n // this is a non-standard use of ReactDOMServer, but it's the cleanest way to unify DagreD3 with React\n var html = _server2.default.renderToString(_react2.default.createElement(OperatorSummary, { key: operator.pipelineId + \"-\" + operator.operatorId, operator: operator }));\n graph.setNode(operatorNodeId, { class: \"operator-stats\", label: html, labelType: \"html\" });\n\n if (operator.hasOwnProperty(\"child\")) {\n this.computeD3StageOperatorGraph(graph, operator.child, operatorNodeId, pipelineNode);\n }\n\n if (sink !== null) {\n graph.setEdge(operatorNodeId, sink, { class: \"plan-edge\", arrowheadClass: \"plan-arrowhead\" });\n }\n\n graph.setParent(operatorNodeId, pipelineNode);\n }\n }, {\n key: \"updateD3Graph\",\n value: function updateD3Graph() {\n var _this6 = this;\n\n if (!this.props.stage) {\n return;\n }\n\n var stage = this.props.stage;\n var operatorMap = this.computeOperatorMap();\n var operatorGraphs = this.computeOperatorGraphs(stage.plan.root, operatorMap);\n\n var graph = (0, _utils.initializeGraph)();\n operatorGraphs.forEach(function (operator, pipelineId) {\n var pipelineNodeId = \"pipeline-\" + pipelineId;\n graph.setNode(pipelineNodeId, { label: \"Pipeline \" + pipelineId + \" \", clusterLabelPos: 'top', style: 'fill: #2b2b2b', labelStyle: 'fill: #fff' });\n _this6.computeD3StageOperatorGraph(graph, operator, null, pipelineNodeId);\n });\n\n $(\"#operator-canvas\").html(\"\");\n\n if (operatorGraphs.size > 0) {\n $(\".graph-container\").css(\"display\", \"block\");\n var svg = (0, _utils.initializeSvg)(\"#operator-canvas\");\n var render = new dagreD3.render();\n 
render(d3.select(\"#operator-canvas g\"), graph);\n\n svg.selectAll(\"g.operator-stats\").on(\"click\", this.handleOperatorClick.bind(this));\n svg.attr(\"height\", graph.graph().height);\n svg.attr(\"width\", graph.graph().width);\n } else {\n $(\".graph-container\").css(\"display\", \"none\");\n }\n }\n }, {\n key: \"render\",\n value: function render() {\n var stage = this.props.stage;\n\n if (!stage.hasOwnProperty('plan')) {\n return _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h4\",\n null,\n \"Stage does not have a plan\"\n )\n )\n );\n }\n\n if (!stage.hasOwnProperty('stageStats') || !stage.stageStats.hasOwnProperty(\"operatorSummaries\") || stage.stageStats.operatorSummaries.length === 0) {\n return _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h4\",\n null,\n \"Operator data not available for \",\n stage.stageId\n )\n )\n );\n }\n\n return null;\n }\n }]);\n\n return StageOperatorGraph;\n}(_react2.default.Component);\n\nvar StageDetail = exports.StageDetail = function (_React$Component5) {\n _inherits(StageDetail, _React$Component5);\n\n function StageDetail(props) {\n _classCallCheck(this, StageDetail);\n\n var _this7 = _possibleConstructorReturn(this, (StageDetail.__proto__ || Object.getPrototypeOf(StageDetail)).call(this, props));\n\n _this7.state = {\n initialized: false,\n ended: false,\n\n selectedStageId: null,\n query: null,\n\n lastRefresh: null,\n lastRender: null\n };\n\n _this7.refreshLoop = _this7.refreshLoop.bind(_this7);\n return _this7;\n }\n\n _createClass(StageDetail, [{\n key: \"resetTimer\",\n value: function resetTimer() {\n clearTimeout(this.timeoutId);\n // stop refreshing when query finishes or fails\n if (this.state.query === null || 
!this.state.ended) {\n this.timeoutId = setTimeout(this.refreshLoop, 1000);\n }\n }\n }, {\n key: \"refreshLoop\",\n value: function refreshLoop() {\n var _this8 = this;\n\n clearTimeout(this.timeoutId); // to stop multiple series of refreshLoop from going on simultaneously\n var queryString = (0, _utils.getFirstParameter)(window.location.search).split('.');\n var queryId = queryString[0];\n\n var selectedStageId = this.state.selectedStageId;\n if (selectedStageId === null) {\n selectedStageId = 0;\n if (queryString.length > 1) {\n selectedStageId = parseInt(queryString[1]);\n }\n }\n\n $.get('/ui/api/query/' + queryId, function (query) {\n _this8.setState({\n initialized: true,\n ended: query.finalQueryInfo,\n\n selectedStageId: selectedStageId,\n query: query\n });\n _this8.resetTimer();\n }).fail(function () {\n _this8.setState({\n initialized: true\n });\n _this8.resetTimer();\n });\n }\n }, {\n key: \"componentDidMount\",\n value: function componentDidMount() {\n this.refreshLoop();\n new window.ClipboardJS('.copy-button');\n }\n }, {\n key: \"findStage\",\n value: function findStage(stageId, currentStage) {\n if (stageId === null) {\n return null;\n }\n\n if (currentStage.stageId === stageId) {\n return currentStage;\n }\n\n for (var i = 0; i < currentStage.subStages.length; i++) {\n var stage = this.findStage(stageId, currentStage.subStages[i]);\n if (stage !== null) {\n return stage;\n }\n }\n\n return null;\n }\n }, {\n key: \"getAllStageIds\",\n value: function getAllStageIds(result, currentStage) {\n var _this9 = this;\n\n result.push(currentStage.plan.id);\n currentStage.subStages.forEach(function (stage) {\n _this9.getAllStageIds(result, stage);\n });\n }\n }, {\n key: \"render\",\n value: function render() {\n var _this10 = this;\n\n if (!this.state.query) {\n var label = _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading...\"\n );\n if (this.state.initialized) {\n label = \"Query not found\";\n }\n return 
_react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h4\",\n null,\n label\n )\n )\n );\n }\n\n if (!this.state.query.outputStage) {\n return _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h4\",\n null,\n \"Query does not have an output stage\"\n )\n )\n );\n }\n\n var query = this.state.query;\n var allStages = [];\n this.getAllStageIds(allStages, query.outputStage);\n\n var stage = this.findStage(query.queryId + \".\" + this.state.selectedStageId, query.outputStage);\n if (stage === null) {\n return _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h4\",\n null,\n \"Stage not found\"\n )\n )\n );\n }\n\n var stageOperatorGraph = null;\n if (!(0, _utils.isQueryEnded)(query)) {\n stageOperatorGraph = _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h4\",\n null,\n \"Operator graph will appear automatically when query completes.\"\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading...\"\n )\n )\n );\n } else {\n stageOperatorGraph = _react2.default.createElement(StageOperatorGraph, { id: stage.stageId, stage: stage });\n }\n\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(_QueryHeader.QueryHeader, { query: query }),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"div\",\n { className: 
\"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-2\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Stage \",\n stage.plan.id\n )\n ),\n _react2.default.createElement(\"div\", { className: \"col-xs-8\" }),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-2 stage-dropdown\" },\n _react2.default.createElement(\n \"div\",\n { className: \"input-group-btn\" },\n _react2.default.createElement(\n \"button\",\n { type: \"button\", className: \"btn btn-default dropdown-toggle\", \"data-toggle\": \"dropdown\", \"aria-haspopup\": \"true\", \"aria-expanded\": \"false\" },\n \"Select Stage \",\n _react2.default.createElement(\"span\", { className: \"caret\" })\n ),\n _react2.default.createElement(\n \"ul\",\n { className: \"dropdown-menu\" },\n allStages.map(function (stageId) {\n return _react2.default.createElement(\n \"li\",\n { key: stageId },\n _react2.default.createElement(\n \"a\",\n { onClick: function onClick() {\n return _this10.setState({ selectedStageId: stageId });\n } },\n stageId\n )\n );\n })\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\"hr\", { className: \"h3-hr\" }),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n stageOperatorGraph\n )\n )\n );\n }\n }]);\n\n return StageDetail;\n}(_react2.default.Component);\n\n//# sourceURL=webpack:///./components/StageDetail.jsx?"); /***/ }), diff --git a/presto-main/src/main/resources/webapp/dist/timeline.js b/presto-main/src/main/resources/webapp/dist/timeline.js index 3741ecb3c0fe..4ec479131c59 100644 --- a/presto-main/src/main/resources/webapp/dist/timeline.js +++ b/presto-main/src/main/resources/webapp/dist/timeline.js @@ -94,7 +94,7 @@ /***/ (function(module, exports, __webpack_require__) { "use strict"; -eval("\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.PageTitle = undefined;\n\nvar _createClass = function 
() { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();\n\nvar _react = __webpack_require__(/*! react */ \"./node_modules/react/index.js\");\n\nvar _react2 = _interopRequireDefault(_react);\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return call && (typeof call === \"object\" || typeof call === \"function\") ? call : self; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function, not \" + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? 
Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n\nvar PageTitle = exports.PageTitle = function (_React$Component) {\n _inherits(PageTitle, _React$Component);\n\n function PageTitle(props) {\n _classCallCheck(this, PageTitle);\n\n var _this = _possibleConstructorReturn(this, (PageTitle.__proto__ || Object.getPrototypeOf(PageTitle)).call(this, props));\n\n _this.state = {\n noConnection: false,\n lightShown: false,\n info: null,\n lastSuccess: Date.now(),\n modalShown: false,\n errorText: null\n };\n return _this;\n }\n\n _createClass(PageTitle, [{\n key: \"refreshLoop\",\n value: function refreshLoop() {\n var _this2 = this;\n\n clearTimeout(this.timeoutId);\n fetch(\"/ui/api/cluster\").then(function (response) {\n if (response.status === 401) {\n location.reload();\n }\n return response.json();\n }).then(function (info) {\n _this2.setState({\n info: info,\n noConnection: false,\n lastSuccess: Date.now(),\n modalShown: false\n });\n //$FlowFixMe$ Bootstrap 3 plugin\n $('#no-connection-modal').modal('hide');\n _this2.resetTimer();\n }).catch(function (error) {\n _this2.setState({\n noConnection: true,\n lightShown: !_this2.state.lightShown,\n errorText: error\n });\n _this2.resetTimer();\n\n if (!_this2.state.modalShown && (error || Date.now() - _this2.state.lastSuccess > 30 * 1000)) {\n //$FlowFixMe$ Bootstrap 3 plugin\n $('#no-connection-modal').modal();\n 
_this2.setState({ modalShown: true });\n }\n });\n }\n }, {\n key: \"resetTimer\",\n value: function resetTimer() {\n clearTimeout(this.timeoutId);\n this.timeoutId = setTimeout(this.refreshLoop.bind(this), 1000);\n }\n }, {\n key: \"componentDidMount\",\n value: function componentDidMount() {\n this.refreshLoop.bind(this)();\n }\n }, {\n key: \"renderStatusLight\",\n value: function renderStatusLight() {\n if (this.state.noConnection) {\n if (this.state.lightShown) {\n return _react2.default.createElement(\"span\", { className: \"status-light status-light-red\", id: \"status-indicator\" });\n } else {\n return _react2.default.createElement(\"span\", { className: \"status-light\", id: \"status-indicator\" });\n }\n }\n return _react2.default.createElement(\"span\", { className: \"status-light status-light-green\", id: \"status-indicator\" });\n }\n }, {\n key: \"render\",\n value: function render() {\n var info = this.state.info;\n if (!info) {\n return null;\n }\n\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(\n \"nav\",\n { className: \"navbar\" },\n _react2.default.createElement(\n \"div\",\n { className: \"container-fluid\" },\n _react2.default.createElement(\n \"div\",\n { className: \"navbar-header\" },\n _react2.default.createElement(\n \"table\",\n null,\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"a\",\n { href: \"/ui/\" },\n _react2.default.createElement(\"img\", { src: \"assets/logo.png\" })\n )\n ),\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-brand\" },\n this.props.title\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { id: \"navbar\", className: \"navbar-collapse collapse\" },\n _react2.default.createElement(\n \"ul\",\n { className: \"nav navbar-nav navbar-right\" 
},\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"uppercase\" },\n \"Version\"\n ),\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"span\",\n { className: \"text uppercase\", id: \"version-number\" },\n info.nodeVersion.version\n )\n )\n ),\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"uppercase\" },\n \"Environment\"\n ),\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"span\",\n { className: \"text uppercase\", id: \"environment\" },\n info.environment\n )\n )\n ),\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"uppercase\" },\n \"Uptime\"\n ),\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"span\",\n { \"data-toggle\": \"tooltip\", \"data-placement\": \"bottom\", title: \"Connection status\" },\n this.renderStatusLight()\n ),\n \"\\xA0\",\n _react2.default.createElement(\n \"span\",\n { className: \"text\", id: \"uptime\" },\n info.uptime\n )\n )\n ),\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"text\", id: \"logout\" },\n _react2.default.createElement(\n \"a\",\n { className: \"btn btn-logout\", href: \"logout\" },\n \"Log Out\"\n )\n )\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { id: \"no-connection-modal\", className: \"modal\", tabIndex: \"-1\", role: \"dialog\" },\n _react2.default.createElement(\n \"div\",\n { className: 
\"modal-dialog modal-sm\", role: \"document\" },\n _react2.default.createElement(\n \"div\",\n { className: \"modal-content\" },\n _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"h4\",\n null,\n \"Unable to connect to server\"\n ),\n _react2.default.createElement(\n \"p\",\n null,\n this.state.errorText ? \"Error: \" + this.state.errorText : null\n )\n )\n )\n )\n )\n )\n );\n }\n }]);\n\n return PageTitle;\n}(_react2.default.Component);\n\n//# sourceURL=webpack:///./components/PageTitle.jsx?"); +eval("\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.PageTitle = undefined;\n\nvar _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();\n\nvar _react = __webpack_require__(/*! react */ \"./node_modules/react/index.js\");\n\nvar _react2 = _interopRequireDefault(_react);\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return call && (typeof call === \"object\" || typeof call === \"function\") ? call : self; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function, not \" + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n\nvar PageTitle = exports.PageTitle = function (_React$Component) {\n _inherits(PageTitle, _React$Component);\n\n function PageTitle(props) {\n _classCallCheck(this, PageTitle);\n\n var _this = _possibleConstructorReturn(this, (PageTitle.__proto__ || Object.getPrototypeOf(PageTitle)).call(this, props));\n\n _this.state = {\n noConnection: false,\n lightShown: false,\n info: null,\n lastSuccess: Date.now(),\n modalShown: false,\n errorText: null\n };\n return _this;\n }\n\n 
_createClass(PageTitle, [{\n key: \"refreshLoop\",\n value: function refreshLoop() {\n var _this2 = this;\n\n clearTimeout(this.timeoutId);\n fetch(\"/ui/api/cluster\").then(function (response) {\n if (response.status === 401) {\n location.reload();\n }\n return response.json();\n }).then(function (info) {\n _this2.setState({\n info: info,\n noConnection: false,\n lastSuccess: Date.now(),\n modalShown: false\n });\n //$FlowFixMe$ Bootstrap 3 plugin\n $('#no-connection-modal').modal('hide');\n _this2.resetTimer();\n }).catch(function (fail) {\n _this2.setState({\n noConnection: true,\n lightShown: !_this2.state.lightShown,\n errorText: fail\n });\n _this2.resetTimer();\n\n if (!_this2.state.modalShown && (fail || Date.now() - _this2.state.lastSuccess > 30 * 1000)) {\n //$FlowFixMe$ Bootstrap 3 plugin\n $('#no-connection-modal').modal();\n _this2.setState({ modalShown: true });\n }\n });\n }\n }, {\n key: \"resetTimer\",\n value: function resetTimer() {\n clearTimeout(this.timeoutId);\n this.timeoutId = setTimeout(this.refreshLoop.bind(this), 1000);\n }\n }, {\n key: \"componentDidMount\",\n value: function componentDidMount() {\n this.refreshLoop.bind(this)();\n }\n }, {\n key: \"renderStatusLight\",\n value: function renderStatusLight() {\n if (this.state.noConnection) {\n if (this.state.lightShown) {\n return _react2.default.createElement(\"span\", { className: \"status-light status-light-red\", id: \"status-indicator\" });\n } else {\n return _react2.default.createElement(\"span\", { className: \"status-light\", id: \"status-indicator\" });\n }\n }\n return _react2.default.createElement(\"span\", { className: \"status-light status-light-green\", id: \"status-indicator\" });\n }\n }, {\n key: \"render\",\n value: function render() {\n var info = this.state.info;\n if (!info) {\n return null;\n }\n\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(\n \"nav\",\n { className: \"navbar\" },\n _react2.default.createElement(\n 
\"div\",\n { className: \"container-fluid\" },\n _react2.default.createElement(\n \"div\",\n { className: \"navbar-header\" },\n _react2.default.createElement(\n \"table\",\n null,\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"a\",\n { href: \"/ui/\" },\n _react2.default.createElement(\"img\", { src: \"assets/logo.png\" })\n )\n ),\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-brand\" },\n this.props.title\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { id: \"navbar\", className: \"navbar-collapse collapse\" },\n _react2.default.createElement(\n \"ul\",\n { className: \"nav navbar-nav navbar-right\" },\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"uppercase\" },\n \"Version\"\n ),\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"span\",\n { className: \"text uppercase\", id: \"version-number\" },\n info.nodeVersion.version\n )\n )\n ),\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"uppercase\" },\n \"Environment\"\n ),\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"span\",\n { className: \"text uppercase\", id: \"environment\" },\n info.environment\n )\n )\n ),\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"uppercase\" },\n \"Uptime\"\n ),\n _react2.default.createElement(\"br\", null),\n 
_react2.default.createElement(\n \"span\",\n { \"data-toggle\": \"tooltip\", \"data-placement\": \"bottom\", title: \"Connection status\" },\n this.renderStatusLight()\n ),\n \"\\xA0\",\n _react2.default.createElement(\n \"span\",\n { className: \"text\", id: \"uptime\" },\n info.uptime\n )\n )\n ),\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"text\", id: \"logout\" },\n _react2.default.createElement(\n \"a\",\n { className: \"btn btn-logout\", href: \"logout\" },\n \"Log Out\"\n )\n )\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { id: \"no-connection-modal\", className: \"modal\", tabIndex: \"-1\", role: \"dialog\" },\n _react2.default.createElement(\n \"div\",\n { className: \"modal-dialog modal-sm\", role: \"document\" },\n _react2.default.createElement(\n \"div\",\n { className: \"modal-content\" },\n _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"h4\",\n null,\n \"Unable to connect to server\"\n ),\n _react2.default.createElement(\n \"p\",\n null,\n this.state.errorText ? 
\"Error: \" + this.state.errorText : null\n )\n )\n )\n )\n )\n )\n );\n }\n }]);\n\n return PageTitle;\n}(_react2.default.Component);\n\n//# sourceURL=webpack:///./components/PageTitle.jsx?"); /***/ }), diff --git a/presto-main/src/main/resources/webapp/dist/worker.js b/presto-main/src/main/resources/webapp/dist/worker.js index 39fe5fa3e2fa..042567690196 100644 --- a/presto-main/src/main/resources/webapp/dist/worker.js +++ b/presto-main/src/main/resources/webapp/dist/worker.js @@ -94,7 +94,7 @@ /***/ (function(module, exports, __webpack_require__) { "use strict"; -eval("\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.PageTitle = undefined;\n\nvar _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();\n\nvar _react = __webpack_require__(/*! react */ \"./node_modules/react/index.js\");\n\nvar _react2 = _interopRequireDefault(_react);\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return call && (typeof call === \"object\" || typeof call === \"function\") ? 
call : self; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function, not \" + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n\nvar PageTitle = exports.PageTitle = function (_React$Component) {\n _inherits(PageTitle, _React$Component);\n\n function PageTitle(props) {\n _classCallCheck(this, PageTitle);\n\n var _this = _possibleConstructorReturn(this, (PageTitle.__proto__ || Object.getPrototypeOf(PageTitle)).call(this, props));\n\n _this.state = {\n noConnection: false,\n lightShown: false,\n info: null,\n lastSuccess: Date.now(),\n modalShown: false,\n errorText: null\n };\n return _this;\n }\n\n _createClass(PageTitle, [{\n key: \"refreshLoop\",\n value: function refreshLoop() {\n var _this2 = this;\n\n clearTimeout(this.timeoutId);\n fetch(\"/ui/api/cluster\").then(function (response) {\n if (response.status === 401) {\n location.reload();\n }\n return response.json();\n }).then(function (info) {\n _this2.setState({\n info: info,\n noConnection: false,\n lastSuccess: Date.now(),\n modalShown: false\n });\n 
//$FlowFixMe$ Bootstrap 3 plugin\n $('#no-connection-modal').modal('hide');\n _this2.resetTimer();\n }).catch(function (error) {\n _this2.setState({\n noConnection: true,\n lightShown: !_this2.state.lightShown,\n errorText: error\n });\n _this2.resetTimer();\n\n if (!_this2.state.modalShown && (error || Date.now() - _this2.state.lastSuccess > 30 * 1000)) {\n //$FlowFixMe$ Bootstrap 3 plugin\n $('#no-connection-modal').modal();\n _this2.setState({ modalShown: true });\n }\n });\n }\n }, {\n key: \"resetTimer\",\n value: function resetTimer() {\n clearTimeout(this.timeoutId);\n this.timeoutId = setTimeout(this.refreshLoop.bind(this), 1000);\n }\n }, {\n key: \"componentDidMount\",\n value: function componentDidMount() {\n this.refreshLoop.bind(this)();\n }\n }, {\n key: \"renderStatusLight\",\n value: function renderStatusLight() {\n if (this.state.noConnection) {\n if (this.state.lightShown) {\n return _react2.default.createElement(\"span\", { className: \"status-light status-light-red\", id: \"status-indicator\" });\n } else {\n return _react2.default.createElement(\"span\", { className: \"status-light\", id: \"status-indicator\" });\n }\n }\n return _react2.default.createElement(\"span\", { className: \"status-light status-light-green\", id: \"status-indicator\" });\n }\n }, {\n key: \"render\",\n value: function render() {\n var info = this.state.info;\n if (!info) {\n return null;\n }\n\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(\n \"nav\",\n { className: \"navbar\" },\n _react2.default.createElement(\n \"div\",\n { className: \"container-fluid\" },\n _react2.default.createElement(\n \"div\",\n { className: \"navbar-header\" },\n _react2.default.createElement(\n \"table\",\n null,\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"a\",\n { href: \"/ui/\" },\n 
_react2.default.createElement(\"img\", { src: \"assets/logo.png\" })\n )\n ),\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-brand\" },\n this.props.title\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { id: \"navbar\", className: \"navbar-collapse collapse\" },\n _react2.default.createElement(\n \"ul\",\n { className: \"nav navbar-nav navbar-right\" },\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"uppercase\" },\n \"Version\"\n ),\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"span\",\n { className: \"text uppercase\", id: \"version-number\" },\n info.nodeVersion.version\n )\n )\n ),\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"uppercase\" },\n \"Environment\"\n ),\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"span\",\n { className: \"text uppercase\", id: \"environment\" },\n info.environment\n )\n )\n ),\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"uppercase\" },\n \"Uptime\"\n ),\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"span\",\n { \"data-toggle\": \"tooltip\", \"data-placement\": \"bottom\", title: \"Connection status\" },\n this.renderStatusLight()\n ),\n \"\\xA0\",\n _react2.default.createElement(\n \"span\",\n { className: \"text\", id: \"uptime\" },\n info.uptime\n )\n )\n ),\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: 
\"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"text\", id: \"logout\" },\n _react2.default.createElement(\n \"a\",\n { className: \"btn btn-logout\", href: \"logout\" },\n \"Log Out\"\n )\n )\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { id: \"no-connection-modal\", className: \"modal\", tabIndex: \"-1\", role: \"dialog\" },\n _react2.default.createElement(\n \"div\",\n { className: \"modal-dialog modal-sm\", role: \"document\" },\n _react2.default.createElement(\n \"div\",\n { className: \"modal-content\" },\n _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"h4\",\n null,\n \"Unable to connect to server\"\n ),\n _react2.default.createElement(\n \"p\",\n null,\n this.state.errorText ? \"Error: \" + this.state.errorText : null\n )\n )\n )\n )\n )\n )\n );\n }\n }]);\n\n return PageTitle;\n}(_react2.default.Component);\n\n//# sourceURL=webpack:///./components/PageTitle.jsx?"); +eval("\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.PageTitle = undefined;\n\nvar _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();\n\nvar _react = __webpack_require__(/*! 
react */ \"./node_modules/react/index.js\");\n\nvar _react2 = _interopRequireDefault(_react);\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return call && (typeof call === \"object\" || typeof call === \"function\") ? call : self; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function, not \" + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? 
Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n\nvar PageTitle = exports.PageTitle = function (_React$Component) {\n _inherits(PageTitle, _React$Component);\n\n function PageTitle(props) {\n _classCallCheck(this, PageTitle);\n\n var _this = _possibleConstructorReturn(this, (PageTitle.__proto__ || Object.getPrototypeOf(PageTitle)).call(this, props));\n\n _this.state = {\n noConnection: false,\n lightShown: false,\n info: null,\n lastSuccess: Date.now(),\n modalShown: false,\n errorText: null\n };\n return _this;\n }\n\n _createClass(PageTitle, [{\n key: \"refreshLoop\",\n value: function refreshLoop() {\n var _this2 = this;\n\n clearTimeout(this.timeoutId);\n fetch(\"/ui/api/cluster\").then(function (response) {\n if (response.status === 401) {\n location.reload();\n }\n return response.json();\n }).then(function (info) {\n _this2.setState({\n info: info,\n noConnection: false,\n lastSuccess: Date.now(),\n modalShown: false\n });\n //$FlowFixMe$ Bootstrap 3 plugin\n $('#no-connection-modal').modal('hide');\n _this2.resetTimer();\n }).catch(function (fail) {\n _this2.setState({\n noConnection: true,\n lightShown: !_this2.state.lightShown,\n errorText: fail\n });\n _this2.resetTimer();\n\n if (!_this2.state.modalShown && (fail || Date.now() - _this2.state.lastSuccess > 30 * 1000)) {\n //$FlowFixMe$ Bootstrap 3 plugin\n $('#no-connection-modal').modal();\n 
_this2.setState({ modalShown: true });\n }\n });\n }\n }, {\n key: \"resetTimer\",\n value: function resetTimer() {\n clearTimeout(this.timeoutId);\n this.timeoutId = setTimeout(this.refreshLoop.bind(this), 1000);\n }\n }, {\n key: \"componentDidMount\",\n value: function componentDidMount() {\n this.refreshLoop.bind(this)();\n }\n }, {\n key: \"renderStatusLight\",\n value: function renderStatusLight() {\n if (this.state.noConnection) {\n if (this.state.lightShown) {\n return _react2.default.createElement(\"span\", { className: \"status-light status-light-red\", id: \"status-indicator\" });\n } else {\n return _react2.default.createElement(\"span\", { className: \"status-light\", id: \"status-indicator\" });\n }\n }\n return _react2.default.createElement(\"span\", { className: \"status-light status-light-green\", id: \"status-indicator\" });\n }\n }, {\n key: \"render\",\n value: function render() {\n var info = this.state.info;\n if (!info) {\n return null;\n }\n\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(\n \"nav\",\n { className: \"navbar\" },\n _react2.default.createElement(\n \"div\",\n { className: \"container-fluid\" },\n _react2.default.createElement(\n \"div\",\n { className: \"navbar-header\" },\n _react2.default.createElement(\n \"table\",\n null,\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"a\",\n { href: \"/ui/\" },\n _react2.default.createElement(\"img\", { src: \"assets/logo.png\" })\n )\n ),\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-brand\" },\n this.props.title\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { id: \"navbar\", className: \"navbar-collapse collapse\" },\n _react2.default.createElement(\n \"ul\",\n { className: \"nav navbar-nav navbar-right\" 
},\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"uppercase\" },\n \"Version\"\n ),\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"span\",\n { className: \"text uppercase\", id: \"version-number\" },\n info.nodeVersion.version\n )\n )\n ),\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"uppercase\" },\n \"Environment\"\n ),\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"span\",\n { className: \"text uppercase\", id: \"environment\" },\n info.environment\n )\n )\n ),\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"uppercase\" },\n \"Uptime\"\n ),\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"span\",\n { \"data-toggle\": \"tooltip\", \"data-placement\": \"bottom\", title: \"Connection status\" },\n this.renderStatusLight()\n ),\n \"\\xA0\",\n _react2.default.createElement(\n \"span\",\n { className: \"text\", id: \"uptime\" },\n info.uptime\n )\n )\n ),\n _react2.default.createElement(\n \"li\",\n null,\n _react2.default.createElement(\n \"span\",\n { className: \"navbar-cluster-info\" },\n _react2.default.createElement(\n \"span\",\n { className: \"text\", id: \"logout\" },\n _react2.default.createElement(\n \"a\",\n { className: \"btn btn-logout\", href: \"logout\" },\n \"Log Out\"\n )\n )\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { id: \"no-connection-modal\", className: \"modal\", tabIndex: \"-1\", role: \"dialog\" },\n _react2.default.createElement(\n \"div\",\n { className: 
\"modal-dialog modal-sm\", role: \"document\" },\n _react2.default.createElement(\n \"div\",\n { className: \"modal-content\" },\n _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"h4\",\n null,\n \"Unable to connect to server\"\n ),\n _react2.default.createElement(\n \"p\",\n null,\n this.state.errorText ? \"Error: \" + this.state.errorText : null\n )\n )\n )\n )\n )\n )\n );\n }\n }]);\n\n return PageTitle;\n}(_react2.default.Component);\n\n//# sourceURL=webpack:///./components/PageTitle.jsx?"); /***/ }), @@ -106,7 +106,7 @@ eval("\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n}); /***/ (function(module, exports, __webpack_require__) { "use strict"; -eval("\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.WorkerStatus = undefined;\n\nvar _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();\n\nvar _react = __webpack_require__(/*! react */ \"./node_modules/react/index.js\");\n\nvar _react2 = _interopRequireDefault(_react);\n\nvar _utils = __webpack_require__(/*! ../utils */ \"./utils.js\");\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return call && (typeof call === \"object\" || typeof call === \"function\") ? call : self; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function, not \" + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nvar SMALL_SPARKLINE_PROPERTIES = {\n width: '100%',\n height: '57px',\n fillColor: '#3F4552',\n lineColor: '#747F96',\n spotColor: '#1EDCFF',\n tooltipClassname: 'sparkline-tooltip',\n disableHiddenCheck: true\n};\n\nvar WorkerStatus = exports.WorkerStatus = function (_React$Component) {\n _inherits(WorkerStatus, _React$Component);\n\n function WorkerStatus(props) {\n _classCallCheck(this, WorkerStatus);\n\n var _this = _possibleConstructorReturn(this, (WorkerStatus.__proto__ || 
Object.getPrototypeOf(WorkerStatus)).call(this, props));\n\n _this.state = {\n serverInfo: null,\n initialized: false,\n ended: false,\n\n processCpuLoad: [],\n systemCpuLoad: [],\n heapPercentUsed: [],\n nonHeapUsed: []\n };\n\n _this.refreshLoop = _this.refreshLoop.bind(_this);\n return _this;\n }\n\n _createClass(WorkerStatus, [{\n key: \"resetTimer\",\n value: function resetTimer() {\n clearTimeout(this.timeoutId);\n // stop refreshing when query finishes or fails\n if (this.state.query === null || !this.state.ended) {\n this.timeoutId = setTimeout(this.refreshLoop, 1000);\n }\n }\n }, {\n key: \"refreshLoop\",\n value: function refreshLoop() {\n clearTimeout(this.timeoutId); // to stop multiple series of refreshLoop from going on simultaneously\n var nodeId = (0, _utils.getFirstParameter)(window.location.search);\n $.get('/ui/api/worker/' + nodeId + '/status', function (serverInfo) {\n this.setState({\n serverInfo: serverInfo,\n initialized: true,\n\n processCpuLoad: (0, _utils.addToHistory)(serverInfo.processCpuLoad * 100.0, this.state.processCpuLoad),\n systemCpuLoad: (0, _utils.addToHistory)(serverInfo.systemCpuLoad * 100.0, this.state.systemCpuLoad),\n heapPercentUsed: (0, _utils.addToHistory)(serverInfo.heapUsed * 100.0 / serverInfo.heapAvailable, this.state.heapPercentUsed),\n nonHeapUsed: (0, _utils.addToHistory)(serverInfo.nonHeapUsed, this.state.nonHeapUsed)\n });\n\n this.resetTimer();\n }.bind(this)).error(function () {\n this.setState({\n initialized: true\n });\n this.resetTimer();\n }.bind(this));\n }\n }, {\n key: \"componentDidMount\",\n value: function componentDidMount() {\n this.refreshLoop();\n }\n }, {\n key: \"componentDidUpdate\",\n value: function componentDidUpdate() {\n $('#process-cpu-load-sparkline').sparkline(this.state.processCpuLoad, $.extend({}, SMALL_SPARKLINE_PROPERTIES, { chartRangeMin: 0, numberFormatter: _utils.precisionRound }));\n $('#system-cpu-load-sparkline').sparkline(this.state.systemCpuLoad, $.extend({}, 
SMALL_SPARKLINE_PROPERTIES, { chartRangeMin: 0, numberFormatter: _utils.precisionRound }));\n $('#heap-percent-used-sparkline').sparkline(this.state.heapPercentUsed, $.extend({}, SMALL_SPARKLINE_PROPERTIES, { chartRangeMin: 0, numberFormatter: _utils.precisionRound }));\n $('#nonheap-used-sparkline').sparkline(this.state.nonHeapUsed, $.extend({}, SMALL_SPARKLINE_PROPERTIES, { chartRangeMin: 0, numberFormatter: _utils.formatDataSize }));\n\n $('[data-toggle=\"tooltip\"]').tooltip();\n new window.ClipboardJS('.copy-button');\n }\n }, {\n key: \"renderPoolQueries\",\n value: function renderPoolQueries(pool) {\n if (!pool) {\n return;\n }\n\n var queries = {};\n var reservations = pool.queryMemoryReservations;\n var revocableReservations = pool.queryMemoryRevocableReservations;\n\n for (var query in reservations) {\n queries[query] = [reservations[query], 0];\n }\n\n for (var _query in revocableReservations) {\n if (queries.hasOwnProperty(_query)) {\n queries[_query][1] = revocableReservations[_query];\n } else {\n queries[_query] = [0, revocableReservations[_query]];\n }\n }\n\n var size = pool.maxBytes;\n\n if (Object.keys(queries).length === 0) {\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(\n \"table\",\n { className: \"table table-condensed\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n \"No queries using pool\"\n )\n )\n )\n )\n );\n }\n\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(\n \"table\",\n { className: \"table\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n Object.keys(queries).map(function (key) {\n return WorkerStatus.renderPoolQuery(key, queries[key][0], queries[key][1], size);\n })\n )\n )\n );\n }\n }, {\n key: \"render\",\n value: function render() {\n var serverInfo = this.state.serverInfo;\n\n if (serverInfo === null) {\n if 
(this.state.initialized === false) {\n return _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading...\"\n );\n } else {\n return _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h4\",\n null,\n \"Node information could not be loaded\"\n )\n )\n );\n }\n }\n\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Overview\"\n ),\n _react2.default.createElement(\"hr\", { className: \"h3-hr\" }),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-6\" },\n _react2.default.createElement(\n \"table\",\n { className: \"table\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Node ID\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text wrap-text\" },\n _react2.default.createElement(\n \"span\",\n { id: \"node-id\" },\n serverInfo.nodeId\n ),\n \"\\xA0\\xA0\",\n _react2.default.createElement(\n \"a\",\n { href: \"#\", className: \"copy-button\", \"data-clipboard-target\": \"#node-id\", \"data-toggle\": \"tooltip\", \"data-placement\": \"right\",\n title: \"Copy to clipboard\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-copy\", alt: \"Copy to clipboard\" })\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Heap Memory\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text wrap-text\" },\n 
_react2.default.createElement(\n \"span\",\n { id: \"node-heap-available\" },\n (0, _utils.formatDataSize)(serverInfo.heapAvailable)\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Processors\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text wrap-text\" },\n _react2.default.createElement(\n \"span\",\n { id: \"node-processors\" },\n serverInfo.processors\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-6\" },\n _react2.default.createElement(\n \"table\",\n { className: \"table\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Uptime\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text wrap-text\" },\n serverInfo.uptime\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"External Address\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text wrap-text\" },\n _react2.default.createElement(\n \"span\",\n { id: \"external-address\" },\n serverInfo.externalAddress\n ),\n \"\\xA0\\xA0\",\n _react2.default.createElement(\n \"a\",\n { href: \"#\", className: \"copy-button\", \"data-clipboard-target\": \"#external-address\", \"data-toggle\": \"tooltip\", \"data-placement\": \"right\",\n title: \"Copy to clipboard\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-copy\", alt: \"Copy to clipboard\" })\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Internal Address\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text wrap-text\" },\n _react2.default.createElement(\n \"span\",\n { id: 
\"internal-address\" },\n serverInfo.internalAddress\n ),\n \"\\xA0\\xA0\",\n _react2.default.createElement(\n \"a\",\n { href: \"#\", className: \"copy-button\", \"data-clipboard-target\": \"#internal-address\", \"data-toggle\": \"tooltip\", \"data-placement\": \"right\",\n title: \"Copy to clipboard\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-copy\", alt: \"Copy to clipboard\" })\n )\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Resource Utilization\"\n ),\n _react2.default.createElement(\"hr\", { className: \"h3-hr\" }),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-6\" },\n _react2.default.createElement(\n \"table\",\n { className: \"table\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Process CPU Utilization\"\n ),\n _react2.default.createElement(\n \"td\",\n { rowSpan: \"2\" },\n _react2.default.createElement(\n \"div\",\n { className: \"query-stats-sparkline-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"process-cpu-load-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n { className: \"tr-noborder\" },\n _react2.default.createElement(\n \"td\",\n { className: \"info-sparkline-text\" },\n (0, _utils.formatCount)(this.state.processCpuLoad[this.state.processCpuLoad.length - 1]),\n \"%\"\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"System CPU Utilization\"\n 
),\n _react2.default.createElement(\n \"td\",\n { rowSpan: \"2\" },\n _react2.default.createElement(\n \"div\",\n { className: \"query-stats-sparkline-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"system-cpu-load-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n { className: \"tr-noborder\" },\n _react2.default.createElement(\n \"td\",\n { className: \"info-sparkline-text\" },\n (0, _utils.formatCount)(this.state.systemCpuLoad[this.state.systemCpuLoad.length - 1]),\n \"%\"\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-6\" },\n _react2.default.createElement(\n \"table\",\n { className: \"table\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Heap Utilization\"\n ),\n _react2.default.createElement(\n \"td\",\n { rowSpan: \"2\" },\n _react2.default.createElement(\n \"div\",\n { className: \"query-stats-sparkline-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"heap-percent-used-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n { className: \"tr-noborder\" },\n _react2.default.createElement(\n \"td\",\n { className: \"info-sparkline-text\" },\n (0, _utils.formatCount)(this.state.heapPercentUsed[this.state.heapPercentUsed.length - 1]),\n \"%\"\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Non-Heap Memory Used\"\n ),\n _react2.default.createElement(\n \"td\",\n { rowSpan: \"2\" },\n _react2.default.createElement(\n \"div\",\n { className: 
\"query-stats-sparkline-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"nonheap-used-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n { className: \"tr-noborder\" },\n _react2.default.createElement(\n \"td\",\n { className: \"info-sparkline-text\" },\n (0, _utils.formatDataSize)(this.state.nonHeapUsed[this.state.nonHeapUsed.length - 1])\n )\n )\n )\n )\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Memory Pools\"\n ),\n _react2.default.createElement(\"hr\", { className: \"h3-hr\" }),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-6\" },\n WorkerStatus.renderPoolBar(\"General\", serverInfo.memoryInfo.pools.general),\n this.renderPoolQueries(serverInfo.memoryInfo.pools.general)\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-6\" },\n WorkerStatus.renderPoolBar(\"Reserved\", serverInfo.memoryInfo.pools.reserved),\n this.renderPoolQueries(serverInfo.memoryInfo.pools.reserved)\n )\n )\n )\n )\n );\n }\n }], [{\n key: \"renderPoolBar\",\n value: function renderPoolBar(name, pool) {\n if (!pool) {\n return;\n }\n\n var size = pool.maxBytes;\n var reserved = pool.reservedBytes;\n var revocable = pool.reservedRevocableBytes;\n\n var percentageReservedNonRevocable = reserved - revocable === 0 ? 0 : Math.max(Math.round((reserved - revocable) * 100.0 / size), 15);\n var percentageRevocable = revocable === 0 ? 
0 : Math.max(Math.round(revocable * 100.0 / size), 15);\n var percentageFree = 100 - (percentageRevocable + percentageReservedNonRevocable);\n\n return _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-8\" },\n _react2.default.createElement(\n \"h4\",\n null,\n name,\n \" Pool\"\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-4\" },\n _react2.default.createElement(\n \"div\",\n { className: \"progress\", style: { marginTop: \"6px\" } },\n _react2.default.createElement(\n \"div\",\n { className: \"progress-bar memory-progress-bar memory-progress-bar-info\", role: \"progressbar\", style: { width: \"100%\" } },\n (0, _utils.formatDataSize)(size),\n \" total\"\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\"hr\", { className: \"h4-hr\" }),\n _react2.default.createElement(\n \"div\",\n { className: \"progress\" },\n _react2.default.createElement(\n \"div\",\n { className: \"progress-bar memory-progress-bar progress-bar-warning progress-bar-striped active\", role: \"progressbar\",\n style: { width: percentageReservedNonRevocable + \"%\" } },\n (0, _utils.formatDataSize)(reserved - revocable)\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"progress-bar memory-progress-bar progress-bar-danger progress-bar-striped active\", role: \"progressbar\",\n style: { width: percentageRevocable + \"%\" } },\n (0, _utils.formatDataSize)(revocable)\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"progress-bar memory-progress-bar progress-bar-success\", role: \"progressbar\", style: { width: percentageFree + \"%\" } },\n (0, 
_utils.formatDataSize)(size - reserved)\n )\n )\n )\n )\n )\n );\n }\n }, {\n key: \"renderPoolQuery\",\n value: function renderPoolQuery(query, reserved, revocable, total) {\n return _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"div\",\n { className: \"row query-memory-list-header\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-7\" },\n _react2.default.createElement(\n \"a\",\n { href: \"query.html?\" + query, target: \"_blank\" },\n query\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-5\" },\n _react2.default.createElement(\n \"div\",\n { className: \"row text-right\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-6\" },\n _react2.default.createElement(\n \"span\",\n { \"data-toggle\": \"tooltip\", \"data-placement\": \"top\", title: \"% of pool memory reserved\" },\n Math.round(reserved * 100.0 / total),\n \"%\"\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-6\" },\n _react2.default.createElement(\n \"span\",\n { \"data-toggle\": \"tooltip\", \"data-placement\": \"top\",\n title: \"Reserved: \" + (0, _utils.formatDataSize)(reserved) + \". 
Revocable: \" + (0, _utils.formatDataSize)(revocable) },\n (0, _utils.formatDataSize)(reserved)\n )\n )\n )\n )\n )\n )\n );\n }\n }]);\n\n return WorkerStatus;\n}(_react2.default.Component);\n\n//# sourceURL=webpack:///./components/WorkerStatus.jsx?"); +eval("\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.WorkerStatus = undefined;\n\nvar _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();\n\nvar _react = __webpack_require__(/*! react */ \"./node_modules/react/index.js\");\n\nvar _react2 = _interopRequireDefault(_react);\n\nvar _utils = __webpack_require__(/*! ../utils */ \"./utils.js\");\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return call && (typeof call === \"object\" || typeof call === \"function\") ? 
call : self; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function, not \" + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nvar SMALL_SPARKLINE_PROPERTIES = {\n width: '100%',\n height: '57px',\n fillColor: '#3F4552',\n lineColor: '#747F96',\n spotColor: '#1EDCFF',\n tooltipClassname: 'sparkline-tooltip',\n disableHiddenCheck: true\n};\n\nvar WorkerStatus = exports.WorkerStatus = function (_React$Component) {\n _inherits(WorkerStatus, _React$Component);\n\n function WorkerStatus(props) {\n _classCallCheck(this, WorkerStatus);\n\n var _this = _possibleConstructorReturn(this, (WorkerStatus.__proto__ || Object.getPrototypeOf(WorkerStatus)).call(this, props));\n\n _this.state = {\n serverInfo: null,\n initialized: false,\n ended: false,\n\n processCpuLoad: [],\n systemCpuLoad: [],\n heapPercentUsed: [],\n nonHeapUsed: []\n };\n\n _this.refreshLoop = _this.refreshLoop.bind(_this);\n return _this;\n }\n\n _createClass(WorkerStatus, [{\n key: \"resetTimer\",\n value: function resetTimer() {\n 
clearTimeout(this.timeoutId);\n // stop refreshing when query finishes or fails\n if (this.state.query === null || !this.state.ended) {\n this.timeoutId = setTimeout(this.refreshLoop, 1000);\n }\n }\n }, {\n key: \"refreshLoop\",\n value: function refreshLoop() {\n clearTimeout(this.timeoutId); // to stop multiple series of refreshLoop from going on simultaneously\n var nodeId = (0, _utils.getFirstParameter)(window.location.search);\n $.get('/ui/api/worker/' + nodeId + '/status', function (serverInfo) {\n this.setState({\n serverInfo: serverInfo,\n initialized: true,\n\n processCpuLoad: (0, _utils.addToHistory)(serverInfo.processCpuLoad * 100.0, this.state.processCpuLoad),\n systemCpuLoad: (0, _utils.addToHistory)(serverInfo.systemCpuLoad * 100.0, this.state.systemCpuLoad),\n heapPercentUsed: (0, _utils.addToHistory)(serverInfo.heapUsed * 100.0 / serverInfo.heapAvailable, this.state.heapPercentUsed),\n nonHeapUsed: (0, _utils.addToHistory)(serverInfo.nonHeapUsed, this.state.nonHeapUsed)\n });\n\n this.resetTimer();\n }.bind(this)).fail(function () {\n this.setState({\n initialized: true\n });\n this.resetTimer();\n }.bind(this));\n }\n }, {\n key: \"componentDidMount\",\n value: function componentDidMount() {\n this.refreshLoop();\n }\n }, {\n key: \"componentDidUpdate\",\n value: function componentDidUpdate() {\n $('#process-cpu-load-sparkline').sparkline(this.state.processCpuLoad, $.extend({}, SMALL_SPARKLINE_PROPERTIES, { chartRangeMin: 0, numberFormatter: _utils.precisionRound }));\n $('#system-cpu-load-sparkline').sparkline(this.state.systemCpuLoad, $.extend({}, SMALL_SPARKLINE_PROPERTIES, { chartRangeMin: 0, numberFormatter: _utils.precisionRound }));\n $('#heap-percent-used-sparkline').sparkline(this.state.heapPercentUsed, $.extend({}, SMALL_SPARKLINE_PROPERTIES, { chartRangeMin: 0, numberFormatter: _utils.precisionRound }));\n $('#nonheap-used-sparkline').sparkline(this.state.nonHeapUsed, $.extend({}, SMALL_SPARKLINE_PROPERTIES, { chartRangeMin: 0, 
numberFormatter: _utils.formatDataSize }));\n\n $('[data-toggle=\"tooltip\"]').tooltip();\n new window.ClipboardJS('.copy-button');\n }\n }, {\n key: \"renderPoolQueries\",\n value: function renderPoolQueries(pool) {\n if (!pool) {\n return;\n }\n\n var queries = {};\n var reservations = pool.queryMemoryReservations;\n var revocableReservations = pool.queryMemoryRevocableReservations;\n\n for (var query in reservations) {\n queries[query] = [reservations[query], 0];\n }\n\n for (var _query in revocableReservations) {\n if (queries.hasOwnProperty(_query)) {\n queries[_query][1] = revocableReservations[_query];\n } else {\n queries[_query] = [0, revocableReservations[_query]];\n }\n }\n\n var size = pool.maxBytes;\n\n if (Object.keys(queries).length === 0) {\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(\n \"table\",\n { className: \"table table-condensed\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n \"No queries using pool\"\n )\n )\n )\n )\n );\n }\n\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(\n \"table\",\n { className: \"table\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n Object.keys(queries).map(function (key) {\n return WorkerStatus.renderPoolQuery(key, queries[key][0], queries[key][1], size);\n })\n )\n )\n );\n }\n }, {\n key: \"render\",\n value: function render() {\n var serverInfo = this.state.serverInfo;\n\n if (serverInfo === null) {\n if (this.state.initialized === false) {\n return _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading...\"\n );\n } else {\n return _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h4\",\n null,\n \"Node information could not be 
loaded\"\n )\n )\n );\n }\n }\n\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Overview\"\n ),\n _react2.default.createElement(\"hr\", { className: \"h3-hr\" }),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-6\" },\n _react2.default.createElement(\n \"table\",\n { className: \"table\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Node ID\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text wrap-text\" },\n _react2.default.createElement(\n \"span\",\n { id: \"node-id\" },\n serverInfo.nodeId\n ),\n \"\\xA0\\xA0\",\n _react2.default.createElement(\n \"a\",\n { href: \"#\", className: \"copy-button\", \"data-clipboard-target\": \"#node-id\", \"data-toggle\": \"tooltip\", \"data-placement\": \"right\",\n title: \"Copy to clipboard\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-copy\", alt: \"Copy to clipboard\" })\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Heap Memory\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text wrap-text\" },\n _react2.default.createElement(\n \"span\",\n { id: \"node-heap-available\" },\n (0, _utils.formatDataSize)(serverInfo.heapAvailable)\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Processors\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text wrap-text\" },\n _react2.default.createElement(\n 
\"span\",\n { id: \"node-processors\" },\n serverInfo.processors\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-6\" },\n _react2.default.createElement(\n \"table\",\n { className: \"table\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Uptime\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text wrap-text\" },\n serverInfo.uptime\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"External Address\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text wrap-text\" },\n _react2.default.createElement(\n \"span\",\n { id: \"external-address\" },\n serverInfo.externalAddress\n ),\n \"\\xA0\\xA0\",\n _react2.default.createElement(\n \"a\",\n { href: \"#\", className: \"copy-button\", \"data-clipboard-target\": \"#external-address\", \"data-toggle\": \"tooltip\", \"data-placement\": \"right\",\n title: \"Copy to clipboard\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-copy\", alt: \"Copy to clipboard\" })\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Internal Address\"\n ),\n _react2.default.createElement(\n \"td\",\n { className: \"info-text wrap-text\" },\n _react2.default.createElement(\n \"span\",\n { id: \"internal-address\" },\n serverInfo.internalAddress\n ),\n \"\\xA0\\xA0\",\n _react2.default.createElement(\n \"a\",\n { href: \"#\", className: \"copy-button\", \"data-clipboard-target\": \"#internal-address\", \"data-toggle\": \"tooltip\", \"data-placement\": \"right\",\n title: \"Copy to clipboard\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-copy\", alt: \"Copy to clipboard\" })\n 
)\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Resource Utilization\"\n ),\n _react2.default.createElement(\"hr\", { className: \"h3-hr\" }),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-6\" },\n _react2.default.createElement(\n \"table\",\n { className: \"table\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Process CPU Utilization\"\n ),\n _react2.default.createElement(\n \"td\",\n { rowSpan: \"2\" },\n _react2.default.createElement(\n \"div\",\n { className: \"query-stats-sparkline-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"process-cpu-load-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n { className: \"tr-noborder\" },\n _react2.default.createElement(\n \"td\",\n { className: \"info-sparkline-text\" },\n (0, _utils.formatCount)(this.state.processCpuLoad[this.state.processCpuLoad.length - 1]),\n \"%\"\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"System CPU Utilization\"\n ),\n _react2.default.createElement(\n \"td\",\n { rowSpan: \"2\" },\n _react2.default.createElement(\n \"div\",\n { className: \"query-stats-sparkline-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"system-cpu-load-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n 
\"tr\",\n { className: \"tr-noborder\" },\n _react2.default.createElement(\n \"td\",\n { className: \"info-sparkline-text\" },\n (0, _utils.formatCount)(this.state.systemCpuLoad[this.state.systemCpuLoad.length - 1]),\n \"%\"\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-6\" },\n _react2.default.createElement(\n \"table\",\n { className: \"table\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Heap Utilization\"\n ),\n _react2.default.createElement(\n \"td\",\n { rowSpan: \"2\" },\n _react2.default.createElement(\n \"div\",\n { className: \"query-stats-sparkline-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"heap-percent-used-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n { className: \"tr-noborder\" },\n _react2.default.createElement(\n \"td\",\n { className: \"info-sparkline-text\" },\n (0, _utils.formatCount)(this.state.heapPercentUsed[this.state.heapPercentUsed.length - 1]),\n \"%\"\n )\n ),\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n { className: \"info-title\" },\n \"Non-Heap Memory Used\"\n ),\n _react2.default.createElement(\n \"td\",\n { rowSpan: \"2\" },\n _react2.default.createElement(\n \"div\",\n { className: \"query-stats-sparkline-container\" },\n _react2.default.createElement(\n \"span\",\n { className: \"sparkline\", id: \"nonheap-used-sparkline\" },\n _react2.default.createElement(\n \"div\",\n { className: \"loader\" },\n \"Loading ...\"\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"tr\",\n { className: \"tr-noborder\" },\n _react2.default.createElement(\n \"td\",\n { className: \"info-sparkline-text\" },\n (0, 
_utils.formatDataSize)(this.state.nonHeapUsed[this.state.nonHeapUsed.length - 1])\n )\n )\n )\n )\n )\n )\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Memory Pools\"\n ),\n _react2.default.createElement(\"hr\", { className: \"h3-hr\" }),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-6\" },\n WorkerStatus.renderPoolBar(\"General\", serverInfo.memoryInfo.pools.general),\n this.renderPoolQueries(serverInfo.memoryInfo.pools.general)\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-6\" },\n WorkerStatus.renderPoolBar(\"Reserved\", serverInfo.memoryInfo.pools.reserved),\n this.renderPoolQueries(serverInfo.memoryInfo.pools.reserved)\n )\n )\n )\n )\n );\n }\n }], [{\n key: \"renderPoolBar\",\n value: function renderPoolBar(name, pool) {\n if (!pool) {\n return;\n }\n\n var size = pool.maxBytes;\n var reserved = pool.reservedBytes;\n var revocable = pool.reservedRevocableBytes;\n\n var percentageReservedNonRevocable = reserved - revocable === 0 ? 0 : Math.max(Math.round((reserved - revocable) * 100.0 / size), 15);\n var percentageRevocable = revocable === 0 ? 
0 : Math.max(Math.round(revocable * 100.0 / size), 15);\n var percentageFree = 100 - (percentageRevocable + percentageReservedNonRevocable);\n\n return _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-8\" },\n _react2.default.createElement(\n \"h4\",\n null,\n name,\n \" Pool\"\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-4\" },\n _react2.default.createElement(\n \"div\",\n { className: \"progress\", style: { marginTop: \"6px\" } },\n _react2.default.createElement(\n \"div\",\n { className: \"progress-bar memory-progress-bar memory-progress-bar-info\", role: \"progressbar\", style: { width: \"100%\" } },\n (0, _utils.formatDataSize)(size),\n \" total\"\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\"hr\", { className: \"h4-hr\" }),\n _react2.default.createElement(\n \"div\",\n { className: \"progress\" },\n _react2.default.createElement(\n \"div\",\n { className: \"progress-bar memory-progress-bar progress-bar-warning progress-bar-striped active\", role: \"progressbar\",\n style: { width: percentageReservedNonRevocable + \"%\" } },\n (0, _utils.formatDataSize)(reserved - revocable)\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"progress-bar memory-progress-bar progress-bar-danger progress-bar-striped active\", role: \"progressbar\",\n style: { width: percentageRevocable + \"%\" } },\n (0, _utils.formatDataSize)(revocable)\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"progress-bar memory-progress-bar progress-bar-success\", role: \"progressbar\", style: { width: percentageFree + \"%\" } },\n (0, 
_utils.formatDataSize)(size - reserved)\n )\n )\n )\n )\n )\n );\n }\n }, {\n key: \"renderPoolQuery\",\n value: function renderPoolQuery(query, reserved, revocable, total) {\n return _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"div\",\n { className: \"row query-memory-list-header\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-7\" },\n _react2.default.createElement(\n \"a\",\n { href: \"query.html?\" + query, target: \"_blank\" },\n query\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-5\" },\n _react2.default.createElement(\n \"div\",\n { className: \"row text-right\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-6\" },\n _react2.default.createElement(\n \"span\",\n { \"data-toggle\": \"tooltip\", \"data-placement\": \"top\", title: \"% of pool memory reserved\" },\n Math.round(reserved * 100.0 / total),\n \"%\"\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-6\" },\n _react2.default.createElement(\n \"span\",\n { \"data-toggle\": \"tooltip\", \"data-placement\": \"top\",\n title: \"Reserved: \" + (0, _utils.formatDataSize)(reserved) + \". 
Revocable: \" + (0, _utils.formatDataSize)(revocable) },\n (0, _utils.formatDataSize)(reserved)\n )\n )\n )\n )\n )\n )\n );\n }\n }]);\n\n return WorkerStatus;\n}(_react2.default.Component);\n\n//# sourceURL=webpack:///./components/WorkerStatus.jsx?"); /***/ }), @@ -118,7 +118,7 @@ eval("\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n}); /***/ (function(module, exports, __webpack_require__) { "use strict"; -eval("\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.WorkerThreadList = undefined;\n\nvar _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();\n\nvar _react = __webpack_require__(/*! react */ \"./node_modules/react/index.js\");\n\nvar _react2 = _interopRequireDefault(_react);\n\nvar _utils = __webpack_require__(/*! ../utils */ \"./utils.js\");\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return call && (typeof call === \"object\" || typeof call === \"function\") ? 
call : self; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function, not \" + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nvar ALL_THREADS = \"All Threads\";\nvar QUERY_THREADS = \"Running Queries\";\n\nvar ALL_THREAD_STATE = \"ALL\";\nvar THREAD_STATES = [ALL_THREAD_STATE, \"RUNNABLE\", \"BLOCKED\", \"WAITING\", \"TIMED_WAITING\", \"NEW\", \"TERMINATED\"];\nvar QUERY_THREAD_REGEX = new RegExp(/([0-9])*_([0-9])*_([0-9])*_.*?\\.([0-9])*\\.([0-9])*-([0-9])*-([0-9])*/);\nvar THREAD_GROUP_REGEXP = new RegExp(/(.*?)-[0-9]+/);\n\nvar WorkerThreadList = exports.WorkerThreadList = function (_React$Component) {\n _inherits(WorkerThreadList, _React$Component);\n\n function WorkerThreadList(props) {\n _classCallCheck(this, WorkerThreadList);\n\n var _this = _possibleConstructorReturn(this, (WorkerThreadList.__proto__ || Object.getPrototypeOf(WorkerThreadList)).call(this, props));\n\n _this.state = {\n serverInfo: null,\n initialized: false,\n ended: false,\n\n threads: null,\n\n snapshotTime: null,\n\n selectedGroup: 
ALL_THREADS,\n selectedThreadState: ALL_THREAD_STATE\n };\n return _this;\n }\n\n _createClass(WorkerThreadList, [{\n key: \"captureSnapshot\",\n value: function captureSnapshot() {\n var nodeId = (0, _utils.getFirstParameter)(window.location.search);\n $.get('/ui/api/worker/' + nodeId + '/thread', function (threads) {\n this.setState({\n threads: WorkerThreadList.processThreads(threads),\n snapshotTime: new Date(),\n initialized: true\n });\n }.bind(this)).error(function () {\n this.setState({\n initialized: true\n });\n }.bind(this));\n }\n }, {\n key: \"componentDidUpdate\",\n value: function componentDidUpdate() {\n new window.ClipboardJS('.copy-button');\n }\n }, {\n key: \"handleGroupClick\",\n value: function handleGroupClick(selectedGroup, event) {\n this.setState({\n selectedGroup: selectedGroup\n });\n event.preventDefault();\n }\n }, {\n key: \"handleThreadStateClick\",\n value: function handleThreadStateClick(selectedThreadState, event) {\n this.setState({\n selectedThreadState: selectedThreadState\n });\n event.preventDefault();\n }\n }, {\n key: \"handleNewSnapshotClick\",\n value: function handleNewSnapshotClick(event) {\n this.setState({\n initialized: false\n });\n this.captureSnapshot();\n event.preventDefault();\n }\n }, {\n key: \"filterThreads\",\n value: function filterThreads(group, state) {\n return this.state.threads[group].filter(function (t) {\n return t.state === state || state === ALL_THREAD_STATE;\n });\n }\n }, {\n key: \"renderGroupListItem\",\n value: function renderGroupListItem(group) {\n return _react2.default.createElement(\n \"li\",\n { key: group },\n _react2.default.createElement(\n \"a\",\n { href: \"#\", className: this.state.selectedGroup === group ? 
\"selected\" : \"\", onClick: this.handleGroupClick.bind(this, group) },\n group,\n \" (\",\n this.filterThreads(group, this.state.selectedThreadState).length,\n \")\"\n )\n );\n }\n }, {\n key: \"renderThreadStateListItem\",\n value: function renderThreadStateListItem(threadState) {\n return _react2.default.createElement(\n \"li\",\n { key: threadState },\n _react2.default.createElement(\n \"a\",\n { href: \"#\", className: this.state.selectedThreadState === threadState ? \"selected\" : \"\", onClick: this.handleThreadStateClick.bind(this, threadState) },\n threadState,\n \" (\",\n this.filterThreads(this.state.selectedGroup, threadState).length,\n \")\"\n )\n );\n }\n }, {\n key: \"renderStackLine\",\n value: function renderStackLine(threadId) {\n return function (stackLine, index) {\n return _react2.default.createElement(\n \"div\",\n { key: threadId + index },\n \"\\xA0\\xA0at \",\n stackLine.className,\n \".\",\n stackLine.method,\n \"(\",\n _react2.default.createElement(\n \"span\",\n { className: \"font-light\" },\n stackLine.file,\n \":\",\n stackLine.line\n ),\n \")\"\n );\n };\n }\n }, {\n key: \"renderThread\",\n value: function renderThread(threadInfo) {\n return _react2.default.createElement(\n \"div\",\n { key: threadInfo.id },\n _react2.default.createElement(\n \"span\",\n { className: \"font-white\" },\n threadInfo.name,\n \" \",\n threadInfo.state,\n \" #\",\n threadInfo.id,\n \" \",\n threadInfo.lockOwnerId\n ),\n _react2.default.createElement(\n \"a\",\n { className: \"copy-button\", \"data-clipboard-target\": \"#stack-trace-\" + threadInfo.id, \"data-toggle\": \"tooltip\", \"data-placement\": \"right\",\n title: \"Copy to clipboard\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-copy\", alt: \"Copy to clipboard\" })\n ),\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"span\",\n { className: \"stack-traces\", id: \"stack-trace-\" + threadInfo.id },\n 
threadInfo.stackTrace.map(this.renderStackLine(threadInfo.id))\n ),\n _react2.default.createElement(\n \"div\",\n null,\n \"\\xA0\"\n )\n );\n }\n }, {\n key: \"render\",\n value: function render() {\n var _this2 = this;\n\n var threads = this.state.threads;\n\n var display = null;\n var toolbar = null;\n if (threads === null) {\n if (this.state.initialized === false) {\n display = _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"button\",\n { className: \"btn btn-info live-button\", onClick: this.handleNewSnapshotClick.bind(this) },\n \"Capture Snapshot\"\n )\n )\n );\n } else {\n display = _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h4\",\n null,\n \"Thread snapshot could not be loaded\"\n )\n )\n );\n }\n } else {\n toolbar = _react2.default.createElement(\n \"div\",\n { className: \"col-xs-9\" },\n _react2.default.createElement(\n \"table\",\n { className: \"header-inline-links\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"small\",\n null,\n \"Snapshot at \",\n this.state.snapshotTime.toTimeString()\n ),\n \"\\xA0\\xA0\"\n ),\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"button\",\n { className: \"btn btn-info live-button\", onClick: this.handleNewSnapshotClick.bind(this) },\n \"New Snapshot\"\n ),\n \"\\xA0\\xA0 \\xA0\\xA0\"\n ),\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"div\",\n { className: \"input-group-btn text-right\" },\n _react2.default.createElement(\n \"button\",\n { type: \"button\", className: \"btn btn-default 
dropdown-toggle pull-right text-right\", \"data-toggle\": \"dropdown\", \"aria-haspopup\": \"true\",\n \"aria-expanded\": \"false\" },\n _react2.default.createElement(\n \"strong\",\n null,\n \"Group:\"\n ),\n \" \",\n this.state.selectedGroup,\n \" \",\n _react2.default.createElement(\"span\", { className: \"caret\" })\n ),\n _react2.default.createElement(\n \"ul\",\n { className: \"dropdown-menu\" },\n Object.keys(threads).map(function (group) {\n return _this2.renderGroupListItem(group);\n })\n )\n )\n ),\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"div\",\n { className: \"input-group-btn text-right\" },\n _react2.default.createElement(\n \"button\",\n { type: \"button\", className: \"btn btn-default dropdown-toggle pull-right text-right\", \"data-toggle\": \"dropdown\", \"aria-haspopup\": \"true\",\n \"aria-expanded\": \"false\" },\n _react2.default.createElement(\n \"strong\",\n null,\n \"State:\"\n ),\n \" \",\n this.state.selectedThreadState,\n \" \",\n _react2.default.createElement(\"span\", { className: \"caret\" })\n ),\n _react2.default.createElement(\n \"ul\",\n { className: \"dropdown-menu\" },\n THREAD_STATES.map(function (state) {\n return _this2.renderThreadStateListItem(state);\n })\n )\n )\n )\n )\n )\n )\n );\n\n var filteredThreads = this.filterThreads(this.state.selectedGroup, this.state.selectedThreadState);\n var displayedThreads = void 0;\n if (filteredThreads.length === 0 && this.state.selectedThreadState === ALL_THREAD_STATE) {\n displayedThreads = _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h4\",\n null,\n \"No threads in group '\",\n this.state.selectedGroup,\n \"'\"\n )\n )\n );\n } else if (filteredThreads.length === 0 && this.state.selectedGroup === ALL_THREADS) {\n displayedThreads = _react2.default.createElement(\n \"div\",\n { className: 
\"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h4\",\n null,\n \"No threads with state \",\n this.state.selectedThreadState\n )\n )\n );\n } else if (filteredThreads.length === 0) {\n displayedThreads = _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h4\",\n null,\n \"No threads in group '\",\n this.state.selectedGroup,\n \"' with state \",\n this.state.selectedThreadState\n )\n )\n );\n } else {\n displayedThreads = _react2.default.createElement(\n \"pre\",\n null,\n filteredThreads.map(function (t) {\n return _this2.renderThread(t);\n })\n );\n }\n\n display = _react2.default.createElement(\n \"div\",\n { id: \"stack-traces\" },\n displayedThreads\n );\n }\n\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-3\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Thread Snapshot\",\n _react2.default.createElement(\n \"a\",\n { className: \"btn copy-button\", \"data-clipboard-target\": \"#stack-traces\", \"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"Copy to clipboard\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-copy\", alt: \"Copy to clipboard\" })\n ),\n \"\\xA0\"\n )\n ),\n toolbar\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\"hr\", { className: \"h3-hr\" }),\n display\n )\n )\n );\n }\n }], [{\n key: \"processThreads\",\n value: function processThreads(threads) {\n var result = {};\n\n result[ALL_THREADS] = threads;\n\n for (var i = 0; i < threads.length; i++) {\n var thread = 
threads[i];\n if (thread.name.match(QUERY_THREAD_REGEX)) {\n result[QUERY_THREADS].push(thread);\n }\n\n var match = THREAD_GROUP_REGEXP.exec(thread.name);\n var threadGroup = match ? match[1] : thread.name;\n if (!result[threadGroup]) {\n result[threadGroup] = [];\n }\n result[threadGroup].push(thread);\n }\n\n return result;\n }\n }]);\n\n return WorkerThreadList;\n}(_react2.default.Component);\n\n//# sourceURL=webpack:///./components/WorkerThreadList.jsx?"); +eval("\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.WorkerThreadList = undefined;\n\nvar _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();\n\nvar _react = __webpack_require__(/*! react */ \"./node_modules/react/index.js\");\n\nvar _react2 = _interopRequireDefault(_react);\n\nvar _utils = __webpack_require__(/*! ../utils */ \"./utils.js\");\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return call && (typeof call === \"object\" || typeof call === \"function\") ? 
call : self; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function, not \" + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nvar ALL_THREADS = \"All Threads\";\nvar QUERY_THREADS = \"Running Queries\";\n\nvar ALL_THREAD_STATE = \"ALL\";\nvar THREAD_STATES = [ALL_THREAD_STATE, \"RUNNABLE\", \"BLOCKED\", \"WAITING\", \"TIMED_WAITING\", \"NEW\", \"TERMINATED\"];\nvar QUERY_THREAD_REGEX = new RegExp(/([0-9])*_([0-9])*_([0-9])*_.*?\\.([0-9])*\\.([0-9])*-([0-9])*-([0-9])*/);\nvar THREAD_GROUP_REGEXP = new RegExp(/(.*?)-[0-9]+/);\n\nvar WorkerThreadList = exports.WorkerThreadList = function (_React$Component) {\n _inherits(WorkerThreadList, _React$Component);\n\n function WorkerThreadList(props) {\n _classCallCheck(this, WorkerThreadList);\n\n var _this = _possibleConstructorReturn(this, (WorkerThreadList.__proto__ || Object.getPrototypeOf(WorkerThreadList)).call(this, props));\n\n _this.state = {\n serverInfo: null,\n initialized: false,\n ended: false,\n\n threads: null,\n\n snapshotTime: null,\n\n selectedGroup: 
ALL_THREADS,\n selectedThreadState: ALL_THREAD_STATE\n };\n return _this;\n }\n\n _createClass(WorkerThreadList, [{\n key: \"captureSnapshot\",\n value: function captureSnapshot() {\n var nodeId = (0, _utils.getFirstParameter)(window.location.search);\n $.get('/ui/api/worker/' + nodeId + '/thread', function (threads) {\n this.setState({\n threads: WorkerThreadList.processThreads(threads),\n snapshotTime: new Date(),\n initialized: true\n });\n }.bind(this)).fail(function () {\n this.setState({\n initialized: true\n });\n }.bind(this));\n }\n }, {\n key: \"componentDidUpdate\",\n value: function componentDidUpdate() {\n new window.ClipboardJS('.copy-button');\n }\n }, {\n key: \"handleGroupClick\",\n value: function handleGroupClick(selectedGroup, event) {\n this.setState({\n selectedGroup: selectedGroup\n });\n event.preventDefault();\n }\n }, {\n key: \"handleThreadStateClick\",\n value: function handleThreadStateClick(selectedThreadState, event) {\n this.setState({\n selectedThreadState: selectedThreadState\n });\n event.preventDefault();\n }\n }, {\n key: \"handleNewSnapshotClick\",\n value: function handleNewSnapshotClick(event) {\n this.setState({\n initialized: false\n });\n this.captureSnapshot();\n event.preventDefault();\n }\n }, {\n key: \"filterThreads\",\n value: function filterThreads(group, state) {\n return this.state.threads[group].filter(function (t) {\n return t.state === state || state === ALL_THREAD_STATE;\n });\n }\n }, {\n key: \"renderGroupListItem\",\n value: function renderGroupListItem(group) {\n return _react2.default.createElement(\n \"li\",\n { key: group },\n _react2.default.createElement(\n \"a\",\n { href: \"#\", className: this.state.selectedGroup === group ? 
\"selected\" : \"\", onClick: this.handleGroupClick.bind(this, group) },\n group,\n \" (\",\n this.filterThreads(group, this.state.selectedThreadState).length,\n \")\"\n )\n );\n }\n }, {\n key: \"renderThreadStateListItem\",\n value: function renderThreadStateListItem(threadState) {\n return _react2.default.createElement(\n \"li\",\n { key: threadState },\n _react2.default.createElement(\n \"a\",\n { href: \"#\", className: this.state.selectedThreadState === threadState ? \"selected\" : \"\", onClick: this.handleThreadStateClick.bind(this, threadState) },\n threadState,\n \" (\",\n this.filterThreads(this.state.selectedGroup, threadState).length,\n \")\"\n )\n );\n }\n }, {\n key: \"renderStackLine\",\n value: function renderStackLine(threadId) {\n return function (stackLine, index) {\n return _react2.default.createElement(\n \"div\",\n { key: threadId + index },\n \"\\xA0\\xA0at \",\n stackLine.className,\n \".\",\n stackLine.method,\n \"(\",\n _react2.default.createElement(\n \"span\",\n { className: \"font-light\" },\n stackLine.file,\n \":\",\n stackLine.line\n ),\n \")\"\n );\n };\n }\n }, {\n key: \"renderThread\",\n value: function renderThread(threadInfo) {\n return _react2.default.createElement(\n \"div\",\n { key: threadInfo.id },\n _react2.default.createElement(\n \"span\",\n { className: \"font-white\" },\n threadInfo.name,\n \" \",\n threadInfo.state,\n \" #\",\n threadInfo.id,\n \" \",\n threadInfo.lockOwnerId\n ),\n _react2.default.createElement(\n \"a\",\n { className: \"copy-button\", \"data-clipboard-target\": \"#stack-trace-\" + threadInfo.id, \"data-toggle\": \"tooltip\", \"data-placement\": \"right\",\n title: \"Copy to clipboard\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-copy\", alt: \"Copy to clipboard\" })\n ),\n _react2.default.createElement(\"br\", null),\n _react2.default.createElement(\n \"span\",\n { className: \"stack-traces\", id: \"stack-trace-\" + threadInfo.id },\n 
threadInfo.stackTrace.map(this.renderStackLine(threadInfo.id))\n ),\n _react2.default.createElement(\n \"div\",\n null,\n \"\\xA0\"\n )\n );\n }\n }, {\n key: \"render\",\n value: function render() {\n var _this2 = this;\n\n var threads = this.state.threads;\n\n var display = null;\n var toolbar = null;\n if (threads === null) {\n if (this.state.initialized === false) {\n display = _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"button\",\n { className: \"btn btn-info live-button\", onClick: this.handleNewSnapshotClick.bind(this) },\n \"Capture Snapshot\"\n )\n )\n );\n } else {\n display = _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h4\",\n null,\n \"Thread snapshot could not be loaded\"\n )\n )\n );\n }\n } else {\n toolbar = _react2.default.createElement(\n \"div\",\n { className: \"col-xs-9\" },\n _react2.default.createElement(\n \"table\",\n { className: \"header-inline-links\" },\n _react2.default.createElement(\n \"tbody\",\n null,\n _react2.default.createElement(\n \"tr\",\n null,\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"small\",\n null,\n \"Snapshot at \",\n this.state.snapshotTime.toTimeString()\n ),\n \"\\xA0\\xA0\"\n ),\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"button\",\n { className: \"btn btn-info live-button\", onClick: this.handleNewSnapshotClick.bind(this) },\n \"New Snapshot\"\n ),\n \"\\xA0\\xA0 \\xA0\\xA0\"\n ),\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"div\",\n { className: \"input-group-btn text-right\" },\n _react2.default.createElement(\n \"button\",\n { type: \"button\", className: \"btn btn-default 
dropdown-toggle pull-right text-right\", \"data-toggle\": \"dropdown\", \"aria-haspopup\": \"true\",\n \"aria-expanded\": \"false\" },\n _react2.default.createElement(\n \"strong\",\n null,\n \"Group:\"\n ),\n \" \",\n this.state.selectedGroup,\n \" \",\n _react2.default.createElement(\"span\", { className: \"caret\" })\n ),\n _react2.default.createElement(\n \"ul\",\n { className: \"dropdown-menu\" },\n Object.keys(threads).map(function (group) {\n return _this2.renderGroupListItem(group);\n })\n )\n )\n ),\n _react2.default.createElement(\n \"td\",\n null,\n _react2.default.createElement(\n \"div\",\n { className: \"input-group-btn text-right\" },\n _react2.default.createElement(\n \"button\",\n { type: \"button\", className: \"btn btn-default dropdown-toggle pull-right text-right\", \"data-toggle\": \"dropdown\", \"aria-haspopup\": \"true\",\n \"aria-expanded\": \"false\" },\n _react2.default.createElement(\n \"strong\",\n null,\n \"State:\"\n ),\n \" \",\n this.state.selectedThreadState,\n \" \",\n _react2.default.createElement(\"span\", { className: \"caret\" })\n ),\n _react2.default.createElement(\n \"ul\",\n { className: \"dropdown-menu\" },\n THREAD_STATES.map(function (state) {\n return _this2.renderThreadStateListItem(state);\n })\n )\n )\n )\n )\n )\n )\n );\n\n var filteredThreads = this.filterThreads(this.state.selectedGroup, this.state.selectedThreadState);\n var displayedThreads = void 0;\n if (filteredThreads.length === 0 && this.state.selectedThreadState === ALL_THREAD_STATE) {\n displayedThreads = _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h4\",\n null,\n \"No threads in group '\",\n this.state.selectedGroup,\n \"'\"\n )\n )\n );\n } else if (filteredThreads.length === 0 && this.state.selectedGroup === ALL_THREADS) {\n displayedThreads = _react2.default.createElement(\n \"div\",\n { className: 
\"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h4\",\n null,\n \"No threads with state \",\n this.state.selectedThreadState\n )\n )\n );\n } else if (filteredThreads.length === 0) {\n displayedThreads = _react2.default.createElement(\n \"div\",\n { className: \"row error-message\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\n \"h4\",\n null,\n \"No threads in group '\",\n this.state.selectedGroup,\n \"' with state \",\n this.state.selectedThreadState\n )\n )\n );\n } else {\n displayedThreads = _react2.default.createElement(\n \"pre\",\n null,\n filteredThreads.map(function (t) {\n return _this2.renderThread(t);\n })\n );\n }\n\n display = _react2.default.createElement(\n \"div\",\n { id: \"stack-traces\" },\n displayedThreads\n );\n }\n\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-3\" },\n _react2.default.createElement(\n \"h3\",\n null,\n \"Thread Snapshot\",\n _react2.default.createElement(\n \"a\",\n { className: \"btn copy-button\", \"data-clipboard-target\": \"#stack-traces\", \"data-toggle\": \"tooltip\", \"data-placement\": \"right\", title: \"Copy to clipboard\" },\n _react2.default.createElement(\"span\", { className: \"glyphicon glyphicon-copy\", alt: \"Copy to clipboard\" })\n ),\n \"\\xA0\"\n )\n ),\n toolbar\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"row\" },\n _react2.default.createElement(\n \"div\",\n { className: \"col-xs-12\" },\n _react2.default.createElement(\"hr\", { className: \"h3-hr\" }),\n display\n )\n )\n );\n }\n }], [{\n key: \"processThreads\",\n value: function processThreads(threads) {\n var result = {};\n\n result[ALL_THREADS] = threads;\n\n for (var i = 0; i < threads.length; i++) {\n var thread = 
threads[i];\n if (thread.name.match(QUERY_THREAD_REGEX)) {\n result[QUERY_THREADS].push(thread);\n }\n\n var match = THREAD_GROUP_REGEXP.exec(thread.name);\n var threadGroup = match ? match[1] : thread.name;\n if (!result[threadGroup]) {\n result[threadGroup] = [];\n }\n result[threadGroup].push(thread);\n }\n\n return result;\n }\n }]);\n\n return WorkerThreadList;\n}(_react2.default.Component);\n\n//# sourceURL=webpack:///./components/WorkerThreadList.jsx?"); /***/ }), diff --git a/presto-main/src/main/resources/webapp/embedded_plan.html b/presto-main/src/main/resources/webapp/embedded_plan.html index 0ab19f29c1da..a3edc098703a 100644 --- a/presto-main/src/main/resources/webapp/embedded_plan.html +++ b/presto-main/src/main/resources/webapp/embedded_plan.html @@ -19,7 +19,7 @@ - + diff --git a/presto-main/src/main/resources/webapp/index.html b/presto-main/src/main/resources/webapp/index.html index 84fbe187fe7c..9cbaef869390 100644 --- a/presto-main/src/main/resources/webapp/index.html +++ b/presto-main/src/main/resources/webapp/index.html @@ -19,7 +19,7 @@ - + diff --git a/presto-main/src/main/resources/webapp/login.html b/presto-main/src/main/resources/webapp/login.html index c907d7deb46d..44b62aad1670 100644 --- a/presto-main/src/main/resources/webapp/login.html +++ b/presto-main/src/main/resources/webapp/login.html @@ -32,7 +32,7 @@ - + diff --git a/presto-main/src/main/resources/webapp/plan.html b/presto-main/src/main/resources/webapp/plan.html index bb861132f1f6..990407ae5e96 100644 --- a/presto-main/src/main/resources/webapp/plan.html +++ b/presto-main/src/main/resources/webapp/plan.html @@ -19,7 +19,7 @@ - + diff --git a/presto-main/src/main/resources/webapp/query.html b/presto-main/src/main/resources/webapp/query.html index 0ae21e601bb8..9b97d76dc0a0 100644 --- a/presto-main/src/main/resources/webapp/query.html +++ b/presto-main/src/main/resources/webapp/query.html @@ -19,7 +19,7 @@ - + diff --git 
a/presto-main/src/main/resources/webapp/src/components/ClusterHUD.jsx b/presto-main/src/main/resources/webapp/src/components/ClusterHUD.jsx index 873ce359eea1..4f2ed43f88d3 100644 --- a/presto-main/src/main/resources/webapp/src/components/ClusterHUD.jsx +++ b/presto-main/src/main/resources/webapp/src/components/ClusterHUD.jsx @@ -111,7 +111,7 @@ export class ClusterHUD extends React.Component { }); this.resetTimer(); }.bind(this)) - .error(function () { + .fail(function () { this.resetTimer(); }.bind(this)); } diff --git a/presto-main/src/main/resources/webapp/src/components/PageTitle.jsx b/presto-main/src/main/resources/webapp/src/components/PageTitle.jsx index 5e43dcbf0d54..5a488bb6e181 100644 --- a/presto-main/src/main/resources/webapp/src/components/PageTitle.jsx +++ b/presto-main/src/main/resources/webapp/src/components/PageTitle.jsx @@ -62,15 +62,15 @@ export class PageTitle extends React.Component { $('#no-connection-modal').modal('hide'); this.resetTimer(); }) - .catch(error => { + .catch(fail => { this.setState({ noConnection: true, lightShown: !this.state.lightShown, - errorText: error + errorText: fail }); this.resetTimer(); - if (!this.state.modalShown && (error || (Date.now() - this.state.lastSuccess) > 30 * 1000)) { + if (!this.state.modalShown && (fail || (Date.now() - this.state.lastSuccess) > 30 * 1000)) { //$FlowFixMe$ Bootstrap 3 plugin $('#no-connection-modal').modal(); this.setState({modalShown: true}); diff --git a/presto-main/src/main/resources/webapp/src/components/QueryDetail.jsx b/presto-main/src/main/resources/webapp/src/components/QueryDetail.jsx index 45d2666df777..99dd66ac07b2 100644 --- a/presto-main/src/main/resources/webapp/src/components/QueryDetail.jsx +++ b/presto-main/src/main/resources/webapp/src/components/QueryDetail.jsx @@ -793,7 +793,7 @@ export class QueryDetail extends React.Component { } this.resetTimer(); }.bind(this)) - .error(() => { + .fail(() => { this.setState({ initialized: true, }); diff --git 
a/presto-main/src/main/resources/webapp/src/components/QueryList.jsx b/presto-main/src/main/resources/webapp/src/components/QueryList.jsx index c56b574a42d1..776b5cbf0a19 100644 --- a/presto-main/src/main/resources/webapp/src/components/QueryList.jsx +++ b/presto-main/src/main/resources/webapp/src/components/QueryList.jsx @@ -376,7 +376,7 @@ export class QueryList extends React.Component { }); this.resetTimer(); }.bind(this)) - .error(function () { + .fail(function () { this.setState({ initialized: true, }); diff --git a/presto-main/src/main/resources/webapp/src/components/StageDetail.jsx b/presto-main/src/main/resources/webapp/src/components/StageDetail.jsx index ffc39220b041..543bbf8dd7ff 100644 --- a/presto-main/src/main/resources/webapp/src/components/StageDetail.jsx +++ b/presto-main/src/main/resources/webapp/src/components/StageDetail.jsx @@ -571,7 +571,7 @@ export class StageDetail extends React.Component { query: query, }); this.resetTimer(); - }).error(() => { + }).fail(() => { this.setState({ initialized: true, }); diff --git a/presto-main/src/main/resources/webapp/src/components/WorkerStatus.jsx b/presto-main/src/main/resources/webapp/src/components/WorkerStatus.jsx index db7710020166..0a268b734a44 100644 --- a/presto-main/src/main/resources/webapp/src/components/WorkerStatus.jsx +++ b/presto-main/src/main/resources/webapp/src/components/WorkerStatus.jsx @@ -73,7 +73,7 @@ export class WorkerStatus extends React.Component { this.resetTimer(); }.bind(this)) - .error(function () { + .fail(function () { this.setState({ initialized: true, }); diff --git a/presto-main/src/main/resources/webapp/src/components/WorkerThreadList.jsx b/presto-main/src/main/resources/webapp/src/components/WorkerThreadList.jsx index d09bb2bcc5a2..9f84a8c37f2a 100644 --- a/presto-main/src/main/resources/webapp/src/components/WorkerThreadList.jsx +++ b/presto-main/src/main/resources/webapp/src/components/WorkerThreadList.jsx @@ -52,7 +52,7 @@ export class WorkerThreadList extends 
React.Component { initialized: true, }); }.bind(this)) - .error(function () { + .fail(function () { this.setState({ initialized: true, }); diff --git a/presto-main/src/main/resources/webapp/stage.html b/presto-main/src/main/resources/webapp/stage.html index 7f1876d48680..defd2942ba0e 100644 --- a/presto-main/src/main/resources/webapp/stage.html +++ b/presto-main/src/main/resources/webapp/stage.html @@ -19,7 +19,7 @@ - + diff --git a/presto-main/src/main/resources/webapp/timeline.html b/presto-main/src/main/resources/webapp/timeline.html index 95d0a1da8b3f..f28d3f1d4a64 100644 --- a/presto-main/src/main/resources/webapp/timeline.html +++ b/presto-main/src/main/resources/webapp/timeline.html @@ -23,7 +23,7 @@ - + diff --git a/presto-main/src/main/resources/webapp/vendor/bootstrap/css/bootstrap-theme.css b/presto-main/src/main/resources/webapp/vendor/bootstrap/css/bootstrap-theme.css old mode 100755 new mode 100644 index 927cd0b57372..9c344574cfd9 --- a/presto-main/src/main/resources/webapp/vendor/bootstrap/css/bootstrap-theme.css +++ b/presto-main/src/main/resources/webapp/vendor/bootstrap/css/bootstrap-theme.css @@ -1,16 +1,9 @@ /*! - * Bootstrap v3.3.5 (http://getbootstrap.com) - * Copyright 2011-2016 Twitter, Inc. - * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) - */ - -/*! - * Generated using the Bootstrap Customizer (http://getbootstrap.com/customize/?id=71446b832bd9dbb87141a654eb911637) - * Config saved to config.json and https://gist.github.com/71446b832bd9dbb87141a654eb911637 + * Generated using the Bootstrap Customizer (https://getbootstrap.com/docs/3.4/customize/) */ /*! - * Bootstrap v3.3.6 (http://getbootstrap.com) - * Copyright 2011-2015 Twitter, Inc. + * Bootstrap v3.4.1 (https://getbootstrap.com/) + * Copyright 2011-2019 Twitter, Inc. 
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) */ .btn-default, @@ -330,8 +323,8 @@ fieldset[disabled] .btn-danger.active { background-image: -o-linear-gradient(top, #f5f5f5 0%, #e8e8e8 100%); background-image: -webkit-gradient(linear, left top, left bottom, from(#f5f5f5), to(#e8e8e8)); background-image: linear-gradient(to bottom, #f5f5f5 0%, #e8e8e8 100%); - background-repeat: repeat-x; filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff5f5f5', endColorstr='#ffe8e8e8', GradientType=0); + background-repeat: repeat-x; background-color: #e8e8e8; } .dropdown-menu > .active > a, @@ -341,8 +334,8 @@ fieldset[disabled] .btn-danger.active { background-image: -o-linear-gradient(top, #418194 0%, #397282 100%); background-image: -webkit-gradient(linear, left top, left bottom, from(#418194), to(#397282)); background-image: linear-gradient(to bottom, #418194 0%, #397282 100%); - background-repeat: repeat-x; filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff418194', endColorstr='#ff397282', GradientType=0); + background-repeat: repeat-x; background-color: #397282; } .navbar-default { @@ -350,8 +343,8 @@ fieldset[disabled] .btn-danger.active { background-image: -o-linear-gradient(top, #ffffff 0%, #f8f8f8 100%); background-image: -webkit-gradient(linear, left top, left bottom, from(#ffffff), to(#f8f8f8)); background-image: linear-gradient(to bottom, #ffffff 0%, #f8f8f8 100%); - background-repeat: repeat-x; filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffffffff', endColorstr='#fff8f8f8', GradientType=0); + background-repeat: repeat-x; filter: progid:DXImageTransform.Microsoft.gradient(enabled = false); border-radius: 0; -webkit-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.15), 0 1px 5px rgba(0, 0, 0, 0.075); @@ -363,8 +356,8 @@ fieldset[disabled] .btn-danger.active { background-image: -o-linear-gradient(top, #dbdbdb 0%, #e2e2e2 100%); background-image: -webkit-gradient(linear, left top, 
left bottom, from(#dbdbdb), to(#e2e2e2)); background-image: linear-gradient(to bottom, #dbdbdb 0%, #e2e2e2 100%); - background-repeat: repeat-x; filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffdbdbdb', endColorstr='#ffe2e2e2', GradientType=0); + background-repeat: repeat-x; -webkit-box-shadow: inset 0 3px 9px rgba(0, 0, 0, 0.075); box-shadow: inset 0 3px 9px rgba(0, 0, 0, 0.075); } @@ -377,8 +370,8 @@ fieldset[disabled] .btn-danger.active { background-image: -o-linear-gradient(top, #3c3c3c 0%, #222222 100%); background-image: -webkit-gradient(linear, left top, left bottom, from(#3c3c3c), to(#222222)); background-image: linear-gradient(to bottom, #3c3c3c 0%, #222222 100%); - background-repeat: repeat-x; filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff3c3c3c', endColorstr='#ff222222', GradientType=0); + background-repeat: repeat-x; filter: progid:DXImageTransform.Microsoft.gradient(enabled = false); border-radius: 0; } @@ -388,8 +381,8 @@ fieldset[disabled] .btn-danger.active { background-image: -o-linear-gradient(top, #080808 0%, #0f0f0f 100%); background-image: -webkit-gradient(linear, left top, left bottom, from(#080808), to(#0f0f0f)); background-image: linear-gradient(to bottom, #080808 0%, #0f0f0f 100%); - background-repeat: repeat-x; filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff080808', endColorstr='#ff0f0f0f', GradientType=0); + background-repeat: repeat-x; -webkit-box-shadow: inset 0 3px 9px rgba(0, 0, 0, 0.25); box-shadow: inset 0 3px 9px rgba(0, 0, 0, 0.25); } @@ -411,8 +404,8 @@ fieldset[disabled] .btn-danger.active { background-image: -o-linear-gradient(top, #418194 0%, #397282 100%); background-image: -webkit-gradient(linear, left top, left bottom, from(#418194), to(#397282)); background-image: linear-gradient(to bottom, #418194 0%, #397282 100%); - background-repeat: repeat-x; filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff418194', endColorstr='#ff397282', 
GradientType=0); + background-repeat: repeat-x; } } .alert { @@ -425,8 +418,8 @@ fieldset[disabled] .btn-danger.active { background-image: -o-linear-gradient(top, #dff0d8 0%, #c8e5bc 100%); background-image: -webkit-gradient(linear, left top, left bottom, from(#dff0d8), to(#c8e5bc)); background-image: linear-gradient(to bottom, #dff0d8 0%, #c8e5bc 100%); - background-repeat: repeat-x; filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffdff0d8', endColorstr='#ffc8e5bc', GradientType=0); + background-repeat: repeat-x; border-color: #b2dba1; } .alert-info { @@ -434,8 +427,8 @@ fieldset[disabled] .btn-danger.active { background-image: -o-linear-gradient(top, #d9edf7 0%, #b9def0 100%); background-image: -webkit-gradient(linear, left top, left bottom, from(#d9edf7), to(#b9def0)); background-image: linear-gradient(to bottom, #d9edf7 0%, #b9def0 100%); - background-repeat: repeat-x; filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffd9edf7', endColorstr='#ffb9def0', GradientType=0); + background-repeat: repeat-x; border-color: #9acfea; } .alert-warning { @@ -443,8 +436,8 @@ fieldset[disabled] .btn-danger.active { background-image: -o-linear-gradient(top, #fcf8e3 0%, #f8efc0 100%); background-image: -webkit-gradient(linear, left top, left bottom, from(#fcf8e3), to(#f8efc0)); background-image: linear-gradient(to bottom, #fcf8e3 0%, #f8efc0 100%); - background-repeat: repeat-x; filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#fffcf8e3', endColorstr='#fff8efc0', GradientType=0); + background-repeat: repeat-x; border-color: #f5e79e; } .alert-danger { @@ -452,8 +445,8 @@ fieldset[disabled] .btn-danger.active { background-image: -o-linear-gradient(top, #f2dede 0%, #e7c3c3 100%); background-image: -webkit-gradient(linear, left top, left bottom, from(#f2dede), to(#e7c3c3)); background-image: linear-gradient(to bottom, #f2dede 0%, #e7c3c3 100%); - background-repeat: repeat-x; filter: 
progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff2dede', endColorstr='#ffe7c3c3', GradientType=0); + background-repeat: repeat-x; border-color: #dca7a7; } .progress { @@ -461,48 +454,48 @@ fieldset[disabled] .btn-danger.active { background-image: -o-linear-gradient(top, #ebebeb 0%, #f5f5f5 100%); background-image: -webkit-gradient(linear, left top, left bottom, from(#ebebeb), to(#f5f5f5)); background-image: linear-gradient(to bottom, #ebebeb 0%, #f5f5f5 100%); - background-repeat: repeat-x; filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffebebeb', endColorstr='#fff5f5f5', GradientType=0); + background-repeat: repeat-x; } .progress-bar { background-image: -webkit-linear-gradient(top, #98e8ff 0%, #65ddff 100%); background-image: -o-linear-gradient(top, #98e8ff 0%, #65ddff 100%); background-image: -webkit-gradient(linear, left top, left bottom, from(#98e8ff), to(#65ddff)); background-image: linear-gradient(to bottom, #98e8ff 0%, #65ddff 100%); - background-repeat: repeat-x; filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff98e8ff', endColorstr='#ff65ddff', GradientType=0); + background-repeat: repeat-x; } .progress-bar-success { background-image: -webkit-linear-gradient(top, #5cb85c 0%, #449d44 100%); background-image: -o-linear-gradient(top, #5cb85c 0%, #449d44 100%); background-image: -webkit-gradient(linear, left top, left bottom, from(#5cb85c), to(#449d44)); background-image: linear-gradient(to bottom, #5cb85c 0%, #449d44 100%); - background-repeat: repeat-x; filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff5cb85c', endColorstr='#ff449d44', GradientType=0); + background-repeat: repeat-x; } .progress-bar-info { background-image: -webkit-linear-gradient(top, #5bc0de 0%, #31b0d5 100%); background-image: -o-linear-gradient(top, #5bc0de 0%, #31b0d5 100%); background-image: -webkit-gradient(linear, left top, left bottom, from(#5bc0de), to(#31b0d5)); background-image: linear-gradient(to bottom, 
#5bc0de 0%, #31b0d5 100%); - background-repeat: repeat-x; filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff5bc0de', endColorstr='#ff31b0d5', GradientType=0); + background-repeat: repeat-x; } .progress-bar-warning { background-image: -webkit-linear-gradient(top, #f0ad4e 0%, #ec971f 100%); background-image: -o-linear-gradient(top, #f0ad4e 0%, #ec971f 100%); background-image: -webkit-gradient(linear, left top, left bottom, from(#f0ad4e), to(#ec971f)); background-image: linear-gradient(to bottom, #f0ad4e 0%, #ec971f 100%); - background-repeat: repeat-x; filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff0ad4e', endColorstr='#ffec971f', GradientType=0); + background-repeat: repeat-x; } .progress-bar-danger { background-image: -webkit-linear-gradient(top, #d9534f 0%, #c9302c 100%); background-image: -o-linear-gradient(top, #d9534f 0%, #c9302c 100%); background-image: -webkit-gradient(linear, left top, left bottom, from(#d9534f), to(#c9302c)); background-image: linear-gradient(to bottom, #d9534f 0%, #c9302c 100%); - background-repeat: repeat-x; filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffd9534f', endColorstr='#ffc9302c', GradientType=0); + background-repeat: repeat-x; } .progress-bar-striped { background-image: -webkit-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent); @@ -522,8 +515,8 @@ fieldset[disabled] .btn-danger.active { background-image: -o-linear-gradient(top, #98e8ff 0%, #72dfff 100%); background-image: -webkit-gradient(linear, left top, left bottom, from(#98e8ff), to(#72dfff)); background-image: linear-gradient(to bottom, #98e8ff 0%, #72dfff 100%); - background-repeat: repeat-x; filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff98e8ff', endColorstr='#ff72dfff', GradientType=0); + background-repeat: repeat-x; border-color: #72dfff; } 
.list-group-item.active .badge, @@ -540,56 +533,56 @@ fieldset[disabled] .btn-danger.active { background-image: -o-linear-gradient(top, #f5f5f5 0%, #e8e8e8 100%); background-image: -webkit-gradient(linear, left top, left bottom, from(#f5f5f5), to(#e8e8e8)); background-image: linear-gradient(to bottom, #f5f5f5 0%, #e8e8e8 100%); - background-repeat: repeat-x; filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff5f5f5', endColorstr='#ffe8e8e8', GradientType=0); + background-repeat: repeat-x; } .panel-primary > .panel-heading { background-image: -webkit-linear-gradient(top, #98e8ff 0%, #7fe2ff 100%); background-image: -o-linear-gradient(top, #98e8ff 0%, #7fe2ff 100%); background-image: -webkit-gradient(linear, left top, left bottom, from(#98e8ff), to(#7fe2ff)); background-image: linear-gradient(to bottom, #98e8ff 0%, #7fe2ff 100%); - background-repeat: repeat-x; filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff98e8ff', endColorstr='#ff7fe2ff', GradientType=0); + background-repeat: repeat-x; } .panel-success > .panel-heading { background-image: -webkit-linear-gradient(top, #dff0d8 0%, #d0e9c6 100%); background-image: -o-linear-gradient(top, #dff0d8 0%, #d0e9c6 100%); background-image: -webkit-gradient(linear, left top, left bottom, from(#dff0d8), to(#d0e9c6)); background-image: linear-gradient(to bottom, #dff0d8 0%, #d0e9c6 100%); - background-repeat: repeat-x; filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffdff0d8', endColorstr='#ffd0e9c6', GradientType=0); + background-repeat: repeat-x; } .panel-info > .panel-heading { background-image: -webkit-linear-gradient(top, #d9edf7 0%, #c4e3f3 100%); background-image: -o-linear-gradient(top, #d9edf7 0%, #c4e3f3 100%); background-image: -webkit-gradient(linear, left top, left bottom, from(#d9edf7), to(#c4e3f3)); background-image: linear-gradient(to bottom, #d9edf7 0%, #c4e3f3 100%); - background-repeat: repeat-x; filter: 
progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffd9edf7', endColorstr='#ffc4e3f3', GradientType=0); + background-repeat: repeat-x; } .panel-warning > .panel-heading { background-image: -webkit-linear-gradient(top, #fcf8e3 0%, #faf2cc 100%); background-image: -o-linear-gradient(top, #fcf8e3 0%, #faf2cc 100%); background-image: -webkit-gradient(linear, left top, left bottom, from(#fcf8e3), to(#faf2cc)); background-image: linear-gradient(to bottom, #fcf8e3 0%, #faf2cc 100%); - background-repeat: repeat-x; filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#fffcf8e3', endColorstr='#fffaf2cc', GradientType=0); + background-repeat: repeat-x; } .panel-danger > .panel-heading { background-image: -webkit-linear-gradient(top, #f2dede 0%, #ebcccc 100%); background-image: -o-linear-gradient(top, #f2dede 0%, #ebcccc 100%); background-image: -webkit-gradient(linear, left top, left bottom, from(#f2dede), to(#ebcccc)); background-image: linear-gradient(to bottom, #f2dede 0%, #ebcccc 100%); - background-repeat: repeat-x; filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff2dede', endColorstr='#ffebcccc', GradientType=0); + background-repeat: repeat-x; } .well { background-image: -webkit-linear-gradient(top, #e8e8e8 0%, #f5f5f5 100%); background-image: -o-linear-gradient(top, #e8e8e8 0%, #f5f5f5 100%); background-image: -webkit-gradient(linear, left top, left bottom, from(#e8e8e8), to(#f5f5f5)); background-image: linear-gradient(to bottom, #e8e8e8 0%, #f5f5f5 100%); - background-repeat: repeat-x; filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffe8e8e8', endColorstr='#fff5f5f5', GradientType=0); + background-repeat: repeat-x; border-color: #dcdcdc; -webkit-box-shadow: inset 0 1px 3px rgba(0, 0, 0, 0.05), 0 1px 0 rgba(255, 255, 255, 0.1); box-shadow: inset 0 1px 3px rgba(0, 0, 0, 0.05), 0 1px 0 rgba(255, 255, 255, 0.1); diff --git a/presto-main/src/main/resources/webapp/vendor/bootstrap/css/bootstrap-theme.min.css 
b/presto-main/src/main/resources/webapp/vendor/bootstrap/css/bootstrap-theme.min.css old mode 100755 new mode 100644 index e166fa881e23..cc9aa23ef9cd --- a/presto-main/src/main/resources/webapp/vendor/bootstrap/css/bootstrap-theme.min.css +++ b/presto-main/src/main/resources/webapp/vendor/bootstrap/css/bootstrap-theme.min.css @@ -1,14 +1,7 @@ /*! - * Bootstrap v3.3.5 (http://getbootstrap.com) - * Copyright 2011-2016 Twitter, Inc. - * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) - */ - -/*! - * Generated using the Bootstrap Customizer (http://getbootstrap.com/customize/?id=71446b832bd9dbb87141a654eb911637) - * Config saved to config.json and https://gist.github.com/71446b832bd9dbb87141a654eb911637 + * Generated using the Bootstrap Customizer (https://getbootstrap.com/docs/3.4/customize/) *//*! - * Bootstrap v3.3.6 (http://getbootstrap.com) - * Copyright 2011-2015 Twitter, Inc. + * Bootstrap v3.4.1 (https://getbootstrap.com/) + * Copyright 2011-2019 Twitter, Inc. 
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) - */.btn-default,.btn-primary,.btn-success,.btn-info,.btn-warning,.btn-danger{text-shadow:0 -1px 0 rgba(0,0,0,0.2);-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,0.15),0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 0 rgba(255,255,255,0.15),0 1px 1px rgba(0,0,0,0.075)}.btn-default:active,.btn-primary:active,.btn-success:active,.btn-info:active,.btn-warning:active,.btn-danger:active,.btn-default.active,.btn-primary.active,.btn-success.active,.btn-info.active,.btn-warning.active,.btn-danger.active{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,0.125);box-shadow:inset 0 3px 5px rgba(0,0,0,0.125)}.btn-default.disabled,.btn-primary.disabled,.btn-success.disabled,.btn-info.disabled,.btn-warning.disabled,.btn-danger.disabled,.btn-default[disabled],.btn-primary[disabled],.btn-success[disabled],.btn-info[disabled],.btn-warning[disabled],.btn-danger[disabled],fieldset[disabled] .btn-default,fieldset[disabled] .btn-primary,fieldset[disabled] .btn-success,fieldset[disabled] .btn-info,fieldset[disabled] .btn-warning,fieldset[disabled] .btn-danger{-webkit-box-shadow:none;box-shadow:none}.btn-default .badge,.btn-primary .badge,.btn-success .badge,.btn-info .badge,.btn-warning .badge,.btn-danger .badge{text-shadow:none}.btn:active,.btn.active{background-image:none}.btn-default{background-image:-webkit-linear-gradient(top, #fff 0, #e0e0e0 100%);background-image:-o-linear-gradient(top, #fff 0, #e0e0e0 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #fff), to(#e0e0e0));background-image:linear-gradient(to bottom, #fff 0, #e0e0e0 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffffffff', endColorstr='#ffe0e0e0', GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);background-repeat:repeat-x;border-color:#dbdbdb;text-shadow:0 1px 0 
#fff;border-color:#ccc}.btn-default:hover,.btn-default:focus{background-color:#e0e0e0;background-position:0 -15px}.btn-default:active,.btn-default.active{background-color:#e0e0e0;border-color:#dbdbdb}.btn-default.disabled,.btn-default[disabled],fieldset[disabled] .btn-default,.btn-default.disabled:hover,.btn-default[disabled]:hover,fieldset[disabled] .btn-default:hover,.btn-default.disabled:focus,.btn-default[disabled]:focus,fieldset[disabled] .btn-default:focus,.btn-default.disabled.focus,.btn-default[disabled].focus,fieldset[disabled] .btn-default.focus,.btn-default.disabled:active,.btn-default[disabled]:active,fieldset[disabled] .btn-default:active,.btn-default.disabled.active,.btn-default[disabled].active,fieldset[disabled] .btn-default.active{background-color:#e0e0e0;background-image:none}.btn-primary{background-image:-webkit-linear-gradient(top, #98e8ff 0, #5bdaff 100%);background-image:-o-linear-gradient(top, #98e8ff 0, #5bdaff 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #98e8ff), to(#5bdaff));background-image:linear-gradient(to bottom, #98e8ff 0, #5bdaff 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff98e8ff', endColorstr='#ff5bdaff', GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);background-repeat:repeat-x;border-color:#51d8ff}.btn-primary:hover,.btn-primary:focus{background-color:#5bdaff;background-position:0 -15px}.btn-primary:active,.btn-primary.active{background-color:#5bdaff;border-color:#51d8ff}.btn-primary.disabled,.btn-primary[disabled],fieldset[disabled] .btn-primary,.btn-primary.disabled:hover,.btn-primary[disabled]:hover,fieldset[disabled] .btn-primary:hover,.btn-primary.disabled:focus,.btn-primary[disabled]:focus,fieldset[disabled] .btn-primary:focus,.btn-primary.disabled.focus,.btn-primary[disabled].focus,fieldset[disabled] .btn-primary.focus,.btn-primary.disabled:active,.btn-primary[disabled]:active,fieldset[disabled] 
.btn-primary:active,.btn-primary.disabled.active,.btn-primary[disabled].active,fieldset[disabled] .btn-primary.active{background-color:#5bdaff;background-image:none}.btn-success{background-image:-webkit-linear-gradient(top, #5cb85c 0, #419641 100%);background-image:-o-linear-gradient(top, #5cb85c 0, #419641 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #5cb85c), to(#419641));background-image:linear-gradient(to bottom, #5cb85c 0, #419641 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff5cb85c', endColorstr='#ff419641', GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);background-repeat:repeat-x;border-color:#3e8f3e}.btn-success:hover,.btn-success:focus{background-color:#419641;background-position:0 -15px}.btn-success:active,.btn-success.active{background-color:#419641;border-color:#3e8f3e}.btn-success.disabled,.btn-success[disabled],fieldset[disabled] .btn-success,.btn-success.disabled:hover,.btn-success[disabled]:hover,fieldset[disabled] .btn-success:hover,.btn-success.disabled:focus,.btn-success[disabled]:focus,fieldset[disabled] .btn-success:focus,.btn-success.disabled.focus,.btn-success[disabled].focus,fieldset[disabled] .btn-success.focus,.btn-success.disabled:active,.btn-success[disabled]:active,fieldset[disabled] .btn-success:active,.btn-success.disabled.active,.btn-success[disabled].active,fieldset[disabled] .btn-success.active{background-color:#419641;background-image:none}.btn-info{background-image:-webkit-linear-gradient(top, #5bc0de 0, #2aabd2 100%);background-image:-o-linear-gradient(top, #5bc0de 0, #2aabd2 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #5bc0de), to(#2aabd2));background-image:linear-gradient(to bottom, #5bc0de 0, #2aabd2 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff5bc0de', endColorstr='#ff2aabd2', GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled = 
false);background-repeat:repeat-x;border-color:#28a4c9}.btn-info:hover,.btn-info:focus{background-color:#2aabd2;background-position:0 -15px}.btn-info:active,.btn-info.active{background-color:#2aabd2;border-color:#28a4c9}.btn-info.disabled,.btn-info[disabled],fieldset[disabled] .btn-info,.btn-info.disabled:hover,.btn-info[disabled]:hover,fieldset[disabled] .btn-info:hover,.btn-info.disabled:focus,.btn-info[disabled]:focus,fieldset[disabled] .btn-info:focus,.btn-info.disabled.focus,.btn-info[disabled].focus,fieldset[disabled] .btn-info.focus,.btn-info.disabled:active,.btn-info[disabled]:active,fieldset[disabled] .btn-info:active,.btn-info.disabled.active,.btn-info[disabled].active,fieldset[disabled] .btn-info.active{background-color:#2aabd2;background-image:none}.btn-warning{background-image:-webkit-linear-gradient(top, #f0ad4e 0, #eb9316 100%);background-image:-o-linear-gradient(top, #f0ad4e 0, #eb9316 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #f0ad4e), to(#eb9316));background-image:linear-gradient(to bottom, #f0ad4e 0, #eb9316 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff0ad4e', endColorstr='#ffeb9316', GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);background-repeat:repeat-x;border-color:#e38d13}.btn-warning:hover,.btn-warning:focus{background-color:#eb9316;background-position:0 -15px}.btn-warning:active,.btn-warning.active{background-color:#eb9316;border-color:#e38d13}.btn-warning.disabled,.btn-warning[disabled],fieldset[disabled] .btn-warning,.btn-warning.disabled:hover,.btn-warning[disabled]:hover,fieldset[disabled] .btn-warning:hover,.btn-warning.disabled:focus,.btn-warning[disabled]:focus,fieldset[disabled] .btn-warning:focus,.btn-warning.disabled.focus,.btn-warning[disabled].focus,fieldset[disabled] .btn-warning.focus,.btn-warning.disabled:active,.btn-warning[disabled]:active,fieldset[disabled] 
.btn-warning:active,.btn-warning.disabled.active,.btn-warning[disabled].active,fieldset[disabled] .btn-warning.active{background-color:#eb9316;background-image:none}.btn-danger{background-image:-webkit-linear-gradient(top, #d9534f 0, #c12e2a 100%);background-image:-o-linear-gradient(top, #d9534f 0, #c12e2a 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #d9534f), to(#c12e2a));background-image:linear-gradient(to bottom, #d9534f 0, #c12e2a 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffd9534f', endColorstr='#ffc12e2a', GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);background-repeat:repeat-x;border-color:#b92c28}.btn-danger:hover,.btn-danger:focus{background-color:#c12e2a;background-position:0 -15px}.btn-danger:active,.btn-danger.active{background-color:#c12e2a;border-color:#b92c28}.btn-danger.disabled,.btn-danger[disabled],fieldset[disabled] .btn-danger,.btn-danger.disabled:hover,.btn-danger[disabled]:hover,fieldset[disabled] .btn-danger:hover,.btn-danger.disabled:focus,.btn-danger[disabled]:focus,fieldset[disabled] .btn-danger:focus,.btn-danger.disabled.focus,.btn-danger[disabled].focus,fieldset[disabled] .btn-danger.focus,.btn-danger.disabled:active,.btn-danger[disabled]:active,fieldset[disabled] .btn-danger:active,.btn-danger.disabled.active,.btn-danger[disabled].active,fieldset[disabled] .btn-danger.active{background-color:#c12e2a;background-image:none}.thumbnail,.img-thumbnail{-webkit-box-shadow:0 1px 2px rgba(0,0,0,0.075);box-shadow:0 1px 2px rgba(0,0,0,0.075)}.dropdown-menu>li>a:hover,.dropdown-menu>li>a:focus{background-image:-webkit-linear-gradient(top, #f5f5f5 0, #e8e8e8 100%);background-image:-o-linear-gradient(top, #f5f5f5 0, #e8e8e8 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #f5f5f5), to(#e8e8e8));background-image:linear-gradient(to bottom, #f5f5f5 0, #e8e8e8 
100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff5f5f5', endColorstr='#ffe8e8e8', GradientType=0);background-color:#e8e8e8}.dropdown-menu>.active>a,.dropdown-menu>.active>a:hover,.dropdown-menu>.active>a:focus{background-image:-webkit-linear-gradient(top, #418194 0, #397282 100%);background-image:-o-linear-gradient(top, #418194 0, #397282 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #418194), to(#397282));background-image:linear-gradient(to bottom, #418194 0, #397282 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff418194', endColorstr='#ff397282', GradientType=0);background-color:#397282}.navbar-default{background-image:-webkit-linear-gradient(top, #fff 0, #f8f8f8 100%);background-image:-o-linear-gradient(top, #fff 0, #f8f8f8 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #fff), to(#f8f8f8));background-image:linear-gradient(to bottom, #fff 0, #f8f8f8 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffffffff', endColorstr='#fff8f8f8', GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);border-radius:0;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,0.15),0 1px 5px rgba(0,0,0,0.075);box-shadow:inset 0 1px 0 rgba(255,255,255,0.15),0 1px 5px rgba(0,0,0,0.075)}.navbar-default .navbar-nav>.open>a,.navbar-default .navbar-nav>.active>a{background-image:-webkit-linear-gradient(top, #dbdbdb 0, #e2e2e2 100%);background-image:-o-linear-gradient(top, #dbdbdb 0, #e2e2e2 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #dbdbdb), to(#e2e2e2));background-image:linear-gradient(to bottom, #dbdbdb 0, #e2e2e2 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffdbdbdb', endColorstr='#ffe2e2e2', GradientType=0);-webkit-box-shadow:inset 0 3px 
9px rgba(0,0,0,0.075);box-shadow:inset 0 3px 9px rgba(0,0,0,0.075)}.navbar-brand,.navbar-nav>li>a{text-shadow:0 1px 0 rgba(255,255,255,0.25)}.navbar-inverse{background-image:-webkit-linear-gradient(top, #3c3c3c 0, #222 100%);background-image:-o-linear-gradient(top, #3c3c3c 0, #222 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #3c3c3c), to(#222));background-image:linear-gradient(to bottom, #3c3c3c 0, #222 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff3c3c3c', endColorstr='#ff222222', GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);border-radius:0}.navbar-inverse .navbar-nav>.open>a,.navbar-inverse .navbar-nav>.active>a{background-image:-webkit-linear-gradient(top, #080808 0, #0f0f0f 100%);background-image:-o-linear-gradient(top, #080808 0, #0f0f0f 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #080808), to(#0f0f0f));background-image:linear-gradient(to bottom, #080808 0, #0f0f0f 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff080808', endColorstr='#ff0f0f0f', GradientType=0);-webkit-box-shadow:inset 0 3px 9px rgba(0,0,0,0.25);box-shadow:inset 0 3px 9px rgba(0,0,0,0.25)}.navbar-inverse .navbar-brand,.navbar-inverse .navbar-nav>li>a{text-shadow:0 -1px 0 rgba(0,0,0,0.25)}.navbar-static-top,.navbar-fixed-top,.navbar-fixed-bottom{border-radius:0}@media (max-width:767px){.navbar .navbar-nav .open .dropdown-menu>.active>a,.navbar .navbar-nav .open .dropdown-menu>.active>a:hover,.navbar .navbar-nav .open .dropdown-menu>.active>a:focus{color:#fff;background-image:-webkit-linear-gradient(top, #418194 0, #397282 100%);background-image:-o-linear-gradient(top, #418194 0, #397282 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #418194), to(#397282));background-image:linear-gradient(to bottom, #418194 0, #397282 
100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff418194', endColorstr='#ff397282', GradientType=0)}}.alert{text-shadow:0 1px 0 rgba(255,255,255,0.2);-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,0.25),0 1px 2px rgba(0,0,0,0.05);box-shadow:inset 0 1px 0 rgba(255,255,255,0.25),0 1px 2px rgba(0,0,0,0.05)}.alert-success{background-image:-webkit-linear-gradient(top, #dff0d8 0, #c8e5bc 100%);background-image:-o-linear-gradient(top, #dff0d8 0, #c8e5bc 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #dff0d8), to(#c8e5bc));background-image:linear-gradient(to bottom, #dff0d8 0, #c8e5bc 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffdff0d8', endColorstr='#ffc8e5bc', GradientType=0);border-color:#b2dba1}.alert-info{background-image:-webkit-linear-gradient(top, #d9edf7 0, #b9def0 100%);background-image:-o-linear-gradient(top, #d9edf7 0, #b9def0 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #d9edf7), to(#b9def0));background-image:linear-gradient(to bottom, #d9edf7 0, #b9def0 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffd9edf7', endColorstr='#ffb9def0', GradientType=0);border-color:#9acfea}.alert-warning{background-image:-webkit-linear-gradient(top, #fcf8e3 0, #f8efc0 100%);background-image:-o-linear-gradient(top, #fcf8e3 0, #f8efc0 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #fcf8e3), to(#f8efc0));background-image:linear-gradient(to bottom, #fcf8e3 0, #f8efc0 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fffcf8e3', endColorstr='#fff8efc0', GradientType=0);border-color:#f5e79e}.alert-danger{background-image:-webkit-linear-gradient(top, #f2dede 0, #e7c3c3 100%);background-image:-o-linear-gradient(top, #f2dede 0, #e7c3c3 
100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #f2dede), to(#e7c3c3));background-image:linear-gradient(to bottom, #f2dede 0, #e7c3c3 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff2dede', endColorstr='#ffe7c3c3', GradientType=0);border-color:#dca7a7}.progress{background-image:-webkit-linear-gradient(top, #ebebeb 0, #f5f5f5 100%);background-image:-o-linear-gradient(top, #ebebeb 0, #f5f5f5 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #ebebeb), to(#f5f5f5));background-image:linear-gradient(to bottom, #ebebeb 0, #f5f5f5 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffebebeb', endColorstr='#fff5f5f5', GradientType=0)}.progress-bar{background-image:-webkit-linear-gradient(top, #98e8ff 0, #65ddff 100%);background-image:-o-linear-gradient(top, #98e8ff 0, #65ddff 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #98e8ff), to(#65ddff));background-image:linear-gradient(to bottom, #98e8ff 0, #65ddff 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff98e8ff', endColorstr='#ff65ddff', GradientType=0)}.progress-bar-success{background-image:-webkit-linear-gradient(top, #5cb85c 0, #449d44 100%);background-image:-o-linear-gradient(top, #5cb85c 0, #449d44 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #5cb85c), to(#449d44));background-image:linear-gradient(to bottom, #5cb85c 0, #449d44 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff5cb85c', endColorstr='#ff449d44', GradientType=0)}.progress-bar-info{background-image:-webkit-linear-gradient(top, #5bc0de 0, #31b0d5 100%);background-image:-o-linear-gradient(top, #5bc0de 0, #31b0d5 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #5bc0de), 
to(#31b0d5));background-image:linear-gradient(to bottom, #5bc0de 0, #31b0d5 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff5bc0de', endColorstr='#ff31b0d5', GradientType=0)}.progress-bar-warning{background-image:-webkit-linear-gradient(top, #f0ad4e 0, #ec971f 100%);background-image:-o-linear-gradient(top, #f0ad4e 0, #ec971f 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #f0ad4e), to(#ec971f));background-image:linear-gradient(to bottom, #f0ad4e 0, #ec971f 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff0ad4e', endColorstr='#ffec971f', GradientType=0)}.progress-bar-danger{background-image:-webkit-linear-gradient(top, #d9534f 0, #c9302c 100%);background-image:-o-linear-gradient(top, #d9534f 0, #c9302c 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #d9534f), to(#c9302c));background-image:linear-gradient(to bottom, #d9534f 0, #c9302c 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffd9534f', endColorstr='#ffc9302c', GradientType=0)}.progress-bar-striped{background-image:-webkit-linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent);background-image:-o-linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent);background-image:linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent)}.list-group{border-radius:0;-webkit-box-shadow:0 1px 2px rgba(0,0,0,0.075);box-shadow:0 1px 2px rgba(0,0,0,0.075)}.list-group-item.active,.list-group-item.active:hover,.list-group-item.active:focus{text-shadow:0 -1px 0 
#65ddff;background-image:-webkit-linear-gradient(top, #98e8ff 0, #72dfff 100%);background-image:-o-linear-gradient(top, #98e8ff 0, #72dfff 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #98e8ff), to(#72dfff));background-image:linear-gradient(to bottom, #98e8ff 0, #72dfff 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff98e8ff', endColorstr='#ff72dfff', GradientType=0);border-color:#72dfff}.list-group-item.active .badge,.list-group-item.active:hover .badge,.list-group-item.active:focus .badge{text-shadow:none}.panel{-webkit-box-shadow:0 1px 2px rgba(0,0,0,0.05);box-shadow:0 1px 2px rgba(0,0,0,0.05)}.panel-default>.panel-heading{background-image:-webkit-linear-gradient(top, #f5f5f5 0, #e8e8e8 100%);background-image:-o-linear-gradient(top, #f5f5f5 0, #e8e8e8 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #f5f5f5), to(#e8e8e8));background-image:linear-gradient(to bottom, #f5f5f5 0, #e8e8e8 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff5f5f5', endColorstr='#ffe8e8e8', GradientType=0)}.panel-primary>.panel-heading{background-image:-webkit-linear-gradient(top, #98e8ff 0, #7fe2ff 100%);background-image:-o-linear-gradient(top, #98e8ff 0, #7fe2ff 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #98e8ff), to(#7fe2ff));background-image:linear-gradient(to bottom, #98e8ff 0, #7fe2ff 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff98e8ff', endColorstr='#ff7fe2ff', GradientType=0)}.panel-success>.panel-heading{background-image:-webkit-linear-gradient(top, #dff0d8 0, #d0e9c6 100%);background-image:-o-linear-gradient(top, #dff0d8 0, #d0e9c6 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #dff0d8), to(#d0e9c6));background-image:linear-gradient(to bottom, #dff0d8 0, #d0e9c6 
100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffdff0d8', endColorstr='#ffd0e9c6', GradientType=0)}.panel-info>.panel-heading{background-image:-webkit-linear-gradient(top, #d9edf7 0, #c4e3f3 100%);background-image:-o-linear-gradient(top, #d9edf7 0, #c4e3f3 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #d9edf7), to(#c4e3f3));background-image:linear-gradient(to bottom, #d9edf7 0, #c4e3f3 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffd9edf7', endColorstr='#ffc4e3f3', GradientType=0)}.panel-warning>.panel-heading{background-image:-webkit-linear-gradient(top, #fcf8e3 0, #faf2cc 100%);background-image:-o-linear-gradient(top, #fcf8e3 0, #faf2cc 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #fcf8e3), to(#faf2cc));background-image:linear-gradient(to bottom, #fcf8e3 0, #faf2cc 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fffcf8e3', endColorstr='#fffaf2cc', GradientType=0)}.panel-danger>.panel-heading{background-image:-webkit-linear-gradient(top, #f2dede 0, #ebcccc 100%);background-image:-o-linear-gradient(top, #f2dede 0, #ebcccc 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #f2dede), to(#ebcccc));background-image:linear-gradient(to bottom, #f2dede 0, #ebcccc 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff2dede', endColorstr='#ffebcccc', GradientType=0)}.well{background-image:-webkit-linear-gradient(top, #e8e8e8 0, #f5f5f5 100%);background-image:-o-linear-gradient(top, #e8e8e8 0, #f5f5f5 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #e8e8e8), to(#f5f5f5));background-image:linear-gradient(to bottom, #e8e8e8 0, #f5f5f5 
100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffe8e8e8', endColorstr='#fff5f5f5', GradientType=0);border-color:#dcdcdc;-webkit-box-shadow:inset 0 1px 3px rgba(0,0,0,0.05),0 1px 0 rgba(255,255,255,0.1);box-shadow:inset 0 1px 3px rgba(0,0,0,0.05),0 1px 0 rgba(255,255,255,0.1)} \ No newline at end of file + */.btn-default,.btn-primary,.btn-success,.btn-info,.btn-warning,.btn-danger{text-shadow:0 -1px 0 rgba(0,0,0,0.2);-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,0.15),0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 0 rgba(255,255,255,0.15),0 1px 1px rgba(0,0,0,0.075)}.btn-default:active,.btn-primary:active,.btn-success:active,.btn-info:active,.btn-warning:active,.btn-danger:active,.btn-default.active,.btn-primary.active,.btn-success.active,.btn-info.active,.btn-warning.active,.btn-danger.active{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,0.125);box-shadow:inset 0 3px 5px rgba(0,0,0,0.125)}.btn-default.disabled,.btn-primary.disabled,.btn-success.disabled,.btn-info.disabled,.btn-warning.disabled,.btn-danger.disabled,.btn-default[disabled],.btn-primary[disabled],.btn-success[disabled],.btn-info[disabled],.btn-warning[disabled],.btn-danger[disabled],fieldset[disabled] .btn-default,fieldset[disabled] .btn-primary,fieldset[disabled] .btn-success,fieldset[disabled] .btn-info,fieldset[disabled] .btn-warning,fieldset[disabled] .btn-danger{-webkit-box-shadow:none;box-shadow:none}.btn-default .badge,.btn-primary .badge,.btn-success .badge,.btn-info .badge,.btn-warning .badge,.btn-danger .badge{text-shadow:none}.btn:active,.btn.active{background-image:none}.btn-default{background-image:-webkit-linear-gradient(top, #fff 0, #e0e0e0 100%);background-image:-o-linear-gradient(top, #fff 0, #e0e0e0 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #fff), to(#e0e0e0));background-image:linear-gradient(to bottom, #fff 0, #e0e0e0 
100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffffffff', endColorstr='#ffe0e0e0', GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);background-repeat:repeat-x;border-color:#dbdbdb;text-shadow:0 1px 0 #fff;border-color:#ccc}.btn-default:hover,.btn-default:focus{background-color:#e0e0e0;background-position:0 -15px}.btn-default:active,.btn-default.active{background-color:#e0e0e0;border-color:#dbdbdb}.btn-default.disabled,.btn-default[disabled],fieldset[disabled] .btn-default,.btn-default.disabled:hover,.btn-default[disabled]:hover,fieldset[disabled] .btn-default:hover,.btn-default.disabled:focus,.btn-default[disabled]:focus,fieldset[disabled] .btn-default:focus,.btn-default.disabled.focus,.btn-default[disabled].focus,fieldset[disabled] .btn-default.focus,.btn-default.disabled:active,.btn-default[disabled]:active,fieldset[disabled] .btn-default:active,.btn-default.disabled.active,.btn-default[disabled].active,fieldset[disabled] .btn-default.active{background-color:#e0e0e0;background-image:none}.btn-primary{background-image:-webkit-linear-gradient(top, #98e8ff 0, #5bdaff 100%);background-image:-o-linear-gradient(top, #98e8ff 0, #5bdaff 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #98e8ff), to(#5bdaff));background-image:linear-gradient(to bottom, #98e8ff 0, #5bdaff 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff98e8ff', endColorstr='#ff5bdaff', GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);background-repeat:repeat-x;border-color:#51d8ff}.btn-primary:hover,.btn-primary:focus{background-color:#5bdaff;background-position:0 -15px}.btn-primary:active,.btn-primary.active{background-color:#5bdaff;border-color:#51d8ff}.btn-primary.disabled,.btn-primary[disabled],fieldset[disabled] .btn-primary,.btn-primary.disabled:hover,.btn-primary[disabled]:hover,fieldset[disabled] 
.btn-primary:hover,.btn-primary.disabled:focus,.btn-primary[disabled]:focus,fieldset[disabled] .btn-primary:focus,.btn-primary.disabled.focus,.btn-primary[disabled].focus,fieldset[disabled] .btn-primary.focus,.btn-primary.disabled:active,.btn-primary[disabled]:active,fieldset[disabled] .btn-primary:active,.btn-primary.disabled.active,.btn-primary[disabled].active,fieldset[disabled] .btn-primary.active{background-color:#5bdaff;background-image:none}.btn-success{background-image:-webkit-linear-gradient(top, #5cb85c 0, #419641 100%);background-image:-o-linear-gradient(top, #5cb85c 0, #419641 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #5cb85c), to(#419641));background-image:linear-gradient(to bottom, #5cb85c 0, #419641 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff5cb85c', endColorstr='#ff419641', GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);background-repeat:repeat-x;border-color:#3e8f3e}.btn-success:hover,.btn-success:focus{background-color:#419641;background-position:0 -15px}.btn-success:active,.btn-success.active{background-color:#419641;border-color:#3e8f3e}.btn-success.disabled,.btn-success[disabled],fieldset[disabled] .btn-success,.btn-success.disabled:hover,.btn-success[disabled]:hover,fieldset[disabled] .btn-success:hover,.btn-success.disabled:focus,.btn-success[disabled]:focus,fieldset[disabled] .btn-success:focus,.btn-success.disabled.focus,.btn-success[disabled].focus,fieldset[disabled] .btn-success.focus,.btn-success.disabled:active,.btn-success[disabled]:active,fieldset[disabled] .btn-success:active,.btn-success.disabled.active,.btn-success[disabled].active,fieldset[disabled] .btn-success.active{background-color:#419641;background-image:none}.btn-info{background-image:-webkit-linear-gradient(top, #5bc0de 0, #2aabd2 100%);background-image:-o-linear-gradient(top, #5bc0de 0, #2aabd2 100%);background-image:-webkit-gradient(linear, left top, left bottom, 
color-stop(0, #5bc0de), to(#2aabd2));background-image:linear-gradient(to bottom, #5bc0de 0, #2aabd2 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff5bc0de', endColorstr='#ff2aabd2', GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);background-repeat:repeat-x;border-color:#28a4c9}.btn-info:hover,.btn-info:focus{background-color:#2aabd2;background-position:0 -15px}.btn-info:active,.btn-info.active{background-color:#2aabd2;border-color:#28a4c9}.btn-info.disabled,.btn-info[disabled],fieldset[disabled] .btn-info,.btn-info.disabled:hover,.btn-info[disabled]:hover,fieldset[disabled] .btn-info:hover,.btn-info.disabled:focus,.btn-info[disabled]:focus,fieldset[disabled] .btn-info:focus,.btn-info.disabled.focus,.btn-info[disabled].focus,fieldset[disabled] .btn-info.focus,.btn-info.disabled:active,.btn-info[disabled]:active,fieldset[disabled] .btn-info:active,.btn-info.disabled.active,.btn-info[disabled].active,fieldset[disabled] .btn-info.active{background-color:#2aabd2;background-image:none}.btn-warning{background-image:-webkit-linear-gradient(top, #f0ad4e 0, #eb9316 100%);background-image:-o-linear-gradient(top, #f0ad4e 0, #eb9316 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #f0ad4e), to(#eb9316));background-image:linear-gradient(to bottom, #f0ad4e 0, #eb9316 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff0ad4e', endColorstr='#ffeb9316', GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);background-repeat:repeat-x;border-color:#e38d13}.btn-warning:hover,.btn-warning:focus{background-color:#eb9316;background-position:0 -15px}.btn-warning:active,.btn-warning.active{background-color:#eb9316;border-color:#e38d13}.btn-warning.disabled,.btn-warning[disabled],fieldset[disabled] .btn-warning,.btn-warning.disabled:hover,.btn-warning[disabled]:hover,fieldset[disabled] 
.btn-warning:hover,.btn-warning.disabled:focus,.btn-warning[disabled]:focus,fieldset[disabled] .btn-warning:focus,.btn-warning.disabled.focus,.btn-warning[disabled].focus,fieldset[disabled] .btn-warning.focus,.btn-warning.disabled:active,.btn-warning[disabled]:active,fieldset[disabled] .btn-warning:active,.btn-warning.disabled.active,.btn-warning[disabled].active,fieldset[disabled] .btn-warning.active{background-color:#eb9316;background-image:none}.btn-danger{background-image:-webkit-linear-gradient(top, #d9534f 0, #c12e2a 100%);background-image:-o-linear-gradient(top, #d9534f 0, #c12e2a 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #d9534f), to(#c12e2a));background-image:linear-gradient(to bottom, #d9534f 0, #c12e2a 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffd9534f', endColorstr='#ffc12e2a', GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);background-repeat:repeat-x;border-color:#b92c28}.btn-danger:hover,.btn-danger:focus{background-color:#c12e2a;background-position:0 -15px}.btn-danger:active,.btn-danger.active{background-color:#c12e2a;border-color:#b92c28}.btn-danger.disabled,.btn-danger[disabled],fieldset[disabled] .btn-danger,.btn-danger.disabled:hover,.btn-danger[disabled]:hover,fieldset[disabled] .btn-danger:hover,.btn-danger.disabled:focus,.btn-danger[disabled]:focus,fieldset[disabled] .btn-danger:focus,.btn-danger.disabled.focus,.btn-danger[disabled].focus,fieldset[disabled] .btn-danger.focus,.btn-danger.disabled:active,.btn-danger[disabled]:active,fieldset[disabled] .btn-danger:active,.btn-danger.disabled.active,.btn-danger[disabled].active,fieldset[disabled] .btn-danger.active{background-color:#c12e2a;background-image:none}.thumbnail,.img-thumbnail{-webkit-box-shadow:0 1px 2px rgba(0,0,0,0.075);box-shadow:0 1px 2px rgba(0,0,0,0.075)}.dropdown-menu>li>a:hover,.dropdown-menu>li>a:focus{background-image:-webkit-linear-gradient(top, #f5f5f5 0, #e8e8e8 
100%);background-image:-o-linear-gradient(top, #f5f5f5 0, #e8e8e8 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #f5f5f5), to(#e8e8e8));background-image:linear-gradient(to bottom, #f5f5f5 0, #e8e8e8 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff5f5f5', endColorstr='#ffe8e8e8', GradientType=0);background-repeat:repeat-x;background-color:#e8e8e8}.dropdown-menu>.active>a,.dropdown-menu>.active>a:hover,.dropdown-menu>.active>a:focus{background-image:-webkit-linear-gradient(top, #418194 0, #397282 100%);background-image:-o-linear-gradient(top, #418194 0, #397282 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #418194), to(#397282));background-image:linear-gradient(to bottom, #418194 0, #397282 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff418194', endColorstr='#ff397282', GradientType=0);background-repeat:repeat-x;background-color:#397282}.navbar-default{background-image:-webkit-linear-gradient(top, #fff 0, #f8f8f8 100%);background-image:-o-linear-gradient(top, #fff 0, #f8f8f8 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #fff), to(#f8f8f8));background-image:linear-gradient(to bottom, #fff 0, #f8f8f8 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffffffff', endColorstr='#fff8f8f8', GradientType=0);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);border-radius:0;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,0.15),0 1px 5px rgba(0,0,0,0.075);box-shadow:inset 0 1px 0 rgba(255,255,255,0.15),0 1px 5px rgba(0,0,0,0.075)}.navbar-default .navbar-nav>.open>a,.navbar-default .navbar-nav>.active>a{background-image:-webkit-linear-gradient(top, #dbdbdb 0, #e2e2e2 100%);background-image:-o-linear-gradient(top, #dbdbdb 0, #e2e2e2 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #dbdbdb), 
to(#e2e2e2));background-image:linear-gradient(to bottom, #dbdbdb 0, #e2e2e2 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffdbdbdb', endColorstr='#ffe2e2e2', GradientType=0);background-repeat:repeat-x;-webkit-box-shadow:inset 0 3px 9px rgba(0,0,0,0.075);box-shadow:inset 0 3px 9px rgba(0,0,0,0.075)}.navbar-brand,.navbar-nav>li>a{text-shadow:0 1px 0 rgba(255,255,255,0.25)}.navbar-inverse{background-image:-webkit-linear-gradient(top, #3c3c3c 0, #222 100%);background-image:-o-linear-gradient(top, #3c3c3c 0, #222 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #3c3c3c), to(#222));background-image:linear-gradient(to bottom, #3c3c3c 0, #222 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff3c3c3c', endColorstr='#ff222222', GradientType=0);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);border-radius:0}.navbar-inverse .navbar-nav>.open>a,.navbar-inverse .navbar-nav>.active>a{background-image:-webkit-linear-gradient(top, #080808 0, #0f0f0f 100%);background-image:-o-linear-gradient(top, #080808 0, #0f0f0f 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #080808), to(#0f0f0f));background-image:linear-gradient(to bottom, #080808 0, #0f0f0f 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff080808', endColorstr='#ff0f0f0f', GradientType=0);background-repeat:repeat-x;-webkit-box-shadow:inset 0 3px 9px rgba(0,0,0,0.25);box-shadow:inset 0 3px 9px rgba(0,0,0,0.25)}.navbar-inverse .navbar-brand,.navbar-inverse .navbar-nav>li>a{text-shadow:0 -1px 0 rgba(0,0,0,0.25)}.navbar-static-top,.navbar-fixed-top,.navbar-fixed-bottom{border-radius:0}@media (max-width:767px){.navbar .navbar-nav .open .dropdown-menu>.active>a,.navbar .navbar-nav .open .dropdown-menu>.active>a:hover,.navbar .navbar-nav .open .dropdown-menu>.active>a:focus{color:#fff;background-image:-webkit-linear-gradient(top, #418194 0, 
#397282 100%);background-image:-o-linear-gradient(top, #418194 0, #397282 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #418194), to(#397282));background-image:linear-gradient(to bottom, #418194 0, #397282 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff418194', endColorstr='#ff397282', GradientType=0);background-repeat:repeat-x}}.alert{text-shadow:0 1px 0 rgba(255,255,255,0.2);-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,0.25),0 1px 2px rgba(0,0,0,0.05);box-shadow:inset 0 1px 0 rgba(255,255,255,0.25),0 1px 2px rgba(0,0,0,0.05)}.alert-success{background-image:-webkit-linear-gradient(top, #dff0d8 0, #c8e5bc 100%);background-image:-o-linear-gradient(top, #dff0d8 0, #c8e5bc 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #dff0d8), to(#c8e5bc));background-image:linear-gradient(to bottom, #dff0d8 0, #c8e5bc 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffdff0d8', endColorstr='#ffc8e5bc', GradientType=0);background-repeat:repeat-x;border-color:#b2dba1}.alert-info{background-image:-webkit-linear-gradient(top, #d9edf7 0, #b9def0 100%);background-image:-o-linear-gradient(top, #d9edf7 0, #b9def0 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #d9edf7), to(#b9def0));background-image:linear-gradient(to bottom, #d9edf7 0, #b9def0 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffd9edf7', endColorstr='#ffb9def0', GradientType=0);background-repeat:repeat-x;border-color:#9acfea}.alert-warning{background-image:-webkit-linear-gradient(top, #fcf8e3 0, #f8efc0 100%);background-image:-o-linear-gradient(top, #fcf8e3 0, #f8efc0 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #fcf8e3), to(#f8efc0));background-image:linear-gradient(to bottom, #fcf8e3 0, #f8efc0 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fffcf8e3', 
endColorstr='#fff8efc0', GradientType=0);background-repeat:repeat-x;border-color:#f5e79e}.alert-danger{background-image:-webkit-linear-gradient(top, #f2dede 0, #e7c3c3 100%);background-image:-o-linear-gradient(top, #f2dede 0, #e7c3c3 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #f2dede), to(#e7c3c3));background-image:linear-gradient(to bottom, #f2dede 0, #e7c3c3 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff2dede', endColorstr='#ffe7c3c3', GradientType=0);background-repeat:repeat-x;border-color:#dca7a7}.progress{background-image:-webkit-linear-gradient(top, #ebebeb 0, #f5f5f5 100%);background-image:-o-linear-gradient(top, #ebebeb 0, #f5f5f5 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #ebebeb), to(#f5f5f5));background-image:linear-gradient(to bottom, #ebebeb 0, #f5f5f5 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffebebeb', endColorstr='#fff5f5f5', GradientType=0);background-repeat:repeat-x}.progress-bar{background-image:-webkit-linear-gradient(top, #98e8ff 0, #65ddff 100%);background-image:-o-linear-gradient(top, #98e8ff 0, #65ddff 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #98e8ff), to(#65ddff));background-image:linear-gradient(to bottom, #98e8ff 0, #65ddff 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff98e8ff', endColorstr='#ff65ddff', GradientType=0);background-repeat:repeat-x}.progress-bar-success{background-image:-webkit-linear-gradient(top, #5cb85c 0, #449d44 100%);background-image:-o-linear-gradient(top, #5cb85c 0, #449d44 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #5cb85c), to(#449d44));background-image:linear-gradient(to bottom, #5cb85c 0, #449d44 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff5cb85c', endColorstr='#ff449d44', 
GradientType=0);background-repeat:repeat-x}.progress-bar-info{background-image:-webkit-linear-gradient(top, #5bc0de 0, #31b0d5 100%);background-image:-o-linear-gradient(top, #5bc0de 0, #31b0d5 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #5bc0de), to(#31b0d5));background-image:linear-gradient(to bottom, #5bc0de 0, #31b0d5 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff5bc0de', endColorstr='#ff31b0d5', GradientType=0);background-repeat:repeat-x}.progress-bar-warning{background-image:-webkit-linear-gradient(top, #f0ad4e 0, #ec971f 100%);background-image:-o-linear-gradient(top, #f0ad4e 0, #ec971f 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #f0ad4e), to(#ec971f));background-image:linear-gradient(to bottom, #f0ad4e 0, #ec971f 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff0ad4e', endColorstr='#ffec971f', GradientType=0);background-repeat:repeat-x}.progress-bar-danger{background-image:-webkit-linear-gradient(top, #d9534f 0, #c9302c 100%);background-image:-o-linear-gradient(top, #d9534f 0, #c9302c 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #d9534f), to(#c9302c));background-image:linear-gradient(to bottom, #d9534f 0, #c9302c 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffd9534f', endColorstr='#ffc9302c', GradientType=0);background-repeat:repeat-x}.progress-bar-striped{background-image:-webkit-linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent);background-image:-o-linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent);background-image:linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 
50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent)}.list-group{border-radius:0;-webkit-box-shadow:0 1px 2px rgba(0,0,0,0.075);box-shadow:0 1px 2px rgba(0,0,0,0.075)}.list-group-item.active,.list-group-item.active:hover,.list-group-item.active:focus{text-shadow:0 -1px 0 #65ddff;background-image:-webkit-linear-gradient(top, #98e8ff 0, #72dfff 100%);background-image:-o-linear-gradient(top, #98e8ff 0, #72dfff 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #98e8ff), to(#72dfff));background-image:linear-gradient(to bottom, #98e8ff 0, #72dfff 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff98e8ff', endColorstr='#ff72dfff', GradientType=0);background-repeat:repeat-x;border-color:#72dfff}.list-group-item.active .badge,.list-group-item.active:hover .badge,.list-group-item.active:focus .badge{text-shadow:none}.panel{-webkit-box-shadow:0 1px 2px rgba(0,0,0,0.05);box-shadow:0 1px 2px rgba(0,0,0,0.05)}.panel-default>.panel-heading{background-image:-webkit-linear-gradient(top, #f5f5f5 0, #e8e8e8 100%);background-image:-o-linear-gradient(top, #f5f5f5 0, #e8e8e8 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #f5f5f5), to(#e8e8e8));background-image:linear-gradient(to bottom, #f5f5f5 0, #e8e8e8 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff5f5f5', endColorstr='#ffe8e8e8', GradientType=0);background-repeat:repeat-x}.panel-primary>.panel-heading{background-image:-webkit-linear-gradient(top, #98e8ff 0, #7fe2ff 100%);background-image:-o-linear-gradient(top, #98e8ff 0, #7fe2ff 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #98e8ff), to(#7fe2ff));background-image:linear-gradient(to bottom, #98e8ff 0, #7fe2ff 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff98e8ff', endColorstr='#ff7fe2ff', 
GradientType=0);background-repeat:repeat-x}.panel-success>.panel-heading{background-image:-webkit-linear-gradient(top, #dff0d8 0, #d0e9c6 100%);background-image:-o-linear-gradient(top, #dff0d8 0, #d0e9c6 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #dff0d8), to(#d0e9c6));background-image:linear-gradient(to bottom, #dff0d8 0, #d0e9c6 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffdff0d8', endColorstr='#ffd0e9c6', GradientType=0);background-repeat:repeat-x}.panel-info>.panel-heading{background-image:-webkit-linear-gradient(top, #d9edf7 0, #c4e3f3 100%);background-image:-o-linear-gradient(top, #d9edf7 0, #c4e3f3 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #d9edf7), to(#c4e3f3));background-image:linear-gradient(to bottom, #d9edf7 0, #c4e3f3 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffd9edf7', endColorstr='#ffc4e3f3', GradientType=0);background-repeat:repeat-x}.panel-warning>.panel-heading{background-image:-webkit-linear-gradient(top, #fcf8e3 0, #faf2cc 100%);background-image:-o-linear-gradient(top, #fcf8e3 0, #faf2cc 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #fcf8e3), to(#faf2cc));background-image:linear-gradient(to bottom, #fcf8e3 0, #faf2cc 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fffcf8e3', endColorstr='#fffaf2cc', GradientType=0);background-repeat:repeat-x}.panel-danger>.panel-heading{background-image:-webkit-linear-gradient(top, #f2dede 0, #ebcccc 100%);background-image:-o-linear-gradient(top, #f2dede 0, #ebcccc 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #f2dede), to(#ebcccc));background-image:linear-gradient(to bottom, #f2dede 0, #ebcccc 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff2dede', endColorstr='#ffebcccc', 
GradientType=0);background-repeat:repeat-x}.well{background-image:-webkit-linear-gradient(top, #e8e8e8 0, #f5f5f5 100%);background-image:-o-linear-gradient(top, #e8e8e8 0, #f5f5f5 100%);background-image:-webkit-gradient(linear, left top, left bottom, color-stop(0, #e8e8e8), to(#f5f5f5));background-image:linear-gradient(to bottom, #e8e8e8 0, #f5f5f5 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffe8e8e8', endColorstr='#fff5f5f5', GradientType=0);background-repeat:repeat-x;border-color:#dcdcdc;-webkit-box-shadow:inset 0 1px 3px rgba(0,0,0,0.05),0 1px 0 rgba(255,255,255,0.1);box-shadow:inset 0 1px 3px rgba(0,0,0,0.05),0 1px 0 rgba(255,255,255,0.1)} \ No newline at end of file diff --git a/presto-main/src/main/resources/webapp/vendor/bootstrap/css/bootstrap.css b/presto-main/src/main/resources/webapp/vendor/bootstrap/css/bootstrap.css old mode 100755 new mode 100644 index 53673ee7952f..911aa943f343 --- a/presto-main/src/main/resources/webapp/vendor/bootstrap/css/bootstrap.css +++ b/presto-main/src/main/resources/webapp/vendor/bootstrap/css/bootstrap.css @@ -1,16 +1,9 @@ /*! - * Bootstrap v3.3.5 (http://getbootstrap.com) - * Copyright 2011-2016 Twitter, Inc. - * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) - */ - -/*! - * Generated using the Bootstrap Customizer (http://getbootstrap.com/customize/?id=71446b832bd9dbb87141a654eb911637) - * Config saved to config.json and https://gist.github.com/71446b832bd9dbb87141a654eb911637 + * Generated using the Bootstrap Customizer (https://getbootstrap.com/docs/3.4/customize/) */ /*! - * Bootstrap v3.3.6 (http://getbootstrap.com) - * Copyright 2011-2015 Twitter, Inc. + * Bootstrap v3.4.1 (https://getbootstrap.com/) + * Copyright 2011-2019 Twitter, Inc. * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) */ /*! 
normalize.css v3.0.3 | MIT License | github.com/necolas/normalize.css */ @@ -60,7 +53,9 @@ a:hover { outline: 0; } abbr[title] { - border-bottom: 1px dotted; + border-bottom: none; + text-decoration: underline; + text-decoration: underline dotted; } b, strong { @@ -104,8 +99,8 @@ figure { } hr { -webkit-box-sizing: content-box; - -moz-box-sizing: content-box; - box-sizing: content-box; + -moz-box-sizing: content-box; + box-sizing: content-box; height: 0; } pre { @@ -156,8 +151,8 @@ input { input[type="checkbox"], input[type="radio"] { -webkit-box-sizing: border-box; - -moz-box-sizing: border-box; - box-sizing: border-box; + -moz-box-sizing: border-box; + box-sizing: border-box; padding: 0; } input[type="number"]::-webkit-inner-spin-button, @@ -167,8 +162,8 @@ input[type="number"]::-webkit-outer-spin-button { input[type="search"] { -webkit-appearance: textfield; -webkit-box-sizing: content-box; - -moz-box-sizing: content-box; - box-sizing: content-box; + -moz-box-sizing: content-box; + box-sizing: content-box; } input[type="search"]::-webkit-search-cancel-button, input[type="search"]::-webkit-search-decoration { @@ -202,11 +197,11 @@ th { *, *:before, *:after { - background: transparent !important; color: #000 !important; - -webkit-box-shadow: none !important; - box-shadow: none !important; text-shadow: none !important; + background: transparent !important; + -webkit-box-shadow: none !important; + box-shadow: none !important; } a, a:visited { @@ -270,17 +265,17 @@ th { } } @font-face { - font-family: 'Glyphicons Halflings'; - src: url('../fonts/glyphicons-halflings-regular.eot'); - src: url('../fonts/glyphicons-halflings-regular.eot?#iefix') format('embedded-opentype'), url('../fonts/glyphicons-halflings-regular.woff2') format('woff2'), url('../fonts/glyphicons-halflings-regular.woff') format('woff'), url('../fonts/glyphicons-halflings-regular.ttf') format('truetype'), url('../fonts/glyphicons-halflings-regular.svg#glyphicons_halflingsregular') format('svg'); + 
font-family: "Glyphicons Halflings"; + src: url("../fonts/glyphicons-halflings-regular.eot"); + src: url("../fonts/glyphicons-halflings-regular.eot?#iefix") format("embedded-opentype"), url("../fonts/glyphicons-halflings-regular.woff2") format("woff2"), url("../fonts/glyphicons-halflings-regular.woff") format("woff"), url("../fonts/glyphicons-halflings-regular.ttf") format("truetype"), url("../fonts/glyphicons-halflings-regular.svg#glyphicons_halflingsregular") format("svg"); } .glyphicon { position: relative; top: 1px; display: inline-block; - font-family: 'Glyphicons Halflings'; + font-family: "Glyphicons Halflings"; font-style: normal; - font-weight: normal; + font-weight: 400; line-height: 1; -webkit-font-smoothing: antialiased; -moz-osx-font-smoothing: grayscale; @@ -1112,7 +1107,6 @@ a:focus { text-decoration: underline; } a:focus { - outline: thin dotted; outline: 5px auto -webkit-focus-ring-color; outline-offset: -2px; } @@ -1160,8 +1154,8 @@ hr { position: absolute; width: 1px; height: 1px; - margin: -1px; padding: 0; + margin: -1px; overflow: hidden; clip: rect(0, 0, 0, 0); border: 0; @@ -1219,7 +1213,7 @@ h6 .small, .h4 .small, .h5 .small, .h6 .small { - font-weight: normal; + font-weight: 400; line-height: 1; color: #222222; } @@ -1313,8 +1307,8 @@ small, } mark, .mark { - background-color: #fcf8e3; padding: .2em; + background-color: #fcf8e3; } .text-left { text-align: left; @@ -1441,8 +1435,8 @@ ol ol { } .list-inline > li { display: inline-block; - padding-left: 5px; padding-right: 5px; + padding-left: 5px; } dl { margin-top: 0; @@ -1453,7 +1447,7 @@ dd { line-height: 1.42857143; } dt { - font-weight: bold; + font-weight: 700; } dd { margin-left: 0; @@ -1475,7 +1469,6 @@ dd { abbr[title], abbr[data-original-title] { cursor: help; - border-bottom: 1px dotted #222222; } .initialism { font-size: 90%; @@ -1503,15 +1496,15 @@ blockquote .small { blockquote footer:before, blockquote small:before, blockquote .small:before { - content: '\2014 \00A0'; + 
content: "\2014 \00A0"; } .blockquote-reverse, blockquote.pull-right { padding-right: 15px; padding-left: 0; + text-align: right; border-right: 5px solid #555555; border-left: 0; - text-align: right; } .blockquote-reverse footer:before, blockquote.pull-right footer:before, @@ -1519,7 +1512,7 @@ blockquote.pull-right footer:before, blockquote.pull-right small:before, .blockquote-reverse .small:before, blockquote.pull-right .small:before { - content: ''; + content: ""; } .blockquote-reverse footer:after, blockquote.pull-right footer:after, @@ -1527,7 +1520,7 @@ blockquote.pull-right footer:after, blockquote.pull-right small:after, .blockquote-reverse .small:after, blockquote.pull-right .small:after { - content: '\00A0 \2014'; + content: "\00A0 \2014"; } address { margin-bottom: 20px; @@ -1554,14 +1547,14 @@ kbd { background-color: #333333; border-radius: 0; -webkit-box-shadow: inset 0 -1px 0 rgba(0, 0, 0, 0.25); - box-shadow: inset 0 -1px 0 rgba(0, 0, 0, 0.25); + box-shadow: inset 0 -1px 0 rgba(0, 0, 0, 0.25); } kbd kbd { padding: 0; font-size: 100%; - font-weight: bold; + font-weight: 700; -webkit-box-shadow: none; - box-shadow: none; + box-shadow: none; } pre { display: block; @@ -1569,9 +1562,9 @@ pre { margin: 0 0 10px; font-size: 13px; line-height: 1.42857143; + color: #999999; word-break: break-all; word-wrap: break-word; - color: #999999; background-color: #f5f5f5; border: 1px solid #cccccc; border-radius: 0; @@ -1589,10 +1582,10 @@ pre code { overflow-y: scroll; } .container { + padding-right: 15px; + padding-left: 15px; margin-right: auto; margin-left: auto; - padding-left: 15px; - padding-right: 15px; } @media (min-width: 768px) { .container { @@ -1610,20 +1603,28 @@ pre code { } } .container-fluid { + padding-right: 15px; + padding-left: 15px; margin-right: auto; margin-left: auto; - padding-left: 15px; - padding-right: 15px; } .row { - margin-left: -15px; margin-right: -15px; + margin-left: -15px; +} +.row-no-gutters { + margin-right: 0; + margin-left: 0; 
+} +.row-no-gutters [class*="col-"] { + padding-right: 0; + padding-left: 0; } .col-xs-1, .col-sm-1, .col-md-1, .col-lg-1, .col-xs-2, .col-sm-2, .col-md-2, .col-lg-2, .col-xs-3, .col-sm-3, .col-md-3, .col-lg-3, .col-xs-4, .col-sm-4, .col-md-4, .col-lg-4, .col-xs-5, .col-sm-5, .col-md-5, .col-lg-5, .col-xs-6, .col-sm-6, .col-md-6, .col-lg-6, .col-xs-7, .col-sm-7, .col-md-7, .col-lg-7, .col-xs-8, .col-sm-8, .col-md-8, .col-lg-8, .col-xs-9, .col-sm-9, .col-md-9, .col-lg-9, .col-xs-10, .col-sm-10, .col-md-10, .col-lg-10, .col-xs-11, .col-sm-11, .col-md-11, .col-lg-11, .col-xs-12, .col-sm-12, .col-md-12, .col-lg-12 { position: relative; min-height: 1px; - padding-left: 15px; padding-right: 15px; + padding-left: 15px; } .col-xs-1, .col-xs-2, .col-xs-3, .col-xs-4, .col-xs-5, .col-xs-6, .col-xs-7, .col-xs-8, .col-xs-9, .col-xs-10, .col-xs-11, .col-xs-12 { float: left; @@ -2258,6 +2259,17 @@ pre code { table { background-color: transparent; } +table col[class*="col-"] { + position: static; + display: table-column; + float: none; +} +table td[class*="col-"], +table th[class*="col-"] { + position: static; + display: table-cell; + float: none; +} caption { padding-top: 8px; padding-bottom: 8px; @@ -2330,17 +2342,6 @@ th { .table-hover > tbody > tr:hover { background-color: #f5f5f5; } -table col[class*="col-"] { - position: static; - float: none; - display: table-column; -} -table td[class*="col-"], -table th[class*="col-"] { - position: static; - float: none; - display: table-cell; -} .table > thead > tr > td.active, .table > tbody > tr > td.active, .table > tfoot > tr > td.active, @@ -2447,8 +2448,8 @@ table th[class*="col-"] { background-color: #ebcccc; } .table-responsive { + min-height: .01%; overflow-x: auto; - min-height: 0.01%; } @media screen and (max-width: 767px) { .table-responsive { @@ -2496,10 +2497,10 @@ table th[class*="col-"] { } } fieldset { + min-width: 0; padding: 0; margin: 0; border: 0; - min-width: 0; } legend { display: block; @@ -2516,12 +2517,14 @@ 
label { display: inline-block; max-width: 100%; margin-bottom: 5px; - font-weight: bold; + font-weight: 700; } input[type="search"] { -webkit-box-sizing: border-box; -moz-box-sizing: border-box; box-sizing: border-box; + -webkit-appearance: none; + appearance: none; } input[type="radio"], input[type="checkbox"] { @@ -2529,6 +2532,14 @@ input[type="checkbox"] { margin-top: 1px \9; line-height: normal; } +input[type="radio"][disabled], +input[type="checkbox"][disabled], +input[type="radio"].disabled, +input[type="checkbox"].disabled, +fieldset[disabled] input[type="radio"], +fieldset[disabled] input[type="checkbox"] { + cursor: not-allowed; +} input[type="file"] { display: block; } @@ -2543,7 +2554,6 @@ select[size] { input[type="file"]:focus, input[type="radio"]:focus, input[type="checkbox"]:focus { - outline: thin dotted; outline: 5px auto -webkit-focus-ring-color; outline-offset: -2px; } @@ -2575,8 +2585,8 @@ output { .form-control:focus { border-color: #66afe9; outline: 0; - -webkit-box-shadow: inset 0 1px 1px rgba(0,0,0,.075), 0 0 8px rgba(102, 175, 233, 0.6); - box-shadow: inset 0 1px 1px rgba(0,0,0,.075), 0 0 8px rgba(102, 175, 233, 0.6); + -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, .075), 0 0 8px rgba(102, 175, 233, 0.6); + box-shadow: inset 0 1px 1px rgba(0, 0, 0, .075), 0 0 8px rgba(102, 175, 233, 0.6); } .form-control::-moz-placeholder { color: #999999; @@ -2589,8 +2599,8 @@ output { color: #999999; } .form-control::-ms-expand { - border: 0; background-color: transparent; + border: 0; } .form-control[disabled], .form-control[readonly], @@ -2605,9 +2615,6 @@ fieldset[disabled] .form-control { textarea.form-control { height: auto; } -input[type="search"] { - -webkit-appearance: none; -} @media screen and (-webkit-min-device-pixel-ratio: 0) { input[type="date"].form-control, input[type="time"].form-control, @@ -2646,12 +2653,18 @@ input[type="search"] { margin-top: 10px; margin-bottom: 10px; } +.radio.disabled label, +.checkbox.disabled label, 
+fieldset[disabled] .radio label, +fieldset[disabled] .checkbox label { + cursor: not-allowed; +} .radio label, .checkbox label { min-height: 20px; padding-left: 20px; margin-bottom: 0; - font-weight: normal; + font-weight: 400; cursor: pointer; } .radio input[type="radio"], @@ -2659,8 +2672,8 @@ input[type="search"] { .checkbox input[type="checkbox"], .checkbox-inline input[type="checkbox"] { position: absolute; - margin-left: -20px; margin-top: 4px \9; + margin-left: -20px; } .radio + .radio, .checkbox + .checkbox { @@ -2672,45 +2685,31 @@ input[type="search"] { display: inline-block; padding-left: 20px; margin-bottom: 0; + font-weight: 400; vertical-align: middle; - font-weight: normal; cursor: pointer; } -.radio-inline + .radio-inline, -.checkbox-inline + .checkbox-inline { - margin-top: 0; - margin-left: 10px; -} -input[type="radio"][disabled], -input[type="checkbox"][disabled], -input[type="radio"].disabled, -input[type="checkbox"].disabled, -fieldset[disabled] input[type="radio"], -fieldset[disabled] input[type="checkbox"] { - cursor: not-allowed; -} .radio-inline.disabled, .checkbox-inline.disabled, fieldset[disabled] .radio-inline, fieldset[disabled] .checkbox-inline { cursor: not-allowed; } -.radio.disabled label, -.checkbox.disabled label, -fieldset[disabled] .radio label, -fieldset[disabled] .checkbox label { - cursor: not-allowed; +.radio-inline + .radio-inline, +.checkbox-inline + .checkbox-inline { + margin-top: 0; + margin-left: 10px; } .form-control-static { + min-height: 34px; padding-top: 7px; padding-bottom: 7px; margin-bottom: 0; - min-height: 34px; } .form-control-static.input-lg, .form-control-static.input-sm { - padding-left: 0; padding-right: 0; + padding-left: 0; } .input-sm { height: 30px; @@ -2842,8 +2841,8 @@ select[multiple].input-lg { } .has-success .input-group-addon { color: #3c763d; - border-color: #3c763d; background-color: #dff0d8; + border-color: #3c763d; } .has-success .form-control-feedback { color: #3c763d; @@ -2872,8 +2871,8 
@@ select[multiple].input-lg { } .has-warning .input-group-addon { color: #8a6d3b; - border-color: #8a6d3b; background-color: #fcf8e3; + border-color: #8a6d3b; } .has-warning .form-control-feedback { color: #8a6d3b; @@ -2902,8 +2901,8 @@ select[multiple].input-lg { } .has-error .input-group-addon { color: #a94442; - border-color: #a94442; background-color: #f2dede; + border-color: #a94442; } .has-error .form-control-feedback { color: #a94442; @@ -2974,23 +2973,23 @@ select[multiple].input-lg { .form-horizontal .checkbox, .form-horizontal .radio-inline, .form-horizontal .checkbox-inline { + padding-top: 7px; margin-top: 0; margin-bottom: 0; - padding-top: 7px; } .form-horizontal .radio, .form-horizontal .checkbox { min-height: 27px; } .form-horizontal .form-group { - margin-left: -15px; margin-right: -15px; + margin-left: -15px; } @media (min-width: 768px) { .form-horizontal .control-label { - text-align: right; - margin-bottom: 0; padding-top: 7px; + margin-bottom: 0; + text-align: right; } } .form-horizontal .has-feedback .form-control-feedback { @@ -3013,13 +3012,13 @@ select[multiple].input-lg { margin-bottom: 0; font-weight: normal; text-align: center; + white-space: nowrap; vertical-align: middle; -ms-touch-action: manipulation; - touch-action: manipulation; + touch-action: manipulation; cursor: pointer; background-image: none; border: 1px solid transparent; - white-space: nowrap; padding: 6px 12px; font-size: 14px; line-height: 1.42857143; @@ -3035,7 +3034,6 @@ select[multiple].input-lg { .btn.focus, .btn:active.focus, .btn.active.focus { - outline: thin dotted; outline: 5px auto -webkit-focus-ring-color; outline-offset: -2px; } @@ -3047,8 +3045,8 @@ select[multiple].input-lg { } .btn:active, .btn.active { - outline: 0; background-image: none; + outline: 0; -webkit-box-shadow: inset 0 3px 5px rgba(0, 0, 0, 0.125); box-shadow: inset 0 3px 5px rgba(0, 0, 0, 0.125); } @@ -3056,8 +3054,8 @@ select[multiple].input-lg { .btn[disabled], fieldset[disabled] .btn { 
cursor: not-allowed; - opacity: 0.65; filter: alpha(opacity=65); + opacity: 0.65; -webkit-box-shadow: none; box-shadow: none; } @@ -3086,6 +3084,7 @@ fieldset[disabled] a.btn { .open > .dropdown-toggle.btn-default { color: #333333; background-color: #e6e6e6; + background-image: none; border-color: #adadad; } .btn-default:active:hover, @@ -3101,11 +3100,6 @@ fieldset[disabled] a.btn { background-color: #d4d4d4; border-color: #8c8c8c; } -.btn-default:active, -.btn-default.active, -.open > .dropdown-toggle.btn-default { - background-image: none; -} .btn-default.disabled:hover, .btn-default[disabled]:hover, fieldset[disabled] .btn-default:hover, @@ -3143,6 +3137,7 @@ fieldset[disabled] .btn-default.focus { .open > .dropdown-toggle.btn-primary { color: #ffffff; background-color: #65ddff; + background-image: none; border-color: #41d5ff; } .btn-primary:active:hover, @@ -3158,11 +3153,6 @@ fieldset[disabled] .btn-default.focus { background-color: #41d5ff; border-color: #00c5fe; } -.btn-primary:active, -.btn-primary.active, -.open > .dropdown-toggle.btn-primary { - background-image: none; -} .btn-primary.disabled:hover, .btn-primary[disabled]:hover, fieldset[disabled] .btn-primary:hover, @@ -3200,6 +3190,7 @@ fieldset[disabled] .btn-primary.focus { .open > .dropdown-toggle.btn-success { color: #ffffff; background-color: #449d44; + background-image: none; border-color: #398439; } .btn-success:active:hover, @@ -3215,11 +3206,6 @@ fieldset[disabled] .btn-primary.focus { background-color: #398439; border-color: #255625; } -.btn-success:active, -.btn-success.active, -.open > .dropdown-toggle.btn-success { - background-image: none; -} .btn-success.disabled:hover, .btn-success[disabled]:hover, fieldset[disabled] .btn-success:hover, @@ -3257,6 +3243,7 @@ fieldset[disabled] .btn-success.focus { .open > .dropdown-toggle.btn-info { color: #ffffff; background-color: #31b0d5; + background-image: none; border-color: #269abc; } .btn-info:active:hover, @@ -3272,11 +3259,6 @@ 
fieldset[disabled] .btn-success.focus { background-color: #269abc; border-color: #1b6d85; } -.btn-info:active, -.btn-info.active, -.open > .dropdown-toggle.btn-info { - background-image: none; -} .btn-info.disabled:hover, .btn-info[disabled]:hover, fieldset[disabled] .btn-info:hover, @@ -3314,6 +3296,7 @@ fieldset[disabled] .btn-info.focus { .open > .dropdown-toggle.btn-warning { color: #ffffff; background-color: #ec971f; + background-image: none; border-color: #d58512; } .btn-warning:active:hover, @@ -3329,11 +3312,6 @@ fieldset[disabled] .btn-info.focus { background-color: #d58512; border-color: #985f0d; } -.btn-warning:active, -.btn-warning.active, -.open > .dropdown-toggle.btn-warning { - background-image: none; -} .btn-warning.disabled:hover, .btn-warning[disabled]:hover, fieldset[disabled] .btn-warning:hover, @@ -3371,6 +3349,7 @@ fieldset[disabled] .btn-warning.focus { .open > .dropdown-toggle.btn-danger { color: #ffffff; background-color: #c9302c; + background-image: none; border-color: #ac2925; } .btn-danger:active:hover, @@ -3386,11 +3365,6 @@ fieldset[disabled] .btn-warning.focus { background-color: #ac2925; border-color: #761c19; } -.btn-danger:active, -.btn-danger.active, -.open > .dropdown-toggle.btn-danger { - background-image: none; -} .btn-danger.disabled:hover, .btn-danger[disabled]:hover, fieldset[disabled] .btn-danger:hover, @@ -3408,8 +3382,8 @@ fieldset[disabled] .btn-danger.focus { background-color: #ffffff; } .btn-link { + font-weight: 400; color: #98e8ff; - font-weight: normal; border-radius: 0; } .btn-link, @@ -3500,13 +3474,13 @@ tbody.collapse.in { overflow: hidden; -webkit-transition-property: height, visibility; -o-transition-property: height, visibility; - transition-property: height, visibility; + transition-property: height, visibility; -webkit-transition-duration: 0.35s; -o-transition-duration: 0.35s; - transition-duration: 0.35s; + transition-duration: 0.35s; -webkit-transition-timing-function: ease; -o-transition-timing-function: 
ease; - transition-timing-function: ease; + transition-timing-function: ease; } .caret { display: inline-block; @@ -3536,17 +3510,17 @@ tbody.collapse.in { min-width: 160px; padding: 5px 0; margin: 2px 0 0; - list-style: none; font-size: 14px; text-align: left; + list-style: none; background-color: #ffffff; + -webkit-background-clip: padding-box; + background-clip: padding-box; border: 1px solid #cccccc; border: 1px solid rgba(0, 0, 0, 0.15); border-radius: 0; -webkit-box-shadow: 0 6px 12px rgba(0, 0, 0, 0.175); box-shadow: 0 6px 12px rgba(0, 0, 0, 0.175); - -webkit-background-clip: padding-box; - background-clip: padding-box; } .dropdown-menu.pull-right { right: 0; @@ -3562,15 +3536,15 @@ tbody.collapse.in { display: block; padding: 3px 20px; clear: both; - font-weight: normal; + font-weight: 400; line-height: 1.42857143; color: #424242; white-space: nowrap; } .dropdown-menu > li > a:hover, .dropdown-menu > li > a:focus { - text-decoration: none; color: #8c8c8c; + text-decoration: none; background-color: #f5f5f5; } .dropdown-menu > .active > a, @@ -3578,8 +3552,8 @@ tbody.collapse.in { .dropdown-menu > .active > a:focus { color: #ffffff; text-decoration: none; - outline: 0; background-color: #418194; + outline: 0; } .dropdown-menu > .disabled > a, .dropdown-menu > .disabled > a:hover, @@ -3589,10 +3563,10 @@ tbody.collapse.in { .dropdown-menu > .disabled > a:hover, .dropdown-menu > .disabled > a:focus { text-decoration: none; + cursor: not-allowed; background-color: transparent; background-image: none; filter: progid:DXImageTransform.Microsoft.gradient(enabled = false); - cursor: not-allowed; } .open > .dropdown-menu { display: block; @@ -3601,12 +3575,12 @@ tbody.collapse.in { outline: 0; } .dropdown-menu-right { - left: auto; right: 0; + left: auto; } .dropdown-menu-left { - left: 0; right: auto; + left: 0; } .dropdown-header { display: block; @@ -3618,10 +3592,10 @@ tbody.collapse.in { } .dropdown-backdrop { position: fixed; - left: 0; + top: 0; right: 0; 
bottom: 0; - top: 0; + left: 0; z-index: 990; } .pull-right > .dropdown-menu { @@ -3630,10 +3604,10 @@ tbody.collapse.in { } .dropup .caret, .navbar-fixed-bottom .dropdown .caret { + content: ""; border-top: 0; border-bottom: 4px dashed; border-bottom: 4px solid \9; - content: ""; } .dropup .dropdown-menu, .navbar-fixed-bottom .dropdown .dropdown-menu { @@ -3643,12 +3617,12 @@ tbody.collapse.in { } @media (min-width: 768px) { .navbar-right .dropdown-menu { - left: auto; right: 0; + left: auto; } .navbar-right .dropdown-menu-left { - left: 0; right: auto; + left: 0; } } .btn-group, @@ -3698,13 +3672,13 @@ tbody.collapse.in { margin-left: 0; } .btn-group > .btn:first-child:not(:last-child):not(.dropdown-toggle) { - border-bottom-right-radius: 0; border-top-right-radius: 0; + border-bottom-right-radius: 0; } .btn-group > .btn:last-child:not(:first-child), .btn-group > .dropdown-toggle:not(:first-child) { - border-bottom-left-radius: 0; border-top-left-radius: 0; + border-bottom-left-radius: 0; } .btn-group > .btn-group { float: left; @@ -3714,24 +3688,24 @@ tbody.collapse.in { } .btn-group > .btn-group:first-child:not(:last-child) > .btn:last-child, .btn-group > .btn-group:first-child:not(:last-child) > .dropdown-toggle { - border-bottom-right-radius: 0; border-top-right-radius: 0; + border-bottom-right-radius: 0; } .btn-group > .btn-group:last-child:not(:first-child) > .btn:first-child { - border-bottom-left-radius: 0; border-top-left-radius: 0; + border-bottom-left-radius: 0; } .btn-group .dropdown-toggle:active, .btn-group.open .dropdown-toggle { outline: 0; } .btn-group > .btn + .dropdown-toggle { - padding-left: 8px; padding-right: 8px; + padding-left: 8px; } .btn-group > .btn-lg + .dropdown-toggle { - padding-left: 12px; padding-right: 12px; + padding-left: 12px; } .btn-group.open .dropdown-toggle { -webkit-box-shadow: inset 0 3px 5px rgba(0, 0, 0, 0.125); @@ -3773,14 +3747,14 @@ tbody.collapse.in { border-radius: 0; } .btn-group-vertical > 
.btn:first-child:not(:last-child) { - border-top-right-radius: 0; border-top-left-radius: 0; + border-top-right-radius: 0; border-bottom-right-radius: 0; border-bottom-left-radius: 0; } .btn-group-vertical > .btn:last-child:not(:first-child) { - border-top-right-radius: 0; border-top-left-radius: 0; + border-top-right-radius: 0; border-bottom-right-radius: 0; border-bottom-left-radius: 0; } @@ -3793,8 +3767,8 @@ tbody.collapse.in { border-bottom-left-radius: 0; } .btn-group-vertical > .btn-group:last-child:not(:first-child) > .btn:first-child { - border-top-right-radius: 0; border-top-left-radius: 0; + border-top-right-radius: 0; } .btn-group-justified { display: table; @@ -3804,8 +3778,8 @@ tbody.collapse.in { } .btn-group-justified > .btn, .btn-group-justified > .btn-group { - float: none; display: table-cell; + float: none; width: 1%; } .btn-group-justified > .btn-group .btn { @@ -3829,8 +3803,8 @@ tbody.collapse.in { } .input-group[class*="col-"] { float: none; - padding-left: 0; padding-right: 0; + padding-left: 0; } .input-group .form-control { position: relative; @@ -3907,7 +3881,7 @@ select[multiple].input-group-sm > .input-group-btn > .btn { .input-group-addon { padding: 6px 12px; font-size: 14px; - font-weight: normal; + font-weight: 400; line-height: 1; color: #000000; text-align: center; @@ -3936,8 +3910,8 @@ select[multiple].input-group-sm > .input-group-btn > .btn { .input-group-btn:first-child > .dropdown-toggle, .input-group-btn:last-child > .btn:not(:last-child):not(.dropdown-toggle), .input-group-btn:last-child > .btn-group:not(:last-child) > .btn { - border-bottom-right-radius: 0; border-top-right-radius: 0; + border-bottom-right-radius: 0; } .input-group-addon:first-child { border-right: 0; @@ -3949,8 +3923,8 @@ select[multiple].input-group-sm > .input-group-btn > .btn { .input-group-btn:last-child > .dropdown-toggle, .input-group-btn:first-child > .btn:not(:first-child), .input-group-btn:first-child > .btn-group:not(:first-child) > .btn { - 
border-bottom-left-radius: 0; border-top-left-radius: 0; + border-bottom-left-radius: 0; } .input-group-addon:last-child { border-left: 0; @@ -3981,8 +3955,8 @@ select[multiple].input-group-sm > .input-group-btn > .btn { margin-left: -1px; } .nav { - margin-bottom: 0; padding-left: 0; + margin-bottom: 0; list-style: none; } .nav > li { @@ -4006,8 +3980,8 @@ select[multiple].input-group-sm > .input-group-btn > .btn { .nav > li.disabled > a:focus { color: #222222; text-decoration: none; - background-color: transparent; cursor: not-allowed; + background-color: transparent; } .nav .open > a, .nav .open > a:hover, @@ -4044,10 +4018,10 @@ select[multiple].input-group-sm > .input-group-btn > .btn { .nav-tabs > li.active > a:hover, .nav-tabs > li.active > a:focus { color: #eeeeee; + cursor: default; background-color: #252830; border: 1px solid #dddddd; border-bottom-color: transparent; - cursor: default; } .nav-tabs.nav-justified { width: 100%; @@ -4057,8 +4031,8 @@ select[multiple].input-group-sm > .input-group-btn > .btn { float: none; } .nav-tabs.nav-justified > li > a { - text-align: center; margin-bottom: 5px; + text-align: center; } .nav-tabs.nav-justified > .dropdown .dropdown-menu { top: auto; @@ -4122,8 +4096,8 @@ select[multiple].input-group-sm > .input-group-btn > .btn { float: none; } .nav-justified > li > a { - text-align: center; margin-bottom: 5px; + text-align: center; } .nav-justified > .dropdown .dropdown-menu { top: auto; @@ -4169,8 +4143,8 @@ select[multiple].input-group-sm > .input-group-btn > .btn { } .nav-tabs .dropdown-menu { margin-top: -1px; - border-top-right-radius: 0; border-top-left-radius: 0; + border-top-right-radius: 0; } .navbar { position: relative; @@ -4189,12 +4163,12 @@ select[multiple].input-group-sm > .input-group-btn > .btn { } } .navbar-collapse { - overflow-x: visible; padding-right: 15px; padding-left: 15px; + overflow-x: visible; border-top: 1px solid transparent; -webkit-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.1); - 
box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.1); + box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.1); -webkit-overflow-scrolling: touch; } .navbar-collapse.in { @@ -4205,7 +4179,7 @@ select[multiple].input-group-sm > .input-group-btn > .btn { width: auto; border-top: 0; -webkit-box-shadow: none; - box-shadow: none; + box-shadow: none; } .navbar-collapse.collapse { display: block !important; @@ -4219,10 +4193,17 @@ select[multiple].input-group-sm > .input-group-btn > .btn { .navbar-fixed-top .navbar-collapse, .navbar-static-top .navbar-collapse, .navbar-fixed-bottom .navbar-collapse { - padding-left: 0; padding-right: 0; + padding-left: 0; } } +.navbar-fixed-top, +.navbar-fixed-bottom { + position: fixed; + right: 0; + left: 0; + z-index: 1030; +} .navbar-fixed-top .navbar-collapse, .navbar-fixed-bottom .navbar-collapse { max-height: 340px; @@ -4233,10 +4214,25 @@ select[multiple].input-group-sm > .input-group-btn > .btn { max-height: 200px; } } -.container > .navbar-header, -.container-fluid > .navbar-header, -.container > .navbar-collapse, -.container-fluid > .navbar-collapse { +@media (min-width: 768px) { + .navbar-fixed-top, + .navbar-fixed-bottom { + border-radius: 0; + } +} +.navbar-fixed-top { + top: 0; + border-width: 0 0 1px; +} +.navbar-fixed-bottom { + bottom: 0; + margin-bottom: 0; + border-width: 1px 0 0; +} +.container > .navbar-header, +.container-fluid > .navbar-header, +.container > .navbar-collapse, +.container-fluid > .navbar-collapse { margin-right: -15px; margin-left: -15px; } @@ -4258,34 +4254,12 @@ select[multiple].input-group-sm > .input-group-btn > .btn { border-radius: 0; } } -.navbar-fixed-top, -.navbar-fixed-bottom { - position: fixed; - right: 0; - left: 0; - z-index: 1030; -} -@media (min-width: 768px) { - .navbar-fixed-top, - .navbar-fixed-bottom { - border-radius: 0; - } -} -.navbar-fixed-top { - top: 0; - border-width: 0 0 1px; -} -.navbar-fixed-bottom { - bottom: 0; - margin-bottom: 0; - border-width: 1px 0 0; -} .navbar-brand { 
float: left; + height: 50px; padding: 15px 15px; font-size: 18px; line-height: 20px; - height: 50px; } .navbar-brand:hover, .navbar-brand:focus { @@ -4303,8 +4277,8 @@ select[multiple].input-group-sm > .input-group-btn > .btn { .navbar-toggle { position: relative; float: right; - margin-right: 15px; padding: 9px 10px; + margin-right: 15px; margin-top: 8px; margin-bottom: 8px; background-color: transparent; @@ -4346,7 +4320,7 @@ select[multiple].input-group-sm > .input-group-btn > .btn { background-color: transparent; border: 0; -webkit-box-shadow: none; - box-shadow: none; + box-shadow: none; } .navbar-nav .open .dropdown-menu > li > a, .navbar-nav .open .dropdown-menu .dropdown-header { @@ -4374,9 +4348,9 @@ select[multiple].input-group-sm > .input-group-btn > .btn { } } .navbar-form { - margin-left: -15px; - margin-right: -15px; padding: 10px 15px; + margin-right: -15px; + margin-left: -15px; border-top: 1px solid transparent; border-bottom: 1px solid transparent; -webkit-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.1), 0 1px 0 rgba(255, 255, 255, 0.1); @@ -4445,24 +4419,24 @@ select[multiple].input-group-sm > .input-group-btn > .btn { @media (min-width: 768px) { .navbar-form { width: auto; - border: 0; - margin-left: 0; - margin-right: 0; padding-top: 0; padding-bottom: 0; + margin-right: 0; + margin-left: 0; + border: 0; -webkit-box-shadow: none; box-shadow: none; } } .navbar-nav > li > .dropdown-menu { margin-top: 0; - border-top-right-radius: 0; border-top-left-radius: 0; + border-top-right-radius: 0; } .navbar-fixed-bottom .navbar-nav > li > .dropdown-menu { margin-bottom: 0; - border-top-right-radius: 0; border-top-left-radius: 0; + border-top-right-radius: 0; border-bottom-right-radius: 0; border-bottom-left-radius: 0; } @@ -4485,8 +4459,8 @@ select[multiple].input-group-sm > .input-group-btn > .btn { @media (min-width: 768px) { .navbar-text { float: left; - margin-left: 15px; margin-right: 15px; + margin-left: 15px; } } @media (min-width: 768px) { @@ 
-4536,25 +4510,11 @@ select[multiple].input-group-sm > .input-group-btn > .btn { color: #cccccc; background-color: transparent; } -.navbar-default .navbar-toggle { - border-color: #dddddd; -} -.navbar-default .navbar-toggle:hover, -.navbar-default .navbar-toggle:focus { - background-color: #dddddd; -} -.navbar-default .navbar-toggle .icon-bar { - background-color: #888888; -} -.navbar-default .navbar-collapse, -.navbar-default .navbar-form { - border-color: #e7e7e7; -} .navbar-default .navbar-nav > .open > a, .navbar-default .navbar-nav > .open > a:hover, .navbar-default .navbar-nav > .open > a:focus { - background-color: #e7e7e7; color: #555555; + background-color: #e7e7e7; } @media (max-width: 767px) { .navbar-default .navbar-nav .open .dropdown-menu > li > a { @@ -4578,6 +4538,20 @@ select[multiple].input-group-sm > .input-group-btn > .btn { background-color: transparent; } } +.navbar-default .navbar-toggle { + border-color: #dddddd; +} +.navbar-default .navbar-toggle:hover, +.navbar-default .navbar-toggle:focus { + background-color: #dddddd; +} +.navbar-default .navbar-toggle .icon-bar { + background-color: #888888; +} +.navbar-default .navbar-collapse, +.navbar-default .navbar-form { + border-color: #e7e7e7; +} .navbar-default .navbar-link { color: #777777; } @@ -4632,25 +4606,11 @@ fieldset[disabled] .navbar-default .btn-link:focus { color: #444444; background-color: transparent; } -.navbar-inverse .navbar-toggle { - border-color: #333333; -} -.navbar-inverse .navbar-toggle:hover, -.navbar-inverse .navbar-toggle:focus { - background-color: #333333; -} -.navbar-inverse .navbar-toggle .icon-bar { - background-color: #ffffff; -} -.navbar-inverse .navbar-collapse, -.navbar-inverse .navbar-form { - border-color: #101010; -} .navbar-inverse .navbar-nav > .open > a, .navbar-inverse .navbar-nav > .open > a:hover, .navbar-inverse .navbar-nav > .open > a:focus { - background-color: #080808; color: #ffffff; + background-color: #080808; } @media (max-width: 767px) { 
.navbar-inverse .navbar-nav .open .dropdown-menu > .dropdown-header { @@ -4680,6 +4640,20 @@ fieldset[disabled] .navbar-default .btn-link:focus { background-color: transparent; } } +.navbar-inverse .navbar-toggle { + border-color: #333333; +} +.navbar-inverse .navbar-toggle:hover, +.navbar-inverse .navbar-toggle:focus { + background-color: #333333; +} +.navbar-inverse .navbar-toggle .icon-bar { + background-color: #ffffff; +} +.navbar-inverse .navbar-collapse, +.navbar-inverse .navbar-form { + border-color: #101010; +} .navbar-inverse .navbar-link { color: #494949; } @@ -4710,9 +4684,9 @@ fieldset[disabled] .navbar-inverse .btn-link:focus { display: inline-block; } .breadcrumb > li + li:before { - content: "/\00a0"; padding: 0 5px; color: #cccccc; + content: "/\00a0"; } .breadcrumb > .active { color: #222222; @@ -4731,23 +4705,12 @@ fieldset[disabled] .navbar-inverse .btn-link:focus { position: relative; float: left; padding: 6px 12px; + margin-left: -1px; line-height: 1.42857143; - text-decoration: none; color: #98e8ff; + text-decoration: none; background-color: #ffffff; border: 1px solid #dddddd; - margin-left: -1px; -} -.pagination > li:first-child > a, -.pagination > li:first-child > span { - margin-left: 0; - border-bottom-left-radius: 0; - border-top-left-radius: 0; -} -.pagination > li:last-child > a, -.pagination > li:last-child > span { - border-bottom-right-radius: 0; - border-top-right-radius: 0; } .pagination > li > a:hover, .pagination > li > span:hover, @@ -4758,6 +4721,17 @@ fieldset[disabled] .navbar-inverse .btn-link:focus { background-color: #555555; border-color: #dddddd; } +.pagination > li:first-child > a, +.pagination > li:first-child > span { + margin-left: 0; + border-top-left-radius: 0; + border-bottom-left-radius: 0; +} +.pagination > li:last-child > a, +.pagination > li:last-child > span { + border-top-right-radius: 0; + border-bottom-right-radius: 0; +} .pagination > .active > a, .pagination > .active > span, .pagination > .active > 
a:hover, @@ -4766,9 +4740,9 @@ fieldset[disabled] .navbar-inverse .btn-link:focus { .pagination > .active > span:focus { z-index: 3; color: #ffffff; + cursor: default; background-color: #98e8ff; border-color: #98e8ff; - cursor: default; } .pagination > .disabled > span, .pagination > .disabled > span:hover, @@ -4777,9 +4751,9 @@ fieldset[disabled] .navbar-inverse .btn-link:focus { .pagination > .disabled > a:hover, .pagination > .disabled > a:focus { color: #222222; + cursor: not-allowed; background-color: #ffffff; border-color: #dddddd; - cursor: not-allowed; } .pagination-lg > li > a, .pagination-lg > li > span { @@ -4789,13 +4763,13 @@ fieldset[disabled] .navbar-inverse .btn-link:focus { } .pagination-lg > li:first-child > a, .pagination-lg > li:first-child > span { - border-bottom-left-radius: 0; border-top-left-radius: 0; + border-bottom-left-radius: 0; } .pagination-lg > li:last-child > a, .pagination-lg > li:last-child > span { - border-bottom-right-radius: 0; border-top-right-radius: 0; + border-bottom-right-radius: 0; } .pagination-sm > li > a, .pagination-sm > li > span { @@ -4805,19 +4779,19 @@ fieldset[disabled] .navbar-inverse .btn-link:focus { } .pagination-sm > li:first-child > a, .pagination-sm > li:first-child > span { - border-bottom-left-radius: 0; border-top-left-radius: 0; + border-bottom-left-radius: 0; } .pagination-sm > li:last-child > a, .pagination-sm > li:last-child > span { - border-bottom-right-radius: 0; border-top-right-radius: 0; + border-bottom-right-radius: 0; } .pager { padding-left: 0; margin: 20px 0; - list-style: none; text-align: center; + list-style: none; } .pager li { display: inline; @@ -4848,14 +4822,14 @@ fieldset[disabled] .navbar-inverse .btn-link:focus { .pager .disabled > a:focus, .pager .disabled > span { color: #222222; - background-color: #ffffff; cursor: not-allowed; + background-color: #ffffff; } .label { display: inline; padding: .2em .6em .3em; font-size: 75%; - font-weight: bold; + font-weight: 700; 
line-height: 1; color: #ffffff; text-align: center; @@ -4924,11 +4898,11 @@ a.label:focus { padding: 3px 7px; font-size: 12px; font-weight: bold; - color: #ffffff; line-height: 1; - vertical-align: middle; - white-space: nowrap; + color: #ffffff; text-align: center; + white-space: nowrap; + vertical-align: middle; background-color: #222222; border-radius: 10px; } @@ -4985,9 +4959,9 @@ a.badge:focus { } .container .jumbotron, .container-fluid .jumbotron { - border-radius: 0; - padding-left: 15px; padding-right: 15px; + padding-left: 15px; + border-radius: 0; } .jumbotron .container { max-width: 100%; @@ -4999,8 +4973,8 @@ a.badge:focus { } .container .jumbotron, .container-fluid .jumbotron { - padding-left: 60px; padding-right: 60px; + padding-left: 60px; } .jumbotron h1, .jumbotron .h1 { @@ -5021,8 +4995,8 @@ a.badge:focus { } .thumbnail > img, .thumbnail a > img { - margin-left: auto; margin-right: auto; + margin-left: auto; } a.thumbnail:hover, a.thumbnail:focus, @@ -5065,9 +5039,9 @@ a.thumbnail.active { color: inherit; } .alert-success { + color: #3c763d; background-color: #dff0d8; border-color: #d6e9c6; - color: #3c763d; } .alert-success hr { border-top-color: #c9e2b3; @@ -5076,9 +5050,9 @@ a.thumbnail.active { color: #2b542c; } .alert-info { + color: #31708f; background-color: #d9edf7; border-color: #bce8f1; - color: #31708f; } .alert-info hr { border-top-color: #a6e1ec; @@ -5087,9 +5061,9 @@ a.thumbnail.active { color: #245269; } .alert-warning { + color: #8a6d3b; background-color: #fcf8e3; border-color: #faebcc; - color: #8a6d3b; } .alert-warning hr { border-top-color: #f7e1b5; @@ -5098,9 +5072,9 @@ a.thumbnail.active { color: #66512c; } .alert-danger { + color: #a94442; background-color: #f2dede; border-color: #ebccd1; - color: #a94442; } .alert-danger hr { border-top-color: #e4b9c0; @@ -5133,9 +5107,9 @@ a.thumbnail.active { } } .progress { - overflow: hidden; height: 20px; margin-bottom: 20px; + overflow: hidden; background-color: #f5f5f5; border-radius: 
0; -webkit-box-shadow: inset 0 1px 2px rgba(0, 0, 0, 0.1); @@ -5162,7 +5136,7 @@ a.thumbnail.active { background-image: -o-linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent); background-image: linear-gradient(45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent); -webkit-background-size: 40px 40px; - background-size: 40px 40px; + background-size: 40px 40px; } .progress.active .progress-bar, .progress-bar.active { @@ -5210,8 +5184,8 @@ a.thumbnail.active { } .media, .media-body { - zoom: 1; overflow: hidden; + zoom: 1; } .media-body { width: 10000px; @@ -5251,8 +5225,8 @@ a.thumbnail.active { list-style: none; } .list-group { - margin-bottom: 20px; padding-left: 0; + margin-bottom: 20px; } .list-group-item { position: relative; @@ -5263,40 +5237,20 @@ a.thumbnail.active { border: 1px solid #dddddd; } .list-group-item:first-child { - border-top-right-radius: 0; border-top-left-radius: 0; + border-top-right-radius: 0; } .list-group-item:last-child { margin-bottom: 0; border-bottom-right-radius: 0; border-bottom-left-radius: 0; } -a.list-group-item, -button.list-group-item { - color: #555555; -} -a.list-group-item .list-group-item-heading, -button.list-group-item .list-group-item-heading { - color: #333333; -} -a.list-group-item:hover, -button.list-group-item:hover, -a.list-group-item:focus, -button.list-group-item:focus { - text-decoration: none; - color: #555555; - background-color: #f5f5f5; -} -button.list-group-item { - width: 100%; - text-align: left; -} .list-group-item.disabled, .list-group-item.disabled:hover, .list-group-item.disabled:focus { - background-color: #555555; color: #222222; cursor: not-allowed; + background-color: #555555; } .list-group-item.disabled .list-group-item-heading, .list-group-item.disabled:hover 
.list-group-item-heading, @@ -5332,6 +5286,26 @@ button.list-group-item { .list-group-item.active:focus .list-group-item-text { color: #ffffff; } +a.list-group-item, +button.list-group-item { + color: #555555; +} +a.list-group-item .list-group-item-heading, +button.list-group-item .list-group-item-heading { + color: #333333; +} +a.list-group-item:hover, +button.list-group-item:hover, +a.list-group-item:focus, +button.list-group-item:focus { + color: #555555; + text-decoration: none; + background-color: #f5f5f5; +} +button.list-group-item { + width: 100%; + text-align: left; +} .list-group-item-success { color: #3c763d; background-color: #dff0d8; @@ -5470,8 +5444,8 @@ button.list-group-item-danger.active:focus { .panel-heading { padding: 10px 15px; border-bottom: 1px solid transparent; - border-top-right-radius: -1; border-top-left-radius: -1; + border-top-right-radius: -1; } .panel-heading > .dropdown .dropdown-toggle { color: inherit; @@ -5508,8 +5482,8 @@ button.list-group-item-danger.active:focus { .panel > .list-group:first-child .list-group-item:first-child, .panel > .panel-collapse > .list-group:first-child .list-group-item:first-child { border-top: 0; - border-top-right-radius: -1; border-top-left-radius: -1; + border-top-right-radius: -1; } .panel > .list-group:last-child .list-group-item:last-child, .panel > .panel-collapse > .list-group:last-child .list-group-item:last-child { @@ -5518,8 +5492,8 @@ button.list-group-item-danger.active:focus { border-bottom-left-radius: -1; } .panel > .panel-heading + .panel-collapse > .list-group .list-group-item:first-child { - border-top-right-radius: 0; border-top-left-radius: 0; + border-top-right-radius: 0; } .panel-heading + .list-group .list-group-item:first-child { border-top-width: 0; @@ -5535,13 +5509,13 @@ button.list-group-item-danger.active:focus { .panel > .table caption, .panel > .table-responsive > .table caption, .panel > .panel-collapse > .table caption { - padding-left: 15px; padding-right: 15px; + 
padding-left: 15px; } .panel > .table:first-child, .panel > .table-responsive:first-child > .table:first-child { - border-top-right-radius: -1; border-top-left-radius: -1; + border-top-right-radius: -1; } .panel > .table:first-child > thead:first-child > tr:first-child, .panel > .table-responsive:first-child > .table:first-child > thead:first-child > tr:first-child, @@ -5579,8 +5553,8 @@ button.list-group-item-danger.active:focus { .panel > .table-responsive:last-child > .table:last-child > tbody:last-child > tr:last-child, .panel > .table:last-child > tfoot:last-child > tr:last-child, .panel > .table-responsive:last-child > .table:last-child > tfoot:last-child > tr:last-child { - border-bottom-left-radius: -1; border-bottom-right-radius: -1; + border-bottom-left-radius: -1; } .panel > .table:last-child > tbody:last-child > tr:last-child td:first-child, .panel > .table-responsive:last-child > .table:last-child > tbody:last-child > tr:last-child td:first-child, @@ -5665,8 +5639,8 @@ button.list-group-item-danger.active:focus { border-bottom: 0; } .panel > .table-responsive { - border: 0; margin-bottom: 0; + border: 0; } .panel-group { margin-bottom: 20px; @@ -5813,10 +5787,10 @@ button.list-group-item-danger.active:focus { .embed-responsive video { position: absolute; top: 0; - left: 0; bottom: 0; - height: 100%; + left: 0; width: 100%; + height: 100%; border: 0; } .embed-responsive-16by9 { @@ -5854,16 +5828,16 @@ button.list-group-item-danger.active:focus { line-height: 1; color: #000000; text-shadow: 0 1px 0 #ffffff; - opacity: 0.2; filter: alpha(opacity=20); + opacity: 0.2; } .close:hover, .close:focus { color: #000000; text-decoration: none; cursor: pointer; - opacity: 0.5; filter: alpha(opacity=50); + opacity: 0.5; } button.close { padding: 0; @@ -5871,19 +5845,20 @@ button.close { background: transparent; border: 0; -webkit-appearance: none; + appearance: none; } .modal-open { overflow: hidden; } .modal { - display: none; - overflow: hidden; position: fixed; 
top: 0; right: 0; bottom: 0; left: 0; z-index: 1050; + display: none; + overflow: hidden; -webkit-overflow-scrolling: touch; outline: 0; } @@ -5914,13 +5889,13 @@ button.close { .modal-content { position: relative; background-color: #ffffff; + -webkit-background-clip: padding-box; + background-clip: padding-box; border: 1px solid #999999; border: 1px solid rgba(0, 0, 0, 0.2); border-radius: 0; -webkit-box-shadow: 0 3px 9px rgba(0, 0, 0, 0.5); box-shadow: 0 3px 9px rgba(0, 0, 0, 0.5); - -webkit-background-clip: padding-box; - background-clip: padding-box; outline: 0; } .modal-backdrop { @@ -5933,12 +5908,12 @@ button.close { background-color: #000000; } .modal-backdrop.fade { - opacity: 0; filter: alpha(opacity=0); + opacity: 0; } .modal-backdrop.in { - opacity: 0.5; filter: alpha(opacity=50); + opacity: 0.5; } .modal-header { padding: 15px; @@ -5961,8 +5936,8 @@ button.close { border-top: 1px solid #e5e5e5; } .modal-footer .btn + .btn { - margin-left: 5px; margin-bottom: 0; + margin-left: 5px; } .modal-footer .btn-group .btn + .btn { margin-left: -1px; @@ -6001,57 +5976,42 @@ button.close { display: block; font-family: "Roboto", "Helvetica Neue", Helvetica, Arial, sans-serif; font-style: normal; - font-weight: normal; - letter-spacing: normal; - line-break: auto; + font-weight: 400; line-height: 1.42857143; + line-break: auto; text-align: left; text-align: start; text-decoration: none; text-shadow: none; text-transform: none; - white-space: normal; + letter-spacing: normal; word-break: normal; word-spacing: normal; word-wrap: normal; + white-space: normal; font-size: 12px; - opacity: 0; filter: alpha(opacity=0); + opacity: 0; } .tooltip.in { - opacity: 0.9; filter: alpha(opacity=90); + opacity: 0.9; } .tooltip.top { - margin-top: -3px; padding: 5px 0; + margin-top: -3px; } .tooltip.right { - margin-left: 3px; padding: 0 5px; + margin-left: 3px; } .tooltip.bottom { - margin-top: 3px; padding: 5px 0; + margin-top: 3px; } .tooltip.left { - margin-left: -3px; padding: 
0 5px; -} -.tooltip-inner { - max-width: 200px; - padding: 3px 8px; - color: #ffffff; - text-align: center; - background-color: #000000; - border-radius: 0; -} -.tooltip-arrow { - position: absolute; - width: 0; - height: 0; - border-color: transparent; - border-style: solid; + margin-left: -3px; } .tooltip.top .tooltip-arrow { bottom: 0; @@ -6061,8 +6021,8 @@ button.close { border-top-color: #000000; } .tooltip.top-left .tooltip-arrow { - bottom: 0; right: 5px; + bottom: 0; margin-bottom: -5px; border-width: 5px 5px 0; border-top-color: #000000; @@ -6109,6 +6069,21 @@ button.close { border-width: 0 5px 5px; border-bottom-color: #000000; } +.tooltip-inner { + max-width: 200px; + padding: 3px 8px; + color: #ffffff; + text-align: center; + background-color: #000000; + border-radius: 0; +} +.tooltip-arrow { + position: absolute; + width: 0; + height: 0; + border-color: transparent; + border-style: solid; +} .popover { position: absolute; top: 0; @@ -6119,23 +6094,23 @@ button.close { padding: 1px; font-family: "Roboto", "Helvetica Neue", Helvetica, Arial, sans-serif; font-style: normal; - font-weight: normal; - letter-spacing: normal; - line-break: auto; + font-weight: 400; line-height: 1.42857143; + line-break: auto; text-align: left; text-align: start; text-decoration: none; text-shadow: none; text-transform: none; - white-space: normal; + letter-spacing: normal; word-break: normal; word-spacing: normal; word-wrap: normal; + white-space: normal; font-size: 14px; background-color: #ffffff; -webkit-background-clip: padding-box; - background-clip: padding-box; + background-clip: padding-box; border: 1px solid #cccccc; border: 1px solid rgba(0, 0, 0, 0.2); border-radius: 0; @@ -6154,16 +6129,8 @@ button.close { .popover.left { margin-left: -10px; } -.popover-title { - margin: 0; - padding: 8px 14px; - font-size: 14px; - background-color: #f7f7f7; - border-bottom: 1px solid #ebebeb; - border-radius: -1 -1 0 0; -} -.popover-content { - padding: 9px 14px; +.popover > 
.arrow { + border-width: 11px; } .popover > .arrow, .popover > .arrow:after { @@ -6174,55 +6141,52 @@ button.close { border-color: transparent; border-style: solid; } -.popover > .arrow { - border-width: 11px; -} .popover > .arrow:after { - border-width: 10px; content: ""; + border-width: 10px; } .popover.top > .arrow { + bottom: -11px; left: 50%; margin-left: -11px; - border-bottom-width: 0; border-top-color: #999999; border-top-color: rgba(0, 0, 0, 0.25); - bottom: -11px; + border-bottom-width: 0; } .popover.top > .arrow:after { - content: " "; bottom: 1px; margin-left: -10px; - border-bottom-width: 0; + content: " "; border-top-color: #ffffff; + border-bottom-width: 0; } .popover.right > .arrow { top: 50%; left: -11px; margin-top: -11px; - border-left-width: 0; border-right-color: #999999; border-right-color: rgba(0, 0, 0, 0.25); + border-left-width: 0; } .popover.right > .arrow:after { - content: " "; - left: 1px; bottom: -10px; - border-left-width: 0; + left: 1px; + content: " "; border-right-color: #ffffff; + border-left-width: 0; } .popover.bottom > .arrow { + top: -11px; left: 50%; margin-left: -11px; border-top-width: 0; border-bottom-color: #999999; border-bottom-color: rgba(0, 0, 0, 0.25); - top: -11px; } .popover.bottom > .arrow:after { - content: " "; top: 1px; margin-left: -10px; + content: " "; border-top-width: 0; border-bottom-color: #ffffff; } @@ -6235,23 +6199,34 @@ button.close { border-left-color: rgba(0, 0, 0, 0.25); } .popover.left > .arrow:after { - content: " "; right: 1px; + bottom: -10px; + content: " "; border-right-width: 0; border-left-color: #ffffff; - bottom: -10px; +} +.popover-title { + padding: 8px 14px; + margin: 0; + font-size: 14px; + background-color: #f7f7f7; + border-bottom: 1px solid #ebebeb; + border-radius: -1 -1 0 0; +} +.popover-content { + padding: 9px 14px; } .carousel { position: relative; } .carousel-inner { position: relative; - overflow: hidden; width: 100%; + overflow: hidden; } .carousel-inner > .item { - 
display: none; position: relative; + display: none; -webkit-transition: 0.6s ease-in-out left; -o-transition: 0.6s ease-in-out left; transition: 0.6s ease-in-out left; @@ -6323,42 +6298,42 @@ button.close { .carousel-control { position: absolute; top: 0; - left: 0; bottom: 0; + left: 0; width: 15%; - opacity: 0.5; - filter: alpha(opacity=50); font-size: 20px; color: #ffffff; text-align: center; text-shadow: 0 1px 2px rgba(0, 0, 0, 0.6); background-color: rgba(0, 0, 0, 0); + filter: alpha(opacity=50); + opacity: 0.5; } .carousel-control.left { background-image: -webkit-linear-gradient(left, rgba(0, 0, 0, 0.5) 0%, rgba(0, 0, 0, 0.0001) 100%); background-image: -o-linear-gradient(left, rgba(0, 0, 0, 0.5) 0%, rgba(0, 0, 0, 0.0001) 100%); background-image: -webkit-gradient(linear, left top, right top, from(rgba(0, 0, 0, 0.5)), to(rgba(0, 0, 0, 0.0001))); background-image: linear-gradient(to right, rgba(0, 0, 0, 0.5) 0%, rgba(0, 0, 0, 0.0001) 100%); - background-repeat: repeat-x; filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#80000000', endColorstr='#00000000', GradientType=1); + background-repeat: repeat-x; } .carousel-control.right { - left: auto; right: 0; + left: auto; background-image: -webkit-linear-gradient(left, rgba(0, 0, 0, 0.0001) 0%, rgba(0, 0, 0, 0.5) 100%); background-image: -o-linear-gradient(left, rgba(0, 0, 0, 0.0001) 0%, rgba(0, 0, 0, 0.5) 100%); background-image: -webkit-gradient(linear, left top, right top, from(rgba(0, 0, 0, 0.0001)), to(rgba(0, 0, 0, 0.5))); background-image: linear-gradient(to right, rgba(0, 0, 0, 0.0001) 0%, rgba(0, 0, 0, 0.5) 100%); - background-repeat: repeat-x; filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#00000000', endColorstr='#80000000', GradientType=1); + background-repeat: repeat-x; } .carousel-control:hover, .carousel-control:focus { - outline: 0; color: #ffffff; text-decoration: none; - opacity: 0.9; + outline: 0; filter: alpha(opacity=90); + opacity: 0.9; } .carousel-control 
.icon-prev, .carousel-control .icon-next, @@ -6366,9 +6341,9 @@ button.close { .carousel-control .glyphicon-chevron-right { position: absolute; top: 50%; - margin-top: -10px; z-index: 5; display: inline-block; + margin-top: -10px; } .carousel-control .icon-prev, .carousel-control .glyphicon-chevron-left { @@ -6384,14 +6359,14 @@ button.close { .carousel-control .icon-next { width: 20px; height: 20px; - line-height: 1; font-family: serif; + line-height: 1; } .carousel-control .icon-prev:before { - content: '\2039'; + content: "\2039"; } .carousel-control .icon-next:before { - content: '\203a'; + content: "\203a"; } .carousel-indicators { position: absolute; @@ -6399,10 +6374,10 @@ button.close { left: 50%; z-index: 15; width: 60%; - margin-left: -30%; padding-left: 0; - list-style: none; + margin-left: -30%; text-align: center; + list-style: none; } .carousel-indicators li { display: inline-block; @@ -6410,23 +6385,23 @@ button.close { height: 10px; margin: 1px; text-indent: -999px; - border: 1px solid #ffffff; - border-radius: 10px; cursor: pointer; background-color: #000 \9; background-color: rgba(0, 0, 0, 0); + border: 1px solid #ffffff; + border-radius: 10px; } .carousel-indicators .active { - margin: 0; width: 12px; height: 12px; + margin: 0; background-color: #ffffff; } .carousel-caption { position: absolute; - left: 15%; right: 15%; bottom: 20px; + left: 15%; z-index: 10; padding-top: 20px; padding-bottom: 20px; @@ -6456,8 +6431,8 @@ button.close { margin-right: -10px; } .carousel-caption { - left: 20%; right: 20%; + left: 20%; padding-bottom: 30px; } .carousel-indicators { @@ -6496,8 +6471,8 @@ button.close { .modal-header:after, .modal-footer:before, .modal-footer:after { - content: " "; display: table; + content: " "; } .clearfix:after, .dl-horizontal dd:after, @@ -6519,8 +6494,8 @@ button.close { } .center-block { display: block; - margin-left: auto; margin-right: auto; + margin-left: auto; } .pull-right { float: right !important; diff --git 
a/presto-main/src/main/resources/webapp/vendor/bootstrap/css/bootstrap.min.css b/presto-main/src/main/resources/webapp/vendor/bootstrap/css/bootstrap.min.css old mode 100755 new mode 100644 index f7cf1df45b7d..5f9e23360bbd --- a/presto-main/src/main/resources/webapp/vendor/bootstrap/css/bootstrap.min.css +++ b/presto-main/src/main/resources/webapp/vendor/bootstrap/css/bootstrap.min.css @@ -1,14 +1,7 @@ /*! - * Bootstrap v3.3.5 (http://getbootstrap.com) - * Copyright 2011-2016 Twitter, Inc. - * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) - */ - -/*! - * Generated using the Bootstrap Customizer (http://getbootstrap.com/customize/?id=71446b832bd9dbb87141a654eb911637) - * Config saved to config.json and https://gist.github.com/71446b832bd9dbb87141a654eb911637 + * Generated using the Bootstrap Customizer (https://getbootstrap.com/docs/3.4/customize/) *//*! - * Bootstrap v3.3.6 (http://getbootstrap.com) - * Copyright 2011-2015 Twitter, Inc. + * Bootstrap v3.4.1 (https://getbootstrap.com/) + * Copyright 2011-2019 Twitter, Inc. * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) - *//*! 
normalize.css v3.0.3 | MIT License | github.com/necolas/normalize.css */html{font-family:sans-serif;-ms-text-size-adjust:100%;-webkit-text-size-adjust:100%}body{margin:0}article,aside,details,figcaption,figure,footer,header,hgroup,main,menu,nav,section,summary{display:block}audio,canvas,progress,video{display:inline-block;vertical-align:baseline}audio:not([controls]){display:none;height:0}[hidden],template{display:none}a{background-color:transparent}a:active,a:hover{outline:0}abbr[title]{border-bottom:1px dotted}b,strong{font-weight:bold}dfn{font-style:italic}h1{font-size:2em;margin:0.67em 0}mark{background:#ff0;color:#000}small{font-size:80%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sup{top:-0.5em}sub{bottom:-0.25em}img{border:0}svg:not(:root){overflow:hidden}figure{margin:1em 40px}hr{-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box;height:0}pre{overflow:auto}code,kbd,pre,samp{font-family:monospace, monospace;font-size:1em}button,input,optgroup,select,textarea{color:inherit;font:inherit;margin:0}button{overflow:visible}button,select{text-transform:none}button,html input[type="button"],input[type="reset"],input[type="submit"]{-webkit-appearance:button;cursor:pointer}button[disabled],html input[disabled]{cursor:default}button::-moz-focus-inner,input::-moz-focus-inner{border:0;padding:0}input{line-height:normal}input[type="checkbox"],input[type="radio"]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;padding:0}input[type="number"]::-webkit-inner-spin-button,input[type="number"]::-webkit-outer-spin-button{height:auto}input[type="search"]{-webkit-appearance:textfield;-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box}input[type="search"]::-webkit-search-cancel-button,input[type="search"]::-webkit-search-decoration{-webkit-appearance:none}fieldset{border:1px solid #c0c0c0;margin:0 2px;padding:0.35em 0.625em 
0.75em}legend{border:0;padding:0}textarea{overflow:auto}optgroup{font-weight:bold}table{border-collapse:collapse;border-spacing:0}td,th{padding:0}/*! Source: https://github.com/h5bp/html5-boilerplate/blob/master/src/css/main.css */@media print{*,*:before,*:after{background:transparent !important;color:#000 !important;-webkit-box-shadow:none !important;box-shadow:none !important;text-shadow:none !important}a,a:visited{text-decoration:underline}a[href]:after{content:" (" attr(href) ")"}abbr[title]:after{content:" (" attr(title) ")"}a[href^="#"]:after,a[href^="javascript:"]:after{content:""}pre,blockquote{border:1px solid #999;page-break-inside:avoid}thead{display:table-header-group}tr,img{page-break-inside:avoid}img{max-width:100% !important}p,h2,h3{orphans:3;widows:3}h2,h3{page-break-after:avoid}.navbar{display:none}.btn>.caret,.dropup>.btn>.caret{border-top-color:#000 !important}.label{border:1px solid #000}.table{border-collapse:collapse !important}.table td,.table th{background-color:#fff !important}.table-bordered th,.table-bordered td{border:1px solid #ddd !important}}@font-face{font-family:'Glyphicons Halflings';src:url('../fonts/glyphicons-halflings-regular.eot');src:url('../fonts/glyphicons-halflings-regular.eot?#iefix') format('embedded-opentype'),url('../fonts/glyphicons-halflings-regular.woff2') format('woff2'),url('../fonts/glyphicons-halflings-regular.woff') format('woff'),url('../fonts/glyphicons-halflings-regular.ttf') format('truetype'),url('../fonts/glyphicons-halflings-regular.svg#glyphicons_halflingsregular') format('svg')}.glyphicon{position:relative;top:1px;display:inline-block;font-family:'Glyphicons 
Halflings';font-style:normal;font-weight:normal;line-height:1;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.glyphicon-asterisk:before{content:"\002a"}.glyphicon-plus:before{content:"\002b"}.glyphicon-euro:before,.glyphicon-eur:before{content:"\20ac"}.glyphicon-minus:before{content:"\2212"}.glyphicon-cloud:before{content:"\2601"}.glyphicon-envelope:before{content:"\2709"}.glyphicon-pencil:before{content:"\270f"}.glyphicon-glass:before{content:"\e001"}.glyphicon-music:before{content:"\e002"}.glyphicon-search:before{content:"\e003"}.glyphicon-heart:before{content:"\e005"}.glyphicon-star:before{content:"\e006"}.glyphicon-star-empty:before{content:"\e007"}.glyphicon-user:before{content:"\e008"}.glyphicon-film:before{content:"\e009"}.glyphicon-th-large:before{content:"\e010"}.glyphicon-th:before{content:"\e011"}.glyphicon-th-list:before{content:"\e012"}.glyphicon-ok:before{content:"\e013"}.glyphicon-remove:before{content:"\e014"}.glyphicon-zoom-in:before{content:"\e015"}.glyphicon-zoom-out:before{content:"\e016"}.glyphicon-off:before{content:"\e017"}.glyphicon-signal:before{content:"\e018"}.glyphicon-cog:before{content:"\e019"}.glyphicon-trash:before{content:"\e020"}.glyphicon-home:before{content:"\e021"}.glyphicon-file:before{content:"\e022"}.glyphicon-time:before{content:"\e023"}.glyphicon-road:before{content:"\e024"}.glyphicon-download-alt:before{content:"\e025"}.glyphicon-download:before{content:"\e026"}.glyphicon-upload:before{content:"\e027"}.glyphicon-inbox:before{content:"\e028"}.glyphicon-play-circle:before{content:"\e029"}.glyphicon-repeat:before{content:"\e030"}.glyphicon-refresh:before{content:"\e031"}.glyphicon-list-alt:before{content:"\e032"}.glyphicon-lock:before{content:"\e033"}.glyphicon-flag:before{content:"\e034"}.glyphicon-headphones:before{content:"\e035"}.glyphicon-volume-off:before{content:"\e036"}.glyphicon-volume-down:before{content:"\e037"}.glyphicon-volume-up:before{content:"\e038"}.glyphicon-qrcode:before{content:"\e039"
}.glyphicon-barcode:before{content:"\e040"}.glyphicon-tag:before{content:"\e041"}.glyphicon-tags:before{content:"\e042"}.glyphicon-book:before{content:"\e043"}.glyphicon-bookmark:before{content:"\e044"}.glyphicon-print:before{content:"\e045"}.glyphicon-camera:before{content:"\e046"}.glyphicon-font:before{content:"\e047"}.glyphicon-bold:before{content:"\e048"}.glyphicon-italic:before{content:"\e049"}.glyphicon-text-height:before{content:"\e050"}.glyphicon-text-width:before{content:"\e051"}.glyphicon-align-left:before{content:"\e052"}.glyphicon-align-center:before{content:"\e053"}.glyphicon-align-right:before{content:"\e054"}.glyphicon-align-justify:before{content:"\e055"}.glyphicon-list:before{content:"\e056"}.glyphicon-indent-left:before{content:"\e057"}.glyphicon-indent-right:before{content:"\e058"}.glyphicon-facetime-video:before{content:"\e059"}.glyphicon-picture:before{content:"\e060"}.glyphicon-map-marker:before{content:"\e062"}.glyphicon-adjust:before{content:"\e063"}.glyphicon-tint:before{content:"\e064"}.glyphicon-edit:before{content:"\e065"}.glyphicon-share:before{content:"\e066"}.glyphicon-check:before{content:"\e067"}.glyphicon-move:before{content:"\e068"}.glyphicon-step-backward:before{content:"\e069"}.glyphicon-fast-backward:before{content:"\e070"}.glyphicon-backward:before{content:"\e071"}.glyphicon-play:before{content:"\e072"}.glyphicon-pause:before{content:"\e073"}.glyphicon-stop:before{content:"\e074"}.glyphicon-forward:before{content:"\e075"}.glyphicon-fast-forward:before{content:"\e076"}.glyphicon-step-forward:before{content:"\e077"}.glyphicon-eject:before{content:"\e078"}.glyphicon-chevron-left:before{content:"\e079"}.glyphicon-chevron-right:before{content:"\e080"}.glyphicon-plus-sign:before{content:"\e081"}.glyphicon-minus-sign:before{content:"\e082"}.glyphicon-remove-sign:before{content:"\e083"}.glyphicon-ok-sign:before{content:"\e084"}.glyphicon-question-sign:before{content:"\e085"}.glyphicon-info-sign:before{content:"\e086"}.glyphicon-screens
hot:before{content:"\e087"}.glyphicon-remove-circle:before{content:"\e088"}.glyphicon-ok-circle:before{content:"\e089"}.glyphicon-ban-circle:before{content:"\e090"}.glyphicon-arrow-left:before{content:"\e091"}.glyphicon-arrow-right:before{content:"\e092"}.glyphicon-arrow-up:before{content:"\e093"}.glyphicon-arrow-down:before{content:"\e094"}.glyphicon-share-alt:before{content:"\e095"}.glyphicon-resize-full:before{content:"\e096"}.glyphicon-resize-small:before{content:"\e097"}.glyphicon-exclamation-sign:before{content:"\e101"}.glyphicon-gift:before{content:"\e102"}.glyphicon-leaf:before{content:"\e103"}.glyphicon-fire:before{content:"\e104"}.glyphicon-eye-open:before{content:"\e105"}.glyphicon-eye-close:before{content:"\e106"}.glyphicon-warning-sign:before{content:"\e107"}.glyphicon-plane:before{content:"\e108"}.glyphicon-calendar:before{content:"\e109"}.glyphicon-random:before{content:"\e110"}.glyphicon-comment:before{content:"\e111"}.glyphicon-magnet:before{content:"\e112"}.glyphicon-chevron-up:before{content:"\e113"}.glyphicon-chevron-down:before{content:"\e114"}.glyphicon-retweet:before{content:"\e115"}.glyphicon-shopping-cart:before{content:"\e116"}.glyphicon-folder-close:before{content:"\e117"}.glyphicon-folder-open:before{content:"\e118"}.glyphicon-resize-vertical:before{content:"\e119"}.glyphicon-resize-horizontal:before{content:"\e120"}.glyphicon-hdd:before{content:"\e121"}.glyphicon-bullhorn:before{content:"\e122"}.glyphicon-bell:before{content:"\e123"}.glyphicon-certificate:before{content:"\e124"}.glyphicon-thumbs-up:before{content:"\e125"}.glyphicon-thumbs-down:before{content:"\e126"}.glyphicon-hand-right:before{content:"\e127"}.glyphicon-hand-left:before{content:"\e128"}.glyphicon-hand-up:before{content:"\e129"}.glyphicon-hand-down:before{content:"\e130"}.glyphicon-circle-arrow-right:before{content:"\e131"}.glyphicon-circle-arrow-left:before{content:"\e132"}.glyphicon-circle-arrow-up:before{content:"\e133"}.glyphicon-circle-arrow-down:before{content:"\e1
34"}.glyphicon-globe:before{content:"\e135"}.glyphicon-wrench:before{content:"\e136"}.glyphicon-tasks:before{content:"\e137"}.glyphicon-filter:before{content:"\e138"}.glyphicon-briefcase:before{content:"\e139"}.glyphicon-fullscreen:before{content:"\e140"}.glyphicon-dashboard:before{content:"\e141"}.glyphicon-paperclip:before{content:"\e142"}.glyphicon-heart-empty:before{content:"\e143"}.glyphicon-link:before{content:"\e144"}.glyphicon-phone:before{content:"\e145"}.glyphicon-pushpin:before{content:"\e146"}.glyphicon-usd:before{content:"\e148"}.glyphicon-gbp:before{content:"\e149"}.glyphicon-sort:before{content:"\e150"}.glyphicon-sort-by-alphabet:before{content:"\e151"}.glyphicon-sort-by-alphabet-alt:before{content:"\e152"}.glyphicon-sort-by-order:before{content:"\e153"}.glyphicon-sort-by-order-alt:before{content:"\e154"}.glyphicon-sort-by-attributes:before{content:"\e155"}.glyphicon-sort-by-attributes-alt:before{content:"\e156"}.glyphicon-unchecked:before{content:"\e157"}.glyphicon-expand:before{content:"\e158"}.glyphicon-collapse-down:before{content:"\e159"}.glyphicon-collapse-up:before{content:"\e160"}.glyphicon-log-in:before{content:"\e161"}.glyphicon-flash:before{content:"\e162"}.glyphicon-log-out:before{content:"\e163"}.glyphicon-new-window:before{content:"\e164"}.glyphicon-record:before{content:"\e165"}.glyphicon-save:before{content:"\e166"}.glyphicon-open:before{content:"\e167"}.glyphicon-saved:before{content:"\e168"}.glyphicon-import:before{content:"\e169"}.glyphicon-export:before{content:"\e170"}.glyphicon-send:before{content:"\e171"}.glyphicon-floppy-disk:before{content:"\e172"}.glyphicon-floppy-saved:before{content:"\e173"}.glyphicon-floppy-remove:before{content:"\e174"}.glyphicon-floppy-save:before{content:"\e175"}.glyphicon-floppy-open:before{content:"\e176"}.glyphicon-credit-card:before{content:"\e177"}.glyphicon-transfer:before{content:"\e178"}.glyphicon-cutlery:before{content:"\e179"}.glyphicon-header:before{content:"\e180"}.glyphicon-compressed:befor
e{content:"\e181"}.glyphicon-earphone:before{content:"\e182"}.glyphicon-phone-alt:before{content:"\e183"}.glyphicon-tower:before{content:"\e184"}.glyphicon-stats:before{content:"\e185"}.glyphicon-sd-video:before{content:"\e186"}.glyphicon-hd-video:before{content:"\e187"}.glyphicon-subtitles:before{content:"\e188"}.glyphicon-sound-stereo:before{content:"\e189"}.glyphicon-sound-dolby:before{content:"\e190"}.glyphicon-sound-5-1:before{content:"\e191"}.glyphicon-sound-6-1:before{content:"\e192"}.glyphicon-sound-7-1:before{content:"\e193"}.glyphicon-copyright-mark:before{content:"\e194"}.glyphicon-registration-mark:before{content:"\e195"}.glyphicon-cloud-download:before{content:"\e197"}.glyphicon-cloud-upload:before{content:"\e198"}.glyphicon-tree-conifer:before{content:"\e199"}.glyphicon-tree-deciduous:before{content:"\e200"}.glyphicon-cd:before{content:"\e201"}.glyphicon-save-file:before{content:"\e202"}.glyphicon-open-file:before{content:"\e203"}.glyphicon-level-up:before{content:"\e204"}.glyphicon-copy:before{content:"\e205"}.glyphicon-paste:before{content:"\e206"}.glyphicon-alert:before{content:"\e209"}.glyphicon-equalizer:before{content:"\e210"}.glyphicon-king:before{content:"\e211"}.glyphicon-queen:before{content:"\e212"}.glyphicon-pawn:before{content:"\e213"}.glyphicon-bishop:before{content:"\e214"}.glyphicon-knight:before{content:"\e215"}.glyphicon-baby-formula:before{content:"\e216"}.glyphicon-tent:before{content:"\26fa"}.glyphicon-blackboard:before{content:"\e218"}.glyphicon-bed:before{content:"\e219"}.glyphicon-apple:before{content:"\f8ff"}.glyphicon-erase:before{content:"\e221"}.glyphicon-hourglass:before{content:"\231b"}.glyphicon-lamp:before{content:"\e223"}.glyphicon-duplicate:before{content:"\e224"}.glyphicon-piggy-bank:before{content:"\e225"}.glyphicon-scissors:before{content:"\e226"}.glyphicon-bitcoin:before{content:"\e227"}.glyphicon-btc:before{content:"\e227"}.glyphicon-xbt:before{content:"\e227"}.glyphicon-yen:before{content:"\00a5"}.glyphicon-jpy:b
efore{content:"\00a5"}.glyphicon-ruble:before{content:"\20bd"}.glyphicon-rub:before{content:"\20bd"}.glyphicon-scale:before{content:"\e230"}.glyphicon-ice-lolly:before{content:"\e231"}.glyphicon-ice-lolly-tasted:before{content:"\e232"}.glyphicon-education:before{content:"\e233"}.glyphicon-option-horizontal:before{content:"\e234"}.glyphicon-option-vertical:before{content:"\e235"}.glyphicon-menu-hamburger:before{content:"\e236"}.glyphicon-modal-window:before{content:"\e237"}.glyphicon-oil:before{content:"\e238"}.glyphicon-grain:before{content:"\e239"}.glyphicon-sunglasses:before{content:"\e240"}.glyphicon-text-size:before{content:"\e241"}.glyphicon-text-color:before{content:"\e242"}.glyphicon-text-background:before{content:"\e243"}.glyphicon-object-align-top:before{content:"\e244"}.glyphicon-object-align-bottom:before{content:"\e245"}.glyphicon-object-align-horizontal:before{content:"\e246"}.glyphicon-object-align-left:before{content:"\e247"}.glyphicon-object-align-vertical:before{content:"\e248"}.glyphicon-object-align-right:before{content:"\e249"}.glyphicon-triangle-right:before{content:"\e250"}.glyphicon-triangle-left:before{content:"\e251"}.glyphicon-triangle-bottom:before{content:"\e252"}.glyphicon-triangle-top:before{content:"\e253"}.glyphicon-console:before{content:"\e254"}.glyphicon-superscript:before{content:"\e255"}.glyphicon-subscript:before{content:"\e256"}.glyphicon-menu-left:before{content:"\e257"}.glyphicon-menu-right:before{content:"\e258"}.glyphicon-menu-down:before{content:"\e259"}.glyphicon-menu-up:before{content:"\e260"}*{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}*:before,*:after{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}html{font-size:10px;-webkit-tap-highlight-color:rgba(0,0,0,0)}body{font-family:"Roboto","Helvetica 
Neue",Helvetica,Arial,sans-serif;font-size:14px;line-height:1.42857143;color:#999;background-color:#252830}input,button,select,textarea{font-family:inherit;font-size:inherit;line-height:inherit}a{color:#98e8ff;text-decoration:none}a:hover,a:focus{color:#4cd7ff;text-decoration:underline}a:focus{outline:thin dotted;outline:5px auto -webkit-focus-ring-color;outline-offset:-2px}figure{margin:0}img{vertical-align:middle}.img-responsive,.thumbnail>img,.thumbnail a>img,.carousel-inner>.item>img,.carousel-inner>.item>a>img{display:block;max-width:100%;height:auto}.img-rounded{border-radius:0}.img-thumbnail{padding:4px;line-height:1.42857143;background-color:#252830;border:1px solid #ddd;border-radius:0;-webkit-transition:all .2s ease-in-out;-o-transition:all .2s ease-in-out;transition:all .2s ease-in-out;display:inline-block;max-width:100%;height:auto}.img-circle{border-radius:50%}hr{margin-top:20px;margin-bottom:20px;border:0;border-top:1px solid #555}.sr-only{position:absolute;width:1px;height:1px;margin:-1px;padding:0;overflow:hidden;clip:rect(0, 0, 0, 0);border:0}.sr-only-focusable:active,.sr-only-focusable:focus{position:static;width:auto;height:auto;margin:0;overflow:visible;clip:auto}[role="button"]{cursor:pointer}h1,h2,h3,h4,h5,h6,.h1,.h2,.h3,.h4,.h5,.h6{font-family:inherit;font-weight:500;line-height:1.1;color:inherit}h1 small,h2 small,h3 small,h4 small,h5 small,h6 small,.h1 small,.h2 small,.h3 small,.h4 small,.h5 small,.h6 small,h1 .small,h2 .small,h3 .small,h4 .small,h5 .small,h6 .small,.h1 .small,.h2 .small,.h3 .small,.h4 .small,.h5 .small,.h6 .small{font-weight:normal;line-height:1;color:#222}h1,.h1,h2,.h2,h3,.h3{margin-top:20px;margin-bottom:10px}h1 small,.h1 small,h2 small,.h2 small,h3 small,.h3 small,h1 .small,.h1 .small,h2 .small,.h2 .small,h3 .small,.h3 .small{font-size:65%}h4,.h4,h5,.h5,h6,.h6{margin-top:10px;margin-bottom:10px}h4 small,.h4 small,h5 small,.h5 small,h6 small,.h6 small,h4 .small,.h4 .small,h5 .small,.h5 .small,h6 .small,.h6 
.small{font-size:75%}h1,.h1{font-size:36px}h2,.h2{font-size:30px}h3,.h3{font-size:24px}h4,.h4{font-size:18px}h5,.h5{font-size:14px}h6,.h6{font-size:12px}p{margin:0 0 10px}.lead{margin-bottom:20px;font-size:16px;font-weight:300;line-height:1.4}@media (min-width:768px){.lead{font-size:21px}}small,.small{font-size:85%}mark,.mark{background-color:#fcf8e3;padding:.2em}.text-left{text-align:left}.text-right{text-align:right}.text-center{text-align:center}.text-justify{text-align:justify}.text-nowrap{white-space:nowrap}.text-lowercase{text-transform:lowercase}.text-uppercase{text-transform:uppercase}.text-capitalize{text-transform:capitalize}.text-muted{color:#222}.text-primary{color:#98e8ff}a.text-primary:hover,a.text-primary:focus{color:#65ddff}.text-success{color:#3c763d}a.text-success:hover,a.text-success:focus{color:#2b542c}.text-info{color:#31708f}a.text-info:hover,a.text-info:focus{color:#245269}.text-warning{color:#8a6d3b}a.text-warning:hover,a.text-warning:focus{color:#66512c}.text-danger{color:#a94442}a.text-danger:hover,a.text-danger:focus{color:#843534}.bg-primary{color:#fff;background-color:#98e8ff}a.bg-primary:hover,a.bg-primary:focus{background-color:#65ddff}.bg-success{background-color:#dff0d8}a.bg-success:hover,a.bg-success:focus{background-color:#c1e2b3}.bg-info{background-color:#d9edf7}a.bg-info:hover,a.bg-info:focus{background-color:#afd9ee}.bg-warning{background-color:#fcf8e3}a.bg-warning:hover,a.bg-warning:focus{background-color:#f7ecb5}.bg-danger{background-color:#f2dede}a.bg-danger:hover,a.bg-danger:focus{background-color:#e4b9b9}.page-header{padding-bottom:9px;margin:40px 0 20px;border-bottom:1px solid #555}ul,ol{margin-top:0;margin-bottom:10px}ul ul,ol ul,ul ol,ol 
ol{margin-bottom:0}.list-unstyled{padding-left:0;list-style:none}.list-inline{padding-left:0;list-style:none;margin-left:-5px}.list-inline>li{display:inline-block;padding-left:5px;padding-right:5px}dl{margin-top:0;margin-bottom:20px}dt,dd{line-height:1.42857143}dt{font-weight:bold}dd{margin-left:0}@media (min-width:768px){.dl-horizontal dt{float:left;width:160px;clear:left;text-align:right;overflow:hidden;text-overflow:ellipsis;white-space:nowrap}.dl-horizontal dd{margin-left:180px}}abbr[title],abbr[data-original-title]{cursor:help;border-bottom:1px dotted #222}.initialism{font-size:90%;text-transform:uppercase}blockquote{padding:10px 20px;margin:0 0 20px;font-size:17.5px;border-left:5px solid #555}blockquote p:last-child,blockquote ul:last-child,blockquote ol:last-child{margin-bottom:0}blockquote footer,blockquote small,blockquote .small{display:block;font-size:80%;line-height:1.42857143;color:#222}blockquote footer:before,blockquote small:before,blockquote .small:before{content:'\2014 \00A0'}.blockquote-reverse,blockquote.pull-right{padding-right:15px;padding-left:0;border-right:5px solid #555;border-left:0;text-align:right}.blockquote-reverse footer:before,blockquote.pull-right footer:before,.blockquote-reverse small:before,blockquote.pull-right small:before,.blockquote-reverse .small:before,blockquote.pull-right .small:before{content:''}.blockquote-reverse footer:after,blockquote.pull-right footer:after,.blockquote-reverse small:after,blockquote.pull-right small:after,.blockquote-reverse .small:after,blockquote.pull-right .small:after{content:'\00A0 \2014'}address{margin-bottom:20px;font-style:normal;line-height:1.42857143}code,kbd,pre,samp{font-family:Menlo,Monaco,Consolas,"Courier New",monospace}code{padding:2px 4px;font-size:90%;color:#c7254e;background-color:#f9f2f4;border-radius:0}kbd{padding:2px 4px;font-size:90%;color:#fff;background-color:#333;border-radius:0;-webkit-box-shadow:inset 0 -1px 0 rgba(0,0,0,0.25);box-shadow:inset 0 -1px 0 
rgba(0,0,0,0.25)}kbd kbd{padding:0;font-size:100%;font-weight:bold;-webkit-box-shadow:none;box-shadow:none}pre{display:block;padding:9.5px;margin:0 0 10px;font-size:13px;line-height:1.42857143;word-break:break-all;word-wrap:break-word;color:#999;background-color:#f5f5f5;border:1px solid #ccc;border-radius:0}pre code{padding:0;font-size:inherit;color:inherit;white-space:pre-wrap;background-color:transparent;border-radius:0}.pre-scrollable{max-height:340px;overflow-y:scroll}.container{margin-right:auto;margin-left:auto;padding-left:15px;padding-right:15px}@media (min-width:768px){.container{width:750px}}@media (min-width:992px){.container{width:970px}}@media (min-width:1200px){.container{width:1170px}}.container-fluid{margin-right:auto;margin-left:auto;padding-left:15px;padding-right:15px}.row{margin-left:-15px;margin-right:-15px}.col-xs-1, .col-sm-1, .col-md-1, .col-lg-1, .col-xs-2, .col-sm-2, .col-md-2, .col-lg-2, .col-xs-3, .col-sm-3, .col-md-3, .col-lg-3, .col-xs-4, .col-sm-4, .col-md-4, .col-lg-4, .col-xs-5, .col-sm-5, .col-md-5, .col-lg-5, .col-xs-6, .col-sm-6, .col-md-6, .col-lg-6, .col-xs-7, .col-sm-7, .col-md-7, .col-lg-7, .col-xs-8, .col-sm-8, .col-md-8, .col-lg-8, .col-xs-9, .col-sm-9, .col-md-9, .col-lg-9, .col-xs-10, .col-sm-10, .col-md-10, .col-lg-10, .col-xs-11, .col-sm-11, .col-md-11, .col-lg-11, .col-xs-12, .col-sm-12, .col-md-12, .col-lg-12{position:relative;min-height:1px;padding-left:15px;padding-right:15px}.col-xs-1, .col-xs-2, .col-xs-3, .col-xs-4, .col-xs-5, .col-xs-6, .col-xs-7, .col-xs-8, .col-xs-9, .col-xs-10, .col-xs-11, 
.col-xs-12{float:left}.col-xs-12{width:100%}.col-xs-11{width:91.66666667%}.col-xs-10{width:83.33333333%}.col-xs-9{width:75%}.col-xs-8{width:66.66666667%}.col-xs-7{width:58.33333333%}.col-xs-6{width:50%}.col-xs-5{width:41.66666667%}.col-xs-4{width:33.33333333%}.col-xs-3{width:25%}.col-xs-2{width:16.66666667%}.col-xs-1{width:8.33333333%}.col-xs-pull-12{right:100%}.col-xs-pull-11{right:91.66666667%}.col-xs-pull-10{right:83.33333333%}.col-xs-pull-9{right:75%}.col-xs-pull-8{right:66.66666667%}.col-xs-pull-7{right:58.33333333%}.col-xs-pull-6{right:50%}.col-xs-pull-5{right:41.66666667%}.col-xs-pull-4{right:33.33333333%}.col-xs-pull-3{right:25%}.col-xs-pull-2{right:16.66666667%}.col-xs-pull-1{right:8.33333333%}.col-xs-pull-0{right:auto}.col-xs-push-12{left:100%}.col-xs-push-11{left:91.66666667%}.col-xs-push-10{left:83.33333333%}.col-xs-push-9{left:75%}.col-xs-push-8{left:66.66666667%}.col-xs-push-7{left:58.33333333%}.col-xs-push-6{left:50%}.col-xs-push-5{left:41.66666667%}.col-xs-push-4{left:33.33333333%}.col-xs-push-3{left:25%}.col-xs-push-2{left:16.66666667%}.col-xs-push-1{left:8.33333333%}.col-xs-push-0{left:auto}.col-xs-offset-12{margin-left:100%}.col-xs-offset-11{margin-left:91.66666667%}.col-xs-offset-10{margin-left:83.33333333%}.col-xs-offset-9{margin-left:75%}.col-xs-offset-8{margin-left:66.66666667%}.col-xs-offset-7{margin-left:58.33333333%}.col-xs-offset-6{margin-left:50%}.col-xs-offset-5{margin-left:41.66666667%}.col-xs-offset-4{margin-left:33.33333333%}.col-xs-offset-3{margin-left:25%}.col-xs-offset-2{margin-left:16.66666667%}.col-xs-offset-1{margin-left:8.33333333%}.col-xs-offset-0{margin-left:0}@media (min-width:768px){.col-sm-1, .col-sm-2, .col-sm-3, .col-sm-4, .col-sm-5, .col-sm-6, .col-sm-7, .col-sm-8, .col-sm-9, .col-sm-10, .col-sm-11, 
.col-sm-12{float:left}.col-sm-12{width:100%}.col-sm-11{width:91.66666667%}.col-sm-10{width:83.33333333%}.col-sm-9{width:75%}.col-sm-8{width:66.66666667%}.col-sm-7{width:58.33333333%}.col-sm-6{width:50%}.col-sm-5{width:41.66666667%}.col-sm-4{width:33.33333333%}.col-sm-3{width:25%}.col-sm-2{width:16.66666667%}.col-sm-1{width:8.33333333%}.col-sm-pull-12{right:100%}.col-sm-pull-11{right:91.66666667%}.col-sm-pull-10{right:83.33333333%}.col-sm-pull-9{right:75%}.col-sm-pull-8{right:66.66666667%}.col-sm-pull-7{right:58.33333333%}.col-sm-pull-6{right:50%}.col-sm-pull-5{right:41.66666667%}.col-sm-pull-4{right:33.33333333%}.col-sm-pull-3{right:25%}.col-sm-pull-2{right:16.66666667%}.col-sm-pull-1{right:8.33333333%}.col-sm-pull-0{right:auto}.col-sm-push-12{left:100%}.col-sm-push-11{left:91.66666667%}.col-sm-push-10{left:83.33333333%}.col-sm-push-9{left:75%}.col-sm-push-8{left:66.66666667%}.col-sm-push-7{left:58.33333333%}.col-sm-push-6{left:50%}.col-sm-push-5{left:41.66666667%}.col-sm-push-4{left:33.33333333%}.col-sm-push-3{left:25%}.col-sm-push-2{left:16.66666667%}.col-sm-push-1{left:8.33333333%}.col-sm-push-0{left:auto}.col-sm-offset-12{margin-left:100%}.col-sm-offset-11{margin-left:91.66666667%}.col-sm-offset-10{margin-left:83.33333333%}.col-sm-offset-9{margin-left:75%}.col-sm-offset-8{margin-left:66.66666667%}.col-sm-offset-7{margin-left:58.33333333%}.col-sm-offset-6{margin-left:50%}.col-sm-offset-5{margin-left:41.66666667%}.col-sm-offset-4{margin-left:33.33333333%}.col-sm-offset-3{margin-left:25%}.col-sm-offset-2{margin-left:16.66666667%}.col-sm-offset-1{margin-left:8.33333333%}.col-sm-offset-0{margin-left:0}}@media (min-width:992px){.col-md-1, .col-md-2, .col-md-3, .col-md-4, .col-md-5, .col-md-6, .col-md-7, .col-md-8, .col-md-9, .col-md-10, .col-md-11, 
.col-md-12{float:left}.col-md-12{width:100%}.col-md-11{width:91.66666667%}.col-md-10{width:83.33333333%}.col-md-9{width:75%}.col-md-8{width:66.66666667%}.col-md-7{width:58.33333333%}.col-md-6{width:50%}.col-md-5{width:41.66666667%}.col-md-4{width:33.33333333%}.col-md-3{width:25%}.col-md-2{width:16.66666667%}.col-md-1{width:8.33333333%}.col-md-pull-12{right:100%}.col-md-pull-11{right:91.66666667%}.col-md-pull-10{right:83.33333333%}.col-md-pull-9{right:75%}.col-md-pull-8{right:66.66666667%}.col-md-pull-7{right:58.33333333%}.col-md-pull-6{right:50%}.col-md-pull-5{right:41.66666667%}.col-md-pull-4{right:33.33333333%}.col-md-pull-3{right:25%}.col-md-pull-2{right:16.66666667%}.col-md-pull-1{right:8.33333333%}.col-md-pull-0{right:auto}.col-md-push-12{left:100%}.col-md-push-11{left:91.66666667%}.col-md-push-10{left:83.33333333%}.col-md-push-9{left:75%}.col-md-push-8{left:66.66666667%}.col-md-push-7{left:58.33333333%}.col-md-push-6{left:50%}.col-md-push-5{left:41.66666667%}.col-md-push-4{left:33.33333333%}.col-md-push-3{left:25%}.col-md-push-2{left:16.66666667%}.col-md-push-1{left:8.33333333%}.col-md-push-0{left:auto}.col-md-offset-12{margin-left:100%}.col-md-offset-11{margin-left:91.66666667%}.col-md-offset-10{margin-left:83.33333333%}.col-md-offset-9{margin-left:75%}.col-md-offset-8{margin-left:66.66666667%}.col-md-offset-7{margin-left:58.33333333%}.col-md-offset-6{margin-left:50%}.col-md-offset-5{margin-left:41.66666667%}.col-md-offset-4{margin-left:33.33333333%}.col-md-offset-3{margin-left:25%}.col-md-offset-2{margin-left:16.66666667%}.col-md-offset-1{margin-left:8.33333333%}.col-md-offset-0{margin-left:0}}@media (min-width:1200px){.col-lg-1, .col-lg-2, .col-lg-3, .col-lg-4, .col-lg-5, .col-lg-6, .col-lg-7, .col-lg-8, .col-lg-9, .col-lg-10, .col-lg-11, 
.col-lg-12{float:left}.col-lg-12{width:100%}.col-lg-11{width:91.66666667%}.col-lg-10{width:83.33333333%}.col-lg-9{width:75%}.col-lg-8{width:66.66666667%}.col-lg-7{width:58.33333333%}.col-lg-6{width:50%}.col-lg-5{width:41.66666667%}.col-lg-4{width:33.33333333%}.col-lg-3{width:25%}.col-lg-2{width:16.66666667%}.col-lg-1{width:8.33333333%}.col-lg-pull-12{right:100%}.col-lg-pull-11{right:91.66666667%}.col-lg-pull-10{right:83.33333333%}.col-lg-pull-9{right:75%}.col-lg-pull-8{right:66.66666667%}.col-lg-pull-7{right:58.33333333%}.col-lg-pull-6{right:50%}.col-lg-pull-5{right:41.66666667%}.col-lg-pull-4{right:33.33333333%}.col-lg-pull-3{right:25%}.col-lg-pull-2{right:16.66666667%}.col-lg-pull-1{right:8.33333333%}.col-lg-pull-0{right:auto}.col-lg-push-12{left:100%}.col-lg-push-11{left:91.66666667%}.col-lg-push-10{left:83.33333333%}.col-lg-push-9{left:75%}.col-lg-push-8{left:66.66666667%}.col-lg-push-7{left:58.33333333%}.col-lg-push-6{left:50%}.col-lg-push-5{left:41.66666667%}.col-lg-push-4{left:33.33333333%}.col-lg-push-3{left:25%}.col-lg-push-2{left:16.66666667%}.col-lg-push-1{left:8.33333333%}.col-lg-push-0{left:auto}.col-lg-offset-12{margin-left:100%}.col-lg-offset-11{margin-left:91.66666667%}.col-lg-offset-10{margin-left:83.33333333%}.col-lg-offset-9{margin-left:75%}.col-lg-offset-8{margin-left:66.66666667%}.col-lg-offset-7{margin-left:58.33333333%}.col-lg-offset-6{margin-left:50%}.col-lg-offset-5{margin-left:41.66666667%}.col-lg-offset-4{margin-left:33.33333333%}.col-lg-offset-3{margin-left:25%}.col-lg-offset-2{margin-left:16.66666667%}.col-lg-offset-1{margin-left:8.33333333%}.col-lg-offset-0{margin-left:0}}table{background-color:transparent}caption{padding-top:8px;padding-bottom:8px;color:#222;text-align:left}th{text-align:left}.table{width:100%;max-width:100%;margin-bottom:20px}.table>thead>tr>th,.table>tbody>tr>th,.table>tfoot>tr>th,.table>thead>tr>td,.table>tbody>tr>td,.table>tfoot>tr>td{padding:8px;line-height:1.42857143;vertical-align:top;border-top:1px solid 
#ddd}.table>thead>tr>th{vertical-align:bottom;border-bottom:2px solid #ddd}.table>caption+thead>tr:first-child>th,.table>colgroup+thead>tr:first-child>th,.table>thead:first-child>tr:first-child>th,.table>caption+thead>tr:first-child>td,.table>colgroup+thead>tr:first-child>td,.table>thead:first-child>tr:first-child>td{border-top:0}.table>tbody+tbody{border-top:2px solid #ddd}.table .table{background-color:#252830}.table-condensed>thead>tr>th,.table-condensed>tbody>tr>th,.table-condensed>tfoot>tr>th,.table-condensed>thead>tr>td,.table-condensed>tbody>tr>td,.table-condensed>tfoot>tr>td{padding:5px}.table-bordered{border:1px solid #ddd}.table-bordered>thead>tr>th,.table-bordered>tbody>tr>th,.table-bordered>tfoot>tr>th,.table-bordered>thead>tr>td,.table-bordered>tbody>tr>td,.table-bordered>tfoot>tr>td{border:1px solid #ddd}.table-bordered>thead>tr>th,.table-bordered>thead>tr>td{border-bottom-width:2px}.table-striped>tbody>tr:nth-of-type(odd){background-color:#f9f9f9}.table-hover>tbody>tr:hover{background-color:#f5f5f5}table col[class*="col-"]{position:static;float:none;display:table-column}table td[class*="col-"],table 
th[class*="col-"]{position:static;float:none;display:table-cell}.table>thead>tr>td.active,.table>tbody>tr>td.active,.table>tfoot>tr>td.active,.table>thead>tr>th.active,.table>tbody>tr>th.active,.table>tfoot>tr>th.active,.table>thead>tr.active>td,.table>tbody>tr.active>td,.table>tfoot>tr.active>td,.table>thead>tr.active>th,.table>tbody>tr.active>th,.table>tfoot>tr.active>th{background-color:#f5f5f5}.table-hover>tbody>tr>td.active:hover,.table-hover>tbody>tr>th.active:hover,.table-hover>tbody>tr.active:hover>td,.table-hover>tbody>tr:hover>.active,.table-hover>tbody>tr.active:hover>th{background-color:#e8e8e8}.table>thead>tr>td.success,.table>tbody>tr>td.success,.table>tfoot>tr>td.success,.table>thead>tr>th.success,.table>tbody>tr>th.success,.table>tfoot>tr>th.success,.table>thead>tr.success>td,.table>tbody>tr.success>td,.table>tfoot>tr.success>td,.table>thead>tr.success>th,.table>tbody>tr.success>th,.table>tfoot>tr.success>th{background-color:#dff0d8}.table-hover>tbody>tr>td.success:hover,.table-hover>tbody>tr>th.success:hover,.table-hover>tbody>tr.success:hover>td,.table-hover>tbody>tr:hover>.success,.table-hover>tbody>tr.success:hover>th{background-color:#d0e9c6}.table>thead>tr>td.info,.table>tbody>tr>td.info,.table>tfoot>tr>td.info,.table>thead>tr>th.info,.table>tbody>tr>th.info,.table>tfoot>tr>th.info,.table>thead>tr.info>td,.table>tbody>tr.info>td,.table>tfoot>tr.info>td,.table>thead>tr.info>th,.table>tbody>tr.info>th,.table>tfoot>tr.info>th{background-color:#d9edf7}.table-hover>tbody>tr>td.info:hover,.table-hover>tbody>tr>th.info:hover,.table-hover>tbody>tr.info:hover>td,.table-hover>tbody>tr:hover>.info,.table-hover>tbody>tr.info:hover>th{background-color:#c4e3f3}.table>thead>tr>td.warning,.table>tbody>tr>td.warning,.table>tfoot>tr>td.warning,.table>thead>tr>th.warning,.table>tbody>tr>th.warning,.table>tfoot>tr>th.warning,.table>thead>tr.warning>td,.table>tbody>tr.warning>td,.table>tfoot>tr.warning>td,.table>thead>tr.warning>th,.table>tbody>tr.warning>th,.table
>tfoot>tr.warning>th{background-color:#fcf8e3}.table-hover>tbody>tr>td.warning:hover,.table-hover>tbody>tr>th.warning:hover,.table-hover>tbody>tr.warning:hover>td,.table-hover>tbody>tr:hover>.warning,.table-hover>tbody>tr.warning:hover>th{background-color:#faf2cc}.table>thead>tr>td.danger,.table>tbody>tr>td.danger,.table>tfoot>tr>td.danger,.table>thead>tr>th.danger,.table>tbody>tr>th.danger,.table>tfoot>tr>th.danger,.table>thead>tr.danger>td,.table>tbody>tr.danger>td,.table>tfoot>tr.danger>td,.table>thead>tr.danger>th,.table>tbody>tr.danger>th,.table>tfoot>tr.danger>th{background-color:#f2dede}.table-hover>tbody>tr>td.danger:hover,.table-hover>tbody>tr>th.danger:hover,.table-hover>tbody>tr.danger:hover>td,.table-hover>tbody>tr:hover>.danger,.table-hover>tbody>tr.danger:hover>th{background-color:#ebcccc}.table-responsive{overflow-x:auto;min-height:0.01%}@media screen and (max-width:767px){.table-responsive{width:100%;margin-bottom:15px;overflow-y:hidden;-ms-overflow-style:-ms-autohiding-scrollbar;border:1px solid 
#ddd}.table-responsive>.table{margin-bottom:0}.table-responsive>.table>thead>tr>th,.table-responsive>.table>tbody>tr>th,.table-responsive>.table>tfoot>tr>th,.table-responsive>.table>thead>tr>td,.table-responsive>.table>tbody>tr>td,.table-responsive>.table>tfoot>tr>td{white-space:nowrap}.table-responsive>.table-bordered{border:0}.table-responsive>.table-bordered>thead>tr>th:first-child,.table-responsive>.table-bordered>tbody>tr>th:first-child,.table-responsive>.table-bordered>tfoot>tr>th:first-child,.table-responsive>.table-bordered>thead>tr>td:first-child,.table-responsive>.table-bordered>tbody>tr>td:first-child,.table-responsive>.table-bordered>tfoot>tr>td:first-child{border-left:0}.table-responsive>.table-bordered>thead>tr>th:last-child,.table-responsive>.table-bordered>tbody>tr>th:last-child,.table-responsive>.table-bordered>tfoot>tr>th:last-child,.table-responsive>.table-bordered>thead>tr>td:last-child,.table-responsive>.table-bordered>tbody>tr>td:last-child,.table-responsive>.table-bordered>tfoot>tr>td:last-child{border-right:0}.table-responsive>.table-bordered>tbody>tr:last-child>th,.table-responsive>.table-bordered>tfoot>tr:last-child>th,.table-responsive>.table-bordered>tbody>tr:last-child>td,.table-responsive>.table-bordered>tfoot>tr:last-child>td{border-bottom:0}}fieldset{padding:0;margin:0;border:0;min-width:0}legend{display:block;width:100%;padding:0;margin-bottom:20px;font-size:21px;line-height:inherit;color:#999;border:0;border-bottom:1px solid #e5e5e5}label{display:inline-block;max-width:100%;margin-bottom:5px;font-weight:bold}input[type="search"]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}input[type="radio"],input[type="checkbox"]{margin:4px 0 0;margin-top:1px \9;line-height:normal}input[type="file"]{display:block}input[type="range"]{display:block;width:100%}select[multiple],select[size]{height:auto}input[type="file"]:focus,input[type="radio"]:focus,input[type="checkbox"]:focus{outline:thin dotted;outline:5px auto 
-webkit-focus-ring-color;outline-offset:-2px}output{display:block;padding-top:7px;font-size:14px;line-height:1.42857143;color:#000}.form-control{display:block;width:100%;height:34px;padding:6px 12px;font-size:14px;line-height:1.42857143;color:#000;background-color:#fff;background-image:none;border:1px solid #ccc;border-radius:0;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);-webkit-transition:border-color ease-in-out .15s, -webkit-box-shadow ease-in-out .15s;-o-transition:border-color ease-in-out .15s, box-shadow ease-in-out .15s;transition:border-color ease-in-out .15s, box-shadow ease-in-out .15s}.form-control:focus{border-color:#66afe9;outline:0;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075), 0 0 8px rgba(102, 175, 233, 0.6);box-shadow:inset 0 1px 1px rgba(0,0,0,.075), 0 0 8px rgba(102, 175, 233, 0.6)}.form-control::-moz-placeholder{color:#999;opacity:1}.form-control:-ms-input-placeholder{color:#999}.form-control::-webkit-input-placeholder{color:#999}.form-control::-ms-expand{border:0;background-color:transparent}.form-control[disabled],.form-control[readonly],fieldset[disabled] .form-control{background-color:#333;opacity:1}.form-control[disabled],fieldset[disabled] .form-control{cursor:not-allowed}textarea.form-control{height:auto}input[type="search"]{-webkit-appearance:none}@media screen and (-webkit-min-device-pixel-ratio:0){input[type="date"].form-control,input[type="time"].form-control,input[type="datetime-local"].form-control,input[type="month"].form-control{line-height:34px}input[type="date"].input-sm,input[type="time"].input-sm,input[type="datetime-local"].input-sm,input[type="month"].input-sm,.input-group-sm input[type="date"],.input-group-sm input[type="time"],.input-group-sm input[type="datetime-local"],.input-group-sm input[type="month"]{line-height:30px}input[type="date"].input-lg,input[type="time"].input-lg,input[type="datetime-local"].input-lg,input[type="month"].input-lg,.input-group-lg 
input[type="date"],.input-group-lg input[type="time"],.input-group-lg input[type="datetime-local"],.input-group-lg input[type="month"]{line-height:46px}}.form-group{margin-bottom:15px}.radio,.checkbox{position:relative;display:block;margin-top:10px;margin-bottom:10px}.radio label,.checkbox label{min-height:20px;padding-left:20px;margin-bottom:0;font-weight:normal;cursor:pointer}.radio input[type="radio"],.radio-inline input[type="radio"],.checkbox input[type="checkbox"],.checkbox-inline input[type="checkbox"]{position:absolute;margin-left:-20px;margin-top:4px \9}.radio+.radio,.checkbox+.checkbox{margin-top:-5px}.radio-inline,.checkbox-inline{position:relative;display:inline-block;padding-left:20px;margin-bottom:0;vertical-align:middle;font-weight:normal;cursor:pointer}.radio-inline+.radio-inline,.checkbox-inline+.checkbox-inline{margin-top:0;margin-left:10px}input[type="radio"][disabled],input[type="checkbox"][disabled],input[type="radio"].disabled,input[type="checkbox"].disabled,fieldset[disabled] input[type="radio"],fieldset[disabled] input[type="checkbox"]{cursor:not-allowed}.radio-inline.disabled,.checkbox-inline.disabled,fieldset[disabled] .radio-inline,fieldset[disabled] .checkbox-inline{cursor:not-allowed}.radio.disabled label,.checkbox.disabled label,fieldset[disabled] .radio label,fieldset[disabled] .checkbox label{cursor:not-allowed}.form-control-static{padding-top:7px;padding-bottom:7px;margin-bottom:0;min-height:34px}.form-control-static.input-lg,.form-control-static.input-sm{padding-left:0;padding-right:0}.input-sm{height:30px;padding:5px 10px;font-size:12px;line-height:1.5;border-radius:0}select.input-sm{height:30px;line-height:30px}textarea.input-sm,select[multiple].input-sm{height:auto}.form-group-sm .form-control{height:30px;padding:5px 10px;font-size:12px;line-height:1.5;border-radius:0}.form-group-sm select.form-control{height:30px;line-height:30px}.form-group-sm textarea.form-control,.form-group-sm 
select[multiple].form-control{height:auto}.form-group-sm .form-control-static{height:30px;min-height:32px;padding:6px 10px;font-size:12px;line-height:1.5}.input-lg{height:46px;padding:10px 16px;font-size:18px;line-height:1.3333333;border-radius:0}select.input-lg{height:46px;line-height:46px}textarea.input-lg,select[multiple].input-lg{height:auto}.form-group-lg .form-control{height:46px;padding:10px 16px;font-size:18px;line-height:1.3333333;border-radius:0}.form-group-lg select.form-control{height:46px;line-height:46px}.form-group-lg textarea.form-control,.form-group-lg select[multiple].form-control{height:auto}.form-group-lg .form-control-static{height:46px;min-height:38px;padding:11px 16px;font-size:18px;line-height:1.3333333}.has-feedback{position:relative}.has-feedback .form-control{padding-right:42.5px}.form-control-feedback{position:absolute;top:0;right:0;z-index:2;display:block;width:34px;height:34px;line-height:34px;text-align:center;pointer-events:none}.input-lg+.form-control-feedback,.input-group-lg+.form-control-feedback,.form-group-lg .form-control+.form-control-feedback{width:46px;height:46px;line-height:46px}.input-sm+.form-control-feedback,.input-group-sm+.form-control-feedback,.form-group-sm .form-control+.form-control-feedback{width:30px;height:30px;line-height:30px}.has-success .help-block,.has-success .control-label,.has-success .radio,.has-success .checkbox,.has-success .radio-inline,.has-success .checkbox-inline,.has-success.radio label,.has-success.checkbox label,.has-success.radio-inline label,.has-success.checkbox-inline label{color:#3c763d}.has-success .form-control{border-color:#3c763d;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075)}.has-success .form-control:focus{border-color:#2b542c;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #67b168;box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #67b168}.has-success 
.input-group-addon{color:#3c763d;border-color:#3c763d;background-color:#dff0d8}.has-success .form-control-feedback{color:#3c763d}.has-warning .help-block,.has-warning .control-label,.has-warning .radio,.has-warning .checkbox,.has-warning .radio-inline,.has-warning .checkbox-inline,.has-warning.radio label,.has-warning.checkbox label,.has-warning.radio-inline label,.has-warning.checkbox-inline label{color:#8a6d3b}.has-warning .form-control{border-color:#8a6d3b;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075)}.has-warning .form-control:focus{border-color:#66512c;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #c0a16b;box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #c0a16b}.has-warning .input-group-addon{color:#8a6d3b;border-color:#8a6d3b;background-color:#fcf8e3}.has-warning .form-control-feedback{color:#8a6d3b}.has-error .help-block,.has-error .control-label,.has-error .radio,.has-error .checkbox,.has-error .radio-inline,.has-error .checkbox-inline,.has-error.radio label,.has-error.checkbox label,.has-error.radio-inline label,.has-error.checkbox-inline label{color:#a94442}.has-error .form-control{border-color:#a94442;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075)}.has-error .form-control:focus{border-color:#843534;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #ce8483;box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #ce8483}.has-error .input-group-addon{color:#a94442;border-color:#a94442;background-color:#f2dede}.has-error .form-control-feedback{color:#a94442}.has-feedback label~.form-control-feedback{top:25px}.has-feedback label.sr-only~.form-control-feedback{top:0}.help-block{display:block;margin-top:5px;margin-bottom:10px;color:#d9d9d9}@media (min-width:768px){.form-inline .form-group{display:inline-block;margin-bottom:0;vertical-align:middle}.form-inline 
.form-control{display:inline-block;width:auto;vertical-align:middle}.form-inline .form-control-static{display:inline-block}.form-inline .input-group{display:inline-table;vertical-align:middle}.form-inline .input-group .input-group-addon,.form-inline .input-group .input-group-btn,.form-inline .input-group .form-control{width:auto}.form-inline .input-group>.form-control{width:100%}.form-inline .control-label{margin-bottom:0;vertical-align:middle}.form-inline .radio,.form-inline .checkbox{display:inline-block;margin-top:0;margin-bottom:0;vertical-align:middle}.form-inline .radio label,.form-inline .checkbox label{padding-left:0}.form-inline .radio input[type="radio"],.form-inline .checkbox input[type="checkbox"]{position:relative;margin-left:0}.form-inline .has-feedback .form-control-feedback{top:0}}.form-horizontal .radio,.form-horizontal .checkbox,.form-horizontal .radio-inline,.form-horizontal .checkbox-inline{margin-top:0;margin-bottom:0;padding-top:7px}.form-horizontal .radio,.form-horizontal .checkbox{min-height:27px}.form-horizontal .form-group{margin-left:-15px;margin-right:-15px}@media (min-width:768px){.form-horizontal .control-label{text-align:right;margin-bottom:0;padding-top:7px}}.form-horizontal .has-feedback .form-control-feedback{right:15px}@media (min-width:768px){.form-horizontal .form-group-lg .control-label{padding-top:11px;font-size:18px}}@media (min-width:768px){.form-horizontal .form-group-sm .control-label{padding-top:6px;font-size:12px}}.btn{display:inline-block;margin-bottom:0;font-weight:normal;text-align:center;vertical-align:middle;-ms-touch-action:manipulation;touch-action:manipulation;cursor:pointer;background-image:none;border:1px solid transparent;white-space:nowrap;padding:6px 12px;font-size:14px;line-height:1.42857143;border-radius:0;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none}.btn:focus,.btn:active:focus,.btn.active:focus,.btn.focus,.btn:active.focus,.btn.active.focus{outline:thin 
dotted;outline:5px auto -webkit-focus-ring-color;outline-offset:-2px}.btn:hover,.btn:focus,.btn.focus{color:#333;text-decoration:none}.btn:active,.btn.active{outline:0;background-image:none;-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,0.125);box-shadow:inset 0 3px 5px rgba(0,0,0,0.125)}.btn.disabled,.btn[disabled],fieldset[disabled] .btn{cursor:not-allowed;opacity:.65;filter:alpha(opacity=65);-webkit-box-shadow:none;box-shadow:none}a.btn.disabled,fieldset[disabled] a.btn{pointer-events:none}.btn-default{color:#333;background-color:#fff;border-color:#ccc}.btn-default:focus,.btn-default.focus{color:#333;background-color:#e6e6e6;border-color:#8c8c8c}.btn-default:hover{color:#333;background-color:#e6e6e6;border-color:#adadad}.btn-default:active,.btn-default.active,.open>.dropdown-toggle.btn-default{color:#333;background-color:#e6e6e6;border-color:#adadad}.btn-default:active:hover,.btn-default.active:hover,.open>.dropdown-toggle.btn-default:hover,.btn-default:active:focus,.btn-default.active:focus,.open>.dropdown-toggle.btn-default:focus,.btn-default:active.focus,.btn-default.active.focus,.open>.dropdown-toggle.btn-default.focus{color:#333;background-color:#d4d4d4;border-color:#8c8c8c}.btn-default:active,.btn-default.active,.open>.dropdown-toggle.btn-default{background-image:none}.btn-default.disabled:hover,.btn-default[disabled]:hover,fieldset[disabled] .btn-default:hover,.btn-default.disabled:focus,.btn-default[disabled]:focus,fieldset[disabled] .btn-default:focus,.btn-default.disabled.focus,.btn-default[disabled].focus,fieldset[disabled] .btn-default.focus{background-color:#fff;border-color:#ccc}.btn-default 
.badge{color:#fff;background-color:#333}.btn-primary{color:#fff;background-color:#98e8ff;border-color:#7fe2ff}.btn-primary:focus,.btn-primary.focus{color:#fff;background-color:#65ddff;border-color:#00c5fe}.btn-primary:hover{color:#fff;background-color:#65ddff;border-color:#41d5ff}.btn-primary:active,.btn-primary.active,.open>.dropdown-toggle.btn-primary{color:#fff;background-color:#65ddff;border-color:#41d5ff}.btn-primary:active:hover,.btn-primary.active:hover,.open>.dropdown-toggle.btn-primary:hover,.btn-primary:active:focus,.btn-primary.active:focus,.open>.dropdown-toggle.btn-primary:focus,.btn-primary:active.focus,.btn-primary.active.focus,.open>.dropdown-toggle.btn-primary.focus{color:#fff;background-color:#41d5ff;border-color:#00c5fe}.btn-primary:active,.btn-primary.active,.open>.dropdown-toggle.btn-primary{background-image:none}.btn-primary.disabled:hover,.btn-primary[disabled]:hover,fieldset[disabled] .btn-primary:hover,.btn-primary.disabled:focus,.btn-primary[disabled]:focus,fieldset[disabled] .btn-primary:focus,.btn-primary.disabled.focus,.btn-primary[disabled].focus,fieldset[disabled] .btn-primary.focus{background-color:#98e8ff;border-color:#7fe2ff}.btn-primary 
.badge{color:#98e8ff;background-color:#fff}.btn-success{color:#fff;background-color:#5cb85c;border-color:#4cae4c}.btn-success:focus,.btn-success.focus{color:#fff;background-color:#449d44;border-color:#255625}.btn-success:hover{color:#fff;background-color:#449d44;border-color:#398439}.btn-success:active,.btn-success.active,.open>.dropdown-toggle.btn-success{color:#fff;background-color:#449d44;border-color:#398439}.btn-success:active:hover,.btn-success.active:hover,.open>.dropdown-toggle.btn-success:hover,.btn-success:active:focus,.btn-success.active:focus,.open>.dropdown-toggle.btn-success:focus,.btn-success:active.focus,.btn-success.active.focus,.open>.dropdown-toggle.btn-success.focus{color:#fff;background-color:#398439;border-color:#255625}.btn-success:active,.btn-success.active,.open>.dropdown-toggle.btn-success{background-image:none}.btn-success.disabled:hover,.btn-success[disabled]:hover,fieldset[disabled] .btn-success:hover,.btn-success.disabled:focus,.btn-success[disabled]:focus,fieldset[disabled] .btn-success:focus,.btn-success.disabled.focus,.btn-success[disabled].focus,fieldset[disabled] .btn-success.focus{background-color:#5cb85c;border-color:#4cae4c}.btn-success 
.badge{color:#5cb85c;background-color:#fff}.btn-info{color:#fff;background-color:#5bc0de;border-color:#46b8da}.btn-info:focus,.btn-info.focus{color:#fff;background-color:#31b0d5;border-color:#1b6d85}.btn-info:hover{color:#fff;background-color:#31b0d5;border-color:#269abc}.btn-info:active,.btn-info.active,.open>.dropdown-toggle.btn-info{color:#fff;background-color:#31b0d5;border-color:#269abc}.btn-info:active:hover,.btn-info.active:hover,.open>.dropdown-toggle.btn-info:hover,.btn-info:active:focus,.btn-info.active:focus,.open>.dropdown-toggle.btn-info:focus,.btn-info:active.focus,.btn-info.active.focus,.open>.dropdown-toggle.btn-info.focus{color:#fff;background-color:#269abc;border-color:#1b6d85}.btn-info:active,.btn-info.active,.open>.dropdown-toggle.btn-info{background-image:none}.btn-info.disabled:hover,.btn-info[disabled]:hover,fieldset[disabled] .btn-info:hover,.btn-info.disabled:focus,.btn-info[disabled]:focus,fieldset[disabled] .btn-info:focus,.btn-info.disabled.focus,.btn-info[disabled].focus,fieldset[disabled] .btn-info.focus{background-color:#5bc0de;border-color:#46b8da}.btn-info 
.badge{color:#5bc0de;background-color:#fff}.btn-warning{color:#fff;background-color:#f0ad4e;border-color:#eea236}.btn-warning:focus,.btn-warning.focus{color:#fff;background-color:#ec971f;border-color:#985f0d}.btn-warning:hover{color:#fff;background-color:#ec971f;border-color:#d58512}.btn-warning:active,.btn-warning.active,.open>.dropdown-toggle.btn-warning{color:#fff;background-color:#ec971f;border-color:#d58512}.btn-warning:active:hover,.btn-warning.active:hover,.open>.dropdown-toggle.btn-warning:hover,.btn-warning:active:focus,.btn-warning.active:focus,.open>.dropdown-toggle.btn-warning:focus,.btn-warning:active.focus,.btn-warning.active.focus,.open>.dropdown-toggle.btn-warning.focus{color:#fff;background-color:#d58512;border-color:#985f0d}.btn-warning:active,.btn-warning.active,.open>.dropdown-toggle.btn-warning{background-image:none}.btn-warning.disabled:hover,.btn-warning[disabled]:hover,fieldset[disabled] .btn-warning:hover,.btn-warning.disabled:focus,.btn-warning[disabled]:focus,fieldset[disabled] .btn-warning:focus,.btn-warning.disabled.focus,.btn-warning[disabled].focus,fieldset[disabled] .btn-warning.focus{background-color:#f0ad4e;border-color:#eea236}.btn-warning 
.badge{color:#f0ad4e;background-color:#fff}.btn-danger{color:#fff;background-color:#d9534f;border-color:#d43f3a}.btn-danger:focus,.btn-danger.focus{color:#fff;background-color:#c9302c;border-color:#761c19}.btn-danger:hover{color:#fff;background-color:#c9302c;border-color:#ac2925}.btn-danger:active,.btn-danger.active,.open>.dropdown-toggle.btn-danger{color:#fff;background-color:#c9302c;border-color:#ac2925}.btn-danger:active:hover,.btn-danger.active:hover,.open>.dropdown-toggle.btn-danger:hover,.btn-danger:active:focus,.btn-danger.active:focus,.open>.dropdown-toggle.btn-danger:focus,.btn-danger:active.focus,.btn-danger.active.focus,.open>.dropdown-toggle.btn-danger.focus{color:#fff;background-color:#ac2925;border-color:#761c19}.btn-danger:active,.btn-danger.active,.open>.dropdown-toggle.btn-danger{background-image:none}.btn-danger.disabled:hover,.btn-danger[disabled]:hover,fieldset[disabled] .btn-danger:hover,.btn-danger.disabled:focus,.btn-danger[disabled]:focus,fieldset[disabled] .btn-danger:focus,.btn-danger.disabled.focus,.btn-danger[disabled].focus,fieldset[disabled] .btn-danger.focus{background-color:#d9534f;border-color:#d43f3a}.btn-danger .badge{color:#d9534f;background-color:#fff}.btn-link{color:#98e8ff;font-weight:normal;border-radius:0}.btn-link,.btn-link:active,.btn-link.active,.btn-link[disabled],fieldset[disabled] .btn-link{background-color:transparent;-webkit-box-shadow:none;box-shadow:none}.btn-link,.btn-link:hover,.btn-link:focus,.btn-link:active{border-color:transparent}.btn-link:hover,.btn-link:focus{color:#4cd7ff;text-decoration:underline;background-color:transparent}.btn-link[disabled]:hover,fieldset[disabled] .btn-link:hover,.btn-link[disabled]:focus,fieldset[disabled] .btn-link:focus{color:#222;text-decoration:none}.btn-lg,.btn-group-lg>.btn{padding:10px 16px;font-size:18px;line-height:1.3333333;border-radius:0}.btn-sm,.btn-group-sm>.btn{padding:5px 10px;font-size:12px;line-height:1.5;border-radius:0}.btn-xs,.btn-group-xs>.btn{padding:1px 
5px;font-size:12px;line-height:1.5;border-radius:0}.btn-block{display:block;width:100%}.btn-block+.btn-block{margin-top:5px}input[type="submit"].btn-block,input[type="reset"].btn-block,input[type="button"].btn-block{width:100%}.fade{opacity:0;-webkit-transition:opacity .15s linear;-o-transition:opacity .15s linear;transition:opacity .15s linear}.fade.in{opacity:1}.collapse{display:none}.collapse.in{display:block}tr.collapse.in{display:table-row}tbody.collapse.in{display:table-row-group}.collapsing{position:relative;height:0;overflow:hidden;-webkit-transition-property:height, visibility;-o-transition-property:height, visibility;transition-property:height, visibility;-webkit-transition-duration:.35s;-o-transition-duration:.35s;transition-duration:.35s;-webkit-transition-timing-function:ease;-o-transition-timing-function:ease;transition-timing-function:ease}.caret{display:inline-block;width:0;height:0;margin-left:2px;vertical-align:middle;border-top:4px dashed;border-top:4px solid \9;border-right:4px solid transparent;border-left:4px solid transparent}.dropup,.dropdown{position:relative}.dropdown-toggle:focus{outline:0}.dropdown-menu{position:absolute;top:100%;left:0;z-index:1000;display:none;float:left;min-width:160px;padding:5px 0;margin:2px 0 0;list-style:none;font-size:14px;text-align:left;background-color:#fff;border:1px solid #ccc;border:1px solid rgba(0,0,0,0.15);border-radius:0;-webkit-box-shadow:0 6px 12px rgba(0,0,0,0.175);box-shadow:0 6px 12px rgba(0,0,0,0.175);-webkit-background-clip:padding-box;background-clip:padding-box}.dropdown-menu.pull-right{right:0;left:auto}.dropdown-menu .divider{height:1px;margin:9px 0;overflow:hidden;background-color:#e5e5e5}.dropdown-menu>li>a{display:block;padding:3px 
20px;clear:both;font-weight:normal;line-height:1.42857143;color:#424242;white-space:nowrap}.dropdown-menu>li>a:hover,.dropdown-menu>li>a:focus{text-decoration:none;color:#8c8c8c;background-color:#f5f5f5}.dropdown-menu>.active>a,.dropdown-menu>.active>a:hover,.dropdown-menu>.active>a:focus{color:#fff;text-decoration:none;outline:0;background-color:#418194}.dropdown-menu>.disabled>a,.dropdown-menu>.disabled>a:hover,.dropdown-menu>.disabled>a:focus{color:#222}.dropdown-menu>.disabled>a:hover,.dropdown-menu>.disabled>a:focus{text-decoration:none;background-color:transparent;background-image:none;filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);cursor:not-allowed}.open>.dropdown-menu{display:block}.open>a{outline:0}.dropdown-menu-right{left:auto;right:0}.dropdown-menu-left{left:0;right:auto}.dropdown-header{display:block;padding:3px 20px;font-size:12px;line-height:1.42857143;color:#222;white-space:nowrap}.dropdown-backdrop{position:fixed;left:0;right:0;bottom:0;top:0;z-index:990}.pull-right>.dropdown-menu{right:0;left:auto}.dropup .caret,.navbar-fixed-bottom .dropdown .caret{border-top:0;border-bottom:4px dashed;border-bottom:4px solid \9;content:""}.dropup .dropdown-menu,.navbar-fixed-bottom .dropdown .dropdown-menu{top:auto;bottom:100%;margin-bottom:2px}@media (min-width:768px){.navbar-right .dropdown-menu{left:auto;right:0}.navbar-right .dropdown-menu-left{left:0;right:auto}}.btn-group,.btn-group-vertical{position:relative;display:inline-block;vertical-align:middle}.btn-group>.btn,.btn-group-vertical>.btn{position:relative;float:left}.btn-group>.btn:hover,.btn-group-vertical>.btn:hover,.btn-group>.btn:focus,.btn-group-vertical>.btn:focus,.btn-group>.btn:active,.btn-group-vertical>.btn:active,.btn-group>.btn.active,.btn-group-vertical>.btn.active{z-index:2}.btn-group .btn+.btn,.btn-group .btn+.btn-group,.btn-group .btn-group+.btn,.btn-group .btn-group+.btn-group{margin-left:-1px}.btn-toolbar{margin-left:-5px}.btn-toolbar .btn,.btn-toolbar 
.btn-group,.btn-toolbar .input-group{float:left}.btn-toolbar>.btn,.btn-toolbar>.btn-group,.btn-toolbar>.input-group{margin-left:5px}.btn-group>.btn:not(:first-child):not(:last-child):not(.dropdown-toggle){border-radius:0}.btn-group>.btn:first-child{margin-left:0}.btn-group>.btn:first-child:not(:last-child):not(.dropdown-toggle){border-bottom-right-radius:0;border-top-right-radius:0}.btn-group>.btn:last-child:not(:first-child),.btn-group>.dropdown-toggle:not(:first-child){border-bottom-left-radius:0;border-top-left-radius:0}.btn-group>.btn-group{float:left}.btn-group>.btn-group:not(:first-child):not(:last-child)>.btn{border-radius:0}.btn-group>.btn-group:first-child:not(:last-child)>.btn:last-child,.btn-group>.btn-group:first-child:not(:last-child)>.dropdown-toggle{border-bottom-right-radius:0;border-top-right-radius:0}.btn-group>.btn-group:last-child:not(:first-child)>.btn:first-child{border-bottom-left-radius:0;border-top-left-radius:0}.btn-group .dropdown-toggle:active,.btn-group.open .dropdown-toggle{outline:0}.btn-group>.btn+.dropdown-toggle{padding-left:8px;padding-right:8px}.btn-group>.btn-lg+.dropdown-toggle{padding-left:12px;padding-right:12px}.btn-group.open .dropdown-toggle{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,0.125);box-shadow:inset 0 3px 5px rgba(0,0,0,0.125)}.btn-group.open .dropdown-toggle.btn-link{-webkit-box-shadow:none;box-shadow:none}.btn .caret{margin-left:0}.btn-lg .caret{border-width:5px 5px 0;border-bottom-width:0}.dropup .btn-lg .caret{border-width:0 5px 
5px}.btn-group-vertical>.btn,.btn-group-vertical>.btn-group,.btn-group-vertical>.btn-group>.btn{display:block;float:none;width:100%;max-width:100%}.btn-group-vertical>.btn-group>.btn{float:none}.btn-group-vertical>.btn+.btn,.btn-group-vertical>.btn+.btn-group,.btn-group-vertical>.btn-group+.btn,.btn-group-vertical>.btn-group+.btn-group{margin-top:-1px;margin-left:0}.btn-group-vertical>.btn:not(:first-child):not(:last-child){border-radius:0}.btn-group-vertical>.btn:first-child:not(:last-child){border-top-right-radius:0;border-top-left-radius:0;border-bottom-right-radius:0;border-bottom-left-radius:0}.btn-group-vertical>.btn:last-child:not(:first-child){border-top-right-radius:0;border-top-left-radius:0;border-bottom-right-radius:0;border-bottom-left-radius:0}.btn-group-vertical>.btn-group:not(:first-child):not(:last-child)>.btn{border-radius:0}.btn-group-vertical>.btn-group:first-child:not(:last-child)>.btn:last-child,.btn-group-vertical>.btn-group:first-child:not(:last-child)>.dropdown-toggle{border-bottom-right-radius:0;border-bottom-left-radius:0}.btn-group-vertical>.btn-group:last-child:not(:first-child)>.btn:first-child{border-top-right-radius:0;border-top-left-radius:0}.btn-group-justified{display:table;width:100%;table-layout:fixed;border-collapse:separate}.btn-group-justified>.btn,.btn-group-justified>.btn-group{float:none;display:table-cell;width:1%}.btn-group-justified>.btn-group .btn{width:100%}.btn-group-justified>.btn-group .dropdown-menu{left:auto}[data-toggle="buttons"]>.btn input[type="radio"],[data-toggle="buttons"]>.btn-group>.btn input[type="radio"],[data-toggle="buttons"]>.btn input[type="checkbox"],[data-toggle="buttons"]>.btn-group>.btn input[type="checkbox"]{position:absolute;clip:rect(0, 0, 0, 0);pointer-events:none}.input-group{position:relative;display:table;border-collapse:separate}.input-group[class*="col-"]{float:none;padding-left:0;padding-right:0}.input-group 
.form-control{position:relative;z-index:2;float:left;width:100%;margin-bottom:0}.input-group .form-control:focus{z-index:3}.input-group-lg>.form-control,.input-group-lg>.input-group-addon,.input-group-lg>.input-group-btn>.btn{height:46px;padding:10px 16px;font-size:18px;line-height:1.3333333;border-radius:0}select.input-group-lg>.form-control,select.input-group-lg>.input-group-addon,select.input-group-lg>.input-group-btn>.btn{height:46px;line-height:46px}textarea.input-group-lg>.form-control,textarea.input-group-lg>.input-group-addon,textarea.input-group-lg>.input-group-btn>.btn,select[multiple].input-group-lg>.form-control,select[multiple].input-group-lg>.input-group-addon,select[multiple].input-group-lg>.input-group-btn>.btn{height:auto}.input-group-sm>.form-control,.input-group-sm>.input-group-addon,.input-group-sm>.input-group-btn>.btn{height:30px;padding:5px 10px;font-size:12px;line-height:1.5;border-radius:0}select.input-group-sm>.form-control,select.input-group-sm>.input-group-addon,select.input-group-sm>.input-group-btn>.btn{height:30px;line-height:30px}textarea.input-group-sm>.form-control,textarea.input-group-sm>.input-group-addon,textarea.input-group-sm>.input-group-btn>.btn,select[multiple].input-group-sm>.form-control,select[multiple].input-group-sm>.input-group-addon,select[multiple].input-group-sm>.input-group-btn>.btn{height:auto}.input-group-addon,.input-group-btn,.input-group .form-control{display:table-cell}.input-group-addon:not(:first-child):not(:last-child),.input-group-btn:not(:first-child):not(:last-child),.input-group .form-control:not(:first-child):not(:last-child){border-radius:0}.input-group-addon,.input-group-btn{width:1%;white-space:nowrap;vertical-align:middle}.input-group-addon{padding:6px 12px;font-size:14px;font-weight:normal;line-height:1;color:#000;text-align:center;background-color:#555;border:1px solid #ccc;border-radius:0}.input-group-addon.input-sm{padding:5px 
10px;font-size:12px;border-radius:0}.input-group-addon.input-lg{padding:10px 16px;font-size:18px;border-radius:0}.input-group-addon input[type="radio"],.input-group-addon input[type="checkbox"]{margin-top:0}.input-group .form-control:first-child,.input-group-addon:first-child,.input-group-btn:first-child>.btn,.input-group-btn:first-child>.btn-group>.btn,.input-group-btn:first-child>.dropdown-toggle,.input-group-btn:last-child>.btn:not(:last-child):not(.dropdown-toggle),.input-group-btn:last-child>.btn-group:not(:last-child)>.btn{border-bottom-right-radius:0;border-top-right-radius:0}.input-group-addon:first-child{border-right:0}.input-group .form-control:last-child,.input-group-addon:last-child,.input-group-btn:last-child>.btn,.input-group-btn:last-child>.btn-group>.btn,.input-group-btn:last-child>.dropdown-toggle,.input-group-btn:first-child>.btn:not(:first-child),.input-group-btn:first-child>.btn-group:not(:first-child)>.btn{border-bottom-left-radius:0;border-top-left-radius:0}.input-group-addon:last-child{border-left:0}.input-group-btn{position:relative;font-size:0;white-space:nowrap}.input-group-btn>.btn{position:relative}.input-group-btn>.btn+.btn{margin-left:-1px}.input-group-btn>.btn:hover,.input-group-btn>.btn:focus,.input-group-btn>.btn:active{z-index:2}.input-group-btn:first-child>.btn,.input-group-btn:first-child>.btn-group{margin-right:-1px}.input-group-btn:last-child>.btn,.input-group-btn:last-child>.btn-group{z-index:2;margin-left:-1px}.nav{margin-bottom:0;padding-left:0;list-style:none}.nav>li{position:relative;display:block}.nav>li>a{position:relative;display:block;padding:10px 15px}.nav>li>a:hover,.nav>li>a:focus{text-decoration:none;background-color:#555}.nav>li.disabled>a{color:#222}.nav>li.disabled>a:hover,.nav>li.disabled>a:focus{color:#222;text-decoration:none;background-color:transparent;cursor:not-allowed}.nav .open>a,.nav .open>a:hover,.nav .open>a:focus{background-color:#555;border-color:#98e8ff}.nav .nav-divider{height:1px;margin:9px 
0;overflow:hidden;background-color:#e5e5e5}.nav>li>a>img{max-width:none}.nav-tabs{border-bottom:1px solid #ddd}.nav-tabs>li{float:left;margin-bottom:-1px}.nav-tabs>li>a{margin-right:2px;line-height:1.42857143;border:1px solid transparent;border-radius:0 0 0 0}.nav-tabs>li>a:hover{border-color:#555 #555 #ddd}.nav-tabs>li.active>a,.nav-tabs>li.active>a:hover,.nav-tabs>li.active>a:focus{color:#eee;background-color:#252830;border:1px solid #ddd;border-bottom-color:transparent;cursor:default}.nav-tabs.nav-justified{width:100%;border-bottom:0}.nav-tabs.nav-justified>li{float:none}.nav-tabs.nav-justified>li>a{text-align:center;margin-bottom:5px}.nav-tabs.nav-justified>.dropdown .dropdown-menu{top:auto;left:auto}@media (min-width:768px){.nav-tabs.nav-justified>li{display:table-cell;width:1%}.nav-tabs.nav-justified>li>a{margin-bottom:0}}.nav-tabs.nav-justified>li>a{margin-right:0;border-radius:0}.nav-tabs.nav-justified>.active>a,.nav-tabs.nav-justified>.active>a:hover,.nav-tabs.nav-justified>.active>a:focus{border:1px solid #ddd}@media (min-width:768px){.nav-tabs.nav-justified>li>a{border-bottom:1px solid #ddd;border-radius:0 0 0 0}.nav-tabs.nav-justified>.active>a,.nav-tabs.nav-justified>.active>a:hover,.nav-tabs.nav-justified>.active>a:focus{border-bottom-color:#252830}}.nav-pills>li{float:left}.nav-pills>li>a{border-radius:0}.nav-pills>li+li{margin-left:2px}.nav-pills>li.active>a,.nav-pills>li.active>a:hover,.nav-pills>li.active>a:focus{color:#fff;background-color:#98e8ff}.nav-stacked>li{float:none}.nav-stacked>li+li{margin-top:2px;margin-left:0}.nav-justified{width:100%}.nav-justified>li{float:none}.nav-justified>li>a{text-align:center;margin-bottom:5px}.nav-justified>.dropdown .dropdown-menu{top:auto;left:auto}@media 
(min-width:768px){.nav-justified>li{display:table-cell;width:1%}.nav-justified>li>a{margin-bottom:0}}.nav-tabs-justified{border-bottom:0}.nav-tabs-justified>li>a{margin-right:0;border-radius:0}.nav-tabs-justified>.active>a,.nav-tabs-justified>.active>a:hover,.nav-tabs-justified>.active>a:focus{border:1px solid #ddd}@media (min-width:768px){.nav-tabs-justified>li>a{border-bottom:1px solid #ddd;border-radius:0 0 0 0}.nav-tabs-justified>.active>a,.nav-tabs-justified>.active>a:hover,.nav-tabs-justified>.active>a:focus{border-bottom-color:#252830}}.tab-content>.tab-pane{display:none}.tab-content>.active{display:block}.nav-tabs .dropdown-menu{margin-top:-1px;border-top-right-radius:0;border-top-left-radius:0}.navbar{position:relative;min-height:50px;margin-bottom:20px;border:1px solid transparent}@media (min-width:768px){.navbar{border-radius:0}}@media (min-width:768px){.navbar-header{float:left}}.navbar-collapse{overflow-x:visible;padding-right:15px;padding-left:15px;border-top:1px solid transparent;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,0.1);box-shadow:inset 0 1px 0 rgba(255,255,255,0.1);-webkit-overflow-scrolling:touch}.navbar-collapse.in{overflow-y:auto}@media (min-width:768px){.navbar-collapse{width:auto;border-top:0;-webkit-box-shadow:none;box-shadow:none}.navbar-collapse.collapse{display:block !important;height:auto !important;padding-bottom:0;overflow:visible !important}.navbar-collapse.in{overflow-y:visible}.navbar-fixed-top .navbar-collapse,.navbar-static-top .navbar-collapse,.navbar-fixed-bottom .navbar-collapse{padding-left:0;padding-right:0}}.navbar-fixed-top .navbar-collapse,.navbar-fixed-bottom .navbar-collapse{max-height:340px}@media (max-device-width:480px) and (orientation:landscape){.navbar-fixed-top .navbar-collapse,.navbar-fixed-bottom .navbar-collapse{max-height:200px}}.container>.navbar-header,.container-fluid>.navbar-header,.container>.navbar-collapse,.container-fluid>.navbar-collapse{margin-right:-15px;margin-left:-15px}@media 
(min-width:768px){.container>.navbar-header,.container-fluid>.navbar-header,.container>.navbar-collapse,.container-fluid>.navbar-collapse{margin-right:0;margin-left:0}}.navbar-static-top{z-index:1000;border-width:0 0 1px}@media (min-width:768px){.navbar-static-top{border-radius:0}}.navbar-fixed-top,.navbar-fixed-bottom{position:fixed;right:0;left:0;z-index:1030}@media (min-width:768px){.navbar-fixed-top,.navbar-fixed-bottom{border-radius:0}}.navbar-fixed-top{top:0;border-width:0 0 1px}.navbar-fixed-bottom{bottom:0;margin-bottom:0;border-width:1px 0 0}.navbar-brand{float:left;padding:15px 15px;font-size:18px;line-height:20px;height:50px}.navbar-brand:hover,.navbar-brand:focus{text-decoration:none}.navbar-brand>img{display:block}@media (min-width:768px){.navbar>.container .navbar-brand,.navbar>.container-fluid .navbar-brand{margin-left:-15px}}.navbar-toggle{position:relative;float:right;margin-right:15px;padding:9px 10px;margin-top:8px;margin-bottom:8px;background-color:transparent;background-image:none;border:1px solid transparent;border-radius:0}.navbar-toggle:focus{outline:0}.navbar-toggle .icon-bar{display:block;width:22px;height:2px;border-radius:1px}.navbar-toggle .icon-bar+.icon-bar{margin-top:4px}@media (min-width:768px){.navbar-toggle{display:none}}.navbar-nav{margin:7.5px -15px}.navbar-nav>li>a{padding-top:10px;padding-bottom:10px;line-height:20px}@media (max-width:767px){.navbar-nav .open .dropdown-menu{position:static;float:none;width:auto;margin-top:0;background-color:transparent;border:0;-webkit-box-shadow:none;box-shadow:none}.navbar-nav .open .dropdown-menu>li>a,.navbar-nav .open .dropdown-menu .dropdown-header{padding:5px 15px 5px 25px}.navbar-nav .open .dropdown-menu>li>a{line-height:20px}.navbar-nav .open .dropdown-menu>li>a:hover,.navbar-nav .open .dropdown-menu>li>a:focus{background-image:none}}@media 
(min-width:768px){.navbar-nav{float:left;margin:0}.navbar-nav>li{float:left}.navbar-nav>li>a{padding-top:15px;padding-bottom:15px}}.navbar-form{margin-left:-15px;margin-right:-15px;padding:10px 15px;border-top:1px solid transparent;border-bottom:1px solid transparent;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,0.1),0 1px 0 rgba(255,255,255,0.1);box-shadow:inset 0 1px 0 rgba(255,255,255,0.1),0 1px 0 rgba(255,255,255,0.1);margin-top:8px;margin-bottom:8px}@media (min-width:768px){.navbar-form .form-group{display:inline-block;margin-bottom:0;vertical-align:middle}.navbar-form .form-control{display:inline-block;width:auto;vertical-align:middle}.navbar-form .form-control-static{display:inline-block}.navbar-form .input-group{display:inline-table;vertical-align:middle}.navbar-form .input-group .input-group-addon,.navbar-form .input-group .input-group-btn,.navbar-form .input-group .form-control{width:auto}.navbar-form .input-group>.form-control{width:100%}.navbar-form .control-label{margin-bottom:0;vertical-align:middle}.navbar-form .radio,.navbar-form .checkbox{display:inline-block;margin-top:0;margin-bottom:0;vertical-align:middle}.navbar-form .radio label,.navbar-form .checkbox label{padding-left:0}.navbar-form .radio input[type="radio"],.navbar-form .checkbox input[type="checkbox"]{position:relative;margin-left:0}.navbar-form .has-feedback .form-control-feedback{top:0}}@media (max-width:767px){.navbar-form .form-group{margin-bottom:5px}.navbar-form .form-group:last-child{margin-bottom:0}}@media (min-width:768px){.navbar-form{width:auto;border:0;margin-left:0;margin-right:0;padding-top:0;padding-bottom:0;-webkit-box-shadow:none;box-shadow:none}}.navbar-nav>li>.dropdown-menu{margin-top:0;border-top-right-radius:0;border-top-left-radius:0}.navbar-fixed-bottom 
.navbar-nav>li>.dropdown-menu{margin-bottom:0;border-top-right-radius:0;border-top-left-radius:0;border-bottom-right-radius:0;border-bottom-left-radius:0}.navbar-btn{margin-top:8px;margin-bottom:8px}.navbar-btn.btn-sm{margin-top:10px;margin-bottom:10px}.navbar-btn.btn-xs{margin-top:14px;margin-bottom:14px}.navbar-text{margin-top:15px;margin-bottom:15px}@media (min-width:768px){.navbar-text{float:left;margin-left:15px;margin-right:15px}}@media (min-width:768px){.navbar-left{float:left !important}.navbar-right{float:right !important;margin-right:-15px}.navbar-right~.navbar-right{margin-right:0}}.navbar-default{background-color:#f8f8f8;border-color:#e7e7e7}.navbar-default .navbar-brand{color:#777}.navbar-default .navbar-brand:hover,.navbar-default .navbar-brand:focus{color:#5e5e5e;background-color:transparent}.navbar-default .navbar-text{color:#777}.navbar-default .navbar-nav>li>a{color:#777}.navbar-default .navbar-nav>li>a:hover,.navbar-default .navbar-nav>li>a:focus{color:#333;background-color:transparent}.navbar-default .navbar-nav>.active>a,.navbar-default .navbar-nav>.active>a:hover,.navbar-default .navbar-nav>.active>a:focus{color:#555;background-color:#e7e7e7}.navbar-default .navbar-nav>.disabled>a,.navbar-default .navbar-nav>.disabled>a:hover,.navbar-default .navbar-nav>.disabled>a:focus{color:#ccc;background-color:transparent}.navbar-default .navbar-toggle{border-color:#ddd}.navbar-default .navbar-toggle:hover,.navbar-default .navbar-toggle:focus{background-color:#ddd}.navbar-default .navbar-toggle .icon-bar{background-color:#888}.navbar-default .navbar-collapse,.navbar-default .navbar-form{border-color:#e7e7e7}.navbar-default .navbar-nav>.open>a,.navbar-default .navbar-nav>.open>a:hover,.navbar-default .navbar-nav>.open>a:focus{background-color:#e7e7e7;color:#555}@media (max-width:767px){.navbar-default .navbar-nav .open .dropdown-menu>li>a{color:#777}.navbar-default .navbar-nav .open .dropdown-menu>li>a:hover,.navbar-default .navbar-nav .open 
.dropdown-menu>li>a:focus{color:#333;background-color:transparent}/* Navbar color themes (.navbar-default light / .navbar-inverse dark), breadcrumb, pagination, pager, labels, badges, jumbotron header */.navbar-default .navbar-nav .open .dropdown-menu>.active>a,.navbar-default .navbar-nav .open .dropdown-menu>.active>a:hover,.navbar-default .navbar-nav .open .dropdown-menu>.active>a:focus{color:#555;background-color:#e7e7e7}.navbar-default .navbar-nav .open .dropdown-menu>.disabled>a,.navbar-default .navbar-nav .open .dropdown-menu>.disabled>a:hover,.navbar-default .navbar-nav .open .dropdown-menu>.disabled>a:focus{color:#ccc;background-color:transparent}}.navbar-default .navbar-link{color:#777}.navbar-default .navbar-link:hover{color:#333}.navbar-default .btn-link{color:#777}.navbar-default .btn-link:hover,.navbar-default .btn-link:focus{color:#333}.navbar-default .btn-link[disabled]:hover,fieldset[disabled] .navbar-default .btn-link:hover,.navbar-default .btn-link[disabled]:focus,fieldset[disabled] .navbar-default .btn-link:focus{color:#ccc}.navbar-inverse{background-color:#222;border-color:#080808}.navbar-inverse .navbar-brand{color:#494949}.navbar-inverse .navbar-brand:hover,.navbar-inverse .navbar-brand:focus{color:#fff;background-color:transparent}.navbar-inverse .navbar-text{color:#494949}.navbar-inverse .navbar-nav>li>a{color:#494949}.navbar-inverse .navbar-nav>li>a:hover,.navbar-inverse .navbar-nav>li>a:focus{color:#fff;background-color:transparent}.navbar-inverse .navbar-nav>.active>a,.navbar-inverse .navbar-nav>.active>a:hover,.navbar-inverse .navbar-nav>.active>a:focus{color:#fff;background-color:#080808}.navbar-inverse .navbar-nav>.disabled>a,.navbar-inverse .navbar-nav>.disabled>a:hover,.navbar-inverse .navbar-nav>.disabled>a:focus{color:#444;background-color:transparent}.navbar-inverse .navbar-toggle{border-color:#333}.navbar-inverse .navbar-toggle:hover,.navbar-inverse .navbar-toggle:focus{background-color:#333}.navbar-inverse .navbar-toggle .icon-bar{background-color:#fff}.navbar-inverse .navbar-collapse,.navbar-inverse .navbar-form{border-color:#101010}.navbar-inverse 
.navbar-nav>.open>a,.navbar-inverse .navbar-nav>.open>a:hover,.navbar-inverse .navbar-nav>.open>a:focus{background-color:#080808;color:#fff}@media (max-width:767px){.navbar-inverse .navbar-nav .open .dropdown-menu>.dropdown-header{border-color:#080808}.navbar-inverse .navbar-nav .open .dropdown-menu .divider{background-color:#080808}.navbar-inverse .navbar-nav .open .dropdown-menu>li>a{color:#494949}.navbar-inverse .navbar-nav .open .dropdown-menu>li>a:hover,.navbar-inverse .navbar-nav .open .dropdown-menu>li>a:focus{color:#fff;background-color:transparent}.navbar-inverse .navbar-nav .open .dropdown-menu>.active>a,.navbar-inverse .navbar-nav .open .dropdown-menu>.active>a:hover,.navbar-inverse .navbar-nav .open .dropdown-menu>.active>a:focus{color:#fff;background-color:#080808}.navbar-inverse .navbar-nav .open .dropdown-menu>.disabled>a,.navbar-inverse .navbar-nav .open .dropdown-menu>.disabled>a:hover,.navbar-inverse .navbar-nav .open .dropdown-menu>.disabled>a:focus{color:#444;background-color:transparent}}.navbar-inverse .navbar-link{color:#494949}.navbar-inverse .navbar-link:hover{color:#fff}.navbar-inverse .btn-link{color:#494949}.navbar-inverse .btn-link:hover,.navbar-inverse .btn-link:focus{color:#fff}.navbar-inverse .btn-link[disabled]:hover,fieldset[disabled] .navbar-inverse .btn-link:hover,.navbar-inverse .btn-link[disabled]:focus,fieldset[disabled] .navbar-inverse .btn-link:focus{color:#444}.breadcrumb{padding:8px 15px;margin-bottom:20px;list-style:none;background-color:#f5f5f5;border-radius:0}.breadcrumb>li{display:inline-block}.breadcrumb>li+li:before{content:"/\00a0";padding:0 5px;color:#ccc}.breadcrumb>.active{color:#222}.pagination{display:inline-block;padding-left:0;margin:20px 0;border-radius:0}.pagination>li{display:inline}.pagination>li>a,.pagination>li>span{position:relative;float:left;padding:6px 12px;line-height:1.42857143;text-decoration:none;color:#98e8ff;background-color:#fff;border:1px solid 
#ddd;margin-left:-1px}.pagination>li:first-child>a,.pagination>li:first-child>span{margin-left:0;border-bottom-left-radius:0;border-top-left-radius:0}.pagination>li:last-child>a,.pagination>li:last-child>span{border-bottom-right-radius:0;border-top-right-radius:0}.pagination>li>a:hover,.pagination>li>span:hover,.pagination>li>a:focus,.pagination>li>span:focus{z-index:2;color:#4cd7ff;background-color:#555;border-color:#ddd}.pagination>.active>a,.pagination>.active>span,.pagination>.active>a:hover,.pagination>.active>span:hover,.pagination>.active>a:focus,.pagination>.active>span:focus{z-index:3;color:#fff;background-color:#98e8ff;border-color:#98e8ff;cursor:default}.pagination>.disabled>span,.pagination>.disabled>span:hover,.pagination>.disabled>span:focus,.pagination>.disabled>a,.pagination>.disabled>a:hover,.pagination>.disabled>a:focus{color:#222;background-color:#fff;border-color:#ddd;cursor:not-allowed}.pagination-lg>li>a,.pagination-lg>li>span{padding:10px 16px;font-size:18px;line-height:1.3333333}.pagination-lg>li:first-child>a,.pagination-lg>li:first-child>span{border-bottom-left-radius:0;border-top-left-radius:0}.pagination-lg>li:last-child>a,.pagination-lg>li:last-child>span{border-bottom-right-radius:0;border-top-right-radius:0}.pagination-sm>li>a,.pagination-sm>li>span{padding:5px 10px;font-size:12px;line-height:1.5}.pagination-sm>li:first-child>a,.pagination-sm>li:first-child>span{border-bottom-left-radius:0;border-top-left-radius:0}.pagination-sm>li:last-child>a,.pagination-sm>li:last-child>span{border-bottom-right-radius:0;border-top-right-radius:0}.pager{padding-left:0;margin:20px 0;list-style:none;text-align:center}.pager li{display:inline}.pager li>a,.pager li>span{display:inline-block;padding:5px 14px;background-color:#fff;border:1px solid #ddd;border-radius:15px}.pager li>a:hover,.pager li>a:focus{text-decoration:none;background-color:#555}.pager .next>a,.pager .next>span{float:right}.pager .previous>a,.pager .previous>span{float:left}.pager 
.disabled>a,.pager .disabled>a:hover,.pager .disabled>a:focus,.pager .disabled>span{color:#222;background-color:#fff;cursor:not-allowed}.label{display:inline;padding:.2em .6em .3em;font-size:75%;font-weight:bold;line-height:1;color:#fff;text-align:center;white-space:nowrap;vertical-align:baseline;border-radius:.25em}a.label:hover,a.label:focus{color:#fff;text-decoration:none;cursor:pointer}.label:empty{display:none}.btn .label{position:relative;top:-1px}.label-default{background-color:#222}.label-default[href]:hover,.label-default[href]:focus{background-color:#090909}.label-primary{background-color:#98e8ff}.label-primary[href]:hover,.label-primary[href]:focus{background-color:#65ddff}.label-success{background-color:#5cb85c}.label-success[href]:hover,.label-success[href]:focus{background-color:#449d44}.label-info{background-color:#5bc0de}.label-info[href]:hover,.label-info[href]:focus{background-color:#31b0d5}.label-warning{background-color:#f0ad4e}.label-warning[href]:hover,.label-warning[href]:focus{background-color:#ec971f}.label-danger{background-color:#d9534f}.label-danger[href]:hover,.label-danger[href]:focus{background-color:#c9302c}.badge{display:inline-block;min-width:10px;padding:3px 7px;font-size:12px;font-weight:bold;color:#fff;line-height:1;vertical-align:middle;white-space:nowrap;text-align:center;background-color:#222;border-radius:10px}.badge:empty{display:none}.btn .badge{position:relative;top:-1px}.btn-xs .badge,.btn-group-xs>.btn .badge{top:0;padding:1px 5px}a.badge:hover,a.badge:focus{color:#fff;text-decoration:none;cursor:pointer}.list-group-item.active>.badge,.nav-pills>.active>a>.badge{color:#98e8ff;background-color:#fff}.list-group-item>.badge{float:right}.list-group-item>.badge+.badge{margin-right:5px}.nav-pills>li>a>.badge{margin-left:3px}.jumbotron{padding-top:30px;padding-bottom:30px;margin-bottom:30px;color:inherit;background-color:#555}.jumbotron h1,.jumbotron .h1{color:inherit}.jumbotron 
p{margin-bottom:15px;font-size:21px;font-weight:200}/* Jumbotron (with responsive sizing), thumbnails, alert boxes, and striped/animated progress bars */.jumbotron>hr{border-top-color:#3c3c3c}.container .jumbotron,.container-fluid .jumbotron{border-radius:0;padding-left:15px;padding-right:15px}.jumbotron .container{max-width:100%}@media screen and (min-width:768px){.jumbotron{padding-top:48px;padding-bottom:48px}.container .jumbotron,.container-fluid .jumbotron{padding-left:60px;padding-right:60px}.jumbotron h1,.jumbotron .h1{font-size:63px}}.thumbnail{display:block;padding:4px;margin-bottom:20px;line-height:1.42857143;background-color:#252830;border:1px solid #ddd;border-radius:0;-webkit-transition:border .2s ease-in-out;-o-transition:border .2s ease-in-out;transition:border .2s ease-in-out}.thumbnail>img,.thumbnail a>img{margin-left:auto;margin-right:auto}a.thumbnail:hover,a.thumbnail:focus,a.thumbnail.active{border-color:#98e8ff}.thumbnail .caption{padding:9px;color:#999}.alert{padding:15px;margin-bottom:20px;border:1px solid transparent;border-radius:0}.alert h4{margin-top:0;color:inherit}.alert .alert-link{font-weight:bold}.alert>p,.alert>ul{margin-bottom:0}.alert>p+p{margin-top:5px}.alert-dismissable,.alert-dismissible{padding-right:35px}.alert-dismissable .close,.alert-dismissible .close{position:relative;top:-2px;right:-21px;color:inherit}.alert-success{background-color:#dff0d8;border-color:#d6e9c6;color:#3c763d}.alert-success hr{border-top-color:#c9e2b3}.alert-success .alert-link{color:#2b542c}.alert-info{background-color:#d9edf7;border-color:#bce8f1;color:#31708f}.alert-info hr{border-top-color:#a6e1ec}.alert-info .alert-link{color:#245269}.alert-warning{background-color:#fcf8e3;border-color:#faebcc;color:#8a6d3b}.alert-warning hr{border-top-color:#f7e1b5}.alert-warning .alert-link{color:#66512c}.alert-danger{background-color:#f2dede;border-color:#ebccd1;color:#a94442}.alert-danger hr{border-top-color:#e4b9c0}.alert-danger .alert-link{color:#843534}@-webkit-keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 
0}}@-o-keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}@keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}.progress{overflow:hidden;height:20px;margin-bottom:20px;background-color:#f5f5f5;border-radius:0;-webkit-box-shadow:inset 0 1px 2px rgba(0,0,0,0.1);box-shadow:inset 0 1px 2px rgba(0,0,0,0.1)}.progress-bar{float:left;width:0%;height:100%;font-size:12px;line-height:20px;color:#fff;text-align:center;background-color:#98e8ff;-webkit-box-shadow:inset 0 -1px 0 rgba(0,0,0,0.15);box-shadow:inset 0 -1px 0 rgba(0,0,0,0.15);-webkit-transition:width .6s ease;-o-transition:width .6s ease;transition:width .6s ease}.progress-striped .progress-bar,.progress-bar-striped{background-image:-webkit-linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent);background-image:-o-linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent);background-image:linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent);-webkit-background-size:40px 40px;background-size:40px 40px}.progress.active .progress-bar,.progress-bar.active{-webkit-animation:progress-bar-stripes 2s linear infinite;-o-animation:progress-bar-stripes 2s linear infinite;animation:progress-bar-stripes 2s linear infinite}.progress-bar-success{background-color:#5cb85c}.progress-striped .progress-bar-success{background-image:-webkit-linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent);background-image:-o-linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, 
rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent);background-image:linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent)}.progress-bar-info{background-color:#5bc0de}.progress-striped .progress-bar-info{background-image:-webkit-linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent);background-image:-o-linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent);background-image:linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent)}.progress-bar-warning{background-color:#f0ad4e}.progress-striped .progress-bar-warning{background-image:-webkit-linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent);background-image:-o-linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent);background-image:linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent)}.progress-bar-danger{background-color:#d9534f}.progress-striped .progress-bar-danger{background-image:-webkit-linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent);background-image:-o-linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, 
rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent);background-image:linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent)}/* Media object layout and list groups, incl. active/disabled states and contextual (success/info/warning/danger) variants */.media{margin-top:15px}.media:first-child{margin-top:0}.media,.media-body{zoom:1;overflow:hidden}.media-body{width:10000px}.media-object{display:block}.media-object.img-thumbnail{max-width:none}.media-right,.media>.pull-right{padding-left:10px}.media-left,.media>.pull-left{padding-right:10px}.media-left,.media-right,.media-body{display:table-cell;vertical-align:top}.media-middle{vertical-align:middle}.media-bottom{vertical-align:bottom}.media-heading{margin-top:0;margin-bottom:5px}.media-list{padding-left:0;list-style:none}.list-group{margin-bottom:20px;padding-left:0}.list-group-item{position:relative;display:block;padding:10px 15px;margin-bottom:-1px;background-color:#fff;border:1px solid #ddd}.list-group-item:first-child{border-top-right-radius:0;border-top-left-radius:0}.list-group-item:last-child{margin-bottom:0;border-bottom-right-radius:0;border-bottom-left-radius:0}a.list-group-item,button.list-group-item{color:#555}a.list-group-item .list-group-item-heading,button.list-group-item .list-group-item-heading{color:#333}a.list-group-item:hover,button.list-group-item:hover,a.list-group-item:focus,button.list-group-item:focus{text-decoration:none;color:#555;background-color:#f5f5f5}button.list-group-item{width:100%;text-align:left}.list-group-item.disabled,.list-group-item.disabled:hover,.list-group-item.disabled:focus{background-color:#555;color:#222;cursor:not-allowed}.list-group-item.disabled .list-group-item-heading,.list-group-item.disabled:hover .list-group-item-heading,.list-group-item.disabled:focus .list-group-item-heading{color:inherit}.list-group-item.disabled .list-group-item-text,.list-group-item.disabled:hover .list-group-item-text,.list-group-item.disabled:focus 
.list-group-item-text{color:#222}.list-group-item.active,.list-group-item.active:hover,.list-group-item.active:focus{z-index:2;color:#fff;background-color:#98e8ff;border-color:#98e8ff}.list-group-item.active .list-group-item-heading,.list-group-item.active:hover .list-group-item-heading,.list-group-item.active:focus .list-group-item-heading,.list-group-item.active .list-group-item-heading>small,.list-group-item.active:hover .list-group-item-heading>small,.list-group-item.active:focus .list-group-item-heading>small,.list-group-item.active .list-group-item-heading>.small,.list-group-item.active:hover .list-group-item-heading>.small,.list-group-item.active:focus .list-group-item-heading>.small{color:inherit}.list-group-item.active .list-group-item-text,.list-group-item.active:hover .list-group-item-text,.list-group-item.active:focus .list-group-item-text{color:#fff}.list-group-item-success{color:#3c763d;background-color:#dff0d8}a.list-group-item-success,button.list-group-item-success{color:#3c763d}a.list-group-item-success .list-group-item-heading,button.list-group-item-success .list-group-item-heading{color:inherit}a.list-group-item-success:hover,button.list-group-item-success:hover,a.list-group-item-success:focus,button.list-group-item-success:focus{color:#3c763d;background-color:#d0e9c6}a.list-group-item-success.active,button.list-group-item-success.active,a.list-group-item-success.active:hover,button.list-group-item-success.active:hover,a.list-group-item-success.active:focus,button.list-group-item-success.active:focus{color:#fff;background-color:#3c763d;border-color:#3c763d}.list-group-item-info{color:#31708f;background-color:#d9edf7}a.list-group-item-info,button.list-group-item-info{color:#31708f}a.list-group-item-info .list-group-item-heading,button.list-group-item-info 
.list-group-item-heading{color:inherit}a.list-group-item-info:hover,button.list-group-item-info:hover,a.list-group-item-info:focus,button.list-group-item-info:focus{color:#31708f;background-color:#c4e3f3}a.list-group-item-info.active,button.list-group-item-info.active,a.list-group-item-info.active:hover,button.list-group-item-info.active:hover,a.list-group-item-info.active:focus,button.list-group-item-info.active:focus{color:#fff;background-color:#31708f;border-color:#31708f}.list-group-item-warning{color:#8a6d3b;background-color:#fcf8e3}a.list-group-item-warning,button.list-group-item-warning{color:#8a6d3b}a.list-group-item-warning .list-group-item-heading,button.list-group-item-warning .list-group-item-heading{color:inherit}a.list-group-item-warning:hover,button.list-group-item-warning:hover,a.list-group-item-warning:focus,button.list-group-item-warning:focus{color:#8a6d3b;background-color:#faf2cc}a.list-group-item-warning.active,button.list-group-item-warning.active,a.list-group-item-warning.active:hover,button.list-group-item-warning.active:hover,a.list-group-item-warning.active:focus,button.list-group-item-warning.active:focus{color:#fff;background-color:#8a6d3b;border-color:#8a6d3b}.list-group-item-danger{color:#a94442;background-color:#f2dede}a.list-group-item-danger,button.list-group-item-danger{color:#a94442}a.list-group-item-danger .list-group-item-heading,button.list-group-item-danger 
.list-group-item-heading{color:inherit}a.list-group-item-danger:hover,button.list-group-item-danger:hover,a.list-group-item-danger:focus,button.list-group-item-danger:focus{color:#a94442;background-color:#ebcccc}a.list-group-item-danger.active,button.list-group-item-danger.active,a.list-group-item-danger.active:hover,button.list-group-item-danger.active:hover,a.list-group-item-danger.active:focus,button.list-group-item-danger.active:focus{color:#fff;background-color:#a94442;border-color:#a94442}.list-group-item-heading{margin-top:0;margin-bottom:5px}.list-group-item-text{margin-bottom:0;line-height:1.3}/* Panels and tables-in-panels. FIX: all computed "border-*-radius:-1" values (Less "@border-radius-base - 1" with a base of 0) were invalid CSS — negative radii are rejected and the declarations dropped by browsers — so they are clamped to 0 here. */.panel{margin-bottom:20px;background-color:#fff;border:1px solid transparent;border-radius:0;-webkit-box-shadow:0 1px 1px rgba(0,0,0,0.05);box-shadow:0 1px 1px rgba(0,0,0,0.05)}.panel-body{padding:15px}.panel-heading{padding:10px 15px;border-bottom:1px solid transparent;border-top-right-radius:0;border-top-left-radius:0}.panel-heading>.dropdown .dropdown-toggle{color:inherit}.panel-title{margin-top:0;margin-bottom:0;font-size:16px;color:inherit}.panel-title>a,.panel-title>small,.panel-title>.small,.panel-title>small>a,.panel-title>.small>a{color:inherit}.panel-footer{padding:10px 15px;background-color:#f5f5f5;border-top:1px solid #ddd;border-bottom-right-radius:0;border-bottom-left-radius:0}.panel>.list-group,.panel>.panel-collapse>.list-group{margin-bottom:0}.panel>.list-group .list-group-item,.panel>.panel-collapse>.list-group .list-group-item{border-width:1px 0;border-radius:0}.panel>.list-group:first-child .list-group-item:first-child,.panel>.panel-collapse>.list-group:first-child .list-group-item:first-child{border-top:0;border-top-right-radius:0;border-top-left-radius:0}.panel>.list-group:last-child .list-group-item:last-child,.panel>.panel-collapse>.list-group:last-child .list-group-item:last-child{border-bottom:0;border-bottom-right-radius:0;border-bottom-left-radius:0}.panel>.panel-heading+.panel-collapse>.list-group 
.list-group-item:first-child{border-top-right-radius:0;border-top-left-radius:0}.panel-heading+.list-group .list-group-item:first-child{border-top-width:0}.list-group+.panel-footer{border-top-width:0}.panel>.table,.panel>.table-responsive>.table,.panel>.panel-collapse>.table{margin-bottom:0}.panel>.table caption,.panel>.table-responsive>.table caption,.panel>.panel-collapse>.table caption{padding-left:15px;padding-right:15px}.panel>.table:first-child,.panel>.table-responsive:first-child>.table:first-child{border-top-right-radius:0;border-top-left-radius:0}.panel>.table:first-child>thead:first-child>tr:first-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child,.panel>.table:first-child>tbody:first-child>tr:first-child,.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child{border-top-left-radius:0;border-top-right-radius:0}.panel>.table:first-child>thead:first-child>tr:first-child td:first-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child td:first-child,.panel>.table:first-child>tbody:first-child>tr:first-child td:first-child,.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child td:first-child,.panel>.table:first-child>thead:first-child>tr:first-child th:first-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child th:first-child,.panel>.table:first-child>tbody:first-child>tr:first-child th:first-child,.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child th:first-child{border-top-left-radius:0}.panel>.table:first-child>thead:first-child>tr:first-child td:last-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child td:last-child,.panel>.table:first-child>tbody:first-child>tr:first-child td:last-child,.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child 
td:last-child,.panel>.table:first-child>thead:first-child>tr:first-child th:last-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child th:last-child,.panel>.table:first-child>tbody:first-child>tr:first-child th:last-child,.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child th:last-child{border-top-right-radius:0}.panel>.table:last-child,.panel>.table-responsive:last-child>.table:last-child{border-bottom-right-radius:0;border-bottom-left-radius:0}.panel>.table:last-child>tbody:last-child>tr:last-child,.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child,.panel>.table:last-child>tfoot:last-child>tr:last-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child{border-bottom-left-radius:0;border-bottom-right-radius:0}.panel>.table:last-child>tbody:last-child>tr:last-child td:first-child,.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child td:first-child,.panel>.table:last-child>tfoot:last-child>tr:last-child td:first-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child td:first-child,.panel>.table:last-child>tbody:last-child>tr:last-child th:first-child,.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child th:first-child,.panel>.table:last-child>tfoot:last-child>tr:last-child th:first-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child th:first-child{border-bottom-left-radius:0}.panel>.table:last-child>tbody:last-child>tr:last-child td:last-child,.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child td:last-child,.panel>.table:last-child>tfoot:last-child>tr:last-child td:last-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child td:last-child,.panel>.table:last-child>tbody:last-child>tr:last-child 
th:last-child,.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child th:last-child,.panel>.table:last-child>tfoot:last-child>tr:last-child th:last-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child th:last-child{border-bottom-right-radius:0}.panel>.panel-body+.table,.panel>.panel-body+.table-responsive,.panel>.table+.panel-body,.panel>.table-responsive+.panel-body{border-top:1px solid #ddd}.panel>.table>tbody:first-child>tr:first-child th,.panel>.table>tbody:first-child>tr:first-child td{border-top:0}.panel>.table-bordered,.panel>.table-responsive>.table-bordered{border:0}.panel>.table-bordered>thead>tr>th:first-child,.panel>.table-responsive>.table-bordered>thead>tr>th:first-child,.panel>.table-bordered>tbody>tr>th:first-child,.panel>.table-responsive>.table-bordered>tbody>tr>th:first-child,.panel>.table-bordered>tfoot>tr>th:first-child,.panel>.table-responsive>.table-bordered>tfoot>tr>th:first-child,.panel>.table-bordered>thead>tr>td:first-child,.panel>.table-responsive>.table-bordered>thead>tr>td:first-child,.panel>.table-bordered>tbody>tr>td:first-child,.panel>.table-responsive>.table-bordered>tbody>tr>td:first-child,.panel>.table-bordered>tfoot>tr>td:first-child,.panel>.table-responsive>.table-bordered>tfoot>tr>td:first-child{border-left:0}.panel>.table-bordered>thead>tr>th:last-child,.panel>.table-responsive>.table-bordered>thead>tr>th:last-child,.panel>.table-bordered>tbody>tr>th:last-child,.panel>.table-responsive>.table-bordered>tbody>tr>th:last-child,.panel>.table-bordered>tfoot>tr>th:last-child,.panel>.table-responsive>.table-bordered>tfoot>tr>th:last-child,.panel>.table-bordered>thead>tr>td:last-child,.panel>.table-responsive>.table-bordered>thead>tr>td:last-child,.panel>.table-bordered>tbody>tr>td:last-child,.panel>.table-responsive>.table-bordered>tbody>tr>td:last-child,.panel>.table-bordered>tfoot>tr>td:last-child,.panel>.table-responsive>.table-bordered>tfoot>tr>td:last-child{border-right:0}.panel>.table-bordered>thead>tr:first-child>td,.panel>.table-responsive>.table-bordered>thead>tr:first-child>td,.panel>.table-bordered>tbody>tr:first-child>td,.panel>.table-responsive>.table-bordered>tbody>tr:first-child>td,.panel>.table-bordered>thead>tr:first-child>th,.panel>.table-responsive>.table-bordered>thead>tr:first-child>th,.panel>.table-bordered>tbody>tr:first-child>th,.panel>.table-responsive>.table-bordered>tbody>tr:first-child>th{border-bottom:0}.panel>.table-bordered>tbody>tr:last-child>td,.panel>.table-responsive>.table-bordered>tbody>tr:last-child>td,.panel>.table-bordered>tfoot>tr:last-child>td,.panel>.table-responsive>.table-bordered>tfoot>tr:last-child>td,.panel>.table-bordered>tbody>tr:last-child>th,.panel>.table-responsive>.table-bordered>tbody>tr:last-child>th,.panel>.table-bordered>tfoot>tr:last-child>th,.panel>.table-responsive>.table-bordered>tfoot>tr:last-child>th{border-bottom:0}.panel>.table-responsive{border:0;margin-bottom:0}.panel-group{margin-bottom:20px}.panel-group .panel{margin-bottom:0;border-radius:0}.panel-group .panel+.panel{margin-top:5px}.panel-group .panel-heading{border-bottom:0}.panel-group .panel-heading+.panel-collapse>.panel-body,.panel-group .panel-heading+.panel-collapse>.list-group{border-top:1px solid #ddd}.panel-group .panel-footer{border-top:0}.panel-group .panel-footer+.panel-collapse .panel-body{border-bottom:1px solid #ddd}.panel-default{border-color:#ddd}.panel-default>.panel-heading{color:#999;background-color:#f5f5f5;border-color:#ddd}.panel-default>.panel-heading+.panel-collapse>.panel-body{border-top-color:#ddd}.panel-default>.panel-heading 
.badge{color:#f5f5f5;background-color:#999}/* Panel contextual variants, responsive embeds, wells, close button, modal dialog/backdrop, tooltips, popover chrome */.panel-default>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#ddd}.panel-primary{border-color:#98e8ff}.panel-primary>.panel-heading{color:#fff;background-color:#98e8ff;border-color:#98e8ff}.panel-primary>.panel-heading+.panel-collapse>.panel-body{border-top-color:#98e8ff}.panel-primary>.panel-heading .badge{color:#98e8ff;background-color:#fff}.panel-primary>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#98e8ff}.panel-success{border-color:#d6e9c6}.panel-success>.panel-heading{color:#3c763d;background-color:#dff0d8;border-color:#d6e9c6}.panel-success>.panel-heading+.panel-collapse>.panel-body{border-top-color:#d6e9c6}.panel-success>.panel-heading .badge{color:#dff0d8;background-color:#3c763d}.panel-success>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#d6e9c6}.panel-info{border-color:#bce8f1}.panel-info>.panel-heading{color:#31708f;background-color:#d9edf7;border-color:#bce8f1}.panel-info>.panel-heading+.panel-collapse>.panel-body{border-top-color:#bce8f1}.panel-info>.panel-heading .badge{color:#d9edf7;background-color:#31708f}.panel-info>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#bce8f1}.panel-warning{border-color:#faebcc}.panel-warning>.panel-heading{color:#8a6d3b;background-color:#fcf8e3;border-color:#faebcc}.panel-warning>.panel-heading+.panel-collapse>.panel-body{border-top-color:#faebcc}.panel-warning>.panel-heading .badge{color:#fcf8e3;background-color:#8a6d3b}.panel-warning>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#faebcc}.panel-danger{border-color:#ebccd1}.panel-danger>.panel-heading{color:#a94442;background-color:#f2dede;border-color:#ebccd1}.panel-danger>.panel-heading+.panel-collapse>.panel-body{border-top-color:#ebccd1}.panel-danger>.panel-heading 
.badge{color:#f2dede;background-color:#a94442}.panel-danger>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#ebccd1}.embed-responsive{position:relative;display:block;height:0;padding:0;overflow:hidden}.embed-responsive .embed-responsive-item,.embed-responsive iframe,.embed-responsive embed,.embed-responsive object,.embed-responsive video{position:absolute;top:0;left:0;bottom:0;height:100%;width:100%;border:0}.embed-responsive-16by9{padding-bottom:56.25%}.embed-responsive-4by3{padding-bottom:75%}.well{min-height:20px;padding:19px;margin-bottom:20px;background-color:#f5f5f5;border:1px solid #e3e3e3;border-radius:0;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.05);box-shadow:inset 0 1px 1px rgba(0,0,0,0.05)}.well blockquote{border-color:#ddd;border-color:rgba(0,0,0,0.15)}.well-lg{padding:24px;border-radius:0}.well-sm{padding:9px;border-radius:0}.close{float:right;font-size:21px;font-weight:bold;line-height:1;color:#000;text-shadow:0 1px 0 #fff;opacity:.2;filter:alpha(opacity=20)}.close:hover,.close:focus{color:#000;text-decoration:none;cursor:pointer;opacity:.5;filter:alpha(opacity=50)}button.close{padding:0;cursor:pointer;background:transparent;border:0;-webkit-appearance:none}.modal-open{overflow:hidden}.modal{display:none;overflow:hidden;position:fixed;top:0;right:0;bottom:0;left:0;z-index:1050;-webkit-overflow-scrolling:touch;outline:0}.modal.fade .modal-dialog{-webkit-transform:translate(0, -25%);-ms-transform:translate(0, -25%);-o-transform:translate(0, -25%);transform:translate(0, -25%);-webkit-transition:-webkit-transform 0.3s ease-out;-o-transition:-o-transform 0.3s ease-out;transition:transform 0.3s ease-out}.modal.in .modal-dialog{-webkit-transform:translate(0, 0);-ms-transform:translate(0, 0);-o-transform:translate(0, 0);transform:translate(0, 0)}.modal-open .modal{overflow-x:hidden;overflow-y:auto}.modal-dialog{position:relative;width:auto;margin:10px}.modal-content{position:relative;background-color:#fff;border:1px solid #999;border:1px 
solid rgba(0,0,0,0.2);border-radius:0;-webkit-box-shadow:0 3px 9px rgba(0,0,0,0.5);box-shadow:0 3px 9px rgba(0,0,0,0.5);-webkit-background-clip:padding-box;background-clip:padding-box;outline:0}.modal-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1040;background-color:#000}.modal-backdrop.fade{opacity:0;filter:alpha(opacity=0)}.modal-backdrop.in{opacity:.5;filter:alpha(opacity=50)}.modal-header{padding:15px;border-bottom:1px solid #e5e5e5}.modal-header .close{margin-top:-2px}.modal-title{margin:0;line-height:1.42857143}.modal-body{position:relative;padding:15px}.modal-footer{padding:15px;text-align:right;border-top:1px solid #e5e5e5}.modal-footer .btn+.btn{margin-left:5px;margin-bottom:0}.modal-footer .btn-group .btn+.btn{margin-left:-1px}.modal-footer .btn-block+.btn-block{margin-left:0}.modal-scrollbar-measure{position:absolute;top:-9999px;width:50px;height:50px;overflow:scroll}@media (min-width:768px){.modal-dialog{width:600px;margin:30px auto}.modal-content{-webkit-box-shadow:0 5px 15px rgba(0,0,0,0.5);box-shadow:0 5px 15px rgba(0,0,0,0.5)}.modal-sm{width:300px}}@media (min-width:992px){.modal-lg{width:900px}}.tooltip{position:absolute;z-index:1070;display:block;font-family:"Roboto","Helvetica Neue",Helvetica,Arial,sans-serif;font-style:normal;font-weight:normal;letter-spacing:normal;line-break:auto;line-height:1.42857143;text-align:left;text-align:start;text-decoration:none;text-shadow:none;text-transform:none;white-space:normal;word-break:normal;word-spacing:normal;word-wrap:normal;font-size:12px;opacity:0;filter:alpha(opacity=0)}.tooltip.in{opacity:.9;filter:alpha(opacity=90)}.tooltip.top{margin-top:-3px;padding:5px 0}.tooltip.right{margin-left:3px;padding:0 5px}.tooltip.bottom{margin-top:3px;padding:5px 0}.tooltip.left{margin-left:-3px;padding:0 5px}.tooltip-inner{max-width:200px;padding:3px 
8px;color:#fff;text-align:center;background-color:#000;border-radius:0}.tooltip-arrow{position:absolute;width:0;height:0;border-color:transparent;border-style:solid}.tooltip.top .tooltip-arrow{bottom:0;left:50%;margin-left:-5px;border-width:5px 5px 0;border-top-color:#000}.tooltip.top-left .tooltip-arrow{bottom:0;right:5px;margin-bottom:-5px;border-width:5px 5px 0;border-top-color:#000}.tooltip.top-right .tooltip-arrow{bottom:0;left:5px;margin-bottom:-5px;border-width:5px 5px 0;border-top-color:#000}.tooltip.right .tooltip-arrow{top:50%;left:0;margin-top:-5px;border-width:5px 5px 5px 0;border-right-color:#000}.tooltip.left .tooltip-arrow{top:50%;right:0;margin-top:-5px;border-width:5px 0 5px 5px;border-left-color:#000}.tooltip.bottom .tooltip-arrow{top:0;left:50%;margin-left:-5px;border-width:0 5px 5px;border-bottom-color:#000}.tooltip.bottom-left .tooltip-arrow{top:0;right:5px;margin-top:-5px;border-width:0 5px 5px;border-bottom-color:#000}.tooltip.bottom-right .tooltip-arrow{top:0;left:5px;margin-top:-5px;border-width:0 5px 5px;border-bottom-color:#000}.popover{position:absolute;top:0;left:0;z-index:1060;display:none;max-width:276px;padding:1px;font-family:"Roboto","Helvetica Neue",Helvetica,Arial,sans-serif;font-style:normal;font-weight:normal;letter-spacing:normal;line-break:auto;line-height:1.42857143;text-align:left;text-align:start;text-decoration:none;text-shadow:none;text-transform:none;white-space:normal;word-break:normal;word-spacing:normal;word-wrap:normal;font-size:14px;background-color:#fff;-webkit-background-clip:padding-box;background-clip:padding-box;border:1px solid #ccc;border:1px solid rgba(0,0,0,0.2);border-radius:0;-webkit-box-shadow:0 5px 10px rgba(0,0,0,0.2);box-shadow:0 5px 10px rgba(0,0,0,0.2)}.popover.top{margin-top:-10px}.popover.right{margin-left:10px}.popover.bottom{margin-top:10px}.popover.left{margin-left:-10px}.popover-title{margin:0;padding:8px 14px;font-size:14px;background-color:#f7f7f7;border-bottom:1px solid 
#ebebeb;border-radius:-1 -1 0 0}.popover-content{padding:9px 14px}.popover>.arrow,.popover>.arrow:after{position:absolute;display:block;width:0;height:0;border-color:transparent;border-style:solid}.popover>.arrow{border-width:11px}.popover>.arrow:after{border-width:10px;content:""}.popover.top>.arrow{left:50%;margin-left:-11px;border-bottom-width:0;border-top-color:#999;border-top-color:rgba(0,0,0,0.25);bottom:-11px}.popover.top>.arrow:after{content:" ";bottom:1px;margin-left:-10px;border-bottom-width:0;border-top-color:#fff}.popover.right>.arrow{top:50%;left:-11px;margin-top:-11px;border-left-width:0;border-right-color:#999;border-right-color:rgba(0,0,0,0.25)}.popover.right>.arrow:after{content:" ";left:1px;bottom:-10px;border-left-width:0;border-right-color:#fff}.popover.bottom>.arrow{left:50%;margin-left:-11px;border-top-width:0;border-bottom-color:#999;border-bottom-color:rgba(0,0,0,0.25);top:-11px}.popover.bottom>.arrow:after{content:" ";top:1px;margin-left:-10px;border-top-width:0;border-bottom-color:#fff}.popover.left>.arrow{top:50%;right:-11px;margin-top:-11px;border-right-width:0;border-left-color:#999;border-left-color:rgba(0,0,0,0.25)}.popover.left>.arrow:after{content:" ";right:1px;border-right-width:0;border-left-color:#fff;bottom:-10px}.carousel{position:relative}.carousel-inner{position:relative;overflow:hidden;width:100%}.carousel-inner>.item{display:none;position:relative;-webkit-transition:.6s ease-in-out left;-o-transition:.6s ease-in-out left;transition:.6s ease-in-out left}.carousel-inner>.item>img,.carousel-inner>.item>a>img{line-height:1}@media all and (transform-3d),(-webkit-transform-3d){.carousel-inner>.item{-webkit-transition:-webkit-transform 0.6s ease-in-out;-o-transition:-o-transform 0.6s ease-in-out;transition:transform 0.6s 
ease-in-out;-webkit-backface-visibility:hidden;backface-visibility:hidden;-webkit-perspective:1000px;perspective:1000px}.carousel-inner>.item.next,.carousel-inner>.item.active.right{-webkit-transform:translate3d(100%, 0, 0);transform:translate3d(100%, 0, 0);left:0}.carousel-inner>.item.prev,.carousel-inner>.item.active.left{-webkit-transform:translate3d(-100%, 0, 0);transform:translate3d(-100%, 0, 0);left:0}.carousel-inner>.item.next.left,.carousel-inner>.item.prev.right,.carousel-inner>.item.active{-webkit-transform:translate3d(0, 0, 0);transform:translate3d(0, 0, 0);left:0}}.carousel-inner>.active,.carousel-inner>.next,.carousel-inner>.prev{display:block}.carousel-inner>.active{left:0}.carousel-inner>.next,.carousel-inner>.prev{position:absolute;top:0;width:100%}.carousel-inner>.next{left:100%}.carousel-inner>.prev{left:-100%}.carousel-inner>.next.left,.carousel-inner>.prev.right{left:0}.carousel-inner>.active.left{left:-100%}.carousel-inner>.active.right{left:100%}.carousel-control{position:absolute;top:0;left:0;bottom:0;width:15%;opacity:.5;filter:alpha(opacity=50);font-size:20px;color:#fff;text-align:center;text-shadow:0 1px 2px rgba(0,0,0,0.6);background-color:rgba(0,0,0,0)}.carousel-control.left{background-image:-webkit-linear-gradient(left, rgba(0,0,0,0.5) 0, rgba(0,0,0,0.0001) 100%);background-image:-o-linear-gradient(left, rgba(0,0,0,0.5) 0, rgba(0,0,0,0.0001) 100%);background-image:-webkit-gradient(linear, left top, right top, color-stop(0, rgba(0,0,0,0.5)), to(rgba(0,0,0,0.0001)));background-image:linear-gradient(to right, rgba(0,0,0,0.5) 0, rgba(0,0,0,0.0001) 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#80000000', endColorstr='#00000000', GradientType=1)}.carousel-control.right{left:auto;right:0;background-image:-webkit-linear-gradient(left, rgba(0,0,0,0.0001) 0, rgba(0,0,0,0.5) 100%);background-image:-o-linear-gradient(left, rgba(0,0,0,0.0001) 0, rgba(0,0,0,0.5) 
100%);background-image:-webkit-gradient(linear, left top, right top, color-stop(0, rgba(0,0,0,0.0001)), to(rgba(0,0,0,0.5)));background-image:linear-gradient(to right, rgba(0,0,0,0.0001) 0, rgba(0,0,0,0.5) 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#00000000', endColorstr='#80000000', GradientType=1)}.carousel-control:hover,.carousel-control:focus{outline:0;color:#fff;text-decoration:none;opacity:.9;filter:alpha(opacity=90)}.carousel-control .icon-prev,.carousel-control .icon-next,.carousel-control .glyphicon-chevron-left,.carousel-control .glyphicon-chevron-right{position:absolute;top:50%;margin-top:-10px;z-index:5;display:inline-block}.carousel-control .icon-prev,.carousel-control .glyphicon-chevron-left{left:50%;margin-left:-10px}.carousel-control .icon-next,.carousel-control .glyphicon-chevron-right{right:50%;margin-right:-10px}.carousel-control .icon-prev,.carousel-control .icon-next{width:20px;height:20px;line-height:1;font-family:serif}.carousel-control .icon-prev:before{content:'\2039'}.carousel-control .icon-next:before{content:'\203a'}.carousel-indicators{position:absolute;bottom:10px;left:50%;z-index:15;width:60%;margin-left:-30%;padding-left:0;list-style:none;text-align:center}.carousel-indicators li{display:inline-block;width:10px;height:10px;margin:1px;text-indent:-999px;border:1px solid #fff;border-radius:10px;cursor:pointer;background-color:#000 \9;background-color:rgba(0,0,0,0)}.carousel-indicators .active{margin:0;width:12px;height:12px;background-color:#fff}.carousel-caption{position:absolute;left:15%;right:15%;bottom:20px;z-index:10;padding-top:20px;padding-bottom:20px;color:#fff;text-align:center;text-shadow:0 1px 2px rgba(0,0,0,0.6)}.carousel-caption .btn{text-shadow:none}@media screen and (min-width:768px){.carousel-control .glyphicon-chevron-left,.carousel-control .glyphicon-chevron-right,.carousel-control .icon-prev,.carousel-control 
.icon-next{width:30px;height:30px;margin-top:-10px;font-size:30px}.carousel-control .glyphicon-chevron-left,.carousel-control .icon-prev{margin-left:-10px}.carousel-control .glyphicon-chevron-right,.carousel-control .icon-next{margin-right:-10px}.carousel-caption{left:20%;right:20%;padding-bottom:30px}.carousel-indicators{bottom:20px}}.clearfix:before,.clearfix:after,.dl-horizontal dd:before,.dl-horizontal dd:after,.container:before,.container:after,.container-fluid:before,.container-fluid:after,.row:before,.row:after,.form-horizontal .form-group:before,.form-horizontal .form-group:after,.btn-toolbar:before,.btn-toolbar:after,.btn-group-vertical>.btn-group:before,.btn-group-vertical>.btn-group:after,.nav:before,.nav:after,.navbar:before,.navbar:after,.navbar-header:before,.navbar-header:after,.navbar-collapse:before,.navbar-collapse:after,.pager:before,.pager:after,.panel-body:before,.panel-body:after,.modal-header:before,.modal-header:after,.modal-footer:before,.modal-footer:after{content:" ";display:table}.clearfix:after,.dl-horizontal dd:after,.container:after,.container-fluid:after,.row:after,.form-horizontal .form-group:after,.btn-toolbar:after,.btn-group-vertical>.btn-group:after,.nav:after,.navbar:after,.navbar-header:after,.navbar-collapse:after,.pager:after,.panel-body:after,.modal-header:after,.modal-footer:after{clear:both}.center-block{display:block;margin-left:auto;margin-right:auto}.pull-right{float:right !important}.pull-left{float:left !important}.hide{display:none !important}.show{display:block !important}.invisible{visibility:hidden}.text-hide{font:0/0 a;color:transparent;text-shadow:none;background-color:transparent;border:0}.hidden{display:none !important}.affix{position:fixed}@-ms-viewport{width:device-width}.visible-xs,.visible-sm,.visible-md,.visible-lg{display:none 
!important}.visible-xs-block,.visible-xs-inline,.visible-xs-inline-block,.visible-sm-block,.visible-sm-inline,.visible-sm-inline-block,.visible-md-block,.visible-md-inline,.visible-md-inline-block,.visible-lg-block,.visible-lg-inline,.visible-lg-inline-block{display:none !important}@media (max-width:767px){.visible-xs{display:block !important}table.visible-xs{display:table !important}tr.visible-xs{display:table-row !important}th.visible-xs,td.visible-xs{display:table-cell !important}}@media (max-width:767px){.visible-xs-block{display:block !important}}@media (max-width:767px){.visible-xs-inline{display:inline !important}}@media (max-width:767px){.visible-xs-inline-block{display:inline-block !important}}@media (min-width:768px) and (max-width:991px){.visible-sm{display:block !important}table.visible-sm{display:table !important}tr.visible-sm{display:table-row !important}th.visible-sm,td.visible-sm{display:table-cell !important}}@media (min-width:768px) and (max-width:991px){.visible-sm-block{display:block !important}}@media (min-width:768px) and (max-width:991px){.visible-sm-inline{display:inline !important}}@media (min-width:768px) and (max-width:991px){.visible-sm-inline-block{display:inline-block !important}}@media (min-width:992px) and (max-width:1199px){.visible-md{display:block !important}table.visible-md{display:table !important}tr.visible-md{display:table-row !important}th.visible-md,td.visible-md{display:table-cell !important}}@media (min-width:992px) and (max-width:1199px){.visible-md-block{display:block !important}}@media (min-width:992px) and (max-width:1199px){.visible-md-inline{display:inline !important}}@media (min-width:992px) and (max-width:1199px){.visible-md-inline-block{display:inline-block !important}}@media (min-width:1200px){.visible-lg{display:block !important}table.visible-lg{display:table !important}tr.visible-lg{display:table-row !important}th.visible-lg,td.visible-lg{display:table-cell !important}}@media 
(min-width:1200px){.visible-lg-block{display:block !important}}@media (min-width:1200px){.visible-lg-inline{display:inline !important}}@media (min-width:1200px){.visible-lg-inline-block{display:inline-block !important}}@media (max-width:767px){.hidden-xs{display:none !important}}@media (min-width:768px) and (max-width:991px){.hidden-sm{display:none !important}}@media (min-width:992px) and (max-width:1199px){.hidden-md{display:none !important}}@media (min-width:1200px){.hidden-lg{display:none !important}}.visible-print{display:none !important}@media print{.visible-print{display:block !important}table.visible-print{display:table !important}tr.visible-print{display:table-row !important}th.visible-print,td.visible-print{display:table-cell !important}}.visible-print-block{display:none !important}@media print{.visible-print-block{display:block !important}}.visible-print-inline{display:none !important}@media print{.visible-print-inline{display:inline !important}}.visible-print-inline-block{display:none !important}@media print{.visible-print-inline-block{display:inline-block !important}}@media print{.hidden-print{display:none !important}} \ No newline at end of file + *//*! 
normalize.css v3.0.3 | MIT License | github.com/necolas/normalize.css */html{font-family:sans-serif;-ms-text-size-adjust:100%;-webkit-text-size-adjust:100%}body{margin:0}article,aside,details,figcaption,figure,footer,header,hgroup,main,menu,nav,section,summary{display:block}audio,canvas,progress,video{display:inline-block;vertical-align:baseline}audio:not([controls]){display:none;height:0}[hidden],template{display:none}a{background-color:transparent}a:active,a:hover{outline:0}abbr[title]{border-bottom:none;text-decoration:underline;text-decoration:underline dotted}b,strong{font-weight:bold}dfn{font-style:italic}h1{font-size:2em;margin:0.67em 0}mark{background:#ff0;color:#000}small{font-size:80%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sup{top:-0.5em}sub{bottom:-0.25em}img{border:0}svg:not(:root){overflow:hidden}figure{margin:1em 40px}hr{-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box;height:0}pre{overflow:auto}code,kbd,pre,samp{font-family:monospace, monospace;font-size:1em}button,input,optgroup,select,textarea{color:inherit;font:inherit;margin:0}button{overflow:visible}button,select{text-transform:none}button,html input[type="button"],input[type="reset"],input[type="submit"]{-webkit-appearance:button;cursor:pointer}button[disabled],html input[disabled]{cursor:default}button::-moz-focus-inner,input::-moz-focus-inner{border:0;padding:0}input{line-height:normal}input[type="checkbox"],input[type="radio"]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;padding:0}input[type="number"]::-webkit-inner-spin-button,input[type="number"]::-webkit-outer-spin-button{height:auto}input[type="search"]{-webkit-appearance:textfield;-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box}input[type="search"]::-webkit-search-cancel-button,input[type="search"]::-webkit-search-decoration{-webkit-appearance:none}fieldset{border:1px solid #c0c0c0;margin:0 
2px;padding:0.35em 0.625em 0.75em}legend{border:0;padding:0}textarea{overflow:auto}optgroup{font-weight:bold}table{border-collapse:collapse;border-spacing:0}td,th{padding:0}/*! Source: https://github.com/h5bp/html5-boilerplate/blob/master/src/css/main.css */@media print{*,*:before,*:after{color:#000 !important;text-shadow:none !important;background:transparent !important;-webkit-box-shadow:none !important;box-shadow:none !important}a,a:visited{text-decoration:underline}a[href]:after{content:" (" attr(href) ")"}abbr[title]:after{content:" (" attr(title) ")"}a[href^="#"]:after,a[href^="javascript:"]:after{content:""}pre,blockquote{border:1px solid #999;page-break-inside:avoid}thead{display:table-header-group}tr,img{page-break-inside:avoid}img{max-width:100% !important}p,h2,h3{orphans:3;widows:3}h2,h3{page-break-after:avoid}.navbar{display:none}.btn>.caret,.dropup>.btn>.caret{border-top-color:#000 !important}.label{border:1px solid #000}.table{border-collapse:collapse !important}.table td,.table th{background-color:#fff !important}.table-bordered th,.table-bordered td{border:1px solid #ddd !important}}@font-face{font-family:"Glyphicons Halflings";src:url("../fonts/glyphicons-halflings-regular.eot");src:url("../fonts/glyphicons-halflings-regular.eot?#iefix") format("embedded-opentype"),url("../fonts/glyphicons-halflings-regular.woff2") format("woff2"),url("../fonts/glyphicons-halflings-regular.woff") format("woff"),url("../fonts/glyphicons-halflings-regular.ttf") format("truetype"),url("../fonts/glyphicons-halflings-regular.svg#glyphicons_halflingsregular") format("svg")}.glyphicon{position:relative;top:1px;display:inline-block;font-family:"Glyphicons 
Halflings";font-style:normal;font-weight:400;line-height:1;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.glyphicon-asterisk:before{content:"\002a"}.glyphicon-plus:before{content:"\002b"}.glyphicon-euro:before,.glyphicon-eur:before{content:"\20ac"}.glyphicon-minus:before{content:"\2212"}.glyphicon-cloud:before{content:"\2601"}.glyphicon-envelope:before{content:"\2709"}.glyphicon-pencil:before{content:"\270f"}.glyphicon-glass:before{content:"\e001"}.glyphicon-music:before{content:"\e002"}.glyphicon-search:before{content:"\e003"}.glyphicon-heart:before{content:"\e005"}.glyphicon-star:before{content:"\e006"}.glyphicon-star-empty:before{content:"\e007"}.glyphicon-user:before{content:"\e008"}.glyphicon-film:before{content:"\e009"}.glyphicon-th-large:before{content:"\e010"}.glyphicon-th:before{content:"\e011"}.glyphicon-th-list:before{content:"\e012"}.glyphicon-ok:before{content:"\e013"}.glyphicon-remove:before{content:"\e014"}.glyphicon-zoom-in:before{content:"\e015"}.glyphicon-zoom-out:before{content:"\e016"}.glyphicon-off:before{content:"\e017"}.glyphicon-signal:before{content:"\e018"}.glyphicon-cog:before{content:"\e019"}.glyphicon-trash:before{content:"\e020"}.glyphicon-home:before{content:"\e021"}.glyphicon-file:before{content:"\e022"}.glyphicon-time:before{content:"\e023"}.glyphicon-road:before{content:"\e024"}.glyphicon-download-alt:before{content:"\e025"}.glyphicon-download:before{content:"\e026"}.glyphicon-upload:before{content:"\e027"}.glyphicon-inbox:before{content:"\e028"}.glyphicon-play-circle:before{content:"\e029"}.glyphicon-repeat:before{content:"\e030"}.glyphicon-refresh:before{content:"\e031"}.glyphicon-list-alt:before{content:"\e032"}.glyphicon-lock:before{content:"\e033"}.glyphicon-flag:before{content:"\e034"}.glyphicon-headphones:before{content:"\e035"}.glyphicon-volume-off:before{content:"\e036"}.glyphicon-volume-down:before{content:"\e037"}.glyphicon-volume-up:before{content:"\e038"}.glyphicon-qrcode:before{content:"\e039"}.g
lyphicon-barcode:before{content:"\e040"}.glyphicon-tag:before{content:"\e041"}.glyphicon-tags:before{content:"\e042"}.glyphicon-book:before{content:"\e043"}.glyphicon-bookmark:before{content:"\e044"}.glyphicon-print:before{content:"\e045"}.glyphicon-camera:before{content:"\e046"}.glyphicon-font:before{content:"\e047"}.glyphicon-bold:before{content:"\e048"}.glyphicon-italic:before{content:"\e049"}.glyphicon-text-height:before{content:"\e050"}.glyphicon-text-width:before{content:"\e051"}.glyphicon-align-left:before{content:"\e052"}.glyphicon-align-center:before{content:"\e053"}.glyphicon-align-right:before{content:"\e054"}.glyphicon-align-justify:before{content:"\e055"}.glyphicon-list:before{content:"\e056"}.glyphicon-indent-left:before{content:"\e057"}.glyphicon-indent-right:before{content:"\e058"}.glyphicon-facetime-video:before{content:"\e059"}.glyphicon-picture:before{content:"\e060"}.glyphicon-map-marker:before{content:"\e062"}.glyphicon-adjust:before{content:"\e063"}.glyphicon-tint:before{content:"\e064"}.glyphicon-edit:before{content:"\e065"}.glyphicon-share:before{content:"\e066"}.glyphicon-check:before{content:"\e067"}.glyphicon-move:before{content:"\e068"}.glyphicon-step-backward:before{content:"\e069"}.glyphicon-fast-backward:before{content:"\e070"}.glyphicon-backward:before{content:"\e071"}.glyphicon-play:before{content:"\e072"}.glyphicon-pause:before{content:"\e073"}.glyphicon-stop:before{content:"\e074"}.glyphicon-forward:before{content:"\e075"}.glyphicon-fast-forward:before{content:"\e076"}.glyphicon-step-forward:before{content:"\e077"}.glyphicon-eject:before{content:"\e078"}.glyphicon-chevron-left:before{content:"\e079"}.glyphicon-chevron-right:before{content:"\e080"}.glyphicon-plus-sign:before{content:"\e081"}.glyphicon-minus-sign:before{content:"\e082"}.glyphicon-remove-sign:before{content:"\e083"}.glyphicon-ok-sign:before{content:"\e084"}.glyphicon-question-sign:before{content:"\e085"}.glyphicon-info-sign:before{content:"\e086"}.glyphicon-screenshot
:before{content:"\e087"}.glyphicon-remove-circle:before{content:"\e088"}.glyphicon-ok-circle:before{content:"\e089"}.glyphicon-ban-circle:before{content:"\e090"}.glyphicon-arrow-left:before{content:"\e091"}.glyphicon-arrow-right:before{content:"\e092"}.glyphicon-arrow-up:before{content:"\e093"}.glyphicon-arrow-down:before{content:"\e094"}.glyphicon-share-alt:before{content:"\e095"}.glyphicon-resize-full:before{content:"\e096"}.glyphicon-resize-small:before{content:"\e097"}.glyphicon-exclamation-sign:before{content:"\e101"}.glyphicon-gift:before{content:"\e102"}.glyphicon-leaf:before{content:"\e103"}.glyphicon-fire:before{content:"\e104"}.glyphicon-eye-open:before{content:"\e105"}.glyphicon-eye-close:before{content:"\e106"}.glyphicon-warning-sign:before{content:"\e107"}.glyphicon-plane:before{content:"\e108"}.glyphicon-calendar:before{content:"\e109"}.glyphicon-random:before{content:"\e110"}.glyphicon-comment:before{content:"\e111"}.glyphicon-magnet:before{content:"\e112"}.glyphicon-chevron-up:before{content:"\e113"}.glyphicon-chevron-down:before{content:"\e114"}.glyphicon-retweet:before{content:"\e115"}.glyphicon-shopping-cart:before{content:"\e116"}.glyphicon-folder-close:before{content:"\e117"}.glyphicon-folder-open:before{content:"\e118"}.glyphicon-resize-vertical:before{content:"\e119"}.glyphicon-resize-horizontal:before{content:"\e120"}.glyphicon-hdd:before{content:"\e121"}.glyphicon-bullhorn:before{content:"\e122"}.glyphicon-bell:before{content:"\e123"}.glyphicon-certificate:before{content:"\e124"}.glyphicon-thumbs-up:before{content:"\e125"}.glyphicon-thumbs-down:before{content:"\e126"}.glyphicon-hand-right:before{content:"\e127"}.glyphicon-hand-left:before{content:"\e128"}.glyphicon-hand-up:before{content:"\e129"}.glyphicon-hand-down:before{content:"\e130"}.glyphicon-circle-arrow-right:before{content:"\e131"}.glyphicon-circle-arrow-left:before{content:"\e132"}.glyphicon-circle-arrow-up:before{content:"\e133"}.glyphicon-circle-arrow-down:before{content:"\e134"
}.glyphicon-globe:before{content:"\e135"}.glyphicon-wrench:before{content:"\e136"}.glyphicon-tasks:before{content:"\e137"}.glyphicon-filter:before{content:"\e138"}.glyphicon-briefcase:before{content:"\e139"}.glyphicon-fullscreen:before{content:"\e140"}.glyphicon-dashboard:before{content:"\e141"}.glyphicon-paperclip:before{content:"\e142"}.glyphicon-heart-empty:before{content:"\e143"}.glyphicon-link:before{content:"\e144"}.glyphicon-phone:before{content:"\e145"}.glyphicon-pushpin:before{content:"\e146"}.glyphicon-usd:before{content:"\e148"}.glyphicon-gbp:before{content:"\e149"}.glyphicon-sort:before{content:"\e150"}.glyphicon-sort-by-alphabet:before{content:"\e151"}.glyphicon-sort-by-alphabet-alt:before{content:"\e152"}.glyphicon-sort-by-order:before{content:"\e153"}.glyphicon-sort-by-order-alt:before{content:"\e154"}.glyphicon-sort-by-attributes:before{content:"\e155"}.glyphicon-sort-by-attributes-alt:before{content:"\e156"}.glyphicon-unchecked:before{content:"\e157"}.glyphicon-expand:before{content:"\e158"}.glyphicon-collapse-down:before{content:"\e159"}.glyphicon-collapse-up:before{content:"\e160"}.glyphicon-log-in:before{content:"\e161"}.glyphicon-flash:before{content:"\e162"}.glyphicon-log-out:before{content:"\e163"}.glyphicon-new-window:before{content:"\e164"}.glyphicon-record:before{content:"\e165"}.glyphicon-save:before{content:"\e166"}.glyphicon-open:before{content:"\e167"}.glyphicon-saved:before{content:"\e168"}.glyphicon-import:before{content:"\e169"}.glyphicon-export:before{content:"\e170"}.glyphicon-send:before{content:"\e171"}.glyphicon-floppy-disk:before{content:"\e172"}.glyphicon-floppy-saved:before{content:"\e173"}.glyphicon-floppy-remove:before{content:"\e174"}.glyphicon-floppy-save:before{content:"\e175"}.glyphicon-floppy-open:before{content:"\e176"}.glyphicon-credit-card:before{content:"\e177"}.glyphicon-transfer:before{content:"\e178"}.glyphicon-cutlery:before{content:"\e179"}.glyphicon-header:before{content:"\e180"}.glyphicon-compressed:before{c
ontent:"\e181"}.glyphicon-earphone:before{content:"\e182"}.glyphicon-phone-alt:before{content:"\e183"}.glyphicon-tower:before{content:"\e184"}.glyphicon-stats:before{content:"\e185"}.glyphicon-sd-video:before{content:"\e186"}.glyphicon-hd-video:before{content:"\e187"}.glyphicon-subtitles:before{content:"\e188"}.glyphicon-sound-stereo:before{content:"\e189"}.glyphicon-sound-dolby:before{content:"\e190"}.glyphicon-sound-5-1:before{content:"\e191"}.glyphicon-sound-6-1:before{content:"\e192"}.glyphicon-sound-7-1:before{content:"\e193"}.glyphicon-copyright-mark:before{content:"\e194"}.glyphicon-registration-mark:before{content:"\e195"}.glyphicon-cloud-download:before{content:"\e197"}.glyphicon-cloud-upload:before{content:"\e198"}.glyphicon-tree-conifer:before{content:"\e199"}.glyphicon-tree-deciduous:before{content:"\e200"}.glyphicon-cd:before{content:"\e201"}.glyphicon-save-file:before{content:"\e202"}.glyphicon-open-file:before{content:"\e203"}.glyphicon-level-up:before{content:"\e204"}.glyphicon-copy:before{content:"\e205"}.glyphicon-paste:before{content:"\e206"}.glyphicon-alert:before{content:"\e209"}.glyphicon-equalizer:before{content:"\e210"}.glyphicon-king:before{content:"\e211"}.glyphicon-queen:before{content:"\e212"}.glyphicon-pawn:before{content:"\e213"}.glyphicon-bishop:before{content:"\e214"}.glyphicon-knight:before{content:"\e215"}.glyphicon-baby-formula:before{content:"\e216"}.glyphicon-tent:before{content:"\26fa"}.glyphicon-blackboard:before{content:"\e218"}.glyphicon-bed:before{content:"\e219"}.glyphicon-apple:before{content:"\f8ff"}.glyphicon-erase:before{content:"\e221"}.glyphicon-hourglass:before{content:"\231b"}.glyphicon-lamp:before{content:"\e223"}.glyphicon-duplicate:before{content:"\e224"}.glyphicon-piggy-bank:before{content:"\e225"}.glyphicon-scissors:before{content:"\e226"}.glyphicon-bitcoin:before{content:"\e227"}.glyphicon-btc:before{content:"\e227"}.glyphicon-xbt:before{content:"\e227"}.glyphicon-yen:before{content:"\00a5"}.glyphicon-jpy:befo
re{content:"\00a5"}.glyphicon-ruble:before{content:"\20bd"}.glyphicon-rub:before{content:"\20bd"}.glyphicon-scale:before{content:"\e230"}.glyphicon-ice-lolly:before{content:"\e231"}.glyphicon-ice-lolly-tasted:before{content:"\e232"}.glyphicon-education:before{content:"\e233"}.glyphicon-option-horizontal:before{content:"\e234"}.glyphicon-option-vertical:before{content:"\e235"}.glyphicon-menu-hamburger:before{content:"\e236"}.glyphicon-modal-window:before{content:"\e237"}.glyphicon-oil:before{content:"\e238"}.glyphicon-grain:before{content:"\e239"}.glyphicon-sunglasses:before{content:"\e240"}.glyphicon-text-size:before{content:"\e241"}.glyphicon-text-color:before{content:"\e242"}.glyphicon-text-background:before{content:"\e243"}.glyphicon-object-align-top:before{content:"\e244"}.glyphicon-object-align-bottom:before{content:"\e245"}.glyphicon-object-align-horizontal:before{content:"\e246"}.glyphicon-object-align-left:before{content:"\e247"}.glyphicon-object-align-vertical:before{content:"\e248"}.glyphicon-object-align-right:before{content:"\e249"}.glyphicon-triangle-right:before{content:"\e250"}.glyphicon-triangle-left:before{content:"\e251"}.glyphicon-triangle-bottom:before{content:"\e252"}.glyphicon-triangle-top:before{content:"\e253"}.glyphicon-console:before{content:"\e254"}.glyphicon-superscript:before{content:"\e255"}.glyphicon-subscript:before{content:"\e256"}.glyphicon-menu-left:before{content:"\e257"}.glyphicon-menu-right:before{content:"\e258"}.glyphicon-menu-down:before{content:"\e259"}.glyphicon-menu-up:before{content:"\e260"}*{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}*:before,*:after{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}html{font-size:10px;-webkit-tap-highlight-color:rgba(0,0,0,0)}body{font-family:"Roboto","Helvetica 
Neue",Helvetica,Arial,sans-serif;font-size:14px;line-height:1.42857143;color:#999;background-color:#252830}input,button,select,textarea{font-family:inherit;font-size:inherit;line-height:inherit}a{color:#98e8ff;text-decoration:none}a:hover,a:focus{color:#4cd7ff;text-decoration:underline}a:focus{outline:5px auto -webkit-focus-ring-color;outline-offset:-2px}figure{margin:0}img{vertical-align:middle}.img-responsive,.thumbnail>img,.thumbnail a>img,.carousel-inner>.item>img,.carousel-inner>.item>a>img{display:block;max-width:100%;height:auto}.img-rounded{border-radius:0}.img-thumbnail{padding:4px;line-height:1.42857143;background-color:#252830;border:1px solid #ddd;border-radius:0;-webkit-transition:all .2s ease-in-out;-o-transition:all .2s ease-in-out;transition:all .2s ease-in-out;display:inline-block;max-width:100%;height:auto}.img-circle{border-radius:50%}hr{margin-top:20px;margin-bottom:20px;border:0;border-top:1px solid #555}.sr-only{position:absolute;width:1px;height:1px;padding:0;margin:-1px;overflow:hidden;clip:rect(0, 0, 0, 0);border:0}.sr-only-focusable:active,.sr-only-focusable:focus{position:static;width:auto;height:auto;margin:0;overflow:visible;clip:auto}[role="button"]{cursor:pointer}h1,h2,h3,h4,h5,h6,.h1,.h2,.h3,.h4,.h5,.h6{font-family:inherit;font-weight:500;line-height:1.1;color:inherit}h1 small,h2 small,h3 small,h4 small,h5 small,h6 small,.h1 small,.h2 small,.h3 small,.h4 small,.h5 small,.h6 small,h1 .small,h2 .small,h3 .small,h4 .small,h5 .small,h6 .small,.h1 .small,.h2 .small,.h3 .small,.h4 .small,.h5 .small,.h6 .small{font-weight:400;line-height:1;color:#222}h1,.h1,h2,.h2,h3,.h3{margin-top:20px;margin-bottom:10px}h1 small,.h1 small,h2 small,.h2 small,h3 small,.h3 small,h1 .small,.h1 .small,h2 .small,.h2 .small,h3 .small,.h3 .small{font-size:65%}h4,.h4,h5,.h5,h6,.h6{margin-top:10px;margin-bottom:10px}h4 small,.h4 small,h5 small,.h5 small,h6 small,.h6 small,h4 .small,.h4 .small,h5 .small,.h5 .small,h6 .small,.h6 
.small{font-size:75%}h1,.h1{font-size:36px}h2,.h2{font-size:30px}h3,.h3{font-size:24px}h4,.h4{font-size:18px}h5,.h5{font-size:14px}h6,.h6{font-size:12px}p{margin:0 0 10px}.lead{margin-bottom:20px;font-size:16px;font-weight:300;line-height:1.4}@media (min-width:768px){.lead{font-size:21px}}small,.small{font-size:85%}mark,.mark{padding:.2em;background-color:#fcf8e3}.text-left{text-align:left}.text-right{text-align:right}.text-center{text-align:center}.text-justify{text-align:justify}.text-nowrap{white-space:nowrap}.text-lowercase{text-transform:lowercase}.text-uppercase{text-transform:uppercase}.text-capitalize{text-transform:capitalize}.text-muted{color:#222}.text-primary{color:#98e8ff}a.text-primary:hover,a.text-primary:focus{color:#65ddff}.text-success{color:#3c763d}a.text-success:hover,a.text-success:focus{color:#2b542c}.text-info{color:#31708f}a.text-info:hover,a.text-info:focus{color:#245269}.text-warning{color:#8a6d3b}a.text-warning:hover,a.text-warning:focus{color:#66512c}.text-danger{color:#a94442}a.text-danger:hover,a.text-danger:focus{color:#843534}.bg-primary{color:#fff;background-color:#98e8ff}a.bg-primary:hover,a.bg-primary:focus{background-color:#65ddff}.bg-success{background-color:#dff0d8}a.bg-success:hover,a.bg-success:focus{background-color:#c1e2b3}.bg-info{background-color:#d9edf7}a.bg-info:hover,a.bg-info:focus{background-color:#afd9ee}.bg-warning{background-color:#fcf8e3}a.bg-warning:hover,a.bg-warning:focus{background-color:#f7ecb5}.bg-danger{background-color:#f2dede}a.bg-danger:hover,a.bg-danger:focus{background-color:#e4b9b9}.page-header{padding-bottom:9px;margin:40px 0 20px;border-bottom:1px solid #555}ul,ol{margin-top:0;margin-bottom:10px}ul ul,ol ul,ul ol,ol 
ol{margin-bottom:0}.list-unstyled{padding-left:0;list-style:none}.list-inline{padding-left:0;list-style:none;margin-left:-5px}.list-inline>li{display:inline-block;padding-right:5px;padding-left:5px}dl{margin-top:0;margin-bottom:20px}dt,dd{line-height:1.42857143}dt{font-weight:700}dd{margin-left:0}@media (min-width:768px){.dl-horizontal dt{float:left;width:160px;clear:left;text-align:right;overflow:hidden;text-overflow:ellipsis;white-space:nowrap}.dl-horizontal dd{margin-left:180px}}abbr[title],abbr[data-original-title]{cursor:help}.initialism{font-size:90%;text-transform:uppercase}blockquote{padding:10px 20px;margin:0 0 20px;font-size:17.5px;border-left:5px solid #555}blockquote p:last-child,blockquote ul:last-child,blockquote ol:last-child{margin-bottom:0}blockquote footer,blockquote small,blockquote .small{display:block;font-size:80%;line-height:1.42857143;color:#222}blockquote footer:before,blockquote small:before,blockquote .small:before{content:"\2014 \00A0"}.blockquote-reverse,blockquote.pull-right{padding-right:15px;padding-left:0;text-align:right;border-right:5px solid #555;border-left:0}.blockquote-reverse footer:before,blockquote.pull-right footer:before,.blockquote-reverse small:before,blockquote.pull-right small:before,.blockquote-reverse .small:before,blockquote.pull-right .small:before{content:""}.blockquote-reverse footer:after,blockquote.pull-right footer:after,.blockquote-reverse small:after,blockquote.pull-right small:after,.blockquote-reverse .small:after,blockquote.pull-right .small:after{content:"\00A0 \2014"}address{margin-bottom:20px;font-style:normal;line-height:1.42857143}code,kbd,pre,samp{font-family:Menlo,Monaco,Consolas,"Courier New",monospace}code{padding:2px 4px;font-size:90%;color:#c7254e;background-color:#f9f2f4;border-radius:0}kbd{padding:2px 4px;font-size:90%;color:#fff;background-color:#333;border-radius:0;-webkit-box-shadow:inset 0 -1px 0 rgba(0,0,0,0.25);box-shadow:inset 0 -1px 0 rgba(0,0,0,0.25)}kbd 
kbd{padding:0;font-size:100%;font-weight:700;-webkit-box-shadow:none;box-shadow:none}pre{display:block;padding:9.5px;margin:0 0 10px;font-size:13px;line-height:1.42857143;color:#999;word-break:break-all;word-wrap:break-word;background-color:#f5f5f5;border:1px solid #ccc;border-radius:0}pre code{padding:0;font-size:inherit;color:inherit;white-space:pre-wrap;background-color:transparent;border-radius:0}.pre-scrollable{max-height:340px;overflow-y:scroll}.container{padding-right:15px;padding-left:15px;margin-right:auto;margin-left:auto}@media (min-width:768px){.container{width:750px}}@media (min-width:992px){.container{width:970px}}@media (min-width:1200px){.container{width:1170px}}.container-fluid{padding-right:15px;padding-left:15px;margin-right:auto;margin-left:auto}.row{margin-right:-15px;margin-left:-15px}.row-no-gutters{margin-right:0;margin-left:0}.row-no-gutters [class*="col-"]{padding-right:0;padding-left:0}.col-xs-1, .col-sm-1, .col-md-1, .col-lg-1, .col-xs-2, .col-sm-2, .col-md-2, .col-lg-2, .col-xs-3, .col-sm-3, .col-md-3, .col-lg-3, .col-xs-4, .col-sm-4, .col-md-4, .col-lg-4, .col-xs-5, .col-sm-5, .col-md-5, .col-lg-5, .col-xs-6, .col-sm-6, .col-md-6, .col-lg-6, .col-xs-7, .col-sm-7, .col-md-7, .col-lg-7, .col-xs-8, .col-sm-8, .col-md-8, .col-lg-8, .col-xs-9, .col-sm-9, .col-md-9, .col-lg-9, .col-xs-10, .col-sm-10, .col-md-10, .col-lg-10, .col-xs-11, .col-sm-11, .col-md-11, .col-lg-11, .col-xs-12, .col-sm-12, .col-md-12, .col-lg-12{position:relative;min-height:1px;padding-right:15px;padding-left:15px}.col-xs-1, .col-xs-2, .col-xs-3, .col-xs-4, .col-xs-5, .col-xs-6, .col-xs-7, .col-xs-8, .col-xs-9, .col-xs-10, .col-xs-11, 
.col-xs-12{float:left}.col-xs-12{width:100%}.col-xs-11{width:91.66666667%}.col-xs-10{width:83.33333333%}.col-xs-9{width:75%}.col-xs-8{width:66.66666667%}.col-xs-7{width:58.33333333%}.col-xs-6{width:50%}.col-xs-5{width:41.66666667%}.col-xs-4{width:33.33333333%}.col-xs-3{width:25%}.col-xs-2{width:16.66666667%}.col-xs-1{width:8.33333333%}.col-xs-pull-12{right:100%}.col-xs-pull-11{right:91.66666667%}.col-xs-pull-10{right:83.33333333%}.col-xs-pull-9{right:75%}.col-xs-pull-8{right:66.66666667%}.col-xs-pull-7{right:58.33333333%}.col-xs-pull-6{right:50%}.col-xs-pull-5{right:41.66666667%}.col-xs-pull-4{right:33.33333333%}.col-xs-pull-3{right:25%}.col-xs-pull-2{right:16.66666667%}.col-xs-pull-1{right:8.33333333%}.col-xs-pull-0{right:auto}.col-xs-push-12{left:100%}.col-xs-push-11{left:91.66666667%}.col-xs-push-10{left:83.33333333%}.col-xs-push-9{left:75%}.col-xs-push-8{left:66.66666667%}.col-xs-push-7{left:58.33333333%}.col-xs-push-6{left:50%}.col-xs-push-5{left:41.66666667%}.col-xs-push-4{left:33.33333333%}.col-xs-push-3{left:25%}.col-xs-push-2{left:16.66666667%}.col-xs-push-1{left:8.33333333%}.col-xs-push-0{left:auto}.col-xs-offset-12{margin-left:100%}.col-xs-offset-11{margin-left:91.66666667%}.col-xs-offset-10{margin-left:83.33333333%}.col-xs-offset-9{margin-left:75%}.col-xs-offset-8{margin-left:66.66666667%}.col-xs-offset-7{margin-left:58.33333333%}.col-xs-offset-6{margin-left:50%}.col-xs-offset-5{margin-left:41.66666667%}.col-xs-offset-4{margin-left:33.33333333%}.col-xs-offset-3{margin-left:25%}.col-xs-offset-2{margin-left:16.66666667%}.col-xs-offset-1{margin-left:8.33333333%}.col-xs-offset-0{margin-left:0}@media (min-width:768px){.col-sm-1, .col-sm-2, .col-sm-3, .col-sm-4, .col-sm-5, .col-sm-6, .col-sm-7, .col-sm-8, .col-sm-9, .col-sm-10, .col-sm-11, 
.col-sm-12{float:left}.col-sm-12{width:100%}.col-sm-11{width:91.66666667%}.col-sm-10{width:83.33333333%}.col-sm-9{width:75%}.col-sm-8{width:66.66666667%}.col-sm-7{width:58.33333333%}.col-sm-6{width:50%}.col-sm-5{width:41.66666667%}.col-sm-4{width:33.33333333%}.col-sm-3{width:25%}.col-sm-2{width:16.66666667%}.col-sm-1{width:8.33333333%}.col-sm-pull-12{right:100%}.col-sm-pull-11{right:91.66666667%}.col-sm-pull-10{right:83.33333333%}.col-sm-pull-9{right:75%}.col-sm-pull-8{right:66.66666667%}.col-sm-pull-7{right:58.33333333%}.col-sm-pull-6{right:50%}.col-sm-pull-5{right:41.66666667%}.col-sm-pull-4{right:33.33333333%}.col-sm-pull-3{right:25%}.col-sm-pull-2{right:16.66666667%}.col-sm-pull-1{right:8.33333333%}.col-sm-pull-0{right:auto}.col-sm-push-12{left:100%}.col-sm-push-11{left:91.66666667%}.col-sm-push-10{left:83.33333333%}.col-sm-push-9{left:75%}.col-sm-push-8{left:66.66666667%}.col-sm-push-7{left:58.33333333%}.col-sm-push-6{left:50%}.col-sm-push-5{left:41.66666667%}.col-sm-push-4{left:33.33333333%}.col-sm-push-3{left:25%}.col-sm-push-2{left:16.66666667%}.col-sm-push-1{left:8.33333333%}.col-sm-push-0{left:auto}.col-sm-offset-12{margin-left:100%}.col-sm-offset-11{margin-left:91.66666667%}.col-sm-offset-10{margin-left:83.33333333%}.col-sm-offset-9{margin-left:75%}.col-sm-offset-8{margin-left:66.66666667%}.col-sm-offset-7{margin-left:58.33333333%}.col-sm-offset-6{margin-left:50%}.col-sm-offset-5{margin-left:41.66666667%}.col-sm-offset-4{margin-left:33.33333333%}.col-sm-offset-3{margin-left:25%}.col-sm-offset-2{margin-left:16.66666667%}.col-sm-offset-1{margin-left:8.33333333%}.col-sm-offset-0{margin-left:0}}@media (min-width:992px){.col-md-1, .col-md-2, .col-md-3, .col-md-4, .col-md-5, .col-md-6, .col-md-7, .col-md-8, .col-md-9, .col-md-10, .col-md-11, 
.col-md-12{float:left}.col-md-12{width:100%}.col-md-11{width:91.66666667%}.col-md-10{width:83.33333333%}.col-md-9{width:75%}.col-md-8{width:66.66666667%}.col-md-7{width:58.33333333%}.col-md-6{width:50%}.col-md-5{width:41.66666667%}.col-md-4{width:33.33333333%}.col-md-3{width:25%}.col-md-2{width:16.66666667%}.col-md-1{width:8.33333333%}.col-md-pull-12{right:100%}.col-md-pull-11{right:91.66666667%}.col-md-pull-10{right:83.33333333%}.col-md-pull-9{right:75%}.col-md-pull-8{right:66.66666667%}.col-md-pull-7{right:58.33333333%}.col-md-pull-6{right:50%}.col-md-pull-5{right:41.66666667%}.col-md-pull-4{right:33.33333333%}.col-md-pull-3{right:25%}.col-md-pull-2{right:16.66666667%}.col-md-pull-1{right:8.33333333%}.col-md-pull-0{right:auto}.col-md-push-12{left:100%}.col-md-push-11{left:91.66666667%}.col-md-push-10{left:83.33333333%}.col-md-push-9{left:75%}.col-md-push-8{left:66.66666667%}.col-md-push-7{left:58.33333333%}.col-md-push-6{left:50%}.col-md-push-5{left:41.66666667%}.col-md-push-4{left:33.33333333%}.col-md-push-3{left:25%}.col-md-push-2{left:16.66666667%}.col-md-push-1{left:8.33333333%}.col-md-push-0{left:auto}.col-md-offset-12{margin-left:100%}.col-md-offset-11{margin-left:91.66666667%}.col-md-offset-10{margin-left:83.33333333%}.col-md-offset-9{margin-left:75%}.col-md-offset-8{margin-left:66.66666667%}.col-md-offset-7{margin-left:58.33333333%}.col-md-offset-6{margin-left:50%}.col-md-offset-5{margin-left:41.66666667%}.col-md-offset-4{margin-left:33.33333333%}.col-md-offset-3{margin-left:25%}.col-md-offset-2{margin-left:16.66666667%}.col-md-offset-1{margin-left:8.33333333%}.col-md-offset-0{margin-left:0}}@media (min-width:1200px){.col-lg-1, .col-lg-2, .col-lg-3, .col-lg-4, .col-lg-5, .col-lg-6, .col-lg-7, .col-lg-8, .col-lg-9, .col-lg-10, .col-lg-11, 
.col-lg-12{float:left}.col-lg-12{width:100%}.col-lg-11{width:91.66666667%}.col-lg-10{width:83.33333333%}.col-lg-9{width:75%}.col-lg-8{width:66.66666667%}.col-lg-7{width:58.33333333%}.col-lg-6{width:50%}.col-lg-5{width:41.66666667%}.col-lg-4{width:33.33333333%}.col-lg-3{width:25%}.col-lg-2{width:16.66666667%}.col-lg-1{width:8.33333333%}.col-lg-pull-12{right:100%}.col-lg-pull-11{right:91.66666667%}.col-lg-pull-10{right:83.33333333%}.col-lg-pull-9{right:75%}.col-lg-pull-8{right:66.66666667%}.col-lg-pull-7{right:58.33333333%}.col-lg-pull-6{right:50%}.col-lg-pull-5{right:41.66666667%}.col-lg-pull-4{right:33.33333333%}.col-lg-pull-3{right:25%}.col-lg-pull-2{right:16.66666667%}.col-lg-pull-1{right:8.33333333%}.col-lg-pull-0{right:auto}.col-lg-push-12{left:100%}.col-lg-push-11{left:91.66666667%}.col-lg-push-10{left:83.33333333%}.col-lg-push-9{left:75%}.col-lg-push-8{left:66.66666667%}.col-lg-push-7{left:58.33333333%}.col-lg-push-6{left:50%}.col-lg-push-5{left:41.66666667%}.col-lg-push-4{left:33.33333333%}.col-lg-push-3{left:25%}.col-lg-push-2{left:16.66666667%}.col-lg-push-1{left:8.33333333%}.col-lg-push-0{left:auto}.col-lg-offset-12{margin-left:100%}.col-lg-offset-11{margin-left:91.66666667%}.col-lg-offset-10{margin-left:83.33333333%}.col-lg-offset-9{margin-left:75%}.col-lg-offset-8{margin-left:66.66666667%}.col-lg-offset-7{margin-left:58.33333333%}.col-lg-offset-6{margin-left:50%}.col-lg-offset-5{margin-left:41.66666667%}.col-lg-offset-4{margin-left:33.33333333%}.col-lg-offset-3{margin-left:25%}.col-lg-offset-2{margin-left:16.66666667%}.col-lg-offset-1{margin-left:8.33333333%}.col-lg-offset-0{margin-left:0}}table{background-color:transparent}table col[class*="col-"]{position:static;display:table-column;float:none}table td[class*="col-"],table 
th[class*="col-"]{position:static;display:table-cell;float:none}caption{padding-top:8px;padding-bottom:8px;color:#222;text-align:left}th{text-align:left}.table{width:100%;max-width:100%;margin-bottom:20px}.table>thead>tr>th,.table>tbody>tr>th,.table>tfoot>tr>th,.table>thead>tr>td,.table>tbody>tr>td,.table>tfoot>tr>td{padding:8px;line-height:1.42857143;vertical-align:top;border-top:1px solid #ddd}.table>thead>tr>th{vertical-align:bottom;border-bottom:2px solid #ddd}.table>caption+thead>tr:first-child>th,.table>colgroup+thead>tr:first-child>th,.table>thead:first-child>tr:first-child>th,.table>caption+thead>tr:first-child>td,.table>colgroup+thead>tr:first-child>td,.table>thead:first-child>tr:first-child>td{border-top:0}.table>tbody+tbody{border-top:2px solid #ddd}.table .table{background-color:#252830}.table-condensed>thead>tr>th,.table-condensed>tbody>tr>th,.table-condensed>tfoot>tr>th,.table-condensed>thead>tr>td,.table-condensed>tbody>tr>td,.table-condensed>tfoot>tr>td{padding:5px}.table-bordered{border:1px solid #ddd}.table-bordered>thead>tr>th,.table-bordered>tbody>tr>th,.table-bordered>tfoot>tr>th,.table-bordered>thead>tr>td,.table-bordered>tbody>tr>td,.table-bordered>tfoot>tr>td{border:1px solid 
#ddd}.table-bordered>thead>tr>th,.table-bordered>thead>tr>td{border-bottom-width:2px}.table-striped>tbody>tr:nth-of-type(odd){background-color:#f9f9f9}.table-hover>tbody>tr:hover{background-color:#f5f5f5}.table>thead>tr>td.active,.table>tbody>tr>td.active,.table>tfoot>tr>td.active,.table>thead>tr>th.active,.table>tbody>tr>th.active,.table>tfoot>tr>th.active,.table>thead>tr.active>td,.table>tbody>tr.active>td,.table>tfoot>tr.active>td,.table>thead>tr.active>th,.table>tbody>tr.active>th,.table>tfoot>tr.active>th{background-color:#f5f5f5}.table-hover>tbody>tr>td.active:hover,.table-hover>tbody>tr>th.active:hover,.table-hover>tbody>tr.active:hover>td,.table-hover>tbody>tr:hover>.active,.table-hover>tbody>tr.active:hover>th{background-color:#e8e8e8}.table>thead>tr>td.success,.table>tbody>tr>td.success,.table>tfoot>tr>td.success,.table>thead>tr>th.success,.table>tbody>tr>th.success,.table>tfoot>tr>th.success,.table>thead>tr.success>td,.table>tbody>tr.success>td,.table>tfoot>tr.success>td,.table>thead>tr.success>th,.table>tbody>tr.success>th,.table>tfoot>tr.success>th{background-color:#dff0d8}.table-hover>tbody>tr>td.success:hover,.table-hover>tbody>tr>th.success:hover,.table-hover>tbody>tr.success:hover>td,.table-hover>tbody>tr:hover>.success,.table-hover>tbody>tr.success:hover>th{background-color:#d0e9c6}.table>thead>tr>td.info,.table>tbody>tr>td.info,.table>tfoot>tr>td.info,.table>thead>tr>th.info,.table>tbody>tr>th.info,.table>tfoot>tr>th.info,.table>thead>tr.info>td,.table>tbody>tr.info>td,.table>tfoot>tr.info>td,.table>thead>tr.info>th,.table>tbody>tr.info>th,.table>tfoot>tr.info>th{background-color:#d9edf7}.table-hover>tbody>tr>td.info:hover,.table-hover>tbody>tr>th.info:hover,.table-hover>tbody>tr.info:hover>td,.table-hover>tbody>tr:hover>.info,.table-hover>tbody>tr.info:hover>th{background-color:#c4e3f3}.table>thead>tr>td.warning,.table>tbody>tr>td.warning,.table>tfoot>tr>td.warning,.table>thead>tr>th.warning,.table>tbody>tr>th.warning,.table>tfoot>tr>th.warning,.
table>thead>tr.warning>td,.table>tbody>tr.warning>td,.table>tfoot>tr.warning>td,.table>thead>tr.warning>th,.table>tbody>tr.warning>th,.table>tfoot>tr.warning>th{background-color:#fcf8e3}.table-hover>tbody>tr>td.warning:hover,.table-hover>tbody>tr>th.warning:hover,.table-hover>tbody>tr.warning:hover>td,.table-hover>tbody>tr:hover>.warning,.table-hover>tbody>tr.warning:hover>th{background-color:#faf2cc}.table>thead>tr>td.danger,.table>tbody>tr>td.danger,.table>tfoot>tr>td.danger,.table>thead>tr>th.danger,.table>tbody>tr>th.danger,.table>tfoot>tr>th.danger,.table>thead>tr.danger>td,.table>tbody>tr.danger>td,.table>tfoot>tr.danger>td,.table>thead>tr.danger>th,.table>tbody>tr.danger>th,.table>tfoot>tr.danger>th{background-color:#f2dede}.table-hover>tbody>tr>td.danger:hover,.table-hover>tbody>tr>th.danger:hover,.table-hover>tbody>tr.danger:hover>td,.table-hover>tbody>tr:hover>.danger,.table-hover>tbody>tr.danger:hover>th{background-color:#ebcccc}.table-responsive{min-height:.01%;overflow-x:auto}@media screen and (max-width:767px){.table-responsive{width:100%;margin-bottom:15px;overflow-y:hidden;-ms-overflow-style:-ms-autohiding-scrollbar;border:1px solid 
#ddd}.table-responsive>.table{margin-bottom:0}.table-responsive>.table>thead>tr>th,.table-responsive>.table>tbody>tr>th,.table-responsive>.table>tfoot>tr>th,.table-responsive>.table>thead>tr>td,.table-responsive>.table>tbody>tr>td,.table-responsive>.table>tfoot>tr>td{white-space:nowrap}.table-responsive>.table-bordered{border:0}.table-responsive>.table-bordered>thead>tr>th:first-child,.table-responsive>.table-bordered>tbody>tr>th:first-child,.table-responsive>.table-bordered>tfoot>tr>th:first-child,.table-responsive>.table-bordered>thead>tr>td:first-child,.table-responsive>.table-bordered>tbody>tr>td:first-child,.table-responsive>.table-bordered>tfoot>tr>td:first-child{border-left:0}.table-responsive>.table-bordered>thead>tr>th:last-child,.table-responsive>.table-bordered>tbody>tr>th:last-child,.table-responsive>.table-bordered>tfoot>tr>th:last-child,.table-responsive>.table-bordered>thead>tr>td:last-child,.table-responsive>.table-bordered>tbody>tr>td:last-child,.table-responsive>.table-bordered>tfoot>tr>td:last-child{border-right:0}.table-responsive>.table-bordered>tbody>tr:last-child>th,.table-responsive>.table-bordered>tfoot>tr:last-child>th,.table-responsive>.table-bordered>tbody>tr:last-child>td,.table-responsive>.table-bordered>tfoot>tr:last-child>td{border-bottom:0}}fieldset{min-width:0;padding:0;margin:0;border:0}legend{display:block;width:100%;padding:0;margin-bottom:20px;font-size:21px;line-height:inherit;color:#999;border:0;border-bottom:1px solid #e5e5e5}label{display:inline-block;max-width:100%;margin-bottom:5px;font-weight:700}input[type="search"]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;-webkit-appearance:none;appearance:none}input[type="radio"],input[type="checkbox"]{margin:4px 0 0;margin-top:1px \9;line-height:normal}input[type="radio"][disabled],input[type="checkbox"][disabled],input[type="radio"].disabled,input[type="checkbox"].disabled,fieldset[disabled] input[type="radio"],fieldset[disabled] 
input[type="checkbox"]{cursor:not-allowed}input[type="file"]{display:block}input[type="range"]{display:block;width:100%}select[multiple],select[size]{height:auto}input[type="file"]:focus,input[type="radio"]:focus,input[type="checkbox"]:focus{outline:5px auto -webkit-focus-ring-color;outline-offset:-2px}output{display:block;padding-top:7px;font-size:14px;line-height:1.42857143;color:#000}.form-control{display:block;width:100%;height:34px;padding:6px 12px;font-size:14px;line-height:1.42857143;color:#000;background-color:#fff;background-image:none;border:1px solid #ccc;border-radius:0;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);-webkit-transition:border-color ease-in-out .15s, -webkit-box-shadow ease-in-out .15s;-o-transition:border-color ease-in-out .15s, box-shadow ease-in-out .15s;transition:border-color ease-in-out .15s, box-shadow ease-in-out .15s}.form-control:focus{border-color:#66afe9;outline:0;-webkit-box-shadow:inset 0 1px 1px rgba(0, 0, 0, .075), 0 0 8px rgba(102, 175, 233, 0.6);box-shadow:inset 0 1px 1px rgba(0, 0, 0, .075), 0 0 8px rgba(102, 175, 233, 0.6)}.form-control::-moz-placeholder{color:#999;opacity:1}.form-control:-ms-input-placeholder{color:#999}.form-control::-webkit-input-placeholder{color:#999}.form-control::-ms-expand{background-color:transparent;border:0}.form-control[disabled],.form-control[readonly],fieldset[disabled] .form-control{background-color:#333;opacity:1}.form-control[disabled],fieldset[disabled] .form-control{cursor:not-allowed}textarea.form-control{height:auto}@media screen and (-webkit-min-device-pixel-ratio:0){input[type="date"].form-control,input[type="time"].form-control,input[type="datetime-local"].form-control,input[type="month"].form-control{line-height:34px}input[type="date"].input-sm,input[type="time"].input-sm,input[type="datetime-local"].input-sm,input[type="month"].input-sm,.input-group-sm input[type="date"],.input-group-sm input[type="time"],.input-group-sm 
input[type="datetime-local"],.input-group-sm input[type="month"]{line-height:30px}input[type="date"].input-lg,input[type="time"].input-lg,input[type="datetime-local"].input-lg,input[type="month"].input-lg,.input-group-lg input[type="date"],.input-group-lg input[type="time"],.input-group-lg input[type="datetime-local"],.input-group-lg input[type="month"]{line-height:46px}}.form-group{margin-bottom:15px}.radio,.checkbox{position:relative;display:block;margin-top:10px;margin-bottom:10px}.radio.disabled label,.checkbox.disabled label,fieldset[disabled] .radio label,fieldset[disabled] .checkbox label{cursor:not-allowed}.radio label,.checkbox label{min-height:20px;padding-left:20px;margin-bottom:0;font-weight:400;cursor:pointer}.radio input[type="radio"],.radio-inline input[type="radio"],.checkbox input[type="checkbox"],.checkbox-inline input[type="checkbox"]{position:absolute;margin-top:4px \9;margin-left:-20px}.radio+.radio,.checkbox+.checkbox{margin-top:-5px}.radio-inline,.checkbox-inline{position:relative;display:inline-block;padding-left:20px;margin-bottom:0;font-weight:400;vertical-align:middle;cursor:pointer}.radio-inline.disabled,.checkbox-inline.disabled,fieldset[disabled] .radio-inline,fieldset[disabled] .checkbox-inline{cursor:not-allowed}.radio-inline+.radio-inline,.checkbox-inline+.checkbox-inline{margin-top:0;margin-left:10px}.form-control-static{min-height:34px;padding-top:7px;padding-bottom:7px;margin-bottom:0}.form-control-static.input-lg,.form-control-static.input-sm{padding-right:0;padding-left:0}.input-sm{height:30px;padding:5px 10px;font-size:12px;line-height:1.5;border-radius:0}select.input-sm{height:30px;line-height:30px}textarea.input-sm,select[multiple].input-sm{height:auto}.form-group-sm .form-control{height:30px;padding:5px 10px;font-size:12px;line-height:1.5;border-radius:0}.form-group-sm select.form-control{height:30px;line-height:30px}.form-group-sm textarea.form-control,.form-group-sm select[multiple].form-control{height:auto}.form-group-sm 
.form-control-static{height:30px;min-height:32px;padding:6px 10px;font-size:12px;line-height:1.5}.input-lg{height:46px;padding:10px 16px;font-size:18px;line-height:1.3333333;border-radius:0}select.input-lg{height:46px;line-height:46px}textarea.input-lg,select[multiple].input-lg{height:auto}.form-group-lg .form-control{height:46px;padding:10px 16px;font-size:18px;line-height:1.3333333;border-radius:0}.form-group-lg select.form-control{height:46px;line-height:46px}.form-group-lg textarea.form-control,.form-group-lg select[multiple].form-control{height:auto}.form-group-lg .form-control-static{height:46px;min-height:38px;padding:11px 16px;font-size:18px;line-height:1.3333333}.has-feedback{position:relative}.has-feedback .form-control{padding-right:42.5px}.form-control-feedback{position:absolute;top:0;right:0;z-index:2;display:block;width:34px;height:34px;line-height:34px;text-align:center;pointer-events:none}.input-lg+.form-control-feedback,.input-group-lg+.form-control-feedback,.form-group-lg .form-control+.form-control-feedback{width:46px;height:46px;line-height:46px}.input-sm+.form-control-feedback,.input-group-sm+.form-control-feedback,.form-group-sm .form-control+.form-control-feedback{width:30px;height:30px;line-height:30px}.has-success .help-block,.has-success .control-label,.has-success .radio,.has-success .checkbox,.has-success .radio-inline,.has-success .checkbox-inline,.has-success.radio label,.has-success.checkbox label,.has-success.radio-inline label,.has-success.checkbox-inline label{color:#3c763d}.has-success .form-control{border-color:#3c763d;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075)}.has-success .form-control:focus{border-color:#2b542c;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #67b168;box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #67b168}.has-success .input-group-addon{color:#3c763d;background-color:#dff0d8;border-color:#3c763d}.has-success 
.form-control-feedback{color:#3c763d}.has-warning .help-block,.has-warning .control-label,.has-warning .radio,.has-warning .checkbox,.has-warning .radio-inline,.has-warning .checkbox-inline,.has-warning.radio label,.has-warning.checkbox label,.has-warning.radio-inline label,.has-warning.checkbox-inline label{color:#8a6d3b}.has-warning .form-control{border-color:#8a6d3b;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075)}.has-warning .form-control:focus{border-color:#66512c;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #c0a16b;box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #c0a16b}.has-warning .input-group-addon{color:#8a6d3b;background-color:#fcf8e3;border-color:#8a6d3b}.has-warning .form-control-feedback{color:#8a6d3b}.has-error .help-block,.has-error .control-label,.has-error .radio,.has-error .checkbox,.has-error .radio-inline,.has-error .checkbox-inline,.has-error.radio label,.has-error.checkbox label,.has-error.radio-inline label,.has-error.checkbox-inline label{color:#a94442}.has-error .form-control{border-color:#a94442;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075)}.has-error .form-control:focus{border-color:#843534;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #ce8483;box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #ce8483}.has-error .input-group-addon{color:#a94442;background-color:#f2dede;border-color:#a94442}.has-error .form-control-feedback{color:#a94442}.has-feedback label~.form-control-feedback{top:25px}.has-feedback label.sr-only~.form-control-feedback{top:0}.help-block{display:block;margin-top:5px;margin-bottom:10px;color:#d9d9d9}@media (min-width:768px){.form-inline .form-group{display:inline-block;margin-bottom:0;vertical-align:middle}.form-inline .form-control{display:inline-block;width:auto;vertical-align:middle}.form-inline .form-control-static{display:inline-block}.form-inline 
.input-group{display:inline-table;vertical-align:middle}.form-inline .input-group .input-group-addon,.form-inline .input-group .input-group-btn,.form-inline .input-group .form-control{width:auto}.form-inline .input-group>.form-control{width:100%}.form-inline .control-label{margin-bottom:0;vertical-align:middle}.form-inline .radio,.form-inline .checkbox{display:inline-block;margin-top:0;margin-bottom:0;vertical-align:middle}.form-inline .radio label,.form-inline .checkbox label{padding-left:0}.form-inline .radio input[type="radio"],.form-inline .checkbox input[type="checkbox"]{position:relative;margin-left:0}.form-inline .has-feedback .form-control-feedback{top:0}}.form-horizontal .radio,.form-horizontal .checkbox,.form-horizontal .radio-inline,.form-horizontal .checkbox-inline{padding-top:7px;margin-top:0;margin-bottom:0}.form-horizontal .radio,.form-horizontal .checkbox{min-height:27px}.form-horizontal .form-group{margin-right:-15px;margin-left:-15px}@media (min-width:768px){.form-horizontal .control-label{padding-top:7px;margin-bottom:0;text-align:right}}.form-horizontal .has-feedback .form-control-feedback{right:15px}@media (min-width:768px){.form-horizontal .form-group-lg .control-label{padding-top:11px;font-size:18px}}@media (min-width:768px){.form-horizontal .form-group-sm .control-label{padding-top:6px;font-size:12px}}.btn{display:inline-block;margin-bottom:0;font-weight:normal;text-align:center;white-space:nowrap;vertical-align:middle;-ms-touch-action:manipulation;touch-action:manipulation;cursor:pointer;background-image:none;border:1px solid transparent;padding:6px 12px;font-size:14px;line-height:1.42857143;border-radius:0;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none}.btn:focus,.btn:active:focus,.btn.active:focus,.btn.focus,.btn:active.focus,.btn.active.focus{outline:5px auto 
-webkit-focus-ring-color;outline-offset:-2px}.btn:hover,.btn:focus,.btn.focus{color:#333;text-decoration:none}.btn:active,.btn.active{background-image:none;outline:0;-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,0.125);box-shadow:inset 0 3px 5px rgba(0,0,0,0.125)}.btn.disabled,.btn[disabled],fieldset[disabled] .btn{cursor:not-allowed;filter:alpha(opacity=65);opacity:.65;-webkit-box-shadow:none;box-shadow:none}a.btn.disabled,fieldset[disabled] a.btn{pointer-events:none}.btn-default{color:#333;background-color:#fff;border-color:#ccc}.btn-default:focus,.btn-default.focus{color:#333;background-color:#e6e6e6;border-color:#8c8c8c}.btn-default:hover{color:#333;background-color:#e6e6e6;border-color:#adadad}.btn-default:active,.btn-default.active,.open>.dropdown-toggle.btn-default{color:#333;background-color:#e6e6e6;background-image:none;border-color:#adadad}.btn-default:active:hover,.btn-default.active:hover,.open>.dropdown-toggle.btn-default:hover,.btn-default:active:focus,.btn-default.active:focus,.open>.dropdown-toggle.btn-default:focus,.btn-default:active.focus,.btn-default.active.focus,.open>.dropdown-toggle.btn-default.focus{color:#333;background-color:#d4d4d4;border-color:#8c8c8c}.btn-default.disabled:hover,.btn-default[disabled]:hover,fieldset[disabled] .btn-default:hover,.btn-default.disabled:focus,.btn-default[disabled]:focus,fieldset[disabled] .btn-default:focus,.btn-default.disabled.focus,.btn-default[disabled].focus,fieldset[disabled] .btn-default.focus{background-color:#fff;border-color:#ccc}.btn-default 
.badge{color:#fff;background-color:#333}.btn-primary{color:#fff;background-color:#98e8ff;border-color:#7fe2ff}.btn-primary:focus,.btn-primary.focus{color:#fff;background-color:#65ddff;border-color:#00c5fe}.btn-primary:hover{color:#fff;background-color:#65ddff;border-color:#41d5ff}.btn-primary:active,.btn-primary.active,.open>.dropdown-toggle.btn-primary{color:#fff;background-color:#65ddff;background-image:none;border-color:#41d5ff}.btn-primary:active:hover,.btn-primary.active:hover,.open>.dropdown-toggle.btn-primary:hover,.btn-primary:active:focus,.btn-primary.active:focus,.open>.dropdown-toggle.btn-primary:focus,.btn-primary:active.focus,.btn-primary.active.focus,.open>.dropdown-toggle.btn-primary.focus{color:#fff;background-color:#41d5ff;border-color:#00c5fe}.btn-primary.disabled:hover,.btn-primary[disabled]:hover,fieldset[disabled] .btn-primary:hover,.btn-primary.disabled:focus,.btn-primary[disabled]:focus,fieldset[disabled] .btn-primary:focus,.btn-primary.disabled.focus,.btn-primary[disabled].focus,fieldset[disabled] .btn-primary.focus{background-color:#98e8ff;border-color:#7fe2ff}.btn-primary .badge{color:#98e8ff;background-color:#fff}.btn-success{color:#fff;background-color:#5cb85c;border-color:#4cae4c}.btn-success:focus,.btn-success.focus{color:#fff;background-color:#449d44;border-color:#255625}.btn-success:hover{color:#fff;background-color:#449d44;border-color:#398439}.btn-success:active,.btn-success.active,.open>.dropdown-toggle.btn-success{color:#fff;background-color:#449d44;background-image:none;border-color:#398439}.btn-success:active:hover,.btn-success.active:hover,.open>.dropdown-toggle.btn-success:hover,.btn-success:active:focus,.btn-success.active:focus,.open>.dropdown-toggle.btn-success:focus,.btn-success:active.focus,.btn-success.active.focus,.open>.dropdown-toggle.btn-success.focus{color:#fff;background-color:#398439;border-color:#255625}.btn-success.disabled:hover,.btn-success[disabled]:hover,fieldset[disabled] 
.btn-success:hover,.btn-success.disabled:focus,.btn-success[disabled]:focus,fieldset[disabled] .btn-success:focus,.btn-success.disabled.focus,.btn-success[disabled].focus,fieldset[disabled] .btn-success.focus{background-color:#5cb85c;border-color:#4cae4c}.btn-success .badge{color:#5cb85c;background-color:#fff}.btn-info{color:#fff;background-color:#5bc0de;border-color:#46b8da}.btn-info:focus,.btn-info.focus{color:#fff;background-color:#31b0d5;border-color:#1b6d85}.btn-info:hover{color:#fff;background-color:#31b0d5;border-color:#269abc}.btn-info:active,.btn-info.active,.open>.dropdown-toggle.btn-info{color:#fff;background-color:#31b0d5;background-image:none;border-color:#269abc}.btn-info:active:hover,.btn-info.active:hover,.open>.dropdown-toggle.btn-info:hover,.btn-info:active:focus,.btn-info.active:focus,.open>.dropdown-toggle.btn-info:focus,.btn-info:active.focus,.btn-info.active.focus,.open>.dropdown-toggle.btn-info.focus{color:#fff;background-color:#269abc;border-color:#1b6d85}.btn-info.disabled:hover,.btn-info[disabled]:hover,fieldset[disabled] .btn-info:hover,.btn-info.disabled:focus,.btn-info[disabled]:focus,fieldset[disabled] .btn-info:focus,.btn-info.disabled.focus,.btn-info[disabled].focus,fieldset[disabled] .btn-info.focus{background-color:#5bc0de;border-color:#46b8da}.btn-info 
.badge{color:#5bc0de;background-color:#fff}.btn-warning{color:#fff;background-color:#f0ad4e;border-color:#eea236}.btn-warning:focus,.btn-warning.focus{color:#fff;background-color:#ec971f;border-color:#985f0d}.btn-warning:hover{color:#fff;background-color:#ec971f;border-color:#d58512}.btn-warning:active,.btn-warning.active,.open>.dropdown-toggle.btn-warning{color:#fff;background-color:#ec971f;background-image:none;border-color:#d58512}.btn-warning:active:hover,.btn-warning.active:hover,.open>.dropdown-toggle.btn-warning:hover,.btn-warning:active:focus,.btn-warning.active:focus,.open>.dropdown-toggle.btn-warning:focus,.btn-warning:active.focus,.btn-warning.active.focus,.open>.dropdown-toggle.btn-warning.focus{color:#fff;background-color:#d58512;border-color:#985f0d}.btn-warning.disabled:hover,.btn-warning[disabled]:hover,fieldset[disabled] .btn-warning:hover,.btn-warning.disabled:focus,.btn-warning[disabled]:focus,fieldset[disabled] .btn-warning:focus,.btn-warning.disabled.focus,.btn-warning[disabled].focus,fieldset[disabled] .btn-warning.focus{background-color:#f0ad4e;border-color:#eea236}.btn-warning .badge{color:#f0ad4e;background-color:#fff}.btn-danger{color:#fff;background-color:#d9534f;border-color:#d43f3a}.btn-danger:focus,.btn-danger.focus{color:#fff;background-color:#c9302c;border-color:#761c19}.btn-danger:hover{color:#fff;background-color:#c9302c;border-color:#ac2925}.btn-danger:active,.btn-danger.active,.open>.dropdown-toggle.btn-danger{color:#fff;background-color:#c9302c;background-image:none;border-color:#ac2925}.btn-danger:active:hover,.btn-danger.active:hover,.open>.dropdown-toggle.btn-danger:hover,.btn-danger:active:focus,.btn-danger.active:focus,.open>.dropdown-toggle.btn-danger:focus,.btn-danger:active.focus,.btn-danger.active.focus,.open>.dropdown-toggle.btn-danger.focus{color:#fff;background-color:#ac2925;border-color:#761c19}.btn-danger.disabled:hover,.btn-danger[disabled]:hover,fieldset[disabled] 
.btn-danger:hover,.btn-danger.disabled:focus,.btn-danger[disabled]:focus,fieldset[disabled] .btn-danger:focus,.btn-danger.disabled.focus,.btn-danger[disabled].focus,fieldset[disabled] .btn-danger.focus{background-color:#d9534f;border-color:#d43f3a}.btn-danger .badge{color:#d9534f;background-color:#fff}.btn-link{font-weight:400;color:#98e8ff;border-radius:0}.btn-link,.btn-link:active,.btn-link.active,.btn-link[disabled],fieldset[disabled] .btn-link{background-color:transparent;-webkit-box-shadow:none;box-shadow:none}.btn-link,.btn-link:hover,.btn-link:focus,.btn-link:active{border-color:transparent}.btn-link:hover,.btn-link:focus{color:#4cd7ff;text-decoration:underline;background-color:transparent}.btn-link[disabled]:hover,fieldset[disabled] .btn-link:hover,.btn-link[disabled]:focus,fieldset[disabled] .btn-link:focus{color:#222;text-decoration:none}.btn-lg,.btn-group-lg>.btn{padding:10px 16px;font-size:18px;line-height:1.3333333;border-radius:0}.btn-sm,.btn-group-sm>.btn{padding:5px 10px;font-size:12px;line-height:1.5;border-radius:0}.btn-xs,.btn-group-xs>.btn{padding:1px 5px;font-size:12px;line-height:1.5;border-radius:0}.btn-block{display:block;width:100%}.btn-block+.btn-block{margin-top:5px}input[type="submit"].btn-block,input[type="reset"].btn-block,input[type="button"].btn-block{width:100%}.fade{opacity:0;-webkit-transition:opacity .15s linear;-o-transition:opacity .15s linear;transition:opacity .15s linear}.fade.in{opacity:1}.collapse{display:none}.collapse.in{display:block}tr.collapse.in{display:table-row}tbody.collapse.in{display:table-row-group}.collapsing{position:relative;height:0;overflow:hidden;-webkit-transition-property:height, visibility;-o-transition-property:height, visibility;transition-property:height, 
visibility;-webkit-transition-duration:.35s;-o-transition-duration:.35s;transition-duration:.35s;-webkit-transition-timing-function:ease;-o-transition-timing-function:ease;transition-timing-function:ease}.caret{display:inline-block;width:0;height:0;margin-left:2px;vertical-align:middle;border-top:4px dashed;border-top:4px solid \9;border-right:4px solid transparent;border-left:4px solid transparent}.dropup,.dropdown{position:relative}.dropdown-toggle:focus{outline:0}.dropdown-menu{position:absolute;top:100%;left:0;z-index:1000;display:none;float:left;min-width:160px;padding:5px 0;margin:2px 0 0;font-size:14px;text-align:left;list-style:none;background-color:#fff;-webkit-background-clip:padding-box;background-clip:padding-box;border:1px solid #ccc;border:1px solid rgba(0,0,0,0.15);border-radius:0;-webkit-box-shadow:0 6px 12px rgba(0,0,0,0.175);box-shadow:0 6px 12px rgba(0,0,0,0.175)}.dropdown-menu.pull-right{right:0;left:auto}.dropdown-menu .divider{height:1px;margin:9px 0;overflow:hidden;background-color:#e5e5e5}.dropdown-menu>li>a{display:block;padding:3px 20px;clear:both;font-weight:400;line-height:1.42857143;color:#424242;white-space:nowrap}.dropdown-menu>li>a:hover,.dropdown-menu>li>a:focus{color:#8c8c8c;text-decoration:none;background-color:#f5f5f5}.dropdown-menu>.active>a,.dropdown-menu>.active>a:hover,.dropdown-menu>.active>a:focus{color:#fff;text-decoration:none;background-color:#418194;outline:0}.dropdown-menu>.disabled>a,.dropdown-menu>.disabled>a:hover,.dropdown-menu>.disabled>a:focus{color:#222}.dropdown-menu>.disabled>a:hover,.dropdown-menu>.disabled>a:focus{text-decoration:none;cursor:not-allowed;background-color:transparent;background-image:none;filter:progid:DXImageTransform.Microsoft.gradient(enabled = false)}.open>.dropdown-menu{display:block}.open>a{outline:0}.dropdown-menu-right{right:0;left:auto}.dropdown-menu-left{right:auto;left:0}.dropdown-header{display:block;padding:3px 
20px;font-size:12px;line-height:1.42857143;color:#222;white-space:nowrap}.dropdown-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:990}.pull-right>.dropdown-menu{right:0;left:auto}.dropup .caret,.navbar-fixed-bottom .dropdown .caret{content:"";border-top:0;border-bottom:4px dashed;border-bottom:4px solid \9}.dropup .dropdown-menu,.navbar-fixed-bottom .dropdown .dropdown-menu{top:auto;bottom:100%;margin-bottom:2px}@media (min-width:768px){.navbar-right .dropdown-menu{right:0;left:auto}.navbar-right .dropdown-menu-left{right:auto;left:0}}.btn-group,.btn-group-vertical{position:relative;display:inline-block;vertical-align:middle}.btn-group>.btn,.btn-group-vertical>.btn{position:relative;float:left}.btn-group>.btn:hover,.btn-group-vertical>.btn:hover,.btn-group>.btn:focus,.btn-group-vertical>.btn:focus,.btn-group>.btn:active,.btn-group-vertical>.btn:active,.btn-group>.btn.active,.btn-group-vertical>.btn.active{z-index:2}.btn-group .btn+.btn,.btn-group .btn+.btn-group,.btn-group .btn-group+.btn,.btn-group .btn-group+.btn-group{margin-left:-1px}.btn-toolbar{margin-left:-5px}.btn-toolbar .btn,.btn-toolbar .btn-group,.btn-toolbar 
.input-group{float:left}.btn-toolbar>.btn,.btn-toolbar>.btn-group,.btn-toolbar>.input-group{margin-left:5px}.btn-group>.btn:not(:first-child):not(:last-child):not(.dropdown-toggle){border-radius:0}.btn-group>.btn:first-child{margin-left:0}.btn-group>.btn:first-child:not(:last-child):not(.dropdown-toggle){border-top-right-radius:0;border-bottom-right-radius:0}.btn-group>.btn:last-child:not(:first-child),.btn-group>.dropdown-toggle:not(:first-child){border-top-left-radius:0;border-bottom-left-radius:0}.btn-group>.btn-group{float:left}.btn-group>.btn-group:not(:first-child):not(:last-child)>.btn{border-radius:0}.btn-group>.btn-group:first-child:not(:last-child)>.btn:last-child,.btn-group>.btn-group:first-child:not(:last-child)>.dropdown-toggle{border-top-right-radius:0;border-bottom-right-radius:0}.btn-group>.btn-group:last-child:not(:first-child)>.btn:first-child{border-top-left-radius:0;border-bottom-left-radius:0}.btn-group .dropdown-toggle:active,.btn-group.open .dropdown-toggle{outline:0}.btn-group>.btn+.dropdown-toggle{padding-right:8px;padding-left:8px}.btn-group>.btn-lg+.dropdown-toggle{padding-right:12px;padding-left:12px}.btn-group.open .dropdown-toggle{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,0.125);box-shadow:inset 0 3px 5px rgba(0,0,0,0.125)}.btn-group.open .dropdown-toggle.btn-link{-webkit-box-shadow:none;box-shadow:none}.btn .caret{margin-left:0}.btn-lg .caret{border-width:5px 5px 0;border-bottom-width:0}.dropup .btn-lg .caret{border-width:0 5px 
5px}.btn-group-vertical>.btn,.btn-group-vertical>.btn-group,.btn-group-vertical>.btn-group>.btn{display:block;float:none;width:100%;max-width:100%}.btn-group-vertical>.btn-group>.btn{float:none}.btn-group-vertical>.btn+.btn,.btn-group-vertical>.btn+.btn-group,.btn-group-vertical>.btn-group+.btn,.btn-group-vertical>.btn-group+.btn-group{margin-top:-1px;margin-left:0}.btn-group-vertical>.btn:not(:first-child):not(:last-child){border-radius:0}.btn-group-vertical>.btn:first-child:not(:last-child){border-top-left-radius:0;border-top-right-radius:0;border-bottom-right-radius:0;border-bottom-left-radius:0}.btn-group-vertical>.btn:last-child:not(:first-child){border-top-left-radius:0;border-top-right-radius:0;border-bottom-right-radius:0;border-bottom-left-radius:0}.btn-group-vertical>.btn-group:not(:first-child):not(:last-child)>.btn{border-radius:0}.btn-group-vertical>.btn-group:first-child:not(:last-child)>.btn:last-child,.btn-group-vertical>.btn-group:first-child:not(:last-child)>.dropdown-toggle{border-bottom-right-radius:0;border-bottom-left-radius:0}.btn-group-vertical>.btn-group:last-child:not(:first-child)>.btn:first-child{border-top-left-radius:0;border-top-right-radius:0}.btn-group-justified{display:table;width:100%;table-layout:fixed;border-collapse:separate}.btn-group-justified>.btn,.btn-group-justified>.btn-group{display:table-cell;float:none;width:1%}.btn-group-justified>.btn-group .btn{width:100%}.btn-group-justified>.btn-group .dropdown-menu{left:auto}[data-toggle="buttons"]>.btn input[type="radio"],[data-toggle="buttons"]>.btn-group>.btn input[type="radio"],[data-toggle="buttons"]>.btn input[type="checkbox"],[data-toggle="buttons"]>.btn-group>.btn input[type="checkbox"]{position:absolute;clip:rect(0, 0, 0, 0);pointer-events:none}.input-group{position:relative;display:table;border-collapse:separate}.input-group[class*="col-"]{float:none;padding-right:0;padding-left:0}.input-group 
.form-control{position:relative;z-index:2;float:left;width:100%;margin-bottom:0}.input-group .form-control:focus{z-index:3}.input-group-lg>.form-control,.input-group-lg>.input-group-addon,.input-group-lg>.input-group-btn>.btn{height:46px;padding:10px 16px;font-size:18px;line-height:1.3333333;border-radius:0}select.input-group-lg>.form-control,select.input-group-lg>.input-group-addon,select.input-group-lg>.input-group-btn>.btn{height:46px;line-height:46px}textarea.input-group-lg>.form-control,textarea.input-group-lg>.input-group-addon,textarea.input-group-lg>.input-group-btn>.btn,select[multiple].input-group-lg>.form-control,select[multiple].input-group-lg>.input-group-addon,select[multiple].input-group-lg>.input-group-btn>.btn{height:auto}.input-group-sm>.form-control,.input-group-sm>.input-group-addon,.input-group-sm>.input-group-btn>.btn{height:30px;padding:5px 10px;font-size:12px;line-height:1.5;border-radius:0}select.input-group-sm>.form-control,select.input-group-sm>.input-group-addon,select.input-group-sm>.input-group-btn>.btn{height:30px;line-height:30px}textarea.input-group-sm>.form-control,textarea.input-group-sm>.input-group-addon,textarea.input-group-sm>.input-group-btn>.btn,select[multiple].input-group-sm>.form-control,select[multiple].input-group-sm>.input-group-addon,select[multiple].input-group-sm>.input-group-btn>.btn{height:auto}.input-group-addon,.input-group-btn,.input-group .form-control{display:table-cell}.input-group-addon:not(:first-child):not(:last-child),.input-group-btn:not(:first-child):not(:last-child),.input-group .form-control:not(:first-child):not(:last-child){border-radius:0}.input-group-addon,.input-group-btn{width:1%;white-space:nowrap;vertical-align:middle}.input-group-addon{padding:6px 12px;font-size:14px;font-weight:400;line-height:1;color:#000;text-align:center;background-color:#555;border:1px solid #ccc;border-radius:0}.input-group-addon.input-sm{padding:5px 
10px;font-size:12px;border-radius:0}.input-group-addon.input-lg{padding:10px 16px;font-size:18px;border-radius:0}.input-group-addon input[type="radio"],.input-group-addon input[type="checkbox"]{margin-top:0}.input-group .form-control:first-child,.input-group-addon:first-child,.input-group-btn:first-child>.btn,.input-group-btn:first-child>.btn-group>.btn,.input-group-btn:first-child>.dropdown-toggle,.input-group-btn:last-child>.btn:not(:last-child):not(.dropdown-toggle),.input-group-btn:last-child>.btn-group:not(:last-child)>.btn{border-top-right-radius:0;border-bottom-right-radius:0}.input-group-addon:first-child{border-right:0}.input-group .form-control:last-child,.input-group-addon:last-child,.input-group-btn:last-child>.btn,.input-group-btn:last-child>.btn-group>.btn,.input-group-btn:last-child>.dropdown-toggle,.input-group-btn:first-child>.btn:not(:first-child),.input-group-btn:first-child>.btn-group:not(:first-child)>.btn{border-top-left-radius:0;border-bottom-left-radius:0}.input-group-addon:last-child{border-left:0}.input-group-btn{position:relative;font-size:0;white-space:nowrap}.input-group-btn>.btn{position:relative}.input-group-btn>.btn+.btn{margin-left:-1px}.input-group-btn>.btn:hover,.input-group-btn>.btn:focus,.input-group-btn>.btn:active{z-index:2}.input-group-btn:first-child>.btn,.input-group-btn:first-child>.btn-group{margin-right:-1px}.input-group-btn:last-child>.btn,.input-group-btn:last-child>.btn-group{z-index:2;margin-left:-1px}.nav{padding-left:0;margin-bottom:0;list-style:none}.nav>li{position:relative;display:block}.nav>li>a{position:relative;display:block;padding:10px 15px}.nav>li>a:hover,.nav>li>a:focus{text-decoration:none;background-color:#555}.nav>li.disabled>a{color:#222}.nav>li.disabled>a:hover,.nav>li.disabled>a:focus{color:#222;text-decoration:none;cursor:not-allowed;background-color:transparent}.nav .open>a,.nav .open>a:hover,.nav .open>a:focus{background-color:#555;border-color:#98e8ff}.nav .nav-divider{height:1px;margin:9px 
0;overflow:hidden;background-color:#e5e5e5}.nav>li>a>img{max-width:none}.nav-tabs{border-bottom:1px solid #ddd}.nav-tabs>li{float:left;margin-bottom:-1px}.nav-tabs>li>a{margin-right:2px;line-height:1.42857143;border:1px solid transparent;border-radius:0 0 0 0}.nav-tabs>li>a:hover{border-color:#555 #555 #ddd}.nav-tabs>li.active>a,.nav-tabs>li.active>a:hover,.nav-tabs>li.active>a:focus{color:#eee;cursor:default;background-color:#252830;border:1px solid #ddd;border-bottom-color:transparent}.nav-tabs.nav-justified{width:100%;border-bottom:0}.nav-tabs.nav-justified>li{float:none}.nav-tabs.nav-justified>li>a{margin-bottom:5px;text-align:center}.nav-tabs.nav-justified>.dropdown .dropdown-menu{top:auto;left:auto}@media (min-width:768px){.nav-tabs.nav-justified>li{display:table-cell;width:1%}.nav-tabs.nav-justified>li>a{margin-bottom:0}}.nav-tabs.nav-justified>li>a{margin-right:0;border-radius:0}.nav-tabs.nav-justified>.active>a,.nav-tabs.nav-justified>.active>a:hover,.nav-tabs.nav-justified>.active>a:focus{border:1px solid #ddd}@media (min-width:768px){.nav-tabs.nav-justified>li>a{border-bottom:1px solid #ddd;border-radius:0 0 0 0}.nav-tabs.nav-justified>.active>a,.nav-tabs.nav-justified>.active>a:hover,.nav-tabs.nav-justified>.active>a:focus{border-bottom-color:#252830}}.nav-pills>li{float:left}.nav-pills>li>a{border-radius:0}.nav-pills>li+li{margin-left:2px}.nav-pills>li.active>a,.nav-pills>li.active>a:hover,.nav-pills>li.active>a:focus{color:#fff;background-color:#98e8ff}.nav-stacked>li{float:none}.nav-stacked>li+li{margin-top:2px;margin-left:0}.nav-justified{width:100%}.nav-justified>li{float:none}.nav-justified>li>a{margin-bottom:5px;text-align:center}.nav-justified>.dropdown .dropdown-menu{top:auto;left:auto}@media 
(min-width:768px){.nav-justified>li{display:table-cell;width:1%}.nav-justified>li>a{margin-bottom:0}}.nav-tabs-justified{border-bottom:0}.nav-tabs-justified>li>a{margin-right:0;border-radius:0}.nav-tabs-justified>.active>a,.nav-tabs-justified>.active>a:hover,.nav-tabs-justified>.active>a:focus{border:1px solid #ddd}@media (min-width:768px){.nav-tabs-justified>li>a{border-bottom:1px solid #ddd;border-radius:0 0 0 0}.nav-tabs-justified>.active>a,.nav-tabs-justified>.active>a:hover,.nav-tabs-justified>.active>a:focus{border-bottom-color:#252830}}.tab-content>.tab-pane{display:none}.tab-content>.active{display:block}.nav-tabs .dropdown-menu{margin-top:-1px;border-top-left-radius:0;border-top-right-radius:0}.navbar{position:relative;min-height:50px;margin-bottom:20px;border:1px solid transparent}@media (min-width:768px){.navbar{border-radius:0}}@media (min-width:768px){.navbar-header{float:left}}.navbar-collapse{padding-right:15px;padding-left:15px;overflow-x:visible;border-top:1px solid transparent;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,0.1);box-shadow:inset 0 1px 0 rgba(255,255,255,0.1);-webkit-overflow-scrolling:touch}.navbar-collapse.in{overflow-y:auto}@media (min-width:768px){.navbar-collapse{width:auto;border-top:0;-webkit-box-shadow:none;box-shadow:none}.navbar-collapse.collapse{display:block !important;height:auto !important;padding-bottom:0;overflow:visible !important}.navbar-collapse.in{overflow-y:visible}.navbar-fixed-top .navbar-collapse,.navbar-static-top .navbar-collapse,.navbar-fixed-bottom .navbar-collapse{padding-right:0;padding-left:0}}.navbar-fixed-top,.navbar-fixed-bottom{position:fixed;right:0;left:0;z-index:1030}.navbar-fixed-top .navbar-collapse,.navbar-fixed-bottom .navbar-collapse{max-height:340px}@media (max-device-width:480px) and (orientation:landscape){.navbar-fixed-top .navbar-collapse,.navbar-fixed-bottom .navbar-collapse{max-height:200px}}@media 
(min-width:768px){.navbar-fixed-top,.navbar-fixed-bottom{border-radius:0}}.navbar-fixed-top{top:0;border-width:0 0 1px}.navbar-fixed-bottom{bottom:0;margin-bottom:0;border-width:1px 0 0}.container>.navbar-header,.container-fluid>.navbar-header,.container>.navbar-collapse,.container-fluid>.navbar-collapse{margin-right:-15px;margin-left:-15px}@media (min-width:768px){.container>.navbar-header,.container-fluid>.navbar-header,.container>.navbar-collapse,.container-fluid>.navbar-collapse{margin-right:0;margin-left:0}}.navbar-static-top{z-index:1000;border-width:0 0 1px}@media (min-width:768px){.navbar-static-top{border-radius:0}}.navbar-brand{float:left;height:50px;padding:15px 15px;font-size:18px;line-height:20px}.navbar-brand:hover,.navbar-brand:focus{text-decoration:none}.navbar-brand>img{display:block}@media (min-width:768px){.navbar>.container .navbar-brand,.navbar>.container-fluid .navbar-brand{margin-left:-15px}}.navbar-toggle{position:relative;float:right;padding:9px 10px;margin-right:15px;margin-top:8px;margin-bottom:8px;background-color:transparent;background-image:none;border:1px solid transparent;border-radius:0}.navbar-toggle:focus{outline:0}.navbar-toggle .icon-bar{display:block;width:22px;height:2px;border-radius:1px}.navbar-toggle .icon-bar+.icon-bar{margin-top:4px}@media (min-width:768px){.navbar-toggle{display:none}}.navbar-nav{margin:7.5px -15px}.navbar-nav>li>a{padding-top:10px;padding-bottom:10px;line-height:20px}@media (max-width:767px){.navbar-nav .open .dropdown-menu{position:static;float:none;width:auto;margin-top:0;background-color:transparent;border:0;-webkit-box-shadow:none;box-shadow:none}.navbar-nav .open .dropdown-menu>li>a,.navbar-nav .open .dropdown-menu .dropdown-header{padding:5px 15px 5px 25px}.navbar-nav .open .dropdown-menu>li>a{line-height:20px}.navbar-nav .open .dropdown-menu>li>a:hover,.navbar-nav .open .dropdown-menu>li>a:focus{background-image:none}}@media 
(min-width:768px){.navbar-nav{float:left;margin:0}.navbar-nav>li{float:left}.navbar-nav>li>a{padding-top:15px;padding-bottom:15px}}.navbar-form{padding:10px 15px;margin-right:-15px;margin-left:-15px;border-top:1px solid transparent;border-bottom:1px solid transparent;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,0.1),0 1px 0 rgba(255,255,255,0.1);box-shadow:inset 0 1px 0 rgba(255,255,255,0.1),0 1px 0 rgba(255,255,255,0.1);margin-top:8px;margin-bottom:8px}@media (min-width:768px){.navbar-form .form-group{display:inline-block;margin-bottom:0;vertical-align:middle}.navbar-form .form-control{display:inline-block;width:auto;vertical-align:middle}.navbar-form .form-control-static{display:inline-block}.navbar-form .input-group{display:inline-table;vertical-align:middle}.navbar-form .input-group .input-group-addon,.navbar-form .input-group .input-group-btn,.navbar-form .input-group .form-control{width:auto}.navbar-form .input-group>.form-control{width:100%}.navbar-form .control-label{margin-bottom:0;vertical-align:middle}.navbar-form .radio,.navbar-form .checkbox{display:inline-block;margin-top:0;margin-bottom:0;vertical-align:middle}.navbar-form .radio label,.navbar-form .checkbox label{padding-left:0}.navbar-form .radio input[type="radio"],.navbar-form .checkbox input[type="checkbox"]{position:relative;margin-left:0}.navbar-form .has-feedback .form-control-feedback{top:0}}@media (max-width:767px){.navbar-form .form-group{margin-bottom:5px}.navbar-form .form-group:last-child{margin-bottom:0}}@media (min-width:768px){.navbar-form{width:auto;padding-top:0;padding-bottom:0;margin-right:0;margin-left:0;border:0;-webkit-box-shadow:none;box-shadow:none}}.navbar-nav>li>.dropdown-menu{margin-top:0;border-top-left-radius:0;border-top-right-radius:0}.navbar-fixed-bottom 
.navbar-nav>li>.dropdown-menu{margin-bottom:0;border-top-left-radius:0;border-top-right-radius:0;border-bottom-right-radius:0;border-bottom-left-radius:0}.navbar-btn{margin-top:8px;margin-bottom:8px}.navbar-btn.btn-sm{margin-top:10px;margin-bottom:10px}.navbar-btn.btn-xs{margin-top:14px;margin-bottom:14px}.navbar-text{margin-top:15px;margin-bottom:15px}@media (min-width:768px){.navbar-text{float:left;margin-right:15px;margin-left:15px}}@media (min-width:768px){.navbar-left{float:left !important}.navbar-right{float:right !important;margin-right:-15px}.navbar-right~.navbar-right{margin-right:0}}.navbar-default{background-color:#f8f8f8;border-color:#e7e7e7}.navbar-default .navbar-brand{color:#777}.navbar-default .navbar-brand:hover,.navbar-default .navbar-brand:focus{color:#5e5e5e;background-color:transparent}.navbar-default .navbar-text{color:#777}.navbar-default .navbar-nav>li>a{color:#777}.navbar-default .navbar-nav>li>a:hover,.navbar-default .navbar-nav>li>a:focus{color:#333;background-color:transparent}.navbar-default .navbar-nav>.active>a,.navbar-default .navbar-nav>.active>a:hover,.navbar-default .navbar-nav>.active>a:focus{color:#555;background-color:#e7e7e7}.navbar-default .navbar-nav>.disabled>a,.navbar-default .navbar-nav>.disabled>a:hover,.navbar-default .navbar-nav>.disabled>a:focus{color:#ccc;background-color:transparent}.navbar-default .navbar-nav>.open>a,.navbar-default .navbar-nav>.open>a:hover,.navbar-default .navbar-nav>.open>a:focus{color:#555;background-color:#e7e7e7}@media (max-width:767px){.navbar-default .navbar-nav .open .dropdown-menu>li>a{color:#777}.navbar-default .navbar-nav .open .dropdown-menu>li>a:hover,.navbar-default .navbar-nav .open .dropdown-menu>li>a:focus{color:#333;background-color:transparent}.navbar-default .navbar-nav .open .dropdown-menu>.active>a,.navbar-default .navbar-nav .open .dropdown-menu>.active>a:hover,.navbar-default .navbar-nav .open 
.dropdown-menu>.active>a:focus{color:#555;background-color:#e7e7e7}.navbar-default .navbar-nav .open .dropdown-menu>.disabled>a,.navbar-default .navbar-nav .open .dropdown-menu>.disabled>a:hover,.navbar-default .navbar-nav .open .dropdown-menu>.disabled>a:focus{color:#ccc;background-color:transparent}}.navbar-default .navbar-toggle{border-color:#ddd}.navbar-default .navbar-toggle:hover,.navbar-default .navbar-toggle:focus{background-color:#ddd}.navbar-default .navbar-toggle .icon-bar{background-color:#888}.navbar-default .navbar-collapse,.navbar-default .navbar-form{border-color:#e7e7e7}.navbar-default .navbar-link{color:#777}.navbar-default .navbar-link:hover{color:#333}.navbar-default .btn-link{color:#777}.navbar-default .btn-link:hover,.navbar-default .btn-link:focus{color:#333}.navbar-default .btn-link[disabled]:hover,fieldset[disabled] .navbar-default .btn-link:hover,.navbar-default .btn-link[disabled]:focus,fieldset[disabled] .navbar-default .btn-link:focus{color:#ccc}.navbar-inverse{background-color:#222;border-color:#080808}.navbar-inverse .navbar-brand{color:#494949}.navbar-inverse .navbar-brand:hover,.navbar-inverse .navbar-brand:focus{color:#fff;background-color:transparent}.navbar-inverse .navbar-text{color:#494949}.navbar-inverse .navbar-nav>li>a{color:#494949}.navbar-inverse .navbar-nav>li>a:hover,.navbar-inverse .navbar-nav>li>a:focus{color:#fff;background-color:transparent}.navbar-inverse .navbar-nav>.active>a,.navbar-inverse .navbar-nav>.active>a:hover,.navbar-inverse .navbar-nav>.active>a:focus{color:#fff;background-color:#080808}.navbar-inverse .navbar-nav>.disabled>a,.navbar-inverse .navbar-nav>.disabled>a:hover,.navbar-inverse .navbar-nav>.disabled>a:focus{color:#444;background-color:transparent}.navbar-inverse .navbar-nav>.open>a,.navbar-inverse .navbar-nav>.open>a:hover,.navbar-inverse .navbar-nav>.open>a:focus{color:#fff;background-color:#080808}@media (max-width:767px){.navbar-inverse .navbar-nav .open 
.dropdown-menu>.dropdown-header{border-color:#080808}.navbar-inverse .navbar-nav .open .dropdown-menu .divider{background-color:#080808}.navbar-inverse .navbar-nav .open .dropdown-menu>li>a{color:#494949}.navbar-inverse .navbar-nav .open .dropdown-menu>li>a:hover,.navbar-inverse .navbar-nav .open .dropdown-menu>li>a:focus{color:#fff;background-color:transparent}.navbar-inverse .navbar-nav .open .dropdown-menu>.active>a,.navbar-inverse .navbar-nav .open .dropdown-menu>.active>a:hover,.navbar-inverse .navbar-nav .open .dropdown-menu>.active>a:focus{color:#fff;background-color:#080808}.navbar-inverse .navbar-nav .open .dropdown-menu>.disabled>a,.navbar-inverse .navbar-nav .open .dropdown-menu>.disabled>a:hover,.navbar-inverse .navbar-nav .open .dropdown-menu>.disabled>a:focus{color:#444;background-color:transparent}}.navbar-inverse .navbar-toggle{border-color:#333}.navbar-inverse .navbar-toggle:hover,.navbar-inverse .navbar-toggle:focus{background-color:#333}.navbar-inverse .navbar-toggle .icon-bar{background-color:#fff}.navbar-inverse .navbar-collapse,.navbar-inverse .navbar-form{border-color:#101010}.navbar-inverse .navbar-link{color:#494949}.navbar-inverse .navbar-link:hover{color:#fff}.navbar-inverse .btn-link{color:#494949}.navbar-inverse .btn-link:hover,.navbar-inverse .btn-link:focus{color:#fff}.navbar-inverse .btn-link[disabled]:hover,fieldset[disabled] .navbar-inverse .btn-link:hover,.navbar-inverse .btn-link[disabled]:focus,fieldset[disabled] .navbar-inverse .btn-link:focus{color:#444}.breadcrumb{padding:8px 15px;margin-bottom:20px;list-style:none;background-color:#f5f5f5;border-radius:0}.breadcrumb>li{display:inline-block}.breadcrumb>li+li:before{padding:0 5px;color:#ccc;content:"/\00a0"}.breadcrumb>.active{color:#222}.pagination{display:inline-block;padding-left:0;margin:20px 0;border-radius:0}.pagination>li{display:inline}.pagination>li>a,.pagination>li>span{position:relative;float:left;padding:6px 
12px;margin-left:-1px;line-height:1.42857143;color:#98e8ff;text-decoration:none;background-color:#fff;border:1px solid #ddd}.pagination>li>a:hover,.pagination>li>span:hover,.pagination>li>a:focus,.pagination>li>span:focus{z-index:2;color:#4cd7ff;background-color:#555;border-color:#ddd}.pagination>li:first-child>a,.pagination>li:first-child>span{margin-left:0;border-top-left-radius:0;border-bottom-left-radius:0}.pagination>li:last-child>a,.pagination>li:last-child>span{border-top-right-radius:0;border-bottom-right-radius:0}.pagination>.active>a,.pagination>.active>span,.pagination>.active>a:hover,.pagination>.active>span:hover,.pagination>.active>a:focus,.pagination>.active>span:focus{z-index:3;color:#fff;cursor:default;background-color:#98e8ff;border-color:#98e8ff}.pagination>.disabled>span,.pagination>.disabled>span:hover,.pagination>.disabled>span:focus,.pagination>.disabled>a,.pagination>.disabled>a:hover,.pagination>.disabled>a:focus{color:#222;cursor:not-allowed;background-color:#fff;border-color:#ddd}.pagination-lg>li>a,.pagination-lg>li>span{padding:10px 16px;font-size:18px;line-height:1.3333333}.pagination-lg>li:first-child>a,.pagination-lg>li:first-child>span{border-top-left-radius:0;border-bottom-left-radius:0}.pagination-lg>li:last-child>a,.pagination-lg>li:last-child>span{border-top-right-radius:0;border-bottom-right-radius:0}.pagination-sm>li>a,.pagination-sm>li>span{padding:5px 10px;font-size:12px;line-height:1.5}.pagination-sm>li:first-child>a,.pagination-sm>li:first-child>span{border-top-left-radius:0;border-bottom-left-radius:0}.pagination-sm>li:last-child>a,.pagination-sm>li:last-child>span{border-top-right-radius:0;border-bottom-right-radius:0}.pager{padding-left:0;margin:20px 0;text-align:center;list-style:none}.pager li{display:inline}.pager li>a,.pager li>span{display:inline-block;padding:5px 14px;background-color:#fff;border:1px solid #ddd;border-radius:15px}.pager li>a:hover,.pager li>a:focus{text-decoration:none;background-color:#555}.pager 
.next>a,.pager .next>span{float:right}.pager .previous>a,.pager .previous>span{float:left}.pager .disabled>a,.pager .disabled>a:hover,.pager .disabled>a:focus,.pager .disabled>span{color:#222;cursor:not-allowed;background-color:#fff}.label{display:inline;padding:.2em .6em .3em;font-size:75%;font-weight:700;line-height:1;color:#fff;text-align:center;white-space:nowrap;vertical-align:baseline;border-radius:.25em}a.label:hover,a.label:focus{color:#fff;text-decoration:none;cursor:pointer}.label:empty{display:none}.btn .label{position:relative;top:-1px}.label-default{background-color:#222}.label-default[href]:hover,.label-default[href]:focus{background-color:#090909}.label-primary{background-color:#98e8ff}.label-primary[href]:hover,.label-primary[href]:focus{background-color:#65ddff}.label-success{background-color:#5cb85c}.label-success[href]:hover,.label-success[href]:focus{background-color:#449d44}.label-info{background-color:#5bc0de}.label-info[href]:hover,.label-info[href]:focus{background-color:#31b0d5}.label-warning{background-color:#f0ad4e}.label-warning[href]:hover,.label-warning[href]:focus{background-color:#ec971f}.label-danger{background-color:#d9534f}.label-danger[href]:hover,.label-danger[href]:focus{background-color:#c9302c}.badge{display:inline-block;min-width:10px;padding:3px 7px;font-size:12px;font-weight:bold;line-height:1;color:#fff;text-align:center;white-space:nowrap;vertical-align:middle;background-color:#222;border-radius:10px}.badge:empty{display:none}.btn .badge{position:relative;top:-1px}.btn-xs .badge,.btn-group-xs>.btn .badge{top:0;padding:1px 
5px}a.badge:hover,a.badge:focus{color:#fff;text-decoration:none;cursor:pointer}.list-group-item.active>.badge,.nav-pills>.active>a>.badge{color:#98e8ff;background-color:#fff}.list-group-item>.badge{float:right}.list-group-item>.badge+.badge{margin-right:5px}.nav-pills>li>a>.badge{margin-left:3px}.jumbotron{padding-top:30px;padding-bottom:30px;margin-bottom:30px;color:inherit;background-color:#555}.jumbotron h1,.jumbotron .h1{color:inherit}.jumbotron p{margin-bottom:15px;font-size:21px;font-weight:200}.jumbotron>hr{border-top-color:#3c3c3c}.container .jumbotron,.container-fluid .jumbotron{padding-right:15px;padding-left:15px;border-radius:0}.jumbotron .container{max-width:100%}@media screen and (min-width:768px){.jumbotron{padding-top:48px;padding-bottom:48px}.container .jumbotron,.container-fluid .jumbotron{padding-right:60px;padding-left:60px}.jumbotron h1,.jumbotron .h1{font-size:63px}}.thumbnail{display:block;padding:4px;margin-bottom:20px;line-height:1.42857143;background-color:#252830;border:1px solid #ddd;border-radius:0;-webkit-transition:border .2s ease-in-out;-o-transition:border .2s ease-in-out;transition:border .2s ease-in-out}.thumbnail>img,.thumbnail a>img{margin-right:auto;margin-left:auto}a.thumbnail:hover,a.thumbnail:focus,a.thumbnail.active{border-color:#98e8ff}.thumbnail .caption{padding:9px;color:#999}.alert{padding:15px;margin-bottom:20px;border:1px solid transparent;border-radius:0}.alert h4{margin-top:0;color:inherit}.alert .alert-link{font-weight:bold}.alert>p,.alert>ul{margin-bottom:0}.alert>p+p{margin-top:5px}.alert-dismissable,.alert-dismissible{padding-right:35px}.alert-dismissable .close,.alert-dismissible .close{position:relative;top:-2px;right:-21px;color:inherit}.alert-success{color:#3c763d;background-color:#dff0d8;border-color:#d6e9c6}.alert-success hr{border-top-color:#c9e2b3}.alert-success .alert-link{color:#2b542c}.alert-info{color:#31708f;background-color:#d9edf7;border-color:#bce8f1}.alert-info 
hr{border-top-color:#a6e1ec}.alert-info .alert-link{color:#245269}.alert-warning{color:#8a6d3b;background-color:#fcf8e3;border-color:#faebcc}.alert-warning hr{border-top-color:#f7e1b5}.alert-warning .alert-link{color:#66512c}.alert-danger{color:#a94442;background-color:#f2dede;border-color:#ebccd1}.alert-danger hr{border-top-color:#e4b9c0}.alert-danger .alert-link{color:#843534}@-webkit-keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}@-o-keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}@keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}.progress{height:20px;margin-bottom:20px;overflow:hidden;background-color:#f5f5f5;border-radius:0;-webkit-box-shadow:inset 0 1px 2px rgba(0,0,0,0.1);box-shadow:inset 0 1px 2px rgba(0,0,0,0.1)}.progress-bar{float:left;width:0%;height:100%;font-size:12px;line-height:20px;color:#fff;text-align:center;background-color:#98e8ff;-webkit-box-shadow:inset 0 -1px 0 rgba(0,0,0,0.15);box-shadow:inset 0 -1px 0 rgba(0,0,0,0.15);-webkit-transition:width .6s ease;-o-transition:width .6s ease;transition:width .6s ease}.progress-striped .progress-bar,.progress-bar-striped{background-image:-webkit-linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent);background-image:-o-linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent);background-image:linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent);-webkit-background-size:40px 40px;background-size:40px 40px}.progress.active .progress-bar,.progress-bar.active{-webkit-animation:progress-bar-stripes 2s linear 
infinite;-o-animation:progress-bar-stripes 2s linear infinite;animation:progress-bar-stripes 2s linear infinite}.progress-bar-success{background-color:#5cb85c}.progress-striped .progress-bar-success{background-image:-webkit-linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent);background-image:-o-linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent);background-image:linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent)}.progress-bar-info{background-color:#5bc0de}.progress-striped .progress-bar-info{background-image:-webkit-linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent);background-image:-o-linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent);background-image:linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent)}.progress-bar-warning{background-color:#f0ad4e}.progress-striped .progress-bar-warning{background-image:-webkit-linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent);background-image:-o-linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent);background-image:linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, 
transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent)}.progress-bar-danger{background-color:#d9534f}.progress-striped .progress-bar-danger{background-image:-webkit-linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent);background-image:-o-linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent);background-image:linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent)}.media{margin-top:15px}.media:first-child{margin-top:0}.media,.media-body{overflow:hidden;zoom:1}.media-body{width:10000px}.media-object{display:block}.media-object.img-thumbnail{max-width:none}.media-right,.media>.pull-right{padding-left:10px}.media-left,.media>.pull-left{padding-right:10px}.media-left,.media-right,.media-body{display:table-cell;vertical-align:top}.media-middle{vertical-align:middle}.media-bottom{vertical-align:bottom}.media-heading{margin-top:0;margin-bottom:5px}.media-list{padding-left:0;list-style:none}.list-group{padding-left:0;margin-bottom:20px}.list-group-item{position:relative;display:block;padding:10px 15px;margin-bottom:-1px;background-color:#fff;border:1px solid #ddd}.list-group-item:first-child{border-top-left-radius:0;border-top-right-radius:0}.list-group-item:last-child{margin-bottom:0;border-bottom-right-radius:0;border-bottom-left-radius:0}.list-group-item.disabled,.list-group-item.disabled:hover,.list-group-item.disabled:focus{color:#222;cursor:not-allowed;background-color:#555}.list-group-item.disabled .list-group-item-heading,.list-group-item.disabled:hover .list-group-item-heading,.list-group-item.disabled:focus .list-group-item-heading{color:inherit}.list-group-item.disabled 
.list-group-item-text,.list-group-item.disabled:hover .list-group-item-text,.list-group-item.disabled:focus .list-group-item-text{color:#222}.list-group-item.active,.list-group-item.active:hover,.list-group-item.active:focus{z-index:2;color:#fff;background-color:#98e8ff;border-color:#98e8ff}.list-group-item.active .list-group-item-heading,.list-group-item.active:hover .list-group-item-heading,.list-group-item.active:focus .list-group-item-heading,.list-group-item.active .list-group-item-heading>small,.list-group-item.active:hover .list-group-item-heading>small,.list-group-item.active:focus .list-group-item-heading>small,.list-group-item.active .list-group-item-heading>.small,.list-group-item.active:hover .list-group-item-heading>.small,.list-group-item.active:focus .list-group-item-heading>.small{color:inherit}.list-group-item.active .list-group-item-text,.list-group-item.active:hover .list-group-item-text,.list-group-item.active:focus .list-group-item-text{color:#fff}a.list-group-item,button.list-group-item{color:#555}a.list-group-item .list-group-item-heading,button.list-group-item .list-group-item-heading{color:#333}a.list-group-item:hover,button.list-group-item:hover,a.list-group-item:focus,button.list-group-item:focus{color:#555;text-decoration:none;background-color:#f5f5f5}button.list-group-item{width:100%;text-align:left}.list-group-item-success{color:#3c763d;background-color:#dff0d8}a.list-group-item-success,button.list-group-item-success{color:#3c763d}a.list-group-item-success .list-group-item-heading,button.list-group-item-success 
.list-group-item-heading{color:inherit}a.list-group-item-success:hover,button.list-group-item-success:hover,a.list-group-item-success:focus,button.list-group-item-success:focus{color:#3c763d;background-color:#d0e9c6}a.list-group-item-success.active,button.list-group-item-success.active,a.list-group-item-success.active:hover,button.list-group-item-success.active:hover,a.list-group-item-success.active:focus,button.list-group-item-success.active:focus{color:#fff;background-color:#3c763d;border-color:#3c763d}.list-group-item-info{color:#31708f;background-color:#d9edf7}a.list-group-item-info,button.list-group-item-info{color:#31708f}a.list-group-item-info .list-group-item-heading,button.list-group-item-info .list-group-item-heading{color:inherit}a.list-group-item-info:hover,button.list-group-item-info:hover,a.list-group-item-info:focus,button.list-group-item-info:focus{color:#31708f;background-color:#c4e3f3}a.list-group-item-info.active,button.list-group-item-info.active,a.list-group-item-info.active:hover,button.list-group-item-info.active:hover,a.list-group-item-info.active:focus,button.list-group-item-info.active:focus{color:#fff;background-color:#31708f;border-color:#31708f}.list-group-item-warning{color:#8a6d3b;background-color:#fcf8e3}a.list-group-item-warning,button.list-group-item-warning{color:#8a6d3b}a.list-group-item-warning .list-group-item-heading,button.list-group-item-warning 
.list-group-item-heading{color:inherit}a.list-group-item-warning:hover,button.list-group-item-warning:hover,a.list-group-item-warning:focus,button.list-group-item-warning:focus{color:#8a6d3b;background-color:#faf2cc}a.list-group-item-warning.active,button.list-group-item-warning.active,a.list-group-item-warning.active:hover,button.list-group-item-warning.active:hover,a.list-group-item-warning.active:focus,button.list-group-item-warning.active:focus{color:#fff;background-color:#8a6d3b;border-color:#8a6d3b}.list-group-item-danger{color:#a94442;background-color:#f2dede}a.list-group-item-danger,button.list-group-item-danger{color:#a94442}a.list-group-item-danger .list-group-item-heading,button.list-group-item-danger .list-group-item-heading{color:inherit}a.list-group-item-danger:hover,button.list-group-item-danger:hover,a.list-group-item-danger:focus,button.list-group-item-danger:focus{color:#a94442;background-color:#ebcccc}a.list-group-item-danger.active,button.list-group-item-danger.active,a.list-group-item-danger.active:hover,button.list-group-item-danger.active:hover,a.list-group-item-danger.active:focus,button.list-group-item-danger.active:focus{color:#fff;background-color:#a94442;border-color:#a94442}.list-group-item-heading{margin-top:0;margin-bottom:5px}.list-group-item-text{margin-bottom:0;line-height:1.3}.panel{margin-bottom:20px;background-color:#fff;border:1px solid transparent;border-radius:0;-webkit-box-shadow:0 1px 1px rgba(0,0,0,0.05);box-shadow:0 1px 1px rgba(0,0,0,0.05)}.panel-body{padding:15px}.panel-heading{padding:10px 15px;border-bottom:1px solid transparent;border-top-left-radius:-1;border-top-right-radius:-1}.panel-heading>.dropdown .dropdown-toggle{color:inherit}.panel-title{margin-top:0;margin-bottom:0;font-size:16px;color:inherit}.panel-title>a,.panel-title>small,.panel-title>.small,.panel-title>small>a,.panel-title>.small>a{color:inherit}.panel-footer{padding:10px 15px;background-color:#f5f5f5;border-top:1px solid 
#ddd;border-bottom-right-radius:-1;border-bottom-left-radius:-1}.panel>.list-group,.panel>.panel-collapse>.list-group{margin-bottom:0}.panel>.list-group .list-group-item,.panel>.panel-collapse>.list-group .list-group-item{border-width:1px 0;border-radius:0}.panel>.list-group:first-child .list-group-item:first-child,.panel>.panel-collapse>.list-group:first-child .list-group-item:first-child{border-top:0;border-top-left-radius:-1;border-top-right-radius:-1}.panel>.list-group:last-child .list-group-item:last-child,.panel>.panel-collapse>.list-group:last-child .list-group-item:last-child{border-bottom:0;border-bottom-right-radius:-1;border-bottom-left-radius:-1}.panel>.panel-heading+.panel-collapse>.list-group .list-group-item:first-child{border-top-left-radius:0;border-top-right-radius:0}.panel-heading+.list-group .list-group-item:first-child{border-top-width:0}.list-group+.panel-footer{border-top-width:0}.panel>.table,.panel>.table-responsive>.table,.panel>.panel-collapse>.table{margin-bottom:0}.panel>.table caption,.panel>.table-responsive>.table caption,.panel>.panel-collapse>.table caption{padding-right:15px;padding-left:15px}.panel>.table:first-child,.panel>.table-responsive:first-child>.table:first-child{border-top-left-radius:-1;border-top-right-radius:-1}.panel>.table:first-child>thead:first-child>tr:first-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child,.panel>.table:first-child>tbody:first-child>tr:first-child,.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child{border-top-left-radius:-1;border-top-right-radius:-1}.panel>.table:first-child>thead:first-child>tr:first-child td:first-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child td:first-child,.panel>.table:first-child>tbody:first-child>tr:first-child td:first-child,.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child 
td:first-child,.panel>.table:first-child>thead:first-child>tr:first-child th:first-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child th:first-child,.panel>.table:first-child>tbody:first-child>tr:first-child th:first-child,.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child th:first-child{border-top-left-radius:-1}.panel>.table:first-child>thead:first-child>tr:first-child td:last-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child td:last-child,.panel>.table:first-child>tbody:first-child>tr:first-child td:last-child,.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child td:last-child,.panel>.table:first-child>thead:first-child>tr:first-child th:last-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child th:last-child,.panel>.table:first-child>tbody:first-child>tr:first-child th:last-child,.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child th:last-child{border-top-right-radius:-1}.panel>.table:last-child,.panel>.table-responsive:last-child>.table:last-child{border-bottom-right-radius:-1;border-bottom-left-radius:-1}.panel>.table:last-child>tbody:last-child>tr:last-child,.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child,.panel>.table:last-child>tfoot:last-child>tr:last-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child{border-bottom-right-radius:-1;border-bottom-left-radius:-1}.panel>.table:last-child>tbody:last-child>tr:last-child td:first-child,.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child td:first-child,.panel>.table:last-child>tfoot:last-child>tr:last-child td:first-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child 
td:first-child,.panel>.table:last-child>tbody:last-child>tr:last-child th:first-child,.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child th:first-child,.panel>.table:last-child>tfoot:last-child>tr:last-child th:first-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child th:first-child{border-bottom-left-radius:-1}.panel>.table:last-child>tbody:last-child>tr:last-child td:last-child,.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child td:last-child,.panel>.table:last-child>tfoot:last-child>tr:last-child td:last-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child td:last-child,.panel>.table:last-child>tbody:last-child>tr:last-child th:last-child,.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child th:last-child,.panel>.table:last-child>tfoot:last-child>tr:last-child th:last-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child th:last-child{border-bottom-right-radius:-1}.panel>.panel-body+.table,.panel>.panel-body+.table-responsive,.panel>.table+.panel-body,.panel>.table-responsive+.panel-body{border-top:1px solid #ddd}.panel>.table>tbody:first-child>tr:first-child th,.panel>.table>tbody:first-child>tr:first-child 
td{border-top:0}.panel>.table-bordered,.panel>.table-responsive>.table-bordered{border:0}.panel>.table-bordered>thead>tr>th:first-child,.panel>.table-responsive>.table-bordered>thead>tr>th:first-child,.panel>.table-bordered>tbody>tr>th:first-child,.panel>.table-responsive>.table-bordered>tbody>tr>th:first-child,.panel>.table-bordered>tfoot>tr>th:first-child,.panel>.table-responsive>.table-bordered>tfoot>tr>th:first-child,.panel>.table-bordered>thead>tr>td:first-child,.panel>.table-responsive>.table-bordered>thead>tr>td:first-child,.panel>.table-bordered>tbody>tr>td:first-child,.panel>.table-responsive>.table-bordered>tbody>tr>td:first-child,.panel>.table-bordered>tfoot>tr>td:first-child,.panel>.table-responsive>.table-bordered>tfoot>tr>td:first-child{border-left:0}.panel>.table-bordered>thead>tr>th:last-child,.panel>.table-responsive>.table-bordered>thead>tr>th:last-child,.panel>.table-bordered>tbody>tr>th:last-child,.panel>.table-responsive>.table-bordered>tbody>tr>th:last-child,.panel>.table-bordered>tfoot>tr>th:last-child,.panel>.table-responsive>.table-bordered>tfoot>tr>th:last-child,.panel>.table-bordered>thead>tr>td:last-child,.panel>.table-responsive>.table-bordered>thead>tr>td:last-child,.panel>.table-bordered>tbody>tr>td:last-child,.panel>.table-responsive>.table-bordered>tbody>tr>td:last-child,.panel>.table-bordered>tfoot>tr>td:last-child,.panel>.table-responsive>.table-bordered>tfoot>tr>td:last-child{border-right:0}.panel>.table-bordered>thead>tr:first-child>td,.panel>.table-responsive>.table-bordered>thead>tr:first-child>td,.panel>.table-bordered>tbody>tr:first-child>td,.panel>.table-responsive>.table-bordered>tbody>tr:first-child>td,.panel>.table-bordered>thead>tr:first-child>th,.panel>.table-responsive>.table-bordered>thead>tr:first-child>th,.panel>.table-bordered>tbody>tr:first-child>th,.panel>.table-responsive>.table-bordered>tbody>tr:first-child>th{border-bottom:0}.panel>.table-bordered>tbody>tr:last-child>td,.panel>.table-responsive>.table-bordered
>tbody>tr:last-child>td,.panel>.table-bordered>tfoot>tr:last-child>td,.panel>.table-responsive>.table-bordered>tfoot>tr:last-child>td,.panel>.table-bordered>tbody>tr:last-child>th,.panel>.table-responsive>.table-bordered>tbody>tr:last-child>th,.panel>.table-bordered>tfoot>tr:last-child>th,.panel>.table-responsive>.table-bordered>tfoot>tr:last-child>th{border-bottom:0}.panel>.table-responsive{margin-bottom:0;border:0}.panel-group{margin-bottom:20px}.panel-group .panel{margin-bottom:0;border-radius:0}.panel-group .panel+.panel{margin-top:5px}.panel-group .panel-heading{border-bottom:0}.panel-group .panel-heading+.panel-collapse>.panel-body,.panel-group .panel-heading+.panel-collapse>.list-group{border-top:1px solid #ddd}.panel-group .panel-footer{border-top:0}.panel-group .panel-footer+.panel-collapse .panel-body{border-bottom:1px solid #ddd}.panel-default{border-color:#ddd}.panel-default>.panel-heading{color:#999;background-color:#f5f5f5;border-color:#ddd}.panel-default>.panel-heading+.panel-collapse>.panel-body{border-top-color:#ddd}.panel-default>.panel-heading .badge{color:#f5f5f5;background-color:#999}.panel-default>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#ddd}.panel-primary{border-color:#98e8ff}.panel-primary>.panel-heading{color:#fff;background-color:#98e8ff;border-color:#98e8ff}.panel-primary>.panel-heading+.panel-collapse>.panel-body{border-top-color:#98e8ff}.panel-primary>.panel-heading .badge{color:#98e8ff;background-color:#fff}.panel-primary>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#98e8ff}.panel-success{border-color:#d6e9c6}.panel-success>.panel-heading{color:#3c763d;background-color:#dff0d8;border-color:#d6e9c6}.panel-success>.panel-heading+.panel-collapse>.panel-body{border-top-color:#d6e9c6}.panel-success>.panel-heading 
.badge{color:#dff0d8;background-color:#3c763d}.panel-success>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#d6e9c6}.panel-info{border-color:#bce8f1}.panel-info>.panel-heading{color:#31708f;background-color:#d9edf7;border-color:#bce8f1}.panel-info>.panel-heading+.panel-collapse>.panel-body{border-top-color:#bce8f1}.panel-info>.panel-heading .badge{color:#d9edf7;background-color:#31708f}.panel-info>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#bce8f1}.panel-warning{border-color:#faebcc}.panel-warning>.panel-heading{color:#8a6d3b;background-color:#fcf8e3;border-color:#faebcc}.panel-warning>.panel-heading+.panel-collapse>.panel-body{border-top-color:#faebcc}.panel-warning>.panel-heading .badge{color:#fcf8e3;background-color:#8a6d3b}.panel-warning>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#faebcc}.panel-danger{border-color:#ebccd1}.panel-danger>.panel-heading{color:#a94442;background-color:#f2dede;border-color:#ebccd1}.panel-danger>.panel-heading+.panel-collapse>.panel-body{border-top-color:#ebccd1}.panel-danger>.panel-heading .badge{color:#f2dede;background-color:#a94442}.panel-danger>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#ebccd1}.embed-responsive{position:relative;display:block;height:0;padding:0;overflow:hidden}.embed-responsive .embed-responsive-item,.embed-responsive iframe,.embed-responsive embed,.embed-responsive object,.embed-responsive video{position:absolute;top:0;bottom:0;left:0;width:100%;height:100%;border:0}.embed-responsive-16by9{padding-bottom:56.25%}.embed-responsive-4by3{padding-bottom:75%}.well{min-height:20px;padding:19px;margin-bottom:20px;background-color:#f5f5f5;border:1px solid #e3e3e3;border-radius:0;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.05);box-shadow:inset 0 1px 1px rgba(0,0,0,0.05)}.well 
blockquote{border-color:#ddd;border-color:rgba(0,0,0,0.15)}.well-lg{padding:24px;border-radius:0}.well-sm{padding:9px;border-radius:0}.close{float:right;font-size:21px;font-weight:bold;line-height:1;color:#000;text-shadow:0 1px 0 #fff;filter:alpha(opacity=20);opacity:.2}.close:hover,.close:focus{color:#000;text-decoration:none;cursor:pointer;filter:alpha(opacity=50);opacity:.5}button.close{padding:0;cursor:pointer;background:transparent;border:0;-webkit-appearance:none;appearance:none}.modal-open{overflow:hidden}.modal{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1050;display:none;overflow:hidden;-webkit-overflow-scrolling:touch;outline:0}.modal.fade .modal-dialog{-webkit-transform:translate(0, -25%);-ms-transform:translate(0, -25%);-o-transform:translate(0, -25%);transform:translate(0, -25%);-webkit-transition:-webkit-transform 0.3s ease-out;-o-transition:-o-transform 0.3s ease-out;transition:transform 0.3s ease-out}.modal.in .modal-dialog{-webkit-transform:translate(0, 0);-ms-transform:translate(0, 0);-o-transform:translate(0, 0);transform:translate(0, 0)}.modal-open .modal{overflow-x:hidden;overflow-y:auto}.modal-dialog{position:relative;width:auto;margin:10px}.modal-content{position:relative;background-color:#fff;-webkit-background-clip:padding-box;background-clip:padding-box;border:1px solid #999;border:1px solid rgba(0,0,0,0.2);border-radius:0;-webkit-box-shadow:0 3px 9px rgba(0,0,0,0.5);box-shadow:0 3px 9px rgba(0,0,0,0.5);outline:0}.modal-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1040;background-color:#000}.modal-backdrop.fade{filter:alpha(opacity=0);opacity:0}.modal-backdrop.in{filter:alpha(opacity=50);opacity:.5}.modal-header{padding:15px;border-bottom:1px solid #e5e5e5}.modal-header .close{margin-top:-2px}.modal-title{margin:0;line-height:1.42857143}.modal-body{position:relative;padding:15px}.modal-footer{padding:15px;text-align:right;border-top:1px solid #e5e5e5}.modal-footer 
.btn+.btn{margin-bottom:0;margin-left:5px}.modal-footer .btn-group .btn+.btn{margin-left:-1px}.modal-footer .btn-block+.btn-block{margin-left:0}.modal-scrollbar-measure{position:absolute;top:-9999px;width:50px;height:50px;overflow:scroll}@media (min-width:768px){.modal-dialog{width:600px;margin:30px auto}.modal-content{-webkit-box-shadow:0 5px 15px rgba(0,0,0,0.5);box-shadow:0 5px 15px rgba(0,0,0,0.5)}.modal-sm{width:300px}}@media (min-width:992px){.modal-lg{width:900px}}.tooltip{position:absolute;z-index:1070;display:block;font-family:"Roboto","Helvetica Neue",Helvetica,Arial,sans-serif;font-style:normal;font-weight:400;line-height:1.42857143;line-break:auto;text-align:left;text-align:start;text-decoration:none;text-shadow:none;text-transform:none;letter-spacing:normal;word-break:normal;word-spacing:normal;word-wrap:normal;white-space:normal;font-size:12px;filter:alpha(opacity=0);opacity:0}.tooltip.in{filter:alpha(opacity=90);opacity:.9}.tooltip.top{padding:5px 0;margin-top:-3px}.tooltip.right{padding:0 5px;margin-left:3px}.tooltip.bottom{padding:5px 0;margin-top:3px}.tooltip.left{padding:0 5px;margin-left:-3px}.tooltip.top .tooltip-arrow{bottom:0;left:50%;margin-left:-5px;border-width:5px 5px 0;border-top-color:#000}.tooltip.top-left .tooltip-arrow{right:5px;bottom:0;margin-bottom:-5px;border-width:5px 5px 0;border-top-color:#000}.tooltip.top-right .tooltip-arrow{bottom:0;left:5px;margin-bottom:-5px;border-width:5px 5px 0;border-top-color:#000}.tooltip.right .tooltip-arrow{top:50%;left:0;margin-top:-5px;border-width:5px 5px 5px 0;border-right-color:#000}.tooltip.left .tooltip-arrow{top:50%;right:0;margin-top:-5px;border-width:5px 0 5px 5px;border-left-color:#000}.tooltip.bottom .tooltip-arrow{top:0;left:50%;margin-left:-5px;border-width:0 5px 5px;border-bottom-color:#000}.tooltip.bottom-left .tooltip-arrow{top:0;right:5px;margin-top:-5px;border-width:0 5px 5px;border-bottom-color:#000}.tooltip.bottom-right 
.tooltip-arrow{top:0;left:5px;margin-top:-5px;border-width:0 5px 5px;border-bottom-color:#000}.tooltip-inner{max-width:200px;padding:3px 8px;color:#fff;text-align:center;background-color:#000;border-radius:0}.tooltip-arrow{position:absolute;width:0;height:0;border-color:transparent;border-style:solid}.popover{position:absolute;top:0;left:0;z-index:1060;display:none;max-width:276px;padding:1px;font-family:"Roboto","Helvetica Neue",Helvetica,Arial,sans-serif;font-style:normal;font-weight:400;line-height:1.42857143;line-break:auto;text-align:left;text-align:start;text-decoration:none;text-shadow:none;text-transform:none;letter-spacing:normal;word-break:normal;word-spacing:normal;word-wrap:normal;white-space:normal;font-size:14px;background-color:#fff;-webkit-background-clip:padding-box;background-clip:padding-box;border:1px solid #ccc;border:1px solid rgba(0,0,0,0.2);border-radius:0;-webkit-box-shadow:0 5px 10px rgba(0,0,0,0.2);box-shadow:0 5px 10px rgba(0,0,0,0.2)}.popover.top{margin-top:-10px}.popover.right{margin-left:10px}.popover.bottom{margin-top:10px}.popover.left{margin-left:-10px}.popover>.arrow{border-width:11px}.popover>.arrow,.popover>.arrow:after{position:absolute;display:block;width:0;height:0;border-color:transparent;border-style:solid}.popover>.arrow:after{content:"";border-width:10px}.popover.top>.arrow{bottom:-11px;left:50%;margin-left:-11px;border-top-color:#999;border-top-color:rgba(0,0,0,0.25);border-bottom-width:0}.popover.top>.arrow:after{bottom:1px;margin-left:-10px;content:" ";border-top-color:#fff;border-bottom-width:0}.popover.right>.arrow{top:50%;left:-11px;margin-top:-11px;border-right-color:#999;border-right-color:rgba(0,0,0,0.25);border-left-width:0}.popover.right>.arrow:after{bottom:-10px;left:1px;content:" 
";border-right-color:#fff;border-left-width:0}.popover.bottom>.arrow{top:-11px;left:50%;margin-left:-11px;border-top-width:0;border-bottom-color:#999;border-bottom-color:rgba(0,0,0,0.25)}.popover.bottom>.arrow:after{top:1px;margin-left:-10px;content:" ";border-top-width:0;border-bottom-color:#fff}.popover.left>.arrow{top:50%;right:-11px;margin-top:-11px;border-right-width:0;border-left-color:#999;border-left-color:rgba(0,0,0,0.25)}.popover.left>.arrow:after{right:1px;bottom:-10px;content:" ";border-right-width:0;border-left-color:#fff}.popover-title{padding:8px 14px;margin:0;font-size:14px;background-color:#f7f7f7;border-bottom:1px solid #ebebeb;border-radius:-1 -1 0 0}.popover-content{padding:9px 14px}.carousel{position:relative}.carousel-inner{position:relative;width:100%;overflow:hidden}.carousel-inner>.item{position:relative;display:none;-webkit-transition:.6s ease-in-out left;-o-transition:.6s ease-in-out left;transition:.6s ease-in-out left}.carousel-inner>.item>img,.carousel-inner>.item>a>img{line-height:1}@media all and (transform-3d),(-webkit-transform-3d){.carousel-inner>.item{-webkit-transition:-webkit-transform 0.6s ease-in-out;-o-transition:-o-transform 0.6s ease-in-out;transition:transform 0.6s ease-in-out;-webkit-backface-visibility:hidden;backface-visibility:hidden;-webkit-perspective:1000px;perspective:1000px}.carousel-inner>.item.next,.carousel-inner>.item.active.right{-webkit-transform:translate3d(100%, 0, 0);transform:translate3d(100%, 0, 0);left:0}.carousel-inner>.item.prev,.carousel-inner>.item.active.left{-webkit-transform:translate3d(-100%, 0, 0);transform:translate3d(-100%, 0, 0);left:0}.carousel-inner>.item.next.left,.carousel-inner>.item.prev.right,.carousel-inner>.item.active{-webkit-transform:translate3d(0, 0, 0);transform:translate3d(0, 0, 
0);left:0}}.carousel-inner>.active,.carousel-inner>.next,.carousel-inner>.prev{display:block}.carousel-inner>.active{left:0}.carousel-inner>.next,.carousel-inner>.prev{position:absolute;top:0;width:100%}.carousel-inner>.next{left:100%}.carousel-inner>.prev{left:-100%}.carousel-inner>.next.left,.carousel-inner>.prev.right{left:0}.carousel-inner>.active.left{left:-100%}.carousel-inner>.active.right{left:100%}.carousel-control{position:absolute;top:0;bottom:0;left:0;width:15%;font-size:20px;color:#fff;text-align:center;text-shadow:0 1px 2px rgba(0,0,0,0.6);background-color:rgba(0,0,0,0);filter:alpha(opacity=50);opacity:.5}.carousel-control.left{background-image:-webkit-linear-gradient(left, rgba(0,0,0,0.5) 0, rgba(0,0,0,0.0001) 100%);background-image:-o-linear-gradient(left, rgba(0,0,0,0.5) 0, rgba(0,0,0,0.0001) 100%);background-image:-webkit-gradient(linear, left top, right top, color-stop(0, rgba(0,0,0,0.5)), to(rgba(0,0,0,0.0001)));background-image:linear-gradient(to right, rgba(0,0,0,0.5) 0, rgba(0,0,0,0.0001) 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#80000000', endColorstr='#00000000', GradientType=1);background-repeat:repeat-x}.carousel-control.right{right:0;left:auto;background-image:-webkit-linear-gradient(left, rgba(0,0,0,0.0001) 0, rgba(0,0,0,0.5) 100%);background-image:-o-linear-gradient(left, rgba(0,0,0,0.0001) 0, rgba(0,0,0,0.5) 100%);background-image:-webkit-gradient(linear, left top, right top, color-stop(0, rgba(0,0,0,0.0001)), to(rgba(0,0,0,0.5)));background-image:linear-gradient(to right, rgba(0,0,0,0.0001) 0, rgba(0,0,0,0.5) 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#00000000', endColorstr='#80000000', GradientType=1);background-repeat:repeat-x}.carousel-control:hover,.carousel-control:focus{color:#fff;text-decoration:none;outline:0;filter:alpha(opacity=90);opacity:.9}.carousel-control .icon-prev,.carousel-control .icon-next,.carousel-control .glyphicon-chevron-left,.carousel-control 
.glyphicon-chevron-right{position:absolute;top:50%;z-index:5;display:inline-block;margin-top:-10px}.carousel-control .icon-prev,.carousel-control .glyphicon-chevron-left{left:50%;margin-left:-10px}.carousel-control .icon-next,.carousel-control .glyphicon-chevron-right{right:50%;margin-right:-10px}.carousel-control .icon-prev,.carousel-control .icon-next{width:20px;height:20px;font-family:serif;line-height:1}.carousel-control .icon-prev:before{content:"\2039"}.carousel-control .icon-next:before{content:"\203a"}.carousel-indicators{position:absolute;bottom:10px;left:50%;z-index:15;width:60%;padding-left:0;margin-left:-30%;text-align:center;list-style:none}.carousel-indicators li{display:inline-block;width:10px;height:10px;margin:1px;text-indent:-999px;cursor:pointer;background-color:#000 \9;background-color:rgba(0,0,0,0);border:1px solid #fff;border-radius:10px}.carousel-indicators .active{width:12px;height:12px;margin:0;background-color:#fff}.carousel-caption{position:absolute;right:15%;bottom:20px;left:15%;z-index:10;padding-top:20px;padding-bottom:20px;color:#fff;text-align:center;text-shadow:0 1px 2px rgba(0,0,0,0.6)}.carousel-caption .btn{text-shadow:none}@media screen and (min-width:768px){.carousel-control .glyphicon-chevron-left,.carousel-control .glyphicon-chevron-right,.carousel-control .icon-prev,.carousel-control .icon-next{width:30px;height:30px;margin-top:-10px;font-size:30px}.carousel-control .glyphicon-chevron-left,.carousel-control .icon-prev{margin-left:-10px}.carousel-control .glyphicon-chevron-right,.carousel-control .icon-next{margin-right:-10px}.carousel-caption{right:20%;left:20%;padding-bottom:30px}.carousel-indicators{bottom:20px}}.clearfix:before,.clearfix:after,.dl-horizontal dd:before,.dl-horizontal dd:after,.container:before,.container:after,.container-fluid:before,.container-fluid:after,.row:before,.row:after,.form-horizontal .form-group:before,.form-horizontal 
.form-group:after,.btn-toolbar:before,.btn-toolbar:after,.btn-group-vertical>.btn-group:before,.btn-group-vertical>.btn-group:after,.nav:before,.nav:after,.navbar:before,.navbar:after,.navbar-header:before,.navbar-header:after,.navbar-collapse:before,.navbar-collapse:after,.pager:before,.pager:after,.panel-body:before,.panel-body:after,.modal-header:before,.modal-header:after,.modal-footer:before,.modal-footer:after{display:table;content:" "}.clearfix:after,.dl-horizontal dd:after,.container:after,.container-fluid:after,.row:after,.form-horizontal .form-group:after,.btn-toolbar:after,.btn-group-vertical>.btn-group:after,.nav:after,.navbar:after,.navbar-header:after,.navbar-collapse:after,.pager:after,.panel-body:after,.modal-header:after,.modal-footer:after{clear:both}.center-block{display:block;margin-right:auto;margin-left:auto}.pull-right{float:right !important}.pull-left{float:left !important}.hide{display:none !important}.show{display:block !important}.invisible{visibility:hidden}.text-hide{font:0/0 a;color:transparent;text-shadow:none;background-color:transparent;border:0}.hidden{display:none !important}.affix{position:fixed}@-ms-viewport{width:device-width}.visible-xs,.visible-sm,.visible-md,.visible-lg{display:none !important}.visible-xs-block,.visible-xs-inline,.visible-xs-inline-block,.visible-sm-block,.visible-sm-inline,.visible-sm-inline-block,.visible-md-block,.visible-md-inline,.visible-md-inline-block,.visible-lg-block,.visible-lg-inline,.visible-lg-inline-block{display:none !important}@media (max-width:767px){.visible-xs{display:block !important}table.visible-xs{display:table !important}tr.visible-xs{display:table-row !important}th.visible-xs,td.visible-xs{display:table-cell !important}}@media (max-width:767px){.visible-xs-block{display:block !important}}@media (max-width:767px){.visible-xs-inline{display:inline !important}}@media (max-width:767px){.visible-xs-inline-block{display:inline-block !important}}@media (min-width:768px) and 
(max-width:991px){.visible-sm{display:block !important}table.visible-sm{display:table !important}tr.visible-sm{display:table-row !important}th.visible-sm,td.visible-sm{display:table-cell !important}}@media (min-width:768px) and (max-width:991px){.visible-sm-block{display:block !important}}@media (min-width:768px) and (max-width:991px){.visible-sm-inline{display:inline !important}}@media (min-width:768px) and (max-width:991px){.visible-sm-inline-block{display:inline-block !important}}@media (min-width:992px) and (max-width:1199px){.visible-md{display:block !important}table.visible-md{display:table !important}tr.visible-md{display:table-row !important}th.visible-md,td.visible-md{display:table-cell !important}}@media (min-width:992px) and (max-width:1199px){.visible-md-block{display:block !important}}@media (min-width:992px) and (max-width:1199px){.visible-md-inline{display:inline !important}}@media (min-width:992px) and (max-width:1199px){.visible-md-inline-block{display:inline-block !important}}@media (min-width:1200px){.visible-lg{display:block !important}table.visible-lg{display:table !important}tr.visible-lg{display:table-row !important}th.visible-lg,td.visible-lg{display:table-cell !important}}@media (min-width:1200px){.visible-lg-block{display:block !important}}@media (min-width:1200px){.visible-lg-inline{display:inline !important}}@media (min-width:1200px){.visible-lg-inline-block{display:inline-block !important}}@media (max-width:767px){.hidden-xs{display:none !important}}@media (min-width:768px) and (max-width:991px){.hidden-sm{display:none !important}}@media (min-width:992px) and (max-width:1199px){.hidden-md{display:none !important}}@media (min-width:1200px){.hidden-lg{display:none !important}}.visible-print{display:none !important}@media print{.visible-print{display:block !important}table.visible-print{display:table !important}tr.visible-print{display:table-row !important}th.visible-print,td.visible-print{display:table-cell 
!important}}.visible-print-block{display:none !important}@media print{.visible-print-block{display:block !important}}.visible-print-inline{display:none !important}@media print{.visible-print-inline{display:inline !important}}.visible-print-inline-block{display:none !important}@media print{.visible-print-inline-block{display:inline-block !important}}@media print{.hidden-print{display:none !important}} \ No newline at end of file diff --git a/presto-main/src/main/resources/webapp/vendor/bootstrap/fonts/glyphicons-halflings-regular.eot b/presto-main/src/main/resources/webapp/vendor/bootstrap/fonts/glyphicons-halflings-regular.eot old mode 100755 new mode 100644 diff --git a/presto-main/src/main/resources/webapp/vendor/bootstrap/fonts/glyphicons-halflings-regular.svg b/presto-main/src/main/resources/webapp/vendor/bootstrap/fonts/glyphicons-halflings-regular.svg old mode 100755 new mode 100644 diff --git a/presto-main/src/main/resources/webapp/vendor/bootstrap/fonts/glyphicons-halflings-regular.ttf b/presto-main/src/main/resources/webapp/vendor/bootstrap/fonts/glyphicons-halflings-regular.ttf old mode 100755 new mode 100644 diff --git a/presto-main/src/main/resources/webapp/vendor/bootstrap/fonts/glyphicons-halflings-regular.woff b/presto-main/src/main/resources/webapp/vendor/bootstrap/fonts/glyphicons-halflings-regular.woff old mode 100755 new mode 100644 diff --git a/presto-main/src/main/resources/webapp/vendor/bootstrap/fonts/glyphicons-halflings-regular.woff2 b/presto-main/src/main/resources/webapp/vendor/bootstrap/fonts/glyphicons-halflings-regular.woff2 old mode 100755 new mode 100644 diff --git a/presto-main/src/main/resources/webapp/vendor/bootstrap/js/bootstrap.js b/presto-main/src/main/resources/webapp/vendor/bootstrap/js/bootstrap.js old mode 100755 new mode 100644 index ab51f8b7aa2b..9779445bd7e5 --- a/presto-main/src/main/resources/webapp/vendor/bootstrap/js/bootstrap.js +++ b/presto-main/src/main/resources/webapp/vendor/bootstrap/js/bootstrap.js @@ -1,29 
+1,29 @@ /*! - * Bootstrap v3.3.5 (http://getbootstrap.com) - * Copyright 2011-2016 Twitter, Inc. - * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) + * Generated using the Bootstrap Customizer (https://getbootstrap.com/docs/3.4/customize/) */ /*! - * Generated using the Bootstrap Customizer (http://getbootstrap.com/customize/?id=71446b832bd9dbb87141a654eb911637) - * Config saved to config.json and https://gist.github.com/71446b832bd9dbb87141a654eb911637 + * Bootstrap v3.4.1 (https://getbootstrap.com/) + * Copyright 2011-2020 Twitter, Inc. + * Licensed under the MIT license */ + if (typeof jQuery === 'undefined') { throw new Error('Bootstrap\'s JavaScript requires jQuery') } +function ($) { 'use strict'; var version = $.fn.jquery.split(' ')[0].split('.') - if ((version[0] < 2 && version[1] < 9) || (version[0] == 1 && version[1] == 9 && version[2] < 1) || (version[0] > 2)) { - throw new Error('Bootstrap\'s JavaScript requires jQuery version 1.9.1 or higher, but lower than version 3') + if ((version[0] < 2 && version[1] < 9) || (version[0] == 1 && version[1] == 9 && version[2] < 1) || (version[0] > 3)) { + throw new Error('Bootstrap\'s JavaScript requires jQuery version 1.9.1 or higher, but lower than version 4') } }(jQuery); /* ======================================================================== - * Bootstrap: alert.js v3.3.6 - * http://getbootstrap.com/javascript/#alerts + * Bootstrap: alert.js v3.4.1 + * https://getbootstrap.com/docs/3.4/javascript/#alerts * ======================================================================== - * Copyright 2011-2015 Twitter, Inc. + * Copyright 2011-2019 Twitter, Inc. 
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) * ======================================================================== */ @@ -39,7 +39,7 @@ if (typeof jQuery === 'undefined') { $(el).on('click', dismiss, this.close) } - Alert.VERSION = '3.3.6' + Alert.VERSION = '3.4.1' Alert.TRANSITION_DURATION = 150 @@ -52,7 +52,8 @@ if (typeof jQuery === 'undefined') { selector = selector && selector.replace(/.*(?=#[^\s]*$)/, '') // strip for ie7 } - var $parent = $(selector) + selector = selector === '#' ? [] : selector + var $parent = $(document).find(selector) if (e) e.preventDefault() @@ -115,10 +116,10 @@ if (typeof jQuery === 'undefined') { }(jQuery); /* ======================================================================== - * Bootstrap: button.js v3.3.6 - * http://getbootstrap.com/javascript/#buttons + * Bootstrap: button.js v3.4.1 + * https://getbootstrap.com/docs/3.4/javascript/#buttons * ======================================================================== - * Copyright 2011-2015 Twitter, Inc. + * Copyright 2011-2019 Twitter, Inc. * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) * ======================================================================== */ @@ -135,7 +136,7 @@ if (typeof jQuery === 'undefined') { this.isLoading = false } - Button.VERSION = '3.3.6' + Button.VERSION = '3.4.1' Button.DEFAULTS = { loadingText: 'loading...' 
@@ -157,10 +158,10 @@ if (typeof jQuery === 'undefined') { if (state == 'loadingText') { this.isLoading = true - $el.addClass(d).attr(d, d) + $el.addClass(d).attr(d, d).prop(d, true) } else if (this.isLoading) { this.isLoading = false - $el.removeClass(d).removeAttr(d) + $el.removeClass(d).removeAttr(d).prop(d, false) } }, this), 0) } @@ -224,10 +225,15 @@ if (typeof jQuery === 'undefined') { $(document) .on('click.bs.button.data-api', '[data-toggle^="button"]', function (e) { - var $btn = $(e.target) - if (!$btn.hasClass('btn')) $btn = $btn.closest('.btn') + var $btn = $(e.target).closest('.btn') Plugin.call($btn, 'toggle') - if (!($(e.target).is('input[type="radio"]') || $(e.target).is('input[type="checkbox"]'))) e.preventDefault() + if (!($(e.target).is('input[type="radio"], input[type="checkbox"]'))) { + // Prevent double click on radios, and the double selections (so cancellation) on checkboxes + e.preventDefault() + // The target component still receive the focus + if ($btn.is('input,button')) $btn.trigger('focus') + else $btn.find('input:visible,button:visible').first().trigger('focus') + } }) .on('focus.bs.button.data-api blur.bs.button.data-api', '[data-toggle^="button"]', function (e) { $(e.target).closest('.btn').toggleClass('focus', /^focus(in)?$/.test(e.type)) @@ -236,10 +242,10 @@ if (typeof jQuery === 'undefined') { }(jQuery); /* ======================================================================== - * Bootstrap: carousel.js v3.3.6 - * http://getbootstrap.com/javascript/#carousel + * Bootstrap: carousel.js v3.4.1 + * https://getbootstrap.com/docs/3.4/javascript/#carousel * ======================================================================== - * Copyright 2011-2015 Twitter, Inc. + * Copyright 2011-2019 Twitter, Inc. 
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) * ======================================================================== */ @@ -267,7 +273,7 @@ if (typeof jQuery === 'undefined') { .on('mouseleave.bs.carousel', $.proxy(this.cycle, this)) } - Carousel.VERSION = '3.3.6' + Carousel.VERSION = '3.4.1' Carousel.TRANSITION_DURATION = 600 @@ -381,7 +387,9 @@ if (typeof jQuery === 'undefined') { var slidEvent = $.Event('slid.bs.carousel', { relatedTarget: relatedTarget, direction: direction }) // yes, "slid" if ($.support.transition && this.$element.hasClass('slide')) { $next.addClass(type) - $next[0].offsetWidth // force reflow + if (typeof $next === 'object' && $next.length) { + $next[0].offsetWidth // force reflow + } $active.addClass(direction) $next.addClass(direction) $active @@ -443,10 +451,17 @@ if (typeof jQuery === 'undefined') { // ================= var clickHandler = function (e) { - var href var $this = $(this) - var $target = $($this.attr('data-target') || (href = $this.attr('href')) && href.replace(/.*(?=#[^\s]+$)/, '')) // strip for ie7 + var href = $this.attr('href') + if (href) { + href = href.replace(/.*(?=#[^\s]+$)/, '') // strip for ie7 + } + + var target = $this.attr('data-target') || href + var $target = $(document).find(target) + if (!$target.hasClass('carousel')) return + var options = $.extend({}, $target.data(), $this.data()) var slideIndex = $this.attr('data-slide-to') if (slideIndex) options.interval = false @@ -474,10 +489,10 @@ if (typeof jQuery === 'undefined') { }(jQuery); /* ======================================================================== - * Bootstrap: dropdown.js v3.3.6 - * http://getbootstrap.com/javascript/#dropdowns + * Bootstrap: dropdown.js v3.4.1 + * https://getbootstrap.com/docs/3.4/javascript/#dropdowns * ======================================================================== - * Copyright 2011-2015 Twitter, Inc. + * Copyright 2011-2019 Twitter, Inc. 
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) * ======================================================================== */ @@ -494,7 +509,7 @@ if (typeof jQuery === 'undefined') { $(element).on('click.bs.dropdown', this.toggle) } - Dropdown.VERSION = '3.3.6' + Dropdown.VERSION = '3.4.1' function getParent($this) { var selector = $this.attr('data-target') @@ -504,7 +519,7 @@ if (typeof jQuery === 'undefined') { selector = selector && /#[A-Za-z]/.test(selector) && selector.replace(/.*(?=#[^\s]*$)/, '') // strip for ie7 } - var $parent = selector && $(selector) + var $parent = selector !== '#' ? $(document).find(selector) : null return $parent && $parent.length ? $parent : $this.parent() } @@ -640,10 +655,10 @@ if (typeof jQuery === 'undefined') { }(jQuery); /* ======================================================================== - * Bootstrap: modal.js v3.3.6 - * http://getbootstrap.com/javascript/#modals + * Bootstrap: modal.js v3.4.1 + * https://getbootstrap.com/docs/3.4/javascript/#modals * ======================================================================== - * Copyright 2011-2015 Twitter, Inc. + * Copyright 2011-2019 Twitter, Inc. 
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) * ======================================================================== */ @@ -655,15 +670,16 @@ if (typeof jQuery === 'undefined') { // ====================== var Modal = function (element, options) { - this.options = options - this.$body = $(document.body) - this.$element = $(element) - this.$dialog = this.$element.find('.modal-dialog') - this.$backdrop = null - this.isShown = null - this.originalBodyPad = null - this.scrollbarWidth = 0 + this.options = options + this.$body = $(document.body) + this.$element = $(element) + this.$dialog = this.$element.find('.modal-dialog') + this.$backdrop = null + this.isShown = null + this.originalBodyPad = null + this.scrollbarWidth = 0 this.ignoreBackdropClick = false + this.fixedContent = '.navbar-fixed-top, .navbar-fixed-bottom' if (this.options.remote) { this.$element @@ -674,7 +690,7 @@ if (typeof jQuery === 'undefined') { } } - Modal.VERSION = '3.3.6' + Modal.VERSION = '3.4.1' Modal.TRANSITION_DURATION = 300 Modal.BACKDROP_TRANSITION_DURATION = 150 @@ -691,7 +707,7 @@ if (typeof jQuery === 'undefined') { Modal.prototype.show = function (_relatedTarget) { var that = this - var e = $.Event('show.bs.modal', { relatedTarget: _relatedTarget }) + var e = $.Event('show.bs.modal', { relatedTarget: _relatedTarget }) this.$element.trigger(e) @@ -781,7 +797,9 @@ if (typeof jQuery === 'undefined') { $(document) .off('focusin.bs.modal') // guard against infinite focus loop .on('focusin.bs.modal', $.proxy(function (e) { - if (this.$element[0] !== e.target && !this.$element.has(e.target).length) { + if (document !== e.target && + this.$element[0] !== e.target && + !this.$element.has(e.target).length) { this.$element.trigger('focus') } }, this)) @@ -883,7 +901,7 @@ if (typeof jQuery === 'undefined') { var modalIsOverflowing = this.$element[0].scrollHeight > document.documentElement.clientHeight this.$element.css({ - paddingLeft: !this.bodyIsOverflowing && 
modalIsOverflowing ? this.scrollbarWidth : '', + paddingLeft: !this.bodyIsOverflowing && modalIsOverflowing ? this.scrollbarWidth : '', paddingRight: this.bodyIsOverflowing && !modalIsOverflowing ? this.scrollbarWidth : '' }) } @@ -908,11 +926,26 @@ if (typeof jQuery === 'undefined') { Modal.prototype.setScrollbar = function () { var bodyPad = parseInt((this.$body.css('padding-right') || 0), 10) this.originalBodyPad = document.body.style.paddingRight || '' - if (this.bodyIsOverflowing) this.$body.css('padding-right', bodyPad + this.scrollbarWidth) + var scrollbarWidth = this.scrollbarWidth + if (this.bodyIsOverflowing) { + this.$body.css('padding-right', bodyPad + scrollbarWidth) + $(this.fixedContent).each(function (index, element) { + var actualPadding = element.style.paddingRight + var calculatedPadding = $(element).css('padding-right') + $(element) + .data('padding-right', actualPadding) + .css('padding-right', parseFloat(calculatedPadding) + scrollbarWidth + 'px') + }) + } } Modal.prototype.resetScrollbar = function () { this.$body.css('padding-right', this.originalBodyPad) + $(this.fixedContent).each(function (index, element) { + var padding = $(element).data('padding-right') + $(element).removeData('padding-right') + element.style.paddingRight = padding ? 
padding : '' + }) } Modal.prototype.measureScrollbar = function () { // thx walsh @@ -930,8 +963,8 @@ if (typeof jQuery === 'undefined') { function Plugin(option, _relatedTarget) { return this.each(function () { - var $this = $(this) - var data = $this.data('bs.modal') + var $this = $(this) + var data = $this.data('bs.modal') var options = $.extend({}, Modal.DEFAULTS, $this.data(), typeof option == 'object' && option) if (!data) $this.data('bs.modal', (data = new Modal(this, options))) @@ -942,7 +975,7 @@ if (typeof jQuery === 'undefined') { var old = $.fn.modal - $.fn.modal = Plugin + $.fn.modal = Plugin $.fn.modal.Constructor = Modal @@ -959,10 +992,13 @@ if (typeof jQuery === 'undefined') { // ============== $(document).on('click.bs.modal.data-api', '[data-toggle="modal"]', function (e) { - var $this = $(this) - var href = $this.attr('href') - var $target = $($this.attr('data-target') || (href && href.replace(/.*(?=#[^\s]+$)/, ''))) // strip for ie7 - var option = $target.data('bs.modal') ? 'toggle' : $.extend({ remote: !/#/.test(href) && href }, $target.data(), $this.data()) + var $this = $(this) + var href = $this.attr('href') + var target = $this.attr('data-target') || + (href && href.replace(/.*(?=#[^\s]+$)/, '')) // strip for ie7 + + var $target = $(document).find(target) + var option = $target.data('bs.modal') ? 'toggle' : $.extend({ remote: !/#/.test(href) && href }, $target.data(), $this.data()) if ($this.is('a')) e.preventDefault() @@ -978,18 +1014,148 @@ if (typeof jQuery === 'undefined') { }(jQuery); /* ======================================================================== - * Bootstrap: tooltip.js v3.3.6 - * http://getbootstrap.com/javascript/#tooltip + * Bootstrap: tooltip.js v3.4.1 + * https://getbootstrap.com/docs/3.4/javascript/#tooltip * Inspired by the original jQuery.tipsy by Jason Frame * ======================================================================== - * Copyright 2011-2015 Twitter, Inc. + * Copyright 2011-2019 Twitter, Inc. 
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) * ======================================================================== */ - +function ($) { 'use strict'; + var DISALLOWED_ATTRIBUTES = ['sanitize', 'whiteList', 'sanitizeFn'] + + var uriAttrs = [ + 'background', + 'cite', + 'href', + 'itemtype', + 'longdesc', + 'poster', + 'src', + 'xlink:href' + ] + + var ARIA_ATTRIBUTE_PATTERN = /^aria-[\w-]*$/i + + var DefaultWhitelist = { + // Global attributes allowed on any supplied element below. + '*': ['class', 'dir', 'id', 'lang', 'role', ARIA_ATTRIBUTE_PATTERN], + a: ['target', 'href', 'title', 'rel'], + area: [], + b: [], + br: [], + col: [], + code: [], + div: [], + em: [], + hr: [], + h1: [], + h2: [], + h3: [], + h4: [], + h5: [], + h6: [], + i: [], + img: ['src', 'alt', 'title', 'width', 'height'], + li: [], + ol: [], + p: [], + pre: [], + s: [], + small: [], + span: [], + sub: [], + sup: [], + strong: [], + u: [], + ul: [] + } + + /** + * A pattern that recognizes a commonly useful subset of URLs that are safe. + * + * Shoutout to Angular 7 https://github.com/angular/angular/blob/7.2.4/packages/core/src/sanitization/url_sanitizer.ts + */ + var SAFE_URL_PATTERN = /^(?:(?:https?|mailto|ftp|tel|file):|[^&:/?#]*(?:[/?#]|$))/gi + + /** + * A pattern that matches safe data URLs. Only matches image, video and audio types. 
+ * + * Shoutout to Angular 7 https://github.com/angular/angular/blob/7.2.4/packages/core/src/sanitization/url_sanitizer.ts + */ + var DATA_URL_PATTERN = /^data:(?:image\/(?:bmp|gif|jpeg|jpg|png|tiff|webp)|video\/(?:mpeg|mp4|ogg|webm)|audio\/(?:mp3|oga|ogg|opus));base64,[a-z0-9+/]+=*$/i + + function allowedAttribute(attr, allowedAttributeList) { + var attrName = attr.nodeName.toLowerCase() + + if ($.inArray(attrName, allowedAttributeList) !== -1) { + if ($.inArray(attrName, uriAttrs) !== -1) { + return Boolean(attr.nodeValue.match(SAFE_URL_PATTERN) || attr.nodeValue.match(DATA_URL_PATTERN)) + } + + return true + } + + var regExp = $(allowedAttributeList).filter(function (index, value) { + return value instanceof RegExp + }) + + // Check if a regular expression validates the attribute. + for (var i = 0, l = regExp.length; i < l; i++) { + if (attrName.match(regExp[i])) { + return true + } + } + + return false + } + + function sanitizeHtml(unsafeHtml, whiteList, sanitizeFn) { + if (unsafeHtml.length === 0) { + return unsafeHtml + } + + if (sanitizeFn && typeof sanitizeFn === 'function') { + return sanitizeFn(unsafeHtml) + } + + // IE 8 and below don't support createHTMLDocument + if (!document.implementation || !document.implementation.createHTMLDocument) { + return unsafeHtml + } + + var createdDocument = document.implementation.createHTMLDocument('sanitization') + createdDocument.body.innerHTML = unsafeHtml + + var whitelistKeys = $.map(whiteList, function (el, i) { return i }) + var elements = $(createdDocument.body).find('*') + + for (var i = 0, len = elements.length; i < len; i++) { + var el = elements[i] + var elName = el.nodeName.toLowerCase() + + if ($.inArray(elName, whitelistKeys) === -1) { + el.parentNode.removeChild(el) + + continue + } + + var attributeList = $.map(el.attributes, function (el) { return el }) + var whitelistedAttributes = [].concat(whiteList['*'] || [], whiteList[elName] || []) + + for (var j = 0, len2 = attributeList.length; j < len2; 
j++) { + if (!allowedAttribute(attributeList[j], whitelistedAttributes)) { + el.removeAttribute(attributeList[j].nodeName) + } + } + } + + return createdDocument.body.innerHTML + } + // TOOLTIP PUBLIC CLASS DEFINITION // =============================== @@ -1005,7 +1171,7 @@ if (typeof jQuery === 'undefined') { this.init('tooltip', element, options) } - Tooltip.VERSION = '3.3.6' + Tooltip.VERSION = '3.4.1' Tooltip.TRANSITION_DURATION = 150 @@ -1022,7 +1188,10 @@ if (typeof jQuery === 'undefined') { viewport: { selector: 'body', padding: 0 - } + }, + sanitize : true, + sanitizeFn : null, + whiteList : DefaultWhitelist } Tooltip.prototype.init = function (type, element, options) { @@ -1030,7 +1199,7 @@ if (typeof jQuery === 'undefined') { this.type = type this.$element = $(element) this.options = this.getOptions(options) - this.$viewport = this.options.viewport && $($.isFunction(this.options.viewport) ? this.options.viewport.call(this, this.$element) : (this.options.viewport.selector || this.options.viewport)) + this.$viewport = this.options.viewport && $(document).find($.isFunction(this.options.viewport) ? 
this.options.viewport.call(this, this.$element) : (this.options.viewport.selector || this.options.viewport)) this.inState = { click: false, hover: false, focus: false } if (this.$element[0] instanceof document.constructor && !this.options.selector) { @@ -1063,7 +1232,15 @@ if (typeof jQuery === 'undefined') { } Tooltip.prototype.getOptions = function (options) { - options = $.extend({}, this.getDefaults(), this.$element.data(), options) + var dataAttributes = this.$element.data() + + for (var dataAttr in dataAttributes) { + if (dataAttributes.hasOwnProperty(dataAttr) && $.inArray(dataAttr, DISALLOWED_ATTRIBUTES) !== -1) { + delete dataAttributes[dataAttr] + } + } + + options = $.extend({}, this.getDefaults(), dataAttributes, options) if (options.delay && typeof options.delay == 'number') { options.delay = { @@ -1072,6 +1249,10 @@ if (typeof jQuery === 'undefined') { } } + if (options.sanitize) { + options.template = sanitizeHtml(options.template, options.whiteList, options.sanitizeFn) + } + return options } @@ -1183,7 +1364,7 @@ if (typeof jQuery === 'undefined') { .addClass(placement) .data('bs.' + this.type, this) - this.options.container ? $tip.appendTo(this.options.container) : $tip.insertAfter(this.$element) + this.options.container ? $tip.appendTo($(document).find(this.options.container)) : $tip.insertAfter(this.$element) this.$element.trigger('inserted.bs.' + this.type) var pos = this.getPosition() @@ -1285,7 +1466,16 @@ if (typeof jQuery === 'undefined') { var $tip = this.tip() var title = this.getTitle() - $tip.find('.tooltip-inner')[this.options.html ? 
'html' : 'text'](title) + if (this.options.html) { + if (this.options.sanitize) { + title = sanitizeHtml(title, this.options.whiteList, this.options.sanitizeFn) + } + + $tip.find('.tooltip-inner').html(title) + } else { + $tip.find('.tooltip-inner').text(title) + } + $tip.removeClass('fade in top bottom left right') } @@ -1296,9 +1486,11 @@ if (typeof jQuery === 'undefined') { function complete() { if (that.hoverState != 'in') $tip.detach() - that.$element - .removeAttr('aria-describedby') - .trigger('hidden.bs.' + that.type) + if (that.$element) { // TODO: Check whether guarding this code with this `if` is really necessary. + that.$element + .removeAttr('aria-describedby') + .trigger('hidden.bs.' + that.type) + } callback && callback() } @@ -1341,7 +1533,10 @@ if (typeof jQuery === 'undefined') { // width and height are missing in IE8, so compute them manually; see https://github.com/twbs/bootstrap/issues/14093 elRect = $.extend({}, elRect, { width: elRect.right - elRect.left, height: elRect.bottom - elRect.top }) } - var elOffset = isBody ? { top: 0, left: 0 } : $element.offset() + var isSvg = window.SVGElement && el instanceof window.SVGElement + // Avoid using $.offset() on SVGs since it gives incorrect results in jQuery 3. + // See https://github.com/twbs/bootstrap/issues/20280 + var elOffset = isBody ? { top: 0, left: 0 } : (isSvg ? null : $element.offset()) var scroll = { scroll: isBody ? document.documentElement.scrollTop || document.body.scrollTop : $element.scrollTop() } var outerDims = isBody ? 
{ width: $(window).width(), height: $(window).height() } : null @@ -1457,9 +1652,13 @@ if (typeof jQuery === 'undefined') { that.$tip = null that.$arrow = null that.$viewport = null + that.$element = null }) } + Tooltip.prototype.sanitizeHtml = function (unsafeHtml) { + return sanitizeHtml(unsafeHtml, this.options.whiteList, this.options.sanitizeFn) + } // TOOLTIP PLUGIN DEFINITION // ========================= @@ -1493,10 +1692,10 @@ if (typeof jQuery === 'undefined') { }(jQuery); /* ======================================================================== - * Bootstrap: popover.js v3.3.6 - * http://getbootstrap.com/javascript/#popovers + * Bootstrap: popover.js v3.4.1 + * https://getbootstrap.com/docs/3.4/javascript/#popovers * ======================================================================== - * Copyright 2011-2015 Twitter, Inc. + * Copyright 2011-2019 Twitter, Inc. * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) * ======================================================================== */ @@ -1513,7 +1712,7 @@ if (typeof jQuery === 'undefined') { if (!$.fn.tooltip) throw new Error('Popover requires tooltip.js') - Popover.VERSION = '3.3.6' + Popover.VERSION = '3.4.1' Popover.DEFAULTS = $.extend({}, $.fn.tooltip.Constructor.DEFAULTS, { placement: 'right', @@ -1539,10 +1738,25 @@ if (typeof jQuery === 'undefined') { var title = this.getTitle() var content = this.getContent() - $tip.find('.popover-title')[this.options.html ? 'html' : 'text'](title) - $tip.find('.popover-content').children().detach().end()[ // we use append for html objects to maintain js events - this.options.html ? (typeof content == 'string' ? 
'html' : 'append') : 'text' - ](content) + if (this.options.html) { + var typeContent = typeof content + + if (this.options.sanitize) { + title = this.sanitizeHtml(title) + + if (typeContent === 'string') { + content = this.sanitizeHtml(content) + } + } + + $tip.find('.popover-title').html(title) + $tip.find('.popover-content').children().detach().end()[ + typeContent === 'string' ? 'html' : 'append' + ](content) + } else { + $tip.find('.popover-title').text(title) + $tip.find('.popover-content').children().detach().end().text(content) + } $tip.removeClass('fade top bottom left right in') @@ -1561,8 +1775,8 @@ if (typeof jQuery === 'undefined') { return $e.attr('data-content') || (typeof o.content == 'function' ? - o.content.call($e[0]) : - o.content) + o.content.call($e[0]) : + o.content) } Popover.prototype.arrow = function () { @@ -1602,10 +1816,10 @@ if (typeof jQuery === 'undefined') { }(jQuery); /* ======================================================================== - * Bootstrap: tab.js v3.3.6 - * http://getbootstrap.com/javascript/#tabs + * Bootstrap: tab.js v3.4.1 + * https://getbootstrap.com/docs/3.4/javascript/#tabs * ======================================================================== - * Copyright 2011-2015 Twitter, Inc. + * Copyright 2011-2019 Twitter, Inc. 
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) * ======================================================================== */ @@ -1622,7 +1836,7 @@ if (typeof jQuery === 'undefined') { // jscs:enable requireDollarBeforejQueryAssignment } - Tab.VERSION = '3.3.6' + Tab.VERSION = '3.4.1' Tab.TRANSITION_DURATION = 150 @@ -1651,7 +1865,7 @@ if (typeof jQuery === 'undefined') { if (showEvent.isDefaultPrevented() || hideEvent.isDefaultPrevented()) return - var $target = $(selector) + var $target = $(document).find(selector) this.activate($this.closest('li'), $ul) this.activate($target, $target.parent(), function () { @@ -1676,15 +1890,15 @@ if (typeof jQuery === 'undefined') { $active .removeClass('active') .find('> .dropdown-menu > .active') - .removeClass('active') + .removeClass('active') .end() .find('[data-toggle="tab"]') - .attr('aria-expanded', false) + .attr('aria-expanded', false) element .addClass('active') .find('[data-toggle="tab"]') - .attr('aria-expanded', true) + .attr('aria-expanded', true) if (transition) { element[0].offsetWidth // reflow for transition @@ -1696,10 +1910,10 @@ if (typeof jQuery === 'undefined') { if (element.parent('.dropdown-menu').length) { element .closest('li.dropdown') - .addClass('active') + .addClass('active') .end() .find('[data-toggle="tab"]') - .attr('aria-expanded', true) + .attr('aria-expanded', true) } callback && callback() @@ -1758,10 +1972,10 @@ if (typeof jQuery === 'undefined') { }(jQuery); /* ======================================================================== - * Bootstrap: affix.js v3.3.6 - * http://getbootstrap.com/javascript/#affix + * Bootstrap: affix.js v3.4.1 + * https://getbootstrap.com/docs/3.4/javascript/#affix * ======================================================================== - * Copyright 2011-2015 Twitter, Inc. + * Copyright 2011-2019 Twitter, Inc. 
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) * ======================================================================== */ @@ -1775,7 +1989,9 @@ if (typeof jQuery === 'undefined') { var Affix = function (element, options) { this.options = $.extend({}, Affix.DEFAULTS, options) - this.$target = $(this.options.target) + var target = this.options.target === Affix.DEFAULTS.target ? $(this.options.target) : $(document).find(this.options.target) + + this.$target = target .on('scroll.bs.affix.data-api', $.proxy(this.checkPosition, this)) .on('click.bs.affix.data-api', $.proxy(this.checkPositionWithEventLoop, this)) @@ -1787,7 +2003,7 @@ if (typeof jQuery === 'undefined') { this.checkPosition() } - Affix.VERSION = '3.3.6' + Affix.VERSION = '3.4.1' Affix.RESET = 'affix affix-top affix-bottom' @@ -1921,13 +2137,14 @@ if (typeof jQuery === 'undefined') { }(jQuery); /* ======================================================================== - * Bootstrap: collapse.js v3.3.6 - * http://getbootstrap.com/javascript/#collapse + * Bootstrap: collapse.js v3.4.1 + * https://getbootstrap.com/docs/3.4/javascript/#collapse * ======================================================================== - * Copyright 2011-2015 Twitter, Inc. + * Copyright 2011-2019 Twitter, Inc. 
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) * ======================================================================== */ +/* jshint latedef: false */ +function ($) { 'use strict'; @@ -1951,7 +2168,7 @@ if (typeof jQuery === 'undefined') { if (this.options.toggle) this.toggle() } - Collapse.VERSION = '3.3.6' + Collapse.VERSION = '3.4.1' Collapse.TRANSITION_DURATION = 350 @@ -2058,7 +2275,7 @@ if (typeof jQuery === 'undefined') { } Collapse.prototype.getParent = function () { - return $(this.options.parent) + return $(document).find(this.options.parent) .find('[data-toggle="collapse"][data-parent="' + this.options.parent + '"]') .each($.proxy(function (i, element) { var $element = $(element) @@ -2081,7 +2298,7 @@ if (typeof jQuery === 'undefined') { var target = $trigger.attr('data-target') || (href = $trigger.attr('href')) && href.replace(/.*(?=#[^\s]+$)/, '') // strip for ie7 - return $(target) + return $(document).find(target) } @@ -2133,10 +2350,10 @@ if (typeof jQuery === 'undefined') { }(jQuery); /* ======================================================================== - * Bootstrap: scrollspy.js v3.3.6 - * http://getbootstrap.com/javascript/#scrollspy + * Bootstrap: scrollspy.js v3.4.1 + * https://getbootstrap.com/docs/3.4/javascript/#scrollspy * ======================================================================== - * Copyright 2011-2015 Twitter, Inc. + * Copyright 2011-2019 Twitter, Inc. 
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) * ======================================================================== */ @@ -2162,7 +2379,7 @@ if (typeof jQuery === 'undefined') { this.process() } - ScrollSpy.VERSION = '3.3.6' + ScrollSpy.VERSION = '3.4.1' ScrollSpy.DEFAULTS = { offset: 10 @@ -2306,10 +2523,10 @@ if (typeof jQuery === 'undefined') { }(jQuery); /* ======================================================================== - * Bootstrap: transition.js v3.3.6 - * http://getbootstrap.com/javascript/#transitions + * Bootstrap: transition.js v3.4.1 + * https://getbootstrap.com/docs/3.4/javascript/#transitions * ======================================================================== - * Copyright 2011-2015 Twitter, Inc. + * Copyright 2011-2019 Twitter, Inc. * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) * ======================================================================== */ @@ -2317,7 +2534,7 @@ if (typeof jQuery === 'undefined') { +function ($) { 'use strict'; - // CSS TRANSITION SUPPORT (Shoutout: http://www.modernizr.com/) + // CSS TRANSITION SUPPORT (Shoutout: https://modernizr.com/) // ============================================================ function transitionEnd() { @@ -2339,7 +2556,7 @@ if (typeof jQuery === 'undefined') { return false // explicit for ie8 ( ._.) } - // http://blog.alexmaccaw.com/css-transitions + // https://blog.alexmaccaw.com/css-transitions $.fn.emulateTransitionEnd = function (duration) { var called = false var $el = this diff --git a/presto-main/src/main/resources/webapp/vendor/bootstrap/js/bootstrap.min.js b/presto-main/src/main/resources/webapp/vendor/bootstrap/js/bootstrap.min.js old mode 100755 new mode 100644 index ffff92890f94..ebf47049bb56 --- a/presto-main/src/main/resources/webapp/vendor/bootstrap/js/bootstrap.min.js +++ b/presto-main/src/main/resources/webapp/vendor/bootstrap/js/bootstrap.min.js @@ -1,12 +1,12 @@ /*! 
- * Bootstrap v3.3.5 (http://getbootstrap.com) - * Copyright 2011-2016 Twitter, Inc. - * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) + * Generated using the Bootstrap Customizer (https://getbootstrap.com/docs/3.4/customize/) */ /*! - * Generated using the Bootstrap Customizer (http://getbootstrap.com/customize/?id=71446b832bd9dbb87141a654eb911637) - * Config saved to config.json and https://gist.github.com/71446b832bd9dbb87141a654eb911637 + * Bootstrap v3.4.1 (https://getbootstrap.com/) + * Copyright 2011-2020 Twitter, Inc. + * Licensed under the MIT license */ -if("undefined"==typeof jQuery)throw new Error("Bootstrap's JavaScript requires jQuery");+function(t){"use strict";var e=t.fn.jquery.split(" ")[0].split(".");if(e[0]<2&&e[1]<9||1==e[0]&&9==e[1]&&e[2]<1||e[0]>2)throw new Error("Bootstrap's JavaScript requires jQuery version 1.9.1 or higher, but lower than version 3")}(jQuery),+function(t){"use strict";function e(e){return this.each(function(){var i=t(this),n=i.data("bs.alert");n||i.data("bs.alert",n=new o(this)),"string"==typeof e&&n[e].call(i)})}var i='[data-dismiss="alert"]',o=function(e){t(e).on("click",i,this.close)};o.VERSION="3.3.6",o.TRANSITION_DURATION=150,o.prototype.close=function(e){function i(){a.detach().trigger("closed.bs.alert").remove()}var n=t(this),s=n.attr("data-target");s||(s=n.attr("href"),s=s&&s.replace(/.*(?=#[^\s]*$)/,""));var a=t(s);e&&e.preventDefault(),a.length||(a=n.closest(".alert")),a.trigger(e=t.Event("close.bs.alert")),e.isDefaultPrevented()||(a.removeClass("in"),t.support.transition&&a.hasClass("fade")?a.one("bsTransitionEnd",i).emulateTransitionEnd(o.TRANSITION_DURATION):i())};var n=t.fn.alert;t.fn.alert=e,t.fn.alert.Constructor=o,t.fn.alert.noConflict=function(){return t.fn.alert=n,this},t(document).on("click.bs.alert.data-api",i,o.prototype.close)}(jQuery),+function(t){"use strict";function e(e){return this.each(function(){var o=t(this),n=o.data("bs.button"),s="object"==typeof 
e&&e;n||o.data("bs.button",n=new i(this,s)),"toggle"==e?n.toggle():e&&n.setState(e)})}var i=function(e,o){this.$element=t(e),this.options=t.extend({},i.DEFAULTS,o),this.isLoading=!1};i.VERSION="3.3.6",i.DEFAULTS={loadingText:"loading..."},i.prototype.setState=function(e){var i="disabled",o=this.$element,n=o.is("input")?"val":"html",s=o.data();e+="Text",null==s.resetText&&o.data("resetText",o[n]()),setTimeout(t.proxy(function(){o[n](null==s[e]?this.options[e]:s[e]),"loadingText"==e?(this.isLoading=!0,o.addClass(i).attr(i,i)):this.isLoading&&(this.isLoading=!1,o.removeClass(i).removeAttr(i))},this),0)},i.prototype.toggle=function(){var t=!0,e=this.$element.closest('[data-toggle="buttons"]');if(e.length){var i=this.$element.find("input");"radio"==i.prop("type")?(i.prop("checked")&&(t=!1),e.find(".active").removeClass("active"),this.$element.addClass("active")):"checkbox"==i.prop("type")&&(i.prop("checked")!==this.$element.hasClass("active")&&(t=!1),this.$element.toggleClass("active")),i.prop("checked",this.$element.hasClass("active")),t&&i.trigger("change")}else this.$element.attr("aria-pressed",!this.$element.hasClass("active")),this.$element.toggleClass("active")};var o=t.fn.button;t.fn.button=e,t.fn.button.Constructor=i,t.fn.button.noConflict=function(){return t.fn.button=o,this},t(document).on("click.bs.button.data-api",'[data-toggle^="button"]',function(i){var o=t(i.target);o.hasClass("btn")||(o=o.closest(".btn")),e.call(o,"toggle"),t(i.target).is('input[type="radio"]')||t(i.target).is('input[type="checkbox"]')||i.preventDefault()}).on("focus.bs.button.data-api blur.bs.button.data-api",'[data-toggle^="button"]',function(e){t(e.target).closest(".btn").toggleClass("focus",/^focus(in)?$/.test(e.type))})}(jQuery),+function(t){"use strict";function e(e){return this.each(function(){var o=t(this),n=o.data("bs.carousel"),s=t.extend({},i.DEFAULTS,o.data(),"object"==typeof e&&e),a="string"==typeof e?e:s.slide;n||o.data("bs.carousel",n=new i(this,s)),"number"==typeof 
e?n.to(e):a?n[a]():s.interval&&n.pause().cycle()})}var i=function(e,i){this.$element=t(e),this.$indicators=this.$element.find(".carousel-indicators"),this.options=i,this.paused=null,this.sliding=null,this.interval=null,this.$active=null,this.$items=null,this.options.keyboard&&this.$element.on("keydown.bs.carousel",t.proxy(this.keydown,this)),"hover"==this.options.pause&&!("ontouchstart"in document.documentElement)&&this.$element.on("mouseenter.bs.carousel",t.proxy(this.pause,this)).on("mouseleave.bs.carousel",t.proxy(this.cycle,this))};i.VERSION="3.3.6",i.TRANSITION_DURATION=600,i.DEFAULTS={interval:5e3,pause:"hover",wrap:!0,keyboard:!0},i.prototype.keydown=function(t){if(!/input|textarea/i.test(t.target.tagName)){switch(t.which){case 37:this.prev();break;case 39:this.next();break;default:return}t.preventDefault()}},i.prototype.cycle=function(e){return e||(this.paused=!1),this.interval&&clearInterval(this.interval),this.options.interval&&!this.paused&&(this.interval=setInterval(t.proxy(this.next,this),this.options.interval)),this},i.prototype.getItemIndex=function(t){return this.$items=t.parent().children(".item"),this.$items.index(t||this.$active)},i.prototype.getItemForDirection=function(t,e){var i=this.getItemIndex(e),o="prev"==t&&0===i||"next"==t&&i==this.$items.length-1;if(o&&!this.options.wrap)return e;var n="prev"==t?-1:1,s=(i+n)%this.$items.length;return this.$items.eq(s)},i.prototype.to=function(t){var e=this,i=this.getItemIndex(this.$active=this.$element.find(".item.active"));return t>this.$items.length-1||0>t?void 0:this.sliding?this.$element.one("slid.bs.carousel",function(){e.to(t)}):i==t?this.pause().cycle():this.slide(t>i?"next":"prev",this.$items.eq(t))},i.prototype.pause=function(e){return e||(this.paused=!0),this.$element.find(".next, .prev").length&&t.support.transition&&(this.$element.trigger(t.support.transition.end),this.cycle(!0)),this.interval=clearInterval(this.interval),this},i.prototype.next=function(){return this.sliding?void 
0:this.slide("next")},i.prototype.prev=function(){return this.sliding?void 0:this.slide("prev")},i.prototype.slide=function(e,o){var n=this.$element.find(".item.active"),s=o||this.getItemForDirection(e,n),a=this.interval,r="next"==e?"left":"right",l=this;if(s.hasClass("active"))return this.sliding=!1;var h=s[0],d=t.Event("slide.bs.carousel",{relatedTarget:h,direction:r});if(this.$element.trigger(d),!d.isDefaultPrevented()){if(this.sliding=!0,a&&this.pause(),this.$indicators.length){this.$indicators.find(".active").removeClass("active");var p=t(this.$indicators.children()[this.getItemIndex(s)]);p&&p.addClass("active")}var c=t.Event("slid.bs.carousel",{relatedTarget:h,direction:r});return t.support.transition&&this.$element.hasClass("slide")?(s.addClass(e),s[0].offsetWidth,n.addClass(r),s.addClass(r),n.one("bsTransitionEnd",function(){s.removeClass([e,r].join(" ")).addClass("active"),n.removeClass(["active",r].join(" ")),l.sliding=!1,setTimeout(function(){l.$element.trigger(c)},0)}).emulateTransitionEnd(i.TRANSITION_DURATION)):(n.removeClass("active"),s.addClass("active"),this.sliding=!1,this.$element.trigger(c)),a&&this.cycle(),this}};var o=t.fn.carousel;t.fn.carousel=e,t.fn.carousel.Constructor=i,t.fn.carousel.noConflict=function(){return t.fn.carousel=o,this};var n=function(i){var o,n=t(this),s=t(n.attr("data-target")||(o=n.attr("href"))&&o.replace(/.*(?=#[^\s]+$)/,""));if(s.hasClass("carousel")){var a=t.extend({},s.data(),n.data()),r=n.attr("data-slide-to");r&&(a.interval=!1),e.call(s,a),r&&s.data("bs.carousel").to(r),i.preventDefault()}};t(document).on("click.bs.carousel.data-api","[data-slide]",n).on("click.bs.carousel.data-api","[data-slide-to]",n),t(window).on("load",function(){t('[data-ride="carousel"]').each(function(){var i=t(this);e.call(i,i.data())})})}(jQuery),+function(t){"use strict";function e(e){var i=e.attr("data-target");i||(i=e.attr("href"),i=i&&/#[A-Za-z]/.test(i)&&i.replace(/.*(?=#[^\s]*$)/,""));var o=i&&t(i);return 
o&&o.length?o:e.parent()}function i(i){i&&3===i.which||(t(n).remove(),t(s).each(function(){var o=t(this),n=e(o),s={relatedTarget:this};n.hasClass("open")&&(i&&"click"==i.type&&/input|textarea/i.test(i.target.tagName)&&t.contains(n[0],i.target)||(n.trigger(i=t.Event("hide.bs.dropdown",s)),i.isDefaultPrevented()||(o.attr("aria-expanded","false"),n.removeClass("open").trigger(t.Event("hidden.bs.dropdown",s)))))}))}function o(e){return this.each(function(){var i=t(this),o=i.data("bs.dropdown");o||i.data("bs.dropdown",o=new a(this)),"string"==typeof e&&o[e].call(i)})}var n=".dropdown-backdrop",s='[data-toggle="dropdown"]',a=function(e){t(e).on("click.bs.dropdown",this.toggle)};a.VERSION="3.3.6",a.prototype.toggle=function(o){var n=t(this);if(!n.is(".disabled, :disabled")){var s=e(n),a=s.hasClass("open");if(i(),!a){"ontouchstart"in document.documentElement&&!s.closest(".navbar-nav").length&&t(document.createElement("div")).addClass("dropdown-backdrop").insertAfter(t(this)).on("click",i);var r={relatedTarget:this};if(s.trigger(o=t.Event("show.bs.dropdown",r)),o.isDefaultPrevented())return;n.trigger("focus").attr("aria-expanded","true"),s.toggleClass("open").trigger(t.Event("shown.bs.dropdown",r))}return!1}},a.prototype.keydown=function(i){if(/(38|40|27|32)/.test(i.which)&&!/input|textarea/i.test(i.target.tagName)){var o=t(this);if(i.preventDefault(),i.stopPropagation(),!o.is(".disabled, :disabled")){var n=e(o),a=n.hasClass("open");if(!a&&27!=i.which||a&&27==i.which)return 27==i.which&&n.find(s).trigger("focus"),o.trigger("click");var r=" li:not(.disabled):visible a",l=n.find(".dropdown-menu"+r);if(l.length){var 
h=l.index(i.target);38==i.which&&h>0&&h--,40==i.which&&hdocument.documentElement.clientHeight;this.$element.css({paddingLeft:!this.bodyIsOverflowing&&t?this.scrollbarWidth:"",paddingRight:this.bodyIsOverflowing&&!t?this.scrollbarWidth:""})},i.prototype.resetAdjustments=function(){this.$element.css({paddingLeft:"",paddingRight:""})},i.prototype.checkScrollbar=function(){var t=window.innerWidth;if(!t){var e=document.documentElement.getBoundingClientRect();t=e.right-Math.abs(e.left)}this.bodyIsOverflowing=document.body.clientWidth
',trigger:"hover focus",title:"",delay:0,html:!1,container:!1,viewport:{selector:"body",padding:0}},i.prototype.init=function(e,i,o){if(this.enabled=!0,this.type=e,this.$element=t(i),this.options=this.getOptions(o),this.$viewport=this.options.viewport&&t(t.isFunction(this.options.viewport)?this.options.viewport.call(this,this.$element):this.options.viewport.selector||this.options.viewport),this.inState={click:!1,hover:!1,focus:!1},this.$element[0]instanceof document.constructor&&!this.options.selector)throw new Error("`selector` option must be specified when initializing "+this.type+" on the window.document object!");for(var n=this.options.trigger.split(" "),s=n.length;s--;){var a=n[s];if("click"==a)this.$element.on("click."+this.type,this.options.selector,t.proxy(this.toggle,this));else if("manual"!=a){var r="hover"==a?"mouseenter":"focusin",l="hover"==a?"mouseleave":"focusout";this.$element.on(r+"."+this.type,this.options.selector,t.proxy(this.enter,this)),this.$element.on(l+"."+this.type,this.options.selector,t.proxy(this.leave,this))}}this.options.selector?this._options=t.extend({},this.options,{trigger:"manual",selector:""}):this.fixTitle()},i.prototype.getDefaults=function(){return i.DEFAULTS},i.prototype.getOptions=function(e){return e=t.extend({},this.getDefaults(),this.$element.data(),e),e.delay&&"number"==typeof e.delay&&(e.delay={show:e.delay,hide:e.delay}),e},i.prototype.getDelegateOptions=function(){var e={},i=this.getDefaults();return this._options&&t.each(this._options,function(t,o){i[t]!=o&&(e[t]=o)}),e},i.prototype.enter=function(e){var i=e instanceof this.constructor?e:t(e.currentTarget).data("bs."+this.type);return i||(i=new this.constructor(e.currentTarget,this.getDelegateOptions()),t(e.currentTarget).data("bs."+this.type,i)),e instanceof 
t.Event&&(i.inState["focusin"==e.type?"focus":"hover"]=!0),i.tip().hasClass("in")||"in"==i.hoverState?void(i.hoverState="in"):(clearTimeout(i.timeout),i.hoverState="in",i.options.delay&&i.options.delay.show?void(i.timeout=setTimeout(function(){"in"==i.hoverState&&i.show()},i.options.delay.show)):i.show())},i.prototype.isInStateTrue=function(){for(var t in this.inState)if(this.inState[t])return!0;return!1},i.prototype.leave=function(e){var i=e instanceof this.constructor?e:t(e.currentTarget).data("bs."+this.type);return i||(i=new this.constructor(e.currentTarget,this.getDelegateOptions()),t(e.currentTarget).data("bs."+this.type,i)),e instanceof t.Event&&(i.inState["focusout"==e.type?"focus":"hover"]=!1),i.isInStateTrue()?void 0:(clearTimeout(i.timeout),i.hoverState="out",i.options.delay&&i.options.delay.hide?void(i.timeout=setTimeout(function(){"out"==i.hoverState&&i.hide()},i.options.delay.hide)):i.hide())},i.prototype.show=function(){var e=t.Event("show.bs."+this.type);if(this.hasContent()&&this.enabled){this.$element.trigger(e);var o=t.contains(this.$element[0].ownerDocument.documentElement,this.$element[0]);if(e.isDefaultPrevented()||!o)return;var n=this,s=this.tip(),a=this.getUID(this.type);this.setContent(),s.attr("id",a),this.$element.attr("aria-describedby",a),this.options.animation&&s.addClass("fade");var r="function"==typeof this.options.placement?this.options.placement.call(this,s[0],this.$element[0]):this.options.placement,l=/\s?auto?\s?/i,h=l.test(r);h&&(r=r.replace(l,"")||"top"),s.detach().css({top:0,left:0,display:"block"}).addClass(r).data("bs."+this.type,this),this.options.container?s.appendTo(this.options.container):s.insertAfter(this.$element),this.$element.trigger("inserted.bs."+this.type);var d=this.getPosition(),p=s[0].offsetWidth,c=s[0].offsetHeight;if(h){var f=r,u=this.getPosition(this.$viewport);r="bottom"==r&&d.bottom+c>u.bottom?"top":"top"==r&&d.top-cu.width?"left":"left"==r&&d.left-pa.top+a.height&&(n.top=a.top+a.height-l)}else{var 
h=e.left-s,d=e.left+s+i;ha.right&&(n.left=a.left+a.width-d)}return n},i.prototype.getTitle=function(){var t,e=this.$element,i=this.options;return t=e.attr("data-original-title")||("function"==typeof i.title?i.title.call(e[0]):i.title)},i.prototype.getUID=function(t){do t+=~~(1e6*Math.random());while(document.getElementById(t));return t},i.prototype.tip=function(){if(!this.$tip&&(this.$tip=t(this.options.template),1!=this.$tip.length))throw new Error(this.type+" `template` option must consist of exactly 1 top-level element!");return this.$tip},i.prototype.arrow=function(){return this.$arrow=this.$arrow||this.tip().find(".tooltip-arrow")},i.prototype.enable=function(){this.enabled=!0},i.prototype.disable=function(){this.enabled=!1},i.prototype.toggleEnabled=function(){this.enabled=!this.enabled},i.prototype.toggle=function(e){var i=this;e&&(i=t(e.currentTarget).data("bs."+this.type),i||(i=new this.constructor(e.currentTarget,this.getDelegateOptions()),t(e.currentTarget).data("bs."+this.type,i))),e?(i.inState.click=!i.inState.click,i.isInStateTrue()?i.enter(i):i.leave(i)):i.tip().hasClass("in")?i.leave(i):i.enter(i)},i.prototype.destroy=function(){var t=this;clearTimeout(this.timeout),this.hide(function(){t.$element.off("."+t.type).removeData("bs."+t.type),t.$tip&&t.$tip.detach(),t.$tip=null,t.$arrow=null,t.$viewport=null})};var o=t.fn.tooltip;t.fn.tooltip=e,t.fn.tooltip.Constructor=i,t.fn.tooltip.noConflict=function(){return t.fn.tooltip=o,this}}(jQuery),+function(t){"use strict";function e(e){return this.each(function(){var o=t(this),n=o.data("bs.popover"),s="object"==typeof e&&e;(n||!/destroy|hide/.test(e))&&(n||o.data("bs.popover",n=new i(this,s)),"string"==typeof e&&n[e]())})}var i=function(t,e){this.init("popover",t,e)};if(!t.fn.tooltip)throw new Error("Popover requires 
tooltip.js");i.VERSION="3.3.6",i.DEFAULTS=t.extend({},t.fn.tooltip.Constructor.DEFAULTS,{placement:"right",trigger:"click",content:"",template:''}),i.prototype=t.extend({},t.fn.tooltip.Constructor.prototype),i.prototype.constructor=i,i.prototype.getDefaults=function(){return i.DEFAULTS},i.prototype.setContent=function(){var t=this.tip(),e=this.getTitle(),i=this.getContent();t.find(".popover-title")[this.options.html?"html":"text"](e),t.find(".popover-content").children().detach().end()[this.options.html?"string"==typeof i?"html":"append":"text"](i),t.removeClass("fade top bottom left right in"),t.find(".popover-title").html()||t.find(".popover-title").hide()},i.prototype.hasContent=function(){return this.getTitle()||this.getContent()},i.prototype.getContent=function(){var t=this.$element,e=this.options;return t.attr("data-content")||("function"==typeof e.content?e.content.call(t[0]):e.content)},i.prototype.arrow=function(){return this.$arrow=this.$arrow||this.tip().find(".arrow")};var o=t.fn.popover;t.fn.popover=e,t.fn.popover.Constructor=i,t.fn.popover.noConflict=function(){return t.fn.popover=o,this}}(jQuery),+function(t){"use strict";function e(e){return this.each(function(){var o=t(this),n=o.data("bs.tab");n||o.data("bs.tab",n=new i(this)),"string"==typeof e&&n[e]()})}var i=function(e){this.element=t(e)};i.VERSION="3.3.6",i.TRANSITION_DURATION=150,i.prototype.show=function(){var e=this.element,i=e.closest("ul:not(.dropdown-menu)"),o=e.data("target");if(o||(o=e.attr("href"),o=o&&o.replace(/.*(?=#[^\s]*$)/,"")),!e.parent("li").hasClass("active")){var n=i.find(".active:last a"),s=t.Event("hide.bs.tab",{relatedTarget:e[0]}),a=t.Event("show.bs.tab",{relatedTarget:n[0]});if(n.trigger(s),e.trigger(a),!a.isDefaultPrevented()&&!s.isDefaultPrevented()){var 
r=t(o);this.activate(e.closest("li"),i),this.activate(r,r.parent(),function(){n.trigger({type:"hidden.bs.tab",relatedTarget:e[0]}),e.trigger({type:"shown.bs.tab",relatedTarget:n[0]})})}}},i.prototype.activate=function(e,o,n){function s(){a.removeClass("active").find("> .dropdown-menu > .active").removeClass("active").end().find('[data-toggle="tab"]').attr("aria-expanded",!1),e.addClass("active").find('[data-toggle="tab"]').attr("aria-expanded",!0),r?(e[0].offsetWidth,e.addClass("in")):e.removeClass("fade"),e.parent(".dropdown-menu").length&&e.closest("li.dropdown").addClass("active").end().find('[data-toggle="tab"]').attr("aria-expanded",!0),n&&n()}var a=o.find("> .active"),r=n&&t.support.transition&&(a.length&&a.hasClass("fade")||!!o.find("> .fade").length);a.length&&r?a.one("bsTransitionEnd",s).emulateTransitionEnd(i.TRANSITION_DURATION):s(),a.removeClass("in")};var o=t.fn.tab;t.fn.tab=e,t.fn.tab.Constructor=i,t.fn.tab.noConflict=function(){return t.fn.tab=o,this};var n=function(i){i.preventDefault(),e.call(t(this),"show")};t(document).on("click.bs.tab.data-api",'[data-toggle="tab"]',n).on("click.bs.tab.data-api",'[data-toggle="pill"]',n)}(jQuery),+function(t){"use strict";function e(e){return this.each(function(){var o=t(this),n=o.data("bs.affix"),s="object"==typeof e&&e;n||o.data("bs.affix",n=new i(this,s)),"string"==typeof e&&n[e]()})}var i=function(e,o){this.options=t.extend({},i.DEFAULTS,o),this.$target=t(this.options.target).on("scroll.bs.affix.data-api",t.proxy(this.checkPosition,this)).on("click.bs.affix.data-api",t.proxy(this.checkPositionWithEventLoop,this)),this.$element=t(e),this.affixed=null,this.unpin=null,this.pinnedOffset=null,this.checkPosition()};i.VERSION="3.3.6",i.RESET="affix affix-top affix-bottom",i.DEFAULTS={offset:0,target:window},i.prototype.getState=function(t,e,i,o){var n=this.$target.scrollTop(),s=this.$element.offset(),a=this.$target.height();if(null!=i&&"top"==this.affixed)return i>n?"top":!1;if("bottom"==this.affixed)return 
null!=i?n+this.unpin<=s.top?!1:"bottom":t-o>=n+a?!1:"bottom";var r=null==this.affixed,l=r?n:s.top,h=r?a:e;return null!=i&&i>=n?"top":null!=o&&l+h>=t-o?"bottom":!1},i.prototype.getPinnedOffset=function(){if(this.pinnedOffset)return this.pinnedOffset;this.$element.removeClass(i.RESET).addClass("affix");var t=this.$target.scrollTop(),e=this.$element.offset();return this.pinnedOffset=e.top-t},i.prototype.checkPositionWithEventLoop=function(){setTimeout(t.proxy(this.checkPosition,this),1)},i.prototype.checkPosition=function(){if(this.$element.is(":visible")){var e=this.$element.height(),o=this.options.offset,n=o.top,s=o.bottom,a=Math.max(t(document).height(),t(document.body).height());"object"!=typeof o&&(s=n=o),"function"==typeof n&&(n=o.top(this.$element)),"function"==typeof s&&(s=o.bottom(this.$element));var r=this.getState(a,e,n,s);if(this.affixed!=r){null!=this.unpin&&this.$element.css("top","");var l="affix"+(r?"-"+r:""),h=t.Event(l+".bs.affix");if(this.$element.trigger(h),h.isDefaultPrevented())return;this.affixed=r,this.unpin="bottom"==r?this.getPinnedOffset():null,this.$element.removeClass(i.RESET).addClass(l).trigger(l.replace("affix","affixed")+".bs.affix")}"bottom"==r&&this.$element.offset({top:a-e-s})}};var o=t.fn.affix;t.fn.affix=e,t.fn.affix.Constructor=i,t.fn.affix.noConflict=function(){return t.fn.affix=o,this},t(window).on("load",function(){t('[data-spy="affix"]').each(function(){var i=t(this),o=i.data();o.offset=o.offset||{},null!=o.offsetBottom&&(o.offset.bottom=o.offsetBottom),null!=o.offsetTop&&(o.offset.top=o.offsetTop),e.call(i,o)})})}(jQuery),+function(t){"use strict";function e(e){var i,o=e.attr("data-target")||(i=e.attr("href"))&&i.replace(/.*(?=#[^\s]+$)/,"");return t(o)}function i(e){return this.each(function(){var i=t(this),n=i.data("bs.collapse"),s=t.extend({},o.DEFAULTS,i.data(),"object"==typeof e&&e);!n&&s.toggle&&/show|hide/.test(e)&&(s.toggle=!1),n||i.data("bs.collapse",n=new o(this,s)),"string"==typeof e&&n[e]()})}var 
o=function(e,i){this.$element=t(e),this.options=t.extend({},o.DEFAULTS,i),this.$trigger=t('[data-toggle="collapse"][href="#'+e.id+'"],[data-toggle="collapse"][data-target="#'+e.id+'"]'),this.transitioning=null,this.options.parent?this.$parent=this.getParent():this.addAriaAndCollapsedClass(this.$element,this.$trigger),this.options.toggle&&this.toggle()};o.VERSION="3.3.6",o.TRANSITION_DURATION=350,o.DEFAULTS={toggle:!0},o.prototype.dimension=function(){var t=this.$element.hasClass("width");return t?"width":"height"},o.prototype.show=function(){if(!this.transitioning&&!this.$element.hasClass("in")){var e,n=this.$parent&&this.$parent.children(".panel").children(".in, .collapsing");if(!(n&&n.length&&(e=n.data("bs.collapse"),e&&e.transitioning))){var s=t.Event("show.bs.collapse");if(this.$element.trigger(s),!s.isDefaultPrevented()){n&&n.length&&(i.call(n,"hide"),e||n.data("bs.collapse",null));var a=this.dimension();this.$element.removeClass("collapse").addClass("collapsing")[a](0).attr("aria-expanded",!0),this.$trigger.removeClass("collapsed").attr("aria-expanded",!0),this.transitioning=1;var r=function(){this.$element.removeClass("collapsing").addClass("collapse in")[a](""),this.transitioning=0,this.$element.trigger("shown.bs.collapse")};if(!t.support.transition)return r.call(this);var l=t.camelCase(["scroll",a].join("-"));this.$element.one("bsTransitionEnd",t.proxy(r,this)).emulateTransitionEnd(o.TRANSITION_DURATION)[a](this.$element[0][l]); -}}}},o.prototype.hide=function(){if(!this.transitioning&&this.$element.hasClass("in")){var e=t.Event("hide.bs.collapse");if(this.$element.trigger(e),!e.isDefaultPrevented()){var i=this.dimension();this.$element[i](this.$element[i]())[0].offsetHeight,this.$element.addClass("collapsing").removeClass("collapse in").attr("aria-expanded",!1),this.$trigger.addClass("collapsed").attr("aria-expanded",!1),this.transitioning=1;var 
n=function(){this.transitioning=0,this.$element.removeClass("collapsing").addClass("collapse").trigger("hidden.bs.collapse")};return t.support.transition?void this.$element[i](0).one("bsTransitionEnd",t.proxy(n,this)).emulateTransitionEnd(o.TRANSITION_DURATION):n.call(this)}}},o.prototype.toggle=function(){this[this.$element.hasClass("in")?"hide":"show"]()},o.prototype.getParent=function(){return t(this.options.parent).find('[data-toggle="collapse"][data-parent="'+this.options.parent+'"]').each(t.proxy(function(i,o){var n=t(o);this.addAriaAndCollapsedClass(e(n),n)},this)).end()},o.prototype.addAriaAndCollapsedClass=function(t,e){var i=t.hasClass("in");t.attr("aria-expanded",i),e.toggleClass("collapsed",!i).attr("aria-expanded",i)};var n=t.fn.collapse;t.fn.collapse=i,t.fn.collapse.Constructor=o,t.fn.collapse.noConflict=function(){return t.fn.collapse=n,this},t(document).on("click.bs.collapse.data-api",'[data-toggle="collapse"]',function(o){var n=t(this);n.attr("data-target")||o.preventDefault();var s=e(n),a=s.data("bs.collapse"),r=a?"toggle":n.data();i.call(s,r)})}(jQuery),+function(t){"use strict";function e(i,o){this.$body=t(document.body),this.$scrollElement=t(t(i).is(document.body)?window:i),this.options=t.extend({},e.DEFAULTS,o),this.selector=(this.options.target||"")+" .nav li > a",this.offsets=[],this.targets=[],this.activeTarget=null,this.scrollHeight=0,this.$scrollElement.on("scroll.bs.scrollspy",t.proxy(this.process,this)),this.refresh(),this.process()}function i(i){return this.each(function(){var o=t(this),n=o.data("bs.scrollspy"),s="object"==typeof i&&i;n||o.data("bs.scrollspy",n=new e(this,s)),"string"==typeof i&&n[i]()})}e.VERSION="3.3.6",e.DEFAULTS={offset:10},e.prototype.getScrollHeight=function(){return this.$scrollElement[0].scrollHeight||Math.max(this.$body[0].scrollHeight,document.documentElement.scrollHeight)},e.prototype.refresh=function(){var 
e=this,i="offset",o=0;this.offsets=[],this.targets=[],this.scrollHeight=this.getScrollHeight(),t.isWindow(this.$scrollElement[0])||(i="position",o=this.$scrollElement.scrollTop()),this.$body.find(this.selector).map(function(){var e=t(this),n=e.data("target")||e.attr("href"),s=/^#./.test(n)&&t(n);return s&&s.length&&s.is(":visible")&&[[s[i]().top+o,n]]||null}).sort(function(t,e){return t[0]-e[0]}).each(function(){e.offsets.push(this[0]),e.targets.push(this[1])})},e.prototype.process=function(){var t,e=this.$scrollElement.scrollTop()+this.options.offset,i=this.getScrollHeight(),o=this.options.offset+i-this.$scrollElement.height(),n=this.offsets,s=this.targets,a=this.activeTarget;if(this.scrollHeight!=i&&this.refresh(),e>=o)return a!=(t=s[s.length-1])&&this.activate(t);if(a&&e=n[t]&&(void 0===n[t+1]||e3)throw new Error("Bootstrap's JavaScript requires jQuery version 1.9.1 or higher, but lower than version 4")}(jQuery),+function(t){"use strict";function e(e){return this.each(function(){var i=t(this),n=i.data("bs.alert");n||i.data("bs.alert",n=new o(this)),"string"==typeof e&&n[e].call(i)})}var i='[data-dismiss="alert"]',o=function(e){t(e).on("click",i,this.close)};o.VERSION="3.4.1",o.TRANSITION_DURATION=150,o.prototype.close=function(e){function i(){a.detach().trigger("closed.bs.alert").remove()}var n=t(this),s=n.attr("data-target");s||(s=n.attr("href"),s=s&&s.replace(/.*(?=#[^\s]*$)/,"")),s="#"===s?[]:s;var a=t(document).find(s);e&&e.preventDefault(),a.length||(a=n.closest(".alert")),a.trigger(e=t.Event("close.bs.alert")),e.isDefaultPrevented()||(a.removeClass("in"),t.support.transition&&a.hasClass("fade")?a.one("bsTransitionEnd",i).emulateTransitionEnd(o.TRANSITION_DURATION):i())};var n=t.fn.alert;t.fn.alert=e,t.fn.alert.Constructor=o,t.fn.alert.noConflict=function(){return t.fn.alert=n,this},t(document).on("click.bs.alert.data-api",i,o.prototype.close)}(jQuery),+function(t){"use strict";function e(e){return this.each(function(){var 
o=t(this),n=o.data("bs.button"),s="object"==typeof e&&e;n||o.data("bs.button",n=new i(this,s)),"toggle"==e?n.toggle():e&&n.setState(e)})}var i=function(e,o){this.$element=t(e),this.options=t.extend({},i.DEFAULTS,o),this.isLoading=!1};i.VERSION="3.4.1",i.DEFAULTS={loadingText:"loading..."},i.prototype.setState=function(e){var i="disabled",o=this.$element,n=o.is("input")?"val":"html",s=o.data();e+="Text",null==s.resetText&&o.data("resetText",o[n]()),setTimeout(t.proxy(function(){o[n](null==s[e]?this.options[e]:s[e]),"loadingText"==e?(this.isLoading=!0,o.addClass(i).attr(i,i).prop(i,!0)):this.isLoading&&(this.isLoading=!1,o.removeClass(i).removeAttr(i).prop(i,!1))},this),0)},i.prototype.toggle=function(){var t=!0,e=this.$element.closest('[data-toggle="buttons"]');if(e.length){var i=this.$element.find("input");"radio"==i.prop("type")?(i.prop("checked")&&(t=!1),e.find(".active").removeClass("active"),this.$element.addClass("active")):"checkbox"==i.prop("type")&&(i.prop("checked")!==this.$element.hasClass("active")&&(t=!1),this.$element.toggleClass("active")),i.prop("checked",this.$element.hasClass("active")),t&&i.trigger("change")}else this.$element.attr("aria-pressed",!this.$element.hasClass("active")),this.$element.toggleClass("active")};var o=t.fn.button;t.fn.button=e,t.fn.button.Constructor=i,t.fn.button.noConflict=function(){return t.fn.button=o,this},t(document).on("click.bs.button.data-api",'[data-toggle^="button"]',function(i){var o=t(i.target).closest(".btn");e.call(o,"toggle"),t(i.target).is('input[type="radio"], input[type="checkbox"]')||(i.preventDefault(),o.is("input,button")?o.trigger("focus"):o.find("input:visible,button:visible").first().trigger("focus"))}).on("focus.bs.button.data-api blur.bs.button.data-api",'[data-toggle^="button"]',function(e){t(e.target).closest(".btn").toggleClass("focus",/^focus(in)?$/.test(e.type))})}(jQuery),+function(t){"use strict";function e(e){return this.each(function(){var 
o=t(this),n=o.data("bs.carousel"),s=t.extend({},i.DEFAULTS,o.data(),"object"==typeof e&&e),a="string"==typeof e?e:s.slide;n||o.data("bs.carousel",n=new i(this,s)),"number"==typeof e?n.to(e):a?n[a]():s.interval&&n.pause().cycle()})}var i=function(e,i){this.$element=t(e),this.$indicators=this.$element.find(".carousel-indicators"),this.options=i,this.paused=null,this.sliding=null,this.interval=null,this.$active=null,this.$items=null,this.options.keyboard&&this.$element.on("keydown.bs.carousel",t.proxy(this.keydown,this)),"hover"==this.options.pause&&!("ontouchstart"in document.documentElement)&&this.$element.on("mouseenter.bs.carousel",t.proxy(this.pause,this)).on("mouseleave.bs.carousel",t.proxy(this.cycle,this))};i.VERSION="3.4.1",i.TRANSITION_DURATION=600,i.DEFAULTS={interval:5e3,pause:"hover",wrap:!0,keyboard:!0},i.prototype.keydown=function(t){if(!/input|textarea/i.test(t.target.tagName)){switch(t.which){case 37:this.prev();break;case 39:this.next();break;default:return}t.preventDefault()}},i.prototype.cycle=function(e){return e||(this.paused=!1),this.interval&&clearInterval(this.interval),this.options.interval&&!this.paused&&(this.interval=setInterval(t.proxy(this.next,this),this.options.interval)),this},i.prototype.getItemIndex=function(t){return this.$items=t.parent().children(".item"),this.$items.index(t||this.$active)},i.prototype.getItemForDirection=function(t,e){var i=this.getItemIndex(e),o="prev"==t&&0===i||"next"==t&&i==this.$items.length-1;if(o&&!this.options.wrap)return e;var n="prev"==t?-1:1,s=(i+n)%this.$items.length;return this.$items.eq(s)},i.prototype.to=function(t){var e=this,i=this.getItemIndex(this.$active=this.$element.find(".item.active"));return t>this.$items.length-1||0>t?void 0:this.sliding?this.$element.one("slid.bs.carousel",function(){e.to(t)}):i==t?this.pause().cycle():this.slide(t>i?"next":"prev",this.$items.eq(t))},i.prototype.pause=function(e){return e||(this.paused=!0),this.$element.find(".next, 
.prev").length&&t.support.transition&&(this.$element.trigger(t.support.transition.end),this.cycle(!0)),this.interval=clearInterval(this.interval),this},i.prototype.next=function(){return this.sliding?void 0:this.slide("next")},i.prototype.prev=function(){return this.sliding?void 0:this.slide("prev")},i.prototype.slide=function(e,o){var n=this.$element.find(".item.active"),s=o||this.getItemForDirection(e,n),a=this.interval,r="next"==e?"left":"right",l=this;if(s.hasClass("active"))return this.sliding=!1;var h=s[0],d=t.Event("slide.bs.carousel",{relatedTarget:h,direction:r});if(this.$element.trigger(d),!d.isDefaultPrevented()){if(this.sliding=!0,a&&this.pause(),this.$indicators.length){this.$indicators.find(".active").removeClass("active");var p=t(this.$indicators.children()[this.getItemIndex(s)]);p&&p.addClass("active")}var c=t.Event("slid.bs.carousel",{relatedTarget:h,direction:r});return t.support.transition&&this.$element.hasClass("slide")?(s.addClass(e),"object"==typeof s&&s.length&&s[0].offsetWidth,n.addClass(r),s.addClass(r),n.one("bsTransitionEnd",function(){s.removeClass([e,r].join(" ")).addClass("active"),n.removeClass(["active",r].join(" ")),l.sliding=!1,setTimeout(function(){l.$element.trigger(c)},0)}).emulateTransitionEnd(i.TRANSITION_DURATION)):(n.removeClass("active"),s.addClass("active"),this.sliding=!1,this.$element.trigger(c)),a&&this.cycle(),this}};var o=t.fn.carousel;t.fn.carousel=e,t.fn.carousel.Constructor=i,t.fn.carousel.noConflict=function(){return t.fn.carousel=o,this};var n=function(i){var o=t(this),n=o.attr("href");n&&(n=n.replace(/.*(?=#[^\s]+$)/,""));var s=o.attr("data-target")||n,a=t(document).find(s);if(a.hasClass("carousel")){var 
r=t.extend({},a.data(),o.data()),l=o.attr("data-slide-to");l&&(r.interval=!1),e.call(a,r),l&&a.data("bs.carousel").to(l),i.preventDefault()}};t(document).on("click.bs.carousel.data-api","[data-slide]",n).on("click.bs.carousel.data-api","[data-slide-to]",n),t(window).on("load",function(){t('[data-ride="carousel"]').each(function(){var i=t(this);e.call(i,i.data())})})}(jQuery),+function(t){"use strict";function e(e){var i=e.attr("data-target");i||(i=e.attr("href"),i=i&&/#[A-Za-z]/.test(i)&&i.replace(/.*(?=#[^\s]*$)/,""));var o="#"!==i?t(document).find(i):null;return o&&o.length?o:e.parent()}function i(i){i&&3===i.which||(t(n).remove(),t(s).each(function(){var o=t(this),n=e(o),s={relatedTarget:this};n.hasClass("open")&&(i&&"click"==i.type&&/input|textarea/i.test(i.target.tagName)&&t.contains(n[0],i.target)||(n.trigger(i=t.Event("hide.bs.dropdown",s)),i.isDefaultPrevented()||(o.attr("aria-expanded","false"),n.removeClass("open").trigger(t.Event("hidden.bs.dropdown",s)))))}))}function o(e){return this.each(function(){var i=t(this),o=i.data("bs.dropdown");o||i.data("bs.dropdown",o=new a(this)),"string"==typeof e&&o[e].call(i)})}var n=".dropdown-backdrop",s='[data-toggle="dropdown"]',a=function(e){t(e).on("click.bs.dropdown",this.toggle)};a.VERSION="3.4.1",a.prototype.toggle=function(o){var n=t(this);if(!n.is(".disabled, :disabled")){var s=e(n),a=s.hasClass("open");if(i(),!a){"ontouchstart"in document.documentElement&&!s.closest(".navbar-nav").length&&t(document.createElement("div")).addClass("dropdown-backdrop").insertAfter(t(this)).on("click",i);var r={relatedTarget:this};if(s.trigger(o=t.Event("show.bs.dropdown",r)),o.isDefaultPrevented())return;n.trigger("focus").attr("aria-expanded","true"),s.toggleClass("open").trigger(t.Event("shown.bs.dropdown",r))}return!1}},a.prototype.keydown=function(i){if(/(38|40|27|32)/.test(i.which)&&!/input|textarea/i.test(i.target.tagName)){var o=t(this);if(i.preventDefault(),i.stopPropagation(),!o.is(".disabled, :disabled")){var 
n=e(o),a=n.hasClass("open");if(!a&&27!=i.which||a&&27==i.which)return 27==i.which&&n.find(s).trigger("focus"),o.trigger("click");var r=" li:not(.disabled):visible a",l=n.find(".dropdown-menu"+r);if(l.length){var h=l.index(i.target);38==i.which&&h>0&&h--,40==i.which&&hdocument.documentElement.clientHeight;this.$element.css({paddingLeft:!this.bodyIsOverflowing&&t?this.scrollbarWidth:"",paddingRight:this.bodyIsOverflowing&&!t?this.scrollbarWidth:""})},i.prototype.resetAdjustments=function(){this.$element.css({paddingLeft:"",paddingRight:""})},i.prototype.checkScrollbar=function(){var t=window.innerWidth;if(!t){var e=document.documentElement.getBoundingClientRect();t=e.right-Math.abs(e.left)}this.bodyIsOverflowing=document.body.clientWidtha;a++)if(o.match(n[a]))return!0;return!1}function i(i,o,n){if(0===i.length)return i;if(n&&"function"==typeof n)return n(i);if(!document.implementation||!document.implementation.createHTMLDocument)return i;var s=document.implementation.createHTMLDocument("sanitization");s.body.innerHTML=i;for(var a=t.map(o,function(t,e){return e}),r=t(s.body).find("*"),l=0,h=r.length;h>l;l++){var d=r[l],p=d.nodeName.toLowerCase();if(-1!==t.inArray(p,a))for(var c=t.map(d.attributes,function(t){return t}),f=[].concat(o["*"]||[],o[p]||[]),u=0,g=c.length;g>u;u++)e(c[u],f)||d.removeAttribute(c[u].nodeName);else d.parentNode.removeChild(d)}return s.body.innerHTML}function o(e){return this.each(function(){var i=t(this),o=i.data("bs.tooltip"),n="object"==typeof e&&e;!o&&/destroy|hide/.test(e)||(o||i.data("bs.tooltip",o=new d(this,n)),"string"==typeof e&&o[e]())})}var 
n=["sanitize","whiteList","sanitizeFn"],s=["background","cite","href","itemtype","longdesc","poster","src","xlink:href"],a=/^aria-[\w-]*$/i,r={"*":["class","dir","id","lang","role",a],a:["target","href","title","rel"],area:[],b:[],br:[],col:[],code:[],div:[],em:[],hr:[],h1:[],h2:[],h3:[],h4:[],h5:[],h6:[],i:[],img:["src","alt","title","width","height"],li:[],ol:[],p:[],pre:[],s:[],small:[],span:[],sub:[],sup:[],strong:[],u:[],ul:[]},l=/^(?:(?:https?|mailto|ftp|tel|file):|[^&:/?#]*(?:[/?#]|$))/gi,h=/^data:(?:image\/(?:bmp|gif|jpeg|jpg|png|tiff|webp)|video\/(?:mpeg|mp4|ogg|webm)|audio\/(?:mp3|oga|ogg|opus));base64,[a-z0-9+/]+=*$/i,d=function(t,e){this.type=null,this.options=null,this.enabled=null,this.timeout=null,this.hoverState=null,this.$element=null,this.inState=null,this.init("tooltip",t,e)};d.VERSION="3.4.1",d.TRANSITION_DURATION=150,d.DEFAULTS={animation:!0,placement:"top",selector:!1,template:'',trigger:"hover focus",title:"",delay:0,html:!1,container:!1,viewport:{selector:"body",padding:0},sanitize:!0,sanitizeFn:null,whiteList:r},d.prototype.init=function(e,i,o){if(this.enabled=!0,this.type=e,this.$element=t(i),this.options=this.getOptions(o),this.$viewport=this.options.viewport&&t(document).find(t.isFunction(this.options.viewport)?this.options.viewport.call(this,this.$element):this.options.viewport.selector||this.options.viewport),this.inState={click:!1,hover:!1,focus:!1},this.$element[0]instanceof document.constructor&&!this.options.selector)throw new Error("`selector` option must be specified when initializing "+this.type+" on the window.document object!");for(var n=this.options.trigger.split(" "),s=n.length;s--;){var a=n[s];if("click"==a)this.$element.on("click."+this.type,this.options.selector,t.proxy(this.toggle,this));else if("manual"!=a){var 
r="hover"==a?"mouseenter":"focusin",l="hover"==a?"mouseleave":"focusout";this.$element.on(r+"."+this.type,this.options.selector,t.proxy(this.enter,this)),this.$element.on(l+"."+this.type,this.options.selector,t.proxy(this.leave,this))}}this.options.selector?this._options=t.extend({},this.options,{trigger:"manual",selector:""}):this.fixTitle()},d.prototype.getDefaults=function(){return d.DEFAULTS},d.prototype.getOptions=function(e){var o=this.$element.data();for(var s in o)o.hasOwnProperty(s)&&-1!==t.inArray(s,n)&&delete o[s];return e=t.extend({},this.getDefaults(),o,e),e.delay&&"number"==typeof e.delay&&(e.delay={show:e.delay,hide:e.delay}),e.sanitize&&(e.template=i(e.template,e.whiteList,e.sanitizeFn)),e},d.prototype.getDelegateOptions=function(){var e={},i=this.getDefaults();return this._options&&t.each(this._options,function(t,o){i[t]!=o&&(e[t]=o)}),e},d.prototype.enter=function(e){var i=e instanceof this.constructor?e:t(e.currentTarget).data("bs."+this.type);return i||(i=new this.constructor(e.currentTarget,this.getDelegateOptions()),t(e.currentTarget).data("bs."+this.type,i)),e instanceof t.Event&&(i.inState["focusin"==e.type?"focus":"hover"]=!0),i.tip().hasClass("in")||"in"==i.hoverState?void(i.hoverState="in"):(clearTimeout(i.timeout),i.hoverState="in",i.options.delay&&i.options.delay.show?void(i.timeout=setTimeout(function(){"in"==i.hoverState&&i.show()},i.options.delay.show)):i.show())},d.prototype.isInStateTrue=function(){for(var t in this.inState)if(this.inState[t])return!0;return!1},d.prototype.leave=function(e){var i=e instanceof this.constructor?e:t(e.currentTarget).data("bs."+this.type);return i||(i=new this.constructor(e.currentTarget,this.getDelegateOptions()),t(e.currentTarget).data("bs."+this.type,i)),e instanceof t.Event&&(i.inState["focusout"==e.type?"focus":"hover"]=!1),i.isInStateTrue()?void 
0:(clearTimeout(i.timeout),i.hoverState="out",i.options.delay&&i.options.delay.hide?void(i.timeout=setTimeout(function(){"out"==i.hoverState&&i.hide()},i.options.delay.hide)):i.hide())},d.prototype.show=function(){var e=t.Event("show.bs."+this.type);if(this.hasContent()&&this.enabled){this.$element.trigger(e);var i=t.contains(this.$element[0].ownerDocument.documentElement,this.$element[0]);if(e.isDefaultPrevented()||!i)return;var o=this,n=this.tip(),s=this.getUID(this.type);this.setContent(),n.attr("id",s),this.$element.attr("aria-describedby",s),this.options.animation&&n.addClass("fade");var a="function"==typeof this.options.placement?this.options.placement.call(this,n[0],this.$element[0]):this.options.placement,r=/\s?auto?\s?/i,l=r.test(a);l&&(a=a.replace(r,"")||"top"),n.detach().css({top:0,left:0,display:"block"}).addClass(a).data("bs."+this.type,this),this.options.container?n.appendTo(t(document).find(this.options.container)):n.insertAfter(this.$element),this.$element.trigger("inserted.bs."+this.type);var h=this.getPosition(),p=n[0].offsetWidth,c=n[0].offsetHeight;if(l){var f=a,u=this.getPosition(this.$viewport);a="bottom"==a&&h.bottom+c>u.bottom?"top":"top"==a&&h.top-cu.width?"left":"left"==a&&h.left-pa.top+a.height&&(n.top=a.top+a.height-l)}else{var h=e.left-s,d=e.left+s+i;ha.right&&(n.left=a.left+a.width-d)}return n},d.prototype.getTitle=function(){var t,e=this.$element,i=this.options;return t=e.attr("data-original-title")||("function"==typeof i.title?i.title.call(e[0]):i.title)},d.prototype.getUID=function(t){do t+=~~(1e6*Math.random());while(document.getElementById(t));return t},d.prototype.tip=function(){if(!this.$tip&&(this.$tip=t(this.options.template),1!=this.$tip.length))throw new Error(this.type+" `template` option must consist of exactly 1 top-level element!");return this.$tip},d.prototype.arrow=function(){return 
this.$arrow=this.$arrow||this.tip().find(".tooltip-arrow")},d.prototype.enable=function(){this.enabled=!0},d.prototype.disable=function(){this.enabled=!1},d.prototype.toggleEnabled=function(){this.enabled=!this.enabled},d.prototype.toggle=function(e){var i=this;e&&(i=t(e.currentTarget).data("bs."+this.type),i||(i=new this.constructor(e.currentTarget,this.getDelegateOptions()),t(e.currentTarget).data("bs."+this.type,i))),e?(i.inState.click=!i.inState.click,i.isInStateTrue()?i.enter(i):i.leave(i)):i.tip().hasClass("in")?i.leave(i):i.enter(i)},d.prototype.destroy=function(){var t=this;clearTimeout(this.timeout),this.hide(function(){t.$element.off("."+t.type).removeData("bs."+t.type),t.$tip&&t.$tip.detach(),t.$tip=null,t.$arrow=null,t.$viewport=null,t.$element=null})},d.prototype.sanitizeHtml=function(t){return i(t,this.options.whiteList,this.options.sanitizeFn)};var p=t.fn.tooltip;t.fn.tooltip=o,t.fn.tooltip.Constructor=d,t.fn.tooltip.noConflict=function(){return t.fn.tooltip=p,this}}(jQuery),+function(t){"use strict";function e(e){return this.each(function(){var o=t(this),n=o.data("bs.popover"),s="object"==typeof e&&e;!n&&/destroy|hide/.test(e)||(n||o.data("bs.popover",n=new i(this,s)),"string"==typeof e&&n[e]())})}var i=function(t,e){this.init("popover",t,e)};if(!t.fn.tooltip)throw new Error("Popover requires tooltip.js");i.VERSION="3.4.1",i.DEFAULTS=t.extend({},t.fn.tooltip.Constructor.DEFAULTS,{placement:"right",trigger:"click",content:"",template:''}),i.prototype=t.extend({},t.fn.tooltip.Constructor.prototype),i.prototype.constructor=i,i.prototype.getDefaults=function(){return i.DEFAULTS},i.prototype.setContent=function(){var t=this.tip(),e=this.getTitle(),i=this.getContent();if(this.options.html){var o=typeof i;this.options.sanitize&&(e=this.sanitizeHtml(e),"string"===o&&(i=this.sanitizeHtml(i))),t.find(".popover-title").html(e),t.find(".popover-content").children().detach().end()["string"===o?"html":"append"](i)}else 
t.find(".popover-title").text(e),t.find(".popover-content").children().detach().end().text(i);t.removeClass("fade top bottom left right in"),t.find(".popover-title").html()||t.find(".popover-title").hide()},i.prototype.hasContent=function(){return this.getTitle()||this.getContent()},i.prototype.getContent=function(){var t=this.$element,e=this.options;return t.attr("data-content")||("function"==typeof e.content?e.content.call(t[0]):e.content)},i.prototype.arrow=function(){return this.$arrow=this.$arrow||this.tip().find(".arrow")};var o=t.fn.popover;t.fn.popover=e,t.fn.popover.Constructor=i,t.fn.popover.noConflict=function(){return t.fn.popover=o,this}}(jQuery),+function(t){"use strict";function e(e){return this.each(function(){var o=t(this),n=o.data("bs.tab");n||o.data("bs.tab",n=new i(this)),"string"==typeof e&&n[e]()})}var i=function(e){this.element=t(e)};i.VERSION="3.4.1",i.TRANSITION_DURATION=150,i.prototype.show=function(){var e=this.element,i=e.closest("ul:not(.dropdown-menu)"),o=e.data("target");if(o||(o=e.attr("href"),o=o&&o.replace(/.*(?=#[^\s]*$)/,"")),!e.parent("li").hasClass("active")){var n=i.find(".active:last a"),s=t.Event("hide.bs.tab",{relatedTarget:e[0]}),a=t.Event("show.bs.tab",{relatedTarget:n[0]});if(n.trigger(s),e.trigger(a),!a.isDefaultPrevented()&&!s.isDefaultPrevented()){var r=t(document).find(o);this.activate(e.closest("li"),i),this.activate(r,r.parent(),function(){n.trigger({type:"hidden.bs.tab",relatedTarget:e[0]}),e.trigger({type:"shown.bs.tab",relatedTarget:n[0]})})}}},i.prototype.activate=function(e,o,n){function s(){a.removeClass("active").find("> .dropdown-menu > .active").removeClass("active").end().find('[data-toggle="tab"]').attr("aria-expanded",!1),e.addClass("active").find('[data-toggle="tab"]').attr("aria-expanded",!0),r?(e[0].offsetWidth,e.addClass("in")):e.removeClass("fade"),e.parent(".dropdown-menu").length&&e.closest("li.dropdown").addClass("active").end().find('[data-toggle="tab"]').attr("aria-expanded",!0),n&&n()}var 
a=o.find("> .active"),r=n&&t.support.transition&&(a.length&&a.hasClass("fade")||!!o.find("> .fade").length);a.length&&r?a.one("bsTransitionEnd",s).emulateTransitionEnd(i.TRANSITION_DURATION):s(),a.removeClass("in")};var o=t.fn.tab;t.fn.tab=e,t.fn.tab.Constructor=i,t.fn.tab.noConflict=function(){return t.fn.tab=o,this};var n=function(i){i.preventDefault(),e.call(t(this),"show")};t(document).on("click.bs.tab.data-api",'[data-toggle="tab"]',n).on("click.bs.tab.data-api",'[data-toggle="pill"]',n)}(jQuery),+function(t){"use strict";function e(e){return this.each(function(){var o=t(this),n=o.data("bs.affix"),s="object"==typeof e&&e;n||o.data("bs.affix",n=new i(this,s)),"string"==typeof e&&n[e]()})}var i=function(e,o){this.options=t.extend({},i.DEFAULTS,o);var n=this.options.target===i.DEFAULTS.target?t(this.options.target):t(document).find(this.options.target);this.$target=n.on("scroll.bs.affix.data-api",t.proxy(this.checkPosition,this)).on("click.bs.affix.data-api",t.proxy(this.checkPositionWithEventLoop,this)),this.$element=t(e),this.affixed=null,this.unpin=null,this.pinnedOffset=null,this.checkPosition()};i.VERSION="3.4.1",i.RESET="affix affix-top affix-bottom",i.DEFAULTS={offset:0,target:window},i.prototype.getState=function(t,e,i,o){var n=this.$target.scrollTop(),s=this.$element.offset(),a=this.$target.height();if(null!=i&&"top"==this.affixed)return i>n?"top":!1;if("bottom"==this.affixed)return null!=i?n+this.unpin<=s.top?!1:"bottom":t-o>=n+a?!1:"bottom";var r=null==this.affixed,l=r?n:s.top,h=r?a:e;return null!=i&&i>=n?"top":null!=o&&l+h>=t-o?"bottom":!1},i.prototype.getPinnedOffset=function(){if(this.pinnedOffset)return this.pinnedOffset;this.$element.removeClass(i.RESET).addClass("affix");var t=this.$target.scrollTop(),e=this.$element.offset();return this.pinnedOffset=e.top-t},i.prototype.checkPositionWithEventLoop=function(){setTimeout(t.proxy(this.checkPosition,this),1)},i.prototype.checkPosition=function(){ +if(this.$element.is(":visible")){var 
e=this.$element.height(),o=this.options.offset,n=o.top,s=o.bottom,a=Math.max(t(document).height(),t(document.body).height());"object"!=typeof o&&(s=n=o),"function"==typeof n&&(n=o.top(this.$element)),"function"==typeof s&&(s=o.bottom(this.$element));var r=this.getState(a,e,n,s);if(this.affixed!=r){null!=this.unpin&&this.$element.css("top","");var l="affix"+(r?"-"+r:""),h=t.Event(l+".bs.affix");if(this.$element.trigger(h),h.isDefaultPrevented())return;this.affixed=r,this.unpin="bottom"==r?this.getPinnedOffset():null,this.$element.removeClass(i.RESET).addClass(l).trigger(l.replace("affix","affixed")+".bs.affix")}"bottom"==r&&this.$element.offset({top:a-e-s})}};var o=t.fn.affix;t.fn.affix=e,t.fn.affix.Constructor=i,t.fn.affix.noConflict=function(){return t.fn.affix=o,this},t(window).on("load",function(){t('[data-spy="affix"]').each(function(){var i=t(this),o=i.data();o.offset=o.offset||{},null!=o.offsetBottom&&(o.offset.bottom=o.offsetBottom),null!=o.offsetTop&&(o.offset.top=o.offsetTop),e.call(i,o)})})}(jQuery),+function(t){"use strict";function e(e){var i,o=e.attr("data-target")||(i=e.attr("href"))&&i.replace(/.*(?=#[^\s]+$)/,"");return t(document).find(o)}function i(e){return this.each(function(){var i=t(this),n=i.data("bs.collapse"),s=t.extend({},o.DEFAULTS,i.data(),"object"==typeof e&&e);!n&&s.toggle&&/show|hide/.test(e)&&(s.toggle=!1),n||i.data("bs.collapse",n=new o(this,s)),"string"==typeof e&&n[e]()})}var o=function(e,i){this.$element=t(e),this.options=t.extend({},o.DEFAULTS,i),this.$trigger=t('[data-toggle="collapse"][href="#'+e.id+'"],[data-toggle="collapse"][data-target="#'+e.id+'"]'),this.transitioning=null,this.options.parent?this.$parent=this.getParent():this.addAriaAndCollapsedClass(this.$element,this.$trigger),this.options.toggle&&this.toggle()};o.VERSION="3.4.1",o.TRANSITION_DURATION=350,o.DEFAULTS={toggle:!0},o.prototype.dimension=function(){var t=this.$element.hasClass("width");return 
t?"width":"height"},o.prototype.show=function(){if(!this.transitioning&&!this.$element.hasClass("in")){var e,n=this.$parent&&this.$parent.children(".panel").children(".in, .collapsing");if(!(n&&n.length&&(e=n.data("bs.collapse"),e&&e.transitioning))){var s=t.Event("show.bs.collapse");if(this.$element.trigger(s),!s.isDefaultPrevented()){n&&n.length&&(i.call(n,"hide"),e||n.data("bs.collapse",null));var a=this.dimension();this.$element.removeClass("collapse").addClass("collapsing")[a](0).attr("aria-expanded",!0),this.$trigger.removeClass("collapsed").attr("aria-expanded",!0),this.transitioning=1;var r=function(){this.$element.removeClass("collapsing").addClass("collapse in")[a](""),this.transitioning=0,this.$element.trigger("shown.bs.collapse")};if(!t.support.transition)return r.call(this);var l=t.camelCase(["scroll",a].join("-"));this.$element.one("bsTransitionEnd",t.proxy(r,this)).emulateTransitionEnd(o.TRANSITION_DURATION)[a](this.$element[0][l])}}}},o.prototype.hide=function(){if(!this.transitioning&&this.$element.hasClass("in")){var e=t.Event("hide.bs.collapse");if(this.$element.trigger(e),!e.isDefaultPrevented()){var i=this.dimension();this.$element[i](this.$element[i]())[0].offsetHeight,this.$element.addClass("collapsing").removeClass("collapse in").attr("aria-expanded",!1),this.$trigger.addClass("collapsed").attr("aria-expanded",!1),this.transitioning=1;var n=function(){this.transitioning=0,this.$element.removeClass("collapsing").addClass("collapse").trigger("hidden.bs.collapse")};return t.support.transition?void this.$element[i](0).one("bsTransitionEnd",t.proxy(n,this)).emulateTransitionEnd(o.TRANSITION_DURATION):n.call(this)}}},o.prototype.toggle=function(){this[this.$element.hasClass("in")?"hide":"show"]()},o.prototype.getParent=function(){return t(document).find(this.options.parent).find('[data-toggle="collapse"][data-parent="'+this.options.parent+'"]').each(t.proxy(function(i,o){var 
n=t(o);this.addAriaAndCollapsedClass(e(n),n)},this)).end()},o.prototype.addAriaAndCollapsedClass=function(t,e){var i=t.hasClass("in");t.attr("aria-expanded",i),e.toggleClass("collapsed",!i).attr("aria-expanded",i)};var n=t.fn.collapse;t.fn.collapse=i,t.fn.collapse.Constructor=o,t.fn.collapse.noConflict=function(){return t.fn.collapse=n,this},t(document).on("click.bs.collapse.data-api",'[data-toggle="collapse"]',function(o){var n=t(this);n.attr("data-target")||o.preventDefault();var s=e(n),a=s.data("bs.collapse"),r=a?"toggle":n.data();i.call(s,r)})}(jQuery),+function(t){"use strict";function e(i,o){this.$body=t(document.body),this.$scrollElement=t(t(i).is(document.body)?window:i),this.options=t.extend({},e.DEFAULTS,o),this.selector=(this.options.target||"")+" .nav li > a",this.offsets=[],this.targets=[],this.activeTarget=null,this.scrollHeight=0,this.$scrollElement.on("scroll.bs.scrollspy",t.proxy(this.process,this)),this.refresh(),this.process()}function i(i){return this.each(function(){var o=t(this),n=o.data("bs.scrollspy"),s="object"==typeof i&&i;n||o.data("bs.scrollspy",n=new e(this,s)),"string"==typeof i&&n[i]()})}e.VERSION="3.4.1",e.DEFAULTS={offset:10},e.prototype.getScrollHeight=function(){return this.$scrollElement[0].scrollHeight||Math.max(this.$body[0].scrollHeight,document.documentElement.scrollHeight)},e.prototype.refresh=function(){var e=this,i="offset",o=0;this.offsets=[],this.targets=[],this.scrollHeight=this.getScrollHeight(),t.isWindow(this.$scrollElement[0])||(i="position",o=this.$scrollElement.scrollTop()),this.$body.find(this.selector).map(function(){var e=t(this),n=e.data("target")||e.attr("href"),s=/^#./.test(n)&&t(n);return s&&s.length&&s.is(":visible")&&[[s[i]().top+o,n]]||null}).sort(function(t,e){return t[0]-e[0]}).each(function(){e.offsets.push(this[0]),e.targets.push(this[1])})},e.prototype.process=function(){var 
t,e=this.$scrollElement.scrollTop()+this.options.offset,i=this.getScrollHeight(),o=this.options.offset+i-this.$scrollElement.height(),n=this.offsets,s=this.targets,a=this.activeTarget;if(this.scrollHeight!=i&&this.refresh(),e>=o)return a!=(t=s[s.length-1])&&this.activate(t);if(a&&e=n[t]&&(void 0===n[t+1]||ea?this[a+this.length]:this[a]:e.call(this)},pushStack:function(a){var b=n.merge(this.constructor(),a);return b.prevObject=this,b.context=this.context,b},each:function(a){return n.each(this,a)},map:function(a){return this.pushStack(n.map(this,function(b,c){return a.call(b,c,b)}))},slice:function(){return this.pushStack(e.apply(this,arguments))},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},eq:function(a){var b=this.length,c=+a+(0>a?b:0);return this.pushStack(c>=0&&b>c?[this[c]]:[])},end:function(){return this.prevObject||this.constructor()},push:g,sort:c.sort,splice:c.splice},n.extend=n.fn.extend=function(){var a,b,c,d,e,f,g=arguments[0]||{},h=1,i=arguments.length,j=!1;for("boolean"==typeof g&&(j=g,g=arguments[h]||{},h++),"object"==typeof g||n.isFunction(g)||(g={}),h===i&&(g=this,h--);i>h;h++)if(null!=(a=arguments[h]))for(b in a)c=g[b],d=a[b],g!==d&&(j&&d&&(n.isPlainObject(d)||(e=n.isArray(d)))?(e?(e=!1,f=c&&n.isArray(c)?c:[]):f=c&&n.isPlainObject(c)?c:{},g[b]=n.extend(j,f,d)):void 0!==d&&(g[b]=d));return g},n.extend({expando:"jQuery"+(m+Math.random()).replace(/\D/g,""),isReady:!0,error:function(a){throw new Error(a)},noop:function(){},isFunction:function(a){return"function"===n.type(a)},isArray:Array.isArray,isWindow:function(a){return null!=a&&a===a.window},isNumeric:function(a){var b=a&&a.toString();return!n.isArray(a)&&b-parseFloat(b)+1>=0},isPlainObject:function(a){var b;if("object"!==n.type(a)||a.nodeType||n.isWindow(a))return!1;if(a.constructor&&!k.call(a,"constructor")&&!k.call(a.constructor.prototype||{},"isPrototypeOf"))return!1;for(b in a);return void 0===b||k.call(a,b)},isEmptyObject:function(a){var b;for(b in 
a)return!1;return!0},type:function(a){return null==a?a+"":"object"==typeof a||"function"==typeof a?i[j.call(a)]||"object":typeof a},globalEval:function(a){var b,c=eval;a=n.trim(a),a&&(1===a.indexOf("use strict")?(b=d.createElement("script"),b.text=a,d.head.appendChild(b).parentNode.removeChild(b)):c(a))},camelCase:function(a){return a.replace(p,"ms-").replace(q,r)},nodeName:function(a,b){return a.nodeName&&a.nodeName.toLowerCase()===b.toLowerCase()},each:function(a,b){var c,d=0;if(s(a)){for(c=a.length;c>d;d++)if(b.call(a[d],d,a[d])===!1)break}else for(d in a)if(b.call(a[d],d,a[d])===!1)break;return a},trim:function(a){return null==a?"":(a+"").replace(o,"")},makeArray:function(a,b){var c=b||[];return null!=a&&(s(Object(a))?n.merge(c,"string"==typeof a?[a]:a):g.call(c,a)),c},inArray:function(a,b,c){return null==b?-1:h.call(b,a,c)},merge:function(a,b){for(var c=+b.length,d=0,e=a.length;c>d;d++)a[e++]=b[d];return a.length=e,a},grep:function(a,b,c){for(var d,e=[],f=0,g=a.length,h=!c;g>f;f++)d=!b(a[f],f),d!==h&&e.push(a[f]);return e},map:function(a,b,c){var d,e,g=0,h=[];if(s(a))for(d=a.length;d>g;g++)e=b(a[g],g,c),null!=e&&h.push(e);else for(g in a)e=b(a[g],g,c),null!=e&&h.push(e);return f.apply([],h)},guid:1,proxy:function(a,b){var c,d,f;return"string"==typeof b&&(c=a[b],b=a,a=c),n.isFunction(a)?(d=e.call(arguments,2),f=function(){return a.apply(b||this,d.concat(e.call(arguments)))},f.guid=a.guid=a.guid||n.guid++,f):void 0},now:Date.now,support:l}),"function"==typeof Symbol&&(n.fn[Symbol.iterator]=c[Symbol.iterator]),n.each("Boolean Number String Function Array Date RegExp Object Error Symbol".split(" "),function(a,b){i["[object "+b+"]"]=b.toLowerCase()});function s(a){var b=!!a&&"length"in a&&a.length,c=n.type(a);return"function"===c||n.isWindow(a)?!1:"array"===c||0===b||"number"==typeof b&&b>0&&b-1 in a}var t=function(a){var b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u="sizzle"+1*new Date,v=a.document,w=0,x=0,y=ga(),z=ga(),A=ga(),B=function(a,b){return 
a===b&&(l=!0),0},C=1<<31,D={}.hasOwnProperty,E=[],F=E.pop,G=E.push,H=E.push,I=E.slice,J=function(a,b){for(var c=0,d=a.length;d>c;c++)if(a[c]===b)return c;return-1},K="checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|ismap|loop|multiple|open|readonly|required|scoped",L="[\\x20\\t\\r\\n\\f]",M="(?:\\\\.|[\\w-]|[^\\x00-\\xa0])+",N="\\["+L+"*("+M+")(?:"+L+"*([*^$|!~]?=)"+L+"*(?:'((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\"|("+M+"))|)"+L+"*\\]",O=":("+M+")(?:\\((('((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\")|((?:\\\\.|[^\\\\()[\\]]|"+N+")*)|.*)\\)|)",P=new RegExp(L+"+","g"),Q=new RegExp("^"+L+"+|((?:^|[^\\\\])(?:\\\\.)*)"+L+"+$","g"),R=new RegExp("^"+L+"*,"+L+"*"),S=new RegExp("^"+L+"*([>+~]|"+L+")"+L+"*"),T=new RegExp("="+L+"*([^\\]'\"]*?)"+L+"*\\]","g"),U=new RegExp(O),V=new RegExp("^"+M+"$"),W={ID:new RegExp("^#("+M+")"),CLASS:new RegExp("^\\.("+M+")"),TAG:new RegExp("^("+M+"|[*])"),ATTR:new RegExp("^"+N),PSEUDO:new RegExp("^"+O),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+L+"*(even|odd|(([+-]|)(\\d*)n|)"+L+"*(?:([+-]|)"+L+"*(\\d+)|))"+L+"*\\)|)","i"),bool:new RegExp("^(?:"+K+")$","i"),needsContext:new RegExp("^"+L+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+L+"*((?:-\\d)?\\d*)"+L+"*\\)|)(?=[^-]|$)","i")},X=/^(?:input|select|textarea|button)$/i,Y=/^h\d$/i,Z=/^[^{]+\{\s*\[native \w/,$=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,_=/[+~]/,aa=/'|\\/g,ba=new RegExp("\\\\([\\da-f]{1,6}"+L+"?|("+L+")|.)","ig"),ca=function(a,b,c){var d="0x"+b-65536;return d!==d||c?b:0>d?String.fromCharCode(d+65536):String.fromCharCode(d>>10|55296,1023&d|56320)},da=function(){m()};try{H.apply(E=I.call(v.childNodes),v.childNodes),E[v.childNodes.length].nodeType}catch(ea){H={apply:E.length?function(a,b){G.apply(a,I.call(b))}:function(a,b){var c=a.length,d=0;while(a[c++]=b[d++]);a.length=c-1}}}function fa(a,b,d,e){var f,h,j,k,l,o,r,s,w=b&&b.ownerDocument,x=b?b.nodeType:9;if(d=d||[],"string"!=typeof a||!a||1!==x&&9!==x&&11!==x)return 
d;if(!e&&((b?b.ownerDocument||b:v)!==n&&m(b),b=b||n,p)){if(11!==x&&(o=$.exec(a)))if(f=o[1]){if(9===x){if(!(j=b.getElementById(f)))return d;if(j.id===f)return d.push(j),d}else if(w&&(j=w.getElementById(f))&&t(b,j)&&j.id===f)return d.push(j),d}else{if(o[2])return H.apply(d,b.getElementsByTagName(a)),d;if((f=o[3])&&c.getElementsByClassName&&b.getElementsByClassName)return H.apply(d,b.getElementsByClassName(f)),d}if(c.qsa&&!A[a+" "]&&(!q||!q.test(a))){if(1!==x)w=b,s=a;else if("object"!==b.nodeName.toLowerCase()){(k=b.getAttribute("id"))?k=k.replace(aa,"\\$&"):b.setAttribute("id",k=u),r=g(a),h=r.length,l=V.test(k)?"#"+k:"[id='"+k+"']";while(h--)r[h]=l+" "+qa(r[h]);s=r.join(","),w=_.test(a)&&oa(b.parentNode)||b}if(s)try{return H.apply(d,w.querySelectorAll(s)),d}catch(y){}finally{k===u&&b.removeAttribute("id")}}}return i(a.replace(Q,"$1"),b,d,e)}function ga(){var a=[];function b(c,e){return a.push(c+" ")>d.cacheLength&&delete b[a.shift()],b[c+" "]=e}return b}function ha(a){return a[u]=!0,a}function ia(a){var b=n.createElement("div");try{return!!a(b)}catch(c){return!1}finally{b.parentNode&&b.parentNode.removeChild(b),b=null}}function ja(a,b){var c=a.split("|"),e=c.length;while(e--)d.attrHandle[c[e]]=b}function ka(a,b){var c=b&&a,d=c&&1===a.nodeType&&1===b.nodeType&&(~b.sourceIndex||C)-(~a.sourceIndex||C);if(d)return d;if(c)while(c=c.nextSibling)if(c===b)return-1;return a?1:-1}function la(a){return function(b){var c=b.nodeName.toLowerCase();return"input"===c&&b.type===a}}function ma(a){return function(b){var c=b.nodeName.toLowerCase();return("input"===c||"button"===c)&&b.type===a}}function na(a){return ha(function(b){return b=+b,ha(function(c,d){var e,f=a([],c.length,b),g=f.length;while(g--)c[e=f[g]]&&(c[e]=!(d[e]=c[e]))})})}function oa(a){return a&&"undefined"!=typeof a.getElementsByTagName&&a}c=fa.support={},f=fa.isXML=function(a){var b=a&&(a.ownerDocument||a).documentElement;return b?"HTML"!==b.nodeName:!1},m=fa.setDocument=function(a){var 
b,e,g=a?a.ownerDocument||a:v;return g!==n&&9===g.nodeType&&g.documentElement?(n=g,o=n.documentElement,p=!f(n),(e=n.defaultView)&&e.top!==e&&(e.addEventListener?e.addEventListener("unload",da,!1):e.attachEvent&&e.attachEvent("onunload",da)),c.attributes=ia(function(a){return a.className="i",!a.getAttribute("className")}),c.getElementsByTagName=ia(function(a){return a.appendChild(n.createComment("")),!a.getElementsByTagName("*").length}),c.getElementsByClassName=Z.test(n.getElementsByClassName),c.getById=ia(function(a){return o.appendChild(a).id=u,!n.getElementsByName||!n.getElementsByName(u).length}),c.getById?(d.find.ID=function(a,b){if("undefined"!=typeof b.getElementById&&p){var c=b.getElementById(a);return c?[c]:[]}},d.filter.ID=function(a){var b=a.replace(ba,ca);return function(a){return a.getAttribute("id")===b}}):(delete d.find.ID,d.filter.ID=function(a){var b=a.replace(ba,ca);return function(a){var c="undefined"!=typeof a.getAttributeNode&&a.getAttributeNode("id");return c&&c.value===b}}),d.find.TAG=c.getElementsByTagName?function(a,b){return"undefined"!=typeof b.getElementsByTagName?b.getElementsByTagName(a):c.qsa?b.querySelectorAll(a):void 0}:function(a,b){var c,d=[],e=0,f=b.getElementsByTagName(a);if("*"===a){while(c=f[e++])1===c.nodeType&&d.push(c);return d}return f},d.find.CLASS=c.getElementsByClassName&&function(a,b){return"undefined"!=typeof b.getElementsByClassName&&p?b.getElementsByClassName(a):void 0},r=[],q=[],(c.qsa=Z.test(n.querySelectorAll))&&(ia(function(a){o.appendChild(a).innerHTML="",a.querySelectorAll("[msallowcapture^='']").length&&q.push("[*^$]="+L+"*(?:''|\"\")"),a.querySelectorAll("[selected]").length||q.push("\\["+L+"*(?:value|"+K+")"),a.querySelectorAll("[id~="+u+"-]").length||q.push("~="),a.querySelectorAll(":checked").length||q.push(":checked"),a.querySelectorAll("a#"+u+"+*").length||q.push(".#.+[+~]")}),ia(function(a){var 
b=n.createElement("input");b.setAttribute("type","hidden"),a.appendChild(b).setAttribute("name","D"),a.querySelectorAll("[name=d]").length&&q.push("name"+L+"*[*^$|!~]?="),a.querySelectorAll(":enabled").length||q.push(":enabled",":disabled"),a.querySelectorAll("*,:x"),q.push(",.*:")})),(c.matchesSelector=Z.test(s=o.matches||o.webkitMatchesSelector||o.mozMatchesSelector||o.oMatchesSelector||o.msMatchesSelector))&&ia(function(a){c.disconnectedMatch=s.call(a,"div"),s.call(a,"[s!='']:x"),r.push("!=",O)}),q=q.length&&new RegExp(q.join("|")),r=r.length&&new RegExp(r.join("|")),b=Z.test(o.compareDocumentPosition),t=b||Z.test(o.contains)?function(a,b){var c=9===a.nodeType?a.documentElement:a,d=b&&b.parentNode;return a===d||!(!d||1!==d.nodeType||!(c.contains?c.contains(d):a.compareDocumentPosition&&16&a.compareDocumentPosition(d)))}:function(a,b){if(b)while(b=b.parentNode)if(b===a)return!0;return!1},B=b?function(a,b){if(a===b)return l=!0,0;var d=!a.compareDocumentPosition-!b.compareDocumentPosition;return d?d:(d=(a.ownerDocument||a)===(b.ownerDocument||b)?a.compareDocumentPosition(b):1,1&d||!c.sortDetached&&b.compareDocumentPosition(a)===d?a===n||a.ownerDocument===v&&t(v,a)?-1:b===n||b.ownerDocument===v&&t(v,b)?1:k?J(k,a)-J(k,b):0:4&d?-1:1)}:function(a,b){if(a===b)return l=!0,0;var c,d=0,e=a.parentNode,f=b.parentNode,g=[a],h=[b];if(!e||!f)return a===n?-1:b===n?1:e?-1:f?1:k?J(k,a)-J(k,b):0;if(e===f)return ka(a,b);c=a;while(c=c.parentNode)g.unshift(c);c=b;while(c=c.parentNode)h.unshift(c);while(g[d]===h[d])d++;return d?ka(g[d],h[d]):g[d]===v?-1:h[d]===v?1:0},n):n},fa.matches=function(a,b){return fa(a,null,null,b)},fa.matchesSelector=function(a,b){if((a.ownerDocument||a)!==n&&m(a),b=b.replace(T,"='$1']"),c.matchesSelector&&p&&!A[b+" "]&&(!r||!r.test(b))&&(!q||!q.test(b)))try{var d=s.call(a,b);if(d||c.disconnectedMatch||a.document&&11!==a.document.nodeType)return d}catch(e){}return 
fa(b,n,null,[a]).length>0},fa.contains=function(a,b){return(a.ownerDocument||a)!==n&&m(a),t(a,b)},fa.attr=function(a,b){(a.ownerDocument||a)!==n&&m(a);var e=d.attrHandle[b.toLowerCase()],f=e&&D.call(d.attrHandle,b.toLowerCase())?e(a,b,!p):void 0;return void 0!==f?f:c.attributes||!p?a.getAttribute(b):(f=a.getAttributeNode(b))&&f.specified?f.value:null},fa.error=function(a){throw new Error("Syntax error, unrecognized expression: "+a)},fa.uniqueSort=function(a){var b,d=[],e=0,f=0;if(l=!c.detectDuplicates,k=!c.sortStable&&a.slice(0),a.sort(B),l){while(b=a[f++])b===a[f]&&(e=d.push(f));while(e--)a.splice(d[e],1)}return k=null,a},e=fa.getText=function(a){var b,c="",d=0,f=a.nodeType;if(f){if(1===f||9===f||11===f){if("string"==typeof a.textContent)return a.textContent;for(a=a.firstChild;a;a=a.nextSibling)c+=e(a)}else if(3===f||4===f)return a.nodeValue}else while(b=a[d++])c+=e(b);return c},d=fa.selectors={cacheLength:50,createPseudo:ha,match:W,attrHandle:{},find:{},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(a){return a[1]=a[1].replace(ba,ca),a[3]=(a[3]||a[4]||a[5]||"").replace(ba,ca),"~="===a[2]&&(a[3]=" "+a[3]+" "),a.slice(0,4)},CHILD:function(a){return a[1]=a[1].toLowerCase(),"nth"===a[1].slice(0,3)?(a[3]||fa.error(a[0]),a[4]=+(a[4]?a[5]+(a[6]||1):2*("even"===a[3]||"odd"===a[3])),a[5]=+(a[7]+a[8]||"odd"===a[3])):a[3]&&fa.error(a[0]),a},PSEUDO:function(a){var b,c=!a[6]&&a[2];return W.CHILD.test(a[0])?null:(a[3]?a[2]=a[4]||a[5]||"":c&&U.test(c)&&(b=g(c,!0))&&(b=c.indexOf(")",c.length-b)-c.length)&&(a[0]=a[0].slice(0,b),a[2]=c.slice(0,b)),a.slice(0,3))}},filter:{TAG:function(a){var b=a.replace(ba,ca).toLowerCase();return"*"===a?function(){return!0}:function(a){return a.nodeName&&a.nodeName.toLowerCase()===b}},CLASS:function(a){var b=y[a+" "];return b||(b=new RegExp("(^|"+L+")"+a+"("+L+"|$)"))&&y(a,function(a){return b.test("string"==typeof 
a.className&&a.className||"undefined"!=typeof a.getAttribute&&a.getAttribute("class")||"")})},ATTR:function(a,b,c){return function(d){var e=fa.attr(d,a);return null==e?"!="===b:b?(e+="","="===b?e===c:"!="===b?e!==c:"^="===b?c&&0===e.indexOf(c):"*="===b?c&&e.indexOf(c)>-1:"$="===b?c&&e.slice(-c.length)===c:"~="===b?(" "+e.replace(P," ")+" ").indexOf(c)>-1:"|="===b?e===c||e.slice(0,c.length+1)===c+"-":!1):!0}},CHILD:function(a,b,c,d,e){var f="nth"!==a.slice(0,3),g="last"!==a.slice(-4),h="of-type"===b;return 1===d&&0===e?function(a){return!!a.parentNode}:function(b,c,i){var j,k,l,m,n,o,p=f!==g?"nextSibling":"previousSibling",q=b.parentNode,r=h&&b.nodeName.toLowerCase(),s=!i&&!h,t=!1;if(q){if(f){while(p){m=b;while(m=m[p])if(h?m.nodeName.toLowerCase()===r:1===m.nodeType)return!1;o=p="only"===a&&!o&&"nextSibling"}return!0}if(o=[g?q.firstChild:q.lastChild],g&&s){m=q,l=m[u]||(m[u]={}),k=l[m.uniqueID]||(l[m.uniqueID]={}),j=k[a]||[],n=j[0]===w&&j[1],t=n&&j[2],m=n&&q.childNodes[n];while(m=++n&&m&&m[p]||(t=n=0)||o.pop())if(1===m.nodeType&&++t&&m===b){k[a]=[w,n,t];break}}else if(s&&(m=b,l=m[u]||(m[u]={}),k=l[m.uniqueID]||(l[m.uniqueID]={}),j=k[a]||[],n=j[0]===w&&j[1],t=n),t===!1)while(m=++n&&m&&m[p]||(t=n=0)||o.pop())if((h?m.nodeName.toLowerCase()===r:1===m.nodeType)&&++t&&(s&&(l=m[u]||(m[u]={}),k=l[m.uniqueID]||(l[m.uniqueID]={}),k[a]=[w,t]),m===b))break;return t-=e,t===d||t%d===0&&t/d>=0}}},PSEUDO:function(a,b){var c,e=d.pseudos[a]||d.setFilters[a.toLowerCase()]||fa.error("unsupported pseudo: "+a);return e[u]?e(b):e.length>1?(c=[a,a,"",b],d.setFilters.hasOwnProperty(a.toLowerCase())?ha(function(a,c){var d,f=e(a,b),g=f.length;while(g--)d=J(a,f[g]),a[d]=!(c[d]=f[g])}):function(a){return e(a,0,c)}):e}},pseudos:{not:ha(function(a){var b=[],c=[],d=h(a.replace(Q,"$1"));return d[u]?ha(function(a,b,c,e){var f,g=d(a,null,e,[]),h=a.length;while(h--)(f=g[h])&&(a[h]=!(b[h]=f))}):function(a,e,f){return b[0]=a,d(b,null,f,c),b[0]=null,!c.pop()}}),has:ha(function(a){return function(b){return 
fa(a,b).length>0}}),contains:ha(function(a){return a=a.replace(ba,ca),function(b){return(b.textContent||b.innerText||e(b)).indexOf(a)>-1}}),lang:ha(function(a){return V.test(a||"")||fa.error("unsupported lang: "+a),a=a.replace(ba,ca).toLowerCase(),function(b){var c;do if(c=p?b.lang:b.getAttribute("xml:lang")||b.getAttribute("lang"))return c=c.toLowerCase(),c===a||0===c.indexOf(a+"-");while((b=b.parentNode)&&1===b.nodeType);return!1}}),target:function(b){var c=a.location&&a.location.hash;return c&&c.slice(1)===b.id},root:function(a){return a===o},focus:function(a){return a===n.activeElement&&(!n.hasFocus||n.hasFocus())&&!!(a.type||a.href||~a.tabIndex)},enabled:function(a){return a.disabled===!1},disabled:function(a){return a.disabled===!0},checked:function(a){var b=a.nodeName.toLowerCase();return"input"===b&&!!a.checked||"option"===b&&!!a.selected},selected:function(a){return a.parentNode&&a.parentNode.selectedIndex,a.selected===!0},empty:function(a){for(a=a.firstChild;a;a=a.nextSibling)if(a.nodeType<6)return!1;return!0},parent:function(a){return!d.pseudos.empty(a)},header:function(a){return Y.test(a.nodeName)},input:function(a){return X.test(a.nodeName)},button:function(a){var b=a.nodeName.toLowerCase();return"input"===b&&"button"===a.type||"button"===b},text:function(a){var b;return"input"===a.nodeName.toLowerCase()&&"text"===a.type&&(null==(b=a.getAttribute("type"))||"text"===b.toLowerCase())},first:na(function(){return[0]}),last:na(function(a,b){return[b-1]}),eq:na(function(a,b,c){return[0>c?c+b:c]}),even:na(function(a,b){for(var c=0;b>c;c+=2)a.push(c);return a}),odd:na(function(a,b){for(var c=1;b>c;c+=2)a.push(c);return a}),lt:na(function(a,b,c){for(var d=0>c?c+b:c;--d>=0;)a.push(d);return a}),gt:na(function(a,b,c){for(var d=0>c?c+b:c;++db;b++)d+=a[b].value;return d}function ra(a,b,c){var d=b.dir,e=c&&"parentNode"===d,f=x++;return b.first?function(b,c,f){while(b=b[d])if(1===b.nodeType||e)return a(b,c,f)}:function(b,c,g){var 
h,i,j,k=[w,f];if(g){while(b=b[d])if((1===b.nodeType||e)&&a(b,c,g))return!0}else while(b=b[d])if(1===b.nodeType||e){if(j=b[u]||(b[u]={}),i=j[b.uniqueID]||(j[b.uniqueID]={}),(h=i[d])&&h[0]===w&&h[1]===f)return k[2]=h[2];if(i[d]=k,k[2]=a(b,c,g))return!0}}}function sa(a){return a.length>1?function(b,c,d){var e=a.length;while(e--)if(!a[e](b,c,d))return!1;return!0}:a[0]}function ta(a,b,c){for(var d=0,e=b.length;e>d;d++)fa(a,b[d],c);return c}function ua(a,b,c,d,e){for(var f,g=[],h=0,i=a.length,j=null!=b;i>h;h++)(f=a[h])&&(c&&!c(f,d,e)||(g.push(f),j&&b.push(h)));return g}function va(a,b,c,d,e,f){return d&&!d[u]&&(d=va(d)),e&&!e[u]&&(e=va(e,f)),ha(function(f,g,h,i){var j,k,l,m=[],n=[],o=g.length,p=f||ta(b||"*",h.nodeType?[h]:h,[]),q=!a||!f&&b?p:ua(p,m,a,h,i),r=c?e||(f?a:o||d)?[]:g:q;if(c&&c(q,r,h,i),d){j=ua(r,n),d(j,[],h,i),k=j.length;while(k--)(l=j[k])&&(r[n[k]]=!(q[n[k]]=l))}if(f){if(e||a){if(e){j=[],k=r.length;while(k--)(l=r[k])&&j.push(q[k]=l);e(null,r=[],j,i)}k=r.length;while(k--)(l=r[k])&&(j=e?J(f,l):m[k])>-1&&(f[j]=!(g[j]=l))}}else r=ua(r===g?r.splice(o,r.length):r),e?e(null,g,r,i):H.apply(g,r)})}function wa(a){for(var b,c,e,f=a.length,g=d.relative[a[0].type],h=g||d.relative[" "],i=g?1:0,k=ra(function(a){return a===b},h,!0),l=ra(function(a){return J(b,a)>-1},h,!0),m=[function(a,c,d){var e=!g&&(d||c!==j)||((b=c).nodeType?k(a,c,d):l(a,c,d));return b=null,e}];f>i;i++)if(c=d.relative[a[i].type])m=[ra(sa(m),c)];else{if(c=d.filter[a[i].type].apply(null,a[i].matches),c[u]){for(e=++i;f>e;e++)if(d.relative[a[e].type])break;return va(i>1&&sa(m),i>1&&qa(a.slice(0,i-1).concat({value:" "===a[i-2].type?"*":""})).replace(Q,"$1"),c,e>i&&wa(a.slice(i,e)),f>e&&wa(a=a.slice(e)),f>e&&qa(a))}m.push(c)}return sa(m)}function xa(a,b){var c=b.length>0,e=a.length>0,f=function(f,g,h,i,k){var 
l,o,q,r=0,s="0",t=f&&[],u=[],v=j,x=f||e&&d.find.TAG("*",k),y=w+=null==v?1:Math.random()||.1,z=x.length;for(k&&(j=g===n||g||k);s!==z&&null!=(l=x[s]);s++){if(e&&l){o=0,g||l.ownerDocument===n||(m(l),h=!p);while(q=a[o++])if(q(l,g||n,h)){i.push(l);break}k&&(w=y)}c&&((l=!q&&l)&&r--,f&&t.push(l))}if(r+=s,c&&s!==r){o=0;while(q=b[o++])q(t,u,g,h);if(f){if(r>0)while(s--)t[s]||u[s]||(u[s]=F.call(i));u=ua(u)}H.apply(i,u),k&&!f&&u.length>0&&r+b.length>1&&fa.uniqueSort(i)}return k&&(w=y,j=v),t};return c?ha(f):f}return h=fa.compile=function(a,b){var c,d=[],e=[],f=A[a+" "];if(!f){b||(b=g(a)),c=b.length;while(c--)f=wa(b[c]),f[u]?d.push(f):e.push(f);f=A(a,xa(e,d)),f.selector=a}return f},i=fa.select=function(a,b,e,f){var i,j,k,l,m,n="function"==typeof a&&a,o=!f&&g(a=n.selector||a);if(e=e||[],1===o.length){if(j=o[0]=o[0].slice(0),j.length>2&&"ID"===(k=j[0]).type&&c.getById&&9===b.nodeType&&p&&d.relative[j[1].type]){if(b=(d.find.ID(k.matches[0].replace(ba,ca),b)||[])[0],!b)return e;n&&(b=b.parentNode),a=a.slice(j.shift().value.length)}i=W.needsContext.test(a)?0:j.length;while(i--){if(k=j[i],d.relative[l=k.type])break;if((m=d.find[l])&&(f=m(k.matches[0].replace(ba,ca),_.test(j[0].type)&&oa(b.parentNode)||b))){if(j.splice(i,1),a=f.length&&qa(j),!a)return H.apply(e,f),e;break}}}return(n||h(a,o))(f,b,!p,e,!b||_.test(a)&&oa(b.parentNode)||b),e},c.sortStable=u.split("").sort(B).join("")===u,c.detectDuplicates=!!l,m(),c.sortDetached=ia(function(a){return 1&a.compareDocumentPosition(n.createElement("div"))}),ia(function(a){return a.innerHTML="","#"===a.firstChild.getAttribute("href")})||ja("type|href|height|width",function(a,b,c){return c?void 0:a.getAttribute(b,"type"===b.toLowerCase()?1:2)}),c.attributes&&ia(function(a){return a.innerHTML="",a.firstChild.setAttribute("value",""),""===a.firstChild.getAttribute("value")})||ja("value",function(a,b,c){return c||"input"!==a.nodeName.toLowerCase()?void 0:a.defaultValue}),ia(function(a){return 
null==a.getAttribute("disabled")})||ja(K,function(a,b,c){var d;return c?void 0:a[b]===!0?b.toLowerCase():(d=a.getAttributeNode(b))&&d.specified?d.value:null}),fa}(a);n.find=t,n.expr=t.selectors,n.expr[":"]=n.expr.pseudos,n.uniqueSort=n.unique=t.uniqueSort,n.text=t.getText,n.isXMLDoc=t.isXML,n.contains=t.contains;var u=function(a,b,c){var d=[],e=void 0!==c;while((a=a[b])&&9!==a.nodeType)if(1===a.nodeType){if(e&&n(a).is(c))break;d.push(a)}return d},v=function(a,b){for(var c=[];a;a=a.nextSibling)1===a.nodeType&&a!==b&&c.push(a);return c},w=n.expr.match.needsContext,x=/^<([\w-]+)\s*\/?>(?:<\/\1>|)$/,y=/^.[^:#\[\.,]*$/;function z(a,b,c){if(n.isFunction(b))return n.grep(a,function(a,d){return!!b.call(a,d,a)!==c});if(b.nodeType)return n.grep(a,function(a){return a===b!==c});if("string"==typeof b){if(y.test(b))return n.filter(b,a,c);b=n.filter(b,a)}return n.grep(a,function(a){return h.call(b,a)>-1!==c})}n.filter=function(a,b,c){var d=b[0];return c&&(a=":not("+a+")"),1===b.length&&1===d.nodeType?n.find.matchesSelector(d,a)?[d]:[]:n.find.matches(a,n.grep(b,function(a){return 1===a.nodeType}))},n.fn.extend({find:function(a){var b,c=this.length,d=[],e=this;if("string"!=typeof a)return this.pushStack(n(a).filter(function(){for(b=0;c>b;b++)if(n.contains(e[b],this))return!0}));for(b=0;c>b;b++)n.find(a,e[b],d);return d=this.pushStack(c>1?n.unique(d):d),d.selector=this.selector?this.selector+" "+a:a,d},filter:function(a){return this.pushStack(z(this,a||[],!1))},not:function(a){return this.pushStack(z(this,a||[],!0))},is:function(a){return!!z(this,"string"==typeof a&&w.test(a)?n(a):a||[],!1).length}});var A,B=/^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]*))$/,C=n.fn.init=function(a,b,c){var e,f;if(!a)return this;if(c=c||A,"string"==typeof a){if(e="<"===a[0]&&">"===a[a.length-1]&&a.length>=3?[null,a,null]:B.exec(a),!e||!e[1]&&b)return!b||b.jquery?(b||c).find(a):this.constructor(b).find(a);if(e[1]){if(b=b instanceof 
n?b[0]:b,n.merge(this,n.parseHTML(e[1],b&&b.nodeType?b.ownerDocument||b:d,!0)),x.test(e[1])&&n.isPlainObject(b))for(e in b)n.isFunction(this[e])?this[e](b[e]):this.attr(e,b[e]);return this}return f=d.getElementById(e[2]),f&&f.parentNode&&(this.length=1,this[0]=f),this.context=d,this.selector=a,this}return a.nodeType?(this.context=this[0]=a,this.length=1,this):n.isFunction(a)?void 0!==c.ready?c.ready(a):a(n):(void 0!==a.selector&&(this.selector=a.selector,this.context=a.context),n.makeArray(a,this))};C.prototype=n.fn,A=n(d);var D=/^(?:parents|prev(?:Until|All))/,E={children:!0,contents:!0,next:!0,prev:!0};n.fn.extend({has:function(a){var b=n(a,this),c=b.length;return this.filter(function(){for(var a=0;c>a;a++)if(n.contains(this,b[a]))return!0})},closest:function(a,b){for(var c,d=0,e=this.length,f=[],g=w.test(a)||"string"!=typeof a?n(a,b||this.context):0;e>d;d++)for(c=this[d];c&&c!==b;c=c.parentNode)if(c.nodeType<11&&(g?g.index(c)>-1:1===c.nodeType&&n.find.matchesSelector(c,a))){f.push(c);break}return this.pushStack(f.length>1?n.uniqueSort(f):f)},index:function(a){return a?"string"==typeof a?h.call(n(a),this[0]):h.call(this,a.jquery?a[0]:a):this[0]&&this[0].parentNode?this.first().prevAll().length:-1},add:function(a,b){return this.pushStack(n.uniqueSort(n.merge(this.get(),n(a,b))))},addBack:function(a){return this.add(null==a?this.prevObject:this.prevObject.filter(a))}});function F(a,b){while((a=a[b])&&1!==a.nodeType);return a}n.each({parent:function(a){var b=a.parentNode;return b&&11!==b.nodeType?b:null},parents:function(a){return u(a,"parentNode")},parentsUntil:function(a,b,c){return u(a,"parentNode",c)},next:function(a){return F(a,"nextSibling")},prev:function(a){return F(a,"previousSibling")},nextAll:function(a){return u(a,"nextSibling")},prevAll:function(a){return u(a,"previousSibling")},nextUntil:function(a,b,c){return u(a,"nextSibling",c)},prevUntil:function(a,b,c){return u(a,"previousSibling",c)},siblings:function(a){return 
v((a.parentNode||{}).firstChild,a)},children:function(a){return v(a.firstChild)},contents:function(a){return a.contentDocument||n.merge([],a.childNodes)}},function(a,b){n.fn[a]=function(c,d){var e=n.map(this,b,c);return"Until"!==a.slice(-5)&&(d=c),d&&"string"==typeof d&&(e=n.filter(d,e)),this.length>1&&(E[a]||n.uniqueSort(e),D.test(a)&&e.reverse()),this.pushStack(e)}});var G=/\S+/g;function H(a){var b={};return n.each(a.match(G)||[],function(a,c){b[c]=!0}),b}n.Callbacks=function(a){a="string"==typeof a?H(a):n.extend({},a);var b,c,d,e,f=[],g=[],h=-1,i=function(){for(e=a.once,d=b=!0;g.length;h=-1){c=g.shift();while(++h-1)f.splice(c,1),h>=c&&h--}),this},has:function(a){return a?n.inArray(a,f)>-1:f.length>0},empty:function(){return f&&(f=[]),this},disable:function(){return e=g=[],f=c="",this},disabled:function(){return!f},lock:function(){return e=g=[],c||(f=c=""),this},locked:function(){return!!e},fireWith:function(a,c){return e||(c=c||[],c=[a,c.slice?c.slice():c],g.push(c),b||i()),this},fire:function(){return j.fireWith(this,arguments),this},fired:function(){return!!d}};return j},n.extend({Deferred:function(a){var b=[["resolve","done",n.Callbacks("once memory"),"resolved"],["reject","fail",n.Callbacks("once memory"),"rejected"],["notify","progress",n.Callbacks("memory")]],c="pending",d={state:function(){return c},always:function(){return e.done(arguments).fail(arguments),this},then:function(){var a=arguments;return n.Deferred(function(c){n.each(b,function(b,f){var g=n.isFunction(a[b])&&a[b];e[f[1]](function(){var a=g&&g.apply(this,arguments);a&&n.isFunction(a.promise)?a.promise().progress(c.notify).done(c.resolve).fail(c.reject):c[f[0]+"With"](this===d?c.promise():this,g?[a]:arguments)})}),a=null}).promise()},promise:function(a){return null!=a?n.extend(a,d):d}},e={};return d.pipe=d.then,n.each(b,function(a,f){var g=f[2],h=f[3];d[f[1]]=g.add,h&&g.add(function(){c=h},b[1^a][2].disable,b[2][2].lock),e[f[0]]=function(){return 
e[f[0]+"With"](this===e?d:this,arguments),this},e[f[0]+"With"]=g.fireWith}),d.promise(e),a&&a.call(e,e),e},when:function(a){var b=0,c=e.call(arguments),d=c.length,f=1!==d||a&&n.isFunction(a.promise)?d:0,g=1===f?a:n.Deferred(),h=function(a,b,c){return function(d){b[a]=this,c[a]=arguments.length>1?e.call(arguments):d,c===i?g.notifyWith(b,c):--f||g.resolveWith(b,c)}},i,j,k;if(d>1)for(i=new Array(d),j=new Array(d),k=new Array(d);d>b;b++)c[b]&&n.isFunction(c[b].promise)?c[b].promise().progress(h(b,j,i)).done(h(b,k,c)).fail(g.reject):--f;return f||g.resolveWith(k,c),g.promise()}});var I;n.fn.ready=function(a){return n.ready.promise().done(a),this},n.extend({isReady:!1,readyWait:1,holdReady:function(a){a?n.readyWait++:n.ready(!0)},ready:function(a){(a===!0?--n.readyWait:n.isReady)||(n.isReady=!0,a!==!0&&--n.readyWait>0||(I.resolveWith(d,[n]),n.fn.triggerHandler&&(n(d).triggerHandler("ready"),n(d).off("ready"))))}});function J(){d.removeEventListener("DOMContentLoaded",J),a.removeEventListener("load",J),n.ready()}n.ready.promise=function(b){return I||(I=n.Deferred(),"complete"===d.readyState||"loading"!==d.readyState&&!d.documentElement.doScroll?a.setTimeout(n.ready):(d.addEventListener("DOMContentLoaded",J),a.addEventListener("load",J))),I.promise(b)},n.ready.promise();var K=function(a,b,c,d,e,f,g){var h=0,i=a.length,j=null==c;if("object"===n.type(c)){e=!0;for(h in c)K(a,b,h,c[h],!0,f,g)}else if(void 0!==d&&(e=!0,n.isFunction(d)||(g=!0),j&&(g?(b.call(a,d),b=null):(j=b,b=function(a,b,c){return j.call(n(a),c)})),b))for(;i>h;h++)b(a[h],c,g?d:d.call(a[h],h,b(a[h],c)));return e?a:j?b.call(a):i?b(a[0],c):f},L=function(a){return 1===a.nodeType||9===a.nodeType||!+a.nodeType};function M(){this.expando=n.expando+M.uid++}M.uid=1,M.prototype={register:function(a,b){var c=b||{};return a.nodeType?a[this.expando]=c:Object.defineProperty(a,this.expando,{value:c,writable:!0,configurable:!0}),a[this.expando]},cache:function(a){if(!L(a))return{};var b=a[this.expando];return 
b||(b={},L(a)&&(a.nodeType?a[this.expando]=b:Object.defineProperty(a,this.expando,{value:b,configurable:!0}))),b},set:function(a,b,c){var d,e=this.cache(a);if("string"==typeof b)e[b]=c;else for(d in b)e[d]=b[d];return e},get:function(a,b){return void 0===b?this.cache(a):a[this.expando]&&a[this.expando][b]},access:function(a,b,c){var d;return void 0===b||b&&"string"==typeof b&&void 0===c?(d=this.get(a,b),void 0!==d?d:this.get(a,n.camelCase(b))):(this.set(a,b,c),void 0!==c?c:b)},remove:function(a,b){var c,d,e,f=a[this.expando];if(void 0!==f){if(void 0===b)this.register(a);else{n.isArray(b)?d=b.concat(b.map(n.camelCase)):(e=n.camelCase(b),b in f?d=[b,e]:(d=e,d=d in f?[d]:d.match(G)||[])),c=d.length;while(c--)delete f[d[c]]}(void 0===b||n.isEmptyObject(f))&&(a.nodeType?a[this.expando]=void 0:delete a[this.expando])}},hasData:function(a){var b=a[this.expando];return void 0!==b&&!n.isEmptyObject(b)}};var N=new M,O=new M,P=/^(?:\{[\w\W]*\}|\[[\w\W]*\])$/,Q=/[A-Z]/g;function R(a,b,c){var d;if(void 0===c&&1===a.nodeType)if(d="data-"+b.replace(Q,"-$&").toLowerCase(),c=a.getAttribute(d),"string"==typeof c){try{c="true"===c?!0:"false"===c?!1:"null"===c?null:+c+""===c?+c:P.test(c)?n.parseJSON(c):c; -}catch(e){}O.set(a,b,c)}else c=void 0;return c}n.extend({hasData:function(a){return O.hasData(a)||N.hasData(a)},data:function(a,b,c){return O.access(a,b,c)},removeData:function(a,b){O.remove(a,b)},_data:function(a,b,c){return N.access(a,b,c)},_removeData:function(a,b){N.remove(a,b)}}),n.fn.extend({data:function(a,b){var c,d,e,f=this[0],g=f&&f.attributes;if(void 0===a){if(this.length&&(e=O.get(f),1===f.nodeType&&!N.get(f,"hasDataAttrs"))){c=g.length;while(c--)g[c]&&(d=g[c].name,0===d.indexOf("data-")&&(d=n.camelCase(d.slice(5)),R(f,d,e[d])));N.set(f,"hasDataAttrs",!0)}return e}return"object"==typeof a?this.each(function(){O.set(this,a)}):K(this,function(b){var c,d;if(f&&void 0===b){if(c=O.get(f,a)||O.get(f,a.replace(Q,"-$&").toLowerCase()),void 0!==c)return 
c;if(d=n.camelCase(a),c=O.get(f,d),void 0!==c)return c;if(c=R(f,d,void 0),void 0!==c)return c}else d=n.camelCase(a),this.each(function(){var c=O.get(this,d);O.set(this,d,b),a.indexOf("-")>-1&&void 0!==c&&O.set(this,a,b)})},null,b,arguments.length>1,null,!0)},removeData:function(a){return this.each(function(){O.remove(this,a)})}}),n.extend({queue:function(a,b,c){var d;return a?(b=(b||"fx")+"queue",d=N.get(a,b),c&&(!d||n.isArray(c)?d=N.access(a,b,n.makeArray(c)):d.push(c)),d||[]):void 0},dequeue:function(a,b){b=b||"fx";var c=n.queue(a,b),d=c.length,e=c.shift(),f=n._queueHooks(a,b),g=function(){n.dequeue(a,b)};"inprogress"===e&&(e=c.shift(),d--),e&&("fx"===b&&c.unshift("inprogress"),delete f.stop,e.call(a,g,f)),!d&&f&&f.empty.fire()},_queueHooks:function(a,b){var c=b+"queueHooks";return N.get(a,c)||N.access(a,c,{empty:n.Callbacks("once memory").add(function(){N.remove(a,[b+"queue",c])})})}}),n.fn.extend({queue:function(a,b){var c=2;return"string"!=typeof a&&(b=a,a="fx",c--),arguments.length",""],thead:[1,"
","
"],col:[2,"","
"],tr:[2,"","
"],td:[3,"","
"],_default:[0,"",""]};$.optgroup=$.option,$.tbody=$.tfoot=$.colgroup=$.caption=$.thead,$.th=$.td;function _(a,b){var c="undefined"!=typeof a.getElementsByTagName?a.getElementsByTagName(b||"*"):"undefined"!=typeof a.querySelectorAll?a.querySelectorAll(b||"*"):[];return void 0===b||b&&n.nodeName(a,b)?n.merge([a],c):c}function aa(a,b){for(var c=0,d=a.length;d>c;c++)N.set(a[c],"globalEval",!b||N.get(b[c],"globalEval"))}var ba=/<|&#?\w+;/;function ca(a,b,c,d,e){for(var f,g,h,i,j,k,l=b.createDocumentFragment(),m=[],o=0,p=a.length;p>o;o++)if(f=a[o],f||0===f)if("object"===n.type(f))n.merge(m,f.nodeType?[f]:f);else if(ba.test(f)){g=g||l.appendChild(b.createElement("div")),h=(Y.exec(f)||["",""])[1].toLowerCase(),i=$[h]||$._default,g.innerHTML=i[1]+n.htmlPrefilter(f)+i[2],k=i[0];while(k--)g=g.lastChild;n.merge(m,g.childNodes),g=l.firstChild,g.textContent=""}else m.push(b.createTextNode(f));l.textContent="",o=0;while(f=m[o++])if(d&&n.inArray(f,d)>-1)e&&e.push(f);else if(j=n.contains(f.ownerDocument,f),g=_(l.appendChild(f),"script"),j&&aa(g),c){k=0;while(f=g[k++])Z.test(f.type||"")&&c.push(f)}return l}!function(){var a=d.createDocumentFragment(),b=a.appendChild(d.createElement("div")),c=d.createElement("input");c.setAttribute("type","radio"),c.setAttribute("checked","checked"),c.setAttribute("name","t"),b.appendChild(c),l.checkClone=b.cloneNode(!0).cloneNode(!0).lastChild.checked,b.innerHTML="",l.noCloneChecked=!!b.cloneNode(!0).lastChild.defaultValue}();var da=/^key/,ea=/^(?:mouse|pointer|contextmenu|drag|drop)|click/,fa=/^([^.]*)(?:\.(.+)|)/;function ga(){return!0}function ha(){return!1}function ia(){try{return d.activeElement}catch(a){}}function ja(a,b,c,d,e,f){var g,h;if("object"==typeof b){"string"!=typeof c&&(d=d||c,c=void 0);for(h in b)ja(a,h,c,d,b[h],f);return a}if(null==d&&null==e?(e=c,d=c=void 0):null==e&&("string"==typeof c?(e=d,d=void 0):(e=d,d=c,c=void 0)),e===!1)e=ha;else if(!e)return a;return 1===f&&(g=e,e=function(a){return 
n().off(a),g.apply(this,arguments)},e.guid=g.guid||(g.guid=n.guid++)),a.each(function(){n.event.add(this,b,e,d,c)})}n.event={global:{},add:function(a,b,c,d,e){var f,g,h,i,j,k,l,m,o,p,q,r=N.get(a);if(r){c.handler&&(f=c,c=f.handler,e=f.selector),c.guid||(c.guid=n.guid++),(i=r.events)||(i=r.events={}),(g=r.handle)||(g=r.handle=function(b){return"undefined"!=typeof n&&n.event.triggered!==b.type?n.event.dispatch.apply(a,arguments):void 0}),b=(b||"").match(G)||[""],j=b.length;while(j--)h=fa.exec(b[j])||[],o=q=h[1],p=(h[2]||"").split(".").sort(),o&&(l=n.event.special[o]||{},o=(e?l.delegateType:l.bindType)||o,l=n.event.special[o]||{},k=n.extend({type:o,origType:q,data:d,handler:c,guid:c.guid,selector:e,needsContext:e&&n.expr.match.needsContext.test(e),namespace:p.join(".")},f),(m=i[o])||(m=i[o]=[],m.delegateCount=0,l.setup&&l.setup.call(a,d,p,g)!==!1||a.addEventListener&&a.addEventListener(o,g)),l.add&&(l.add.call(a,k),k.handler.guid||(k.handler.guid=c.guid)),e?m.splice(m.delegateCount++,0,k):m.push(k),n.event.global[o]=!0)}},remove:function(a,b,c,d,e){var f,g,h,i,j,k,l,m,o,p,q,r=N.hasData(a)&&N.get(a);if(r&&(i=r.events)){b=(b||"").match(G)||[""],j=b.length;while(j--)if(h=fa.exec(b[j])||[],o=q=h[1],p=(h[2]||"").split(".").sort(),o){l=n.event.special[o]||{},o=(d?l.delegateType:l.bindType)||o,m=i[o]||[],h=h[2]&&new RegExp("(^|\\.)"+p.join("\\.(?:.*\\.|)")+"(\\.|$)"),g=f=m.length;while(f--)k=m[f],!e&&q!==k.origType||c&&c.guid!==k.guid||h&&!h.test(k.namespace)||d&&d!==k.selector&&("**"!==d||!k.selector)||(m.splice(f,1),k.selector&&m.delegateCount--,l.remove&&l.remove.call(a,k));g&&!m.length&&(l.teardown&&l.teardown.call(a,p,r.handle)!==!1||n.removeEvent(a,o,r.handle),delete i[o])}else for(o in i)n.event.remove(a,o+b[j],c,d,!0);n.isEmptyObject(i)&&N.remove(a,"handle events")}},dispatch:function(a){a=n.event.fix(a);var 
b,c,d,f,g,h=[],i=e.call(arguments),j=(N.get(this,"events")||{})[a.type]||[],k=n.event.special[a.type]||{};if(i[0]=a,a.delegateTarget=this,!k.preDispatch||k.preDispatch.call(this,a)!==!1){h=n.event.handlers.call(this,a,j),b=0;while((f=h[b++])&&!a.isPropagationStopped()){a.currentTarget=f.elem,c=0;while((g=f.handlers[c++])&&!a.isImmediatePropagationStopped())a.rnamespace&&!a.rnamespace.test(g.namespace)||(a.handleObj=g,a.data=g.data,d=((n.event.special[g.origType]||{}).handle||g.handler).apply(f.elem,i),void 0!==d&&(a.result=d)===!1&&(a.preventDefault(),a.stopPropagation()))}return k.postDispatch&&k.postDispatch.call(this,a),a.result}},handlers:function(a,b){var c,d,e,f,g=[],h=b.delegateCount,i=a.target;if(h&&i.nodeType&&("click"!==a.type||isNaN(a.button)||a.button<1))for(;i!==this;i=i.parentNode||this)if(1===i.nodeType&&(i.disabled!==!0||"click"!==a.type)){for(d=[],c=0;h>c;c++)f=b[c],e=f.selector+" ",void 0===d[e]&&(d[e]=f.needsContext?n(e,this).index(i)>-1:n.find(e,this,null,[i]).length),d[e]&&d.push(f);d.length&&g.push({elem:i,handlers:d})}return h]*)\/>/gi,la=/\s*$/g;function pa(a,b){return n.nodeName(a,"table")&&n.nodeName(11!==b.nodeType?b:b.firstChild,"tr")?a.getElementsByTagName("tbody")[0]||a.appendChild(a.ownerDocument.createElement("tbody")):a}function qa(a){return a.type=(null!==a.getAttribute("type"))+"/"+a.type,a}function ra(a){var b=na.exec(a.type);return b?a.type=b[1]:a.removeAttribute("type"),a}function sa(a,b){var c,d,e,f,g,h,i,j;if(1===b.nodeType){if(N.hasData(a)&&(f=N.access(a),g=N.set(b,f),j=f.events)){delete g.handle,g.events={};for(e in j)for(c=0,d=j[e].length;d>c;c++)n.event.add(b,e,j[e][c])}O.hasData(a)&&(h=O.access(a),i=n.extend({},h),O.set(b,i))}}function ta(a,b){var c=b.nodeName.toLowerCase();"input"===c&&X.test(a.type)?b.checked=a.checked:"input"!==c&&"textarea"!==c||(b.defaultValue=a.defaultValue)}function ua(a,b,c,d){b=f.apply([],b);var e,g,h,i,j,k,m=0,o=a.length,p=o-1,q=b[0],r=n.isFunction(q);if(r||o>1&&"string"==typeof 
q&&!l.checkClone&&ma.test(q))return a.each(function(e){var f=a.eq(e);r&&(b[0]=q.call(this,e,f.html())),ua(f,b,c,d)});if(o&&(e=ca(b,a[0].ownerDocument,!1,a,d),g=e.firstChild,1===e.childNodes.length&&(e=g),g||d)){for(h=n.map(_(e,"script"),qa),i=h.length;o>m;m++)j=e,m!==p&&(j=n.clone(j,!0,!0),i&&n.merge(h,_(j,"script"))),c.call(a[m],j,m);if(i)for(k=h[h.length-1].ownerDocument,n.map(h,ra),m=0;i>m;m++)j=h[m],Z.test(j.type||"")&&!N.access(j,"globalEval")&&n.contains(k,j)&&(j.src?n._evalUrl&&n._evalUrl(j.src):n.globalEval(j.textContent.replace(oa,"")))}return a}function va(a,b,c){for(var d,e=b?n.filter(b,a):a,f=0;null!=(d=e[f]);f++)c||1!==d.nodeType||n.cleanData(_(d)),d.parentNode&&(c&&n.contains(d.ownerDocument,d)&&aa(_(d,"script")),d.parentNode.removeChild(d));return a}n.extend({htmlPrefilter:function(a){return a.replace(ka,"<$1>")},clone:function(a,b,c){var d,e,f,g,h=a.cloneNode(!0),i=n.contains(a.ownerDocument,a);if(!(l.noCloneChecked||1!==a.nodeType&&11!==a.nodeType||n.isXMLDoc(a)))for(g=_(h),f=_(a),d=0,e=f.length;e>d;d++)ta(f[d],g[d]);if(b)if(c)for(f=f||_(a),g=g||_(h),d=0,e=f.length;e>d;d++)sa(f[d],g[d]);else sa(a,h);return g=_(h,"script"),g.length>0&&aa(g,!i&&_(a,"script")),h},cleanData:function(a){for(var b,c,d,e=n.event.special,f=0;void 0!==(c=a[f]);f++)if(L(c)){if(b=c[N.expando]){if(b.events)for(d in b.events)e[d]?n.event.remove(c,d):n.removeEvent(c,d,b.handle);c[N.expando]=void 0}c[O.expando]&&(c[O.expando]=void 0)}}}),n.fn.extend({domManip:ua,detach:function(a){return va(this,a,!0)},remove:function(a){return va(this,a)},text:function(a){return K(this,function(a){return void 0===a?n.text(this):this.empty().each(function(){1!==this.nodeType&&11!==this.nodeType&&9!==this.nodeType||(this.textContent=a)})},null,a,arguments.length)},append:function(){return ua(this,arguments,function(a){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var b=pa(this,a);b.appendChild(a)}})},prepend:function(){return 
ua(this,arguments,function(a){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var b=pa(this,a);b.insertBefore(a,b.firstChild)}})},before:function(){return ua(this,arguments,function(a){this.parentNode&&this.parentNode.insertBefore(a,this)})},after:function(){return ua(this,arguments,function(a){this.parentNode&&this.parentNode.insertBefore(a,this.nextSibling)})},empty:function(){for(var a,b=0;null!=(a=this[b]);b++)1===a.nodeType&&(n.cleanData(_(a,!1)),a.textContent="");return this},clone:function(a,b){return a=null==a?!1:a,b=null==b?a:b,this.map(function(){return n.clone(this,a,b)})},html:function(a){return K(this,function(a){var b=this[0]||{},c=0,d=this.length;if(void 0===a&&1===b.nodeType)return b.innerHTML;if("string"==typeof a&&!la.test(a)&&!$[(Y.exec(a)||["",""])[1].toLowerCase()]){a=n.htmlPrefilter(a);try{for(;d>c;c++)b=this[c]||{},1===b.nodeType&&(n.cleanData(_(b,!1)),b.innerHTML=a);b=0}catch(e){}}b&&this.empty().append(a)},null,a,arguments.length)},replaceWith:function(){var a=[];return ua(this,arguments,function(b){var c=this.parentNode;n.inArray(this,a)<0&&(n.cleanData(_(this)),c&&c.replaceChild(b,this))},a)}}),n.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(a,b){n.fn[a]=function(a){for(var c,d=[],e=n(a),f=e.length-1,h=0;f>=h;h++)c=h===f?this:this.clone(!0),n(e[h])[b](c),g.apply(d,c.get());return this.pushStack(d)}});var wa,xa={HTML:"block",BODY:"block"};function ya(a,b){var c=n(b.createElement(a)).appendTo(b.body),d=n.css(c[0],"display");return c.detach(),d}function za(a){var b=d,c=xa[a];return c||(c=ya(a,b),"none"!==c&&c||(wa=(wa||n("