From 83a9c225441166bfcd9fc9da7d06ba00f64921a1 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 25 Jul 2024 12:16:49 +1000 Subject: [PATCH 001/105] Mute org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT test {inlinestats.BeforeKeep ASYNC} #111257 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 23c8866989411..b0dcbe3ecaf4b 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -108,6 +108,9 @@ tests: - class: org.elasticsearch.repositories.azure.AzureBlobContainerRetriesTests method: testReadNonexistentBlobThrowsNoSuchFileException issue: https://github.com/elastic/elasticsearch/issues/111233 +- class: org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT + method: test {inlinestats.BeforeKeep ASYNC} + issue: https://github.com/elastic/elasticsearch/issues/111257 # Examples: # From 92eab3c8c938ff59c0b5ff72cbcbef684ac68884 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 25 Jul 2024 14:32:31 +1000 Subject: [PATCH 002/105] Mute org.elasticsearch.xpack.esql.qa.multi_node.EsqlSpecIT test {inlinestats.BeforeKeep ASYNC} #111259 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index b0dcbe3ecaf4b..0c4c1abee8d3d 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -111,6 +111,9 @@ tests: - class: org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT method: test {inlinestats.BeforeKeep ASYNC} issue: https://github.com/elastic/elasticsearch/issues/111257 +- class: org.elasticsearch.xpack.esql.qa.multi_node.EsqlSpecIT + method: test {inlinestats.BeforeKeep ASYNC} + issue: https://github.com/elastic/elasticsearch/issues/111259 # Examples: # From fab2521a8c8aee8297652c40fa148e9398a554f1 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 25 Jul 2024 14:39:10 +1000 Subject: [PATCH 003/105] Mute org.elasticsearch.xpack.esql.qa.multi_node.EsqlSpecIT test {inlinestats.BeforeKeep SYNC} #111260 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 0c4c1abee8d3d..e4405dbe9f5cf 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -114,6 +114,9 @@ tests: - class: org.elasticsearch.xpack.esql.qa.multi_node.EsqlSpecIT method: test {inlinestats.BeforeKeep ASYNC} issue: https://github.com/elastic/elasticsearch/issues/111259 +- class: org.elasticsearch.xpack.esql.qa.multi_node.EsqlSpecIT + method: test {inlinestats.BeforeKeep SYNC} + issue: https://github.com/elastic/elasticsearch/issues/111260 # Examples: # From a1706dcaa19dc566858ae2094b434c73e1770255 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 25 Jul 2024 15:25:51 +1000 Subject: [PATCH 004/105] Mute org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT test {inlinestats.BeforeKeep SYNC} #111262 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index e4405dbe9f5cf..125bc0e496a2e 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -117,6 +117,9 @@ tests: - class: org.elasticsearch.xpack.esql.qa.multi_node.EsqlSpecIT method: test {inlinestats.BeforeKeep SYNC} issue: https://github.com/elastic/elasticsearch/issues/111260 +- class: org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT + method: test 
{inlinestats.BeforeKeep SYNC} + issue: https://github.com/elastic/elasticsearch/issues/111262 # Examples: # From 0c69dae7113b2da4548398410b290bd65977ebb0 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 25 Jul 2024 16:08:11 +1000 Subject: [PATCH 005/105] Mute org.elasticsearch.xpack.esql.qa.single_node.RestEsqlIT testInlineStatsProfile {SYNC} #111263 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 125bc0e496a2e..b5751c69223d5 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -120,6 +120,9 @@ tests: - class: org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT method: test {inlinestats.BeforeKeep SYNC} issue: https://github.com/elastic/elasticsearch/issues/111262 +- class: org.elasticsearch.xpack.esql.qa.single_node.RestEsqlIT + method: testInlineStatsProfile {SYNC} + issue: https://github.com/elastic/elasticsearch/issues/111263 # Examples: # From 47b08777cfe7459a985297c5969f3041bbb14a26 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 25 Jul 2024 16:08:17 +1000 Subject: [PATCH 006/105] Mute org.elasticsearch.xpack.esql.qa.single_node.RestEsqlIT testInlineStatsProfile {ASYNC} #111264 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index b5751c69223d5..df491aa34b896 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -123,6 +123,9 @@ tests: - class: org.elasticsearch.xpack.esql.qa.single_node.RestEsqlIT method: testInlineStatsProfile {SYNC} issue: https://github.com/elastic/elasticsearch/issues/111263 +- class: org.elasticsearch.xpack.esql.qa.single_node.RestEsqlIT + method: testInlineStatsProfile {ASYNC} + issue: https://github.com/elastic/elasticsearch/issues/111264 # Examples: # From 38bcf07379393e048b3093a04df868e6ed0ff6e9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= Date: Thu, 25 Jul 2024 09:10:30 +0200 Subject: [PATCH 007/105] Explicit handling of a different Executor in TransportAction (#100895) --- .../reindex/AsyncBulkByScrollActionTests.java | 3 +- .../rest/root/TransportMainAction.java | 3 +- .../netty4/Netty4ChunkedContinuationsIT.java | 37 ++-- .../support/HandledTransportAction.java | 4 +- .../action/support/TransportAction.java | 47 ++++- .../support/nodes/TransportNodesAction.java | 4 +- .../TransportReplicationAction.java | 3 +- .../shard/TransportSingleShardAction.java | 5 +- .../elasticsearch/health/GetHealthAction.java | 3 +- ...erifyNodeRepositoryCoordinationAction.java | 3 +- .../action/ActionModuleTests.java | 3 +- ...portActionFilterChainRefCountingTests.java | 2 +- .../TransportActionFilterChainTests.java | 7 +- .../action/support/TransportActionTests.java | 167 ++++++++++++++++++ .../internal/node/NodeClientHeadersTests.java | 3 +- .../indices/RestValidateQueryActionTests.java | 4 +- .../elasticsearch/tasks/TaskManagerTests.java | 8 +- .../DeleteInternalCcrRepositoryAction.java | 3 +- .../PutInternalCcrRepositoryAction.java | 3 +- .../action/RestTermsEnumActionTests.java | 4 +- ...DownsampleShardPersistentTaskExecutor.java | 3 +- .../action/GetGlobalCheckpointsAction.java | 3 +- .../TransportGetTrainedModelsStatsAction.java | 32 ++-- .../xpack/ml/LocalStateMachineLearning.java | 8 +- .../action/TransportGetFlamegraphAction.java | 3 +- .../action/TransportGetStackTracesAction.java | 3 +- .../TransportGetTopNFunctionsAction.java | 3 +- 
.../action/TransportRollupSearchAction.java | 2 +- .../security/action/TransportGrantAction.java | 3 +- .../TransportBaseUpdateApiKeyAction.java | 3 +- .../apikey/TransportCreateApiKeyAction.java | 3 +- ...ansportCreateCrossClusterApiKeyAction.java | 3 +- .../apikey/TransportGetApiKeyAction.java | 3 +- .../apikey/TransportQueryApiKeyAction.java | 3 +- .../TransportGetBuiltinPrivilegesAction.java | 3 +- .../role/TransportBulkDeleteRolesAction.java | 3 +- .../role/TransportBulkPutRolesAction.java | 3 +- .../role/TransportDeleteRoleAction.java | 3 +- .../action/role/TransportGetRolesAction.java | 3 +- .../action/role/TransportPutRoleAction.java | 3 +- .../action/role/TransportQueryRoleAction.java | 3 +- ...tReloadRemoteClusterCredentialsAction.java | 8 +- .../action/user/TransportQueryUserAction.java | 3 +- ...TransportSLMGetExpiredSnapshotsAction.java | 3 +- .../TransportTestGrokPatternAction.java | 10 +- 45 files changed, 357 insertions(+), 79 deletions(-) create mode 100644 server/src/test/java/org/elasticsearch/action/support/TransportActionTests.java diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/AsyncBulkByScrollActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/AsyncBulkByScrollActionTests.java index c40a4f72bc133..47505919ba7d2 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/AsyncBulkByScrollActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/AsyncBulkByScrollActionTests.java @@ -49,6 +49,7 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AbstractRunnable; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.TimeValue; @@ -992,7 +993,7 @@ private static class DummyTransportAsyncBulkByScrollAction extends TransportActi BulkByScrollResponse> { protected DummyTransportAsyncBulkByScrollAction(String actionName, ActionFilters actionFilters, TaskManager taskManager) { - super(actionName, actionFilters, taskManager); + super(actionName, actionFilters, taskManager, EsExecutors.DIRECT_EXECUTOR_SERVICE); } @Override diff --git a/modules/rest-root/src/main/java/org/elasticsearch/rest/root/TransportMainAction.java b/modules/rest-root/src/main/java/org/elasticsearch/rest/root/TransportMainAction.java index 6b4b0a52b643a..2d378c12823ff 100644 --- a/modules/rest-root/src/main/java/org/elasticsearch/rest/root/TransportMainAction.java +++ b/modules/rest-root/src/main/java/org/elasticsearch/rest/root/TransportMainAction.java @@ -16,6 +16,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.node.Node; import org.elasticsearch.tasks.Task; @@ -33,7 +34,7 @@ public TransportMainAction( ActionFilters actionFilters, ClusterService clusterService ) { - super(MainRestPlugin.MAIN_ACTION.name(), actionFilters, transportService.getTaskManager()); + super(MainRestPlugin.MAIN_ACTION.name(), actionFilters, transportService.getTaskManager(), EsExecutors.DIRECT_EXECUTOR_SERVICE); this.nodeName = Node.NODE_NAME_SETTING.get(settings); this.clusterService = clusterService; } diff --git 
a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4ChunkedContinuationsIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4ChunkedContinuationsIT.java index c4c35b410af78..46684faf9fb66 100644 --- a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4ChunkedContinuationsIT.java +++ b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4ChunkedContinuationsIT.java @@ -20,7 +20,6 @@ import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.action.support.CountDownActionListener; import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.action.support.TransportAction; @@ -71,6 +70,7 @@ import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskCancelledException; +import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockLog; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.threadpool.ThreadPool; @@ -435,13 +435,22 @@ public static class TransportYieldsContinuationsAction extends TransportAction listener) { - executor.execute(ActionRunnable.supply(listener, () -> new Response(request.failIndex, executor))); + var response = new Response(request.failIndex, executor); + try { + listener.onResponse(response); + } catch (Exception e) { + ESTestCase.fail(e); + } } } @@ -585,18 +594,22 @@ public static class TransportInfiniteContinuationsAction extends TransportAction @Inject public TransportInfiniteContinuationsAction(ActionFilters actionFilters, TransportService transportService) { - super(TYPE.name(), actionFilters, transportService.getTaskManager()); - this.executor = transportService.getThreadPool().executor(ThreadPool.Names.GENERIC); + this(actionFilters, transportService, transportService.getThreadPool().executor(ThreadPool.Names.GENERIC)); + } + + TransportInfiniteContinuationsAction(ActionFilters actionFilters, TransportService transportService, ExecutorService executor) { + super(TYPE.name(), actionFilters, transportService.getTaskManager(), executor); + this.executor = executor; } @Override protected void doExecute(Task task, Request request, ActionListener listener) { - executor.execute( - ActionRunnable.supply( - ActionTestUtils.assertNoFailureListener(listener::onResponse), - () -> new Response(randomFrom(executor, EsExecutors.DIRECT_EXECUTOR_SERVICE)) - ) - ); + var response = new Response(randomFrom(executor, EsExecutors.DIRECT_EXECUTOR_SERVICE)); + try { + listener.onResponse(response); + } catch (Exception e) { + ESTestCase.fail(e); + } } } diff --git a/server/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java b/server/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java index 69bdfdea31ae4..b9200a0e32736 100644 --- a/server/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java @@ -40,14 +40,14 @@ protected HandledTransportAction( Writeable.Reader requestReader, Executor executor ) { - super(actionName, actionFilters, transportService.getTaskManager()); + super(actionName, actionFilters, transportService.getTaskManager(), executor); 
transportService.registerRequestHandler( actionName, executor, false, canTripCircuitBreaker, requestReader, - (request, channel, task) -> execute(task, request, new ChannelActionListener<>(channel)) + (request, channel, task) -> executeDirect(task, request, new ChannelActionListener<>(channel)) ); } } diff --git a/server/src/main/java/org/elasticsearch/action/support/TransportAction.java b/server/src/main/java/org/elasticsearch/action/support/TransportAction.java index 222941981f05a..65a7e2302b9ae 100644 --- a/server/src/main/java/org/elasticsearch/action/support/TransportAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/TransportAction.java @@ -14,11 +14,14 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionRunnable; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskManager; +import java.util.concurrent.Executor; import java.util.concurrent.atomic.AtomicInteger; public abstract class TransportAction { @@ -26,22 +29,46 @@ public abstract class TransportAction { + void execute(Task task, Request request, ActionListener listener); + } + + protected TransportAction(String actionName, ActionFilters actionFilters, TaskManager taskManager, Executor executor) { this.actionName = actionName; this.filters = actionFilters.filters(); this.taskManager = taskManager; + this.executor = executor; } /** * Use this method when the transport action should continue to run in the context of the current task */ + protected final void executeDirect(Task task, Request request, ActionListener listener) { + handleExecution(task, request, listener, this::doExecute); + } + public final void execute(Task task, Request request, ActionListener listener) { + handleExecution( + task, + request, + listener, + executor == EsExecutors.DIRECT_EXECUTOR_SERVICE ? this::doExecute : this::doExecuteForking + ); + } + + private void handleExecution( + Task task, + Request request, + ActionListener listener, + TransportActionHandler handler + ) { final ActionRequestValidationException validationException; try { validationException = request.validate(); @@ -64,10 +91,14 @@ public final void execute(Task task, Request request, ActionListener l // Releasables#releaseOnce to avoid a double-release. 
request.mustIncRef(); final var releaseRef = Releasables.releaseOnce(request::decRef); - RequestFilterChain requestFilterChain = new RequestFilterChain<>(this, logger, releaseRef); + RequestFilterChain requestFilterChain = new RequestFilterChain<>(this, logger, handler, releaseRef); requestFilterChain.proceed(task, actionName, request, ActionListener.runBefore(listener, releaseRef::close)); } + private void doExecuteForking(Task task, Request request, ActionListener listener) { + executor.execute(ActionRunnable.wrap(listener, l -> doExecute(task, request, l))); + } + protected abstract void doExecute(Task task, Request request, ActionListener listener); private static class RequestFilterChain @@ -75,13 +106,20 @@ private static class RequestFilterChain { private final TransportAction action; + private final TransportActionHandler handler; private final AtomicInteger index = new AtomicInteger(); private final Logger logger; private final Releasable releaseRef; - private RequestFilterChain(TransportAction action, Logger logger, Releasable releaseRef) { + private RequestFilterChain( + TransportAction action, + Logger logger, + TransportActionHandler handler, + Releasable releaseRef + ) { this.action = action; this.logger = logger; + this.handler = handler; this.releaseRef = releaseRef; } @@ -93,7 +131,7 @@ public void proceed(Task task, String actionName, Request request, ActionListene this.action.filters[i].apply(task, actionName, request, listener, this); } else if (i == this.action.filters.length) { try (releaseRef) { - this.action.doExecute(task, request, listener); + handler.execute(task, request, listener); } } else { listener.onFailure(new IllegalStateException("proceed was called too many times")); @@ -103,7 +141,6 @@ public void proceed(Task task, String actionName, Request request, ActionListene listener.onFailure(e); } } - } /** diff --git a/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java b/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java index fcd513b175bb1..347edd0916fc5 100644 --- a/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java @@ -78,7 +78,9 @@ protected TransportNodesAction( Writeable.Reader nodeRequest, Executor executor ) { - super(actionName, actionFilters, transportService.getTaskManager()); + // Only part of this action execution needs to be forked off - coordination can run on SAME because it's only O(#nodes) work. + // Hence the separate "finalExecutor", and why we run the whole TransportAction.execute on SAME.
+ super(actionName, actionFilters, transportService.getTaskManager(), EsExecutors.DIRECT_EXECUTOR_SERVICE); assert executor.equals(EsExecutors.DIRECT_EXECUTOR_SERVICE) == false : "TransportNodesAction must always fork off the transport thread"; this.clusterService = Objects.requireNonNull(clusterService); diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java index c2d7e173fd0bf..3c97bda2ef8d0 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java @@ -172,7 +172,8 @@ protected TransportReplicationAction( SyncGlobalCheckpointAfterOperation syncGlobalCheckpointAfterOperation, PrimaryActionExecution primaryActionExecution ) { - super(actionName, actionFilters, transportService.getTaskManager()); + // TODO: consider passing the executor, investigate doExecute and let InboundHandler/TransportAction handle concurrency. + super(actionName, actionFilters, transportService.getTaskManager(), EsExecutors.DIRECT_EXECUTOR_SERVICE); assert syncGlobalCheckpointAfterOperation != null : "Must specify global checkpoint sync behaviour"; assert primaryActionExecution != null : "Must specify primary action execution behaviour"; this.threadPool = threadPool; diff --git a/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java b/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java index a6a6b2c332a0a..180aa3b336149 100644 --- a/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java @@ -72,7 +72,8 @@ protected TransportSingleShardAction( Writeable.Reader request, Executor executor ) { - super(actionName, actionFilters, transportService.getTaskManager()); + // TODO: consider passing the executor, remove it from doExecute and let InboundHandler/TransportAction handle concurrency. 
+ super(actionName, actionFilters, transportService.getTaskManager(), EsExecutors.DIRECT_EXECUTOR_SERVICE); this.threadPool = threadPool; this.clusterService = clusterService; this.transportService = transportService; @@ -250,7 +251,7 @@ private class TransportHandler implements TransportRequestHandler { @Override public void messageReceived(Request request, final TransportChannel channel, Task task) throws Exception { // if we have a local operation, execute it on a thread since we don't spawn - execute(task, request, new ChannelActionListener<>(channel)); + executeDirect(task, request, new ChannelActionListener<>(channel)); } } diff --git a/server/src/main/java/org/elasticsearch/health/GetHealthAction.java b/server/src/main/java/org/elasticsearch/health/GetHealthAction.java index eeff060c174da..5a5f8f6d93c47 100644 --- a/server/src/main/java/org/elasticsearch/health/GetHealthAction.java +++ b/server/src/main/java/org/elasticsearch/health/GetHealthAction.java @@ -22,6 +22,7 @@ import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.core.Nullable; import org.elasticsearch.health.stats.HealthApiStats; @@ -199,7 +200,7 @@ public LocalAction( NodeClient client, HealthApiStats healthApiStats ) { - super(NAME, actionFilters, transportService.getTaskManager()); + super(NAME, actionFilters, transportService.getTaskManager(), EsExecutors.DIRECT_EXECUTOR_SERVICE); this.clusterService = clusterService; this.healthService = healthService; this.client = client; diff --git a/server/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryCoordinationAction.java b/server/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryCoordinationAction.java index b892ff93c7a9c..8d15510c308e2 100644 --- a/server/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryCoordinationAction.java +++ b/server/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryCoordinationAction.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.repositories.VerifyNodeRepositoryAction.Request; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportException; @@ -68,7 +69,7 @@ public LocalAction( ClusterService clusterService, NodeClient client ) { - super(NAME, actionFilters, transportService.getTaskManager()); + super(NAME, actionFilters, transportService.getTaskManager(), EsExecutors.DIRECT_EXECUTOR_SERVICE); this.transportService = transportService; this.clusterService = clusterService; this.client = client; diff --git a/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java b/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java index 7afa7adedc7bf..c015dc6177cad 100644 --- a/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java +++ b/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.settings.SettingsModule; +import org.elasticsearch.common.util.concurrent.EsExecutors; import 
org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.indices.TestIndexNameExpressionResolver; @@ -88,7 +89,7 @@ public ActionRequestValidationException validate() { } class FakeTransportAction extends TransportAction { protected FakeTransportAction(String actionName, ActionFilters actionFilters, TaskManager taskManager) { - super(actionName, actionFilters, taskManager); + super(actionName, actionFilters, taskManager, EsExecutors.DIRECT_EXECUTOR_SERVICE); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainRefCountingTests.java b/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainRefCountingTests.java index a199003fc59c4..f4fa42bd1204f 100644 --- a/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainRefCountingTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainRefCountingTests.java @@ -152,7 +152,7 @@ public static class TestAction extends TransportAction { @Inject public TestAction(TransportService transportService, ActionFilters actionFilters) { - super(TYPE.name(), actionFilters, transportService.getTaskManager()); + super(TYPE.name(), actionFilters, transportService.getTaskManager(), EsExecutors.DIRECT_EXECUTOR_SERVICE); threadPool = transportService.getThreadPool(); } diff --git a/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java b/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java index 82c204b1d0b88..f793255f3b98d 100644 --- a/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.action.LatchedActionListener; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.node.Node; import org.elasticsearch.tasks.Task; @@ -79,7 +80,8 @@ public void testActionFiltersRequest() throws InterruptedException { TransportAction transportAction = new TransportAction( actionName, actionFilters, - new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet()) + new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet()), + EsExecutors.DIRECT_EXECUTOR_SERVICE ) { @Override protected void doExecute(Task task, TestRequest request, ActionListener listener) { @@ -165,7 +167,8 @@ public void exe TransportAction transportAction = new TransportAction( actionName, actionFilters, - new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet()) + new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet()), + EsExecutors.DIRECT_EXECUTOR_SERVICE ) { @Override protected void doExecute(Task task, TestRequest request, ActionListener listener) { diff --git a/server/src/test/java/org/elasticsearch/action/support/TransportActionTests.java b/server/src/test/java/org/elasticsearch/action/support/TransportActionTests.java new file mode 100644 index 0000000000000..97fa537874397 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/support/TransportActionTests.java @@ -0,0 +1,167 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.action.support; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.node.Node; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskManager; +import org.elasticsearch.telemetry.metric.MeterRegistry; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.junit.After; +import org.junit.Before; + +import java.util.Collections; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Executor; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.notNullValue; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.not; + +public class TransportActionTests extends ESTestCase { + + private ThreadPool threadPool; + + @Before + public void init() throws Exception { + threadPool = new ThreadPool( + Settings.builder().put(Node.NODE_NAME_SETTING.getKey(), "TransportActionTests").build(), + MeterRegistry.NOOP + ); + } + + @After + public void shutdown() throws Exception { + terminate(threadPool); + } + + public void testDirectExecuteRunsOnCallingThread() throws ExecutionException, InterruptedException { + + String actionName = randomAlphaOfLength(randomInt(30)); + var testExecutor = new Executor() { + @Override + public void execute(Runnable command) { + fail("executeDirect should not run a TransportAction on a different executor"); + } + }; + + var transportAction = getTestTransportAction(actionName, testExecutor); + + PlainActionFuture future = new PlainActionFuture<>(); + + transportAction.executeDirect(null, new TestRequest(), future); + + var response = future.get(); + assertThat(response, notNullValue()); + assertThat(response.executingThreadName, equalTo(Thread.currentThread().getName())); + assertThat(response.executingThreadId, equalTo(Thread.currentThread().getId())); + } + + public void testExecuteRunsOnExecutor() throws ExecutionException, InterruptedException { + + String actionName = randomAlphaOfLength(randomInt(30)); + + boolean[] executedOnExecutor = new boolean[1]; + var testExecutor = new Executor() { + @Override + public void execute(Runnable command) { + command.run(); + executedOnExecutor[0] = true; + } + }; + + var transportAction = getTestTransportAction(actionName, testExecutor); + + PlainActionFuture future = new PlainActionFuture<>(); + + ActionTestUtils.execute(transportAction, null, new TestRequest(), future); + + var response = future.get(); + assertThat(response, notNullValue()); + assertTrue(executedOnExecutor[0]); + } + + public void testExecuteWithGenericExecutorRunsOnDifferentThread() throws ExecutionException, InterruptedException { + + String actionName = randomAlphaOfLength(randomInt(30)); + var transportAction = getTestTransportAction(actionName, threadPool.executor(ThreadPool.Names.GENERIC)); + + PlainActionFuture future = new PlainActionFuture<>(); + + 
ActionTestUtils.execute(transportAction, null, new TestRequest(), future); + + var response = future.get(); + assertThat(response, notNullValue()); + assertThat(response.executingThreadName, not(equalTo(Thread.currentThread().getName()))); + assertThat(response.executingThreadName, containsString("[generic]")); + assertThat(response.executingThreadId, not(equalTo(Thread.currentThread().getId()))); + } + + public void testExecuteWithDirectExecutorRunsOnCallingThread() throws ExecutionException, InterruptedException { + + String actionName = randomAlphaOfLength(randomInt(30)); + var transportAction = getTestTransportAction(actionName, EsExecutors.DIRECT_EXECUTOR_SERVICE); + + PlainActionFuture future = new PlainActionFuture<>(); + + ActionTestUtils.execute(transportAction, null, new TestRequest(), future); + + var response = future.get(); + assertThat(response, notNullValue()); + assertThat(response.executingThreadName, equalTo(Thread.currentThread().getName())); + assertThat(response.executingThreadId, equalTo(Thread.currentThread().getId())); + } + + private TransportAction getTestTransportAction(String actionName, Executor executor) { + ActionFilters actionFilters = new ActionFilters(Collections.emptySet()); + TransportAction transportAction = new TransportAction<>( + actionName, + actionFilters, + new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet()), + executor + ) { + @Override + protected void doExecute(Task task, TestRequest request, ActionListener listener) { + listener.onResponse(new TestResponse(Thread.currentThread().getName(), Thread.currentThread().getId())); + } + }; + return transportAction; + } + + private static class TestRequest extends ActionRequest { + @Override + public ActionRequestValidationException validate() { + return null; + } + } + + private static class TestResponse extends ActionResponse { + + private final String executingThreadName; + private final long executingThreadId; + + TestResponse(String executingThreadName, long executingThreadId) { + this.executingThreadName = executingThreadName; + this.executingThreadId = executingThreadId; + } + + @Override + public void writeTo(StreamOutput out) {} + } +} diff --git a/server/src/test/java/org/elasticsearch/client/internal/node/NodeClientHeadersTests.java b/server/src/test/java/org/elasticsearch/client/internal/node/NodeClientHeadersTests.java index 9aea310180410..84537359e0399 100644 --- a/server/src/test/java/org/elasticsearch/client/internal/node/NodeClientHeadersTests.java +++ b/server/src/test/java/org/elasticsearch/client/internal/node/NodeClientHeadersTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.client.internal.AbstractClientHeadersTestCase; import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.transport.Transport; @@ -54,7 +55,7 @@ private Actions(ActionType[] actions, TaskManager taskManager) { private static class InternalTransportAction extends TransportAction { private InternalTransportAction(String actionName, TaskManager taskManager) { - super(actionName, EMPTY_FILTERS, taskManager); + super(actionName, EMPTY_FILTERS, taskManager, EsExecutors.DIRECT_EXECUTOR_SERVICE); } @Override diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java
b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java index 59ab7ec719cf4..1270c23227756 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; @@ -74,7 +75,8 @@ public void stubValidateQueryAction() { final TransportAction transportAction = new TransportAction<>( ValidateQueryAction.NAME, new ActionFilters(Collections.emptySet()), - taskManager + taskManager, + EsExecutors.DIRECT_EXECUTOR_SERVICE ) { @Override protected void doExecute(Task task, ActionRequest request, ActionListener listener) {} diff --git a/server/src/test/java/org/elasticsearch/tasks/TaskManagerTests.java b/server/src/test/java/org/elasticsearch/tasks/TaskManagerTests.java index 05150cd5dd362..d15eae47968e4 100644 --- a/server/src/test/java/org/elasticsearch/tasks/TaskManagerTests.java +++ b/server/src/test/java/org/elasticsearch/tasks/TaskManagerTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.common.network.CloseableChannel; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; @@ -335,7 +336,12 @@ public void testRegisterAndExecuteStartsAndStopsTracing() { final Task task = taskManager.registerAndExecute( "testType", - new TransportAction("actionName", new ActionFilters(Set.of()), taskManager) { + new TransportAction( + "actionName", + new ActionFilters(Set.of()), + taskManager, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ) { @Override protected void doExecute(Task task, ActionRequest request, ActionListener listener) { listener.onResponse(new ActionResponse() { diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/DeleteInternalCcrRepositoryAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/DeleteInternalCcrRepositoryAction.java index f8e4cda1501b6..7f6a04bfee746 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/DeleteInternalCcrRepositoryAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/DeleteInternalCcrRepositoryAction.java @@ -13,6 +13,7 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; @@ -38,7 +39,7 @@ public TransportDeleteInternalRepositoryAction( ActionFilters actionFilters, TransportService transportService ) { - super(NAME, actionFilters, transportService.getTaskManager()); + super(NAME, actionFilters, transportService.getTaskManager(), 
EsExecutors.DIRECT_EXECUTOR_SERVICE); this.repositoriesService = repositoriesService; } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutInternalCcrRepositoryAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutInternalCcrRepositoryAction.java index 0de1715e17a14..497339930d551 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutInternalCcrRepositoryAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutInternalCcrRepositoryAction.java @@ -13,6 +13,7 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; @@ -38,7 +39,7 @@ public TransportPutInternalRepositoryAction( ActionFilters actionFilters, TransportService transportService ) { - super(NAME, actionFilters, transportService.getTaskManager()); + super(NAME, actionFilters, transportService.getTaskManager(), EsExecutors.DIRECT_EXECUTOR_SERVICE); this.repositoriesService = repositoriesService; } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/action/RestTermsEnumActionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/action/RestTermsEnumActionTests.java index 2ea372a84b66c..9df49c72baee0 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/action/RestTermsEnumActionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/action/RestTermsEnumActionTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -71,7 +72,8 @@ public static void stubTermEnumAction() { final TransportAction transportAction = new TransportAction<>( TermsEnumAction.NAME, new ActionFilters(Collections.emptySet()), - taskManager + taskManager, + EsExecutors.DIRECT_EXECUTOR_SERVICE ) { @Override protected void doExecute(Task task, ActionRequest request, ActionListener listener) {} diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardPersistentTaskExecutor.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardPersistentTaskExecutor.java index 5e6f8b6b5b18e..e66c88f70a93e 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardPersistentTaskExecutor.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardPersistentTaskExecutor.java @@ -315,7 +315,8 @@ public TA( IndicesService indicesService, DownsampleMetrics downsampleMetrics ) { - super(NAME, actionFilters, transportService.getTaskManager()); + // TODO: consider moving to Downsample.DOWSAMPLE_TASK_THREAD_POOL_NAME and simplify realNodeOperation + super(NAME, actionFilters, transportService.getTaskManager(), EsExecutors.DIRECT_EXECUTOR_SERVICE); this.client = client; 
this.indicesService = indicesService; this.downsampleMetrics = downsampleMetrics; diff --git a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/GetGlobalCheckpointsAction.java b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/GetGlobalCheckpointsAction.java index a87297702bd30..7cc501ff888f4 100644 --- a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/GetGlobalCheckpointsAction.java +++ b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/GetGlobalCheckpointsAction.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.common.util.concurrent.CountDown; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; @@ -170,7 +171,7 @@ public LocalAction( final IndexNameExpressionResolver resolver, final ThreadPool threadPool ) { - super(NAME, actionFilters, transportService.getTaskManager()); + super(NAME, actionFilters, transportService.getTaskManager(), EsExecutors.DIRECT_EXECUTOR_SERVICE); this.clusterService = clusterService; this.client = client; this.resolver = resolver; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsAction.java index cb37be3b1fcb7..7c282d88aebfd 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsAction.java @@ -9,7 +9,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequestParameters; @@ -96,11 +95,29 @@ public TransportGetTrainedModelsStatsAction( TrainedModelProvider trainedModelProvider, Client client ) { - super(GetTrainedModelsStatsAction.NAME, actionFilters, transportService.getTaskManager()); + this( + transportService, + actionFilters, + clusterService, + trainedModelProvider, + client, + threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME) + ); + } + + private TransportGetTrainedModelsStatsAction( + TransportService transportService, + ActionFilters actionFilters, + ClusterService clusterService, + TrainedModelProvider trainedModelProvider, + Client client, + Executor executor + ) { + super(GetTrainedModelsStatsAction.NAME, actionFilters, transportService.getTaskManager(), executor); this.client = client; this.clusterService = clusterService; this.trainedModelProvider = trainedModelProvider; - this.executor = threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME); + this.executor = executor; } @Override @@ -108,15 +125,6 @@ protected void doExecute( Task task, GetTrainedModelsStatsAction.Request request, ActionListener listener - ) { - // workaround for https://github.com/elastic/elasticsearch/issues/97916 - TODO remove this when we can - executor.execute(ActionRunnable.wrap(listener, l -> doExecuteForked(task, request, l))); - } - - protected 
void doExecuteForked( - Task task, - GetTrainedModelsStatsAction.Request request, - ActionListener listener ) { final TaskId parentTaskId = new TaskId(clusterService.localNode().getId(), task.getId()); final ModelAliasMetadata modelAliasMetadata = ModelAliasMetadata.fromState(clusterService.state()); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearning.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearning.java index 2d7832d747de4..08766f8a054df 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearning.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearning.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.index.analysis.CharFilterFactory; import org.elasticsearch.index.analysis.TokenizerFactory; import org.elasticsearch.indices.analysis.AnalysisModule; @@ -121,7 +122,12 @@ public static class MockedRollupIndexCapsTransport extends TransportAction< @Inject public MockedRollupIndexCapsTransport(TransportService transportService) { - super(GetRollupIndexCapsAction.NAME, new ActionFilters(new HashSet<>()), transportService.getTaskManager()); + super( + GetRollupIndexCapsAction.NAME, + new ActionFilters(new HashSet<>()), + transportService.getTaskManager(), + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); } @Override diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetFlamegraphAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetFlamegraphAction.java index 4f3778081563b..59f5ce1d7cbf5 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetFlamegraphAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetFlamegraphAction.java @@ -16,6 +16,7 @@ import org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; @@ -33,7 +34,7 @@ public class TransportGetFlamegraphAction extends TransportAction listener) { - // As matching a regular expression might take a while, we run in a different thread to avoid blocking the network thread. 
- threadPool.generic().execute(ActionRunnable.supply(listener, () -> getResponse(request))); + listener.onResponse(getResponse(request)); } private TestGrokPatternAction.Response getResponse(TestGrokPatternAction.Request request) { From ea208d19244ad6d863f88bf28f9b1094b8c87420 Mon Sep 17 00:00:00 2001 From: David Turner Date: Thu, 25 Jul 2024 08:54:14 +0100 Subject: [PATCH 008/105] Follow-up to Azure auth validation change (#111252) Arising from a post-merge review of #111242 --- .../azure/AzureBlobStoreRepositoryTests.java | 2 +- .../AzureStorageCleanupThirdPartyTests.java | 2 +- .../RepositoryAzureClientYamlTestSuiteIT.java | 2 +- .../java/fixture/azure/AzureHttpFixture.java | 12 ++++++-- .../java/fixture/azure/AzureHttpHandler.java | 28 +++++++++---------- .../azure/AzureRepositoriesMeteringIT.java | 2 +- .../AzureSearchableSnapshotsIT.java | 2 +- .../AzureSnapshotBasedRecoveryIT.java | 2 +- .../testkit/AzureSnapshotRepoTestKitIT.java | 2 +- 9 files changed, 30 insertions(+), 24 deletions(-) diff --git a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java index d8d3c2180ac07..15d47f6bec800 100644 --- a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java +++ b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java @@ -154,7 +154,7 @@ long getUploadBlockSize() { @SuppressForbidden(reason = "this test uses a HttpHandler to emulate an Azure endpoint") private static class AzureBlobStoreHttpHandler extends AzureHttpHandler implements BlobStoreHttpHandler { AzureBlobStoreHttpHandler(final String account, final String container) { - super(account, container, null /* no auth header validation */); + super(account, container, null /* no auth header validation - sometimes it's omitted in these tests (TODO why?) */); } } diff --git a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureStorageCleanupThirdPartyTests.java b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureStorageCleanupThirdPartyTests.java index 78757020b8531..1d7f8092e4939 100644 --- a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureStorageCleanupThirdPartyTests.java +++ b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureStorageCleanupThirdPartyTests.java @@ -51,7 +51,7 @@ public class AzureStorageCleanupThirdPartyTests extends AbstractThirdPartyReposi USE_FIXTURE ? 
AzureHttpFixture.Protocol.HTTP : AzureHttpFixture.Protocol.NONE, AZURE_ACCOUNT, System.getProperty("test.azure.container"), - AzureHttpFixture.startsWithPredicate("SharedKey " + AZURE_ACCOUNT + ":") + AzureHttpFixture.sharedKeyForAccountPredicate(AZURE_ACCOUNT) ); @Override diff --git a/modules/repository-azure/src/yamlRestTest/java/org/elasticsearch/repositories/azure/RepositoryAzureClientYamlTestSuiteIT.java b/modules/repository-azure/src/yamlRestTest/java/org/elasticsearch/repositories/azure/RepositoryAzureClientYamlTestSuiteIT.java index 9a3afd03b270a..c04b2bc6a6d7c 100644 --- a/modules/repository-azure/src/yamlRestTest/java/org/elasticsearch/repositories/azure/RepositoryAzureClientYamlTestSuiteIT.java +++ b/modules/repository-azure/src/yamlRestTest/java/org/elasticsearch/repositories/azure/RepositoryAzureClientYamlTestSuiteIT.java @@ -33,7 +33,7 @@ public class RepositoryAzureClientYamlTestSuiteIT extends ESClientYamlSuiteTestC USE_FIXTURE ? AzureHttpFixture.Protocol.HTTPS : AzureHttpFixture.Protocol.NONE, AZURE_TEST_ACCOUNT, AZURE_TEST_CONTAINER, - AzureHttpFixture.startsWithPredicate("SharedKey " + AZURE_TEST_ACCOUNT + ":") + AzureHttpFixture.sharedKeyForAccountPredicate(AZURE_TEST_ACCOUNT) ); private static TestTrustStore trustStore = new TestTrustStore( diff --git a/test/fixtures/azure-fixture/src/main/java/fixture/azure/AzureHttpFixture.java b/test/fixtures/azure-fixture/src/main/java/fixture/azure/AzureHttpFixture.java index 0611f4012c36b..71c635972d62e 100644 --- a/test/fixtures/azure-fixture/src/main/java/fixture/azure/AzureHttpFixture.java +++ b/test/fixtures/azure-fixture/src/main/java/fixture/azure/AzureHttpFixture.java @@ -49,16 +49,22 @@ public enum Protocol { HTTPS } - public static Predicate startsWithPredicate(String expectedPrefix) { + /** + * @param account The name of the Azure Blob Storage account against which the request should be authorized. + * @return a predicate that matches the {@code Authorization} HTTP header that the Azure SDK sends when using shared key auth (i.e. + * using a key or SAS token).
+ * @see Azure docs on shared key auth + */ + public static Predicate sharedKeyForAccountPredicate(String account) { return new Predicate<>() { @Override public boolean test(String s) { - return s.startsWith(expectedPrefix); + return s.startsWith("SharedKey " + account + ":"); } @Override public String toString() { - return "startsWith[" + expectedPrefix + "]"; + return "SharedKey[" + account + "]"; } }; } diff --git a/test/fixtures/azure-fixture/src/main/java/fixture/azure/AzureHttpHandler.java b/test/fixtures/azure-fixture/src/main/java/fixture/azure/AzureHttpHandler.java index 83f3cc3f3e10d..d46afdcf93cd2 100644 --- a/test/fixtures/azure-fixture/src/main/java/fixture/azure/AzureHttpHandler.java +++ b/test/fixtures/azure-fixture/src/main/java/fixture/azure/AzureHttpHandler.java @@ -82,27 +82,27 @@ private boolean isValidAuthHeader(HttpExchange exchange) { @Override public void handle(final HttpExchange exchange) throws IOException { if (isValidAuthHeader(exchange) == false) { - try (exchange; var xcb = XContentBuilder.builder(XContentType.JSON.xContent())) { - xcb.startObject(); - xcb.field("method", exchange.getRequestMethod()); - xcb.field("uri", exchange.getRequestURI().toString()); - xcb.field("predicate", authHeaderPredicate.toString()); - xcb.field("authorization", Objects.toString(getAuthHeader(exchange))); - xcb.startObject("headers"); + try (exchange; var builder = XContentBuilder.builder(XContentType.JSON.xContent())) { + builder.startObject(); + builder.field("method", exchange.getRequestMethod()); + builder.field("uri", exchange.getRequestURI().toString()); + builder.field("predicate", authHeaderPredicate.toString()); + builder.field("authorization", Objects.toString(getAuthHeader(exchange))); + builder.startObject("headers"); for (final var header : exchange.getRequestHeaders().entrySet()) { if (header.getValue() == null) { - xcb.nullField(header.getKey()); + builder.nullField(header.getKey()); } else { - xcb.startArray(header.getKey()); + builder.startArray(header.getKey()); for (final var value : header.getValue()) { - xcb.value(value); + builder.value(value); } - xcb.endArray(); + builder.endArray(); } } - xcb.endObject(); - xcb.endObject(); - final var responseBytes = BytesReference.bytes(xcb); + builder.endObject(); + builder.endObject(); + final var responseBytes = BytesReference.bytes(builder); exchange.getResponseHeaders().add("Content-Type", "application/json; charset=utf-8"); exchange.sendResponseHeaders(RestStatus.FORBIDDEN.getStatus(), responseBytes.length()); responseBytes.writeTo(exchange.getResponseBody()); diff --git a/x-pack/plugin/repositories-metering-api/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/repositories/metering/azure/AzureRepositoriesMeteringIT.java b/x-pack/plugin/repositories-metering-api/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/repositories/metering/azure/AzureRepositoriesMeteringIT.java index 793116507d3f5..1a616e1f4276a 100644 --- a/x-pack/plugin/repositories-metering-api/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/repositories/metering/azure/AzureRepositoriesMeteringIT.java +++ b/x-pack/plugin/repositories-metering-api/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/repositories/metering/azure/AzureRepositoriesMeteringIT.java @@ -31,7 +31,7 @@ public class AzureRepositoriesMeteringIT extends AbstractRepositoriesMeteringAPI USE_FIXTURE ? 
AzureHttpFixture.Protocol.HTTPS : AzureHttpFixture.Protocol.NONE, AZURE_TEST_ACCOUNT, AZURE_TEST_CONTAINER, - AzureHttpFixture.startsWithPredicate("SharedKey " + AZURE_TEST_ACCOUNT + ":") + AzureHttpFixture.sharedKeyForAccountPredicate(AZURE_TEST_ACCOUNT) ); private static TestTrustStore trustStore = new TestTrustStore( diff --git a/x-pack/plugin/searchable-snapshots/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/AzureSearchableSnapshotsIT.java b/x-pack/plugin/searchable-snapshots/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/AzureSearchableSnapshotsIT.java index 284631fe4590a..4d7aabe489b9c 100644 --- a/x-pack/plugin/searchable-snapshots/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/AzureSearchableSnapshotsIT.java +++ b/x-pack/plugin/searchable-snapshots/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/AzureSearchableSnapshotsIT.java @@ -32,7 +32,7 @@ public class AzureSearchableSnapshotsIT extends AbstractSearchableSnapshotsRestT USE_FIXTURE ? AzureHttpFixture.Protocol.HTTPS : AzureHttpFixture.Protocol.NONE, AZURE_TEST_ACCOUNT, AZURE_TEST_CONTAINER, - AzureHttpFixture.startsWithPredicate("SharedKey " + AZURE_TEST_ACCOUNT + ":") + AzureHttpFixture.sharedKeyForAccountPredicate(AZURE_TEST_ACCOUNT) ); private static TestTrustStore trustStore = new TestTrustStore( diff --git a/x-pack/plugin/snapshot-based-recoveries/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/AzureSnapshotBasedRecoveryIT.java b/x-pack/plugin/snapshot-based-recoveries/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/AzureSnapshotBasedRecoveryIT.java index 5e083b4faf78d..8cebe9fafdb52 100644 --- a/x-pack/plugin/snapshot-based-recoveries/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/AzureSnapshotBasedRecoveryIT.java +++ b/x-pack/plugin/snapshot-based-recoveries/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/AzureSnapshotBasedRecoveryIT.java @@ -31,7 +31,7 @@ public class AzureSnapshotBasedRecoveryIT extends AbstractSnapshotBasedRecoveryR USE_FIXTURE ? AzureHttpFixture.Protocol.HTTPS : AzureHttpFixture.Protocol.NONE, AZURE_TEST_ACCOUNT, AZURE_TEST_CONTAINER, - AzureHttpFixture.startsWithPredicate("SharedKey " + AZURE_TEST_ACCOUNT + ":") + AzureHttpFixture.sharedKeyForAccountPredicate(AZURE_TEST_ACCOUNT) ); private static TestTrustStore trustStore = new TestTrustStore( diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/azure/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/AzureSnapshotRepoTestKitIT.java b/x-pack/plugin/snapshot-repo-test-kit/qa/azure/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/AzureSnapshotRepoTestKitIT.java index 85a95cba50131..afe17d2dd6f2d 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/qa/azure/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/AzureSnapshotRepoTestKitIT.java +++ b/x-pack/plugin/snapshot-repo-test-kit/qa/azure/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/AzureSnapshotRepoTestKitIT.java @@ -30,7 +30,7 @@ public class AzureSnapshotRepoTestKitIT extends AbstractSnapshotRepoTestKitRestT USE_FIXTURE ? 
AzureHttpFixture.Protocol.HTTPS : AzureHttpFixture.Protocol.NONE, AZURE_TEST_ACCOUNT, AZURE_TEST_CONTAINER, - AzureHttpFixture.startsWithPredicate("SharedKey " + AZURE_TEST_ACCOUNT + ":") + AzureHttpFixture.sharedKeyForAccountPredicate(AZURE_TEST_ACCOUNT) ); private static TestTrustStore trustStore = new TestTrustStore( From 87d9a0b2684dacbc8f70920e23c65e557d06d793 Mon Sep 17 00:00:00 2001 From: Valeriy Khakhutskyy <1292899+valeriy42@users.noreply.github.com> Date: Thu, 25 Jul 2024 10:32:36 +0200 Subject: [PATCH 009/105] [ML] Extend lat_long documentation (#111239) This PR adds the explanation of what "typical" means for the lat_long function. --- .../anomaly-detection/functions/ml-geo-functions.asciidoc | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docs/reference/ml/anomaly-detection/functions/ml-geo-functions.asciidoc b/docs/reference/ml/anomaly-detection/functions/ml-geo-functions.asciidoc index 5c061daa1cd44..63a0f047db647 100644 --- a/docs/reference/ml/anomaly-detection/functions/ml-geo-functions.asciidoc +++ b/docs/reference/ml/anomaly-detection/functions/ml-geo-functions.asciidoc @@ -52,6 +52,12 @@ detects anomalies where the geographic location of a credit card transaction is unusual for a particular customer’s credit card. An anomaly might indicate fraud. +A "typical" value indicates a centroid of a cluster of previously observed +locations that is closest to the "actual" location at that time. For example, +there may be one centroid near the person's home that is associated with the +cluster of local grocery stores and restaurants, and another centroid near the +person's work associated with the cluster of lunch and coffee places. + IMPORTANT: The `field_name` that you supply must be a single string that contains two comma-separated numbers of the form `latitude,longitude`, a `geo_point` field, a `geo_shape` field that contains point values, or a From 8fec9f70252a86badda2a1e6b0d118dd12f9c357 Mon Sep 17 00:00:00 2001 From: David Turner Date: Thu, 25 Jul 2024 12:35:57 +0100 Subject: [PATCH 010/105] Include account name in Azure settings exceptions (#111274) If there are several Azure accounts defined, and one of them has invalid settings, then it's hard to pinpoint the problem from the messages given today. This commit includes the account name in the exception message to make troubleshooting easier. 
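
As a rough illustration (this sketch is not part of the change; it assumes
the test framework's MockSecureSettings and the package-private
AzureStorageSettings.load, and the account/key values are made up), the
failure now points at the offending account:

    final MockSecureSettings secureSettings = new MockSecureSettings();
    secureSettings.setString("azure.client.azure1.account", "myaccount1");
    secureSettings.setString("azure.client.azure1.key",
        Base64.getEncoder().encodeToString("mykey1".getBytes(StandardCharsets.UTF_8)));
    secureSettings.setString("azure.client.azure1.sas_token", "sas_token_1");
    final Settings settings = Settings.builder().setSecureSettings(secureSettings).build();
    // Fails with: Both a secret as well as a shared access token were set for account [myaccount1]
    expectThrows(SettingsException.class, () -> AzureStorageSettings.load(settings));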
--- docs/changelog/111274.yaml | 5 +++++ .../repositories/azure/AzureStorageSettings.java | 15 +++++---------- .../azure/AzureStorageServiceTests.java | 16 ++++++++++------ 3 files changed, 20 insertions(+), 16 deletions(-) create mode 100644 docs/changelog/111274.yaml diff --git a/docs/changelog/111274.yaml b/docs/changelog/111274.yaml new file mode 100644 index 0000000000000..e26bcc03ce118 --- /dev/null +++ b/docs/changelog/111274.yaml @@ -0,0 +1,5 @@ +pr: 111274 +summary: Include account name in Azure settings exceptions +area: Snapshot/Restore +type: enhancement +issues: [] diff --git a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageSettings.java b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageSettings.java index 93a388dea98ae..511d7c400c8f8 100644 --- a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageSettings.java +++ b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageSettings.java @@ -124,10 +124,6 @@ final class AzureStorageSettings { ); private final String account; - - @Nullable - private final String sasToken; - private final String connectString; private final String endpointSuffix; private final TimeValue timeout; @@ -148,7 +144,6 @@ private AzureStorageSettings( String secondaryEndpoint ) { this.account = account; - this.sasToken = sasToken; this.connectString = buildConnectString(account, key, sasToken, endpointSuffix, endpoint, secondaryEndpoint); this.endpointSuffix = endpointSuffix; this.timeout = timeout; @@ -204,10 +199,10 @@ private static String buildConnectString( final boolean hasSasToken = Strings.hasText(sasToken); final boolean hasKey = Strings.hasText(key); if (hasSasToken == false && hasKey == false) { - throw new SettingsException("Neither a secret key nor a shared access token was set."); + throw new SettingsException("Neither a secret key nor a shared access token was set for account [" + account + "]"); } if (hasSasToken && hasKey) { - throw new SettingsException("Both a secret as well as a shared access token were set."); + throw new SettingsException("Both a secret as well as a shared access token were set for account [" + account + "]"); } final StringBuilder connectionStringBuilder = new StringBuilder(); connectionStringBuilder.append("DefaultEndpointsProtocol=https").append(";AccountName=").append(account); @@ -221,15 +216,15 @@ private static String buildConnectString( final boolean hasSecondaryEndpoint = Strings.hasText(secondaryEndpoint); if (hasEndpointSuffix && hasEndpoint) { - throw new SettingsException("Both an endpoint suffix as well as a primary endpoint were set"); + throw new SettingsException("Both an endpoint suffix as well as a primary endpoint were set for account [" + account + "]"); } if (hasEndpointSuffix && hasSecondaryEndpoint) { - throw new SettingsException("Both an endpoint suffix as well as a secondary endpoint were set"); + throw new SettingsException("Both an endpoint suffix as well as a secondary endpoint were set for account [" + account + "]"); } if (hasEndpoint == false && hasSecondaryEndpoint) { - throw new SettingsException("A primary endpoint is required when setting a secondary endpoint"); + throw new SettingsException("A primary endpoint is required when setting a secondary endpoint for account [" + account + "]"); } if (hasEndpointSuffix) { diff --git a/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureStorageServiceTests.java 
b/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureStorageServiceTests.java index 6f43f253db4c8..f60daccdb5cdf 100644 --- a/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureStorageServiceTests.java +++ b/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureStorageServiceTests.java @@ -199,10 +199,14 @@ public void testReinitClientWrongSettings() throws IOException { final AzureStorageService azureStorageService = plugin.azureStoreService.get(); AzureBlobServiceClient client11 = azureStorageService.client("azure1", LocationMode.PRIMARY_ONLY); assertThat(client11.getSyncClient().getAccountUrl(), equalTo("https://myaccount1.blob.core.windows.net")); - final SettingsException e1 = expectThrows(SettingsException.class, () -> plugin.reload(settings2)); - assertThat(e1.getMessage(), is("Neither a secret key nor a shared access token was set.")); - final SettingsException e2 = expectThrows(SettingsException.class, () -> plugin.reload(settings3)); - assertThat(e2.getMessage(), is("Both a secret as well as a shared access token were set.")); + assertThat( + expectThrows(SettingsException.class, () -> plugin.reload(settings2)).getMessage(), + is("Neither a secret key nor a shared access token was set for account [myaccount1]") + ); + assertThat( + expectThrows(SettingsException.class, () -> plugin.reload(settings3)).getMessage(), + is("Both a secret as well as a shared access token were set for account [myaccount3]") + ); // existing client untouched assertThat(client11.getSyncClient().getAccountUrl(), equalTo("https://myaccount1.blob.core.windows.net")); } @@ -499,7 +503,7 @@ public void testEndpointSettingValidation() { .build() ) ); - assertEquals("A primary endpoint is required when setting a secondary endpoint", e.getMessage()); + assertEquals("A primary endpoint is required when setting a secondary endpoint for account [myaccount1]", e.getMessage()); } { @@ -513,7 +517,7 @@ public void testEndpointSettingValidation() { .build() ) ); - assertEquals("Both an endpoint suffix as well as a secondary endpoint were set", e.getMessage()); + assertEquals("Both an endpoint suffix as well as a secondary endpoint were set for account [myaccount1]", e.getMessage()); } } From e3308f0b5c7c582078f5c0c51b557567a57c7dbd Mon Sep 17 00:00:00 2001 From: David Turner Date: Thu, 25 Jul 2024 12:36:27 +0100 Subject: [PATCH 011/105] Specify order for test rule chain in Azure tests (#111265) We must not clean up the temporary directory holding the truststore while the cluster is still running. This commit specifies an order for the relevant rule chain so that it runs within the one in `LuceneTestCase`. 
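
For reference, here is the nesting that the rule chain implies, sketched with
illustrative comments (the before/after ordering follows from RuleChain
semantics, outermost rule first):

    @ClassRule(order = 1)
    public static TestRule ruleChain = RuleChain.outerRule(fixture).around(trustStore).around(cluster);
    // fixture.before()
    //   trustStore.before()  // writes the truststore to a temp directory
    //     cluster.before()   // nodes start and read javax.net.ssl.trustStore
    //     cluster.after()    // the cluster stops here...
    //   trustStore.after()   // ...before the truststore directory goes away
    // fixture.after()

Giving the field an explicit order makes this whole chain run inside the
class rules installed by LuceneTestCase, whose teardown would otherwise be
free to delete the temporary directory too early.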
Closes #111251 --- .../azure/RepositoryAzureClientYamlTestSuiteIT.java | 2 +- .../main/java/org/elasticsearch/test/TestTrustStore.java | 6 ++++++ .../metering/azure/AzureRepositoriesMeteringIT.java | 2 +- .../searchablesnapshots/AzureSearchableSnapshotsIT.java | 2 +- .../recovery/AzureSnapshotBasedRecoveryIT.java | 2 +- .../blobstore/testkit/AzureSnapshotRepoTestKitIT.java | 2 +- 6 files changed, 11 insertions(+), 5 deletions(-) diff --git a/modules/repository-azure/src/yamlRestTest/java/org/elasticsearch/repositories/azure/RepositoryAzureClientYamlTestSuiteIT.java b/modules/repository-azure/src/yamlRestTest/java/org/elasticsearch/repositories/azure/RepositoryAzureClientYamlTestSuiteIT.java index c04b2bc6a6d7c..ba476e754a59e 100644 --- a/modules/repository-azure/src/yamlRestTest/java/org/elasticsearch/repositories/azure/RepositoryAzureClientYamlTestSuiteIT.java +++ b/modules/repository-azure/src/yamlRestTest/java/org/elasticsearch/repositories/azure/RepositoryAzureClientYamlTestSuiteIT.java @@ -62,7 +62,7 @@ public class RepositoryAzureClientYamlTestSuiteIT extends ESClientYamlSuiteTestC .systemProperty("javax.net.ssl.trustStore", () -> trustStore.getTrustStorePath().toString(), s -> USE_FIXTURE) .build(); - @ClassRule + @ClassRule(order = 1) public static TestRule ruleChain = RuleChain.outerRule(fixture).around(trustStore).around(cluster); public RepositoryAzureClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/TestTrustStore.java b/test/framework/src/main/java/org/elasticsearch/test/TestTrustStore.java index 23d178ea0c672..01069b8fb295c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/TestTrustStore.java +++ b/test/framework/src/main/java/org/elasticsearch/test/TestTrustStore.java @@ -22,6 +22,7 @@ import java.util.Objects; import static org.apache.lucene.tests.util.LuceneTestCase.createTempDir; +import static org.junit.Assert.assertTrue; public class TestTrustStore extends ExternalResource { @@ -54,4 +55,9 @@ protected void before() { throw new AssertionError("unexpected", e); } } + + @Override + protected void after() { + assertTrue(trustStorePath + " should still exist at teardown", Files.exists(trustStorePath)); + } } diff --git a/x-pack/plugin/repositories-metering-api/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/repositories/metering/azure/AzureRepositoriesMeteringIT.java b/x-pack/plugin/repositories-metering-api/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/repositories/metering/azure/AzureRepositoriesMeteringIT.java index 1a616e1f4276a..0e7eeb965d1f3 100644 --- a/x-pack/plugin/repositories-metering-api/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/repositories/metering/azure/AzureRepositoriesMeteringIT.java +++ b/x-pack/plugin/repositories-metering-api/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/repositories/metering/azure/AzureRepositoriesMeteringIT.java @@ -60,7 +60,7 @@ public class AzureRepositoriesMeteringIT extends AbstractRepositoriesMeteringAPI .systemProperty("javax.net.ssl.trustStore", () -> trustStore.getTrustStorePath().toString(), s -> USE_FIXTURE) .build(); - @ClassRule + @ClassRule(order = 1) public static TestRule ruleChain = RuleChain.outerRule(fixture).around(trustStore).around(cluster); @Override diff --git a/x-pack/plugin/searchable-snapshots/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/AzureSearchableSnapshotsIT.java 
b/x-pack/plugin/searchable-snapshots/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/AzureSearchableSnapshotsIT.java index 4d7aabe489b9c..d86632d77b51c 100644 --- a/x-pack/plugin/searchable-snapshots/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/AzureSearchableSnapshotsIT.java +++ b/x-pack/plugin/searchable-snapshots/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/AzureSearchableSnapshotsIT.java @@ -66,7 +66,7 @@ public class AzureSearchableSnapshotsIT extends AbstractSearchableSnapshotsRestT .systemProperty("javax.net.ssl.trustStore", () -> trustStore.getTrustStorePath().toString(), s -> USE_FIXTURE) .build(); - @ClassRule + @ClassRule(order = 1) public static TestRule ruleChain = RuleChain.outerRule(fixture).around(trustStore).around(cluster); @Override diff --git a/x-pack/plugin/snapshot-based-recoveries/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/AzureSnapshotBasedRecoveryIT.java b/x-pack/plugin/snapshot-based-recoveries/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/AzureSnapshotBasedRecoveryIT.java index 8cebe9fafdb52..8895574f85d02 100644 --- a/x-pack/plugin/snapshot-based-recoveries/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/AzureSnapshotBasedRecoveryIT.java +++ b/x-pack/plugin/snapshot-based-recoveries/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/AzureSnapshotBasedRecoveryIT.java @@ -62,7 +62,7 @@ public class AzureSnapshotBasedRecoveryIT extends AbstractSnapshotBasedRecoveryR .systemProperty("javax.net.ssl.trustStore", () -> trustStore.getTrustStorePath().toString(), s -> USE_FIXTURE) .build(); - @ClassRule + @ClassRule(order = 1) public static TestRule ruleChain = RuleChain.outerRule(fixture).around(trustStore).around(cluster); @Override diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/azure/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/AzureSnapshotRepoTestKitIT.java b/x-pack/plugin/snapshot-repo-test-kit/qa/azure/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/AzureSnapshotRepoTestKitIT.java index afe17d2dd6f2d..7451f37cd0e40 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/qa/azure/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/AzureSnapshotRepoTestKitIT.java +++ b/x-pack/plugin/snapshot-repo-test-kit/qa/azure/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/AzureSnapshotRepoTestKitIT.java @@ -65,7 +65,7 @@ public class AzureSnapshotRepoTestKitIT extends AbstractSnapshotRepoTestKitRestT .systemProperty("javax.net.ssl.trustStore", () -> trustStore.getTrustStorePath().toString(), s -> USE_FIXTURE) .build(); - @ClassRule + @ClassRule(order = 1) public static TestRule ruleChain = RuleChain.outerRule(fixture).around(trustStore).around(cluster); @Override From 2b1ffceb078541bae9f3d77d855a64f1a956567b Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 25 Jul 2024 22:32:52 +1000 Subject: [PATCH 012/105] Mute org.elasticsearch.action.admin.indices.create.SplitIndexIT testSplitIndexPrimaryTerm #111282 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index df491aa34b896..f0a84549ad746 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -126,6 +126,9 @@ tests: - class: 
org.elasticsearch.xpack.esql.qa.single_node.RestEsqlIT method: testInlineStatsProfile {ASYNC} issue: https://github.com/elastic/elasticsearch/issues/111264 +- class: org.elasticsearch.action.admin.indices.create.SplitIndexIT + method: testSplitIndexPrimaryTerm + issue: https://github.com/elastic/elasticsearch/issues/111282 # Examples: # From 30152c0774aee8d546f41100c6cc9243905f4e5b Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 25 Jul 2024 22:50:24 +1000 Subject: [PATCH 013/105] Mute org.elasticsearch.xpack.esql.CsvTests test {inlinestats.ShadowingSelf} #111261 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index f0a84549ad746..57759c9a497cc 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -129,6 +129,9 @@ tests: - class: org.elasticsearch.action.admin.indices.create.SplitIndexIT method: testSplitIndexPrimaryTerm issue: https://github.com/elastic/elasticsearch/issues/111282 +- class: org.elasticsearch.xpack.esql.CsvTests + method: test {inlinestats.ShadowingSelf} + issue: https://github.com/elastic/elasticsearch/issues/111261 # Examples: # From 7aeabd8fb198513b8682873bd441684265b56647 Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Thu, 25 Jul 2024 15:06:07 +0200 Subject: [PATCH 014/105] ESQL: Fix inlinestats beforeKeep test (#111275) * Make csv test deterministic * Remove mutes --- muted-tests.yml | 12 ------------ .../src/main/resources/inlinestats.csv-spec | 6 ++---- 2 files changed, 2 insertions(+), 16 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 57759c9a497cc..1d71d7d735e96 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -108,18 +108,6 @@ tests: - class: org.elasticsearch.repositories.azure.AzureBlobContainerRetriesTests method: testReadNonexistentBlobThrowsNoSuchFileException issue: https://github.com/elastic/elasticsearch/issues/111233 -- class: org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT - method: test {inlinestats.BeforeKeep ASYNC} - issue: https://github.com/elastic/elasticsearch/issues/111257 -- class: org.elasticsearch.xpack.esql.qa.multi_node.EsqlSpecIT - method: test {inlinestats.BeforeKeep ASYNC} - issue: https://github.com/elastic/elasticsearch/issues/111259 -- class: org.elasticsearch.xpack.esql.qa.multi_node.EsqlSpecIT - method: test {inlinestats.BeforeKeep SYNC} - issue: https://github.com/elastic/elasticsearch/issues/111260 -- class: org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT - method: test {inlinestats.BeforeKeep SYNC} - issue: https://github.com/elastic/elasticsearch/issues/111262 - class: org.elasticsearch.xpack.esql.qa.single_node.RestEsqlIT method: testInlineStatsProfile {SYNC} issue: https://github.com/elastic/elasticsearch/issues/111263 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/inlinestats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/inlinestats.csv-spec index 90d5bbd514c81..e52f1e45cead8 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/inlinestats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/inlinestats.csv-spec @@ -336,18 +336,16 @@ emp_no:integer | languages:integer | max_salary:integer 10003 | 4 | 74572 ; -beforeKeep +beforeKeepWhere required_capability: inlinestats FROM employees | INLINESTATS max_salary = MAX(salary) by languages | KEEP emp_no, languages, max_salary -| LIMIT 3; +| WHERE emp_no == 10003; ignoreOrder:true emp_no:integer | languages:integer | 
max_salary:integer - 10001 | 2 | 73578 - 10002 | 5 | 66817 10003 | 4 | 74572 ; From 2fa5400e232161f2ffec654fd7f7021527923e9e Mon Sep 17 00:00:00 2001 From: Salvatore Campagna <93581129+salvatore-campagna@users.noreply.github.com> Date: Thu, 25 Jul 2024 15:15:27 +0200 Subject: [PATCH 015/105] Test LogsDB backward compatibility (#110180) Test LogsDB backward compatibility with a rolling upgrade and full cluster restart. We try to start indexing logs using a `standard` index, then we switch to a `LogsDB` index. We also improve the existing test which switches between the two index modes, `standard` and `logs`. --- .../datastreams/LogsDataStreamRestIT.java | 146 +++++++++-- .../LogsIndexModeFullClusterRestartIT.java | 224 ++++++++++++++++ .../LogsIndexModeRollingUpgradeIT.java | 248 ++++++++++++++++++ 3 files changed, 596 insertions(+), 22 deletions(-) create mode 100644 qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java create mode 100644 qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeRollingUpgradeIT.java diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LogsDataStreamRestIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LogsDataStreamRestIT.java index 780864db8b629..1500674e3aee9 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LogsDataStreamRestIT.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LogsDataStreamRestIT.java @@ -28,6 +28,7 @@ import java.util.Locale; import java.util.Map; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; public class LogsDataStreamRestIT extends ESRestTestCase { @@ -102,19 +103,53 @@ private static void waitForLogs(RestClient client) throws Exception { private static final String STANDARD_TEMPLATE = """ { - "index_patterns": [ "logs-*-*" ], - "data_stream": {}, - "priority": 201, - "template": { - "settings": { - "index": { - "mode": "standard" + "index_patterns": [ "logs-*-*" ], + "data_stream": {}, + "priority": 201, + "template": { + "settings": { + "index": { + "mode": "standard" + } + }, + "mappings": { + "properties": { + "@timestamp" : { + "type": "date" + }, + "host.name": { + "type": "keyword" + }, + "pid": { + "type": "long" + }, + "method": { + "type": "keyword" + }, + "ip_address": { + "type": "ip" } - }, - "mappings": { - "properties": { - "@timestamp" : { - "type": "date" + } + } + } + }"""; + + private static final String TIME_SERIES_TEMPLATE = """ + { + "index_patterns": [ "logs-*-*" ], + "data_stream": {}, + "priority": 201, + "template": { + "settings": { + "index": { + "mode": "time_series", + "look_ahead_time": "5m" + } + }, + "mappings": { + "properties": { + "@timestamp" : { + "type": "date" }, "host.name": { "type": "keyword", @@ -129,6 +164,10 @@ private static void waitForLogs(RestClient client) throws Exception { }, "ip_address": { "type": "ip" + }, + "memory_usage_bytes": { + "type": "long", + "time_series_metric": "gauge" } } } @@ -142,7 +181,8 @@ private static void waitForLogs(RestClient client) throws Exception { "pid": "%d", "method": "%s", "message": "%s", - "ip_address": "%s" + "ip_address": "%s", + "memory_usage_bytes": "%d" } """; @@ -158,7 +198,8 @@ public void testLogsIndexing() throws IOException { randomNonNegativeLong(), randomFrom("PUT", "POST", "GET"), randomAlphaOfLength(32), - randomIp(randomBoolean()) + randomIp(randomBoolean()), + 
randomLongBetween(1_000_000L, 2_000_000L) ) ); assertDataStreamBackingIndexMode("logsdb", 0); @@ -172,7 +213,8 @@ public void testLogsIndexing() throws IOException { randomNonNegativeLong(), randomFrom("PUT", "POST", "GET"), randomAlphaOfLength(32), - randomIp(randomBoolean()) + randomIp(randomBoolean()), + randomLongBetween(1_000_000L, 2_000_000L) ) ); assertDataStreamBackingIndexMode("logsdb", 1); @@ -190,7 +232,8 @@ public void testLogsStandardIndexModeSwitch() throws IOException { randomNonNegativeLong(), randomFrom("PUT", "POST", "GET"), randomAlphaOfLength(32), - randomIp(randomBoolean()) + randomIp(randomBoolean()), + randomLongBetween(1_000_000L, 2_000_000L) ) ); assertDataStreamBackingIndexMode("logsdb", 0); @@ -206,7 +249,8 @@ public void testLogsStandardIndexModeSwitch() throws IOException { randomNonNegativeLong(), randomFrom("PUT", "POST", "GET"), randomAlphaOfLength(64), - randomIp(randomBoolean()) + randomIp(randomBoolean()), + randomLongBetween(1_000_000L, 2_000_000L) ) ); assertDataStreamBackingIndexMode("standard", 1); @@ -222,7 +266,61 @@ public void testLogsStandardIndexModeSwitch() throws IOException { randomNonNegativeLong(), randomFrom("PUT", "POST", "GET"), randomAlphaOfLength(32), - randomIp(randomBoolean()) + randomIp(randomBoolean()), + randomLongBetween(1_000_000L, 2_000_000L) + ) + ); + assertDataStreamBackingIndexMode("logsdb", 2); + } + + public void testLogsTimeSeriesIndexModeSwitch() throws IOException { + putTemplate(client, "custom-template", LOGS_TEMPLATE); + createDataStream(client, DATA_STREAM_NAME); + indexDocument( + client, + DATA_STREAM_NAME, + document( + Instant.now(), + randomAlphaOfLength(10), + randomNonNegativeLong(), + randomFrom("PUT", "POST", "GET"), + randomAlphaOfLength(32), + randomIp(randomBoolean()), + randomLongBetween(1_000_000L, 2_000_000L) + ) + ); + assertDataStreamBackingIndexMode("logsdb", 0); + + putTemplate(client, "custom-template", TIME_SERIES_TEMPLATE); + rolloverDataStream(client, DATA_STREAM_NAME); + indexDocument( + client, + DATA_STREAM_NAME, + document( + Instant.now().plusSeconds(10), + randomAlphaOfLength(10), + randomNonNegativeLong(), + randomFrom("PUT", "POST", "GET"), + randomAlphaOfLength(64), + randomIp(randomBoolean()), + randomLongBetween(1_000_000L, 2_000_000L) + ) + ); + assertDataStreamBackingIndexMode("time_series", 1); + + putTemplate(client, "custom-template", LOGS_TEMPLATE); + rolloverDataStream(client, DATA_STREAM_NAME); + indexDocument( + client, + DATA_STREAM_NAME, + document( + Instant.now().plusSeconds(320), // 5 mins index.look_ahead_time + randomAlphaOfLength(10), + randomNonNegativeLong(), + randomFrom("PUT", "POST", "GET"), + randomAlphaOfLength(32), + randomIp(randomBoolean()), + randomLongBetween(1_000_000L, 2_000_000L) ) ); assertDataStreamBackingIndexMode("logsdb", 2); @@ -238,17 +336,19 @@ private String document( long pid, final String method, final String message, - final InetAddress ipAddress + final InetAddress ipAddress, + long memoryUsageBytes ) { return String.format( Locale.ROOT, DOC_TEMPLATE, - DateFormatter.forPattern(FormatNames.DATE.getName()).format(timestamp), + DateFormatter.forPattern(FormatNames.DATE_TIME.getName()).format(timestamp), hostname, pid, method, message, - InetAddresses.toAddrString(ipAddress) + InetAddresses.toAddrString(ipAddress), + memoryUsageBytes ); } @@ -266,7 +366,9 @@ private static void putTemplate(final RestClient client, final String templateNa private static void indexDocument(final RestClient client, String dataStreamName, String doc) throws 
IOException { final Request request = new Request("POST", "/" + dataStreamName + "/_doc?refresh=true"); request.setJsonEntity(doc); - assertOK(client.performRequest(request)); + final Response response = client.performRequest(request); + assertOK(response); + assertThat(entityAsMap(response).get("result"), equalTo("created")); } private static void rolloverDataStream(final RestClient client, final String dataStreamName) throws IOException { diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java new file mode 100644 index 0000000000000..da168f2999086 --- /dev/null +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java @@ -0,0 +1,224 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.upgrades; + +import com.carrotsearch.randomizedtesting.annotations.Name; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.network.InetAddresses; +import org.elasticsearch.common.time.DateFormatter; +import org.elasticsearch.common.time.FormatNames; +import org.elasticsearch.test.MapMatcher; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.hamcrest.Matcher; +import org.hamcrest.Matchers; +import org.junit.ClassRule; + +import java.io.IOException; +import java.time.Instant; +import java.util.List; +import java.util.Map; +import java.util.function.Supplier; + +import static org.elasticsearch.test.MapMatcher.assertMap; +import static org.elasticsearch.test.MapMatcher.matchesMap; + +public class LogsIndexModeFullClusterRestartIT extends ParameterizedFullClusterRestartTestCase { + + @ClassRule + public static final ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .module("constant-keyword") + .module("data-streams") + .module("mapper-extras") + .module("x-pack-aggregate-metric") + .module("x-pack-stack") + .setting("xpack.security.enabled", "false") + .setting("xpack.license.self_generated.type", "trial") + .setting("cluster.logsdb.enabled", "true") + .build(); + + public LogsIndexModeFullClusterRestartIT(@Name("cluster") FullClusterRestartUpgradeStatus upgradeStatus) { + super(upgradeStatus); + } + + @Override + protected ElasticsearchCluster getUpgradeCluster() { + return cluster; + } + + private static final String BULK_INDEX_REQUEST = """ + { "create": {} } + { "@timestamp": "%s", "host.name": "%s", "method": "%s", "ip.address": "%s", "message": "%s" } + """; + + private static final String STANDARD_TEMPLATE = """ + { + "index_patterns": [ "logs-*-*" ], + "data_stream": {}, + "priority": 500, + "template": { + "mappings": { + "properties": { + "@timestamp" : { + "type": "date" + }, + "host.name": { + "type": "keyword" + }, + "method": { + "type": "keyword" + }, + "message": { + "type": "text" + }, + "ip.address": { + "type": "ip" + } + } + } + } + }"""; + + private 
static final String LOGS_TEMPLATE = """
+        {
+          "index_patterns": [ "logs-*-*" ],
+          "data_stream": {},
+          "priority": 500,
+          "template": {
+            "settings": {
+              "index": {
+                "mode": "logsdb"
+              }
+            },
+            "mappings": {
+              "properties": {
+                "@timestamp" : {
+                  "type": "date"
+                },
+                "host.name": {
+                  "type": "keyword"
+                },
+                "method": {
+                  "type": "keyword"
+                },
+                "message": {
+                  "type": "text"
+                },
+                "ip.address": {
+                  "type": "ip"
+                }
+              }
+            }
+          }
+        }""";
+
+    public void testLogsIndexing() throws IOException {
+        if (isRunningAgainstOldCluster()) {
+            assertOK(client().performRequest(putTemplate(client(), "logs-template", STANDARD_TEMPLATE)));
+            assertOK(client().performRequest(createDataStream("logs-apache-production")));
+            final Response bulkIndexResponse = client().performRequest(bulkIndex("logs-apache-production", () -> {
+                final StringBuilder sb = new StringBuilder();
+                for (int i = 0; i < randomIntBetween(10, 20); i++) {
+                    sb.append(
+                        String.format(
+                            BULK_INDEX_REQUEST,
+                            DateFormatter.forPattern(FormatNames.DATE_TIME.getName()).format(Instant.now()),
+                            randomFrom("foo", "bar"),
+                            randomFrom("PUT", "POST", "GET"),
+                            InetAddresses.toAddrString(randomIp(randomBoolean())),
+                            randomAlphaOfLengthBetween(100, 200)
+                        )
+                    );
+                    sb.append("\n");
+                }
+                return sb.toString();
+            }));
+            assertOK(bulkIndexResponse);
+            assertThat(entityAsMap(bulkIndexResponse).get("errors"), Matchers.is(false));
+        } else {
+            assertOK(client().performRequest(putTemplate(client(), "logs-template", LOGS_TEMPLATE)));
+            assertOK(client().performRequest(rolloverDataStream(client(), "logs-apache-production")));
+            final Response bulkIndexResponse = client().performRequest(bulkIndex("logs-apache-production", () -> {
+                final StringBuilder sb = new StringBuilder();
+                for (int i = 0; i < randomIntBetween(10, 20); i++) {
+                    sb.append(
+                        String.format(
+                            BULK_INDEX_REQUEST,
+                            DateFormatter.forPattern(FormatNames.DATE_TIME.getName()).format(Instant.now()),
+                            randomFrom("foo", "bar"),
+                            randomFrom("PUT", "POST", "GET"),
+                            InetAddresses.toAddrString(randomIp(randomBoolean())),
+                            randomAlphaOfLengthBetween(100, 200)
+                        )
+                    );
+                    sb.append("\n");
+                }
+                return sb.toString();
+            }));
+            assertOK(bulkIndexResponse);
+            assertThat(entityAsMap(bulkIndexResponse).get("errors"), Matchers.is(false));
+
+            assertIndexMappingsAndSettings(0, Matchers.nullValue(), matchesMap().extraOk());
+            assertIndexMappingsAndSettings(
+                1,
+                Matchers.equalTo("logsdb"),
+                matchesMap().extraOk().entry("_source", Map.of("mode", "synthetic"))
+            );
+        }
+    }
+
+    private void assertIndexMappingsAndSettings(int backingIndex, final Matcher<Object> indexModeMatcher, final MapMatcher mappingsMatcher)
+        throws IOException {
+        assertThat(
+            getSettings(client(), getWriteBackingIndex(client(), "logs-apache-production", backingIndex)).get("index.mode"),
+            indexModeMatcher
+        );
+        assertMap(getIndexMappingAsMap(getWriteBackingIndex(client(), "logs-apache-production", backingIndex)), mappingsMatcher);
+    }
+
+    private static Request createDataStream(final String dataStreamName) {
+        return new Request("PUT", "/_data_stream/" + dataStreamName);
+    }
+
+    private static Request bulkIndex(final String dataStreamName, final Supplier<String> bulkIndexRequestSupplier) {
+        final Request request = new Request("POST", dataStreamName + "/_bulk");
+        request.setJsonEntity(bulkIndexRequestSupplier.get());
+        request.addParameter("refresh", "true");
+        return request;
+    }
+
+    private static Request putTemplate(final RestClient client, final String templateName, final String mappings) throws IOException {
+        final Request request = new Request("PUT", "/_index_template/" + templateName);
+        request.setJsonEntity(mappings);
+        return request;
+    }
+
+    private static Request rolloverDataStream(final RestClient client, final String dataStreamName) throws IOException {
+        return new Request("POST", "/" + dataStreamName + "/_rollover");
+    }
+
+    @SuppressWarnings("unchecked")
+    private static String getWriteBackingIndex(final RestClient client, final String dataStreamName, int backingIndex) throws IOException {
+        final Request request = new Request("GET", "_data_stream/" + dataStreamName);
+        final List<Object> dataStreams = (List<Object>) entityAsMap(client.performRequest(request)).get("data_streams");
+        final Map<String, Object> dataStream = (Map<String, Object>) dataStreams.get(0);
+        final List<Map<String, String>> backingIndices = (List<Map<String, String>>) dataStream.get("indices");
+        return backingIndices.get(backingIndex).get("index_name");
+    }
+
+    @SuppressWarnings("unchecked")
+    private static Map<String, Object> getSettings(final RestClient client, final String indexName) throws IOException {
+        final Request request = new Request("GET", "/" + indexName + "/_settings?flat_settings");
+        return ((Map<String, Map<String, Object>>) entityAsMap(client.performRequest(request)).get(indexName)).get("settings");
+    }
+}
diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeRollingUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeRollingUpgradeIT.java
new file mode 100644
index 0000000000000..1549789bcc44a
--- /dev/null
+++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeRollingUpgradeIT.java
@@ -0,0 +1,248 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */ + +package org.elasticsearch.upgrades; + +import com.carrotsearch.randomizedtesting.annotations.Name; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.network.InetAddresses; +import org.elasticsearch.common.time.DateFormatter; +import org.elasticsearch.common.time.FormatNames; +import org.elasticsearch.test.MapMatcher; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.hamcrest.Matcher; +import org.hamcrest.Matchers; +import org.junit.ClassRule; + +import java.io.IOException; +import java.time.Instant; +import java.util.List; +import java.util.Map; +import java.util.function.Supplier; + +import static org.elasticsearch.test.MapMatcher.assertMap; +import static org.elasticsearch.test.MapMatcher.matchesMap; + +public class LogsIndexModeRollingUpgradeIT extends AbstractRollingUpgradeTestCase { + + @ClassRule() + public static final ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .module("constant-keyword") + .module("data-streams") + .module("mapper-extras") + .module("x-pack-aggregate-metric") + .module("x-pack-stack") + .setting("xpack.security.enabled", "false") + .setting("xpack.license.self_generated.type", "trial") + .setting("cluster.logsdb.enabled", "true") + .setting("stack.templates.enabled", "false") + .build(); + + public LogsIndexModeRollingUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { + super(upgradedNodes); + } + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + private static final String BULK_INDEX_REQUEST = """ + { "create": {} } + { "@timestamp": "%s", "host.name": "%s", "method": "%s", "ip.address": "%s", "message": "%s" } + """; + + private static final String STANDARD_TEMPLATE = """ + { + "index_patterns": [ "logs-*-*" ], + "data_stream": {}, + "priority": 500, + "template": { + "mappings": { + "properties": { + "@timestamp" : { + "type": "date" + }, + "host.name": { + "type": "keyword" + }, + "method": { + "type": "keyword" + }, + "message": { + "type": "text" + }, + "ip.address": { + "type": "ip" + } + } + } + } + }"""; + + private static final String LOGS_TEMPLATE = """ + { + "index_patterns": [ "logs-*-*" ], + "data_stream": {}, + "priority": 500, + "template": { + "settings": { + "index": { + "mode": "logsdb" + } + }, + "mappings": { + "properties": { + "@timestamp" : { + "type": "date" + }, + "host.name": { + "type": "keyword" + }, + "method": { + "type": "keyword" + }, + "message": { + "type": "text" + }, + "ip.address": { + "type": "ip" + } + } + } + } + }"""; + + public void testLogsIndexing() throws IOException { + if (isOldCluster()) { + assertOK(client().performRequest(putTemplate(client(), "logs-template", STANDARD_TEMPLATE))); + assertOK(client().performRequest(createDataStream("logs-apache-production"))); + final Response bulkIndexResponse = client().performRequest(bulkIndex("logs-apache-production", () -> { + final StringBuilder sb = new StringBuilder(); + for (int i = 0; i < randomIntBetween(10, 20); i++) { + sb.append( + String.format( + BULK_INDEX_REQUEST, + DateFormatter.forPattern(FormatNames.DATE_TIME.getName()).format(Instant.now()), + randomFrom("foo", "bar"), + randomFrom("PUT", "POST", "GET"), + InetAddresses.toAddrString(randomIp(randomBoolean())), + randomIntBetween(20, 50) + ) + ); + sb.append("\n"); + } + return 
sb.toString();
+            }));
+            assertOK(bulkIndexResponse);
+            assertThat(entityAsMap(bulkIndexResponse).get("errors"), Matchers.is(false));
+        } else if (isMixedCluster()) {
+            assertOK(client().performRequest(rolloverDataStream(client(), "logs-apache-production")));
+            final Response bulkIndexResponse = client().performRequest(bulkIndex("logs-apache-production", () -> {
+                final StringBuilder sb = new StringBuilder();
+                for (int i = 0; i < randomIntBetween(10, 20); i++) {
+                    sb.append(
+                        String.format(
+                            BULK_INDEX_REQUEST,
+                            DateFormatter.forPattern(FormatNames.DATE_TIME.getName()).format(Instant.now()),
+                            randomFrom("foo", "bar"),
+                            randomFrom("PUT", "POST", "GET"),
+                            InetAddresses.toAddrString(randomIp(randomBoolean())),
+                            randomIntBetween(20, 50)
+                        )
+                    );
+                    sb.append("\n");
+                }
+                return sb.toString();
+            }));
+            assertOK(bulkIndexResponse);
+            assertThat(entityAsMap(bulkIndexResponse).get("errors"), Matchers.is(false));
+        } else if (isUpgradedCluster()) {
+            assertOK(client().performRequest(putTemplate(client(), "logs-template", LOGS_TEMPLATE)));
+            assertOK(client().performRequest(rolloverDataStream(client(), "logs-apache-production")));
+            final Response bulkIndexResponse = client().performRequest(bulkIndex("logs-apache-production", () -> {
+                final StringBuilder sb = new StringBuilder();
+                for (int i = 0; i < randomIntBetween(10, 20); i++) {
+                    sb.append(
+                        String.format(
+                            BULK_INDEX_REQUEST,
+                            DateFormatter.forPattern(FormatNames.DATE_TIME.getName()).format(Instant.now()),
+                            randomFrom("foo", "bar"),
+                            randomFrom("PUT", "POST", "GET"),
+                            InetAddresses.toAddrString(randomIp(randomBoolean())),
+                            randomIntBetween(20, 50)
+                        )
+                    );
+                    sb.append("\n");
+                }
+                return sb.toString();
+            }));
+            assertOK(bulkIndexResponse);
+            assertThat(entityAsMap(bulkIndexResponse).get("errors"), Matchers.is(false));
+
+            assertIndexMappingsAndSettings(0, Matchers.nullValue(), matchesMap().extraOk());
+            assertIndexMappingsAndSettings(1, Matchers.nullValue(), matchesMap().extraOk());
+            assertIndexMappingsAndSettings(2, Matchers.nullValue(), matchesMap().extraOk());
+            assertIndexMappingsAndSettings(
+                3,
+                Matchers.equalTo("logsdb"),
+                matchesMap().extraOk().entry("_source", Map.of("mode", "synthetic"))
+            );
+        }
+    }
+
+    private void assertIndexMappingsAndSettings(int backingIndex, final Matcher<Object> indexModeMatcher, final MapMatcher mappingsMatcher)
+        throws IOException {
+        assertThat(
+            getSettings(client(), getWriteBackingIndex(client(), "logs-apache-production", backingIndex)).get("index.mode"),
+            indexModeMatcher
+        );
+        assertMap(getIndexMappingAsMap(getWriteBackingIndex(client(), "logs-apache-production", backingIndex)), mappingsMatcher);
+    }
+
+    private static Request createDataStream(final String dataStreamName) {
+        return new Request("PUT", "/_data_stream/" + dataStreamName);
+    }
+
+    private static Request bulkIndex(final String dataStreamName, final Supplier<String> bulkIndexRequestSupplier) {
+        final Request request = new Request("POST", dataStreamName + "/_bulk");
+        request.setJsonEntity(bulkIndexRequestSupplier.get());
+        request.addParameter("refresh", "true");
+        return request;
+    }
+
+    private static Request putTemplate(final RestClient client, final String templateName, final String mappings) throws IOException {
+        final Request request = new Request("PUT", "/_index_template/" + templateName);
+        request.setJsonEntity(mappings);
+        return request;
+    }
+
+    private static Request rolloverDataStream(final RestClient client, final String dataStreamName) throws IOException {
+        return new Request("POST", "/" + dataStreamName + "/_rollover");
+    }
+
+    @SuppressWarnings("unchecked")
+    private static String getWriteBackingIndex(final RestClient client, final String dataStreamName, int backingIndex) throws IOException {
+        final Request request = new Request("GET", "_data_stream/" + dataStreamName);
+        final List<Object> dataStreams = (List<Object>) entityAsMap(client.performRequest(request)).get("data_streams");
+        final Map<String, Object> dataStream = (Map<String, Object>) dataStreams.get(0);
+        final List<Map<String, String>> backingIndices = (List<Map<String, String>>) dataStream.get("indices");
+        return backingIndices.get(backingIndex).get("index_name");
+    }
+
+    @SuppressWarnings("unchecked")
+    private static Map<String, Object> getSettings(final RestClient client, final String indexName) throws IOException {
+        final Request request = new Request("GET", "/" + indexName + "/_settings?flat_settings");
+        return ((Map<String, Map<String, Object>>) entityAsMap(client.performRequest(request)).get(indexName)).get("settings");
+    }
+}
From 4c6f37da8b2d723c9a420ded21ec801df4a614c1 Mon Sep 17 00:00:00 2001
From: Nik Everett
Date: Thu, 25 Jul 2024 09:47:22 -0400
Subject: [PATCH 016/105] ESQL: Add `ExpressionEvaluator` for Lucene `Query`
 (#111157)

I was talking with @ioanatia on Friday about building an `ExpressionEvaluator`
that could run a Lucene `Query` during the compute engine's normal runtime. It
sounded fun so I took a crack at it. It's not finished or plugged in, but I
think something like this would be useful to build on.

The idea here is that, for stuff like "this text field matches this string"
AKA `WHERE title MATCH "harry potter"`, we push it to Lucene where possible,
but we don't *have* to. With this handy tool! That lines up better with the
way ESQL works in general. It makes planning simpler if you can fall back on
"doing it at runtime".

Now, running a Lucene query at runtime isn't ideal. In the worst case we're
running a `MatchAll` query to iterate everything and then running this query,
block by block.
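
For a sense of how this might get used (a rough sketch only; nothing here is
wired up yet, and `searcher`, `blockFactory` and `page` are stand-ins for
whatever the surrounding operator would provide):

    // Score one query per shard; here a single shard with a term query.
    ShardConfig[] shards = new ShardConfig[] {
        new ShardConfig(new TermQuery(new Term("title", "potter")), searcher)
    };
    // Channel 0 of the incoming pages is assumed to hold the DocVector.
    var evaluator = new LuceneQueryExpressionEvaluator(blockFactory, shards, 0);
    try (Block mask = evaluator.eval(page)) {
        // one boolean per position: true if the doc matches the query
    }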
---
 .../LuceneQueryExpressionEvaluator.java       | 327 ++++++++++++++++++
 .../elasticsearch/compute/OperatorTests.java  | 122 +++----
 .../LuceneQueryExpressionEvaluatorTests.java  | 262 ++++++++++++++
 .../compute/operator/ShuffleDocsOperator.java |  74 ++++
 4 files changed, 703 insertions(+), 82 deletions(-)
 create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluator.java
 create mode 100644 x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java
 create mode 100644 x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ShuffleDocsOperator.java

diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluator.java
new file mode 100644
index 0000000000000..dcd13671670d8
--- /dev/null
+++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluator.java
@@ -0,0 +1,327 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.compute.lucene;
+
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.search.BulkScorer;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.LeafCollector;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Scorable;
+import org.apache.lucene.search.ScoreMode;
+import org.apache.lucene.search.Scorer;
+import org.apache.lucene.search.Weight;
+import org.apache.lucene.util.ArrayUtil;
+import org.apache.lucene.util.Bits;
+import org.elasticsearch.compute.data.Block;
+import org.elasticsearch.compute.data.BlockFactory;
+import org.elasticsearch.compute.data.BooleanVector;
+import org.elasticsearch.compute.data.DocBlock;
+import org.elasticsearch.compute.data.DocVector;
+import org.elasticsearch.compute.data.IntVector;
+import org.elasticsearch.compute.data.Page;
+import org.elasticsearch.compute.operator.EvalOperator;
+import org.elasticsearch.core.Releasable;
+import org.elasticsearch.core.Releasables;
+
+import java.io.IOException;
+import java.io.UncheckedIOException;
+
+/**
+ * {@link EvalOperator.ExpressionEvaluator} to run a Lucene {@link Query} during
+ * the compute engine's normal execution, yielding matches/does not match into
+ * a {@link BooleanVector}. It's much faster to push these to the
+ * {@link LuceneSourceOperator} or the like, but sometimes this isn't possible. So
+ * this evaluator is here to save the day.
+ */
+public class LuceneQueryExpressionEvaluator implements EvalOperator.ExpressionEvaluator {
+    public record ShardConfig(Query query, IndexSearcher searcher) {}
+
+    private final BlockFactory blockFactory;
+    private final ShardConfig[] shards;
+    private final int docChannel;
+
+    private ShardState[] perShardState = EMPTY_SHARD_STATES;
+
+    public LuceneQueryExpressionEvaluator(BlockFactory blockFactory, ShardConfig[] shards, int docChannel) {
+        this.blockFactory = blockFactory;
+        this.shards = shards;
+        this.docChannel = docChannel;
+    }
+
+    @Override
+    public Block eval(Page page) {
+        DocVector docs = page.<DocBlock>getBlock(docChannel).asVector();
+        try {
+            if (docs.singleSegmentNonDecreasing()) {
+                return evalSingleSegmentNonDecreasing(docs).asBlock();
+            } else {
+                return evalSlow(docs).asBlock();
+            }
+        } catch (IOException e) {
+            throw new UncheckedIOException(e);
+        }
+    }
+
+    /**
+     * Evaluate {@link DocVector#singleSegmentNonDecreasing()} documents.
+     *
+     * ESQL receives documents in DocVector, and they can be in one of two
+     * states. Either the DocVector contains documents from a single segment
+     * in non-decreasing order, or it doesn't. That first case is much more
+     * like how Lucene likes to process documents, and it's much more common.
+     * So we optimize for it.
+     *
+     * Vectors that are {@link DocVector#singleSegmentNonDecreasing()}
+     * represent many documents from a single Lucene segment. In Elasticsearch
+     * terms that's a segment in a single shard. And the document ids are in
+     * non-decreasing order. Probably just {@code 0, 1, 2, 3, 4, 5...}.
+     * But maybe something like {@code 0, 5, 6, 10, 10, 10}. Both of those are
+     * very like how Lucene "natively" processes documents and this optimizes
+     * those accesses.
+     *
+     * If the documents are literally {@code 0, 1, ... n} then we use
+     * {@link BulkScorer#score(LeafCollector, Bits, int, int)} which processes
+     * a whole range. This should be quite common in the case where we don't
+     * have deleted documents because that's the order that
+     * {@link LuceneSourceOperator} produces them.
+     *
+     * If there are gaps in the sequence we use {@link Scorer} calls to
+     * score the sequence. This'll be less fast but isn't going to be
+     * particularly common.
+     *
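+     * For example, doc ids {@code 0, 1, 2, 3, 4} form a dense range and take
+     * the {@link BulkScorer} path, while {@code 0, 5, 6, 10, 10, 10} contain
+     * gaps and take the {@link Scorer} path.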
+     */
+    private BooleanVector evalSingleSegmentNonDecreasing(DocVector docs) throws IOException {
+        ShardState shardState = shardState(docs.shards().getInt(0));
+        SegmentState segmentState = shardState.segmentState(docs.segments().getInt(0));
+        int min = docs.docs().getInt(0);
+        int max = docs.docs().getInt(docs.getPositionCount() - 1);
+        int length = max - min + 1;
+        if (length == docs.getPositionCount() && length > 1) {
+            return segmentState.scoreDense(min, max);
+        }
+        return segmentState.scoreSparse(docs.docs());
+    }
+
+    /**
+     * Evaluate non-{@link DocVector#singleSegmentNonDecreasing()} documents. See
+     * {@link #evalSingleSegmentNonDecreasing} for the meaning of
+     * {@link DocVector#singleSegmentNonDecreasing()} and how we can efficiently
+     * evaluate those segments.
+     *
+     * This processes the worst-case blocks of documents. These can be from any
+     * number of shards and any number of segments and in any order. We do this
+     * by iterating the docs in {@code shard ASC, segment ASC, doc ASC} order.
+     * So, that's segment by segment, docs ascending. We build a boolean block
+     * out of that. Then we sort that to put the booleans in
+     * the order that the {@link DocVector} came in.
+     *
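+     * For example, positions holding the {@code shard/segment/doc} triples
+     * {@code 1/0/7, 0/0/3, 0/0/9} are scored in the order
+     * {@code 0/0/3, 0/0/9, 1/0/7}, and the resulting booleans are put back
+     * into the original position order via
+     * {@link DocVector#shardSegmentDocMapBackwards()}.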
+     */
+    private BooleanVector evalSlow(DocVector docs) throws IOException {
+        int[] map = docs.shardSegmentDocMapForwards();
+        // Clear any state flags from the previous run
+        int prevShard = -1;
+        int prevSegment = -1;
+        SegmentState segmentState = null;
+        try (BooleanVector.Builder builder = blockFactory.newBooleanVectorFixedBuilder(docs.getPositionCount())) {
+            for (int i = 0; i < docs.getPositionCount(); i++) {
+                int shard = docs.shards().getInt(docs.shards().getInt(map[i]));
+                int segment = docs.segments().getInt(map[i]);
+                if (segmentState == null || prevShard != shard || prevSegment != segment) {
+                    segmentState = shardState(shard).segmentState(segment);
+                    segmentState.initScorer(docs.docs().getInt(map[i]));
+                    prevShard = shard;
+                    prevSegment = segment;
+                }
+                if (segmentState.noMatch) {
+                    builder.appendBoolean(false);
+                } else {
+                    segmentState.scoreSingleDocWithScorer(builder, docs.docs().getInt(map[i]));
+                }
+            }
+            try (BooleanVector outOfOrder = builder.build()) {
+                return outOfOrder.filter(docs.shardSegmentDocMapBackwards());
+            }
+        }
+    }
+
+    @Override
+    public void close() {
+
+    }
+
+    private ShardState shardState(int shard) throws IOException {
+        if (shard >= perShardState.length) {
+            perShardState = ArrayUtil.grow(perShardState, shard + 1);
+        } else if (perShardState[shard] != null) {
+            return perShardState[shard];
+        }
+        perShardState[shard] = new ShardState(shards[shard]);
+        return perShardState[shard];
+    }
+
+    private class ShardState {
+        private final Weight weight;
+        private final IndexSearcher searcher;
+        private SegmentState[] perSegmentState = EMPTY_SEGMENT_STATES;
+
+        ShardState(ShardConfig config) throws IOException {
+            weight = config.searcher.createWeight(config.query, ScoreMode.COMPLETE_NO_SCORES, 0.0f);
+            searcher = config.searcher;
+        }
+
+        SegmentState segmentState(int segment) throws IOException {
+            if (segment >= perSegmentState.length) {
+                perSegmentState = ArrayUtil.grow(perSegmentState, segment + 1);
+            } else if (perSegmentState[segment] != null) {
+                return perSegmentState[segment];
+            }
+            perSegmentState[segment] = new SegmentState(weight, searcher.getLeafContexts().get(segment));
+            return perSegmentState[segment];
+        }
+    }
+
+    private class SegmentState {
+        private final Weight weight;
+        private final LeafReaderContext ctx;
+        /**
+         * Lazily initialized {@link Scorer} for this. {@code null} here means uninitialized
+         * or that {@link #noMatch} is true.
+         */
+        private Scorer scorer;
+
+        /**
+         * Lazily initialized {@link BulkScorer} for this. {@code null} here means uninitialized
+         * or that {@link #noMatch} is true.
+         */
+        private BulkScorer bulkScorer;
+
+        /**
+         * Set to {@code true} if, in the process of building a {@link Scorer} or {@link BulkScorer},
+         * the {@link Weight} tells us there aren't any matches.
+         */
+        private boolean noMatch;
+
+        private SegmentState(Weight weight, LeafReaderContext ctx) {
+            this.weight = weight;
+            this.ctx = ctx;
+        }
+
+        /**
+         * Score a range using the {@link BulkScorer}. This should be faster
+         * than using {@link #scoreSparse} for dense doc ids.
+ */ + BooleanVector scoreDense(int min, int max) throws IOException { + int length = max - min + 1; + if (noMatch) { + return blockFactory.newConstantBooleanVector(false, length); + } + if (bulkScorer == null) { + bulkScorer = weight.bulkScorer(ctx); + if (bulkScorer == null) { + noMatch = true; + return blockFactory.newConstantBooleanVector(false, length); + } + } + try (DenseCollector collector = new DenseCollector(blockFactory, min, max)) { + bulkScorer.score(collector, ctx.reader().getLiveDocs(), min, max + 1); + return collector.build(); + } + } + + /** + * Score a vector of doc ids using {@link Scorer}. If you have a dense range of + * doc ids it'd be faster to use {@link #scoreDense}. + */ + BooleanVector scoreSparse(IntVector docs) throws IOException { + initScorer(docs.getInt(0)); + if (noMatch) { + return blockFactory.newConstantBooleanVector(false, docs.getPositionCount()); + } + try (BooleanVector.Builder builder = blockFactory.newBooleanVectorFixedBuilder(docs.getPositionCount())) { + for (int i = 0; i < docs.getPositionCount(); i++) { + scoreSingleDocWithScorer(builder, docs.getInt(i)); + } + return builder.build(); + } + } + + private void initScorer(int minDocId) throws IOException { + if (noMatch) { + return; + } + if (scorer == null || scorer.iterator().docID() > minDocId) { + // The previous block might have been beyond this one, reset the scorer and try again. + scorer = weight.scorer(ctx); + if (scorer == null) { + noMatch = true; + } + } + } + + private void scoreSingleDocWithScorer(BooleanVector.Builder builder, int doc) throws IOException { + if (scorer.iterator().docID() == doc) { + builder.appendBoolean(true); + } else if (scorer.iterator().docID() > doc) { + builder.appendBoolean(false); + } else { + builder.appendBoolean(scorer.iterator().advance(doc) == doc); + } + } + } + + private static final ShardState[] EMPTY_SHARD_STATES = new ShardState[0]; + private static final SegmentState[] EMPTY_SEGMENT_STATES = new SegmentState[0]; + + /** + * Collects matching information for dense range of doc ids. This assumes that + * doc ids are sent to {@link LeafCollector#collect(int)} in ascending order + * which isn't documented, but @jpountz swears is true. 
+ */ + static class DenseCollector implements LeafCollector, Releasable { + private final BooleanVector.FixedBuilder builder; + private final int max; + + int next; + + DenseCollector(BlockFactory blockFactory, int min, int max) { + this.builder = blockFactory.newBooleanVectorFixedBuilder(max - min + 1); + this.max = max; + next = min; + } + + @Override + public void setScorer(Scorable scorable) {} + + @Override + public void collect(int doc) { + while (next++ < doc) { + builder.appendBoolean(false); + } + builder.appendBoolean(true); + } + + public BooleanVector build() { + return builder.build(); + } + + @Override + public void finish() { + while (next++ <= max) { + builder.appendBoolean(false); + } + } + + @Override + public void close() { + Releasables.closeExpectNoException(builder); + } + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java index 8902293ca945f..8b69b5584e65d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java @@ -62,6 +62,7 @@ import org.elasticsearch.compute.operator.PageConsumerOperator; import org.elasticsearch.compute.operator.RowInTableLookupOperator; import org.elasticsearch.compute.operator.SequenceLongBlockSourceOperator; +import org.elasticsearch.compute.operator.ShuffleDocsOperator; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.Releasables; import org.elasticsearch.index.mapper.KeywordFieldMapper; @@ -74,7 +75,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -164,93 +164,51 @@ public void testGroupingWithOrdinals() throws Exception { } writer.commit(); Map actualCounts = new HashMap<>(); - boolean shuffleDocs = randomBoolean(); - Operator shuffleDocsOperator = new AbstractPageMappingOperator() { - @Override - protected Page process(Page page) { - if (shuffleDocs == false) { - return page; - } - DocVector docVector = (DocVector) page.getBlock(0).asVector(); - int positionCount = docVector.getPositionCount(); - IntVector shards = null; - IntVector segments = null; - IntVector docs = null; - try ( - IntVector.Builder shardsBuilder = blockFactory.newIntVectorBuilder(positionCount); - IntVector.Builder segmentsBuilder = blockFactory.newIntVectorBuilder(positionCount); - IntVector.Builder docsBuilder = blockFactory.newIntVectorBuilder(positionCount); - ) { - List docIds = new ArrayList<>(positionCount); - for (int i = 0; i < positionCount; i++) { - shardsBuilder.appendInt(docVector.shards().getInt(i)); - segmentsBuilder.appendInt(docVector.segments().getInt(i)); - docIds.add(docVector.docs().getInt(i)); - } - shards = shardsBuilder.build(); - segments = segmentsBuilder.build(); - Collections.shuffle(docIds, random()); - for (Integer d : docIds) { - docsBuilder.appendInt(d); - } - docs = docsBuilder.build(); - } finally { - if (docs == null) { - Releasables.closeExpectNoException(docVector, shards, segments); - } else { - Releasables.closeExpectNoException(docVector); - } - } - Block[] blocks = new Block[page.getBlockCount()]; - blocks[0] = new DocVector(shards, segments, docs, false).asBlock(); - for (int i = 1; i < blocks.length; i++) { - blocks[i] = page.getBlock(i); - } - return new Page(blocks); - } - @Override - 
public String toString() { - return "ShuffleDocs"; + try (DirectoryReader reader = writer.getReader()) { + List operators = new ArrayList<>(); + if (randomBoolean()) { + operators.add(new ShuffleDocsOperator(blockFactory)); } - }; + operators.add(new AbstractPageMappingOperator() { + @Override + protected Page process(Page page) { + return page.appendBlock(driverContext.blockFactory().newConstantIntBlockWith(1, page.getPositionCount())); + } - try (DirectoryReader reader = writer.getReader()) { + @Override + public String toString() { + return "Add(1)"; + } + }); + operators.add( + new OrdinalsGroupingOperator( + shardIdx -> new KeywordFieldMapper.KeywordFieldType("g").blockLoader(null), + List.of(new ValuesSourceReaderOperator.ShardContext(reader, () -> SourceLoader.FROM_STORED_SOURCE)), + ElementType.BYTES_REF, + 0, + gField, + List.of(CountAggregatorFunction.supplier(List.of(1)).groupingAggregatorFactory(INITIAL)), + randomPageSize(), + driverContext + ) + ); + operators.add( + new HashAggregationOperator( + List.of(CountAggregatorFunction.supplier(List.of(1, 2)).groupingAggregatorFactory(FINAL)), + () -> BlockHash.build( + List.of(new BlockHash.GroupSpec(0, ElementType.BYTES_REF)), + driverContext.blockFactory(), + randomPageSize(), + false + ), + driverContext + ) + ); Driver driver = new Driver( driverContext, luceneOperatorFactory(reader, new MatchAllDocsQuery(), LuceneOperator.NO_LIMIT).get(driverContext), - List.of(shuffleDocsOperator, new AbstractPageMappingOperator() { - @Override - protected Page process(Page page) { - return page.appendBlock(driverContext.blockFactory().newConstantIntBlockWith(1, page.getPositionCount())); - } - - @Override - public String toString() { - return "Add(1)"; - } - }, - new OrdinalsGroupingOperator( - shardIdx -> new KeywordFieldMapper.KeywordFieldType("g").blockLoader(null), - List.of(new ValuesSourceReaderOperator.ShardContext(reader, () -> SourceLoader.FROM_STORED_SOURCE)), - ElementType.BYTES_REF, - 0, - gField, - List.of(CountAggregatorFunction.supplier(List.of(1)).groupingAggregatorFactory(INITIAL)), - randomPageSize(), - driverContext - ), - new HashAggregationOperator( - List.of(CountAggregatorFunction.supplier(List.of(1, 2)).groupingAggregatorFactory(FINAL)), - () -> BlockHash.build( - List.of(new BlockHash.GroupSpec(0, ElementType.BYTES_REF)), - driverContext.blockFactory(), - randomPageSize(), - false - ), - driverContext - ) - ), + operators, new PageConsumerOperator(page -> { BytesRefBlock keys = page.getBlock(0); LongBlock counts = page.getBlock(1); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java new file mode 100644 index 0000000000000..cfac6adfd3cda --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java @@ -0,0 +1,262 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.lucene; + +import org.apache.lucene.document.Field; +import org.apache.lucene.document.KeywordField; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.MultiTermQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermInSetQuery; +import org.apache.lucene.search.TermQuery; +import org.apache.lucene.tests.index.RandomIndexWriter; +import org.apache.lucene.tests.store.BaseDirectoryWrapper; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.OperatorTests; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.lucene.LuceneQueryExpressionEvaluator.DenseCollector; +import org.elasticsearch.compute.operator.ComputeTestCase; +import org.elasticsearch.compute.operator.Driver; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Operator; +import org.elasticsearch.compute.operator.OperatorTestCase; +import org.elasticsearch.compute.operator.ShuffleDocsOperator; +import org.elasticsearch.compute.operator.TestResultPageSinkOperator; +import org.elasticsearch.core.CheckedFunction; +import org.elasticsearch.index.mapper.BlockDocValuesReader; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Set; +import java.util.TreeSet; + +import static org.elasticsearch.compute.operator.OperatorTestCase.randomPageSize; +import static org.hamcrest.Matchers.equalTo; + +public class LuceneQueryExpressionEvaluatorTests extends ComputeTestCase { + private static final String FIELD = "g"; + + public void testDenseCollectorSmall() { + try (DenseCollector collector = new DenseCollector(blockFactory(), 0, 2)) { + collector.collect(0); + collector.collect(1); + collector.collect(2); + collector.finish(); + try (BooleanVector result = collector.build()) { + for (int i = 0; i <= 2; i++) { + assertThat(result.getBoolean(i), equalTo(true)); + } + } + } + } + + public void testDenseCollectorSimple() { + try (DenseCollector collector = new DenseCollector(blockFactory(), 0, 10)) { + collector.collect(2); + collector.collect(5); + collector.finish(); + try (BooleanVector result = collector.build()) { + for (int i = 0; i < 11; i++) { + assertThat(result.getBoolean(i), equalTo(i == 2 || i == 5)); + } + } + } + } + + public void testDenseCollector() { + int length = between(1, 10_000); + int min = between(0, Integer.MAX_VALUE - length - 1); + int max = min + length + 1; + boolean[] expected = new boolean[length]; + try (DenseCollector collector = new DenseCollector(blockFactory(), min, max)) { + for (int i = 0; i < length; i++) { + expected[i] = randomBoolean(); + if (expected[i]) { + collector.collect(min + i); + } + } + collector.finish(); + try (BooleanVector result = collector.build()) { + for (int i = 0; i < length; i++) { + assertThat(result.getBoolean(i), equalTo(expected[i])); + } 
+ } + } + } + + public void testTermQuery() throws IOException { + Set values = values(); + String term = values.iterator().next(); + List results = runQuery(values, new TermQuery(new Term(FIELD, term)), false); + assertTermQuery(term, results); + } + + public void testTermQueryShuffled() throws IOException { + Set values = values(); + String term = values.iterator().next(); + List results = runQuery(values, new TermQuery(new Term(FIELD, term)), true); + assertTermQuery(term, results); + } + + private void assertTermQuery(String term, List results) { + int matchCount = 0; + for (Page page : results) { + BytesRefVector terms = page.getBlock(1).asVector(); + BooleanVector matches = page.getBlock(2).asVector(); + for (int i = 0; i < page.getPositionCount(); i++) { + BytesRef termAtPosition = terms.getBytesRef(i, new BytesRef()); + assertThat(matches.getBoolean(i), equalTo(termAtPosition.utf8ToString().equals(term))); + if (matches.getBoolean(i)) { + matchCount++; + } + } + } + assertThat(matchCount, equalTo(1)); + } + + public void testTermsQuery() throws IOException { + testTermsQuery(false); + } + + public void testTermsQueryShuffled() throws IOException { + testTermsQuery(true); + } + + private void testTermsQuery(boolean shuffleDocs) throws IOException { + Set values = values(); + Iterator itr = values.iterator(); + TreeSet matching = new TreeSet<>(); + TreeSet matchingBytes = new TreeSet<>(); + int expectedMatchCount = between(2, values.size()); + for (int i = 0; i < expectedMatchCount; i++) { + String v = itr.next(); + matching.add(v); + matchingBytes.add(new BytesRef(v)); + } + List results = runQuery(values, new TermInSetQuery(MultiTermQuery.CONSTANT_SCORE_REWRITE, FIELD, matchingBytes), shuffleDocs); + int matchCount = 0; + for (Page page : results) { + BytesRefVector terms = page.getBlock(1).asVector(); + BooleanVector matches = page.getBlock(2).asVector(); + for (int i = 0; i < page.getPositionCount(); i++) { + BytesRef termAtPosition = terms.getBytesRef(i, new BytesRef()); + assertThat(matches.getBoolean(i), equalTo(matching.contains(termAtPosition.utf8ToString()))); + if (matches.getBoolean(i)) { + matchCount++; + } + } + } + assertThat(matchCount, equalTo(expectedMatchCount)); + } + + private List runQuery(Set values, Query query, boolean shuffleDocs) throws IOException { + DriverContext driverContext = driverContext(); + BlockFactory blockFactory = driverContext.blockFactory(); + return withReader(values, reader -> { + IndexSearcher searcher = new IndexSearcher(reader); + LuceneQueryExpressionEvaluator.ShardConfig shard = new LuceneQueryExpressionEvaluator.ShardConfig( + searcher.rewrite(query), + searcher + ); + LuceneQueryExpressionEvaluator luceneQueryEvaluator = new LuceneQueryExpressionEvaluator( + blockFactory, + new LuceneQueryExpressionEvaluator.ShardConfig[] { shard }, + 0 + ); + + List operators = new ArrayList<>(); + if (shuffleDocs) { + operators.add(new ShuffleDocsOperator(blockFactory)); + } + operators.add( + new ValuesSourceReaderOperator( + blockFactory, + List.of( + new ValuesSourceReaderOperator.FieldInfo( + FIELD, + ElementType.BYTES_REF, + unused -> new BlockDocValuesReader.BytesRefsFromOrdsBlockLoader(FIELD) + ) + ), + List.of(new ValuesSourceReaderOperator.ShardContext(reader, () -> { + throw new UnsupportedOperationException(); + })), + 0 + ) + ); + operators.add(new EvalOperator(blockFactory, luceneQueryEvaluator)); + List results = new ArrayList<>(); + Driver driver = new Driver( + driverContext, + luceneOperatorFactory(reader, new MatchAllDocsQuery(), 
LuceneOperator.NO_LIMIT).get(driverContext), + operators, + new TestResultPageSinkOperator(results::add), + () -> {} + ); + OperatorTestCase.runDriver(driver); + OperatorTests.assertDriverContext(driverContext); + return results; + }); + } + + private T withReader(Set values, CheckedFunction run) throws IOException { + try (BaseDirectoryWrapper dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), dir)) { + for (String value : values) { + writer.addDocument(List.of(new KeywordField(FIELD, value, Field.Store.NO))); + } + writer.commit(); + try (DirectoryReader reader = writer.getReader()) { + return run.apply(reader); + } + } + } + + private Set values() { + int maxNumDocs = between(10, 10_000); + int keyLength = randomIntBetween(1, 10); + Set values = new HashSet<>(); + for (int i = 0; i < maxNumDocs; i++) { + values.add(randomAlphaOfLength(keyLength)); + } + return values; + } + + /** + * A {@link DriverContext} with a non-breaking-BigArrays. + */ + private DriverContext driverContext() { + BlockFactory blockFactory = blockFactory(); + return new DriverContext(blockFactory.bigArrays(), blockFactory); + } + + static LuceneOperator.Factory luceneOperatorFactory(IndexReader reader, Query query, int limit) { + final ShardContext searchContext = new LuceneSourceOperatorTests.MockShardContext(reader, 0); + return new LuceneSourceOperator.Factory( + List.of(searchContext), + ctx -> query, + randomFrom(DataPartitioning.values()), + randomIntBetween(1, 10), + randomPageSize(), + limit + ); + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ShuffleDocsOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ShuffleDocsOperator.java new file mode 100644 index 0000000000000..955d0237c65f7 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ShuffleDocsOperator.java @@ -0,0 +1,74 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.operator; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.DocVector; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasables; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import static org.apache.lucene.tests.util.LuceneTestCase.random; + +public class ShuffleDocsOperator extends AbstractPageMappingOperator { + private final BlockFactory blockFactory; + + public ShuffleDocsOperator(BlockFactory blockFactory) { + this.blockFactory = blockFactory; + } + + @Override + protected Page process(Page page) { + DocVector docVector = (DocVector) page.getBlock(0).asVector(); + int positionCount = docVector.getPositionCount(); + IntVector shards = null; + IntVector segments = null; + IntVector docs = null; + try ( + IntVector.Builder shardsBuilder = blockFactory.newIntVectorBuilder(positionCount); + IntVector.Builder segmentsBuilder = blockFactory.newIntVectorBuilder(positionCount); + IntVector.Builder docsBuilder = blockFactory.newIntVectorBuilder(positionCount); + ) { + List docIds = new ArrayList<>(positionCount); + for (int i = 0; i < positionCount; i++) { + shardsBuilder.appendInt(docVector.shards().getInt(i)); + segmentsBuilder.appendInt(docVector.segments().getInt(i)); + docIds.add(docVector.docs().getInt(i)); + } + shards = shardsBuilder.build(); + segments = segmentsBuilder.build(); + Collections.shuffle(docIds, random()); + for (Integer d : docIds) { + docsBuilder.appendInt(d); + } + docs = docsBuilder.build(); + } finally { + if (docs == null) { + Releasables.closeExpectNoException(docVector, shards, segments); + } else { + Releasables.closeExpectNoException(docVector); + } + } + Block[] blocks = new Block[page.getBlockCount()]; + blocks[0] = new DocVector(shards, segments, docs, false).asBlock(); + for (int i = 1; i < blocks.length; i++) { + blocks[i] = page.getBlock(i); + } + return new Page(blocks); + } + + @Override + public String toString() { + return "ShuffleDocs"; + } +} From a7470c05b16f8c0c7b2f1b8d1d0ef82543a04954 Mon Sep 17 00:00:00 2001 From: Stef Nestor <26751266+stefnestor@users.noreply.github.com> Date: Thu, 25 Jul 2024 08:28:46 -0600 Subject: [PATCH 017/105] (Doc+) How to resolve shards >50GB (#111254) * (Doc+) How to resolve shards >50GB --------- Co-authored-by: Ievgen Degtiarenko --- .../how-to/size-your-shards.asciidoc | 30 +++++++++++++++++-- 1 file changed, 27 insertions(+), 3 deletions(-) diff --git a/docs/reference/how-to/size-your-shards.asciidoc b/docs/reference/how-to/size-your-shards.asciidoc index ba0c0ab8b0b15..36aba99adb8c8 100644 --- a/docs/reference/how-to/size-your-shards.asciidoc +++ b/docs/reference/how-to/size-your-shards.asciidoc @@ -152,9 +152,10 @@ same data. However, very large shards can also cause slower searches and will take longer to recover after a failure. There is no hard limit on the physical size of a shard, and each shard can in -theory contain up to just over two billion documents. However, experience shows -that shards between 10GB and 50GB typically work well for many use cases, as -long as the per-shard document count is kept below 200 million. +theory contain up to <>. However, experience shows that shards between 10GB and 50GB +typically work well for many use cases, as long as the per-shard document count +is kept below 200 million. 
You may be able to use larger shards depending on your network and use case, and smaller shards may be appropriate for @@ -184,6 +185,29 @@ index prirep shard store // TESTRESPONSE[s/\.ds-my-data-stream-2099\.05\.06-000001/my-index-000001/] // TESTRESPONSE[s/50gb/.*/] +If an index's shard is experiencing degraded performance from surpassing the +recommended 50GB size, you may consider resizing the index's shards. +A shard's size cannot be changed in place, +so the index must be copied with corrected settings. This requires first ensuring +sufficient disk space to copy the data. Afterwards, you can copy the index's data +with corrected settings via one of the following options: + +* running <> to increase the number of primary +shards + +* creating a destination index with corrected settings and then running +<> + +Note that performing a <> and/or +<> alone is insufficient to resolve shard +sizing. + +Once a source index's data is copied into its destination index, the source +index can be <>. You may then consider setting +<> against the destination index so that the source +index's name points to it for continuity. + + [discrete] [[shard-count-recommendation]] ==== Master-eligible nodes should have at least 1GB of heap per 3000 indices From f755e809e7b48546debdf3889583319ed4f2f87e Mon Sep 17 00:00:00 2001 From: Valeriy Khakhutskyy <1292899+valeriy42@users.noreply.github.com> Date: Thu, 25 Jul 2024 16:58:58 +0200 Subject: [PATCH 018/105] [ML] Add custom rule parameters to force time shift (#110974) This PR extends the schema of the detection rule API by adding a new parametrizable action `force_time_shift` and a new property `params` to hold the parameter `time_shift_amount` as a signed long in seconds to control the behavior of the time shift. A valid schema looks like this: ```http POST _ml/anomaly_detectors/my_job/_update { "detectors": { "detector_index": 0, "custom_rules": [ // update the detector with a custom rule that forces a time shift of 1 hour back starting now { "actions": [ "force_time_shift" ], "params": { "force_time_shift": { "time_shift_amount": 3600 } }, "conditions": [ { "applies_to": "time", "operator": "gt", "value": "now" }, { "applies_to": "time", "operator": "lt", "value": "now+bucket_span" } ] } ] }, ... ``` Execution of the detection rule action `force_time_shift` will shift the time inside the anomaly detector by a specified amount. This is useful, e.g., to quickly adapt to daylight saving time events that are known beforehand. This PR is accompanied by changes in Kibana https://github.com/elastic/kibana/pull/188710 and the native ml-cpp code https://github.com/elastic/ml-cpp/issues/2690. The integration tests between Java and C++ parts of the force-time shift action will be implemented in a separate PR.
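For reviewers who prefer the Java surface over the REST schema, the following is a minimal sketch of wiring the new action through the config classes this patch adds. The wrapper class, the condition, and the one-hour value are illustrative assumptions rather than code from this PR; the condition is included only because a detection rule must declare a scope or at least one condition.

```java
import org.elasticsearch.xpack.core.ml.job.config.DetectionRule;
import org.elasticsearch.xpack.core.ml.job.config.Operator;
import org.elasticsearch.xpack.core.ml.job.config.RuleAction;
import org.elasticsearch.xpack.core.ml.job.config.RuleCondition;
import org.elasticsearch.xpack.core.ml.job.config.RuleParams;
import org.elasticsearch.xpack.core.ml.job.config.RuleParamsForForceTimeShift;

import java.util.EnumSet;
import java.util.List;

// Hypothetical helper class, only to show the builder wiring added by this patch.
class ForceTimeShiftRuleSketch {
    static DetectionRule oneHourShiftRule() {
        // A rule needs a scope or at least one condition; this one applies to time.
        RuleCondition condition = new RuleCondition(RuleCondition.AppliesTo.TIME, Operator.GT, 0.0);
        return new DetectionRule.Builder(List.of(condition))
            // FORCE_TIME_SHIFT must be paired with matching params, see build() validation
            .setActions(EnumSet.of(RuleAction.FORCE_TIME_SHIFT))
            .setParams(new RuleParams(new RuleParamsForForceTimeShift(3600)))
            .build();
    }
}
```

Note that `build()` rejects `FORCE_TIME_SHIFT` without matching params, and params without the action, per the new validation in `DetectionRule.Builder` below.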
--- docs/changelog/110974.yaml | 5 + .../org/elasticsearch/TransportVersions.java | 1 + .../core/ml/job/config/DetectionRule.java | 47 +++++++- .../xpack/core/ml/job/config/RuleAction.java | 3 +- .../xpack/core/ml/job/config/RuleParams.java | 102 ++++++++++++++++++ .../config/RuleParamsForForceTimeShift.java | 78 ++++++++++++++ .../xpack/core/ml/job/messages/Messages.java | 4 + .../ml/job/config/DetectionRuleTests.java | 30 +++++- .../resources/ml/config_index_mappings.json | 15 +++ 9 files changed, 277 insertions(+), 8 deletions(-) create mode 100644 docs/changelog/110974.yaml create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/RuleParams.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/RuleParamsForForceTimeShift.java diff --git a/docs/changelog/110974.yaml b/docs/changelog/110974.yaml new file mode 100644 index 0000000000000..c9e8c9b78675e --- /dev/null +++ b/docs/changelog/110974.yaml @@ -0,0 +1,5 @@ +pr: 110974 +summary: Add custom rule parameters to force time shift +area: Machine Learning +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 4889709b89259..34324ec2a1c16 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -178,6 +178,7 @@ static TransportVersion def(int id) { public static final TransportVersion NODES_STATS_ENUM_SET = def(8_709_00_0); public static final TransportVersion MASTER_NODE_METRICS = def(8_710_00_0); public static final TransportVersion SEGMENT_LEVEL_FIELDS_STATS = def(8_711_00_0); + public static final TransportVersion ML_ADD_DETECTION_RULE_PARAMS = def(8_712_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/DetectionRule.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/DetectionRule.java index ff2921d755b40..eb952a7dc7e5c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/DetectionRule.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/DetectionRule.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.core.ml.job.config; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -30,6 +31,7 @@ public class DetectionRule implements ToXContentObject, Writeable { public static final ParseField ACTIONS_FIELD = new ParseField("actions"); public static final ParseField SCOPE_FIELD = new ParseField("scope"); public static final ParseField CONDITIONS_FIELD = new ParseField("conditions"); + public static final ParseField PARAMS_FIELD = new ParseField("params"); // These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly public static final ObjectParser LENIENT_PARSER = createParser(true); @@ -45,6 +47,7 @@ private static ObjectParser createParser(boolean ignoreUnknownFie ignoreUnknownFields ? RuleCondition.LENIENT_PARSER : RuleCondition.STRICT_PARSER, CONDITIONS_FIELD ); + parser.declareObject(Builder::setParams, ignoreUnknownFields ? 
RuleParams.LENIENT_PARSER : RuleParams.STRICT_PARSER, PARAMS_FIELD); return parser; } @@ -52,17 +55,24 @@ private static ObjectParser createParser(boolean ignoreUnknownFie private final EnumSet actions; private final RuleScope scope; private final List conditions; + private final RuleParams params; - private DetectionRule(EnumSet actions, RuleScope scope, List conditions) { + private DetectionRule(EnumSet actions, RuleScope scope, List conditions, RuleParams params) { this.actions = Objects.requireNonNull(actions); this.scope = Objects.requireNonNull(scope); this.conditions = Collections.unmodifiableList(conditions); + this.params = params; } public DetectionRule(StreamInput in) throws IOException { actions = in.readEnumSet(RuleAction.class); scope = new RuleScope(in); conditions = in.readCollectionAsList(RuleCondition::new); + if (in.getTransportVersion().onOrAfter(TransportVersions.ML_ADD_DETECTION_RULE_PARAMS)) { + params = new RuleParams(in); + } else { + params = new RuleParams(); + } } @Override @@ -70,6 +80,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeEnumSet(actions); scope.writeTo(out); out.writeCollection(conditions); + if (out.getTransportVersion().onOrAfter(TransportVersions.ML_ADD_DETECTION_RULE_PARAMS)) { + params.writeTo(out); + } } @Override @@ -82,6 +95,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (conditions.isEmpty() == false) { builder.field(CONDITIONS_FIELD.getPreferredName(), conditions); } + if (this.params.isEmpty() == false) { + builder.field(PARAMS_FIELD.getPreferredName(), this.params); + } builder.endObject(); return builder; } @@ -98,6 +114,10 @@ public List getConditions() { return conditions; } + public RuleParams getParams() { + return params; + } + public Set extractReferencedFilters() { return scope.getReferencedFilters(); } @@ -113,18 +133,22 @@ public boolean equals(Object obj) { } DetectionRule other = (DetectionRule) obj; - return Objects.equals(actions, other.actions) && Objects.equals(scope, other.scope) && Objects.equals(conditions, other.conditions); + return Objects.equals(actions, other.actions) + && Objects.equals(scope, other.scope) + && Objects.equals(conditions, other.conditions) + && Objects.equals(params, other.params); } @Override public int hashCode() { - return Objects.hash(actions, scope, conditions); + return Objects.hash(actions, scope, conditions, params); } public static class Builder { private EnumSet actions = EnumSet.of(RuleAction.SKIP_RESULT); private RuleScope scope = new RuleScope(); private List conditions = Collections.emptyList(); + private RuleParams params = new RuleParams(); public Builder(RuleScope.Builder scope) { this.scope = scope.build(); @@ -163,12 +187,27 @@ public Builder setConditions(List conditions) { return this; } + public Builder setParams(RuleParams params) { + this.params = params; + return this; + } + public DetectionRule build() { if (scope.isEmpty() && conditions.isEmpty()) { String msg = Messages.getMessage(Messages.JOB_CONFIG_DETECTION_RULE_REQUIRES_SCOPE_OR_CONDITION); throw ExceptionsHelper.badRequestException(msg); } - return new DetectionRule(actions, scope, conditions); + // if actions contain FORCE_TIME_SHIFT, then params must contain RuleParamsForForceTimeShift + if (actions.contains(RuleAction.FORCE_TIME_SHIFT) && params.getForceTimeShift() == null) { + String msg = Messages.getMessage(Messages.JOB_CONFIG_DETECTION_RULE_REQUIRES_FORCE_TIME_SHIFT_PARAMS); + throw ExceptionsHelper.badRequestException(msg); + } + // Return 
error if params must contain RuleParamsForForceTimeShift, but actions do not contain FORCE_TIME_SHIFT + if (actions.contains(RuleAction.FORCE_TIME_SHIFT) == false && params.getForceTimeShift() != null) { + String msg = Messages.getMessage(Messages.JOB_CONFIG_DETECTION_RULE_PARAMS_FORCE_TIME_SHIFT_NOT_REQUIRED); + throw ExceptionsHelper.badRequestException(msg); + } + return new DetectionRule(actions, scope, conditions, params); } } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/RuleAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/RuleAction.java index cce1d8a2f87ee..933a68ca287a2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/RuleAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/RuleAction.java @@ -15,7 +15,8 @@ public enum RuleAction implements Writeable { SKIP_RESULT, - SKIP_MODEL_UPDATE; + SKIP_MODEL_UPDATE, + FORCE_TIME_SHIFT; /** * Case-insensitive from string method. diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/RuleParams.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/RuleParams.java new file mode 100644 index 0000000000000..fc9126de9944f --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/RuleParams.java @@ -0,0 +1,102 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.ml.job.config; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Objects; + +public class RuleParams implements ToXContentObject, Writeable { + + public static final ParseField RULE_PARAMS_FIELD = new ParseField("params"); + public static final ParseField FORCE_TIME_SHIFT_FIELD = new ParseField("force_time_shift"); + + // These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly + public static final ConstructingObjectParser LENIENT_PARSER = createParser(true); + public static final ConstructingObjectParser STRICT_PARSER = createParser(false); + + public static ConstructingObjectParser createParser(boolean ignoreUnknownFields) { + ConstructingObjectParser parser = new ConstructingObjectParser<>( + RULE_PARAMS_FIELD.getPreferredName(), + ignoreUnknownFields, + a -> new RuleParams((RuleParamsForForceTimeShift) a[0]) + ); + + parser.declareObject( + ConstructingObjectParser.optionalConstructorArg(), + RuleParamsForForceTimeShift.LENIENT_PARSER, + FORCE_TIME_SHIFT_FIELD + ); + return parser; + } + + private final RuleParamsForForceTimeShift forceTimeShift; + + public RuleParams() { + this.forceTimeShift = null; + } + + public RuleParams(RuleParamsForForceTimeShift forceTimeShift) { + this.forceTimeShift = forceTimeShift; + } + + public RuleParams(StreamInput in) throws IOException { + // initialize optional forceTimeShift from in + 
forceTimeShift = in.readOptionalWriteable(RuleParamsForForceTimeShift::new); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + // write optional forceTimeShift to out + out.writeOptionalWriteable(forceTimeShift); + } + + boolean isEmpty() { + return forceTimeShift == null; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (forceTimeShift != null) { + builder.field(FORCE_TIME_SHIFT_FIELD.getPreferredName(), forceTimeShift); + } + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj instanceof RuleParams == false) { + return false; + } + + RuleParams other = (RuleParams) obj; + return Objects.equals(forceTimeShift, other.forceTimeShift); + } + + @Override + public int hashCode() { + return Objects.hash(forceTimeShift); + } + + public RuleParamsForForceTimeShift getForceTimeShift() { + return forceTimeShift; + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/RuleParamsForForceTimeShift.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/RuleParamsForForceTimeShift.java new file mode 100644 index 0000000000000..02c762196d8ce --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/RuleParamsForForceTimeShift.java @@ -0,0 +1,78 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.ml.job.config; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; + +public class RuleParamsForForceTimeShift implements ToXContentObject, Writeable { + public static final ParseField TYPE_FIELD = new ParseField("force_time_shift_params"); + public static final ParseField TIME_SHIFT_AMOUNT_FIELD = new ParseField("time_shift_amount"); + + public static final ConstructingObjectParser LENIENT_PARSER = createParser(true); + public static final ConstructingObjectParser STRICT_PARSER = createParser(false); + + private static ConstructingObjectParser createParser(boolean ignoreUnknownFields) { + ConstructingObjectParser parser = new ConstructingObjectParser<>( + TYPE_FIELD.getPreferredName(), + ignoreUnknownFields, + a -> new RuleParamsForForceTimeShift((Long) a[0]) + ); + parser.declareLong(ConstructingObjectParser.constructorArg(), TIME_SHIFT_AMOUNT_FIELD); + return parser; + } + + private final long timeShiftAmount; + + public RuleParamsForForceTimeShift(long timeShiftAmount) { + this.timeShiftAmount = timeShiftAmount; + } + + public RuleParamsForForceTimeShift(StreamInput in) throws IOException { + timeShiftAmount = in.readLong(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeLong(timeShiftAmount); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + 
builder.field(TIME_SHIFT_AMOUNT_FIELD.getPreferredName(), timeShiftAmount); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj instanceof RuleParamsForForceTimeShift == false) { + return false; + } + + RuleParamsForForceTimeShift other = (RuleParamsForForceTimeShift) obj; + return timeShiftAmount == other.timeShiftAmount; + } + + @Override + public int hashCode() { + return Long.hashCode(timeShiftAmount); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java index 52c97ece1b017..6ebed55451ae7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java @@ -204,6 +204,10 @@ public final class Messages { "Invalid detector rule: function {0} only supports conditions that apply to time"; public static final String JOB_CONFIG_DETECTION_RULE_REQUIRES_SCOPE_OR_CONDITION = "Invalid detector rule: at least scope or a condition is required"; + public static final String JOB_CONFIG_DETECTION_RULE_REQUIRES_FORCE_TIME_SHIFT_PARAMS = + "Invalid detector rule: actions contain force_time_shift, but corresponding parameters are missing"; + public static final String JOB_CONFIG_DETECTION_RULE_PARAMS_FORCE_TIME_SHIFT_NOT_REQUIRED = + "Invalid detector rule: actions do not contain force_time_shift, but corresponding parameters are present"; public static final String JOB_CONFIG_DETECTION_RULE_SCOPE_NO_AVAILABLE_FIELDS = "Invalid detector rule: scope field ''{0}'' is invalid; detector has no available fields for scoping"; public static final String JOB_CONFIG_DETECTION_RULE_SCOPE_HAS_INVALID_FIELD = diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/DetectionRuleTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/DetectionRuleTests.java index d78d929fd580f..d716f34f86e6c 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/DetectionRuleTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/DetectionRuleTests.java @@ -38,7 +38,7 @@ public void testExtractReferencedLists() { @Override protected DetectionRule createTestInstance() { DetectionRule.Builder builder = new DetectionRule.Builder(); - + boolean hasForceTimeShiftAction = false; if (randomBoolean()) { EnumSet actions = EnumSet.noneOf(RuleAction.class); int actionsCount = randomIntBetween(1, RuleAction.values().length); @@ -46,10 +46,12 @@ protected DetectionRule createTestInstance() { actions.add(randomFrom(RuleAction.values())); } builder.setActions(actions); + hasForceTimeShiftAction = actions.contains(RuleAction.FORCE_TIME_SHIFT); } boolean hasScope = randomBoolean(); boolean hasConditions = randomBoolean(); + boolean hasParams = randomBoolean() || hasForceTimeShiftAction; if (hasScope == false && hasConditions == false) { // at least one of the two should be present @@ -79,6 +81,15 @@ protected DetectionRule createTestInstance() { builder.setConditions(ruleConditions); } + if (hasParams) { + if (hasForceTimeShiftAction) { + long timeShiftAmount = randomLong(); + builder.setParams(new RuleParams(new RuleParamsForForceTimeShift(timeShiftAmount))); + } else { + builder.setParams(new RuleParams()); + } + } + return 
builder.build(); } @@ -97,8 +108,9 @@ protected DetectionRule mutateInstance(DetectionRule instance) { List conditions = instance.getConditions(); RuleScope scope = instance.getScope(); EnumSet actions = instance.getActions(); + RuleParams params = instance.getParams(); - switch (between(0, 2)) { + switch (between(0, 3)) { case 0: if (actions.size() == RuleAction.values().length) { actions = EnumSet.of(randomFrom(RuleAction.values())); @@ -113,11 +125,23 @@ protected DetectionRule mutateInstance(DetectionRule instance) { case 2: scope = new RuleScope.Builder(scope).include("another_field", "another_filter").build(); break; + case 3: + if (params.getForceTimeShift() != null) { + params = new RuleParams(new RuleParamsForForceTimeShift(randomLong())); + } else { + params = new RuleParams(new RuleParamsForForceTimeShift(randomLong())); + actions.add(RuleAction.FORCE_TIME_SHIFT); + } + break; default: throw new AssertionError("Illegal randomisation branch"); } - return new DetectionRule.Builder(conditions).setActions(actions).setScope(scope).build(); + if (actions.contains(RuleAction.FORCE_TIME_SHIFT) && params.getForceTimeShift() == null) { + params = new RuleParams(new RuleParamsForForceTimeShift(randomLong())); + } + + return new DetectionRule.Builder(conditions).setActions(actions).setScope(scope).setParams(params).build(); } private static List createCondition(double value) { diff --git a/x-pack/plugin/core/template-resources/src/main/resources/ml/config_index_mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/ml/config_index_mappings.json index 811810f5d0f2f..80585bea8c2b8 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/ml/config_index_mappings.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/ml/config_index_mappings.json @@ -225,6 +225,21 @@ "scope" : { "type" : "object", "enabled" : false + }, + "params": { + "type": "object", + "enabled": false, + "properties": { + "force_time_shift": { + "type": "object", + "enabled": false, + "properties": { + "time_shift_amount": { + "type": "long" + } + } + } + } } } }, From 05060f8413f32f4a74f09e945ef68d133e3ec437 Mon Sep 17 00:00:00 2001 From: Stef Nestor <26751266+stefnestor@users.noreply.github.com> Date: Thu, 25 Jul 2024 09:10:19 -0600 Subject: [PATCH 019/105] (Doc+) Link Gateway Settings to Full Restart (#110902) * (Doc+) Link Gateway Settings to Full Restart --------- Co-authored-by: shainaraskas <58563081+shainaraskas@users.noreply.github.com> --- docs/reference/modules/gateway.asciidoc | 6 +++--- docs/reference/setup/restart-cluster.asciidoc | 2 +- docs/reference/upgrade/disable-shard-alloc.asciidoc | 4 ++++ 3 files changed, 8 insertions(+), 4 deletions(-) diff --git a/docs/reference/modules/gateway.asciidoc b/docs/reference/modules/gateway.asciidoc index d6ee730d5021c..bf7e6de64f093 100644 --- a/docs/reference/modules/gateway.asciidoc +++ b/docs/reference/modules/gateway.asciidoc @@ -4,11 +4,11 @@ The local gateway stores the cluster state and shard data across full cluster restarts. -The following _static_ settings, which must be set on every master node, +The following _static_ settings, which must be set on every <>, control how long a freshly elected master should wait before it tries to -recover the cluster state and the cluster's data. +recover the <> and the cluster's data. -NOTE: These settings only take effect on a full cluster restart. +NOTE: These settings only take effect during a <>. 
`gateway.expected_data_nodes`:: (<>) diff --git a/docs/reference/setup/restart-cluster.asciidoc b/docs/reference/setup/restart-cluster.asciidoc index 9488c6632836b..a3bf7723cb5a9 100644 --- a/docs/reference/setup/restart-cluster.asciidoc +++ b/docs/reference/setup/restart-cluster.asciidoc @@ -11,7 +11,7 @@ time, so the service remains uninterrupted. [WARNING] ==== Nodes exceeding the low watermark threshold will be slow to restart. Reduce the disk -usage below the <> before to restarting nodes. +usage below the <> before restarting nodes. ==== [discrete] diff --git a/docs/reference/upgrade/disable-shard-alloc.asciidoc b/docs/reference/upgrade/disable-shard-alloc.asciidoc index a93b6dfc6c60b..f69a673095257 100644 --- a/docs/reference/upgrade/disable-shard-alloc.asciidoc +++ b/docs/reference/upgrade/disable-shard-alloc.asciidoc @@ -17,3 +17,7 @@ PUT _cluster/settings } -------------------------------------------------- // TEST[skip:indexes don't assign] + +You can also consider <> when restarting +large clusters to reduce initial strain while nodes are processing +<>. \ No newline at end of file From 86c41c6670d72986ac07116bf911e8c274b10a6b Mon Sep 17 00:00:00 2001 From: Parker Timmins Date: Thu, 25 Jul 2024 11:00:45 -0500 Subject: [PATCH 020/105] Make SnapshotLifecycleStats immutable (#111215) Make SnapshotLifecycleStats immutable so SnapshotLifecycleMetadata.EMPTY isn't changed as a side effect. Previously, when SnapshotLifecycleStats was updated, the value in cluster state was mutated and re-stored in the cluster state. #111079 started using a default value of SnapshotLifecycleMetadata.EMPTY rather than checking whether SnapshotLifecycleMetadata was present in the cluster state. This resulted in the same SnapshotLifecycleStats object, the one in EMPTY, being stored for every new policy. Sharing that object was expected, but because stats objects were mutated during updates, the stats object in EMPTY itself was mutated. This caused all new policies to share a single stats object with combined stat values. To fix this issue, SnapshotLifecycleStats is made immutable and all operations now create a new stats object.
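Before the diff itself, here is a hedged sketch of the copy-on-write pattern the change adopts; the policy id is an illustrative assumption, while the method names come from the new API in the diff below. Call sites now rebind to the returned copy instead of mutating shared state:

```java
// Before this patch (mutating): counters were incremented in place, so the
// instance shared through SnapshotLifecycleMetadata.EMPTY drifted over time.
//   stats.snapshotTaken(policyId);

// After this patch (immutable): every update returns a fresh instance, so the
// stats object held by EMPTY is never modified.
SnapshotLifecycleStats stats = new SnapshotLifecycleStats();
stats = stats.withTakenIncremented("daily-snapshots"); // per-policy taken count +1
stats = stats.withRetentionRunIncremented();           // global retention run count +1
```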
--- docs/changelog/111215.yaml | 6 + .../core/slm/SnapshotLifecycleStats.java | 233 ++++++++---------- .../GetSnapshotLifecycleStatsAction.java | 4 + .../core/slm/SnapshotLifecycleStatsTests.java | 73 +++++- .../xpack/slm/SLMStatsImmutableIT.java | 113 +++++++++ .../xpack/slm/SnapshotLifecycleTask.java | 7 +- .../xpack/slm/SnapshotRetentionTask.java | 25 +- .../xpack/slm/SnapshotRetentionTaskTests.java | 35 ++- 8 files changed, 338 insertions(+), 158 deletions(-) create mode 100644 docs/changelog/111215.yaml create mode 100644 x-pack/plugin/slm/src/internalClusterTest/java/org/elasticsearch/xpack/slm/SLMStatsImmutableIT.java diff --git a/docs/changelog/111215.yaml b/docs/changelog/111215.yaml new file mode 100644 index 0000000000000..dc044c2283fc4 --- /dev/null +++ b/docs/changelog/111215.yaml @@ -0,0 +1,6 @@ +pr: 111215 +summary: Make `SnapshotLifecycleStats` immutable so `SnapshotLifecycleMetadata.EMPTY` + isn't changed as side-effect +area: ILM+SLM +type: bug +issues: [] diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SnapshotLifecycleStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SnapshotLifecycleStats.java index d1b71a92c061d..427f1e58706eb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SnapshotLifecycleStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SnapshotLifecycleStats.java @@ -11,7 +11,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.metrics.CounterMetric; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; @@ -27,7 +26,6 @@ import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.concurrent.ConcurrentHashMap; import java.util.function.Function; import java.util.stream.Collectors; @@ -38,12 +36,11 @@ */ public class SnapshotLifecycleStats implements Writeable, ToXContentObject { - private final CounterMetric retentionRunCount = new CounterMetric(); - private final CounterMetric retentionFailedCount = new CounterMetric(); - private final CounterMetric retentionTimedOut = new CounterMetric(); - private final CounterMetric retentionTimeMs = new CounterMetric(); + private final long retentionRun; + private final long retentionFailed; + private final long retentionTimedOut; + private final long retentionTimeMs; private final Map policyStats; - public static final ParseField RETENTION_RUNS = new ParseField("retention_runs"); public static final ParseField RETENTION_FAILED = new ParseField("retention_failed"); public static final ParseField RETENTION_TIMED_OUT = new ParseField("retention_timed_out"); @@ -66,7 +63,7 @@ public class SnapshotLifecycleStats implements Writeable, ToXContentObject { long timeMs = (long) a[3]; Map policyStatsMap = ((List) a[4]).stream() .collect(Collectors.toMap(m -> m.policyId, Function.identity())); - return new SnapshotLifecycleStats(runs, failed, timedOut, timeMs, policyStatsMap); + return new SnapshotLifecycleStats(runs, failed, timedOut, timeMs, Collections.unmodifiableMap(policyStatsMap)); } ); @@ -79,7 +76,7 @@ public class SnapshotLifecycleStats implements Writeable, ToXContentObject { } public SnapshotLifecycleStats() { - this.policyStats = new ConcurrentHashMap<>(); + this(0, 0, 0, 0, Map.of()); } // public for testing @@ -90,19 +87,23 @@ public 
SnapshotLifecycleStats( long retentionTimeMs, Map policyStats ) { - this.retentionRunCount.inc(retentionRuns); - this.retentionFailedCount.inc(retentionFailed); - this.retentionTimedOut.inc(retentionTimedOut); - this.retentionTimeMs.inc(retentionTimeMs); - this.policyStats = policyStats; + this.retentionRun = retentionRuns; + this.retentionFailed = retentionFailed; + this.retentionTimedOut = retentionTimedOut; + this.retentionTimeMs = retentionTimeMs; + this.policyStats = Collections.unmodifiableMap(policyStats); + } + + private SnapshotLifecycleStats(Map policyStats) { + this(0, 0, 0, 0, policyStats); } public SnapshotLifecycleStats(StreamInput in) throws IOException { - this.policyStats = new ConcurrentHashMap<>(in.readMap(SnapshotPolicyStats::new)); - this.retentionRunCount.inc(in.readVLong()); - this.retentionFailedCount.inc(in.readVLong()); - this.retentionTimedOut.inc(in.readVLong()); - this.retentionTimeMs.inc(in.readVLong()); + this.policyStats = in.readImmutableMap(SnapshotPolicyStats::new); + this.retentionRun = in.readVLong(); + this.retentionFailed = in.readVLong(); + this.retentionTimedOut = in.readVLong(); + this.retentionTimeMs = in.readVLong(); } public static SnapshotLifecycleStats parse(XContentParser parser) { @@ -110,7 +111,6 @@ public static SnapshotLifecycleStats parse(XContentParser parser) { } public SnapshotLifecycleStats merge(SnapshotLifecycleStats other) { - HashMap newPolicyStats = new HashMap<>(this.policyStats); // Merges the per-run stats (the stats in "other") with the stats already present other.policyStats.forEach((policyId, perRunPolicyStats) -> { @@ -124,11 +124,11 @@ public SnapshotLifecycleStats merge(SnapshotLifecycleStats other) { }); return new SnapshotLifecycleStats( - this.retentionRunCount.count() + other.retentionRunCount.count(), - this.retentionFailedCount.count() + other.retentionFailedCount.count(), - this.retentionTimedOut.count() + other.retentionTimedOut.count(), - this.retentionTimeMs.count() + other.retentionTimeMs.count(), - newPolicyStats + this.retentionRun + other.retentionRun, + this.retentionFailed + other.retentionFailed, + this.retentionTimedOut + other.retentionTimedOut, + this.retentionTimeMs + other.retentionTimeMs, + Collections.unmodifiableMap(newPolicyStats) ); } @@ -136,11 +136,11 @@ public SnapshotLifecycleStats removePolicy(String policyId) { Map policyStatsCopy = new HashMap<>(this.policyStats); policyStatsCopy.remove(policyId); return new SnapshotLifecycleStats( - this.retentionRunCount.count(), - this.retentionFailedCount.count(), - this.retentionTimedOut.count(), - this.retentionTimeMs.count(), - policyStatsCopy + this.retentionRun, + this.retentionFailed, + this.retentionTimedOut, + this.retentionTimeMs, + Collections.unmodifiableMap(policyStatsCopy) ); } @@ -148,82 +148,83 @@ public SnapshotLifecycleStats removePolicy(String policyId) { * @return a map of per-policy stats for each SLM policy */ public Map getMetrics() { - return Collections.unmodifiableMap(this.policyStats); + return this.policyStats; } /** - * Increment the number of times SLM retention has been run + * Return new stats with number of times SLM retention has been run incremented */ - public void retentionRun() { - this.retentionRunCount.inc(); + public SnapshotLifecycleStats withRetentionRunIncremented() { + return new SnapshotLifecycleStats(retentionRun + 1, retentionFailed, retentionTimedOut, retentionTimeMs, policyStats); } /** - * Increment the number of times SLM retention has failed + * Return new stats with number of times SLM retention 
has failed incremented */ - public void retentionFailed() { - this.retentionFailedCount.inc(); + public SnapshotLifecycleStats withRetentionFailedIncremented() { + return new SnapshotLifecycleStats(retentionRun, retentionFailed + 1, retentionTimedOut, retentionTimeMs, policyStats); } /** - * Increment the number of times that SLM retention timed out due to the max delete time - * window being exceeded. + * Return new stats with the number of times that SLM retention timed out due to the max delete time + * window being exceeded incremented */ - public void retentionTimedOut() { - this.retentionTimedOut.inc(); + public SnapshotLifecycleStats withRetentionTimedOutIncremented() { + return new SnapshotLifecycleStats(retentionRun, retentionFailed, retentionTimedOut + 1, retentionTimeMs, policyStats); } /** - * Register the amount of time taken for deleting snapshots during SLM retention + * Return new stats with the amount of time taken for deleting snapshots during SLM retention updated */ - public void deletionTime(TimeValue elapsedTime) { - this.retentionTimeMs.inc(elapsedTime.millis()); + public SnapshotLifecycleStats withDeletionTimeUpdated(TimeValue elapsedTime) { + final long newRetentionTimeMs = retentionTimeMs + elapsedTime.millis(); + return new SnapshotLifecycleStats(retentionRun, retentionFailed, retentionTimedOut, newRetentionTimeMs, policyStats); } /** - * Increment the per-policy snapshot taken count for the given policy id + * Return new stats with the per-policy snapshot taken count for the given policy id incremented */ - public void snapshotTaken(String slmPolicy) { - this.policyStats.computeIfAbsent(slmPolicy, SnapshotPolicyStats::new).snapshotTaken(); + public SnapshotLifecycleStats withTakenIncremented(String slmPolicy) { + return merge(new SnapshotLifecycleStats(Map.of(slmPolicy, SnapshotPolicyStats.taken(slmPolicy)))); } /** - * Increment the per-policy snapshot failure count for the given policy id + * Return new stats with the per-policy snapshot failure count for the given policy id incremented */ - public void snapshotFailed(String slmPolicy) { - this.policyStats.computeIfAbsent(slmPolicy, SnapshotPolicyStats::new).snapshotFailed(); + public SnapshotLifecycleStats withFailedIncremented(String slmPolicy) { + return merge(new SnapshotLifecycleStats(Map.of(slmPolicy, SnapshotPolicyStats.failed(slmPolicy)))); } /** - * Increment the per-policy snapshot deleted count for the given policy id + * Return new stats with the per-policy snapshot deleted count for the given policy id incremented */ - public void snapshotDeleted(String slmPolicy) { - this.policyStats.computeIfAbsent(slmPolicy, SnapshotPolicyStats::new).snapshotDeleted(); + public SnapshotLifecycleStats withDeletedIncremented(String slmPolicy) { + return merge(new SnapshotLifecycleStats(Map.of(slmPolicy, SnapshotPolicyStats.deleted(slmPolicy)))); } /** - * Increment the per-policy snapshot deletion failure count for the given policy id + * Return new stats with the per-policy snapshot deletion failure count for the given policy id incremented */ - public void snapshotDeleteFailure(String slmPolicy) { - this.policyStats.computeIfAbsent(slmPolicy, SnapshotPolicyStats::new).snapshotDeleteFailure(); + public SnapshotLifecycleStats withDeleteFailureIncremented(String slmPolicy) { + return merge(new SnapshotLifecycleStats(Map.of(slmPolicy, SnapshotPolicyStats.deleteFailure(slmPolicy)))); } @Override public void writeTo(StreamOutput out) throws IOException { out.writeMap(policyStats, StreamOutput::writeWriteable); -
out.writeVLong(retentionRunCount.count()); - out.writeVLong(retentionFailedCount.count()); - out.writeVLong(retentionTimedOut.count()); - out.writeVLong(retentionTimeMs.count()); + out.writeVLong(retentionRun); + out.writeVLong(retentionFailed); + out.writeVLong(retentionTimedOut); + out.writeVLong(retentionTimeMs); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(RETENTION_RUNS.getPreferredName(), this.retentionRunCount.count()); - builder.field(RETENTION_FAILED.getPreferredName(), this.retentionFailedCount.count()); - builder.field(RETENTION_TIMED_OUT.getPreferredName(), this.retentionTimedOut.count()); - TimeValue retentionTime = TimeValue.timeValueMillis(this.retentionTimeMs.count()); + builder.field(RETENTION_RUNS.getPreferredName(), this.retentionRun); + builder.field(RETENTION_FAILED.getPreferredName(), this.retentionFailed); + builder.field(RETENTION_TIMED_OUT.getPreferredName(), this.retentionTimedOut); + TimeValue retentionTime = TimeValue.timeValueMillis(this.retentionTimeMs); builder.field(RETENTION_TIME.getPreferredName(), retentionTime); builder.field(RETENTION_TIME_MILLIS.getPreferredName(), retentionTime.millis()); @@ -231,10 +232,10 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws .stream() .sorted(Comparator.comparing(SnapshotPolicyStats::getPolicyId)) // maintain a consistent order when serializing .toList(); - long totalTaken = metrics.stream().mapToLong(s -> s.snapshotsTaken.count()).sum(); - long totalFailed = metrics.stream().mapToLong(s -> s.snapshotsFailed.count()).sum(); - long totalDeleted = metrics.stream().mapToLong(s -> s.snapshotsDeleted.count()).sum(); - long totalDeleteFailures = metrics.stream().mapToLong(s -> s.snapshotDeleteFailures.count()).sum(); + long totalTaken = metrics.stream().mapToLong(s -> s.snapshotsTaken).sum(); + long totalFailed = metrics.stream().mapToLong(s -> s.snapshotsFailed).sum(); + long totalDeleted = metrics.stream().mapToLong(s -> s.snapshotsDeleted).sum(); + long totalDeleteFailures = metrics.stream().mapToLong(s -> s.snapshotDeleteFailures).sum(); builder.field(TOTAL_TAKEN.getPreferredName(), totalTaken); builder.field(TOTAL_FAILED.getPreferredName(), totalFailed); builder.field(TOTAL_DELETIONS.getPreferredName(), totalDeleted); @@ -253,13 +254,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws @Override public int hashCode() { - return Objects.hash( - retentionRunCount.count(), - retentionFailedCount.count(), - retentionTimedOut.count(), - retentionTimeMs.count(), - policyStats - ); + return Objects.hash(retentionRun, retentionFailed, retentionTimedOut, retentionTimeMs, policyStats); } @Override @@ -271,10 +266,10 @@ public boolean equals(Object obj) { return false; } SnapshotLifecycleStats other = (SnapshotLifecycleStats) obj; - return Objects.equals(retentionRunCount.count(), other.retentionRunCount.count()) - && Objects.equals(retentionFailedCount.count(), other.retentionFailedCount.count()) - && Objects.equals(retentionTimedOut.count(), other.retentionTimedOut.count()) - && Objects.equals(retentionTimeMs.count(), other.retentionTimeMs.count()) + return Objects.equals(retentionRun, other.retentionRun) + && Objects.equals(retentionFailed, other.retentionFailed) + && Objects.equals(retentionTimedOut, other.retentionTimedOut) + && Objects.equals(retentionTimeMs, other.retentionTimeMs) && Objects.equals(policyStats, other.policyStats); } @@ -285,10 +280,10 @@ 
public String toString() { public static class SnapshotPolicyStats implements Writeable, ToXContentFragment { private final String policyId; - private final CounterMetric snapshotsTaken = new CounterMetric(); - private final CounterMetric snapshotsFailed = new CounterMetric(); - private final CounterMetric snapshotsDeleted = new CounterMetric(); - private final CounterMetric snapshotDeleteFailures = new CounterMetric(); + private final long snapshotsTaken; + private final long snapshotsFailed; + private final long snapshotsDeleted; + private final long snapshotDeleteFailures; public static final ParseField POLICY_ID = new ParseField("policy"); public static final ParseField SNAPSHOTS_TAKEN = new ParseField("snapshots_taken"); @@ -318,49 +313,45 @@ public static class SnapshotPolicyStats implements Writeable, ToXContentFragment } public SnapshotPolicyStats(String slmPolicy) { - this.policyId = slmPolicy; + this(slmPolicy, 0, 0, 0, 0); } public SnapshotPolicyStats(String policyId, long snapshotsTaken, long snapshotsFailed, long deleted, long failedDeletes) { this.policyId = policyId; - this.snapshotsTaken.inc(snapshotsTaken); - this.snapshotsFailed.inc(snapshotsFailed); - this.snapshotsDeleted.inc(deleted); - this.snapshotDeleteFailures.inc(failedDeletes); + this.snapshotsTaken = snapshotsTaken; + this.snapshotsFailed = snapshotsFailed; + this.snapshotsDeleted = deleted; + this.snapshotDeleteFailures = failedDeletes; } public SnapshotPolicyStats(StreamInput in) throws IOException { - this.policyId = in.readString(); - this.snapshotsTaken.inc(in.readVLong()); - this.snapshotsFailed.inc(in.readVLong()); - this.snapshotsDeleted.inc(in.readVLong()); - this.snapshotDeleteFailures.inc(in.readVLong()); + this(in.readString(), in.readVLong(), in.readVLong(), in.readVLong(), in.readVLong()); } public SnapshotPolicyStats merge(SnapshotPolicyStats other) { return new SnapshotPolicyStats( this.policyId, - this.snapshotsTaken.count() + other.snapshotsTaken.count(), - this.snapshotsFailed.count() + other.snapshotsFailed.count(), - this.snapshotsDeleted.count() + other.snapshotsDeleted.count(), - this.snapshotDeleteFailures.count() + other.snapshotDeleteFailures.count() + this.snapshotsTaken + other.snapshotsTaken, + this.snapshotsFailed + other.snapshotsFailed, + this.snapshotsDeleted + other.snapshotsDeleted, + this.snapshotDeleteFailures + other.snapshotDeleteFailures ); } - void snapshotTaken() { - snapshotsTaken.inc(); + private static SnapshotPolicyStats taken(String policyId) { + return new SnapshotPolicyStats(policyId, 1, 0, 0, 0); } - void snapshotFailed() { - snapshotsFailed.inc(); + private static SnapshotPolicyStats failed(String policyId) { + return new SnapshotPolicyStats(policyId, 0, 1, 0, 0); } - void snapshotDeleted() { - snapshotsDeleted.inc(); + private static SnapshotPolicyStats deleted(String policyId) { + return new SnapshotPolicyStats(policyId, 0, 0, 1, 0); } - void snapshotDeleteFailure() { - snapshotDeleteFailures.inc(); + private static SnapshotPolicyStats deleteFailure(String policyId) { + return new SnapshotPolicyStats(policyId, 0, 0, 0, 1); } public String getPolicyId() { @@ -370,21 +361,15 @@ public String getPolicyId() { @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(policyId); - out.writeVLong(snapshotsTaken.count()); - out.writeVLong(snapshotsFailed.count()); - out.writeVLong(snapshotsDeleted.count()); - out.writeVLong(snapshotDeleteFailures.count()); + out.writeVLong(snapshotsTaken); + out.writeVLong(snapshotsFailed); + 
out.writeVLong(snapshotsDeleted); + out.writeVLong(snapshotDeleteFailures); } @Override public int hashCode() { - return Objects.hash( - policyId, - snapshotsTaken.count(), - snapshotsFailed.count(), - snapshotsDeleted.count(), - snapshotDeleteFailures.count() - ); + return Objects.hash(policyId, snapshotsTaken, snapshotsFailed, snapshotsDeleted, snapshotDeleteFailures); } @Override @@ -397,19 +382,19 @@ public boolean equals(Object obj) { } SnapshotPolicyStats other = (SnapshotPolicyStats) obj; return Objects.equals(policyId, other.policyId) - && Objects.equals(snapshotsTaken.count(), other.snapshotsTaken.count()) - && Objects.equals(snapshotsFailed.count(), other.snapshotsFailed.count()) - && Objects.equals(snapshotsDeleted.count(), other.snapshotsDeleted.count()) - && Objects.equals(snapshotDeleteFailures.count(), other.snapshotDeleteFailures.count()); + && Objects.equals(snapshotsTaken, other.snapshotsTaken) + && Objects.equals(snapshotsFailed, other.snapshotsFailed) + && Objects.equals(snapshotsDeleted, other.snapshotsDeleted) + && Objects.equals(snapshotDeleteFailures, other.snapshotDeleteFailures); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.field(SnapshotPolicyStats.POLICY_ID.getPreferredName(), policyId); - builder.field(SnapshotPolicyStats.SNAPSHOTS_TAKEN.getPreferredName(), snapshotsTaken.count()); - builder.field(SnapshotPolicyStats.SNAPSHOTS_FAILED.getPreferredName(), snapshotsFailed.count()); - builder.field(SnapshotPolicyStats.SNAPSHOTS_DELETED.getPreferredName(), snapshotsDeleted.count()); - builder.field(SnapshotPolicyStats.SNAPSHOT_DELETION_FAILURES.getPreferredName(), snapshotDeleteFailures.count()); + builder.field(SnapshotPolicyStats.SNAPSHOTS_TAKEN.getPreferredName(), snapshotsTaken); + builder.field(SnapshotPolicyStats.SNAPSHOTS_FAILED.getPreferredName(), snapshotsFailed); + builder.field(SnapshotPolicyStats.SNAPSHOTS_DELETED.getPreferredName(), snapshotsDeleted); + builder.field(SnapshotPolicyStats.SNAPSHOT_DELETION_FAILURES.getPreferredName(), snapshotDeleteFailures); return builder; } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/GetSnapshotLifecycleStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/GetSnapshotLifecycleStatsAction.java index 6279c4208b878..478f2bd17cb8d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/GetSnapshotLifecycleStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/GetSnapshotLifecycleStatsAction.java @@ -46,6 +46,10 @@ public Response(StreamInput in) throws IOException { this.slmStats = new SnapshotLifecycleStats(in); } + public SnapshotLifecycleStats getSlmStats() { + return slmStats; + } + @Override public String toString() { return Strings.toString(this); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/slm/SnapshotLifecycleStatsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/slm/SnapshotLifecycleStatsTests.java index 156df48e79829..4a5121004660f 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/slm/SnapshotLifecycleStatsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/slm/SnapshotLifecycleStatsTests.java @@ -7,15 +7,74 @@ package org.elasticsearch.xpack.core.slm; +import org.elasticsearch.common.io.stream.InputStreamStreamInput; +import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; 
import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.util.Maps; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.AbstractXContentSerializingTestCase; import org.elasticsearch.xcontent.XContentParser; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; import java.util.Map; public class SnapshotLifecycleStatsTests extends AbstractXContentSerializingTestCase { + + public void testPolicyStatsMapImmutable() throws IOException { + { + SnapshotLifecycleStats stats = new SnapshotLifecycleStats(); + assertThrows(UnsupportedOperationException.class, () -> stats.getMetrics().put("new_policy", null)); + } + + { + SnapshotLifecycleStats stats = new SnapshotLifecycleStats(0, 0, 0, 0, new HashMap<>()); + assertThrows(UnsupportedOperationException.class, () -> stats.getMetrics().put("new_policy", null)); + } + + { + SnapshotLifecycleStats stats1 = new SnapshotLifecycleStats(1, 0, 0, 0, new HashMap<>()); + SnapshotLifecycleStats stats2 = new SnapshotLifecycleStats(0, 1, 0, 0, new HashMap<>()); + SnapshotLifecycleStats stats = stats1.merge(stats2); + assertThrows(UnsupportedOperationException.class, () -> stats.getMetrics().put("new_policy", null)); + } + + { + // write + ByteArrayOutputStream outBuffer = new ByteArrayOutputStream(); + OutputStreamStreamOutput out = new OutputStreamStreamOutput(outBuffer); + SnapshotLifecycleStats stats1 = new SnapshotLifecycleStats(0, 0, 0, 0, new HashMap<>()); + stats1.writeTo(out); + + // read + ByteArrayInputStream inBuffer = new ByteArrayInputStream(outBuffer.toByteArray()); + InputStreamStreamInput in = new InputStreamStreamInput(inBuffer); + SnapshotLifecycleStats stats = new SnapshotLifecycleStats(in); + assertThrows(UnsupportedOperationException.class, () -> stats.getMetrics().put("new_policy", null)); + } + } + + public void testIncrementAllFields() { + final String policy = "policy"; + SnapshotLifecycleStats stats = new SnapshotLifecycleStats(); + SnapshotLifecycleStats updated = stats.withRetentionRunIncremented() + .withRetentionFailedIncremented() + .withRetentionTimedOutIncremented() + .withDeletionTimeUpdated(TimeValue.ONE_MINUTE) + .withTakenIncremented(policy) + .withDeletedIncremented(policy) + .withFailedIncremented(policy) + .withDeleteFailureIncremented(policy); + + var policyStats = Map.of(policy, new SnapshotLifecycleStats.SnapshotPolicyStats(policy, 1, 1, 1, 1)); + SnapshotLifecycleStats expected = new SnapshotLifecycleStats(1, 1, 1, TimeValue.ONE_MINUTE.millis(), policyStats); + assertEquals(expected, updated); + } + @Override protected SnapshotLifecycleStats doParseInstance(XContentParser parser) throws IOException { return SnapshotLifecycleStats.parse(parser); @@ -54,7 +113,19 @@ protected SnapshotLifecycleStats createTestInstance() { @Override protected SnapshotLifecycleStats mutateInstance(SnapshotLifecycleStats instance) { - return randomValueOtherThan(instance, () -> instance.merge(createTestInstance())); + List policies = new ArrayList<>(instance.getMetrics().keySet()); + String policy = policies.isEmpty() ? 
"policy-" + randomAlphaOfLength(4) : randomFrom(policies); + + return switch (between(0, 7)) { + case 0 -> instance.withRetentionRunIncremented(); + case 1 -> instance.withRetentionFailedIncremented(); + case 2 -> instance.withRetentionTimedOutIncremented(); + case 3 -> instance.withDeletionTimeUpdated(randomTimeValue()); + case 4 -> instance.withTakenIncremented(policy); + case 5 -> instance.withFailedIncremented(policy); + case 6 -> instance.withDeletedIncremented(policy); + default -> instance.withDeleteFailureIncremented(policy); + }; } @Override diff --git a/x-pack/plugin/slm/src/internalClusterTest/java/org/elasticsearch/xpack/slm/SLMStatsImmutableIT.java b/x-pack/plugin/slm/src/internalClusterTest/java/org/elasticsearch/xpack/slm/SLMStatsImmutableIT.java new file mode 100644 index 0000000000000..352647e86899c --- /dev/null +++ b/x-pack/plugin/slm/src/internalClusterTest/java/org/elasticsearch/xpack/slm/SLMStatsImmutableIT.java @@ -0,0 +1,113 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.slm; + +import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.datastreams.DataStreamsPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.snapshots.AbstractSnapshotIntegTestCase; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; +import org.elasticsearch.xpack.core.slm.SnapshotLifecycleMetadata; +import org.elasticsearch.xpack.core.slm.SnapshotLifecyclePolicy; +import org.elasticsearch.xpack.core.slm.SnapshotRetentionConfiguration; +import org.elasticsearch.xpack.core.slm.action.ExecuteSnapshotLifecycleAction; +import org.elasticsearch.xpack.core.slm.action.GetSnapshotLifecycleStatsAction; +import org.elasticsearch.xpack.core.slm.action.PutSnapshotLifecycleAction; +import org.elasticsearch.xpack.ilm.IndexLifecycle; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.Locale; +import java.util.Map; +import java.util.concurrent.ExecutionException; + +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 2) +public class SLMStatsImmutableIT extends AbstractSnapshotIntegTestCase { + + @Override + protected Collection> nodePlugins() { + return Arrays.asList(LocalStateCompositeXPackPlugin.class, IndexLifecycle.class, SnapshotLifecycle.class, DataStreamsPlugin.class); + } + + private static final String NEVER_EXECUTE_CRON_SCHEDULE = "* * * 31 FEB ? 
*"; + + public void testSnapshotLifeCycleMetadataEmptyNotChanged() throws Exception { + final String policyId = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); + final String idxName = "test-idx"; + final String repoName = "test-repo"; + final String snapshot = "snap"; + + createRepository(repoName, "fs"); + + createSnapshotPolicy(policyId, snapshot, NEVER_EXECUTE_CRON_SCHEDULE, repoName, idxName); + assertEquals(Map.of(), SnapshotLifecycleMetadata.EMPTY.getStats().getMetrics()); + + executePolicy(policyId); + + assertBusy(() -> { + GetSnapshotLifecycleStatsAction.Response policyStats = getPolicyStats(); + assertNotNull(policyStats.getSlmStats()); + assertTrue(policyStats.getSlmStats().getMetrics().containsKey(policyId)); + }); + + assertEquals(Map.of(), SnapshotLifecycleMetadata.EMPTY.getStats().getMetrics()); + disableRepoConsistencyCheck("nothing stored in repo"); + } + + private GetSnapshotLifecycleStatsAction.Response getPolicyStats() { + try { + final var req = new AcknowledgedRequest.Plain(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT); + return client().execute(GetSnapshotLifecycleStatsAction.INSTANCE, req).get(); + } catch (Exception e) { + fail("failed to get stats"); + } + return null; + } + + private void createSnapshotPolicy(String policyName, String snapshotNamePattern, String schedule, String repoId, String indexPattern) { + logger.info("creating snapshot lifecycle policy: " + policyName); + Map snapConfig = new HashMap<>(); + snapConfig.put("indices", Collections.singletonList(indexPattern)); + snapConfig.put("ignore_unavailable", false); + snapConfig.put("partial", true); + + SnapshotLifecyclePolicy policy = new SnapshotLifecyclePolicy( + policyName, + snapshotNamePattern, + schedule, + repoId, + snapConfig, + SnapshotRetentionConfiguration.EMPTY + ); + + PutSnapshotLifecycleAction.Request putLifecycle = new PutSnapshotLifecycleAction.Request( + TEST_REQUEST_TIMEOUT, + TEST_REQUEST_TIMEOUT, + policyName, + policy + ); + try { + client().execute(PutSnapshotLifecycleAction.INSTANCE, putLifecycle).get(); + } catch (Exception e) { + logger.error("failed to create slm policy", e); + fail("failed to create policy " + policy + " got: " + e); + } + } + + private String executePolicy(String policyId) throws ExecutionException, InterruptedException { + ExecuteSnapshotLifecycleAction.Request executeReq = new ExecuteSnapshotLifecycleAction.Request( + TEST_REQUEST_TIMEOUT, + TEST_REQUEST_TIMEOUT, + policyId + ); + ExecuteSnapshotLifecycleAction.Response resp = client().execute(ExecuteSnapshotLifecycleAction.INSTANCE, executeReq).get(); + return resp.getSnapshotName(); + } +} diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycleTask.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycleTask.java index 028633a480314..ac58771936019 100644 --- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycleTask.java +++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycleTask.java @@ -267,8 +267,9 @@ public ClusterState execute(ClusterState currentState) throws Exception { SnapshotLifecyclePolicyMetadata.Builder newPolicyMetadata = SnapshotLifecyclePolicyMetadata.builder(policyMetadata); final SnapshotLifecycleStats stats = snapMeta.getStats(); + SnapshotLifecycleStats newStats; if (exception.isPresent()) { - stats.snapshotFailed(policyName); + newStats = stats.withFailedIncremented(policyName); newPolicyMetadata.setLastFailure( new SnapshotInvocationRecord( snapshotName, @@ -279,7 
+280,7 @@ public ClusterState execute(ClusterState currentState) throws Exception { ); newPolicyMetadata.setInvocationsSinceLastSuccess(policyMetadata.getInvocationsSinceLastSuccess() + 1L); } else { - stats.snapshotTaken(policyName); + newStats = stats.withTakenIncremented(policyName); newPolicyMetadata.setLastSuccess(new SnapshotInvocationRecord(snapshotName, snapshotStartTime, snapshotFinishTime, null)); newPolicyMetadata.setInvocationsSinceLastSuccess(0L); } @@ -288,7 +289,7 @@ public ClusterState execute(ClusterState currentState) throws Exception { SnapshotLifecycleMetadata lifecycleMetadata = new SnapshotLifecycleMetadata( snapLifecycles, currentSLMMode(currentState), - stats + newStats ); Metadata currentMeta = currentState.metadata(); return ClusterState.builder(currentState) diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotRetentionTask.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotRetentionTask.java index fea84e1a032dd..0cf1373e92beb 100644 --- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotRetentionTask.java +++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotRetentionTask.java @@ -41,6 +41,7 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; import java.util.function.LongSupplier; import java.util.stream.Collectors; @@ -106,14 +107,14 @@ public void triggered(SchedulerEngine.Event event) { return; } - final SnapshotLifecycleStats slmStats = new SnapshotLifecycleStats(); + AtomicReference slmStats = new AtomicReference<>(new SnapshotLifecycleStats()); // Defined here so it can be re-used without having to repeat it final Consumer failureHandler = e -> { try { logger.error("error during snapshot retention task", e); - slmStats.retentionFailed(); - updateStateWithStats(slmStats); + slmStats.getAndUpdate(SnapshotLifecycleStats::withRetentionFailedIncremented); + updateStateWithStats(slmStats.get()); } finally { logger.info("SLM retention snapshot cleanup task completed with error"); } @@ -121,7 +122,9 @@ public void triggered(SchedulerEngine.Event event) { try { logger.info("starting SLM retention snapshot cleanup task"); - slmStats.retentionRun(); + + slmStats.getAndUpdate(SnapshotLifecycleStats::withRetentionRunIncremented); + // Find all SLM policies that have retention enabled final Map policiesWithRetention = getAllPoliciesWithRetentionEnabled(state); logger.trace("policies with retention enabled: {}", policiesWithRetention.keySet()); @@ -150,7 +153,7 @@ public void onResponse(Map>> snapshotsToB // Finally, delete the snapshots that need to be deleted deleteSnapshots(snapshotsToBeDeleted, slmStats, ActionListener.running(() -> { - updateStateWithStats(slmStats); + updateStateWithStats(slmStats.get()); logger.info("SLM retention snapshot cleanup task complete"); })); } @@ -192,7 +195,7 @@ void getSnapshotsEligibleForDeletion( void deleteSnapshots( Map>> snapshotsToDelete, - SnapshotLifecycleStats slmStats, + AtomicReference slmStats, ActionListener listener ) { int count = snapshotsToDelete.values().stream().mapToInt(List::size).sum(); @@ -211,7 +214,7 @@ void deleteSnapshots( ActionListener.runAfter(listener, () -> { TimeValue totalElapsedTime = TimeValue.timeValueNanos(nowNanoSupplier.getAsLong() - startTime); logger.debug("total elapsed time for deletion of [{}] snapshots: {}", deleted, totalElapsedTime); - 
slmStats.deletionTime(totalElapsedTime); + slmStats.getAndUpdate(s -> s.withDeletionTimeUpdated(totalElapsedTime)); }) ); for (Map.Entry>> entry : snapshotsToDelete.entrySet()) { @@ -224,7 +227,7 @@ void deleteSnapshots( } private void deleteSnapshots( - SnapshotLifecycleStats slmStats, + AtomicReference slmStats, AtomicInteger deleted, AtomicInteger failed, String repo, @@ -302,7 +305,7 @@ void deleteSnapshot( String slmPolicy, String repo, SnapshotId snapshot, - SnapshotLifecycleStats slmStats, + AtomicReference slmStats, ActionListener listener ) { logger.info("[{}] snapshot retention deleting snapshot [{}]", repo, snapshot); @@ -311,12 +314,12 @@ void deleteSnapshot( .cluster() .prepareDeleteSnapshot(TimeValue.MAX_VALUE, repo, snapshot.getName()) .execute(ActionListener.wrap(acknowledgedResponse -> { - slmStats.snapshotDeleted(slmPolicy); + slmStats.getAndUpdate(s -> s.withDeletedIncremented(slmPolicy)); listener.onResponse(acknowledgedResponse); }, e -> { try { logger.warn(() -> format("[%s] failed to delete snapshot [%s] for retention", repo, snapshot), e); - slmStats.snapshotDeleteFailure(slmPolicy); + slmStats.getAndUpdate(s -> s.withDeleteFailureIncremented(slmPolicy)); } finally { listener.onFailure(e); } diff --git a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotRetentionTaskTests.java b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotRetentionTaskTests.java index 1384cd4499624..184d207ee173d 100644 --- a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotRetentionTaskTests.java +++ b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotRetentionTaskTests.java @@ -44,7 +44,6 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; -import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -351,27 +350,25 @@ protected void ) ); + AtomicReference slmStats = new AtomicReference<>(new SnapshotLifecycleStats()); AtomicBoolean onFailureCalled = new AtomicBoolean(false); - task.deleteSnapshot( - "policy", - "foo", - new SnapshotId("name", "uuid"), - new SnapshotLifecycleStats(0, 0, 0, 0, new HashMap<>()), - new ActionListener<>() { - @Override - public void onResponse(AcknowledgedResponse acknowledgedResponse) { - logger.info("--> forcing failure"); - throw new ElasticsearchException("forced failure"); - } + task.deleteSnapshot("policy", "foo", new SnapshotId("name", "uuid"), slmStats, new ActionListener<>() { + @Override + public void onResponse(AcknowledgedResponse acknowledgedResponse) { + logger.info("--> forcing failure"); + throw new ElasticsearchException("forced failure"); + } - @Override - public void onFailure(Exception e) { - onFailureCalled.set(true); - } + @Override + public void onFailure(Exception e) { + onFailureCalled.set(true); } - ); + }); assertThat(onFailureCalled.get(), equalTo(true)); + + var expectedPolicyStats = Map.of(policyId, new SnapshotLifecycleStats.SnapshotPolicyStats(policyId, 0, 0, 1, 1)); + assertThat(slmStats.get(), equalTo(new SnapshotLifecycleStats(0, 0, 0, 0, expectedPolicyStats))); } finally { threadPool.shutdownNow(); threadPool.awaitTermination(10, TimeUnit.SECONDS); @@ -544,7 +541,7 @@ void deleteSnapshot( String policyId, String repo, SnapshotId snapshot, - SnapshotLifecycleStats slmStats, + AtomicReference slmStats, ActionListener listener ) { deleteRunner.apply(policyId, repo, snapshot, slmStats, listener); @@ -557,7 +554,7 @@ void apply( String policyId, String repo, SnapshotId snapshot, - 
SnapshotLifecycleStats slmStats, + AtomicReference<SnapshotLifecycleStats> slmStats, ActionListener<AcknowledgedResponse> listener ); } } From 779f09ea87aa7de0566536738771ef65407d5b0f Mon Sep 17 00:00:00 2001 From: Pooya Salehi Date: Thu, 25 Jul 2024 18:21:36 +0200 Subject: [PATCH 021/105] Update get snapshot status API doc (#111240) Make it clear that this API should be used only if the detailed shard info is needed and only on ongoing snapshots. Remove incorrectly mentioned `STATE` value. --- .../apis/get-snapshot-status-api.asciidoc | 9 ++------- .../snapshots/status/TransportSnapshotsStatusAction.java | 3 +-- 2 files changed, 3 insertions(+), 9 deletions(-) diff --git a/docs/reference/snapshot-restore/apis/get-snapshot-status-api.asciidoc b/docs/reference/snapshot-restore/apis/get-snapshot-status-api.asciidoc index d8b03cbc0e880..e677408da3f25 100644 --- a/docs/reference/snapshot-restore/apis/get-snapshot-status-api.asciidoc +++ b/docs/reference/snapshot-restore/apis/get-snapshot-status-api.asciidoc @@ -4,7 +4,7 @@ Get snapshot status ++++ -Retrieves a detailed description of the current state for each shard participating in the snapshot. +Retrieves a detailed description of the current state for each shard participating in the snapshot. Note that this API should only be used to obtain detailed shard-level information for ongoing snapshots. If this detail is not needed, or you want to obtain information about one or more existing snapshots, use the <<get-snapshot-api,get snapshot API>>. //// [source,console] @@ -172,13 +172,8 @@ Indicates the current snapshot state. `STARTED`:: The snapshot is currently running. -`PARTIAL`:: - The global cluster state was stored, but data of at least one shard was not stored successfully. - The <> section of the response contains more detailed information about shards - that were not processed correctly. - `SUCCESS`:: - The snapshot finished and all shards were stored successfully. + The snapshot completed. ==== -- diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java index caedc3363e9a3..9e83a29ffb943 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java @@ -332,8 +332,7 @@ private void loadRepositoryData( final SnapshotsInProgress.State state = switch (snapshotInfo.state()) { case FAILED -> SnapshotsInProgress.State.FAILED; case SUCCESS, PARTIAL -> - // Translating both PARTIAL and SUCCESS to SUCCESS for now - // TODO: add the differentiation on the metadata level in the next major release + // Both of these mean the snapshot has completed.
SnapshotsInProgress.State.SUCCESS; default -> throw new IllegalArgumentException("Unexpected snapshot state " + snapshotInfo.state()); }; From 379cb05e72b9b6a0b7e2383abe2ae16f25ad0d89 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 26 Jul 2024 02:41:23 +1000 Subject: [PATCH 022/105] Mute org.elasticsearch.xpack.core.slm.SnapshotLifecycleStatsTests testEqualsAndHashcode #111300 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 1d71d7d735e96..7d5ce943681d1 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -120,6 +120,9 @@ tests: - class: org.elasticsearch.xpack.esql.CsvTests method: test {inlinestats.ShadowingSelf} issue: https://github.com/elastic/elasticsearch/issues/111261 +- class: org.elasticsearch.xpack.core.slm.SnapshotLifecycleStatsTests + method: testEqualsAndHashcode + issue: https://github.com/elastic/elasticsearch/issues/111300 # Examples: # From 140758fdd04f698e118d695be6e6e599b664c664 Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Thu, 25 Jul 2024 10:53:01 -0700 Subject: [PATCH 023/105] LogsDB data generator - support generating arrays and null values (#111249) --- .../datageneration/arbitrary/Arbitrary.java | 10 +++ .../arbitrary/RandomBasedArbitrary.java | 29 ++++++- .../logsdb/datageneration/fields/Context.java | 4 + .../datageneration/fields/FieldValues.java | 34 +++++++++ .../GenericSubObjectFieldDataGenerator.java | 26 +++++-- .../fields/KeywordFieldDataGenerator.java | 10 ++- .../fields/LongFieldDataGenerator.java | 10 ++- .../DataGeneratorSnapshotTests.java | 76 ++++++++++++++++--- .../datageneration/DataGeneratorTests.java | 38 +++++++--- .../datageneration/FieldValuesTests.java | 43 +++++++++++ 10 files changed, 244 insertions(+), 36 deletions(-) create mode 100644 test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/FieldValues.java create mode 100644 test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/FieldValuesTests.java diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/arbitrary/Arbitrary.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/arbitrary/Arbitrary.java index 139994d530f77..7a4bb880c5335 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/arbitrary/Arbitrary.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/arbitrary/Arbitrary.java @@ -27,4 +27,14 @@ public interface Arbitrary { long longValue(); String stringValue(int lengthLowerBound, int lengthUpperBound); + + boolean generateNullValue(); + + boolean generateArrayOfValues(); + + int valueArraySize(); + + boolean generateArrayOfObjects(); + + int objectArraySize(); } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/arbitrary/RandomBasedArbitrary.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/arbitrary/RandomBasedArbitrary.java index 71152191e27f9..257bd17fc1892 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/arbitrary/RandomBasedArbitrary.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/arbitrary/RandomBasedArbitrary.java @@ -11,6 +11,7 @@ import org.elasticsearch.logsdb.datageneration.FieldType; import static org.elasticsearch.test.ESTestCase.randomAlphaOfLengthBetween; +import static org.elasticsearch.test.ESTestCase.randomBoolean; import static 
org.elasticsearch.test.ESTestCase.randomDouble; import static org.elasticsearch.test.ESTestCase.randomFrom; import static org.elasticsearch.test.ESTestCase.randomIntBetween; @@ -51,6 +52,32 @@ public long longValue() { @Override public String stringValue(int lengthLowerBound, int lengthUpperBound) { - return randomAlphaOfLengthBetween(lengthLowerBound, lengthLowerBound); + return randomAlphaOfLengthBetween(lengthLowerBound, lengthUpperBound); + } + + @Override + public boolean generateNullValue() { + // Using a static 10% chance, this is just a chosen value that can be tweaked. + return randomDouble() < 0.1; + } + + @Override + public boolean generateArrayOfValues() { + return randomBoolean(); + } + + @Override + public int valueArraySize() { + return randomIntBetween(0, 5); + } + + @Override + public boolean generateArrayOfObjects() { + return randomBoolean(); + } + + @Override + public int objectArraySize() { + return randomIntBetween(0, 5); } } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/Context.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/Context.java index b78e1e2dda0d4..b257807890c00 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/Context.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/Context.java @@ -46,4 +46,8 @@ public boolean shouldAddNestedField() { && objectDepth < specification.maxObjectDepth() && nestedFieldsCount < specification.nestedFieldsLimit(); } + + public boolean shouldGenerateObjectArray() { + return objectDepth > 0 && specification.arbitrary().generateArrayOfObjects(); + } } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/FieldValues.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/FieldValues.java new file mode 100644 index 0000000000000..74196c5c8926c --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/FieldValues.java @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.logsdb.datageneration.fields; + +import org.elasticsearch.logsdb.datageneration.arbitrary.Arbitrary; + +import java.util.function.Function; +import java.util.function.Supplier; +import java.util.stream.IntStream; + +public class FieldValues { + private FieldValues() {} + + public static Function, Supplier> injectNulls(Arbitrary arbitrary) { + return (values) -> () -> arbitrary.generateNullValue() ? 
null : values.get(); + } + + public static Function, Supplier> wrappedInArray(Arbitrary arbitrary) { + return (values) -> () -> { + if (arbitrary.generateArrayOfValues()) { + var size = arbitrary.valueArraySize(); + return IntStream.range(0, size).mapToObj((i) -> values.get()).toList(); + } + + return values.get(); + }; + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/GenericSubObjectFieldDataGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/GenericSubObjectFieldDataGenerator.java index cc1ae57b8996c..5d05fc1f35a77 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/GenericSubObjectFieldDataGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/GenericSubObjectFieldDataGenerator.java @@ -54,17 +54,29 @@ public CheckedConsumer mappingWriter( public CheckedConsumer fieldValueGenerator() { return b -> { - b.startObject(); - - for (var childField : childFields) { - b.field(childField.fieldName); - childField.generator.fieldValueGenerator().accept(b); + if (context.shouldGenerateObjectArray()) { + int size = context.specification().arbitrary().objectArraySize(); + + b.startArray(); + for (int i = 0; i < size; i++) { + writeObject(b, childFields); + } + b.endArray(); + } else { + writeObject(b, childFields); } - - b.endObject(); }; } + private static void writeObject(XContentBuilder document, Iterable childFields) throws IOException { + document.startObject(); + for (var childField : childFields) { + document.field(childField.fieldName); + childField.generator.fieldValueGenerator().accept(document); + } + document.endObject(); + } + private void generateChildFields() { var existingFields = new HashSet(); // no child fields is legal diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/KeywordFieldDataGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/KeywordFieldDataGenerator.java index 31a1499ce2799..11413d33a97c7 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/KeywordFieldDataGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/KeywordFieldDataGenerator.java @@ -14,12 +14,16 @@ import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; +import java.util.function.Supplier; + +import static org.elasticsearch.logsdb.datageneration.fields.FieldValues.injectNulls; +import static org.elasticsearch.logsdb.datageneration.fields.FieldValues.wrappedInArray; public class KeywordFieldDataGenerator implements FieldDataGenerator { - private final Arbitrary arbitrary; + private final Supplier valueGenerator; public KeywordFieldDataGenerator(Arbitrary arbitrary) { - this.arbitrary = arbitrary; + this.valueGenerator = injectNulls(arbitrary).andThen(wrappedInArray(arbitrary)).apply(() -> arbitrary.stringValue(0, 50)); } @Override @@ -29,6 +33,6 @@ public CheckedConsumer mappingWriter() { @Override public CheckedConsumer fieldValueGenerator() { - return b -> b.value(arbitrary.stringValue(0, 50)); + return b -> b.value(valueGenerator.get()); } } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/LongFieldDataGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/LongFieldDataGenerator.java index f8753a7cdd1c6..f1bb35f1f0401 100644 --- 
a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/LongFieldDataGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/LongFieldDataGenerator.java @@ -14,12 +14,16 @@ import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; +import java.util.function.Supplier; + +import static org.elasticsearch.logsdb.datageneration.fields.FieldValues.injectNulls; +import static org.elasticsearch.logsdb.datageneration.fields.FieldValues.wrappedInArray; public class LongFieldDataGenerator implements FieldDataGenerator { - private final Arbitrary arbitrary; + private final Supplier valueGenerator; public LongFieldDataGenerator(Arbitrary arbitrary) { - this.arbitrary = arbitrary; + this.valueGenerator = injectNulls(arbitrary).andThen(wrappedInArray(arbitrary)).apply(arbitrary::longValue); } @Override @@ -29,6 +33,6 @@ public CheckedConsumer mappingWriter() { @Override public CheckedConsumer fieldValueGenerator() { - return b -> b.value(arbitrary.longValue()); + return b -> b.value(valueGenerator.get()); } } diff --git a/test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/DataGeneratorSnapshotTests.java b/test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/DataGeneratorSnapshotTests.java index 868c8c749ea11..41066e9ba3cac 100644 --- a/test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/DataGeneratorSnapshotTests.java +++ b/test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/DataGeneratorSnapshotTests.java @@ -83,22 +83,43 @@ public void testSnapshot() throws Exception { var expectedDocument = """ { - "f1" : { - "f2" : { - "f3" : "string1", - "f4" : 0 + "f1" : [ + { + "f2" : { + "f3" : [ + null, + "string1" + ], + "f4" : 0 + }, + "f5" : { + "f6" : "string2", + "f7" : null + } }, - "f5" : { - "f6" : "string2", - "f7" : 1 + { + "f2" : { + "f3" : [ + "string3", + "string4" + ], + "f4" : 1 + }, + "f5" : { + "f6" : null, + "f7" : 2 + } } - }, + ], "f8" : { "f9" : { - "f10" : "string3", - "f11" : 2 + "f10" : [ + "string5", + "string6" + ], + "f11" : null }, - "f12" : "string4" + "f12" : "string7" } }"""; @@ -111,6 +132,9 @@ private class TestArbitrary implements Arbitrary { private FieldType fieldType = FieldType.KEYWORD; private long longValue = 0; private long generatedStringValues = 0; + private int generateNullChecks = 0; + private int generateArrayChecks = 0; + private boolean producedObjectArray = false; @Override public boolean generateSubObject() { @@ -153,5 +177,35 @@ public long longValue() { public String stringValue(int lengthLowerBound, int lengthUpperBound) { return "string" + (generatedStringValues++ + 1); } + + @Override + public boolean generateNullValue() { + return generateNullChecks++ % 4 == 0; + } + + @Override + public boolean generateArrayOfValues() { + return generateArrayChecks++ % 4 == 0; + } + + @Override + public int valueArraySize() { + return 2; + } + + @Override + public boolean generateArrayOfObjects() { + if (producedObjectArray == false) { + producedObjectArray = true; + return true; + } + + return false; + } + + @Override + public int objectArraySize() { + return 2; + } }; } diff --git a/test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/DataGeneratorTests.java b/test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/DataGeneratorTests.java index cd8b2424ac5ae..309c5ad428829 100644 --- a/test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/DataGeneratorTests.java +++ 
b/test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/DataGeneratorTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.index.mapper.MapperServiceTestCase; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.logsdb.datageneration.arbitrary.Arbitrary; +import org.elasticsearch.logsdb.datageneration.arbitrary.RandomBasedArbitrary; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; @@ -33,7 +34,7 @@ public void testDataGeneratorSanity() throws IOException { public void testDataGeneratorProducesValidMappingAndDocument() throws IOException { // Make sure objects, nested objects and all field types are covered. - var testArbitrary = new Arbitrary() { + var testArbitrary = new RandomBasedArbitrary() { private boolean subObjectCovered = false; private boolean nestedCovered = false; private int generatedFields = 0; @@ -73,16 +74,6 @@ public String fieldName(int lengthLowerBound, int lengthUpperBound) { public FieldType fieldType() { return FieldType.values()[generatedFields % FieldType.values().length]; } - - @Override - public long longValue() { - return randomLong(); - } - - @Override - public String stringValue(int lengthLowerBound, int lengthUpperBound) { - return randomAlphaOfLengthBetween(lengthLowerBound, lengthUpperBound); - } }; var dataGenerator = new DataGenerator(DataGeneratorSpecification.builder().withArbitrary(testArbitrary).build()); @@ -138,6 +129,31 @@ public long longValue() { public String stringValue(int lengthLowerBound, int lengthUpperBound) { return ""; } + + @Override + public boolean generateNullValue() { + return false; + } + + @Override + public boolean generateArrayOfValues() { + return false; + } + + @Override + public int valueArraySize() { + return 3; + } + + @Override + public boolean generateArrayOfObjects() { + return false; + } + + @Override + public int objectArraySize() { + return 3; + } }; var dataGenerator = new DataGenerator( DataGeneratorSpecification.builder().withArbitrary(arbitrary).withMaxFieldCountPerLevel(100).withMaxObjectDepth(2).build() diff --git a/test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/FieldValuesTests.java b/test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/FieldValuesTests.java new file mode 100644 index 0000000000000..5e6a405ba1f87 --- /dev/null +++ b/test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/FieldValuesTests.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.logsdb.datageneration; + +import org.elasticsearch.logsdb.datageneration.arbitrary.RandomBasedArbitrary; +import org.elasticsearch.logsdb.datageneration.fields.FieldValues; +import org.elasticsearch.test.ESTestCase; + +import java.util.List; +import java.util.function.Supplier; + +public class FieldValuesTests extends ESTestCase { + public void testSanity() { + Supplier values = () -> 100; + var arbitrary = new RandomBasedArbitrary(); + + var valuesWithNullsAndWrappedInArray = FieldValues.injectNulls(arbitrary) + .andThen(FieldValues.wrappedInArray(arbitrary)) + .apply(values); + var value = valuesWithNullsAndWrappedInArray.get(); + + if (value instanceof List list) { + assertTrue(list.size() <= 5); + for (var item : list) { + if (item instanceof Integer intValue) { + assertEquals(Integer.valueOf(100), intValue); + } else { + assertNull(item); + } + } + } else if (value instanceof Integer intValue) { + assertEquals(Integer.valueOf(100), intValue); + } else { + assertNull(value); + } + } +} From cb7a21e8ff3a4f1623cb91751df9cd3c1bbfb2f7 Mon Sep 17 00:00:00 2001 From: Artem Prigoda Date: Thu, 25 Jul 2024 20:26:57 +0200 Subject: [PATCH 024/105] [cache] Support async RangeMissingHandler callbacks (#110587) Change `fillCacheRange` method to accept a completion listener that must be called by `RangeMissingHandler` implementations when they finish fetching data. By doing so, we support asynchronously fetching the data from a third party storage. We also support asynchronous `SourceInputStreamFactory` for reading gaps from the storage. Depends on #111177 --- .../shared/SharedBlobCacheService.java | 101 +++++--- .../shared/SharedBlobCacheServiceTests.java | 220 ++++++++++++------ .../store/input/FrozenIndexInput.java | 59 ++--- 3 files changed, 255 insertions(+), 125 deletions(-) diff --git a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java index 9cb83e35b63d6..9a908b7e943c5 100644 --- a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java +++ b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java @@ -646,13 +646,14 @@ private RangeMissingHandler writerWithOffset(RangeMissingHandler writer, int wri // no need to allocate a new capturing lambda if the offset isn't adjusted return writer; } - return (channel, channelPos, streamFactory, relativePos, len, progressUpdater) -> writer.fillCacheRange( + return (channel, channelPos, streamFactory, relativePos, len, progressUpdater, completionListener) -> writer.fillCacheRange( channel, channelPos, streamFactory, relativePos - writeOffset, len, - progressUpdater + progressUpdater, + completionListener ); } @@ -987,16 +988,17 @@ void populateAndRead( executor.execute(fillGapRunnable(gap, writer, null, refs.acquireListener())); } } else { - final List gapFillingTasks = gaps.stream() - .map(gap -> fillGapRunnable(gap, writer, streamFactory, refs.acquireListener())) - .toList(); - executor.execute(() -> { - try (streamFactory) { + var gapFillingListener = refs.acquireListener(); + try (var gfRefs = new RefCountingRunnable(ActionRunnable.run(gapFillingListener, streamFactory::close))) { + final List gapFillingTasks = gaps.stream() + .map(gap -> fillGapRunnable(gap, writer, streamFactory, gfRefs.acquireListener())) + .toList(); + executor.execute(() -> { // Fill the gaps in order. 
If a gap fails to fill for whatever reason, the task for filling the next // gap will still be executed. gapFillingTasks.forEach(Runnable::run); - } - }); + }); + } } } } @@ -1005,13 +1007,13 @@ void populateAndRead( } } - private AbstractRunnable fillGapRunnable( + private Runnable fillGapRunnable( SparseFileTracker.Gap gap, RangeMissingHandler writer, @Nullable SourceInputStreamFactory streamFactory, ActionListener listener ) { - return ActionRunnable.run(listener.delegateResponse((l, e) -> failGapAndListener(gap, l, e)), () -> { + return () -> ActionListener.run(listener, l -> { var ioRef = io; assert regionOwners.get(ioRef) == CacheFileRegion.this; assert CacheFileRegion.this.hasReferences() : CacheFileRegion.this; @@ -1022,10 +1024,15 @@ private AbstractRunnable fillGapRunnable( streamFactory, start, Math.toIntExact(gap.end() - start), - progress -> gap.onProgress(start + progress) + progress -> gap.onProgress(start + progress), + l.map(unused -> { + assert regionOwners.get(ioRef) == CacheFileRegion.this; + assert CacheFileRegion.this.hasReferences() : CacheFileRegion.this; + writeCount.increment(); + gap.onCompletion(); + return null; + }).delegateResponse((delegate, e) -> failGapAndListener(gap, delegate, e)) ); - writeCount.increment(); - gap.onCompletion(); }); } @@ -1113,12 +1120,23 @@ public void fillCacheRange( SourceInputStreamFactory streamFactory, int relativePos, int length, - IntConsumer progressUpdater + IntConsumer progressUpdater, + ActionListener completionListener ) throws IOException { - writer.fillCacheRange(channel, channelPos, streamFactory, relativePos, length, progressUpdater); - var elapsedTime = TimeUnit.NANOSECONDS.toMicros(relativeTimeInNanosSupplier.getAsLong() - startTime); - SharedBlobCacheService.this.blobCacheMetrics.getCacheMissLoadTimes().record(elapsedTime); - SharedBlobCacheService.this.blobCacheMetrics.getCacheMissCounter().increment(); + writer.fillCacheRange( + channel, + channelPos, + streamFactory, + relativePos, + length, + progressUpdater, + completionListener.map(unused -> { + var elapsedTime = TimeUnit.NANOSECONDS.toMicros(relativeTimeInNanosSupplier.getAsLong() - startTime); + blobCacheMetrics.getCacheMissLoadTimes().record(elapsedTime); + blobCacheMetrics.getCacheMissCounter().increment(); + return null; + }) + ); } }; if (rangeToRead.isEmpty()) { @@ -1211,9 +1229,18 @@ public void fillCacheRange( SourceInputStreamFactory streamFactory, int relativePos, int len, - IntConsumer progressUpdater + IntConsumer progressUpdater, + ActionListener completionListener ) throws IOException { - delegate.fillCacheRange(channel, channelPos, streamFactory, relativePos - writeOffset, len, progressUpdater); + delegate.fillCacheRange( + channel, + channelPos, + streamFactory, + relativePos - writeOffset, + len, + progressUpdater, + completionListener + ); } }; } @@ -1226,14 +1253,25 @@ public void fillCacheRange( SourceInputStreamFactory streamFactory, int relativePos, int len, - IntConsumer progressUpdater + IntConsumer progressUpdater, + ActionListener completionListener ) throws IOException { assert assertValidRegionAndLength(fileRegion, channelPos, len); - delegate.fillCacheRange(channel, channelPos, streamFactory, relativePos, len, progressUpdater); - assert regionOwners.get(fileRegion.io) == fileRegion - : "File chunk [" + fileRegion.regionKey + "] no longer owns IO [" + fileRegion.io + "]"; + delegate.fillCacheRange( + channel, + channelPos, + streamFactory, + relativePos, + len, + progressUpdater, + Assertions.ENABLED ? 
ActionListener.runBefore(completionListener, () -> { + assert regionOwners.get(fileRegion.io) == fileRegion + : "File chunk [" + fileRegion.regionKey + "] no longer owns IO [" + fileRegion.io + "]"; + }) : completionListener + ); } }; + } return adjustedWriter; } @@ -1320,6 +1358,7 @@ default SourceInputStreamFactory sharedInputStreamFactory(List completionListener ) throws IOException; } @@ -1339,9 +1379,9 @@ public interface SourceInputStreamFactory extends Releasable { /** * Create the input stream at the specified position. * @param relativePos the relative position in the remote storage to read from. - * @return the input stream ready to be read from. + * @param listener listener for the input stream ready to be read from. */ - InputStream create(int relativePos) throws IOException; + void create(int relativePos, ActionListener listener) throws IOException; } private abstract static class DelegatingRangeMissingHandler implements RangeMissingHandler { @@ -1363,9 +1403,10 @@ public void fillCacheRange( SourceInputStreamFactory streamFactory, int relativePos, int length, - IntConsumer progressUpdater + IntConsumer progressUpdater, + ActionListener completionListener ) throws IOException { - delegate.fillCacheRange(channel, channelPos, streamFactory, relativePos, length, progressUpdater); + delegate.fillCacheRange(channel, channelPos, streamFactory, relativePos, length, progressUpdater, completionListener); } } diff --git a/x-pack/plugin/blob-cache/src/test/java/org/elasticsearch/blobcache/shared/SharedBlobCacheServiceTests.java b/x-pack/plugin/blob-cache/src/test/java/org/elasticsearch/blobcache/shared/SharedBlobCacheServiceTests.java index e477673c90d6d..4ea954a1a76ce 100644 --- a/x-pack/plugin/blob-cache/src/test/java/org/elasticsearch/blobcache/shared/SharedBlobCacheServiceTests.java +++ b/x-pack/plugin/blob-cache/src/test/java/org/elasticsearch/blobcache/shared/SharedBlobCacheServiceTests.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.StoppableExecutorServiceWrapper; import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.core.CheckedRunnable; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.env.TestEnvironment; @@ -72,6 +73,13 @@ private static long size(long numPages) { return numPages * SharedBytes.PAGE_SIZE; } + private static void completeWith(ActionListener listener, CheckedRunnable runnable) { + ActionListener.completeWith(listener, () -> { + runnable.run(); + return null; + }); + } + public void testBasicEviction() throws IOException { Settings settings = Settings.builder() .put(NODE_NAME_SETTING.getKey(), "node") @@ -115,7 +123,10 @@ public void testBasicEviction() throws IOException { ByteRange.of(0L, 1L), ByteRange.of(0L, 1L), (channel, channelPos, relativePos, length) -> 1, - (channel, channelPos, streamFactory, relativePos, length, progressUpdater) -> progressUpdater.accept(length), + (channel, channelPos, streamFactory, relativePos, length, progressUpdater, completionListener) -> completeWith( + completionListener, + () -> progressUpdater.accept(length) + ), taskQueue.getThreadPool().generic(), bytesReadFuture ); @@ -552,11 +563,14 @@ public void execute(Runnable command) { cacheService.maybeFetchFullEntry( cacheKey, size, - (channel, channelPos, streamFactory, relativePos, length, progressUpdater) -> { - assert streamFactory == null : streamFactory; - bytesRead.addAndGet(-length); - 
progressUpdater.accept(length); - }, + (channel, channelPos, streamFactory, relativePos, length, progressUpdater, completionListener) -> completeWith( + completionListener, + () -> { + assert streamFactory == null : streamFactory; + bytesRead.addAndGet(-length); + progressUpdater.accept(length); + } + ), bulkExecutor, future ); @@ -570,9 +584,15 @@ public void execute(Runnable command) { // a download that would use up all regions should not run final var cacheKey = generateCacheKey(); assertEquals(2, cacheService.freeRegionCount()); - var configured = cacheService.maybeFetchFullEntry(cacheKey, size(500), (ch, chPos, streamFactory, relPos, len, update) -> { - throw new AssertionError("Should never reach here"); - }, bulkExecutor, ActionListener.noop()); + var configured = cacheService.maybeFetchFullEntry( + cacheKey, + size(500), + (ch, chPos, streamFactory, relPos, len, update, completionListener) -> completeWith(completionListener, () -> { + throw new AssertionError("Should never reach here"); + }), + bulkExecutor, + ActionListener.noop() + ); assertFalse(configured); assertEquals(2, cacheService.freeRegionCount()); } @@ -613,9 +633,14 @@ public void testFetchFullCacheEntryConcurrently() throws Exception { (ActionListener listener) -> cacheService.maybeFetchFullEntry( cacheKey, size, - (channel, channelPos, streamFactory, relativePos, length, progressUpdater) -> progressUpdater.accept( - length - ), + ( + channel, + channelPos, + streamFactory, + relativePos, + length, + progressUpdater, + completionListener) -> completeWith(completionListener, () -> progressUpdater.accept(length)), bulkExecutor, listener ) @@ -859,7 +884,10 @@ public void testMaybeEvictLeastUsed() throws Exception { var entry = cacheService.get(cacheKey, regionSize, 0); entry.populate( ByteRange.of(0L, regionSize), - (channel, channelPos, streamFactory, relativePos, length, progressUpdater) -> progressUpdater.accept(length), + (channel, channelPos, streamFactory, relativePos, length, progressUpdater, completionListener) -> completeWith( + completionListener, + () -> progressUpdater.accept(length) + ), taskQueue.getThreadPool().generic(), ActionListener.noop() ); @@ -954,11 +982,14 @@ public void execute(Runnable command) { cacheKey, 0, blobLength, - (channel, channelPos, streamFactory, relativePos, length, progressUpdater) -> { - assert streamFactory == null : streamFactory; - bytesRead.addAndGet(length); - progressUpdater.accept(length); - }, + (channel, channelPos, streamFactory, relativePos, length, progressUpdater, completionListener) -> completeWith( + completionListener, + () -> { + assert streamFactory == null : streamFactory; + bytesRead.addAndGet(length); + progressUpdater.accept(length); + } + ), bulkExecutor, future ); @@ -985,11 +1016,14 @@ public void execute(Runnable command) { cacheKey, region, blobLength, - (channel, channelPos, streamFactory, relativePos, length, progressUpdater) -> { - assert streamFactory == null : streamFactory; - bytesRead.addAndGet(length); - progressUpdater.accept(length); - }, + (channel, channelPos, streamFactory, relativePos, length, progressUpdater, completionListener) -> completeWith( + completionListener, + () -> { + assert streamFactory == null : streamFactory; + bytesRead.addAndGet(length); + progressUpdater.accept(length); + } + ), bulkExecutor, listener ); @@ -1010,13 +1044,16 @@ public void execute(Runnable command) { cacheKey, randomIntBetween(0, 10), randomLongBetween(1L, regionSize), - (channel, channelPos, streamFactory, relativePos, length, progressUpdater) -> { - 
throw new AssertionError("should not be executed"); - }, + (channel, channelPos, streamFactory, relativePos, length, progressUpdater, completionListener) -> completeWith( + completionListener, + () -> { + throw new AssertionError("should not be executed"); + } + ), bulkExecutor, future ); - assertThat("Listener is immediately completed", future.isDone(), is(true)); + assertThat("Listener is immediately completionListener", future.isDone(), is(true)); assertThat("Region already exists in cache", future.get(), is(false)); } { @@ -1032,11 +1069,14 @@ public void execute(Runnable command) { cacheKey, 0, blobLength, - (channel, channelPos, ignore, relativePos, length, progressUpdater) -> { - assert ignore == null : ignore; - bytesRead.addAndGet(length); - progressUpdater.accept(length); - }, + (channel, channelPos, ignore, relativePos, length, progressUpdater, completionListener) -> completeWith( + completionListener, + () -> { + assert ignore == null : ignore; + bytesRead.addAndGet(length); + progressUpdater.accept(length); + } + ), bulkExecutor, future ); @@ -1110,12 +1150,15 @@ public void execute(Runnable command) { region, range, blobLength, - (channel, channelPos, streamFactory, relativePos, length, progressUpdater) -> { - assertThat(range.start() + relativePos, equalTo(cacheService.getRegionStart(region) + regionRange.start())); - assertThat(channelPos, equalTo(Math.toIntExact(regionRange.start()))); - assertThat(length, equalTo(Math.toIntExact(regionRange.length()))); - bytesCopied.addAndGet(length); - }, + (channel, channelPos, streamFactory, relativePos, length, progressUpdater, completionListener) -> completeWith( + completionListener, + () -> { + assertThat(range.start() + relativePos, equalTo(cacheService.getRegionStart(region) + regionRange.start())); + assertThat(channelPos, equalTo(Math.toIntExact(regionRange.start()))); + assertThat(length, equalTo(Math.toIntExact(regionRange.length()))); + bytesCopied.addAndGet(length); + } + ), bulkExecutor, future ); @@ -1150,7 +1193,10 @@ public void execute(Runnable command) { region, ByteRange.of(0L, blobLength), blobLength, - (channel, channelPos, streamFactory, relativePos, length, progressUpdater) -> bytesCopied.addAndGet(length), + (channel, channelPos, streamFactory, relativePos, length, progressUpdater, completionListener) -> completeWith( + completionListener, + () -> bytesCopied.addAndGet(length) + ), bulkExecutor, listener ); @@ -1173,13 +1219,16 @@ public void execute(Runnable command) { randomIntBetween(0, 10), ByteRange.of(0L, blobLength), blobLength, - (channel, channelPos, streamFactory, relativePos, length, progressUpdater) -> { - throw new AssertionError("should not be executed"); - }, + (channel, channelPos, streamFactory, relativePos, length, progressUpdater, completionListener) -> completeWith( + completionListener, + () -> { + throw new AssertionError("should not be executed"); + } + ), bulkExecutor, future ); - assertThat("Listener is immediately completed", future.isDone(), is(true)); + assertThat("Listener is immediately completionListener", future.isDone(), is(true)); assertThat("Region already exists in cache", future.get(), is(false)); } { @@ -1196,7 +1245,10 @@ public void execute(Runnable command) { 0, ByteRange.of(0L, blobLength), blobLength, - (channel, channelPos, streamFactory, relativePos, length, progressUpdater) -> bytesCopied.addAndGet(length), + (channel, channelPos, streamFactory, relativePos, length, progressUpdater, completionListener) -> completeWith( + completionListener, + () -> 
bytesCopied.addAndGet(length) + ), bulkExecutor, future ); @@ -1237,10 +1289,18 @@ public void testPopulate() throws Exception { var entry = cacheService.get(cacheKey, blobLength, 0); AtomicLong bytesWritten = new AtomicLong(0L); final PlainActionFuture future1 = new PlainActionFuture<>(); - entry.populate(ByteRange.of(0, regionSize - 1), (channel, channelPos, streamFactory, relativePos, length, progressUpdater) -> { - bytesWritten.addAndGet(length); - progressUpdater.accept(length); - }, taskQueue.getThreadPool().generic(), future1); + entry.populate( + ByteRange.of(0, regionSize - 1), + (channel, channelPos, streamFactory, relativePos, length, progressUpdater, completionListener) -> completeWith( + completionListener, + () -> { + bytesWritten.addAndGet(length); + progressUpdater.accept(length); + } + ), + taskQueue.getThreadPool().generic(), + future1 + ); assertThat(future1.isDone(), is(false)); assertThat(taskQueue.hasRunnableTasks(), is(true)); @@ -1248,18 +1308,34 @@ public void testPopulate() throws Exception { // start populating the second region entry = cacheService.get(cacheKey, blobLength, 1); final PlainActionFuture future2 = new PlainActionFuture<>(); - entry.populate(ByteRange.of(0, regionSize - 1), (channel, channelPos, streamFactory, relativePos, length, progressUpdater) -> { - bytesWritten.addAndGet(length); - progressUpdater.accept(length); - }, taskQueue.getThreadPool().generic(), future2); + entry.populate( + ByteRange.of(0, regionSize - 1), + (channel, channelPos, streamFactory, relativePos, length, progressUpdater, completionListener) -> completeWith( + completionListener, + () -> { + bytesWritten.addAndGet(length); + progressUpdater.accept(length); + } + ), + taskQueue.getThreadPool().generic(), + future2 + ); // start populating again the first region, listener should be called immediately entry = cacheService.get(cacheKey, blobLength, 0); final PlainActionFuture future3 = new PlainActionFuture<>(); - entry.populate(ByteRange.of(0, regionSize - 1), (channel, channelPos, streamFactory, relativePos, length, progressUpdater) -> { - bytesWritten.addAndGet(length); - progressUpdater.accept(length); - }, taskQueue.getThreadPool().generic(), future3); + entry.populate( + ByteRange.of(0, regionSize - 1), + (channel, channelPos, streamFactory, relativePos, length, progressUpdater, completionListener) -> completeWith( + completionListener, + () -> { + bytesWritten.addAndGet(length); + progressUpdater.accept(length); + } + ), + taskQueue.getThreadPool().generic(), + future3 + ); assertThat(future3.isDone(), is(true)); var written = future3.get(10L, TimeUnit.SECONDS); @@ -1377,7 +1453,10 @@ public void testSharedSourceInputStreamFactory() throws Exception { range, range, (channel, channelPos, relativePos, length) -> length, - (channel, channelPos, streamFactory, relativePos, length, progressUpdater) -> progressUpdater.accept(length), + (channel, channelPos, streamFactory, relativePos, length, progressUpdater, completionListener) -> completeWith( + completionListener, + () -> progressUpdater.accept(length) + ), EsExecutors.DIRECT_EXECUTOR_SERVICE, future ); @@ -1394,8 +1473,8 @@ public void testSharedSourceInputStreamFactory() throws Exception { final var factoryClosed = new AtomicBoolean(false); final var dummyStreamFactory = new SourceInputStreamFactory() { @Override - public InputStream create(int relativePos) { - return null; + public void create(int relativePos, ActionListener listener) { + listener.onResponse(null); } @Override @@ -1420,17 +1499,20 @@ public void 
fillCacheRange( SourceInputStreamFactory streamFactory, int relativePos, int length, - IntConsumer progressUpdater + IntConsumer progressUpdater, + ActionListener completion ) throws IOException { - if (invocationCounter.incrementAndGet() == 1) { - final Thread witness = invocationThread.compareAndExchange(null, Thread.currentThread()); - assertThat(witness, nullValue()); - } else { - assertThat(invocationThread.get(), sameInstance(Thread.currentThread())); - } - assertThat(streamFactory, sameInstance(dummyStreamFactory)); - assertThat(position.getAndSet(relativePos), lessThan(relativePos)); - progressUpdater.accept(length); + completeWith(completion, () -> { + if (invocationCounter.incrementAndGet() == 1) { + final Thread witness = invocationThread.compareAndExchange(null, Thread.currentThread()); + assertThat(witness, nullValue()); + } else { + assertThat(invocationThread.get(), sameInstance(Thread.currentThread())); + } + assertThat(streamFactory, sameInstance(dummyStreamFactory)); + assertThat(position.getAndSet(relativePos), lessThan(relativePos)); + progressUpdater.accept(length); + }); } }; diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/FrozenIndexInput.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/FrozenIndexInput.java index 56efc72f2f6f7..d7cf22a05981f 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/FrozenIndexInput.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/FrozenIndexInput.java @@ -11,6 +11,7 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.blobcache.BlobCacheUtils; import org.elasticsearch.blobcache.common.ByteBufferReference; import org.elasticsearch.blobcache.common.ByteRange; @@ -146,32 +147,38 @@ private void readWithoutBlobCacheSlow(ByteBuffer b, long position, int length) t final int read = SharedBytes.readCacheFile(channel, pos, relativePos, len, byteBufferReference); stats.addCachedBytesRead(read); return read; - }, (channel, channelPos, streamFactory, relativePos, len, progressUpdater) -> { - assert streamFactory == null : streamFactory; - final long startTimeNanos = stats.currentTimeNanos(); - try (InputStream input = openInputStreamFromBlobStore(rangeToWrite.start() + relativePos, len)) { - assert ThreadPool.assertCurrentThreadPool(SearchableSnapshots.CACHE_FETCH_ASYNC_THREAD_POOL_NAME); - logger.trace( - "{}: writing channel {} pos {} length {} (details: {})", - fileInfo.physicalName(), - channelPos, - relativePos, - len, - cacheFile - ); - SharedBytes.copyToCacheFileAligned( - channel, - input, - channelPos, - relativePos, - len, - progressUpdater, - writeBuffer.get().clear() - ); - final long endTimeNanos = stats.currentTimeNanos(); - stats.addCachedBytesWritten(len, endTimeNanos - startTimeNanos); - } - }); + }, + (channel, channelPos, streamFactory, relativePos, len, progressUpdater, completionListener) -> ActionListener.completeWith( + completionListener, + () -> { + assert streamFactory == null : streamFactory; + final long startTimeNanos = stats.currentTimeNanos(); + try (InputStream input = openInputStreamFromBlobStore(rangeToWrite.start() + relativePos, len)) { + assert 
ThreadPool.assertCurrentThreadPool(SearchableSnapshots.CACHE_FETCH_ASYNC_THREAD_POOL_NAME); + logger.trace( + "{}: writing channel {} pos {} length {} (details: {})", + fileInfo.physicalName(), + channelPos, + relativePos, + len, + cacheFile + ); + SharedBytes.copyToCacheFileAligned( + channel, + input, + channelPos, + relativePos, + len, + progressUpdater, + writeBuffer.get().clear() + ); + final long endTimeNanos = stats.currentTimeNanos(); + stats.addCachedBytesWritten(len, endTimeNanos - startTimeNanos); + return null; + } + } + ) + ); assert bytesRead == length : bytesRead + " vs " + length; byteBufferReference.finish(bytesRead); } finally { From 648e0916ff41f4b87bee7a190b97ef3b0bb25dab Mon Sep 17 00:00:00 2001 From: Parker Timmins Date: Thu, 25 Jul 2024 13:29:07 -0500 Subject: [PATCH 025/105] Fix SnapshotLifecycleStatsTests.testEqualsAndHashcode (#111301) mutateInstance returned unchanged object because randomTimeValue could return 0 or a value smaller than 1 millisecond. This was truncated to 0ms causing the call to SnapshotLifecycleStatsTests.withDeletionTimeUpdated to leave the object unchanged. closes #111300 --- muted-tests.yml | 3 --- .../xpack/core/slm/SnapshotLifecycleStatsTests.java | 3 ++- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 7d5ce943681d1..1d71d7d735e96 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -120,9 +120,6 @@ tests: - class: org.elasticsearch.xpack.esql.CsvTests method: test {inlinestats.ShadowingSelf} issue: https://github.com/elastic/elasticsearch/issues/111261 -- class: org.elasticsearch.xpack.core.slm.SnapshotLifecycleStatsTests - method: testEqualsAndHashcode - issue: https://github.com/elastic/elasticsearch/issues/111300 # Examples: # diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/slm/SnapshotLifecycleStatsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/slm/SnapshotLifecycleStatsTests.java index 4a5121004660f..0332fcf6e6f71 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/slm/SnapshotLifecycleStatsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/slm/SnapshotLifecycleStatsTests.java @@ -22,6 +22,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.concurrent.TimeUnit; public class SnapshotLifecycleStatsTests extends AbstractXContentSerializingTestCase { @@ -120,7 +121,7 @@ protected SnapshotLifecycleStats mutateInstance(SnapshotLifecycleStats instance) case 0 -> instance.withRetentionRunIncremented(); case 1 -> instance.withRetentionFailedIncremented(); case 2 -> instance.withRetentionTimedOutIncremented(); - case 3 -> instance.withDeletionTimeUpdated(randomTimeValue()); + case 3 -> instance.withDeletionTimeUpdated(randomTimeValue(1, 1_000_000, TimeUnit.MILLISECONDS)); case 4 -> instance.withTakenIncremented(policy); case 5 -> instance.withFailedIncremented(policy); case 6 -> instance.withDeletedIncremented(policy); From 25ad974a61d13c1433b2288a826d13ae721adfd8 Mon Sep 17 00:00:00 2001 From: Max Hniebergall <137079448+maxhniebergall@users.noreply.github.com> Date: Thu, 25 Jul 2024 14:32:09 -0400 Subject: [PATCH 026/105] Unmute DatafeedJobsIT (#111299) Related: https://github.com/elastic/elasticsearch/issues/105239 --- .../org/elasticsearch/xpack/ml/integration/DatafeedJobsIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsIT.java index 46a7ca17714ae..623b676e0e0ee 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsIT.java @@ -74,7 +74,6 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.oneOf; -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105239") public class DatafeedJobsIT extends MlNativeAutodetectIntegTestCase { @After @@ -780,6 +779,7 @@ private void startRealtime(String jobId, Integer maxEmptySearches) throws Except }, 30, TimeUnit.SECONDS); } + @LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105239") public void testStartDatafeed_GivenTimeout_Returns408() throws Exception { client().admin().indices().prepareCreate("data-1").setMapping("time", "type=date").get(); long numDocs = 100; From 50bccf560998e1b1514648ea953115a9288f828a Mon Sep 17 00:00:00 2001 From: shainaraskas <58563081+shainaraskas@users.noreply.github.com> Date: Thu, 25 Jul 2024 14:44:57 -0400 Subject: [PATCH 027/105] Round up shard allocation / recovery / relocation concepts (#109943) --- docs/reference/cat/recovery.asciidoc | 2 +- .../high-availability/cluster-design.asciidoc | 3 +- docs/reference/indices/recovery.asciidoc | 18 +---- docs/reference/modules/cluster.asciidoc | 4 +- .../modules/shard-allocation-desc.asciidoc | 2 + docs/reference/modules/shard-ops.asciidoc | 75 +++++++++++++++++++ .../modules/shard-recovery-desc.asciidoc | 16 ++++ docs/reference/setup.asciidoc | 5 +- 8 files changed, 103 insertions(+), 22 deletions(-) create mode 100644 docs/reference/modules/shard-allocation-desc.asciidoc create mode 100644 docs/reference/modules/shard-ops.asciidoc create mode 100644 docs/reference/modules/shard-recovery-desc.asciidoc diff --git a/docs/reference/cat/recovery.asciidoc b/docs/reference/cat/recovery.asciidoc index 058f4e69ae8e3..c3292fc9971ee 100644 --- a/docs/reference/cat/recovery.asciidoc +++ b/docs/reference/cat/recovery.asciidoc @@ -39,7 +39,7 @@ The cat recovery API returns information about shard recoveries, both ongoing and completed. It is a more compact view of the JSON <> API. -include::{es-ref-dir}/indices/recovery.asciidoc[tag=shard-recovery-desc] +include::{es-ref-dir}/modules/shard-recovery-desc.asciidoc[] [[cat-recovery-path-params]] diff --git a/docs/reference/high-availability/cluster-design.asciidoc b/docs/reference/high-availability/cluster-design.asciidoc index 6c17a494f36ae..105c8b236b0b1 100644 --- a/docs/reference/high-availability/cluster-design.asciidoc +++ b/docs/reference/high-availability/cluster-design.asciidoc @@ -246,7 +246,8 @@ accumulate into a noticeable performance penalty. An unreliable network may have frequent network partitions. {es} will automatically recover from a network partition as quickly as it can but your cluster may be partly unavailable during a partition and will need to spend time and resources to -resynchronize any missing data and rebalance itself once the partition heals. +<> and <> +itself once the partition heals. 
Recovering from a failure may involve copying a large amount of data between nodes so the recovery time is often determined by the available bandwidth. diff --git a/docs/reference/indices/recovery.asciidoc b/docs/reference/indices/recovery.asciidoc index b4e4bd33f819a..06b4d9d92e49f 100644 --- a/docs/reference/indices/recovery.asciidoc +++ b/docs/reference/indices/recovery.asciidoc @@ -35,21 +35,7 @@ index, or alias. Use the index recovery API to get information about ongoing and completed shard recoveries. -// tag::shard-recovery-desc[] -Shard recovery is the process of initializing a shard copy, such as restoring a -primary shard from a snapshot or syncing a replica shard from a primary shard. -When a shard recovery completes, the recovered shard is available for search -and indexing. - -Recovery automatically occurs during the following processes: - -* Node startup. This type of recovery is called a local store recovery. -* Primary shard replication. -* Relocation of a shard to a different node in the same cluster. -* <> operation. -* <>, <>, or -<> operation. -// end::shard-recovery-desc[] +include::{es-ref-dir}/modules/shard-recovery-desc.asciidoc[] The index recovery API reports information about completed recoveries only for shard copies that currently exist in the cluster. It only reports the last @@ -360,7 +346,7 @@ The API returns the following response: "index1" : { "shards" : [ { "id" : 0, - "type" : "STORE", + "type" : "EXISTING_STORE", "stage" : "DONE", "primary" : true, "start_time" : "2014-02-24T12:38:06.349", diff --git a/docs/reference/modules/cluster.asciidoc b/docs/reference/modules/cluster.asciidoc index 4b9ede5450683..b3eaa5b47c238 100644 --- a/docs/reference/modules/cluster.asciidoc +++ b/docs/reference/modules/cluster.asciidoc @@ -1,9 +1,7 @@ [[modules-cluster]] === Cluster-level shard allocation and routing settings -_Shard allocation_ is the process of allocating shards to nodes. This can -happen during initial recovery, replica allocation, rebalancing, or -when nodes are added or removed. +include::{es-ref-dir}/modules/shard-allocation-desc.asciidoc[] One of the main roles of the master is to decide which shards to allocate to which nodes, and when to move shards between nodes in order to rebalance the diff --git a/docs/reference/modules/shard-allocation-desc.asciidoc b/docs/reference/modules/shard-allocation-desc.asciidoc new file mode 100644 index 0000000000000..426ad0da72e1b --- /dev/null +++ b/docs/reference/modules/shard-allocation-desc.asciidoc @@ -0,0 +1,2 @@ +Shard allocation is the process of assigning shard copies to nodes. This can +happen during initial recovery, replica allocation, rebalancing, when nodes are added to or removed from the cluster, or when cluster or index settings that impact allocation are updated. \ No newline at end of file diff --git a/docs/reference/modules/shard-ops.asciidoc b/docs/reference/modules/shard-ops.asciidoc new file mode 100644 index 0000000000000..c0e5ee6a220f0 --- /dev/null +++ b/docs/reference/modules/shard-ops.asciidoc @@ -0,0 +1,75 @@ +[[shard-allocation-relocation-recovery]] +=== Shard allocation, relocation, and recovery + +Each <> in Elasticsearch is divided into one or more <>. +Each document in an index belongs to a single shard. + +A cluster can contain multiple copies of a shard. Each shard has one distinguished shard copy called the _primary_, and zero or more non-primary copies called _replicas_. The primary shard copy serves as the main entry point for all indexing operations. 
The operations on the primary shard copy are then forwarded to its replicas. + +Replicas maintain redundant copies of your data across the <> in your cluster, protecting against hardware failure and increasing capacity to serve read requests like searching or retrieving a document. If the primary shard copy fails, then a replica is promoted to primary and takes over the primary's responsibilities. + +Over the course of normal operation, Elasticsearch allocates shard copies to nodes, relocates shard copies across nodes to balance the cluster or satisfy new allocation constraints, and recovers shards to initialize new copies. In this topic, you'll learn how these operations work and how you can control them. + +TIP: To learn about optimizing the number and size of shards in your cluster, refer to <>. To learn about how read and write operations are replicated across shards and shard copies, refer to <>. + +[[shard-allocation]] +==== Shard allocation + +include::{es-ref-dir}/modules/shard-allocation-desc.asciidoc[] + +By default, the primary and replica shard copies for an index can be allocated to any node in the cluster, and may be relocated to rebalance the cluster. + +===== Adjust shard allocation settings + +You can control how shard copies are allocated using the following settings: + +- <>: Use these settings to control how shard copies are allocated and balanced across the entire cluster. For example, you might want to allocate nodes availability zones, or prevent certain nodes from being used so you can perform maintenance. + +- <>: Use these settings to control how the shard copies for a specific index are allocated. For example, you might want to allocate an index to a node in a specific data tier, or to an node with specific attributes. + +===== Monitor shard allocation + +If a shard copy is unassigned, it means that the shard copy is not allocated to any node in the cluster. This can happen if there are not enough nodes in the cluster to allocate the shard copy, or if the shard copy can't be allocated to any node that satisfies the shard allocation filtering rules. When a shard copy is unassigned, your cluster is considered unhealthy and returns a yellow or red cluster health status. + +You can use the following APIs to monitor shard allocation: + +- <> +- <> +- <> + +<>. + +[[shard-recovery]] +==== Shard recovery + +include::{es-ref-dir}/modules/shard-recovery-desc.asciidoc[] + +===== Adjust shard recovery settings + +To control how shards are recovered, for example the resources that can be used by recovery operations, and which indices should be prioritized for recovery, you can adjust the following settings: + +- <> +- <> +- <>, including <> and <> + +Shard recovery operations also respect general shard allocation settings. + +===== Monitor shard recovery + +You can use the following APIs to monitor shard allocation: + + - View a list of in-progress and completed recoveries using the <> + - View detailed information about a specific recovery using the <> + +[[shard-relocation]] +==== Shard relocation + +Shard relocation is the process of moving shard copies from one node to another. This can happen when a node joins or leaves the cluster, or when the cluster is rebalancing. + +When a shard copy is relocated, it is created as a new shard copy on the target node. When the shard copy is fully allocated and recovered, the old shard copy is deleted. If the shard copy being relocated is a primary, then the new shard copy is marked as primary before the old shard copy is deleted. 
+ +===== Adjust shard relocation settings + +You can control how and when shard copies are relocated. For example, you can adjust the rebalancing settings that control when shard copies are relocated to balance the cluster, or the high watermark for disk-based shard allocation that can trigger relocation. These settings are part of the <>. + +Shard relocation operations also respect shard allocation and recovery settings. \ No newline at end of file diff --git a/docs/reference/modules/shard-recovery-desc.asciidoc b/docs/reference/modules/shard-recovery-desc.asciidoc new file mode 100644 index 0000000000000..67eaceb528962 --- /dev/null +++ b/docs/reference/modules/shard-recovery-desc.asciidoc @@ -0,0 +1,16 @@ +Shard recovery is the process of initializing a shard copy, such as restoring a +primary shard from a snapshot or creating a replica shard from a primary shard. +When a shard recovery completes, the recovered shard is available for search +and indexing. + +Recovery automatically occurs during the following processes: + +* When creating an index for the first time. +* When a node rejoins the cluster and starts up any missing primary shard copies using the data that it holds in its data path. +* Creation of new replica shard copies from the primary. +* Relocation of a shard copy to a different node in the same cluster. +* A <> operation. +* A <>, <>, or +<> operation. + +You can determine the cause of a shard recovery using the <> or <> APIs. \ No newline at end of file diff --git a/docs/reference/setup.asciidoc b/docs/reference/setup.asciidoc index 64626aafb2441..b346fddc5e5a1 100644 --- a/docs/reference/setup.asciidoc +++ b/docs/reference/setup.asciidoc @@ -33,7 +33,6 @@ include::setup/configuration.asciidoc[] include::setup/important-settings.asciidoc[] - include::setup/secure-settings.asciidoc[] include::settings/audit-settings.asciidoc[] @@ -82,6 +81,8 @@ include::modules/indices/search-settings.asciidoc[] include::settings/security-settings.asciidoc[] +include::modules/shard-ops.asciidoc[] + include::modules/indices/request_cache.asciidoc[] include::settings/snapshot-settings.asciidoc[] @@ -93,7 +94,9 @@ include::modules/threadpool.asciidoc[] include::settings/notification-settings.asciidoc[] include::setup/advanced-configuration.asciidoc[] + include::setup/sysconfig.asciidoc[] + include::setup/bootstrap-checks.asciidoc[] include::setup/bootstrap-checks-xes.asciidoc[] From 73f52efffc3ddcfc36ff157d2b57fd0dd09d0d1f Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 26 Jul 2024 05:13:36 +1000 Subject: [PATCH 028/105] Mute org.elasticsearch.http.netty4.Netty4ChunkedContinuationsIT testContinuationFailure #111283 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 1d71d7d735e96..d597939ce0cd6 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -120,6 +120,9 @@ tests: - class: org.elasticsearch.xpack.esql.CsvTests method: test {inlinestats.ShadowingSelf} issue: https://github.com/elastic/elasticsearch/issues/111261 +- class: org.elasticsearch.http.netty4.Netty4ChunkedContinuationsIT + method: testContinuationFailure + issue: https://github.com/elastic/elasticsearch/issues/111283 # Examples: # From ca8fd12e62bd84206ac752c0f9a076bb6d998147 Mon Sep 17 00:00:00 2001 From: Max Hniebergall <137079448+maxhniebergall@users.noreply.github.com> Date: Thu, 25 Jul 2024 15:18:24 -0400 Subject: [PATCH 029/105] [ML] Trained Models account for partial deployments in 
serverless (#110734) This change adds support for scaling based on the number of processors required, whereas previously scaling was only done based on memory required. ## TODO As soon as this change is merged, a corresponding change in `MachineLearningTierMetrics` in the serverless repo must be made to update the autoscaling stats variable names. --- docs/changelog/110734.yaml | 5 + .../StartTrainedModelDeploymentAction.java | 9 + .../ml/autoscaling/MlAutoscalingStats.java | 97 +- .../ml/inference/assignment/RoutingInfo.java | 15 + .../assignment/TrainedModelAssignment.java | 18 + .../autoscaling/MlAutoscalingStatsTests.java | 18 +- .../TrainedModelAssignmentTests.java | 24 + .../MlAutoscalingResourceTracker.java | 190 +- .../xpack/ml/utils/MlProcessors.java | 2 +- ...lingResourceTrackerParameterizedTests.java | 1526 +++++++++++++++++ .../MlAutoscalingResourceTrackerTests.java | 446 +++-- 11 files changed, 2109 insertions(+), 241 deletions(-) create mode 100644 docs/changelog/110734.yaml create mode 100644 x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTrackerParameterizedTests.java diff --git a/docs/changelog/110734.yaml b/docs/changelog/110734.yaml new file mode 100644 index 0000000000000..d6dce144b89cd --- /dev/null +++ b/docs/changelog/110734.yaml @@ -0,0 +1,5 @@ +pr: 110734 +summary: Fix bug in ML serverless autoscaling which prevented trained model updates from triggering a scale up +area: Machine Learning +type: bug +issues: [ ] diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java index 59eaf4affa9a8..d47f5be77347a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java @@ -620,6 +620,9 @@ public String getDeploymentId() { return deploymentId; } + /** + * @return the estimated memory (in bytes) required for the model deployment to run + */ public long estimateMemoryUsageBytes() { // We already take into account 2x the model bytes. If the cache size is larger than the model bytes, then // we need to take it into account when returning the estimate. @@ -729,10 +732,16 @@ public long getModelBytes() { return modelBytes; } + /** + * @return the number of threads per allocation used by the model during inference. each thread requires one processor. + */ public int getThreadsPerAllocation() { return threadsPerAllocation; } + /** + * @return the number of allocations requested by the user + */ public int getNumberOfAllocations() { return numberOfAllocations; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/autoscaling/MlAutoscalingStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/autoscaling/MlAutoscalingStats.java index 645d80525cf6b..ffadf4cafaf12 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/autoscaling/MlAutoscalingStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/autoscaling/MlAutoscalingStats.java @@ -3,6 +3,8 @@ * or more contributor license agreements. Licensed under the Elastic License * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
+ * + * this file was contributed to by a Generative AI */ package org.elasticsearch.xpack.core.ml.autoscaling; @@ -13,48 +15,79 @@ import java.io.IOException; +/** + * MlAutoscalingStats is the record which is transmitted to the elasticsearch-autoscaler to decide which nodes to deliver. + *
+ * The "existing" attributes exist only so that the autoscaler can confirm that elasticsearch has the same view of the available hardware. + *
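+ * For example, currentTotalNodes = 2 with currentPerNodeMemoryBytes = 32GB reports that ML currently sees two
+ * 32GB nodes, letting the autoscaler verify that both sides agree on the hardware that is already provisioned.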
+ * The "extra" attributes are used to communicate the additional resources that are required. + *
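+ * For example, a new deployment that needs 4 processors on a cluster with none to spare is reported as
+ * wantedExtraProcessors = 4 together with wantedExtraPerNodeNodeProcessors of at least 1, triggering a scale up.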
+ * The "perNode" attributes define the minimum amount of resources that must be available on every node. + *
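+ * For example, a deployment with 4 threads per allocation cannot place an allocation on a node with fewer than
+ * 4 processors, so wantedExtraPerNodeNodeProcessors is raised to 4 when no current node is large enough.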
+ * unwantedNodeMemoryBytesToRemove is used to communicate the amount of memory that should be removed from the node. + * No attribute exists to remove processors. + *
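+ * For example, when the jobs on the least loaded node could all be rehoused on the remaining nodes,
+ * unwantedNodeMemoryBytesToRemove is set to one node's memory so that the autoscaler can remove that node.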
+ * The word "total" in an attribute name indicates that the attribute is a sum across all nodes. + * + * @param currentTotalNodes the count of nodes that are currently in the cluster + * @param currentPerNodeMemoryBytes the minimum size (memory) of all nodes in the cluster + * @param currentTotalModelMemoryBytes the sum of model memory over every assignment/deployment + * @param currentTotalProcessorsInUse the sum of processors used over every assignment/deployment + * @param currentPerNodeMemoryOverheadBytes always equal to MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD + * @param wantedMinNodes the minimum number of nodes that must be provided by the autoscaler + * @param wantedExtraPerNodeMemoryBytes the amount of additional memory that must be provided on every node + * (this value must be >0 to trigger a scale up based on memory) + * @param wantedExtraPerNodeNodeProcessors the number of additional processors that must be provided on every node + * (this value must be >0 to trigger a scale up based on processors) + * @param wantedExtraModelMemoryBytes the amount of additional model memory that is newly required + * (due to a new assignment/deployment) + * @param wantedExtraProcessors the number of additional processors that are required to be added to the cluster + * @param unwantedNodeMemoryBytesToRemove the amount of memory that should be removed from the cluster. If this is equal to the amount of + * memory provided by a node, a node will be removed. + */ + public record MlAutoscalingStats( - int nodes, - long perNodeMemoryInBytes, - long modelMemoryInBytesSum, - int processorsSum, - int minNodes, - long extraSingleNodeModelMemoryInBytes, - int extraSingleNodeProcessors, - long extraModelMemoryInBytes, - int extraProcessors, - long removeNodeMemoryInBytes, - long perNodeMemoryOverheadInBytes + int currentTotalNodes, + long currentPerNodeMemoryBytes, + long currentTotalModelMemoryBytes, + int currentTotalProcessorsInUse, + int wantedMinNodes, + long wantedExtraPerNodeMemoryBytes, + int wantedExtraPerNodeNodeProcessors, + long wantedExtraModelMemoryBytes, + int wantedExtraProcessors, + long unwantedNodeMemoryBytesToRemove, + long currentPerNodeMemoryOverheadBytes ) implements Writeable { public MlAutoscalingStats(StreamInput in) throws IOException { this( - in.readVInt(), // nodes - in.readVLong(), // perNodeMemoryInBytes + in.readVInt(), // currentTotalNodes + in.readVLong(), // currentPerNodeMemoryBytes in.readVLong(), // modelMemoryInBytes - in.readVInt(), // processorsSum - in.readVInt(), // minNodes - in.readVLong(), // extraSingleNodeModelMemoryInBytes - in.readVInt(), // extraSingleNodeProcessors - in.readVLong(), // extraModelMemoryInBytes - in.readVInt(), // extraProcessors - in.readVLong(), // removeNodeMemoryInBytes - in.readVLong() // perNodeMemoryOverheadInBytes + in.readVInt(), // currentTotalProcessorsInUse + in.readVInt(), // wantedMinNodes + in.readVLong(), // wantedExtraPerNodeMemoryBytes + in.readVInt(), // wantedExtraPerNodeNodeProcessors + in.readVLong(), // wantedExtraModelMemoryBytes + in.readVInt(), // wantedExtraProcessors + in.readVLong(), // unwantedNodeMemoryBytesToRemove + in.readVLong() // currentPerNodeMemoryOverheadBytes ); } @Override public void writeTo(StreamOutput out) throws IOException { - out.writeVInt(nodes); - out.writeVLong(perNodeMemoryInBytes); - out.writeVLong(modelMemoryInBytesSum); - out.writeVLong(processorsSum); - out.writeVInt(minNodes); - out.writeVLong(extraSingleNodeModelMemoryInBytes); - out.writeVInt(extraSingleNodeProcessors); 
- out.writeVLong(extraModelMemoryInBytes); - out.writeVInt(extraProcessors); - out.writeVLong(removeNodeMemoryInBytes); - out.writeVLong(perNodeMemoryOverheadInBytes); + out.writeVInt(currentTotalNodes); + out.writeVLong(currentPerNodeMemoryBytes); + out.writeVLong(currentTotalModelMemoryBytes); + out.writeVLong(currentTotalProcessorsInUse); + out.writeVInt(wantedMinNodes); + out.writeVLong(wantedExtraPerNodeMemoryBytes); + out.writeVInt(wantedExtraPerNodeNodeProcessors); + out.writeVLong(wantedExtraModelMemoryBytes); + out.writeVInt(wantedExtraProcessors); + out.writeVLong(unwantedNodeMemoryBytesToRemove); + out.writeVLong(currentPerNodeMemoryOverheadBytes); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/RoutingInfo.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/RoutingInfo.java index 826b0785aa563..6e7cdf6e9ab03 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/RoutingInfo.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/RoutingInfo.java @@ -59,6 +59,14 @@ private RoutingInfo( this(currentAllocations == null ? 0 : currentAllocations, targetAllocations == null ? 0 : targetAllocations, state, reason); } + /** + * RoutingInfo defines the state of a particular trained model assignment on a particular node. + * @param currentAllocations The number of allocations currently running on a node. + * @param targetAllocations The number of allocations that have been assigned to a node, and will run on the node. Should never be + * higher than the number of available processors on the node. + * @param state Indicates the availability of the allocations on the node. + * @param reason Will contain the reason that currentAllocations != targetAllocations, if applicable, otherwise empty string. + */ public RoutingInfo(int currentAllocations, int targetAllocations, RoutingState state, String reason) { this.currentAllocations = currentAllocations; this.targetAllocations = targetAllocations; @@ -78,10 +86,17 @@ public RoutingInfo(StreamInput in) throws IOException { this.reason = in.readOptionalString(); } + /** + * @return The number of allocations currently running on a node. + */ public int getCurrentAllocations() { return currentAllocations; } + /** + * @return The number of allocations that have been assigned to a node, and will run on the node. Should never be + * higher than the number of available processors on the node. + */ public int getTargetAllocations() { return targetAllocations; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignment.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignment.java index 60e0c0e86a828..4a87b8e24f481 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignment.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignment.java @@ -136,6 +136,17 @@ private TrainedModelAssignment( ); } + /** + * A long-lived object which defines a trained model deployment/assignment. 
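+ * The routing table maps node ids to RoutingInfo entries, so the sum of target allocations across all routed
+ * nodes reflects how many of the requested allocations have actually been placed so far.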
+ * + * @param taskParams the parameters provided by the StartTrainedModelDeploymentAction during the creation of the deployment/assignment + * @param nodeRoutingTable shows where allocations for this assignment/deployment are located (on which nodes) + * @param assignmentState used to track the state of the assignment for rebalancing, autoscaling, and more + * @param reason may contain a human-readable explanation for the current state + * @param startTime the time when the assignment was created + * @param maxAssignedAllocations used for adaptive allocations + * @param adaptiveAllocationsSettings how the assignment should scale based on usage + */ TrainedModelAssignment( StartTrainedModelDeploymentAction.TaskParams taskParams, Map nodeRoutingTable, @@ -178,6 +189,9 @@ public boolean isRoutedToNode(String nodeId) { return nodeRoutingTable.containsKey(nodeId); } + /** + * @return shows where allocations for this assignment/deployment are located (on which nodes) + */ public Map getNodeRoutingTable() { return Collections.unmodifiableMap(nodeRoutingTable); } @@ -310,6 +324,10 @@ public int totalTargetAllocations() { return nodeRoutingTable.values().stream().mapToInt(RoutingInfo::getTargetAllocations).sum(); } + public int totalTargetProcessors() { + return nodeRoutingTable.values().stream().mapToInt(r -> r.getTargetAllocations() * getTaskParams().getThreadsPerAllocation()).sum(); + } + public int totalFailedAllocations() { return nodeRoutingTable.values().stream().mapToInt(RoutingInfo::getFailedAllocations).sum(); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/autoscaling/MlAutoscalingStatsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/autoscaling/MlAutoscalingStatsTests.java index 007f3a657dd8f..af6e16eabed8d 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/autoscaling/MlAutoscalingStatsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/autoscaling/MlAutoscalingStatsTests.java @@ -17,16 +17,16 @@ public class MlAutoscalingStatsTests extends AbstractWireSerializingTestCase nodes = Stream.generate(() -> randomAlphaOfLength(10)).limit(randomInt(5)).toList(); + for (String node : nodes) { + builder.addRoutingEntry(node, RoutingInfoTests.randomInstance()); + } + if (assignmentState == null) { + builder.setAssignmentState(randomFrom(AssignmentState.values())); + } else { + builder.setAssignmentState(assignmentState); + } + if (randomBoolean()) { + builder.setReason(randomAlphaOfLength(10)); + } + return builder; + } + @Override protected TrainedModelAssignment doParseInstance(XContentParser parser) throws IOException { return TrainedModelAssignment.fromXContent(parser); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTracker.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTracker.java index ad5a07387bc74..9a9fbfa0340a9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTracker.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTracker.java @@ -3,6 +3,8 @@ * or more contributor license agreements. Licensed under the Elastic License * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
+ * + * this file was contributed to by a Generative AI */ package org.elasticsearch.xpack.ml.autoscaling; @@ -20,7 +22,6 @@ import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.action.OpenJobAction; import org.elasticsearch.xpack.core.ml.autoscaling.MlAutoscalingStats; -import org.elasticsearch.xpack.core.ml.inference.assignment.AssignmentState; import org.elasticsearch.xpack.core.ml.inference.assignment.Priority; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; import org.elasticsearch.xpack.core.ml.utils.MemoryTrackedTaskState; @@ -52,6 +53,11 @@ public final class MlAutoscalingResourceTracker { private static final Logger logger = LogManager.getLogger(MlAutoscalingResourceTracker.class); + /** + * @param memory (bytes) + * @param processors (count) + * @param jobs (count) + */ record MlJobRequirements(long memory, int processors, int jobs) { static MlJobRequirements of(long memory, int processors, int jobs) { return new MlJobRequirements(memory, processors, jobs); @@ -110,6 +116,7 @@ public static void getMlAutoscalingStats( processorsAvailableFirstNode, maxOpenJobsPerNode, mlDummyAutoscalingEntity, + clusterSettings.get(MachineLearning.ALLOCATED_PROCESSORS_SCALE), listener ); } @@ -118,13 +125,15 @@ static void getMemoryAndProcessors( MlAutoscalingContext autoscalingContext, MlMemoryTracker mlMemoryTracker, Map nodeSizeByMlNode, - long perNodeAvailableModelMemoryInBytes, + long perNodeAvailableModelMemoryBytes, int perNodeAvailableProcessors, int maxOpenJobsPerNode, MlDummyAutoscalingEntity dummyAutoscalingEntity, + int allocatedProcessorsScale, ActionListener listener ) { - Map> perNodeModelMemoryInBytes = new HashMap<>(); + + Map> jobRequirementsByNode = new HashMap<>(); int numberMlNodes = nodeSizeByMlNode.size(); @@ -134,12 +143,12 @@ static void getMemoryAndProcessors( ? 0L : nodeSizeByMlNode.values().iterator().next(); - long modelMemoryBytesSum = 0; - long extraSingleNodeModelMemoryInBytes = 0; + long existingModelMemoryBytes = 0; + long extraPerNodeModelMemoryBytes = 0; long extraModelMemoryInBytes = 0; - int extraSingleNodeProcessors = 0; + int extraPerNodeProcessors = 0; int extraProcessors = 0; - int processorsSum = 0; + int sumOfCurrentlyExistingAndUsedProcessors = 0; logger.debug( "getting ml resources, found [{}] ad jobs, [{}] dfa jobs and [{}] inference deployments", @@ -148,7 +157,7 @@ static void getMemoryAndProcessors( autoscalingContext.modelAssignments.size() ); - // Start with `minNodes = 0`. If any ML job is started this will be increased to 1 in the loops below, + // Start with `wantedMinNodes = 0`. If any ML job is started this will be increased to 1 in the loops below, // and further adjustments are made for trained models depending on allocations. 
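        // Editor's illustration (not part of the original patch): the floor computed below ends up as
        // Math.min(3, Math.max(minNodes, numberOfRequestedAllocations)); for example, 2 requested allocations
        // raise the floor to 2 nodes, while 8 requested allocations still raise it only to 3.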
int minNodes = 0; @@ -174,14 +183,14 @@ static void getMemoryAndProcessors( // implementation decision: don't count processors for AD, if this gets a revisit, ensure to change it for the // old autoscaling, too - extraSingleNodeModelMemoryInBytes = Math.max(extraSingleNodeModelMemoryInBytes, jobMemory); + extraPerNodeModelMemoryBytes = Math.max(extraPerNodeModelMemoryBytes, jobMemory); extraModelMemoryInBytes += jobMemory; } else { logger.debug("job [{}] assigned to [{}], memory required [{}]", jobId, task.getAssignment(), jobMemory); - modelMemoryBytesSum += jobMemory; + existingModelMemoryBytes += jobMemory; - perNodeModelMemoryInBytes.computeIfAbsent(task.getExecutorNode(), k -> new ArrayList<>()) + jobRequirementsByNode.computeIfAbsent(task.getExecutorNode(), k -> new ArrayList<>()) .add(MlJobRequirements.of(jobMemory, 0)); } } @@ -208,91 +217,114 @@ static void getMemoryAndProcessors( // implementation decision: don't count processors for DFA, if this gets a revisit, ensure to change it for the // old autoscaling, too - extraSingleNodeModelMemoryInBytes = Math.max(extraSingleNodeModelMemoryInBytes, jobMemory); + extraPerNodeModelMemoryBytes = Math.max(extraPerNodeModelMemoryBytes, jobMemory); extraModelMemoryInBytes += jobMemory; } else { logger.debug("dfa job [{}] assigned to [{}], memory required [{}]", jobId, task.getAssignment(), jobMemory); - modelMemoryBytesSum += jobMemory; - perNodeModelMemoryInBytes.computeIfAbsent(task.getExecutorNode(), k -> new ArrayList<>()) + existingModelMemoryBytes += jobMemory; + jobRequirementsByNode.computeIfAbsent(task.getExecutorNode(), k -> new ArrayList<>()) .add(MlJobRequirements.of(jobMemory, 0)); } } // trained models + int numberOfAvailableProcessors = (int) Math.floor( + MlProcessors.getTotalMlNodeProcessors(autoscalingContext.mlNodes, allocatedProcessorsScale).count() + ) - totalAssignedProcessors(autoscalingContext); for (var modelAssignment : autoscalingContext.modelAssignments.entrySet()) { TrainedModelAssignment assignment = modelAssignment.getValue(); - final int numberOfAllocations = assignment.getTaskParams().getNumberOfAllocations(); + final int numberOfRequestedAllocations = assignment.getTaskParams().getNumberOfAllocations(); final int numberOfThreadsPerAllocation = assignment.getTaskParams().getThreadsPerAllocation(); final long estimatedMemoryUsage = assignment.getTaskParams().estimateMemoryUsageBytes(); + final int numberOfTargetAllocationsOnExistingNodes = assignment.totalTargetAllocations(); + final int numMissingAllocations = numberOfRequestedAllocations - numberOfTargetAllocationsOnExistingNodes; + final int numMissingProcessors = numMissingAllocations * numberOfThreadsPerAllocation; + int numExistingProcessorsToBeUsed = Math.min(numMissingProcessors, numberOfAvailableProcessors); + + if (assignment.getNodeRoutingTable().isEmpty() == false + && assignment.getNodeRoutingTable().values().stream().allMatch(r -> r.getState().consumesMemory() == false)) { + // Ignore states that don't consume memory, for example all allocations are failed or stopped + // if the node routing table is empty, then it will match the above condition, but it needs to be handled in the next branch + continue; + } else { - if (AssignmentState.STARTING.equals(assignment.getAssignmentState()) && assignment.getNodeRoutingTable().isEmpty()) { - - logger.debug( - () -> format( - "trained model [%s] lacks assignment , memory required [%d]", - modelAssignment.getKey(), - estimatedMemoryUsage - ) - ); - - extraSingleNodeModelMemoryInBytes = 
Math.max(extraSingleNodeModelMemoryInBytes, estimatedMemoryUsage); - extraModelMemoryInBytes += estimatedMemoryUsage; + if (assignment.getNodeRoutingTable().isEmpty() == false) { + // if the routing table is non-empty, this is an existing model + existingModelMemoryBytes += estimatedMemoryUsage; + } else { + // only increase memory requirements for new models + extraPerNodeModelMemoryBytes += Math.max(extraPerNodeModelMemoryBytes, estimatedMemoryUsage); + extraModelMemoryInBytes += estimatedMemoryUsage; + } - // if not low priority, check processor requirements + // if not low priority, check processor requirements. if (Priority.LOW.equals(modelAssignment.getValue().getTaskParams().getPriority()) == false) { - // as assignments can be placed on different nodes, we only need numberOfThreadsPerAllocation here - extraSingleNodeProcessors = Math.max(extraSingleNodeProcessors, numberOfThreadsPerAllocation); - extraProcessors += numberOfAllocations * numberOfThreadsPerAllocation; + if (numMissingProcessors > numberOfAvailableProcessors) { + // as assignments can be placed on different nodes, we only need numberOfThreadsPerAllocation here + extraProcessors += numMissingProcessors - numExistingProcessorsToBeUsed; + extraPerNodeProcessors = Math.max(extraPerNodeProcessors, 1); // if extra processors >0, we need at least 1 + // extraPerNodeProcessors + } + if (perNodeAvailableProcessors < numberOfThreadsPerAllocation) { + extraPerNodeProcessors = Math.max(extraPerNodeProcessors, numberOfThreadsPerAllocation); + } + numberOfAvailableProcessors -= numExistingProcessorsToBeUsed; } - } else if (assignment.getNodeRoutingTable().values().stream().allMatch(r -> r.getState().consumesMemory() == false)) { - // Ignore states that don't consume memory, for example all allocations are failed - continue; - } else { - logger.debug( - () -> format( - "trained model [%s] assigned to [%s], memory required [%d]", - modelAssignment.getKey(), - Strings.arrayToCommaDelimitedString(modelAssignment.getValue().getStartedNodes()), - estimatedMemoryUsage - ) - ); - modelMemoryBytesSum += estimatedMemoryUsage; - processorsSum += numberOfAllocations * numberOfThreadsPerAllocation; + if (extraProcessors > 0 || extraPerNodeProcessors > 0 || extraModelMemoryInBytes > 0 || extraPerNodeModelMemoryBytes > 0) { + logger.info( + () -> format( + "trained model [%s] assigned to [%s], waiting for [%d] allocations to start due to missing hardware", + modelAssignment.getKey(), + Strings.arrayToCommaDelimitedString(modelAssignment.getValue().getStartedNodes()), + numMissingAllocations + ) + ); + } for (String node : modelAssignment.getValue().getNodeRoutingTable().keySet()) { - perNodeModelMemoryInBytes.computeIfAbsent(node, k -> new ArrayList<>()) + sumOfCurrentlyExistingAndUsedProcessors += modelAssignment.getValue() + .getNodeRoutingTable() + .get(node) + .getTargetAllocations() * numberOfThreadsPerAllocation; + + jobRequirementsByNode.computeIfAbsent(node, k -> new ArrayList<>()) .add( MlJobRequirements.of( estimatedMemoryUsage, Priority.LOW.equals(modelAssignment.getValue().getTaskParams().getPriority()) ? 
0 - : numberOfThreadsPerAllocation + : modelAssignment.getValue().getNodeRoutingTable().get(node).getTargetAllocations() + * numberOfThreadsPerAllocation ) ); } - } - // min(3, max(number of allocations over all deployed models) - minNodes = Math.min(3, Math.max(minNodes, numberOfAllocations)); + // min(3, max(number of allocations over all deployed models) + // the minimum number of nodes is equal to the number of allocations, up to 3 + // if the number of allocations is greater than 3, then wantedMinNodes is still 3 + // in theory this should help availability for 2-3 allocations + // the planner should split over all available nodes + minNodes = Math.min(3, Math.max(minNodes, numberOfRequestedAllocations)); + } } // dummy autoscaling entity if (dummyEntityFitsOnLeastLoadedNode( - perNodeModelMemoryInBytes, - perNodeAvailableModelMemoryInBytes, + jobRequirementsByNode, + perNodeAvailableModelMemoryBytes, perNodeAvailableProcessors, dummyAutoscalingEntity ) == false) { - logger.info( + logger.debug( "Scaling up due to dummy entity: dummyEntityMemory: [{}], dummyEntityProcessors: [{}]", dummyAutoscalingEntity.memory, dummyAutoscalingEntity.processors ); - modelMemoryBytesSum += dummyAutoscalingEntity.memory; - processorsSum += dummyAutoscalingEntity.processors; + existingModelMemoryBytes += dummyAutoscalingEntity.memory; + sumOfCurrentlyExistingAndUsedProcessors += dummyAutoscalingEntity.processors; } // check for downscaling @@ -305,15 +337,15 @@ static void getMemoryAndProcessors( // - the total memory usage is less than memory usage after taking away 1 node // - the current number of nodes is greater than the minimum number of nodes if (perNodeMemoryInBytes > 0 - && perNodeAvailableModelMemoryInBytes > 0 + && perNodeAvailableModelMemoryBytes > 0 && extraModelMemoryInBytes == 0 && extraProcessors == 0 - && modelMemoryBytesSum <= perNodeMemoryInBytes * (numberMlNodes - 1) + && existingModelMemoryBytes <= perNodeMemoryInBytes * (numberMlNodes - 1) && minNodes < numberMlNodes - && (perNodeModelMemoryInBytes.size() < numberMlNodes // a node has no assigned jobs + && (jobRequirementsByNode.size() < numberMlNodes // a node has no assigned jobs || checkIfOneNodeCouldBeRemoved( - perNodeModelMemoryInBytes, - perNodeAvailableModelMemoryInBytes, + jobRequirementsByNode, + perNodeAvailableModelMemoryBytes, perNodeAvailableProcessors, maxOpenJobsPerNode, dummyAutoscalingEntity @@ -321,15 +353,18 @@ static void getMemoryAndProcessors( removeNodeMemoryInBytes = perNodeMemoryInBytes; } + // if we need extra processors, we need to tell the elasticsearch-autoscaler that we need at least 1 processor per node + assert extraProcessors == 0 || extraPerNodeProcessors > 0; + listener.onResponse( new MlAutoscalingStats( numberMlNodes, perNodeMemoryInBytes, - modelMemoryBytesSum, - processorsSum, + existingModelMemoryBytes, + sumOfCurrentlyExistingAndUsedProcessors, minNodes, - extraSingleNodeModelMemoryInBytes, - extraSingleNodeProcessors, + extraPerNodeModelMemoryBytes, + extraPerNodeProcessors, extraModelMemoryInBytes, extraProcessors, removeNodeMemoryInBytes, @@ -338,23 +373,27 @@ static void getMemoryAndProcessors( ); } + private static int totalAssignedProcessors(MlAutoscalingContext autoscalingContext) { + return autoscalingContext.modelAssignments.values().stream().mapToInt(TrainedModelAssignment::totalTargetProcessors).sum(); + } + /** * Check if the dummy autoscaling entity task can be added by placing * the task on the least loaded node. - * + *

     * If there exists a node that can accommodate the dummy entity then return true (nothing to do),
     * else return false and increment the memory and processor counts accordingly.
-     * 
+     *

     * We perform the calculation by identifying the least loaded node in terms of memory
     * and determining if the addition of the dummy entity's memory and processor requirements could
     * be accommodated on it.
-     * 
+     *
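+     * For example (illustrative numbers, not taken from the implementation): a dummy entity needing 1gb and
+     * one processor fits if the least loaded node still has 1gb of unassigned model memory and a spare
+     * processor; only when it does not fit are its requirements added to the current memory and processor
+     * sums, which can then trigger a scale up.
+     *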

     * If the calculation returns false then treat the case as for a single trained model job
-     * that is already assigned, i.e. increment modelMemoryBytesSum and processorsSum appropriately.
+     * that is already assigned, i.e. increment modelMemoryBytesSum and currentTotalProcessorsInUse appropriately.
      *
      * @param perNodeJobRequirements per Node lists of requirements
-     * @param perNodeMemoryInBytes total model memory available on every node
-     * @param perNodeProcessors total processors on every node
+     * @param perNodeMemoryInBytes   total model memory available on every node
+     * @param perNodeProcessors      total processors on every node
      * @param dummyAutoscalingEntity "dummy" entity requirements used to potentially trigger a scaling event
      * @return true if the dummy entity can be accommodated, false if not
      */
@@ -369,7 +408,7 @@ static boolean dummyEntityFitsOnLeastLoadedNode(
             return true;
         }
 
-        if (perNodeJobRequirements.size() < 1) {
+        if (perNodeJobRequirements.isEmpty()) {
            return false;
         }
 
@@ -389,7 +428,6 @@ static boolean dummyEntityFitsOnLeastLoadedNode(
             )
             .min(Comparator.comparingLong(value -> value.memory));
 
-        assert (leastLoadedNodeRequirements.isPresent());
         assert leastLoadedNodeRequirements.get().memory >= 0L;
         assert leastLoadedNodeRequirements.get().processors >= 0;
 
@@ -433,8 +471,8 @@ private static MlAutoscalingStats noScaleStats(int numberMlNodes) {
      * Check if one node can be removed by placing the jobs of the least loaded node to others.
      *
      * @param perNodeJobRequirements per Node lists of requirements
-     * @param perNodeMemoryInBytes total model memory available on every node
-     * @param maxOpenJobsPerNode the maximum number of jobs per node
+     * @param perNodeMemoryInBytes   total model memory available on every node
+     * @param maxOpenJobsPerNode     the maximum number of jobs per node
      * @return true if a node can be removed, false if not
      */
     static boolean checkIfOneNodeCouldBeRemoved(
@@ -502,9 +540,9 @@ static boolean checkIfOneNodeCouldBeRemoved(
      *

     * Because the metric has no influence on how the jobs are placed in the end, it calculates the possibility of moving in the least
     * efficient way. That way we ensure that autoscaling is not looping between scaling down, up, down, up ...
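+     * For example, if every job on the candidate node can still be packed onto the remaining nodes even in
+     * this least efficient order, the check returns 0L and the candidate node counts as removable.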

     *
-     * @param candidateJobRequirements list of job requirements given running on the candidate node
+     * @param candidateJobRequirements    list of job requirements for jobs running on the candidate node
      * @param perNodeMlJobRequirementsSum other nodes requirements
-     * @param perNodeMemoryInBytes available memory per node
+     * @param perNodeMemoryInBytes        available memory per node
      * @return remaining memory, that could not be placed on other nodes, 0L if all jobs got placed
      */
     static long checkIfJobsCanBeMovedInLeastEfficientWay(
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/MlProcessors.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/MlProcessors.java
index 4d317ee7925fc..ee982dc395d32 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/MlProcessors.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/MlProcessors.java
@@ -60,7 +60,7 @@ public static Processors getMaxMlNodeProcessors(DiscoveryNodes nodes, Integer al
         return answer;
     }
 
-    public static Processors getTotalMlNodeProcessors(DiscoveryNodes nodes, Integer allocatedProcessorScale) {
+    public static Processors getTotalMlNodeProcessors(Iterable<DiscoveryNode> nodes, Integer allocatedProcessorScale) {
         int total = 0;
         for (DiscoveryNode node : nodes) {
             if (node.getRoles().contains(DiscoveryNodeRole.ML_ROLE)) {
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTrackerParameterizedTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTrackerParameterizedTests.java
new file mode 100644
index 0000000000000..229926e0c9afb
--- /dev/null
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTrackerParameterizedTests.java
@@ -0,0 +1,1526 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */ + +package org.elasticsearch.xpack.ml.autoscaling; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.SetOnce; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodeRole; +import org.elasticsearch.cluster.node.DiscoveryNodeUtils; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; +import org.elasticsearch.xpack.core.ml.autoscaling.MlAutoscalingStats; +import org.elasticsearch.xpack.core.ml.inference.assignment.AssignmentState; +import org.elasticsearch.xpack.core.ml.inference.assignment.Priority; +import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfo; +import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingState; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; +import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.process.MlMemoryTracker; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.atomic.AtomicInteger; + +import static java.util.stream.Collectors.toList; +import static org.elasticsearch.xpack.core.ml.MachineLearningField.USE_AUTO_MACHINE_MEMORY_PERCENT; +import static org.elasticsearch.xpack.ml.MachineLearning.MACHINE_MEMORY_NODE_ATTR; +import static org.elasticsearch.xpack.ml.MachineLearning.MAX_JVM_SIZE_NODE_ATTR; +import static org.elasticsearch.xpack.ml.MachineLearning.MAX_MACHINE_MEMORY_PERCENT; +import static org.hamcrest.Matchers.equalTo; +import static org.mockito.Mockito.mock; + +public class MlAutoscalingResourceTrackerParameterizedTests extends ESTestCase { + private static final long MODEL_BYTES = ByteSizeValue.ofGb(2).getBytes(); + private static final String NODE_NAME_PREFIX = "ML-Node-"; + private final TestCase testCase; + + @ParametersFactory(shuffle = true) + public static Iterable parameterizedTestCases() { + AtomicInteger testCounter = new AtomicInteger(0); + List testCases = new ArrayList<>(30); + testCases.addAll(testSuite(testCounter, 1, MODEL_BYTES, 0)); + testCases.addAll(testSuite(testCounter, 2, MODEL_BYTES, 0)); + testCases.addAll(testSuite(testCounter, 4, MODEL_BYTES, 0)); + testCases.addAll(testSuite(testCounter, 1, 2 * MODEL_BYTES, 0)); + testCases.addAll(testSuite(testCounter, 1, 3 * MODEL_BYTES, 0)); + testCases.addAll(testSuite(testCounter, 1, MODEL_BYTES, ByteSizeValue.ofGb(1).getBytes())); + + return testCases.stream().map(MlAutoscalingResourceTrackerParameterizedTests.TestCase::toArray).collect(toList()); + } + + private static List testSuite(AtomicInteger testCounter, int threadsPerAllocation, long modelBytes, long cacheSize) { + return List.of( + WhenStartTrainedModelDeployment_ThenScaleUp_GivenNoExistingDeployments( + testCounter.getAndAdd(1), + threadsPerAllocation, + modelBytes, + cacheSize + ), + 
WhenStartTrainedModelDeployment_ThenScaleUpDueToProcessors_GivenExistingDeployments( + testCounter.getAndAdd(1), + threadsPerAllocation, + modelBytes, + cacheSize + ), + WhenStartTrainedModelDeployment_ThenScaleUpDueToThreads_GivenExistingDeployments( + testCounter.getAndAdd(1), + threadsPerAllocation, + modelBytes, + cacheSize + ), + WhenStartTrainedModelDeployment_ThenScaleUpDueToMemory_GivenExistingDeployments( + testCounter.getAndAdd(1), + threadsPerAllocation, + modelBytes, + cacheSize + ), + WhenStartTrainedModelDeployment_ThenNoScale_GivenExistingDeployments( + testCounter.getAndAdd(1), + threadsPerAllocation, + modelBytes, + cacheSize + ), + + WhenUpdateTrainedModelDeployment_ThenScaleUp_GivenDeploymentGetsLarger( + testCounter.getAndAdd(1), + threadsPerAllocation, + modelBytes, + cacheSize + ), + WhenUpdateTrainedModelDeployment_ThenNoScale_GivenDeploymentGetsLargerAndNodesAreSufficient( + testCounter.getAndAdd(1), + threadsPerAllocation, + modelBytes, + cacheSize + ), + WhenUpdateTrainedModelDeployment_ThenNoScale_GivenDeploymentGetsSmallerButAllNodesAreStillRequired( + testCounter.getAndAdd(1), + threadsPerAllocation, + modelBytes, + cacheSize + ), + // WhenUpdateTrainedModelDeployment_ThenScaleDown_GivenDeploymentGetsSmaller() TODO we don't currently + // support shrinking nodes + + // Some of the below test cases test for states in between the Stop request being made and being completed + WhenStoppingTrainedModelDeployment_ThenNoScale_GivenAllNodesAreStillRequired( + testCounter.getAndAdd(1), + threadsPerAllocation, + modelBytes, + cacheSize + ), + WhenStopTrainedModelDeployment_ThenNoScale_GivenAllNodesAreStillRequired( + testCounter.getAndAdd(1), + threadsPerAllocation, + modelBytes, + cacheSize + ), + WhenStoppedTrainedModelDeployment_ThenNoScale_GivenAllNodesAreStillRequired( + testCounter.getAndAdd(1), + threadsPerAllocation, + modelBytes, + cacheSize + ), + WhenStopTrainedModelDeployment_ThenScaledown_GivenDeploymentRequiredAWholeNode( + testCounter.getAndAdd(1), + threadsPerAllocation, + modelBytes, + cacheSize + ) + ); + } + + private record TestCase( + String testDescription, + ClusterState clusterState, + ClusterSettings clusterSettings, + MlMemoryTracker mlMemoryTracker, + Settings settings, + ActionListener verificationListener + ) { + Object[] toArray() { + return new Object[] { this }; + } + } + + public MlAutoscalingResourceTrackerParameterizedTests(MlAutoscalingResourceTrackerParameterizedTests.TestCase testCase) { + this.testCase = testCase; + } + + static ActionListener createVerificationListener(String message, MlAutoscalingStats expectedStats) { + return new ActionListener<>() { + @Override + public void onResponse(MlAutoscalingStats actualMlAutoscalingStats) { + assertEquals(message, expectedStats, actualMlAutoscalingStats); + } + + @Override + public void onFailure(Exception e) { + fail("Unexpected failure" + e); + } + }; + } + + static ClusterState createClusterStateWithoutNodes(TrainedModelAssignmentMetadata trainedModelAssignmentMetadata) { + return createClusterState(trainedModelAssignmentMetadata, null); + } + + static ClusterState createClusterState(TrainedModelAssignmentMetadata trainedModelAssignmentMetadata, DiscoveryNodes nodes) { + ClusterState.Builder csBuilder = new ClusterState.Builder(ClusterState.EMPTY_STATE); + + Metadata.Builder metadataBuilder = Metadata.builder(); + // TODO PersistentTasksCustomMetadata is required for jobs other than TrainedModels + // .customs(Map.of(PersistentTasksCustomMetadata.TYPE, 
PersistentTasksCustomMetadata.builder().build())) + if (trainedModelAssignmentMetadata != null) { + metadataBuilder.putCustom(TrainedModelAssignmentMetadata.NAME, trainedModelAssignmentMetadata); + } + + Metadata metadata = metadataBuilder.build(); + csBuilder.metadata(metadata); + + if (nodes != null) { + csBuilder.nodes(nodes); + } + return csBuilder.build(); + } + + /** + * Create nodes of the same size which together have enough resources to satisfy the requirements. The smallest nodes which satisfy the + * requirements are used. + *
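+     * For example, given a minimum of 6gb and 3 processors per node, the smallest qualifying size from the
+     * lists below is the 8gb / 4-processor node, and nodes of that size are added until the requested totals
+     * are reached.
+     *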

+ * Using the smallest nodes is a business requirement to minimize costs and simplify the logic. + * + * @param minProcessorsPerNode + * @param minMemoryPerNode + * @param totalProcessors can be zero if total processors is not a requirement, but totalMemory must be non-zero + * @param totalMemory can be zero if total memory is not a requirement, but totalProcessors must be non-zero + * @return an iterable of the smallest nodes which satisfy the requirements + */ + static DiscoveryNodes createMlNodesOfUniformSize( + int minProcessorsPerNode, + ByteSizeValue minMemoryPerNode, + int totalProcessors, + ByteSizeValue totalMemory + ) { + + List nodeMemorySizes = List.of( + ByteSizeValue.ofGb(4), + ByteSizeValue.ofGb(8), + ByteSizeValue.ofGb(16), + ByteSizeValue.ofGb(32), + ByteSizeValue.ofGb(64), + ByteSizeValue.ofGb(128), + ByteSizeValue.ofGb(256), + ByteSizeValue.ofGb(512) + + ); + List nodeProcessorSizes = List.of(2, 4, 8, 16, 32, 64, 128, 256); + assertEquals( + "Test misconfigured: nodeMemorySizes and nodeProcessorSizes must have the same size", + nodeMemorySizes.size(), + nodeProcessorSizes.size() + ); + + int smallestSufficientNodeIndex = nodeMemorySizes.size(); + + for (int i = 0; i < nodeMemorySizes.size(); i++) { + if (nodeMemorySizes.get(i).getBytes() >= minMemoryPerNode.getBytes() && nodeProcessorSizes.get(i) >= minProcessorsPerNode) { + smallestSufficientNodeIndex = i; + break; + } + } + + double numProcessorsPerNode = nodeProcessorSizes.get(smallestSufficientNodeIndex); + ByteSizeValue memoryPerNode = nodeMemorySizes.get(smallestSufficientNodeIndex); + + int assignedProcessors = 0; + ByteSizeValue assignedMemory = ByteSizeValue.ZERO; + DiscoveryNodes.Builder dnBuilder = DiscoveryNodes.builder(); + int nodeCount = 0; + + while (assignedProcessors < totalProcessors || assignedMemory.getBytes() < totalMemory.getBytes()) { + dnBuilder.add(buildDiscoveryNode(numProcessorsPerNode, String.valueOf(memoryPerNode.getBytes()), nodeCount)); + assignedProcessors += (int) numProcessorsPerNode; + assignedMemory = ByteSizeValue.add(memoryPerNode, assignedMemory); + nodeCount += 1; + } + + return dnBuilder.build(); + } + + private static DiscoveryNode buildDiscoveryNode(double numProcessorsPerNode, String memoryPerNode, int nodeNumber) { + Map attributes = Map.of( + MACHINE_MEMORY_NODE_ATTR, + memoryPerNode, + MachineLearning.ALLOCATED_PROCESSORS_NODE_ATTR, + String.valueOf(numProcessorsPerNode), + MAX_JVM_SIZE_NODE_ATTR, + Long.toString(Runtime.getRuntime().maxMemory()) + ); + + return DiscoveryNodeUtils.builder(NODE_NAME_PREFIX + nodeNumber) + .attributes(attributes) + .roles(Set.of(DiscoveryNodeRole.ML_ROLE)) + .build(); + } + + private static ClusterSettings createClusterSettings() { + return new ClusterSettings(Settings.EMPTY, Set.of(MachineLearning.ALLOCATED_PROCESSORS_SCALE)); + } + + private static MlMemoryTracker createMlMemoryTracker() { + return mock(MlMemoryTracker.class); + } + + private static Settings createSettings() { + return Settings.builder() + .put(MAX_MACHINE_MEMORY_PERCENT.getKey(), MAX_MACHINE_MEMORY_PERCENT.get(Settings.EMPTY)) + .put(USE_AUTO_MACHINE_MEMORY_PERCENT.getKey(), USE_AUTO_MACHINE_MEMORY_PERCENT.get(Settings.EMPTY)) + .build(); + } + + private static StartTrainedModelDeploymentAction.TaskParams createTaskParams( + int numAllocations, + int seed, + int threadsPerAllocation, + long modelBytes, + long cacheSize + ) { + + String modelId = "modelId" + seed; + String deploymentId = "deploymentId" + seed; + int queueCapacity = 1024; + Priority priority = 
Priority.NORMAL; + long perDeploymentMemoryBytes = modelBytes; + long perAllocationMemoryBytes = 0; + + return new StartTrainedModelDeploymentAction.TaskParams( + modelId, + deploymentId, + modelBytes, + numAllocations, + threadsPerAllocation, + queueCapacity, + ByteSizeValue.ofBytes(cacheSize), + priority, + perDeploymentMemoryBytes, + perAllocationMemoryBytes + ); + } + + private static Map createModelAssignments( + int numAssignments, + int[] numAllocationsPerAssignment, + Map routingInfo, + int seed, + int threadsPerAllocation, + long modelBytes, + long cacheSize + ) { + Map assignments = new HashMap<>(numAssignments); + + for (int i = 0; i < numAssignments; i++) { + StartTrainedModelDeploymentAction.TaskParams taskParams = createTaskParams( + numAllocationsPerAssignment[i], + i, + threadsPerAllocation, + modelBytes, + cacheSize + ); + TrainedModelAssignment.Builder tmaBuilder = TrainedModelAssignment.Builder.empty(taskParams, null); + tmaBuilder.setAssignmentState(AssignmentState.STARTING); + for (var entry : routingInfo.entrySet()) { + tmaBuilder.addRoutingEntry(entry.getKey(), entry.getValue()); + } + assignments.put("TrainedModelAssignment-" + seed + "-" + i, tmaBuilder.build()); + } + + return assignments; + } + + private static int calculateMaxThreadsPerAllocation(Map assignments) { + return assignments.values().stream().mapToInt(a -> a.getTaskParams().getThreadsPerAllocation()).max().orElseGet(() -> 0); + } + + private static int calculateNodeSize(ClusterState clusterState) { + return (int) Double.parseDouble( + clusterState.nodes() + .getAllNodes() + .stream() + .findFirst() + .get() + .getAttributes() + .get(MachineLearning.ALLOCATED_PROCESSORS_NODE_ATTR) + ); + } + + private static long calculateExistingPerNodeMemoryBytes(ClusterState clusterState) { + return Long.parseLong(clusterState.nodes().getAllNodes().stream().findFirst().get().getAttributes().get(MACHINE_MEMORY_NODE_ATTR)); + } + + private static DiscoveryNodes createDiscoveryNode(int memoryGb, int processors) { + ByteSizeValue minMemoryPerNode = ByteSizeValue.ofGb(memoryGb); + int totalProcessors = processors; + ByteSizeValue totalMemory = ByteSizeValue.ofBytes(minMemoryPerNode.getBytes()); + DiscoveryNodes nodes = createMlNodesOfUniformSize(processors, minMemoryPerNode, totalProcessors, totalMemory); + return nodes; + } + + private static DiscoveryNodes createSmallDiscoveryNodes(int memoryGb, int totalProcessors) { + ByteSizeValue minMemoryPerNode = ByteSizeValue.ofGb(memoryGb); + ByteSizeValue totalMemory = ByteSizeValue.ofBytes(minMemoryPerNode.getBytes()); + DiscoveryNodes nodes = createMlNodesOfUniformSize(2, minMemoryPerNode, totalProcessors, totalMemory); + return nodes; + } + + private static long calculateExtraPerNodeModelMemoryBytes(Collection assignments) { + return assignments.stream().findFirst().get().getTaskParams().estimateMemoryUsageBytes(); + } + + private static int calculateTotalExistingAndUsedProcessors(Map assignments) { + return assignments.values().stream().mapToInt(TrainedModelAssignment::totalTargetProcessors).sum(); + } + + private static long calculateExistingTotalModelMemoryBytes(Map assignments) { + return assignments.values().stream().filter(tma -> { + if (tma.getAssignmentState() == AssignmentState.STARTED) { + return true; + } else if (tma.getAssignmentState() == AssignmentState.STARTING && tma.getNodeRoutingTable().isEmpty() == false) { + return true; + } else if (tma.getAssignmentState() == AssignmentState.STOPPING && tma.getNodeRoutingTable().isEmpty() == false) { + return true; + 
} else { + return false; + } + }).mapToLong(tma -> tma.getTaskParams().estimateMemoryUsageBytes()).sum(); + } + + /** + * This test is run for each of the supplied {@link TestCase} configurations. + * @throws IOException _ + */ + public void test() throws IOException { + SetOnce executeCalled = new SetOnce<>(); + + var executionVerificationListener = new ActionListener() { + @Override + public void onResponse(MlAutoscalingStats mlAutoscalingStats) { + executeCalled.set(true); + testCase.verificationListener.onResponse(mlAutoscalingStats); + } + + @Override + public void onFailure(Exception e) { + fail("Unexpected failure" + e); + } + }; + + MlAutoscalingResourceTracker.getMlAutoscalingStats( + testCase.clusterState, + testCase.clusterSettings, + testCase.mlMemoryTracker, + testCase.settings, + executionVerificationListener + ); + + assertThat(testCase.testDescription, executeCalled.get(), equalTo(true)); + // other assertions are run in testCase.verificationListener + } + + private static TestCase WhenStartTrainedModelDeployment_ThenScaleUp_GivenNoExistingDeployments( + int seed, + int threadsPerAllocation, + long modelBytes, + long cacheSize + ) { + String testDescription = "test scaling from zero"; + + // generic parameters + ClusterSettings clusterSettings = createClusterSettings(); + MlMemoryTracker mlMemoryTracker = createMlMemoryTracker(); + Settings settings = createSettings(); + + // TrainedModelAssignments + int numAllocationsRequested = 1; + Map assignments = createModelAssignments( + 1, + new int[] { numAllocationsRequested }, + Map.of(), + seed, + threadsPerAllocation, + modelBytes, + cacheSize + ); + TrainedModelAssignmentMetadata trainedModelAssignmentMetadata = new TrainedModelAssignmentMetadata(assignments); + + // Cluster state starts with zero nodes + ClusterState clusterState = createClusterStateWithoutNodes(trainedModelAssignmentMetadata); + + // expected stats: + int existingNodes = 0; + long existingPerNodeMemoryBytes = 0; + long existingTotalModelMemoryBytes = 0; + int totalExistingProcessors = 0; + int minNodes = 1; + long extraSingleNodeModelMemoryInBytes = assignments.values() + .stream() + .mapToLong(tma -> tma.getTaskParams().estimateMemoryUsageBytes()) + .sum(); + int extraSingleNodeProcessors = threadsPerAllocation; + long extraModelMemoryInBytes = extraSingleNodeModelMemoryInBytes; + int extraProcessors = threadsPerAllocation; + long removeNodeMemoryInBytes = 0; + long perNodeMemoryOverheadInBytes = MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(); + + MlAutoscalingStats expectedStats = new MlAutoscalingStats( + existingNodes, + existingPerNodeMemoryBytes, + existingTotalModelMemoryBytes, + totalExistingProcessors, + minNodes, + extraSingleNodeModelMemoryInBytes, + extraSingleNodeProcessors, + extraModelMemoryInBytes, + extraProcessors, + removeNodeMemoryInBytes, + perNodeMemoryOverheadInBytes + ); + + ActionListener verificationListener = createVerificationListener(testDescription, expectedStats); + return new TestCase(testDescription, clusterState, clusterSettings, mlMemoryTracker, settings, verificationListener); + } + + private static TestCase WhenStartTrainedModelDeployment_ThenScaleUpDueToProcessors_GivenExistingDeployments( + int seed, + int threadsPerAllocation, + long modelBytes, + long cacheSize + ) { + String testDescription = + "test scaling up with existing deployment when the new deployment requires more processors than are available"; + // Generic settings + ClusterSettings clusterSettings = createClusterSettings(); + MlMemoryTracker 
mlMemoryTracker = createMlMemoryTracker();
+        Settings settings = createSettings();
+
+        // TrainedModelAssignments
+        int numAllocationsRequestedPreviously = 8;
+        DiscoveryNodes nodes = createDiscoveryNode(0, numAllocationsRequestedPreviously * threadsPerAllocation);
+        int numProcessorsOnNodes = nodes.stream()
+            .mapToInt(n -> (int) Double.parseDouble(n.getAttributes().get(MachineLearning.ALLOCATED_PROCESSORS_NODE_ATTR)))
+            .sum();
+        int numAssignments = 2;
+        Map<String, TrainedModelAssignment> assignments = new HashMap<>(numAssignments);
+        // assignment 1 - already deployed
+        {
+            StartTrainedModelDeploymentAction.TaskParams taskParams = createTaskParams(
+                numAllocationsRequestedPreviously,
+                1,
+                threadsPerAllocation,
+                modelBytes,
+                cacheSize
+            );
+            TrainedModelAssignment.Builder tmaBuilder = TrainedModelAssignment.Builder.empty(taskParams, null);
+            tmaBuilder.setAssignmentState(AssignmentState.STARTED);
+            tmaBuilder.addRoutingEntry(
+                NODE_NAME_PREFIX + 0,
+                new RoutingInfo(numAllocationsRequestedPreviously, numAllocationsRequestedPreviously, RoutingState.STARTED, null)
+            );
+            assignments.put("TrainedModelAssignment-" + seed + "-" + 0, tmaBuilder.build());
+        }
+        // assignment 2 - not deployed yet
+        {
+            StartTrainedModelDeploymentAction.TaskParams taskParams = createTaskParams(
+                numAllocationsRequestedPreviously,
+                2,
+                threadsPerAllocation,
+                modelBytes,
+                cacheSize
+            );
+            TrainedModelAssignment.Builder tmaBuilder = TrainedModelAssignment.Builder.empty(taskParams, null);
+            tmaBuilder.setAssignmentState(AssignmentState.STARTING);
+            tmaBuilder.clearNodeRoutingTable();
+            assignments.put("TrainedModelAssignment-" + seed + "-" + 1, tmaBuilder.build());
+        }
+        TrainedModelAssignmentMetadata trainedModelAssignmentMetadata = new TrainedModelAssignmentMetadata(assignments);
+
+        // Cluster state
+        ClusterState clusterState = createClusterState(trainedModelAssignmentMetadata, nodes);
+
+        // expected stats:
+        int existingNodes = clusterState.nodes().getSize();
+        long existingPerNodeMemoryBytes = calculateExistingPerNodeMemoryBytes(clusterState);
+        long existingTotalModelMemoryBytes = calculateExistingTotalModelMemoryBytes(assignments);
+        int totalExistingProcessors = calculateTotalExistingAndUsedProcessors(assignments);
+        int minNodes = Math.min(3, Math.max(1, numAllocationsRequestedPreviously));
+        long extraPerNodeModelMemoryBytes = calculateExtraPerNodeModelMemoryBytes(assignments.values());
+        int extraPerNodeProcessors = calculateExtraProcessorsPerNode(assignments, clusterState, true);
+        long extraModelMemoryBytes = extraPerNodeModelMemoryBytes;
+        int extraProcessors = (numAllocationsRequestedPreviously * threadsPerAllocation) * 2 - numProcessorsOnNodes;
+        long removeNodeMemoryInBytes = 0;
+        long perNodeMemoryOverheadInBytes = MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes();
+
+        MlAutoscalingStats expectedStats = new MlAutoscalingStats(
+            existingNodes,
+            existingPerNodeMemoryBytes,
+            existingTotalModelMemoryBytes,
+            totalExistingProcessors,
+            minNodes,
+            extraPerNodeModelMemoryBytes,
+            extraPerNodeProcessors,
+            extraModelMemoryBytes,
+            extraProcessors,
+            removeNodeMemoryInBytes,
+            perNodeMemoryOverheadInBytes
+        );
+
+        ActionListener<MlAutoscalingStats> verificationListener = createVerificationListener(testDescription, expectedStats);
+        return new TestCase(testDescription, clusterState, clusterSettings, mlMemoryTracker, settings, verificationListener);
+    }
+
+    private static TestCase WhenStartTrainedModelDeployment_ThenScaleUpDueToThreads_GivenExistingDeployments(
+        int seed,
+        int threadsPerAllocation,
+        long modelBytes,
+        long cacheSize
+    ) {
+        String testDescription =
+            "test scaling up with existing deployment when the new deployment requires more processors per node than are available";
+        // Generic settings
+        ClusterSettings clusterSettings = createClusterSettings();
+        MlMemoryTracker mlMemoryTracker = createMlMemoryTracker();
+        Settings settings = createSettings();
+
+        // TrainedModelAssignments
+        int numAllocationsRequestedPreviously = 8;
+        DiscoveryNodes nodes = createMlNodesOfUniformSize(
+            threadsPerAllocation,
+            ByteSizeValue.ofBytes(modelBytes + cacheSize),
+            numAllocationsRequestedPreviously * threadsPerAllocation,
+            ByteSizeValue.ZERO
+        );
+        int numProcessorsOnNodes = nodes.stream()
+            .mapToInt(n -> (int) Double.parseDouble(n.getAttributes().get(MachineLearning.ALLOCATED_PROCESSORS_NODE_ATTR)))
+            .sum();
+        int numAssignments = 2;
+        Map<String, TrainedModelAssignment> assignments = new HashMap<>(numAssignments);
+        // assignment 1 - already deployed
+        {
+            StartTrainedModelDeploymentAction.TaskParams taskParams = createTaskParams(
+                numAllocationsRequestedPreviously,
+                1,
+                threadsPerAllocation,
+                modelBytes,
+                cacheSize
+            );
+            TrainedModelAssignment.Builder tmaBuilder = TrainedModelAssignment.Builder.empty(taskParams, null);
+            tmaBuilder.setAssignmentState(AssignmentState.STARTED);
+            tmaBuilder.addRoutingEntry(
+                NODE_NAME_PREFIX + 0,
+                new RoutingInfo(numAllocationsRequestedPreviously, numAllocationsRequestedPreviously, RoutingState.STARTED, null)
+            );
+            assignments.put("TrainedModelAssignment-" + seed + "-" + 0, tmaBuilder.build());
+        }
+        // assignment 2 - not deployed yet
+        {
+            StartTrainedModelDeploymentAction.TaskParams taskParams = createTaskParams(
+                numAllocationsRequestedPreviously,
+                2,
+                threadsPerAllocation * 2,
+                modelBytes,
+                cacheSize
+            );
+            TrainedModelAssignment.Builder tmaBuilder = TrainedModelAssignment.Builder.empty(taskParams, null);
+            tmaBuilder.setAssignmentState(AssignmentState.STARTING);
+            tmaBuilder.clearNodeRoutingTable();
+            assignments.put("TrainedModelAssignment-" + seed + "-" + 1, tmaBuilder.build());
+        }
+        TrainedModelAssignmentMetadata trainedModelAssignmentMetadata = new TrainedModelAssignmentMetadata(assignments);
+
+        // Cluster state
+        ClusterState clusterState = createClusterState(trainedModelAssignmentMetadata, nodes);
+
+        // expected stats:
+        int existingNodes = clusterState.nodes().getSize();
+        long existingPerNodeMemoryBytes = calculateExistingPerNodeMemoryBytes(clusterState);
+        long existingTotalModelMemoryBytes = calculateExistingTotalModelMemoryBytes(assignments);
+        int totalExistingProcessors = calculateTotalExistingAndUsedProcessors(assignments);
+        int minNodes = Math.min(3, Math.max(1, numAllocationsRequestedPreviously));
+        long extraPerNodeModelMemoryBytes = calculateExtraPerNodeModelMemoryBytes(assignments.values());
+        int extraPerNodeProcessors = calculateExtraProcessorsPerNode(assignments, clusterState, true);
+        long extraModelMemoryBytes = extraPerNodeModelMemoryBytes;
+        int extraProcessors = (numAllocationsRequestedPreviously * threadsPerAllocation) * 2;
+        long removeNodeMemoryInBytes = 0;
+        long perNodeMemoryOverheadInBytes = MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes();
+
+        MlAutoscalingStats expectedStats = new MlAutoscalingStats(
+            existingNodes,
+            existingPerNodeMemoryBytes,
+            existingTotalModelMemoryBytes,
+            totalExistingProcessors,
+            minNodes,
+            extraPerNodeModelMemoryBytes,
+            extraPerNodeProcessors,
+            extraModelMemoryBytes,
+            extraProcessors,
+            removeNodeMemoryInBytes,
+            perNodeMemoryOverheadInBytes
+        );
+
+        ActionListener<MlAutoscalingStats> verificationListener =
createVerificationListener(testDescription, expectedStats); + return new TestCase(testDescription, clusterState, clusterSettings, mlMemoryTracker, settings, verificationListener); + } + + private static int calculateExtraProcessorsPerNode( + Map assignments, + ClusterState clusterState, + boolean isScaleUp + ) { + int nodeSize = calculateNodeSize(clusterState); + int maxTPA = calculateMaxThreadsPerAllocation(assignments); + if (nodeSize >= maxTPA) { + return isScaleUp ? 1 : 0; + } else { + return maxTPA; + } + } + + private static TestCase WhenStartTrainedModelDeployment_ThenScaleUpDueToMemory_GivenExistingDeployments( + int seed, + int threadsPerAllocation, + long modelBytes, + long cacheSize + ) { + String testDescription = "test scaling up with existing deployment when the new deployment requires more memory than is available"; + // Generic settings + ClusterSettings clusterSettings = createClusterSettings(); + MlMemoryTracker mlMemoryTracker = createMlMemoryTracker(); + Settings settings = createSettings(); + + // TrainedModelAssignments + int numAllocationsRequestedPreviously = 4; + DiscoveryNodes nodes = createMlNodesOfUniformSize( + threadsPerAllocation, + ByteSizeValue.ofBytes(modelBytes + cacheSize), + numAllocationsRequestedPreviously * threadsPerAllocation, + ByteSizeValue.ZERO + ); + int numAssignments = 2; + Map assignments = new HashMap<>(numAssignments); + // assignment 1 - already deployed + { + int numAllocationsInAssignment1 = numAllocationsRequestedPreviously - 1; + StartTrainedModelDeploymentAction.TaskParams taskParams = createTaskParams( + numAllocationsInAssignment1, + 1, + threadsPerAllocation, + modelBytes, + cacheSize + ); + TrainedModelAssignment.Builder tmaBuilder = TrainedModelAssignment.Builder.empty(taskParams, null); + tmaBuilder.setAssignmentState(AssignmentState.STARTED); + tmaBuilder.addRoutingEntry( + NODE_NAME_PREFIX + 0, + new RoutingInfo(numAllocationsInAssignment1, numAllocationsInAssignment1, RoutingState.STARTED, null) + ); + assignments.put("TrainedModelAssignment-" + seed + "-" + 0, tmaBuilder.build()); + } + // assignment 2 - not deployed yet + { + StartTrainedModelDeploymentAction.TaskParams taskParams = createTaskParams( + 1, + 2, + threadsPerAllocation, + modelBytes * 4, + cacheSize + ); + TrainedModelAssignment.Builder tmaBuilder = TrainedModelAssignment.Builder.empty(taskParams, null); + tmaBuilder.setAssignmentState(AssignmentState.STARTING); + tmaBuilder.clearNodeRoutingTable(); + assignments.put("TrainedModelAssignment-" + seed + "-" + 1, tmaBuilder.build()); + } + TrainedModelAssignmentMetadata trainedModelAssignmentMetadata = new TrainedModelAssignmentMetadata(assignments); + + // Cluster state + ClusterState clusterState = createClusterState(trainedModelAssignmentMetadata, nodes); + + // expected stats: + int existingNodes = clusterState.nodes().getSize(); + long existingPerNodeMemoryBytes = calculateExistingPerNodeMemoryBytes(clusterState); + long existingTotalModelMemoryBytes = calculateExistingTotalModelMemoryBytes(assignments); + int totalExistingProcessors = calculateTotalExistingAndUsedProcessors(assignments); + int minNodes = 3; // TODO understand why this value + long extraPerNodeModelMemoryBytes = calculateExtraPerNodeModelMemoryBytes( + Collections.singleton(assignments.get("TrainedModelAssignment-" + seed + "-" + 1)) + ); + int extraPerNodeProcessors = 0; + long extraModelMemoryBytes = calculateExtraPerNodeModelMemoryBytes( + Collections.singleton(assignments.get("TrainedModelAssignment-" + seed + "-" + 1)) + ); + int 
extraProcessors = 0; + long removeNodeMemoryInBytes = 0; + long perNodeMemoryOverheadInBytes = MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(); + + MlAutoscalingStats expectedStats = new MlAutoscalingStats( + existingNodes, + existingPerNodeMemoryBytes, + existingTotalModelMemoryBytes, + totalExistingProcessors, + minNodes, + extraPerNodeModelMemoryBytes, + extraPerNodeProcessors, + extraModelMemoryBytes, + extraProcessors, + removeNodeMemoryInBytes, + perNodeMemoryOverheadInBytes + ); + + ActionListener verificationListener = createVerificationListener(testDescription, expectedStats); + return new TestCase(testDescription, clusterState, clusterSettings, mlMemoryTracker, settings, verificationListener); + } + + private static TestCase WhenStartTrainedModelDeployment_ThenNoScale_GivenExistingDeployments( + int seed, + int threadsPerAllocation, + long modelBytes, + long cacheSize + ) { + String testDescription = "test scaling when existing nodes have room for the new deployment"; + + // Generic settings + ClusterSettings clusterSettings = createClusterSettings(); + MlMemoryTracker mlMemoryTracker = createMlMemoryTracker(); + Settings settings = createSettings(); + + // TrainedModelAssignments + int numAllocationsRequestedPreviously = seed; + DiscoveryNodes nodes = createDiscoveryNode( + (int) ByteSizeValue.ofBytes((modelBytes + cacheSize) * 2).getGb(), + numAllocationsRequestedPreviously * 2 * threadsPerAllocation + ); + int numAssignments = 2; + Map assignments = new HashMap<>(numAssignments); + // assignment 1 - already deployed + { + StartTrainedModelDeploymentAction.TaskParams taskParams = createTaskParams( + numAllocationsRequestedPreviously, + 1, + threadsPerAllocation, + modelBytes, + cacheSize + ); + TrainedModelAssignment.Builder tmaBuilder = TrainedModelAssignment.Builder.empty(taskParams, null); + tmaBuilder.setAssignmentState(AssignmentState.STARTED); + tmaBuilder.addRoutingEntry( + NODE_NAME_PREFIX + 0, + new RoutingInfo(numAllocationsRequestedPreviously, numAllocationsRequestedPreviously, RoutingState.STARTED, null) + ); + assignments.put("TrainedModelAssignment-" + seed + "-" + 0, tmaBuilder.build()); + } + // asssignment 2 - not deployed yet + { + StartTrainedModelDeploymentAction.TaskParams taskParams = createTaskParams( + numAllocationsRequestedPreviously, + 2, + threadsPerAllocation, + modelBytes, + cacheSize + ); + TrainedModelAssignment.Builder tmaBuilder = TrainedModelAssignment.Builder.empty(taskParams, null); + tmaBuilder.setAssignmentState(AssignmentState.STARTING); + tmaBuilder.addRoutingEntry( + NODE_NAME_PREFIX + 0, + new RoutingInfo(0, numAllocationsRequestedPreviously, RoutingState.STARTING, null) + ); + assignments.put("TrainedModelAssignment-" + seed + "-" + 1, tmaBuilder.build()); + } + TrainedModelAssignmentMetadata trainedModelAssignmentMetadata = new TrainedModelAssignmentMetadata(assignments); + + // Cluster state + ClusterState clusterState = createClusterState(trainedModelAssignmentMetadata, nodes); + + // expected stats: + int existingNodes = clusterState.nodes().getSize(); + long existingPerNodeMemoryBytes = calculateExistingPerNodeMemoryBytes(clusterState); + long existingTotalModelMemoryBytes = calculateExistingTotalModelMemoryBytes(assignments); + int totalExistingProcessors = calculateTotalExistingAndUsedProcessors(assignments); + int minNodes = 3; // TODO understand why this value + long extraPerNodeModelMemoryBytes = 0; + int extraPerNodeProcessors = 0; + long extraModelMemoryBytes = extraPerNodeModelMemoryBytes; + int extraProcessors 
= 0; + long removeNodeMemoryInBytes = 0; + long perNodeMemoryOverheadInBytes = MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(); + + MlAutoscalingStats expectedStats = new MlAutoscalingStats( + existingNodes, + existingPerNodeMemoryBytes, + existingTotalModelMemoryBytes, + totalExistingProcessors, + minNodes, + extraPerNodeModelMemoryBytes, + extraPerNodeProcessors, + extraModelMemoryBytes, + extraProcessors, + removeNodeMemoryInBytes, + perNodeMemoryOverheadInBytes + ); + + ActionListener verificationListener = createVerificationListener(testDescription, expectedStats); + return new TestCase(testDescription, clusterState, clusterSettings, mlMemoryTracker, settings, verificationListener); + } + + private static TestCase WhenUpdateTrainedModelDeployment_ThenScaleUp_GivenDeploymentGetsLarger( + int seed, + int threadsPerAllocation, + long modelBytes, + long cacheSize + ) { + String testDescription = "test scaling up when updating existing deployment to be larger"; + + // Generic settings + ClusterSettings clusterSettings = createClusterSettings(); + MlMemoryTracker mlMemoryTracker = createMlMemoryTracker(); + Settings settings = createSettings(); + + // TrainedModelAssignments + int numAllocationsRequestedPreviously = 4; + int updatedNumAllocations = 8; + DiscoveryNodes nodes = createDiscoveryNode( + (int) ByteSizeValue.ofBytes(modelBytes + cacheSize).getGb(), + numAllocationsRequestedPreviously * threadsPerAllocation + ); + int numAssignments = 1; + Map assignments = new HashMap<>(numAssignments); + // assignment 1 - already deployed - just updated to be larger + { + StartTrainedModelDeploymentAction.TaskParams taskParams = createTaskParams( + updatedNumAllocations, + seed, + threadsPerAllocation, + modelBytes, + cacheSize + ); + TrainedModelAssignment.Builder tmaBuilder = TrainedModelAssignment.Builder.empty(taskParams, null); + tmaBuilder.setAssignmentState(AssignmentState.STARTED); + tmaBuilder.addRoutingEntry( + NODE_NAME_PREFIX + 0, + new RoutingInfo(numAllocationsRequestedPreviously, numAllocationsRequestedPreviously, RoutingState.STARTED, null) + ); + assignments.put("TrainedModelAssignment-" + seed + "-" + 0, tmaBuilder.build()); + } + TrainedModelAssignmentMetadata trainedModelAssignmentMetadata = new TrainedModelAssignmentMetadata(assignments); + + // Cluster state + ClusterState clusterState = createClusterState(trainedModelAssignmentMetadata, nodes); + + // expected stats: + int existingNodes = clusterState.nodes().getSize(); + long existingPerNodeMemoryBytes = calculateExistingPerNodeMemoryBytes(clusterState); + long existingTotalModelMemoryBytes = calculateExistingTotalModelMemoryBytes(assignments); + int totalExistingProcessors = calculateTotalExistingAndUsedProcessors(assignments); + int minNodes = 3; + long extraPerNodeModelMemoryBytes = 0; + int extraPerNodeProcessors = 1; + long extraModelMemoryBytes = 0; + int extraProcessors = updatedNumAllocations * threadsPerAllocation - numAllocationsRequestedPreviously * threadsPerAllocation; + long removeNodeMemoryInBytes = 0; + long perNodeMemoryOverheadInBytes = MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(); + + MlAutoscalingStats expectedStats = new MlAutoscalingStats( + existingNodes, + existingPerNodeMemoryBytes, + existingTotalModelMemoryBytes, + totalExistingProcessors, + minNodes, + extraPerNodeModelMemoryBytes, + extraPerNodeProcessors, + extraModelMemoryBytes, + extraProcessors, + removeNodeMemoryInBytes, + perNodeMemoryOverheadInBytes + ); + + ActionListener verificationListener = 
createVerificationListener(testDescription, expectedStats); + return new TestCase(testDescription, clusterState, clusterSettings, mlMemoryTracker, settings, verificationListener); + } + + private static TestCase WhenUpdateTrainedModelDeployment_ThenNoScale_GivenDeploymentGetsLargerAndNodesAreSufficient( + int seed, + int threadsPerAllocation, + long modelBytes, + long cacheSize + ) { + String testDescription = "test scaling when updating existing deployment to be larger but still fits in existing nodes"; + if (modelBytes > ByteSizeValue.ofGb(32).getBytes()) { + modelBytes = ByteSizeValue.ofGb(32).getBytes(); + // this test case requires that the model fit on the existing nodes + } + + // Generic settings + ClusterSettings clusterSettings = createClusterSettings(); + MlMemoryTracker mlMemoryTracker = createMlMemoryTracker(); + Settings settings = createSettings(); + + // TrainedModelAssignments + int numAllocationsRequestedPreviously = 4; + int updatedNumAllocations = 8; + DiscoveryNodes nodes = createMlNodesOfUniformSize( + threadsPerAllocation * numAllocationsRequestedPreviously, + ByteSizeValue.ofBytes(modelBytes + cacheSize), + updatedNumAllocations * threadsPerAllocation, + ByteSizeValue.ZERO + ); + int numAssignments = 1; + Map assignments = new HashMap<>(numAssignments); + // assignment 1 - already deployed - just updated to be larger + { + StartTrainedModelDeploymentAction.TaskParams taskParams = createTaskParams( + updatedNumAllocations, + seed, + threadsPerAllocation, + modelBytes, + cacheSize + ); + TrainedModelAssignment.Builder tmaBuilder = TrainedModelAssignment.Builder.empty(taskParams, null); + tmaBuilder.setAssignmentState(AssignmentState.STARTED); + tmaBuilder.addRoutingEntry( + NODE_NAME_PREFIX + 0, + new RoutingInfo(numAllocationsRequestedPreviously, numAllocationsRequestedPreviously, RoutingState.STARTED, null) + ); + assignments.put("TrainedModelAssignment-" + seed + "-" + 0, tmaBuilder.build()); + } + TrainedModelAssignmentMetadata trainedModelAssignmentMetadata = new TrainedModelAssignmentMetadata(assignments); + + // Cluster state + ClusterState clusterState = createClusterState(trainedModelAssignmentMetadata, nodes); + + // expected stats: + int existingNodes = clusterState.nodes().getSize(); + long existingPerNodeMemoryBytes = calculateExistingPerNodeMemoryBytes(clusterState); + long existingTotalModelMemoryBytes = calculateExistingTotalModelMemoryBytes(assignments); + int totalExistingProcessors = calculateTotalExistingAndUsedProcessors(assignments); + int minNodes = 3; // TODO understand why this value + long extraPerNodeModelMemoryBytes = 0; + int extraPerNodeProcessors = 0; + long extraModelMemoryBytes = 0; + int extraProcessors = 0; + long removeNodeMemoryInBytes = 0; + long perNodeMemoryOverheadInBytes = MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(); + + MlAutoscalingStats expectedStats = new MlAutoscalingStats( + existingNodes, + existingPerNodeMemoryBytes, + existingTotalModelMemoryBytes, + totalExistingProcessors, + minNodes, + extraPerNodeModelMemoryBytes, + extraPerNodeProcessors, + extraModelMemoryBytes, + extraProcessors, + removeNodeMemoryInBytes, + perNodeMemoryOverheadInBytes + ); + + ActionListener verificationListener = createVerificationListener(testDescription, expectedStats); + return new TestCase(testDescription, clusterState, clusterSettings, mlMemoryTracker, settings, verificationListener); + } + + private static TestCase WhenUpdateTrainedModelDeployment_ThenNoScale_GivenDeploymentGetsSmallerButAllNodesAreStillRequired( + int 
seed, + int threadsPerAllocation, + long modelBytes, + long cacheSize + ) { + String testDescription = "test scaling when updating existing deployment to be smaller but all nodes are still required"; + + // Generic settings + ClusterSettings clusterSettings = createClusterSettings(); + MlMemoryTracker mlMemoryTracker = createMlMemoryTracker(); + Settings settings = createSettings(); + + // TrainedModelAssignments + int numAllocationsRequestedPreviously = 4; + int updatedNumAllocations = 3; + DiscoveryNodes nodes = createDiscoveryNode( + (int) ByteSizeValue.ofBytes(modelBytes + cacheSize).getGb(), + numAllocationsRequestedPreviously * threadsPerAllocation + ); + int numAssignments = 1; + Map assignments = new HashMap<>(numAssignments); + // assignment 1 - already deployed - just updated to be smaller + { + StartTrainedModelDeploymentAction.TaskParams taskParams = createTaskParams( + updatedNumAllocations, + seed, + threadsPerAllocation, + modelBytes, + cacheSize + ); + TrainedModelAssignment.Builder tmaBuilder = TrainedModelAssignment.Builder.empty(taskParams, null); + tmaBuilder.setAssignmentState(AssignmentState.STARTED); + tmaBuilder.addRoutingEntry( + NODE_NAME_PREFIX + 0, + new RoutingInfo(numAllocationsRequestedPreviously, numAllocationsRequestedPreviously, RoutingState.STARTED, null) + ); + assignments.put("TrainedModelAssignment-" + seed + "-" + 0, tmaBuilder.build()); + } + TrainedModelAssignmentMetadata trainedModelAssignmentMetadata = new TrainedModelAssignmentMetadata(assignments); + + // Cluster state + ClusterState clusterState = createClusterState(trainedModelAssignmentMetadata, nodes); + + // expected stats: + int existingNodes = clusterState.nodes().getSize(); + long existingPerNodeMemoryBytes = calculateExistingPerNodeMemoryBytes(clusterState); + long existingTotalModelMemoryBytes = calculateExistingTotalModelMemoryBytes(assignments); + int totalExistingProcessors = calculateTotalExistingAndUsedProcessors(assignments); + int minNodes = 3; + long extraPerNodeModelMemoryBytes = 0; + int extraPerNodeProcessors = 0; + long extraModelMemoryBytes = 0; + int extraProcessors = 0; + long removeNodeMemoryInBytes = 0; + long perNodeMemoryOverheadInBytes = MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(); + + MlAutoscalingStats expectedStats = new MlAutoscalingStats( + existingNodes, + existingPerNodeMemoryBytes, + existingTotalModelMemoryBytes, + totalExistingProcessors, + minNodes, + extraPerNodeModelMemoryBytes, + extraPerNodeProcessors, + extraModelMemoryBytes, + extraProcessors, + removeNodeMemoryInBytes, + perNodeMemoryOverheadInBytes + ); + + ActionListener verificationListener = createVerificationListener(testDescription, expectedStats); + return new TestCase(testDescription, clusterState, clusterSettings, mlMemoryTracker, settings, verificationListener); + } + + private static TestCase WhenUpdateTrainedModelDeployment_ThenScaleDown_GivenDeploymentGetsSmaller( + int seed, + int threadsPerAllocation, + long modelBytes, + long cacheSize + ) { + String testDescription = "test scaling down when updating existing deployment to be smaller"; + // TODO change this test to use threadsPerAllocation to accurately require a larger node size which then needs to be scaled down + // when + // threadsPerAllocation gets updated to a smaller number + + // Generic settings + ClusterSettings clusterSettings = createClusterSettings(); + MlMemoryTracker mlMemoryTracker = createMlMemoryTracker(); + Settings settings = createSettings(); + + // TrainedModelAssignments + int 
numAllocationsRequestedPreviously = 8; + int updatedNumAllocations = 2; + DiscoveryNodes nodes = createDiscoveryNode(16, numAllocationsRequestedPreviously); + int numAssignments = 1; + Map assignments = new HashMap<>(numAssignments); + // assignment 1 + { + StartTrainedModelDeploymentAction.TaskParams taskParams = createTaskParams( + updatedNumAllocations, + seed, + threadsPerAllocation, + modelBytes, + cacheSize + ); + TrainedModelAssignment.Builder tmaBuilder = TrainedModelAssignment.Builder.empty(taskParams, null); + tmaBuilder.setAssignmentState(AssignmentState.STARTED); + tmaBuilder.addRoutingEntry( + NODE_NAME_PREFIX + 0, + new RoutingInfo(updatedNumAllocations, updatedNumAllocations, RoutingState.STARTED, null) + ); + assignments.put("TrainedModelAssignment-" + seed + "-" + 0, tmaBuilder.build()); + } + TrainedModelAssignmentMetadata trainedModelAssignmentMetadata = new TrainedModelAssignmentMetadata(assignments); + + // Cluster state + ClusterState clusterState = createClusterState(trainedModelAssignmentMetadata, nodes); + + // expected stats: + int existingNodes = clusterState.nodes().getSize(); + long existingPerNodeMemoryBytes = calculateExistingPerNodeMemoryBytes(clusterState); + long existingTotalModelMemoryBytes = calculateExistingTotalModelMemoryBytes(assignments); + int totalExistingProcessors = calculateTotalExistingAndUsedProcessors(assignments); + int minNodes = 3; + long extraPerNodeModelMemoryBytes = 0; + int extraPerNodeProcessors = calculateMaxThreadsPerAllocation(assignments) == calculateNodeSize(clusterState) + ? 0 + : calculateMaxThreadsPerAllocation(assignments); + long extraModelMemoryBytes = 0; + int extraProcessors = 0; + long removeNodeMemoryInBytes = 1; + long perNodeMemoryOverheadInBytes = MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(); + + MlAutoscalingStats expectedStats = new MlAutoscalingStats( + existingNodes, + existingPerNodeMemoryBytes, + existingTotalModelMemoryBytes, + totalExistingProcessors, + minNodes, + extraPerNodeModelMemoryBytes, + extraPerNodeProcessors, + extraModelMemoryBytes, + extraProcessors, + removeNodeMemoryInBytes, + perNodeMemoryOverheadInBytes + ); + + ActionListener verificationListener = createVerificationListener(testDescription, expectedStats); + return new TestCase(testDescription, clusterState, clusterSettings, mlMemoryTracker, settings, verificationListener); + } + + private static TestCase WhenStoppingTrainedModelDeployment_ThenNoScale_GivenAllNodesAreStillRequired( + int seed, + int threadsPerAllocation, + long modelBytes, + long cacheSize + ) { + String testDescription = "test scaling when the existing deployments require the same nodes when as small deployment is stopping"; + + // Generic settings + ClusterSettings clusterSettings = createClusterSettings(); + MlMemoryTracker mlMemoryTracker = createMlMemoryTracker(); + Settings settings = createSettings(); + + // TrainedModelAssignments; + int numAllocationsOnAssignment1 = 3; + int numAllocationsOnAssignment2 = 1; + DiscoveryNodes nodes = createDiscoveryNode( + 8, + numAllocationsOnAssignment1 * threadsPerAllocation + numAllocationsOnAssignment2 * threadsPerAllocation + ); + int numAssignments = 2; + Map assignments = new HashMap<>(numAssignments); + // assignment 1 - has 3 allocations + { + StartTrainedModelDeploymentAction.TaskParams taskParams = createTaskParams( + numAllocationsOnAssignment1, + 1, + threadsPerAllocation, + modelBytes, + cacheSize + ); + TrainedModelAssignment.Builder tmaBuilder = TrainedModelAssignment.Builder.empty(taskParams, null); 
+ tmaBuilder.setAssignmentState(AssignmentState.STARTED); + tmaBuilder.addRoutingEntry( + NODE_NAME_PREFIX + 0, + new RoutingInfo(numAllocationsOnAssignment1, numAllocationsOnAssignment1, RoutingState.STARTED, null) + ); + assignments.put("TrainedModelAssignment-" + seed + "-" + 0, tmaBuilder.build()); + } + // assignment 2 - is stopping, has 1 allocation + { + StartTrainedModelDeploymentAction.TaskParams taskParams = createTaskParams( + numAllocationsOnAssignment2, + 2, + threadsPerAllocation, + modelBytes, + cacheSize + ); + TrainedModelAssignment.Builder tmaBuilder = TrainedModelAssignment.Builder.empty(taskParams, null); + tmaBuilder.setAssignmentState(AssignmentState.STOPPING); + tmaBuilder.addRoutingEntry( + NODE_NAME_PREFIX + 0, + new RoutingInfo(numAllocationsOnAssignment2, numAllocationsOnAssignment2, RoutingState.STOPPING, "stopping deployment") + ); + assignments.put("TrainedModelAssignment-" + seed + "-" + 2, tmaBuilder.build()); + } + TrainedModelAssignmentMetadata trainedModelAssignmentMetadata = new TrainedModelAssignmentMetadata(assignments); + + // Cluster state + ClusterState clusterState = createClusterState(trainedModelAssignmentMetadata, nodes); + + // expected stats: + int existingNodes = clusterState.nodes().getSize(); + long existingPerNodeMemoryBytes = calculateExistingPerNodeMemoryBytes(clusterState); + long existingTotalModelMemoryBytes = calculateExistingTotalModelMemoryBytes(assignments); + int totalExistingProcessors = calculateTotalExistingAndUsedProcessors(assignments); + int minNodes = 3; // TODO understand why this value + long extraPerNodeModelMemoryBytes = 0; + int extraPerNodeProcessors = 0; + long extraModelMemoryBytes = 0; + int extraProcessors = 0; + long removeNodeMemoryInBytes = 0; + long perNodeMemoryOverheadInBytes = MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(); + + MlAutoscalingStats expectedStats = new MlAutoscalingStats( + existingNodes, + existingPerNodeMemoryBytes, + existingTotalModelMemoryBytes, + totalExistingProcessors, + minNodes, + extraPerNodeModelMemoryBytes, + extraPerNodeProcessors, + extraModelMemoryBytes, + extraProcessors, + removeNodeMemoryInBytes, + perNodeMemoryOverheadInBytes + ); + + ActionListener verificationListener = createVerificationListener(testDescription, expectedStats); + return new TestCase(testDescription, clusterState, clusterSettings, mlMemoryTracker, settings, verificationListener); + } + + private static TestCase WhenStoppedTrainedModelDeployment_ThenNoScale_GivenAllNodesAreStillRequired( + int seed, + int threadsPerAllocation, + long modelBytes, + long cacheSize + ) { + String testDescription = "test scaling when the existing deployments require the same nodes when as small deployment is stopped"; + + // Generic settings + ClusterSettings clusterSettings = createClusterSettings(); + MlMemoryTracker mlMemoryTracker = createMlMemoryTracker(); + Settings settings = createSettings(); + + // TrainedModelAssignments; + int numAllocationsOnAssignment1 = 3; + int numAllocationsOnAssignment2 = 1; + DiscoveryNodes nodes = createDiscoveryNode( + 8, + numAllocationsOnAssignment1 * threadsPerAllocation + numAllocationsOnAssignment2 * threadsPerAllocation + ); + int numAssignments = 2; + Map assignments = new HashMap<>(numAssignments); + // assignment 1 - has 3 allocations + { + StartTrainedModelDeploymentAction.TaskParams taskParams = createTaskParams( + numAllocationsOnAssignment1, + 1, + threadsPerAllocation, + modelBytes, + cacheSize + ); + TrainedModelAssignment.Builder tmaBuilder = 
TrainedModelAssignment.Builder.empty(taskParams, null); + tmaBuilder.setAssignmentState(AssignmentState.STARTED); + tmaBuilder.addRoutingEntry( + NODE_NAME_PREFIX + 0, + new RoutingInfo(numAllocationsOnAssignment1, numAllocationsOnAssignment1, RoutingState.STARTED, null) + ); + assignments.put("TrainedModelAssignment-" + seed + "-" + 0, tmaBuilder.build()); + } + // assignment 2 - is stopping, has 1 allocation + { + StartTrainedModelDeploymentAction.TaskParams taskParams = createTaskParams( + numAllocationsOnAssignment2, + 2, + threadsPerAllocation, + modelBytes, + cacheSize + ); + TrainedModelAssignment.Builder tmaBuilder = TrainedModelAssignment.Builder.empty(taskParams, null); + tmaBuilder.setAssignmentState(AssignmentState.STOPPING); + tmaBuilder.clearNodeRoutingTable(); + } + TrainedModelAssignmentMetadata trainedModelAssignmentMetadata = new TrainedModelAssignmentMetadata(assignments); + + // Cluster state + ClusterState clusterState = createClusterState(trainedModelAssignmentMetadata, nodes); + + // expected stats: + int existingNodes = clusterState.nodes().getSize(); + long existingPerNodeMemoryBytes = calculateExistingPerNodeMemoryBytes(clusterState); + long existingTotalModelMemoryBytes = calculateExistingTotalModelMemoryBytes(assignments); + int totalExistingProcessors = calculateTotalExistingAndUsedProcessors(assignments); + int minNodes = 3; // TODO understand why this value + long extraPerNodeModelMemoryBytes = 0; + int extraPerNodeProcessors = 0; + long extraModelMemoryBytes = 0; + int extraProcessors = 0; + long removeNodeMemoryInBytes = 0; + long perNodeMemoryOverheadInBytes = MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(); + + MlAutoscalingStats expectedStats = new MlAutoscalingStats( + existingNodes, + existingPerNodeMemoryBytes, + existingTotalModelMemoryBytes, + totalExistingProcessors, + minNodes, + extraPerNodeModelMemoryBytes, + extraPerNodeProcessors, + extraModelMemoryBytes, + extraProcessors, + removeNodeMemoryInBytes, + perNodeMemoryOverheadInBytes + ); + + ActionListener verificationListener = createVerificationListener(testDescription, expectedStats); + return new TestCase(testDescription, clusterState, clusterSettings, mlMemoryTracker, settings, verificationListener); + } + + private static TestCase WhenStopTrainedModelDeployment_ThenNoScale_GivenAllNodesAreStillRequired( + int seed, + int threadsPerAllocation, + long modelBytes, + long cacheSize + ) { + String testDescription = "test scaling when the existing deployments require the same nodes after a small deployment was removed"; + + // Generic settings + ClusterSettings clusterSettings = createClusterSettings(); + MlMemoryTracker mlMemoryTracker = createMlMemoryTracker(); + Settings settings = createSettings(); + int numAllocations = 3; + + // TrainedModelAssignments; + DiscoveryNodes nodes = createDiscoveryNode( + (int) ByteSizeValue.ofBytes(modelBytes + cacheSize).getGb(), + numAllocations * threadsPerAllocation + ); + int numAssignments = 1; + Map assignments = new HashMap<>(numAssignments); + // assignment 1 - has 3 allocations + { + StartTrainedModelDeploymentAction.TaskParams taskParams = createTaskParams( + numAllocations, + seed, + threadsPerAllocation, + modelBytes, + cacheSize + ); + TrainedModelAssignment.Builder tmaBuilder = TrainedModelAssignment.Builder.empty(taskParams, null); + tmaBuilder.setAssignmentState(AssignmentState.STARTED); + tmaBuilder.addRoutingEntry(NODE_NAME_PREFIX + 0, new RoutingInfo(numAllocations, numAllocations, RoutingState.STARTED, null)); + 
assignments.put("TrainedModelAssignment-" + seed + "-" + 0, tmaBuilder.build()); + } + TrainedModelAssignmentMetadata trainedModelAssignmentMetadata = new TrainedModelAssignmentMetadata(assignments); + + // Cluster state + ClusterState clusterState = createClusterState(trainedModelAssignmentMetadata, nodes); + + // expected stats: + int existingNodes = clusterState.nodes().getSize(); + long existingPerNodeMemoryBytes = calculateExistingPerNodeMemoryBytes(clusterState); + long existingTotalModelMemoryBytes = calculateExistingTotalModelMemoryBytes(assignments); + int totalExistingProcessors = calculateTotalExistingAndUsedProcessors(assignments); + int minNodes = 3; + long extraPerNodeModelMemoryBytes = 0; + int extraPerNodeProcessors = 0; + long extraModelMemoryBytes = 0; + int extraProcessors = 0; + long removeNodeMemoryInBytes = 0; + long perNodeMemoryOverheadInBytes = MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(); + + MlAutoscalingStats expectedStats = new MlAutoscalingStats( + existingNodes, + existingPerNodeMemoryBytes, + existingTotalModelMemoryBytes, + totalExistingProcessors, + minNodes, + extraPerNodeModelMemoryBytes, + extraPerNodeProcessors, + extraModelMemoryBytes, + extraProcessors, + removeNodeMemoryInBytes, + perNodeMemoryOverheadInBytes + ); + + ActionListener verificationListener = createVerificationListener(testDescription, expectedStats); + return new TestCase(testDescription, clusterState, clusterSettings, mlMemoryTracker, settings, verificationListener); + } + + private static TestCase WhenStopTrainedModelDeployment_ThenScaledown_GivenDeploymentRequiredAWholeNode( + int seed, + int threadsPerAllocation, + long modelBytes, + long cacheSize + ) { + String testDescription = "test scaling down when the removed deployment required a whole node"; + // Generic settings + ClusterSettings clusterSettings = createClusterSettings(); + MlMemoryTracker mlMemoryTracker = createMlMemoryTracker(); + Settings settings = createSettings(); + + int numAllocations = 12; + // TrainedModelAssignments; + DiscoveryNodes nodes = createMlNodesOfUniformSize( + threadsPerAllocation, + ByteSizeValue.ofBytes(modelBytes + cacheSize), + numAllocations * threadsPerAllocation * 2, + ByteSizeValue.ZERO // any amount of total memory is ok + ); + // more nodes than are needed, requiring a scaledown + int numAssignments = 2; + Map assignments = new HashMap<>(numAssignments); + // assignment 1 - has 12 allocations + { + StartTrainedModelDeploymentAction.TaskParams taskParams = createTaskParams( + numAllocations, + seed, + threadsPerAllocation, + modelBytes, + cacheSize + ); + TrainedModelAssignment.Builder tmaBuilder = TrainedModelAssignment.Builder.empty(taskParams, null); + tmaBuilder.setAssignmentState(AssignmentState.STARTED); + tmaBuilder.addRoutingEntry(NODE_NAME_PREFIX + 0, new RoutingInfo(numAllocations, numAllocations, RoutingState.STARTED, null)); + assignments.put("TrainedModelAssignment-" + seed + "-" + 0, tmaBuilder.build()); + } + TrainedModelAssignmentMetadata trainedModelAssignmentMetadata = new TrainedModelAssignmentMetadata(assignments); + + // Cluster state + ClusterState clusterState = createClusterState(trainedModelAssignmentMetadata, nodes); + + // expected stats: + int existingNodes = clusterState.nodes().getSize(); + long existingPerNodeMemoryBytes = calculateExistingPerNodeMemoryBytes(clusterState); + long existingTotalModelMemoryBytes = calculateExistingTotalModelMemoryBytes(assignments); + int totalExistingProcessors = 
calculateTotalExistingAndUsedProcessors(assignments); + int minNodes = 3; // TODO understand why this value + long extraPerNodeModelMemoryBytes = 0; + int extraPerNodeProcessors = 0; + long extraModelMemoryBytes = 0; + int extraProcessors = 0; + long removeNodeMemoryInBytes = calculateExistingPerNodeMemoryBytes(clusterState); // we need to remove a whole node + long perNodeMemoryOverheadInBytes = MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(); + + MlAutoscalingStats expectedStats = new MlAutoscalingStats( + existingNodes, + existingPerNodeMemoryBytes, + existingTotalModelMemoryBytes, + totalExistingProcessors, + minNodes, + extraPerNodeModelMemoryBytes, + extraPerNodeProcessors, + extraModelMemoryBytes, + extraProcessors, + removeNodeMemoryInBytes, + perNodeMemoryOverheadInBytes + ); + + ActionListener verificationListener = createVerificationListener(testDescription, expectedStats); + return new TestCase(testDescription, clusterState, clusterSettings, mlMemoryTracker, settings, verificationListener); + } + +} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTrackerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTrackerTests.java index 41a86e436f468..3674dda3934bd 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTrackerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTrackerTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.ml.MachineLearningField; @@ -21,16 +22,19 @@ import org.elasticsearch.xpack.core.ml.action.OpenJobAction; import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.autoscaling.MlAutoscalingStats; +import org.elasticsearch.xpack.core.ml.inference.assignment.AssignmentState; import org.elasticsearch.xpack.core.ml.inference.assignment.Priority; import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfo; import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingState; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentTests; import org.elasticsearch.xpack.core.ml.job.config.JobState; import org.elasticsearch.xpack.core.ml.job.config.JobTaskState; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.process.MlMemoryTracker; import org.elasticsearch.xpack.ml.utils.NativeMemoryCalculator; +import java.io.IOException; import java.net.InetAddress; import java.time.Instant; import java.util.Collections; @@ -41,6 +45,7 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Consumer; +import java.util.stream.Stream; import static org.elasticsearch.xpack.ml.autoscaling.MlAutoscalingResourceTracker.MlDummyAutoscalingEntity; import static org.elasticsearch.xpack.ml.autoscaling.MlAutoscalingResourceTracker.MlJobRequirements; @@ -64,14 +69,15 @@ public void testGetMemoryAndProcessors() throws InterruptedException { 10, 
MachineLearning.DEFAULT_MAX_OPEN_JOBS_PER_NODE, MlDummyAutoscalingEntity.of(0L, 0), + 1, listener ), stats -> { - assertEquals(memory, stats.perNodeMemoryInBytes()); - assertEquals(2, stats.nodes()); - assertEquals(0, stats.minNodes()); - assertEquals(0, stats.extraSingleNodeProcessors()); - assertEquals(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(), stats.perNodeMemoryOverheadInBytes()); + assertEquals(memory, stats.currentPerNodeMemoryBytes()); + assertEquals(2, stats.currentTotalNodes()); + assertEquals(0, stats.wantedMinNodes()); + assertEquals(0, stats.wantedExtraPerNodeNodeProcessors()); + assertEquals(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(), stats.currentPerNodeMemoryOverheadBytes()); } ); @@ -85,14 +91,15 @@ public void testGetMemoryAndProcessors() throws InterruptedException { 10, MachineLearning.DEFAULT_MAX_OPEN_JOBS_PER_NODE, MlDummyAutoscalingEntity.of(0L, 0), + 1, listener ), stats -> { - assertEquals(0, stats.perNodeMemoryInBytes()); - assertEquals(2, stats.nodes()); - assertEquals(0, stats.minNodes()); - assertEquals(0, stats.extraSingleNodeProcessors()); - assertEquals(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(), stats.perNodeMemoryOverheadInBytes()); + assertEquals(0, stats.currentPerNodeMemoryBytes()); + assertEquals(2, stats.currentTotalNodes()); + assertEquals(0, stats.wantedMinNodes()); + assertEquals(0, stats.wantedExtraPerNodeNodeProcessors()); + assertEquals(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(), stats.currentPerNodeMemoryOverheadBytes()); } ); @@ -107,16 +114,188 @@ public void testGetMemoryAndProcessors() throws InterruptedException { 10, MachineLearning.DEFAULT_MAX_OPEN_JOBS_PER_NODE, MlDummyAutoscalingEntity.of(memory / 2, 1), + 1, listener ), stats -> { - assertEquals(0, stats.perNodeMemoryInBytes()); - assertEquals(2, stats.nodes()); - assertEquals(0, stats.minNodes()); - assertEquals(0, stats.extraSingleNodeProcessors()); - assertEquals(0, stats.extraModelMemoryInBytes()); - assertEquals(0, stats.extraSingleNodeModelMemoryInBytes()); - assertEquals(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(), stats.perNodeMemoryOverheadInBytes()); + assertEquals(0, stats.currentPerNodeMemoryBytes()); + assertEquals(2, stats.currentTotalNodes()); + assertEquals(0, stats.wantedMinNodes()); + assertEquals(0, stats.wantedExtraPerNodeNodeProcessors()); + assertEquals(0, stats.wantedExtraModelMemoryBytes()); + assertEquals(0, stats.wantedExtraPerNodeMemoryBytes()); + assertEquals(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(), stats.currentPerNodeMemoryOverheadBytes()); + } + ); + } + + public void testScaleUpByProcessorsWhenAlreadyStarted() throws InterruptedException, IOException { + MlMemoryTracker mockTracker = mock(MlMemoryTracker.class); + + long memory = randomLongBetween(100, 1_000_000); + var taskParams1 = new StartTrainedModelDeploymentAction.TaskParams( + randomAlphaOfLength(10), + randomAlphaOfLength(10), + memory, + 2, + 1, + randomIntBetween(1, 10000), + randomBoolean() ? null : ByteSizeValue.ofBytes(randomLongBetween(0, memory)), + Priority.NORMAL, + memory, + memory + ); + + var taskParams2 = new StartTrainedModelDeploymentAction.TaskParams( + randomAlphaOfLength(10), + randomAlphaOfLength(10), + memory, + randomIntBetween(3, 80), + 1, + randomIntBetween(1, 10000), + randomBoolean() ? 
null : ByteSizeValue.ofBytes(randomLongBetween(0, memory)), + Priority.NORMAL, + memory, + memory + ); + + var randomAssignment1 = TrainedModelAssignmentTests.randomInstanceBuilder(taskParams1, AssignmentState.STARTED) + .clearNodeRoutingTable() + .addRoutingEntry("ml-1", new RoutingInfo(2, 2, RoutingState.STARTED, "")) + .build(); + + var randomAssignment2 = TrainedModelAssignmentTests.randomInstanceBuilder(taskParams2, AssignmentState.STARTED) + .clearNodeRoutingTable() + .addRoutingEntry("ml-2", new RoutingInfo(2, 2, RoutingState.STARTED, "")) + .build(); + + List nodes = new java.util.ArrayList<>( + Stream.of(randomAssignment1.getNodeRoutingTable().values()) + .map(r -> mock(DiscoveryNode.class)) + .peek(n -> when(n.getRoles()).thenReturn(Set.of(DiscoveryNodeRole.ML_ROLE))) + .peek(n -> when(n.getAttributes()).thenReturn(Map.of(MachineLearning.ALLOCATED_PROCESSORS_NODE_ATTR, "2.0"))) + .toList() + ); + nodes.addAll( + Stream.of(randomAssignment2.getNodeRoutingTable().values()) + .map(r -> mock(DiscoveryNode.class)) + .peek(n -> when(n.getRoles()).thenReturn(Set.of(DiscoveryNodeRole.ML_ROLE))) + .peek(n -> when(n.getAttributes()).thenReturn(Map.of(MachineLearning.ALLOCATED_PROCESSORS_NODE_ATTR, "2.0"))) + .toList() + ); + MlAutoscalingContext scaleUpContext = new MlAutoscalingContext( + List.of(), + List.of(), + List.of(), + Map.of("deployment-1", randomAssignment1, "deployment-2", randomAssignment2), + nodes, + null + ); + + int expectedProcessorsPerNode = scaleUpContext.modelAssignments.values() + .stream() + .map(TrainedModelAssignment::getTaskParams) + .mapToInt(StartTrainedModelDeploymentAction.TaskParams::getThreadsPerAllocation) + .max() + .orElse(0); + int expectedTotalProccessors = scaleUpContext.modelAssignments.values() + .stream() + .map(TrainedModelAssignment::getTaskParams) + .mapToInt(tp -> tp.getNumberOfAllocations() * tp.getThreadsPerAllocation()) + .sum(); + int existantProccessors = scaleUpContext.modelAssignments.values() + .stream() + .map(TrainedModelAssignment::getNodeRoutingTable) + .mapToInt(m -> m.values().stream().mapToInt(RoutingInfo::getTargetAllocations).sum()) // threads == 1 + .sum(); + int extraProcessors = expectedTotalProccessors - existantProccessors; + + this.assertAsync( + listener -> MlAutoscalingResourceTracker.getMemoryAndProcessors( + scaleUpContext, + mockTracker, + Map.of("ml-1", memory, "ml-2", memory), + memory, + 2, + MachineLearning.DEFAULT_MAX_OPEN_JOBS_PER_NODE, + MlDummyAutoscalingEntity.of(0, 0), + 1, + listener + ), + stats -> { + assertEquals(memory, stats.currentPerNodeMemoryBytes()); + assertEquals(2, stats.currentTotalNodes()); + assertEquals(extraProcessors, stats.wantedExtraProcessors()); + assertEquals(expectedProcessorsPerNode, stats.wantedExtraPerNodeNodeProcessors()); + assertEquals(0, stats.wantedExtraModelMemoryBytes()); + assertEquals(0, stats.wantedExtraPerNodeMemoryBytes()); + assertEquals(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(), stats.currentPerNodeMemoryOverheadBytes()); + } + ); + } + + public void testScaleUpByProcessorsWhenStarting() throws InterruptedException { + MlMemoryTracker mockTracker = mock(MlMemoryTracker.class); + long memory = randomLongBetween(100, 1_000_000); + long model_size = randomLongBetween(10, 10_000_000); + var taskParams = new StartTrainedModelDeploymentAction.TaskParams( + randomAlphaOfLength(10), + randomAlphaOfLength(10), + model_size, + randomIntBetween(5, 10), + 1, + randomIntBetween(1, 10000), + randomBoolean() ? 
null : ByteSizeValue.ofBytes(randomNonNegativeLong()), + Priority.NORMAL, + model_size, + model_size + ); + + var randomAssignment = TrainedModelAssignmentTests.randomInstanceBuilder(taskParams, AssignmentState.STARTING) + .setAssignmentState(AssignmentState.STARTING) + .setNumberOfAllocations(0) + .clearNodeRoutingTable() + .build(); + + List nodes = Stream.of("ml-1", "ml-2") + .map(n -> mock(DiscoveryNode.class)) + .peek(n -> when(n.getRoles()).thenReturn(Set.of(DiscoveryNodeRole.ML_ROLE))) + .peek(n -> when(n.getAttributes()).thenReturn(Map.of(MachineLearning.ALLOCATED_PROCESSORS_NODE_ATTR, "2.0"))) + .toList(); + + MlAutoscalingContext scaleUpContext = new MlAutoscalingContext(List.of(), List.of(), List.of(), Map.of(), nodes, null); + + int expectedTotalProccessors = scaleUpContext.modelAssignments.values() + .stream() + .map(TrainedModelAssignment::getTaskParams) + .mapToInt(tp -> tp.getNumberOfAllocations() * tp.getThreadsPerAllocation()) + .sum(); + int existantProccessors = scaleUpContext.modelAssignments.values() + .stream() + .map(TrainedModelAssignment::getNodeRoutingTable) + .mapToInt(m -> m.values().stream().mapToInt(RoutingInfo::getTargetAllocations).sum()) + .sum(); + int extraProcessors = expectedTotalProccessors - existantProccessors; + + this.assertAsync( + listener -> MlAutoscalingResourceTracker.getMemoryAndProcessors( + scaleUpContext, + mockTracker, + Map.of("ml-1", memory, "ml-2", memory), + memory, + 2, + MachineLearning.DEFAULT_MAX_OPEN_JOBS_PER_NODE, + MlDummyAutoscalingEntity.of(0, 0), + 1, + listener + ), + stats -> { + assertEquals(memory, stats.currentPerNodeMemoryBytes()); + assertEquals(2, stats.currentTotalNodes()); + assertEquals(Math.max(extraProcessors, 0), stats.wantedExtraProcessors()); + assertEquals(extraProcessors > 0 ? 
1 : 0, stats.wantedExtraPerNodeNodeProcessors()); + assertEquals(0, stats.wantedExtraPerNodeMemoryBytes()); + assertEquals(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(), stats.currentPerNodeMemoryOverheadBytes()); } ); } @@ -172,19 +351,20 @@ public void testGetMemoryAndProcessorsScaleUpGivenAwaitingLazyAssignment() throw 10, MachineLearning.DEFAULT_MAX_OPEN_JOBS_PER_NODE, MlDummyAutoscalingEntity.of(0L, 0), + 1, listener ), stats -> { - assertEquals(memory, stats.perNodeMemoryInBytes()); - assertEquals(2, stats.nodes()); - assertEquals(1, stats.minNodes()); - assertEquals(0, stats.extraProcessors()); - assertEquals(0, stats.modelMemoryInBytesSum()); - assertEquals(0, stats.processorsSum()); - assertEquals(0, stats.extraSingleNodeProcessors()); - assertEquals(memory / 4, stats.extraSingleNodeModelMemoryInBytes()); - assertEquals(memory / 4, stats.extraModelMemoryInBytes()); - assertEquals(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(), stats.perNodeMemoryOverheadInBytes()); + assertEquals(memory, stats.currentPerNodeMemoryBytes()); + assertEquals(2, stats.currentTotalNodes()); + assertEquals(1, stats.wantedMinNodes()); + assertEquals(0, stats.wantedExtraProcessors()); + assertEquals(0, stats.currentTotalModelMemoryBytes()); + assertEquals(0, stats.currentTotalProcessorsInUse()); + assertEquals(0, stats.wantedExtraPerNodeNodeProcessors()); + assertEquals(memory / 4, stats.wantedExtraPerNodeMemoryBytes()); + assertEquals(memory / 4, stats.wantedExtraModelMemoryBytes()); + assertEquals(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(), stats.currentPerNodeMemoryOverheadBytes()); } ); @@ -198,19 +378,20 @@ public void testGetMemoryAndProcessorsScaleUpGivenAwaitingLazyAssignment() throw 10, MachineLearning.DEFAULT_MAX_OPEN_JOBS_PER_NODE, MlDummyAutoscalingEntity.of(memory / 4, 0), + 1, listener ), stats -> { - assertEquals(memory, stats.perNodeMemoryInBytes()); - assertEquals(2, stats.nodes()); - assertEquals(1, stats.minNodes()); - assertEquals(0, stats.extraProcessors()); - assertEquals(memory / 4, stats.modelMemoryInBytesSum()); - assertEquals(0, stats.processorsSum()); - assertEquals(0, stats.extraSingleNodeProcessors()); - assertEquals(memory / 4, stats.extraSingleNodeModelMemoryInBytes()); - assertEquals(memory / 4, stats.extraModelMemoryInBytes()); - assertEquals(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(), stats.perNodeMemoryOverheadInBytes()); + assertEquals(memory, stats.currentPerNodeMemoryBytes()); + assertEquals(2, stats.currentTotalNodes()); + assertEquals(1, stats.wantedMinNodes()); + assertEquals(0, stats.wantedExtraProcessors()); + assertEquals(memory / 4, stats.currentTotalModelMemoryBytes()); + assertEquals(0, stats.currentTotalProcessorsInUse()); + assertEquals(0, stats.wantedExtraPerNodeNodeProcessors()); + assertEquals(memory / 4, stats.wantedExtraPerNodeMemoryBytes()); + assertEquals(memory / 4, stats.wantedExtraModelMemoryBytes()); + assertEquals(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(), stats.currentPerNodeMemoryOverheadBytes()); } ); @@ -224,19 +405,20 @@ public void testGetMemoryAndProcessorsScaleUpGivenAwaitingLazyAssignment() throw 10, MachineLearning.DEFAULT_MAX_OPEN_JOBS_PER_NODE, MlDummyAutoscalingEntity.of(memory / 4, 1), + 1, listener ), stats -> { - assertEquals(memory, stats.perNodeMemoryInBytes()); - assertEquals(2, stats.nodes()); - assertEquals(1, stats.minNodes()); - assertEquals(0, stats.extraProcessors()); - assertEquals(memory / 4, stats.modelMemoryInBytesSum()); - assertEquals(1, 
stats.processorsSum()); - assertEquals(0, stats.extraSingleNodeProcessors()); - assertEquals(memory / 4, stats.extraSingleNodeModelMemoryInBytes()); - assertEquals(memory / 4, stats.extraModelMemoryInBytes()); - assertEquals(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(), stats.perNodeMemoryOverheadInBytes()); + assertEquals(memory, stats.currentPerNodeMemoryBytes()); + assertEquals(2, stats.currentTotalNodes()); + assertEquals(1, stats.wantedMinNodes()); + assertEquals(0, stats.wantedExtraProcessors()); + assertEquals(memory / 4, stats.currentTotalModelMemoryBytes()); + assertEquals(1, stats.currentTotalProcessorsInUse()); + assertEquals(0, stats.wantedExtraPerNodeNodeProcessors()); + assertEquals(memory / 4, stats.wantedExtraPerNodeMemoryBytes()); + assertEquals(memory / 4, stats.wantedExtraModelMemoryBytes()); + assertEquals(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(), stats.currentPerNodeMemoryOverheadBytes()); } ); } @@ -295,17 +477,18 @@ public void testGetMemoryAndProcessorsScaleUpGivenAwaitingLazyAssignmentButFaile 10, MachineLearning.DEFAULT_MAX_OPEN_JOBS_PER_NODE, MlDummyAutoscalingEntity.of(0L, 0), + 1, listener ), stats -> { - assertEquals(memory, stats.perNodeMemoryInBytes()); - assertEquals(memory, stats.removeNodeMemoryInBytes()); - assertEquals(2, stats.nodes()); - assertEquals(0, stats.minNodes()); - assertEquals(0, stats.extraSingleNodeProcessors()); - assertEquals(0, stats.extraSingleNodeModelMemoryInBytes()); - assertEquals(0, stats.extraModelMemoryInBytes()); - assertEquals(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(), stats.perNodeMemoryOverheadInBytes()); + assertEquals(memory, stats.currentPerNodeMemoryBytes()); + assertEquals(memory, stats.unwantedNodeMemoryBytesToRemove()); + assertEquals(2, stats.currentTotalNodes()); + assertEquals(0, stats.wantedMinNodes()); + assertEquals(0, stats.wantedExtraPerNodeNodeProcessors()); + assertEquals(0, stats.wantedExtraPerNodeMemoryBytes()); + assertEquals(0, stats.wantedExtraModelMemoryBytes()); + assertEquals(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(), stats.currentPerNodeMemoryOverheadBytes()); } ); } @@ -1056,15 +1239,16 @@ public void testGetMemoryAndProcessorsScaleDownToZero() throws InterruptedExcept 10, MachineLearning.DEFAULT_MAX_OPEN_JOBS_PER_NODE, MlDummyAutoscalingEntity.of(0L, 0), + 1, listener ), stats -> { - assertEquals(memory, stats.perNodeMemoryInBytes()); - assertEquals(1, stats.nodes()); - assertEquals(0, stats.minNodes()); - assertEquals(0, stats.extraSingleNodeProcessors()); - assertEquals(memory, stats.removeNodeMemoryInBytes()); - assertEquals(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(), stats.perNodeMemoryOverheadInBytes()); + assertEquals(memory, stats.currentPerNodeMemoryBytes()); + assertEquals(1, stats.currentTotalNodes()); + assertEquals(0, stats.wantedMinNodes()); + assertEquals(0, stats.wantedExtraPerNodeNodeProcessors()); + assertEquals(memory, stats.unwantedNodeMemoryBytesToRemove()); + assertEquals(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(), stats.currentPerNodeMemoryOverheadBytes()); } ); @@ -1078,16 +1262,17 @@ public void testGetMemoryAndProcessorsScaleDownToZero() throws InterruptedExcept 10, MachineLearning.DEFAULT_MAX_OPEN_JOBS_PER_NODE, MlDummyAutoscalingEntity.of(0L, 1), + 1, listener ), stats -> { - assertEquals(memory, stats.perNodeMemoryInBytes()); - assertEquals(1, stats.nodes()); - assertEquals(0, stats.minNodes()); - assertEquals(0, stats.extraSingleNodeProcessors()); - assertEquals(0, 
stats.extraProcessors()); - assertEquals(memory, stats.removeNodeMemoryInBytes()); - assertEquals(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(), stats.perNodeMemoryOverheadInBytes()); + assertEquals(memory, stats.currentPerNodeMemoryBytes()); + assertEquals(1, stats.currentTotalNodes()); + assertEquals(0, stats.wantedMinNodes()); + assertEquals(0, stats.wantedExtraPerNodeNodeProcessors()); + assertEquals(0, stats.wantedExtraProcessors()); + assertEquals(memory, stats.unwantedNodeMemoryBytesToRemove()); + assertEquals(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(), stats.currentPerNodeMemoryOverheadBytes()); } ); @@ -1101,15 +1286,16 @@ public void testGetMemoryAndProcessorsScaleDownToZero() throws InterruptedExcept 10, MachineLearning.DEFAULT_MAX_OPEN_JOBS_PER_NODE, MlDummyAutoscalingEntity.of(0L, 0), + 1, listener ), stats -> { - assertEquals(memory, stats.perNodeMemoryInBytes()); - assertEquals(3, stats.nodes()); - assertEquals(0, stats.minNodes()); - assertEquals(0, stats.extraSingleNodeProcessors()); - assertEquals(memory, stats.removeNodeMemoryInBytes()); - assertEquals(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(), stats.perNodeMemoryOverheadInBytes()); + assertEquals(memory, stats.currentPerNodeMemoryBytes()); + assertEquals(3, stats.currentTotalNodes()); + assertEquals(0, stats.wantedMinNodes()); + assertEquals(0, stats.wantedExtraPerNodeNodeProcessors()); + assertEquals(memory, stats.unwantedNodeMemoryBytesToRemove()); + assertEquals(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(), stats.currentPerNodeMemoryOverheadBytes()); } ); } @@ -1122,7 +1308,9 @@ public void testGetMemoryAndProcessorsScaleDown() throws InterruptedException { MachineLearning.MAX_JVM_SIZE_NODE_ATTR, "400000000", MachineLearning.ML_CONFIG_VERSION_NODE_ATTR, - "7.2.0" + "7.2.0", + MachineLearning.ALLOCATED_PROCESSORS_NODE_ATTR, + "2.0" ); MlAutoscalingContext mlAutoscalingContext = new MlAutoscalingContext( @@ -1193,15 +1381,16 @@ public void testGetMemoryAndProcessorsScaleDown() throws InterruptedException { 10, MachineLearning.DEFAULT_MAX_OPEN_JOBS_PER_NODE, MlDummyAutoscalingEntity.of(0L, 0), + 1, listener ), stats -> { - assertEquals(memory, stats.perNodeMemoryInBytes()); - assertEquals(3, stats.nodes()); - assertEquals(1, stats.minNodes()); - assertEquals(0, stats.extraSingleNodeProcessors()); - assertEquals(memory, stats.removeNodeMemoryInBytes()); - assertEquals(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(), stats.perNodeMemoryOverheadInBytes()); + assertEquals(memory, stats.currentPerNodeMemoryBytes()); + assertEquals(3, stats.currentTotalNodes()); + assertEquals(1, stats.wantedMinNodes()); + assertEquals(0, stats.wantedExtraPerNodeNodeProcessors()); + assertEquals(memory, stats.unwantedNodeMemoryBytesToRemove()); + assertEquals(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(), stats.currentPerNodeMemoryOverheadBytes()); } ); } @@ -1295,15 +1484,16 @@ public void testGetMemoryAndProcessorsScaleDownPreventedByMinNodes() throws Inte 4, MachineLearning.DEFAULT_MAX_OPEN_JOBS_PER_NODE, MlDummyAutoscalingEntity.of(0L, 0), + 1, listener ), stats -> { - assertEquals(memory, stats.perNodeMemoryInBytes()); - assertEquals(3, stats.nodes()); - assertEquals(3, stats.minNodes()); - assertEquals(0, stats.extraSingleNodeProcessors()); - assertEquals(0, stats.removeNodeMemoryInBytes()); - assertEquals(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(), stats.perNodeMemoryOverheadInBytes()); + assertEquals(memory, 
stats.currentPerNodeMemoryBytes()); + assertEquals(3, stats.currentTotalNodes()); + assertEquals(3, stats.wantedMinNodes()); + assertEquals(0, stats.wantedExtraPerNodeNodeProcessors()); + assertEquals(0, stats.unwantedNodeMemoryBytesToRemove()); + assertEquals(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(), stats.currentPerNodeMemoryOverheadBytes()); } ); } @@ -1317,7 +1507,9 @@ public void testGetMemoryAndProcessorsScaleDownPreventedByDummyEntityMemory() th MachineLearning.MAX_JVM_SIZE_NODE_ATTR, "400000000", MachineLearning.ML_CONFIG_VERSION_NODE_ATTR, - "7.2.0" + "7.2.0", + MachineLearning.ALLOCATED_PROCESSORS_NODE_ATTR, + "2.0" ); MlAutoscalingContext mlAutoscalingContext = new MlAutoscalingContext( @@ -1388,22 +1580,24 @@ public void testGetMemoryAndProcessorsScaleDownPreventedByDummyEntityMemory() th 10, MachineLearning.DEFAULT_MAX_OPEN_JOBS_PER_NODE, MlDummyAutoscalingEntity.of(perNodeAvailableModelMemoryInBytes, 1), + 1, listener ), stats -> { - assertEquals(memory, stats.perNodeMemoryInBytes()); - assertEquals(perNodeAvailableModelMemoryInBytes + 503318080, stats.modelMemoryInBytesSum()); // total model memory is that - // configured in the dummy - // entity plus that used by the - // trained models. - assertEquals(5, stats.processorsSum()); // account for the extra processor from the dummy entity - assertEquals(3, stats.nodes()); - assertEquals(1, stats.minNodes()); - assertEquals(0, stats.extraSingleNodeProcessors()); - assertEquals(0, stats.extraProcessors()); - assertEquals(0, stats.extraModelMemoryInBytes()); - assertEquals(0, stats.extraSingleNodeModelMemoryInBytes()); - assertEquals(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(), stats.perNodeMemoryOverheadInBytes()); + assertEquals(memory, stats.currentPerNodeMemoryBytes()); + assertEquals(perNodeAvailableModelMemoryInBytes + 503318080, stats.currentTotalModelMemoryBytes()); // total model memory + // is that + // configured in the dummy + // entity plus that used by the + // trained models. 
+ assertEquals(5, stats.currentTotalProcessorsInUse()); // account for the extra processor from the dummy entity + assertEquals(3, stats.currentTotalNodes()); + assertEquals(1, stats.wantedMinNodes()); + assertEquals(0, stats.wantedExtraPerNodeNodeProcessors()); + assertEquals(0, stats.wantedExtraProcessors()); + assertEquals(0, stats.wantedExtraModelMemoryBytes()); + assertEquals(0, stats.wantedExtraPerNodeMemoryBytes()); + assertEquals(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(), stats.currentPerNodeMemoryOverheadBytes()); } ); } @@ -1417,7 +1611,9 @@ public void testGetMemoryAndProcessorsScaleDownNotPreventedByDummyEntityProcesso MachineLearning.MAX_JVM_SIZE_NODE_ATTR, "400000000", MachineLearning.ML_CONFIG_VERSION_NODE_ATTR, - "7.2.0" + "7.2.0", + MachineLearning.ALLOCATED_PROCESSORS_NODE_ATTR, + "2.0" ); MlAutoscalingContext mlAutoscalingContext = new MlAutoscalingContext( @@ -1488,19 +1684,20 @@ public void testGetMemoryAndProcessorsScaleDownNotPreventedByDummyEntityProcesso 10, MachineLearning.DEFAULT_MAX_OPEN_JOBS_PER_NODE, MlDummyAutoscalingEntity.of(0L, 9), + 1, listener ), stats -> { - assertEquals(memory, stats.perNodeMemoryInBytes()); - assertEquals(503318080, stats.modelMemoryInBytesSum()); - assertEquals(13, stats.processorsSum()); // account for the extra processors from the dummy entity - assertEquals(3, stats.nodes()); - assertEquals(1, stats.minNodes()); - assertEquals(0, stats.extraSingleNodeProcessors()); - assertEquals(0, stats.extraProcessors()); - assertEquals(0, stats.extraModelMemoryInBytes()); - assertEquals(0, stats.extraSingleNodeModelMemoryInBytes()); - assertEquals(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(), stats.perNodeMemoryOverheadInBytes()); + assertEquals(memory, stats.currentPerNodeMemoryBytes()); + assertEquals(503318080, stats.currentTotalModelMemoryBytes()); + assertEquals(13, stats.currentTotalProcessorsInUse()); // account for the extra processors from the dummy entity + assertEquals(3, stats.currentTotalNodes()); + assertEquals(1, stats.wantedMinNodes()); + assertEquals(0, stats.wantedExtraPerNodeNodeProcessors()); + assertEquals(0, stats.wantedExtraProcessors()); + assertEquals(0, stats.wantedExtraModelMemoryBytes()); + assertEquals(0, stats.wantedExtraPerNodeMemoryBytes()); + assertEquals(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(), stats.currentPerNodeMemoryOverheadBytes()); } ); } @@ -1512,7 +1709,9 @@ public void testGetMemoryAndProcessorsScaleDownNotPreventedByDummyEntityAsMemory MachineLearning.MAX_JVM_SIZE_NODE_ATTR, "400000000", MachineLearning.ML_CONFIG_VERSION_NODE_ATTR, - "7.2.0" + "7.2.0", + MachineLearning.ALLOCATED_PROCESSORS_NODE_ATTR, + "2.0" ); MlAutoscalingContext mlAutoscalingContext = new MlAutoscalingContext( @@ -1583,19 +1782,20 @@ public void testGetMemoryAndProcessorsScaleDownNotPreventedByDummyEntityAsMemory 10, MachineLearning.DEFAULT_MAX_OPEN_JOBS_PER_NODE, MlDummyAutoscalingEntity.of(1024, 0), + 1, listener ), stats -> { - assertEquals(memory, stats.perNodeMemoryInBytes()); - assertEquals(503318080, stats.modelMemoryInBytesSum()); - assertEquals(4, stats.processorsSum()); - assertEquals(3, stats.nodes()); - assertEquals(1, stats.minNodes()); - assertEquals(0, stats.extraSingleNodeProcessors()); - assertEquals(0, stats.extraProcessors()); - assertEquals(0, stats.extraModelMemoryInBytes()); - assertEquals(0, stats.extraSingleNodeModelMemoryInBytes()); - assertEquals(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(), stats.perNodeMemoryOverheadInBytes()); + 
assertEquals(memory, stats.currentPerNodeMemoryBytes()); + assertEquals(503318080, stats.currentTotalModelMemoryBytes()); + assertEquals(4, stats.currentTotalProcessorsInUse()); + assertEquals(3, stats.currentTotalNodes()); + assertEquals(1, stats.wantedMinNodes()); + assertEquals(0, stats.wantedExtraPerNodeNodeProcessors()); + assertEquals(0, stats.wantedExtraProcessors()); + assertEquals(0, stats.wantedExtraModelMemoryBytes()); + assertEquals(0, stats.wantedExtraPerNodeMemoryBytes()); + assertEquals(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(), stats.currentPerNodeMemoryOverheadBytes()); } ); } From 5bef1267c688986c7d4829585927383b69deda46 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Thu, 25 Jul 2024 16:11:05 -0400 Subject: [PATCH 030/105] ESQL: Wait for memory to clear in CSV tests (#111302) The CSV tests have been running ESQL async for a little while now to line up better with production and reuse a bunch of code but they'll sometimes fail with a breaker-not-cleared failure. I haven't been able to track down the cause after playing with it most of the day so I'm going to take a bit of a stab in the dark and blame it on the async-ness. After all, we're proceeding as soon as the results are ready, not as soon as the driver has fully shutdown. This may be the cause. Or not. We'll see if we get this again tomorrow..... closes https://github.com/elastic/elasticsearch/issues/111261 --- muted-tests.yml | 3 --- .../src/test/java/org/elasticsearch/xpack/esql/CsvTests.java | 3 ++- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index d597939ce0cd6..f3892bcda0e8a 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -117,9 +117,6 @@ tests: - class: org.elasticsearch.action.admin.indices.create.SplitIndexIT method: testSplitIndexPrimaryTerm issue: https://github.com/elastic/elasticsearch/issues/111282 -- class: org.elasticsearch.xpack.esql.CsvTests - method: test {inlinestats.ShadowingSelf} - issue: https://github.com/elastic/elasticsearch/issues/111261 - class: org.elasticsearch.http.netty4.Netty4ChunkedContinuationsIT method: testContinuationFailure issue: https://github.com/elastic/elasticsearch/issues/111283 diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 6a9b7a0e0089d..f5faf3129d883 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -286,7 +286,8 @@ private void doTest() throws Exception { assertWarnings(actualResults.responseHeaders().getOrDefault("Warning", List.of())); } finally { Releasables.close(() -> Iterators.map(actualResults.pages().iterator(), p -> p::releaseBlocks)); - assertThat(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST).getUsed(), equalTo(0L)); + // Give the breaker service some time to clear in case we got results before the rest of the driver had cleaned up + assertBusy(() -> assertThat(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST).getUsed(), equalTo(0L))); } } From 75959b69cba00d2f83a399eda7fab42e4a2e4855 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 26 Jul 2024 06:18:04 +1000 Subject: [PATCH 031/105] Mute org.elasticsearch.upgrades.LogsIndexModeFullClusterRestartIT testLogsIndexing {cluster=UPGRADED} #111306 --- muted-tests.yml | 3 +++ 1 file changed, 3 
insertions(+)

diff --git a/muted-tests.yml b/muted-tests.yml
index f3892bcda0e8a..aec11a411247c 100644
--- a/muted-tests.yml
+++ b/muted-tests.yml
@@ -120,6 +120,9 @@ tests:
 - class: org.elasticsearch.http.netty4.Netty4ChunkedContinuationsIT
   method: testContinuationFailure
   issue: https://github.com/elastic/elasticsearch/issues/111283
+- class: org.elasticsearch.upgrades.LogsIndexModeFullClusterRestartIT
+  method: testLogsIndexing {cluster=UPGRADED}
+  issue: https://github.com/elastic/elasticsearch/issues/111306
 # Examples:
 #

From 42cfd6b2f5dc34f1894222a1e784cab3ca330cdd Mon Sep 17 00:00:00 2001
From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com>
Date: Fri, 26 Jul 2024 06:36:58 +1000
Subject: [PATCH 032/105] Mute
 org.elasticsearch.xpack.core.ml.job.config.DetectionRuleTests
 testEqualsAndHashcode #111308

---
 muted-tests.yml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/muted-tests.yml b/muted-tests.yml
index aec11a411247c..46d0d490a2bb9 100644
--- a/muted-tests.yml
+++ b/muted-tests.yml
@@ -123,6 +123,9 @@ tests:
 - class: org.elasticsearch.upgrades.LogsIndexModeFullClusterRestartIT
   method: testLogsIndexing {cluster=UPGRADED}
   issue: https://github.com/elastic/elasticsearch/issues/111306
+- class: org.elasticsearch.xpack.core.ml.job.config.DetectionRuleTests
+  method: testEqualsAndHashcode
+  issue: https://github.com/elastic/elasticsearch/issues/111308
 # Examples:
 #

From a6eec4b62f9dca1f428b9f2609fe6d9834f10f64 Mon Sep 17 00:00:00 2001
From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com>
Date: Fri, 26 Jul 2024 06:53:58 +1000
Subject: [PATCH 033/105] Mute org.elasticsearch.xpack.esql.CsvTests test
 {inlinestats.ShadowingMulti} #111309

---
 muted-tests.yml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/muted-tests.yml b/muted-tests.yml
index 46d0d490a2bb9..c72b0a13a8e99 100644
--- a/muted-tests.yml
+++ b/muted-tests.yml
@@ -126,6 +126,9 @@ tests:
 - class: org.elasticsearch.xpack.core.ml.job.config.DetectionRuleTests
   method: testEqualsAndHashcode
   issue: https://github.com/elastic/elasticsearch/issues/111308
+- class: org.elasticsearch.xpack.esql.CsvTests
+  method: test {inlinestats.ShadowingMulti}
+  issue: https://github.com/elastic/elasticsearch/issues/111309
 # Examples:
 #

From 8d6b0f8481956ca89d76eb5ae25c11ab28c5d92e Mon Sep 17 00:00:00 2001
From: Nik Everett
Date: Thu, 25 Jul 2024 19:05:00 -0400
Subject: [PATCH 034/105] Revert "Mute org.elasticsearch.xpack.esql.CsvTests
 test {inlinestats.ShadowingMulti} #111309" (#111314)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

…ShadowingMulti} #111309" This reverts commit
a6eec4b62f9dca1f428b9f2609fe6d9834f10f64 because I believe the problem is
fixed by #111302.
Closes #111309 --- muted-tests.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index c72b0a13a8e99..46d0d490a2bb9 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -126,9 +126,6 @@ tests: - class: org.elasticsearch.xpack.core.ml.job.config.DetectionRuleTests method: testEqualsAndHashcode issue: https://github.com/elastic/elasticsearch/issues/111308 -- class: org.elasticsearch.xpack.esql.CsvTests - method: test {inlinestats.ShadowingMulti} - issue: https://github.com/elastic/elasticsearch/issues/111309 # Examples: # From 5761c4afb5121f10ae48e27938305e2cbca6f58a Mon Sep 17 00:00:00 2001 From: Ankita Kumar Date: Thu, 25 Jul 2024 20:30:55 -0400 Subject: [PATCH 035/105] Reconstruct set of indices in BulkRequest (#110672) Reconstruct indices set in BulkRequest constructor so that the correct thread pool can be used for forwarded bulk requests. Before this fix, forwarded bulk requests were always using the system_write thread pool because the indices set was empty. Fixes issue https://github.com/elastic/elasticsearch/issues/102792 --- .../ingest/common/IngestRestartIT.java | 99 +++++++++++++++++++ .../action/bulk/BulkRequest.java | 3 + .../action/bulk/TransportBulkActionTests.java | 21 +++- 3 files changed, 122 insertions(+), 1 deletion(-) diff --git a/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/ingest/common/IngestRestartIT.java b/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/ingest/common/IngestRestartIT.java index 0ff34cf687500..9f9d53d4d5081 100644 --- a/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/ingest/common/IngestRestartIT.java +++ b/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/ingest/common/IngestRestartIT.java @@ -8,14 +8,23 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; +import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.get.MultiGetResponse; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.client.internal.Requests; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.node.DiscoveryNodeRole; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.core.Strings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.gateway.GatewayService; @@ -26,6 +35,7 @@ import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalTestCluster; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; import java.util.Arrays; @@ -33,11 +43,15 @@ import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.concurrent.CountDownLatch; import java.util.function.Consumer; import java.util.function.Function; +import 
java.util.stream.IntStream; +import static org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequestParameters.Metric.INGEST; import static org.elasticsearch.test.NodeRoles.onlyRole; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; // Ideally I like this test to live in the server module, but otherwise a large part of the ScriptProcessor @@ -326,4 +340,89 @@ public boolean validateClusterForming() { source = client().prepareGet("index", "fails").get(timeout).getSource(); assertNull(source); } + + /** + * This test is for confirming that forwarded bulk requests do not use system_write thread pool + * for non-system indexes. Before this fix, we were using system_write thread pool for all forwarded + * bulk requests causing the system_write thread pool to get overloaded. + */ + public void testForwardBulkWithSystemWritePoolDisabled() throws Exception { + // Create a node with master only role and a node with ingest role + final String masterOnlyNode = internalCluster().startMasterOnlyNode(); + final String ingestNode = internalCluster().startNode(); + + ensureStableCluster(2); + + // Create Bulk Request + createIndex("index"); + + BytesReference source = new BytesArray(""" + { + "processors" : [ + {"set" : {"field": "y", "value": 0}} + ] + }"""); + + PutPipelineRequest putPipelineRequest = new PutPipelineRequest("_id", source, XContentType.JSON); + clusterAdmin().putPipeline(putPipelineRequest).get(); + + int numRequests = scaledRandomIntBetween(32, 128); + BulkRequest bulkRequest = new BulkRequest(); + BulkResponse response; + for (int i = 0; i < numRequests; i++) { + IndexRequest indexRequest = new IndexRequest("index").id(Integer.toString(i)).setPipeline("_id"); + indexRequest.source(Requests.INDEX_CONTENT_TYPE, "x", 1); + bulkRequest.add(indexRequest); + } + assertThat(numRequests, equalTo(bulkRequest.requests().size())); + + // Block system_write thread pool on the ingest node + final ThreadPool ingestNodeThreadPool = internalCluster().getInstance(ThreadPool.class, ingestNode); + final var blockingLatch = new CountDownLatch(1); + try { + blockSystemWriteThreadPool(blockingLatch, ingestNodeThreadPool); + // Send bulk request to master only node, so it will forward it to the ingest node. + response = safeGet(client(masterOnlyNode).bulk(bulkRequest)); + } finally { + blockingLatch.countDown(); + } + + // Make sure the requests are processed (even though we blocked system_write thread pool above). 
+ assertThat(response.getItems().length, equalTo(numRequests)); + assertFalse(response.hasFailures()); + + // Check Node Ingest stats + NodesStatsResponse nodesStatsResponse = clusterAdmin().nodesStats(new NodesStatsRequest(ingestNode).addMetric(INGEST)).actionGet(); + assertThat(nodesStatsResponse.getNodes().size(), equalTo(1)); + + NodeStats stats = nodesStatsResponse.getNodes().get(0); + assertThat(stats.getIngestStats().totalStats().ingestCount(), equalTo((long) numRequests)); + assertThat(stats.getIngestStats().totalStats().ingestFailedCount(), equalTo(0L)); + final var pipelineStats = stats.getIngestStats().pipelineStats().get(0); + assertThat(pipelineStats.pipelineId(), equalTo("_id")); + assertThat(pipelineStats.stats().ingestCount(), equalTo((long) numRequests)); + + MultiGetResponse docListResponse = safeGet( + client().prepareMultiGet().addIds("index", IntStream.range(0, numRequests).mapToObj(String::valueOf).toList()).execute() + ); + + assertThat(docListResponse.getResponses().length, equalTo(numRequests)); + Map document; + for (int i = 0; i < numRequests; i++) { + document = docListResponse.getResponses()[i].getResponse().getSourceAsMap(); + assertThat(document.get("y"), equalTo(0)); + } + } + + private void blockSystemWriteThreadPool(CountDownLatch blockingLatch, ThreadPool threadPool) { + assertThat(blockingLatch.getCount(), greaterThan(0L)); + final var executor = threadPool.executor(ThreadPool.Names.SYSTEM_WRITE); + // Add tasks repeatedly until we get an EsRejectedExecutionException which indicates that the threadpool and its queue are full. + expectThrows(EsRejectedExecutionException.class, () -> { + // noinspection InfiniteLoopStatement + while (true) { + executor.execute(() -> safeAwait(blockingLatch)); + } + }); + } } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java index 83b572afb2853..1a8bdb1c885c6 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java @@ -87,6 +87,9 @@ public BulkRequest(StreamInput in) throws IOException { requests.addAll(in.readCollectionAsList(i -> DocWriteRequest.readDocumentRequest(null, i))); refreshPolicy = RefreshPolicy.readFrom(in); timeout = in.readTimeValue(); + for (DocWriteRequest request : requests) { + indices.add(Objects.requireNonNull(request.index(), "request index must not be null")); + } } public BulkRequest(@Nullable String globalIndex) { diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java index ca1d1ac49832e..db3a985c00ad0 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java @@ -33,6 +33,8 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.features.FeatureService; @@ -54,6 +56,7 @@ import org.junit.After; import org.junit.Before; +import java.io.IOException; import java.util.Collections; import 
java.util.List; import java.util.Map; @@ -287,7 +290,7 @@ public void testProhibitCustomRoutingOnDataStream() throws Exception { prohibitCustomRoutingOnDataStream(writeRequestAgainstIndex, metadata.getIndicesLookup().get(writeRequestAgainstIndex.index())); } - public void testOnlySystem() { + public void testOnlySystem() throws IOException { SortedMap indicesLookup = new TreeMap<>(); Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()).build(); indicesLookup.put( @@ -303,15 +306,23 @@ public void testOnlySystem() { ); List onlySystem = List.of(".foo", ".bar"); assertTrue(TransportBulkAction.isOnlySystem(buildBulkRequest(onlySystem), indicesLookup, systemIndices)); + /* Test forwarded bulk requests (that are serialized then deserialized) */ + assertTrue(TransportBulkAction.isOnlySystem(buildBulkStreamRequest(onlySystem), indicesLookup, systemIndices)); onlySystem = List.of(".foo", ".bar", ".test"); assertTrue(TransportBulkAction.isOnlySystem(buildBulkRequest(onlySystem), indicesLookup, systemIndices)); + /* Test forwarded bulk requests (that are serialized then deserialized) */ + assertTrue(TransportBulkAction.isOnlySystem(buildBulkStreamRequest(onlySystem), indicesLookup, systemIndices)); List nonSystem = List.of("foo", "bar"); assertFalse(TransportBulkAction.isOnlySystem(buildBulkRequest(nonSystem), indicesLookup, systemIndices)); + /* Test forwarded bulk requests (that are serialized then deserialized) */ + assertFalse(TransportBulkAction.isOnlySystem(buildBulkStreamRequest(nonSystem), indicesLookup, systemIndices)); List mixed = List.of(".foo", ".test", "other"); assertFalse(TransportBulkAction.isOnlySystem(buildBulkRequest(mixed), indicesLookup, systemIndices)); + /* Test forwarded bulk requests (that are serialized then deserialized) */ + assertFalse(TransportBulkAction.isOnlySystem(buildBulkStreamRequest(mixed), indicesLookup, systemIndices)); } private void blockWriteThreadPool(CountDownLatch blockingLatch) { @@ -463,4 +474,12 @@ private BulkRequest buildBulkRequest(List indices) { } return request; } + + private BulkRequest buildBulkStreamRequest(List indices) throws IOException { + BulkRequest request = buildBulkRequest(indices); + BytesStreamOutput out = new BytesStreamOutput(); + request.writeTo(out); + StreamInput streamInput = out.bytes().streamInput(); + return (new BulkRequest(streamInput)); + } } From 12cac1b926aeb44d0f8a252f2c31fb64fd9fd473 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 26 Jul 2024 14:34:44 +1000 Subject: [PATCH 036/105] Mute org.elasticsearch.compute.lucene.LuceneQueryExpressionEvaluatorTests testTermsQueryShuffled #111318 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 46d0d490a2bb9..370d98541c7a5 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -126,6 +126,9 @@ tests: - class: org.elasticsearch.xpack.core.ml.job.config.DetectionRuleTests method: testEqualsAndHashcode issue: https://github.com/elastic/elasticsearch/issues/111308 +- class: org.elasticsearch.compute.lucene.LuceneQueryExpressionEvaluatorTests + method: testTermsQueryShuffled + issue: https://github.com/elastic/elasticsearch/issues/111318 # Examples: # From 447488d9a03a32a8e59b26b779a1ebcc1647cef7 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 26 Jul 2024 14:56:39 +1000 Subject: [PATCH 037/105] Mute 
org.elasticsearch.xpack.ml.integration.DatafeedJobsRestIT org.elasticsearch.xpack.ml.integration.DatafeedJobsRestIT #111319 --- muted-tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 370d98541c7a5..d7d64721b3410 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -129,6 +129,8 @@ tests: - class: org.elasticsearch.compute.lucene.LuceneQueryExpressionEvaluatorTests method: testTermsQueryShuffled issue: https://github.com/elastic/elasticsearch/issues/111318 +- class: org.elasticsearch.xpack.ml.integration.DatafeedJobsRestIT + issue: https://github.com/elastic/elasticsearch/issues/111319 # Examples: # From 39cb590bb7666b28b96673def7939ad78ba13519 Mon Sep 17 00:00:00 2001 From: eyalkoren <41850454+eyalkoren@users.noreply.github.com> Date: Fri, 26 Jul 2024 09:57:08 +0300 Subject: [PATCH 038/105] [test] Small enhancement to `EcsDynamicTemplatesIT` (#110740) --- .../xpack/stack/EcsDynamicTemplatesIT.java | 75 ++++++++++++------- 1 file changed, 50 insertions(+), 25 deletions(-) diff --git a/x-pack/plugin/stack/src/javaRestTest/java/org/elasticsearch/xpack/stack/EcsDynamicTemplatesIT.java b/x-pack/plugin/stack/src/javaRestTest/java/org/elasticsearch/xpack/stack/EcsDynamicTemplatesIT.java index 8bdf7b30a9997..3896120ed96d3 100644 --- a/x-pack/plugin/stack/src/javaRestTest/java/org/elasticsearch/xpack/stack/EcsDynamicTemplatesIT.java +++ b/x-pack/plugin/stack/src/javaRestTest/java/org/elasticsearch/xpack/stack/EcsDynamicTemplatesIT.java @@ -63,7 +63,7 @@ public class EcsDynamicTemplatesIT extends ESRestTestCase { private static Map ecsDynamicTemplates; private static Map> ecsFlatFieldDefinitions; - private static Map ecsFlatMultiFieldDefinitions; + private static Map> ecsFlatMultiFieldDefinitions; @BeforeClass public static void setupSuiteScopeCluster() throws Exception { @@ -142,12 +142,11 @@ private static void prepareEcsDefinitions() throws IOException { iterator.remove(); } - List> multiFields = (List>) definitions.get("multi_fields"); + List> multiFields = (List>) definitions.get("multi_fields"); if (multiFields != null) { multiFields.forEach(multiFieldsDefinitions -> { - String subfieldFlatName = Objects.requireNonNull(multiFieldsDefinitions.get("flat_name")); - String subfieldType = Objects.requireNonNull(multiFieldsDefinitions.get("type")); - ecsFlatMultiFieldDefinitions.put(subfieldFlatName, subfieldType); + String subfieldFlatName = (String) Objects.requireNonNull(multiFieldsDefinitions.get("flat_name")); + ecsFlatMultiFieldDefinitions.put(subfieldFlatName, multiFieldsDefinitions); }); } } @@ -191,7 +190,7 @@ public void testNumericMessage() throws IOException { verifyEcsMappings(indexName); } - private void assertType(String expectedType, Map actualMappings) throws IOException { + private void assertType(String expectedType, Map actualMappings) { assertNotNull("expected to get non-null mappings for field", actualMappings); assertEquals(expectedType, actualMappings.get("type")); } @@ -411,32 +410,33 @@ private void verifyEcsMappings(String indexName) throws IOException { if (expectedMappings == null) { nonEcsFields.add(fieldName); } else { - String expectedType = (String) expectedMappings.get("type"); - String actualMappingType = (String) actualMappings.get("type"); - if (actualMappingType.equals(expectedType) == false) { - fieldToWrongMappingType.put(fieldName, actualMappingType); - } - if (expectedMappings.get("index") != actualMappings.get("index")) { - wronglyIndexedFields.add(fieldName); - } - if (expectedMappings.get("doc_values") 
!= actualMappings.get("doc_values")) { - wronglyDocValuedFields.add(fieldName); - } + compareExpectedToActualMappings( + fieldName, + actualMappings, + expectedMappings, + fieldToWrongMappingType, + wronglyIndexedFields, + wronglyDocValuedFields + ); } }); - Map shallowMultiFieldMapCopy = new HashMap<>(ecsFlatMultiFieldDefinitions); + Map> shallowMultiFieldMapCopy = new HashMap<>(ecsFlatMultiFieldDefinitions); logger.info("Testing mapping of {} ECS multi-fields", shallowMultiFieldMapCopy.size()); flatMultiFieldsMappings.forEach((fieldName, actualMappings) -> { - String expectedType = shallowMultiFieldMapCopy.remove(fieldName); - if (expectedType != null) { + Map expectedMultiFieldMappings = shallowMultiFieldMapCopy.remove(fieldName); + if (expectedMultiFieldMappings != null) { // not finding an entry in the expected multi-field mappings map is acceptable: our dynamic templates are required to // ensure multi-field mapping for all fields with such ECS definitions. However, the patterns in these templates may lead // to multi-field mapping for ECS fields for which such are not defined - String actualMappingType = (String) actualMappings.get("type"); - if (actualMappingType.equals(expectedType) == false) { - fieldToWrongMappingType.put(fieldName, actualMappingType); - } + compareExpectedToActualMappings( + fieldName, + actualMappings, + expectedMultiFieldMappings, + fieldToWrongMappingType, + wronglyIndexedFields, + wronglyDocValuedFields + ); } }); @@ -460,7 +460,11 @@ private void verifyEcsMappings(String indexName) throws IOException { ); }); fieldToWrongMappingType.forEach((fieldName, actualMappingType) -> { - String ecsExpectedType = (String) ecsFlatFieldDefinitions.get(fieldName).get("type"); + Map fieldMappings = ecsFlatFieldDefinitions.get(fieldName); + if (fieldMappings == null) { + fieldMappings = ecsFlatMultiFieldDefinitions.get(fieldName); + } + String ecsExpectedType = (String) fieldMappings.get("type"); logger.error( "ECS field '{}' should be mapped to type '{}' but is mapped to type '{}'. 
Update {} accordingly.", fieldName, @@ -493,4 +497,25 @@ private void verifyEcsMappings(String indexName) throws IOException { wronglyDocValuedFields.isEmpty() ); } + + private static void compareExpectedToActualMappings( + String fieldName, + Map actualMappings, + Map expectedMappings, + Map fieldToWrongMappingType, + List wronglyIndexedFields, + List wronglyDocValuedFields + ) { + String expectedType = (String) expectedMappings.get("type"); + String actualMappingType = (String) actualMappings.get("type"); + if (actualMappingType.equals(expectedType) == false) { + fieldToWrongMappingType.put(fieldName, actualMappingType); + } + if (expectedMappings.get("index") != actualMappings.get("index")) { + wronglyIndexedFields.add(fieldName); + } + if (expectedMappings.get("doc_values") != actualMappings.get("doc_values")) { + wronglyDocValuedFields.add(fieldName); + } + } } From 5cac9a0b7f5e85ee1578a4cfef8925cdce565a18 Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Fri, 26 Jul 2024 10:20:21 +0200 Subject: [PATCH 039/105] ESQL: Mark union types as experimental (#111297) --- docs/reference/esql/esql-multi-index.asciidoc | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/docs/reference/esql/esql-multi-index.asciidoc b/docs/reference/esql/esql-multi-index.asciidoc index 41ff6a27417b1..cf98cbe959237 100644 --- a/docs/reference/esql/esql-multi-index.asciidoc +++ b/docs/reference/esql/esql-multi-index.asciidoc @@ -103,7 +103,7 @@ FROM events_* [source,bash] ---- -Cannot use field [client_ip] due to ambiguities being mapped as +Cannot use field [client_ip] due to ambiguities being mapped as [2] incompatible types: [ip] in [events_ip], [keyword] in [events_keyword] @@ -113,12 +113,14 @@ Cannot use field [client_ip] due to ambiguities being mapped as [[esql-multi-index-union-types]] === Union types +experimental::[] + {esql} has a way to handle <>. When the same field is mapped to multiple types in multiple indices, the type of the field is understood to be a _union_ of the various types in the index mappings. As seen in the preceding examples, this _union type_ cannot be used in the results, and cannot be referred to by the query -- except when it's passed to a type conversion function that accepts all the types in the _union_ and converts the field -to a single type. {esql} offers a suite of <> to achieve this. +to a single type. {esql} offers a suite of <> to achieve this. In the above examples, the query can use a command like `EVAL client_ip = TO_IP(client_ip)` to resolve the union of `ip` and `keyword` to just `ip`. 
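A sketch of that resolution in context, reusing the `events_*` indices from the examples above (the `KEEP` projection is illustrative and not part of the patch):

[source,esql]
----
FROM events_*
| EVAL client_ip = TO_IP(client_ip)
| KEEP @timestamp, client_ip
----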
From 595d907f612c099c69df59a9e9391eb7d2194465 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Iv=C3=A1n=20Cea=20Fontenla?= Date: Fri, 26 Jul 2024 10:41:18 +0200 Subject: [PATCH 040/105] ESQL: SpatialCentroid aggregation tests and docs (#111236) --- .../functions/aggregation-functions.asciidoc | 4 +- .../description/st_centroid_agg.asciidoc | 5 ++ .../examples/st_centroid_agg.asciidoc | 13 +++ .../kibana/definition/st_centroid_agg.json | 35 ++++++++ .../functions/kibana/docs/st_centroid_agg.md | 11 +++ .../functions/layout/st_centroid_agg.asciidoc | 15 ++++ .../parameters/st_centroid_agg.asciidoc | 6 ++ .../functions/signature/st_centroid_agg.svg | 1 + .../functions/types/st_centroid_agg.asciidoc | 10 +++ .../src/main/resources/meta.csv-spec | 2 +- .../function/aggregate/SpatialCentroid.java | 8 +- .../expression/function/FunctionName.java | 2 +- .../function/MultiRowTestCaseSupplier.java | 60 +++++++++++++ .../expression/function/TestCaseSupplier.java | 12 +++ .../aggregate/SpatialCentroidTests.java | 89 +++++++++++++++++++ 15 files changed, 268 insertions(+), 5 deletions(-) create mode 100644 docs/reference/esql/functions/description/st_centroid_agg.asciidoc create mode 100644 docs/reference/esql/functions/examples/st_centroid_agg.asciidoc create mode 100644 docs/reference/esql/functions/kibana/definition/st_centroid_agg.json create mode 100644 docs/reference/esql/functions/kibana/docs/st_centroid_agg.md create mode 100644 docs/reference/esql/functions/layout/st_centroid_agg.asciidoc create mode 100644 docs/reference/esql/functions/parameters/st_centroid_agg.asciidoc create mode 100644 docs/reference/esql/functions/signature/st_centroid_agg.svg create mode 100644 docs/reference/esql/functions/types/st_centroid_agg.asciidoc create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroidTests.java diff --git a/docs/reference/esql/functions/aggregation-functions.asciidoc b/docs/reference/esql/functions/aggregation-functions.asciidoc index fb840687427df..083f0eee792cd 100644 --- a/docs/reference/esql/functions/aggregation-functions.asciidoc +++ b/docs/reference/esql/functions/aggregation-functions.asciidoc @@ -16,7 +16,7 @@ The <> command supports these aggregate functions: * <> * <> * <> -* experimental:[] <> +* experimental:[] <> * <> * <> * <> @@ -27,11 +27,11 @@ include::count.asciidoc[] include::count-distinct.asciidoc[] include::median.asciidoc[] include::median-absolute-deviation.asciidoc[] -include::st_centroid_agg.asciidoc[] include::layout/avg.asciidoc[] include::layout/max.asciidoc[] include::layout/min.asciidoc[] include::layout/percentile.asciidoc[] +include::layout/st_centroid_agg.asciidoc[] include::layout/sum.asciidoc[] include::layout/top.asciidoc[] include::values.asciidoc[] diff --git a/docs/reference/esql/functions/description/st_centroid_agg.asciidoc b/docs/reference/esql/functions/description/st_centroid_agg.asciidoc new file mode 100644 index 0000000000000..740accf02c33f --- /dev/null +++ b/docs/reference/esql/functions/description/st_centroid_agg.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Calculate the spatial centroid over a field with spatial point geometry type. 
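A minimal usage sketch of the aggregation being documented; this is the same query that the generated Kibana definition and example files below embed:

[source,esql]
----
FROM airports
| STATS centroid=ST_CENTROID_AGG(location)
----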
diff --git a/docs/reference/esql/functions/examples/st_centroid_agg.asciidoc b/docs/reference/esql/functions/examples/st_centroid_agg.asciidoc new file mode 100644 index 0000000000000..69c291b738828 --- /dev/null +++ b/docs/reference/esql/functions/examples/st_centroid_agg.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/spatial.csv-spec[tag=st_centroid_agg-airports] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/spatial.csv-spec[tag=st_centroid_agg-airports-result] +|=== + diff --git a/docs/reference/esql/functions/kibana/definition/st_centroid_agg.json b/docs/reference/esql/functions/kibana/definition/st_centroid_agg.json new file mode 100644 index 0000000000000..b01f91526709a --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/st_centroid_agg.json @@ -0,0 +1,35 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "agg", + "name" : "st_centroid_agg", + "description" : "Calculate the spatial centroid over a field with spatial point geometry type.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_point" + } + ], + "examples" : [ + "FROM airports\n| STATS centroid=ST_CENTROID_AGG(location)" + ] +} diff --git a/docs/reference/esql/functions/kibana/docs/st_centroid_agg.md b/docs/reference/esql/functions/kibana/docs/st_centroid_agg.md new file mode 100644 index 0000000000000..306a32a309a64 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/st_centroid_agg.md @@ -0,0 +1,11 @@ + + +### ST_CENTROID_AGG +Calculate the spatial centroid over a field with spatial point geometry type. + +``` +FROM airports +| STATS centroid=ST_CENTROID_AGG(location) +``` diff --git a/docs/reference/esql/functions/layout/st_centroid_agg.asciidoc b/docs/reference/esql/functions/layout/st_centroid_agg.asciidoc new file mode 100644 index 0000000000000..6626c162f3b06 --- /dev/null +++ b/docs/reference/esql/functions/layout/st_centroid_agg.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-st_centroid_agg]] +=== `ST_CENTROID_AGG` + +*Syntax* + +[.text-center] +image::esql/functions/signature/st_centroid_agg.svg[Embedded,opts=inline] + +include::../parameters/st_centroid_agg.asciidoc[] +include::../description/st_centroid_agg.asciidoc[] +include::../types/st_centroid_agg.asciidoc[] +include::../examples/st_centroid_agg.asciidoc[] diff --git a/docs/reference/esql/functions/parameters/st_centroid_agg.asciidoc b/docs/reference/esql/functions/parameters/st_centroid_agg.asciidoc new file mode 100644 index 0000000000000..8903aa1a472a3 --- /dev/null +++ b/docs/reference/esql/functions/parameters/st_centroid_agg.asciidoc @@ -0,0 +1,6 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Parameters* + +`field`:: + diff --git a/docs/reference/esql/functions/signature/st_centroid_agg.svg b/docs/reference/esql/functions/signature/st_centroid_agg.svg new file mode 100644 index 0000000000000..45d509614a526 --- /dev/null +++ b/docs/reference/esql/functions/signature/st_centroid_agg.svg @@ -0,0 +1 @@ +ST_CENTROID_AGG(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/types/st_centroid_agg.asciidoc b/docs/reference/esql/functions/types/st_centroid_agg.asciidoc new file mode 100644 index 0000000000000..da95e0b9bec1a --- /dev/null +++ b/docs/reference/esql/functions/types/st_centroid_agg.asciidoc @@ -0,0 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +field | result +cartesian_point | cartesian_point +geo_point | geo_point +|=== diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index 1e23df1ab9107..cb073cbc35b61 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -314,7 +314,7 @@ sin |Returns ths {wikipedia}/Sine_and_cosine[Sine] trigonometric funct sinh |Returns the {wikipedia}/Hyperbolic_functions[hyperbolic sine] of an angle. split |Split a single valued string into multiple strings. sqrt |Returns the square root of a number. The input can be any numeric value, the return value is always a double. Square roots of negative numbers and infinities are null. -st_centroid_ag|The centroid of a spatial field. +st_centroid_ag|Calculate the spatial centroid over a field with spatial point geometry type. st_contains |Returns whether the first geometry contains the second geometry. This is the inverse of the <> function. st_disjoint |Returns whether the two geometries or geometry columns are disjoint. This is the inverse of the <> function. In mathematical terms: ST_Disjoint(A, B) ⇔ A ⋂ B = ∅ st_distance |Computes the distance between two points. For cartesian geometries, this is the pythagorean distance in the same units as the original coordinates. For geographic geometries, this is the circular distance along the great circle in meters. 
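Conceptually, the expected value that the new `SpatialCentroidTests` (further down in this patch) computes for `ST_CENTROID_AGG` is the arithmetic mean of the point coordinates, accumulated with compensated summation. A minimal sketch of that expectation, using the same Elasticsearch types the test uses:

[source,java]
----
import org.elasticsearch.geometry.Point;
import org.elasticsearch.search.aggregations.metrics.CompensatedSum;

import java.util.List;

class CentroidSketch {
    // Mean of the x and y coordinates; CompensatedSum (Kahan-style summation)
    // limits accumulated floating-point error over many points, as in the test.
    static Point centroid(List<Point> points) {
        var xSum = new CompensatedSum(0, 0);
        var ySum = new CompensatedSum(0, 0);
        for (Point p : points) {
            xSum.add(p.getX());
            ySum.add(p.getY());
        }
        return new Point(xSum.value() / points.size(), ySum.value() / points.size());
    }
}
----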
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroid.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroid.java index d5681ba8d856e..b9cd99f8eb7f0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroid.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroid.java @@ -18,6 +18,7 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.planner.ToAggregator; @@ -38,7 +39,12 @@ public class SpatialCentroid extends SpatialAggregateFunction implements ToAggre SpatialCentroid::new ); - @FunctionInfo(returnType = { "geo_point", "cartesian_point" }, description = "The centroid of a spatial field.", isAggregation = true) + @FunctionInfo( + returnType = { "geo_point", "cartesian_point" }, + description = "Calculate the spatial centroid over a field with spatial point geometry type.", + isAggregation = true, + examples = @Example(file = "spatial", tag = "st_centroid_agg-airports") + ) public SpatialCentroid(Source source, @Param(name = "field", type = { "geo_point", "cartesian_point" }) Expression field) { super(source, field, false); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/FunctionName.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/FunctionName.java index 9807cb5365e54..ac32b50d1045a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/FunctionName.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/FunctionName.java @@ -13,7 +13,7 @@ import java.lang.annotation.Target; /** - * Tests that extend {@link AbstractScalarFunctionTestCase} can use this annotation to specify the name of the function + * Tests that extend {@link AbstractFunctionTestCase} can use this annotation to specify the name of the function * to use when generating documentation files while running tests. * If this is not used, the name will be deduced from the test class name, by removing the "Test" suffix, and converting * the class name to snake case. 
This annotation can be used to override that behavior, for cases where the deduced name diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MultiRowTestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MultiRowTestCaseSupplier.java index dd73e64fbd8da..2896dec814a71 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MultiRowTestCaseSupplier.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MultiRowTestCaseSupplier.java @@ -10,6 +10,8 @@ import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.network.InetAddresses; +import org.elasticsearch.geo.GeometryTestUtils; +import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -18,6 +20,8 @@ import static org.elasticsearch.test.ESTestCase.randomBoolean; import static org.elasticsearch.test.ESTestCase.randomList; +import static org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes.CARTESIAN; +import static org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes.GEO; import static org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier.TypedDataSupplier; /** @@ -321,4 +325,60 @@ public static List ipCases(int minRows, int maxRows) { ) ); } + + public static List geoPointCases(int minRows, int maxRows, boolean withAltitude) { + List cases = new ArrayList<>(); + + cases.add( + new TypedDataSupplier( + "", + () -> randomList(minRows, maxRows, () -> GEO.asWkb(GeometryTestUtils.randomPoint(false))), + DataType.GEO_POINT, + false, + true + ) + ); + + if (withAltitude) { + cases.add( + new TypedDataSupplier( + "", + () -> randomList(minRows, maxRows, () -> GEO.asWkb(GeometryTestUtils.randomPoint(true))), + DataType.GEO_POINT, + false, + true + ) + ); + } + + return cases; + } + + public static List cartesianPointCases(int minRows, int maxRows, boolean withAltitude) { + List cases = new ArrayList<>(); + + cases.add( + new TypedDataSupplier( + "", + () -> randomList(minRows, maxRows, () -> CARTESIAN.asWkb(ShapeTestUtils.randomPoint(false))), + DataType.CARTESIAN_POINT, + false, + true + ) + ); + + if (withAltitude) { + cases.add( + new TypedDataSupplier( + "", + () -> randomList(minRows, maxRows, () -> CARTESIAN.asWkb(ShapeTestUtils.randomPoint(true))), + DataType.CARTESIAN_POINT, + false, + true + ) + ); + } + + return cases; + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java index 3130d852c1ab1..3c9c1795ff210 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java @@ -1076,12 +1076,24 @@ public static List cartesianShapeCases() { return cartesianShapeCases(ESTestCase::randomBoolean); } + /** + * Generate cases for {@link DataType#GEO_POINT}. + *
<p> + * For multi-row parameters, see {@link MultiRowTestCaseSupplier#geoPointCases}. + * </p>
+ */ public static List geoPointCases(Supplier hasAlt) { return List.of( new TypedDataSupplier("", () -> GEO.asWkb(GeometryTestUtils.randomPoint(hasAlt.get())), DataType.GEO_POINT) ); } + /** + * Generate cases for {@link DataType#CARTESIAN_POINT}. + *
<p> + * For multi-row parameters, see {@link MultiRowTestCaseSupplier#cartesianPointCases}. + * </p>
+ */ public static List cartesianPointCases(Supplier hasAlt) { return List.of( new TypedDataSupplier( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroidTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroidTests.java new file mode 100644 index 0000000000000..b79252c694084 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroidTests.java @@ -0,0 +1,89 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.aggregate; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.geometry.Point; +import org.elasticsearch.geometry.utils.GeometryValidator; +import org.elasticsearch.geometry.utils.WellKnownBinary; +import org.elasticsearch.search.aggregations.metrics.CompensatedSum; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.AbstractAggregationTestCase; +import org.elasticsearch.xpack.esql.expression.function.FunctionName; +import org.elasticsearch.xpack.esql.expression.function.MultiRowTestCaseSupplier; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; + +import java.nio.ByteOrder; +import java.util.List; +import java.util.function.Supplier; +import java.util.stream.Stream; + +import static org.hamcrest.Matchers.equalTo; + +@FunctionName("st_centroid_agg") +public class SpatialCentroidTests extends AbstractAggregationTestCase { + public SpatialCentroidTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + var suppliers = Stream.of( + MultiRowTestCaseSupplier.geoPointCases(1, 1000, true), + MultiRowTestCaseSupplier.cartesianPointCases(1, 1000, true) + ).flatMap(List::stream).map(SpatialCentroidTests::makeSupplier).toList(); + + // The withNoRowsExpectingNull() cases don't work here, as this aggregator doesn't return nulls. 
+ // return parameterSuppliersFromTypedDataWithDefaultChecks(suppliers); + return parameterSuppliersFromTypedData(randomizeBytesRefsOffset(suppliers)); + } + + @Override + protected Expression build(Source source, List args) { + return new SpatialCentroid(source, args.get(0)); + } + + private static TestCaseSupplier makeSupplier(TestCaseSupplier.TypedDataSupplier fieldSupplier) { + if (fieldSupplier.type() != DataType.CARTESIAN_POINT && fieldSupplier.type() != DataType.GEO_POINT) { + throw new IllegalStateException("Unexpected type: " + fieldSupplier.type()); + } + + return new TestCaseSupplier(List.of(fieldSupplier.type()), () -> { + var fieldTypedData = fieldSupplier.get(); + var values = fieldTypedData.multiRowData(); + + var xSum = new CompensatedSum(0, 0); + var ySum = new CompensatedSum(0, 0); + long count = 0; + + for (var value : values) { + var wkb = (BytesRef) value; + var point = (Point) WellKnownBinary.fromWKB(GeometryValidator.NOOP, false, wkb.bytes, wkb.offset, wkb.length); + xSum.add(point.getX()); + ySum.add(point.getY()); + count++; + } + + var expected = new BytesRef( + WellKnownBinary.toWKB(new Point(xSum.value() / count, ySum.value() / count), ByteOrder.LITTLE_ENDIAN) + ); + + return new TestCaseSupplier.TestCase( + List.of(fieldTypedData), + "SpatialCentroid[field=Attribute[channel=0]]", + fieldTypedData.type(), + equalTo(expected) + ); + }); + } +} From f4c05bdcab98138b5b9eb1c5f39db9aa9c862fb0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Fri, 26 Jul 2024 11:32:00 +0200 Subject: [PATCH 041/105] [DOCS] Amends PUT inference API docs with model download info (#111278) * [DOCS] Amends PUT inference API docs with model download info. * [DOCS] Addresses feedback. --- .../reference/inference/put-inference.asciidoc | 18 ++++++++++++++---- .../inference/service-elasticsearch.asciidoc | 10 +++++++++- .../reference/inference/service-elser.asciidoc | 6 ++++++ 3 files changed, 29 insertions(+), 5 deletions(-) diff --git a/docs/reference/inference/put-inference.asciidoc b/docs/reference/inference/put-inference.asciidoc index 948496c473a20..f8732a6aff6fd 100644 --- a/docs/reference/inference/put-inference.asciidoc +++ b/docs/reference/inference/put-inference.asciidoc @@ -6,10 +6,17 @@ experimental[] Creates an {infer} endpoint to perform an {infer} task. -IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in -{ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Mistral, Azure OpenAI, Google AI Studio, Google Vertex AI or Hugging Face. -For built-in models and models uploaded through Eland, the {infer} APIs offer an alternative way to use and manage trained models. -However, if you do not plan to use the {infer} APIs to use these models or if you want to use non-NLP models, use the <>. +[IMPORTANT] +==== +* The {infer} APIs enable you to use certain services, such as built-in +{ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Mistral, +Azure OpenAI, Google AI Studio, Google Vertex AI or Hugging Face. +* For built-in models and models uploaded through Eland, the {infer} APIs offer an +alternative way to use and manage trained models. However, if you do not plan to +use the {infer} APIs to use these models or if you want to use non-NLP models, +use the <>. 
+==== + [discrete] [[put-inference-api-request]] @@ -43,3 +50,6 @@ The following services are available through the {infer} API, click the links to * <> * <> * <> + +The {es} and ELSER services run on a {ml} node in your {es} cluster. The rest of +the services connect to external providers. \ No newline at end of file diff --git a/docs/reference/inference/service-elasticsearch.asciidoc b/docs/reference/inference/service-elasticsearch.asciidoc index 50b97b3506ee8..b568a4691a4bd 100644 --- a/docs/reference/inference/service-elasticsearch.asciidoc +++ b/docs/reference/inference/service-elasticsearch.asciidoc @@ -1,7 +1,12 @@ [[infer-service-elasticsearch]] === Elasticsearch {infer} service -Creates an {infer} endpoint to perform an {infer} task with the `elasticsearch` service. +Creates an {infer} endpoint to perform an {infer} task with the `elasticsearch` +service. + +NOTE: If you use the E5 model through the `elasticsearch` service, the API +request will automatically download and deploy the model if it isn't downloaded +yet. [discrete] @@ -81,6 +86,9 @@ Returns the document instead of only the index. Defaults to `true`. The following example shows how to create an {infer} endpoint called `my-e5-model` to perform a `text_embedding` task type. +The API request below will automatically download the E5 model if it isn't +already downloaded and then deploy the model. + [source,console] ------------------------------------------------------------ PUT _inference/text_embedding/my-e5-model diff --git a/docs/reference/inference/service-elser.asciidoc b/docs/reference/inference/service-elser.asciidoc index dff531f2a414b..34c0f7d0a9c53 100644 --- a/docs/reference/inference/service-elser.asciidoc +++ b/docs/reference/inference/service-elser.asciidoc @@ -3,6 +3,9 @@ Creates an {infer} endpoint to perform an {infer} task with the `elser` service. +NOTE: The API request will automatically download and deploy the ELSER model if +it isn't already downloaded. + [discrete] [[infer-service-elser-api-request]] @@ -63,6 +66,9 @@ The following example shows how to create an {infer} endpoint called `my-elser-model` to perform a `sparse_embedding` task type. Refer to the {ml-docs}/ml-nlp-elser.html[ELSER model documentation] for more info. +The request below will automatically download the ELSER model if it isn't +already downloaded and then deploy the model. 
+ [source,console] ------------------------------------------------------------ PUT _inference/sparse_embedding/my-elser-model From 8886c5c8d701e5e5c7eb1de6b940a13a995bc05d Mon Sep 17 00:00:00 2001 From: Artem Prigoda Date: Fri, 26 Jul 2024 11:54:59 +0200 Subject: [PATCH 042/105] Revert "[cache] Support async RangeMissingHandler callbacks (#110587)" (#111327) Reverts elastic/elasticsearch#110587 --- .../shared/SharedBlobCacheService.java | 101 +++----- .../shared/SharedBlobCacheServiceTests.java | 220 ++++++------------ .../store/input/FrozenIndexInput.java | 59 +++-- 3 files changed, 125 insertions(+), 255 deletions(-) diff --git a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java index 9a908b7e943c5..9cb83e35b63d6 100644 --- a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java +++ b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java @@ -646,14 +646,13 @@ private RangeMissingHandler writerWithOffset(RangeMissingHandler writer, int wri // no need to allocate a new capturing lambda if the offset isn't adjusted return writer; } - return (channel, channelPos, streamFactory, relativePos, len, progressUpdater, completionListener) -> writer.fillCacheRange( + return (channel, channelPos, streamFactory, relativePos, len, progressUpdater) -> writer.fillCacheRange( channel, channelPos, streamFactory, relativePos - writeOffset, len, - progressUpdater, - completionListener + progressUpdater ); } @@ -988,17 +987,16 @@ void populateAndRead( executor.execute(fillGapRunnable(gap, writer, null, refs.acquireListener())); } } else { - var gapFillingListener = refs.acquireListener(); - try (var gfRefs = new RefCountingRunnable(ActionRunnable.run(gapFillingListener, streamFactory::close))) { - final List gapFillingTasks = gaps.stream() - .map(gap -> fillGapRunnable(gap, writer, streamFactory, gfRefs.acquireListener())) - .toList(); - executor.execute(() -> { + final List gapFillingTasks = gaps.stream() + .map(gap -> fillGapRunnable(gap, writer, streamFactory, refs.acquireListener())) + .toList(); + executor.execute(() -> { + try (streamFactory) { // Fill the gaps in order. If a gap fails to fill for whatever reason, the task for filling the next // gap will still be executed. 
gapFillingTasks.forEach(Runnable::run); - }); - } + } + }); } } } @@ -1007,13 +1005,13 @@ void populateAndRead( } } - private Runnable fillGapRunnable( + private AbstractRunnable fillGapRunnable( SparseFileTracker.Gap gap, RangeMissingHandler writer, @Nullable SourceInputStreamFactory streamFactory, ActionListener listener ) { - return () -> ActionListener.run(listener, l -> { + return ActionRunnable.run(listener.delegateResponse((l, e) -> failGapAndListener(gap, l, e)), () -> { var ioRef = io; assert regionOwners.get(ioRef) == CacheFileRegion.this; assert CacheFileRegion.this.hasReferences() : CacheFileRegion.this; @@ -1024,15 +1022,10 @@ private Runnable fillGapRunnable( streamFactory, start, Math.toIntExact(gap.end() - start), - progress -> gap.onProgress(start + progress), - l.map(unused -> { - assert regionOwners.get(ioRef) == CacheFileRegion.this; - assert CacheFileRegion.this.hasReferences() : CacheFileRegion.this; - writeCount.increment(); - gap.onCompletion(); - return null; - }).delegateResponse((delegate, e) -> failGapAndListener(gap, delegate, e)) + progress -> gap.onProgress(start + progress) ); + writeCount.increment(); + gap.onCompletion(); }); } @@ -1120,23 +1113,12 @@ public void fillCacheRange( SourceInputStreamFactory streamFactory, int relativePos, int length, - IntConsumer progressUpdater, - ActionListener completionListener + IntConsumer progressUpdater ) throws IOException { - writer.fillCacheRange( - channel, - channelPos, - streamFactory, - relativePos, - length, - progressUpdater, - completionListener.map(unused -> { - var elapsedTime = TimeUnit.NANOSECONDS.toMicros(relativeTimeInNanosSupplier.getAsLong() - startTime); - blobCacheMetrics.getCacheMissLoadTimes().record(elapsedTime); - blobCacheMetrics.getCacheMissCounter().increment(); - return null; - }) - ); + writer.fillCacheRange(channel, channelPos, streamFactory, relativePos, length, progressUpdater); + var elapsedTime = TimeUnit.NANOSECONDS.toMicros(relativeTimeInNanosSupplier.getAsLong() - startTime); + SharedBlobCacheService.this.blobCacheMetrics.getCacheMissLoadTimes().record(elapsedTime); + SharedBlobCacheService.this.blobCacheMetrics.getCacheMissCounter().increment(); } }; if (rangeToRead.isEmpty()) { @@ -1229,18 +1211,9 @@ public void fillCacheRange( SourceInputStreamFactory streamFactory, int relativePos, int len, - IntConsumer progressUpdater, - ActionListener completionListener + IntConsumer progressUpdater ) throws IOException { - delegate.fillCacheRange( - channel, - channelPos, - streamFactory, - relativePos - writeOffset, - len, - progressUpdater, - completionListener - ); + delegate.fillCacheRange(channel, channelPos, streamFactory, relativePos - writeOffset, len, progressUpdater); } }; } @@ -1253,25 +1226,14 @@ public void fillCacheRange( SourceInputStreamFactory streamFactory, int relativePos, int len, - IntConsumer progressUpdater, - ActionListener completionListener + IntConsumer progressUpdater ) throws IOException { assert assertValidRegionAndLength(fileRegion, channelPos, len); - delegate.fillCacheRange( - channel, - channelPos, - streamFactory, - relativePos, - len, - progressUpdater, - Assertions.ENABLED ? 
ActionListener.runBefore(completionListener, () -> { - assert regionOwners.get(fileRegion.io) == fileRegion - : "File chunk [" + fileRegion.regionKey + "] no longer owns IO [" + fileRegion.io + "]"; - }) : completionListener - ); + delegate.fillCacheRange(channel, channelPos, streamFactory, relativePos, len, progressUpdater); + assert regionOwners.get(fileRegion.io) == fileRegion + : "File chunk [" + fileRegion.regionKey + "] no longer owns IO [" + fileRegion.io + "]"; } }; - } return adjustedWriter; } @@ -1358,7 +1320,6 @@ default SourceInputStreamFactory sharedInputStreamFactory(List completionListener + IntConsumer progressUpdater ) throws IOException; } @@ -1379,9 +1339,9 @@ public interface SourceInputStreamFactory extends Releasable { /** * Create the input stream at the specified position. * @param relativePos the relative position in the remote storage to read from. - * @param listener listener for the input stream ready to be read from. + * @return the input stream ready to be read from. */ - void create(int relativePos, ActionListener listener) throws IOException; + InputStream create(int relativePos) throws IOException; } private abstract static class DelegatingRangeMissingHandler implements RangeMissingHandler { @@ -1403,10 +1363,9 @@ public void fillCacheRange( SourceInputStreamFactory streamFactory, int relativePos, int length, - IntConsumer progressUpdater, - ActionListener completionListener + IntConsumer progressUpdater ) throws IOException { - delegate.fillCacheRange(channel, channelPos, streamFactory, relativePos, length, progressUpdater, completionListener); + delegate.fillCacheRange(channel, channelPos, streamFactory, relativePos, length, progressUpdater); } } diff --git a/x-pack/plugin/blob-cache/src/test/java/org/elasticsearch/blobcache/shared/SharedBlobCacheServiceTests.java b/x-pack/plugin/blob-cache/src/test/java/org/elasticsearch/blobcache/shared/SharedBlobCacheServiceTests.java index 4ea954a1a76ce..e477673c90d6d 100644 --- a/x-pack/plugin/blob-cache/src/test/java/org/elasticsearch/blobcache/shared/SharedBlobCacheServiceTests.java +++ b/x-pack/plugin/blob-cache/src/test/java/org/elasticsearch/blobcache/shared/SharedBlobCacheServiceTests.java @@ -29,7 +29,6 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.StoppableExecutorServiceWrapper; import org.elasticsearch.common.util.set.Sets; -import org.elasticsearch.core.CheckedRunnable; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.env.TestEnvironment; @@ -73,13 +72,6 @@ private static long size(long numPages) { return numPages * SharedBytes.PAGE_SIZE; } - private static void completeWith(ActionListener listener, CheckedRunnable runnable) { - ActionListener.completeWith(listener, () -> { - runnable.run(); - return null; - }); - } - public void testBasicEviction() throws IOException { Settings settings = Settings.builder() .put(NODE_NAME_SETTING.getKey(), "node") @@ -123,10 +115,7 @@ public void testBasicEviction() throws IOException { ByteRange.of(0L, 1L), ByteRange.of(0L, 1L), (channel, channelPos, relativePos, length) -> 1, - (channel, channelPos, streamFactory, relativePos, length, progressUpdater, completionListener) -> completeWith( - completionListener, - () -> progressUpdater.accept(length) - ), + (channel, channelPos, streamFactory, relativePos, length, progressUpdater) -> progressUpdater.accept(length), taskQueue.getThreadPool().generic(), bytesReadFuture ); @@ -563,14 +552,11 @@ 
public void execute(Runnable command) { cacheService.maybeFetchFullEntry( cacheKey, size, - (channel, channelPos, streamFactory, relativePos, length, progressUpdater, completionListener) -> completeWith( - completionListener, - () -> { - assert streamFactory == null : streamFactory; - bytesRead.addAndGet(-length); - progressUpdater.accept(length); - } - ), + (channel, channelPos, streamFactory, relativePos, length, progressUpdater) -> { + assert streamFactory == null : streamFactory; + bytesRead.addAndGet(-length); + progressUpdater.accept(length); + }, bulkExecutor, future ); @@ -584,15 +570,9 @@ public void execute(Runnable command) { // a download that would use up all regions should not run final var cacheKey = generateCacheKey(); assertEquals(2, cacheService.freeRegionCount()); - var configured = cacheService.maybeFetchFullEntry( - cacheKey, - size(500), - (ch, chPos, streamFactory, relPos, len, update, completionListener) -> completeWith(completionListener, () -> { - throw new AssertionError("Should never reach here"); - }), - bulkExecutor, - ActionListener.noop() - ); + var configured = cacheService.maybeFetchFullEntry(cacheKey, size(500), (ch, chPos, streamFactory, relPos, len, update) -> { + throw new AssertionError("Should never reach here"); + }, bulkExecutor, ActionListener.noop()); assertFalse(configured); assertEquals(2, cacheService.freeRegionCount()); } @@ -633,14 +613,9 @@ public void testFetchFullCacheEntryConcurrently() throws Exception { (ActionListener listener) -> cacheService.maybeFetchFullEntry( cacheKey, size, - ( - channel, - channelPos, - streamFactory, - relativePos, - length, - progressUpdater, - completionListener) -> completeWith(completionListener, () -> progressUpdater.accept(length)), + (channel, channelPos, streamFactory, relativePos, length, progressUpdater) -> progressUpdater.accept( + length + ), bulkExecutor, listener ) @@ -884,10 +859,7 @@ public void testMaybeEvictLeastUsed() throws Exception { var entry = cacheService.get(cacheKey, regionSize, 0); entry.populate( ByteRange.of(0L, regionSize), - (channel, channelPos, streamFactory, relativePos, length, progressUpdater, completionListener) -> completeWith( - completionListener, - () -> progressUpdater.accept(length) - ), + (channel, channelPos, streamFactory, relativePos, length, progressUpdater) -> progressUpdater.accept(length), taskQueue.getThreadPool().generic(), ActionListener.noop() ); @@ -982,14 +954,11 @@ public void execute(Runnable command) { cacheKey, 0, blobLength, - (channel, channelPos, streamFactory, relativePos, length, progressUpdater, completionListener) -> completeWith( - completionListener, - () -> { - assert streamFactory == null : streamFactory; - bytesRead.addAndGet(length); - progressUpdater.accept(length); - } - ), + (channel, channelPos, streamFactory, relativePos, length, progressUpdater) -> { + assert streamFactory == null : streamFactory; + bytesRead.addAndGet(length); + progressUpdater.accept(length); + }, bulkExecutor, future ); @@ -1016,14 +985,11 @@ public void execute(Runnable command) { cacheKey, region, blobLength, - (channel, channelPos, streamFactory, relativePos, length, progressUpdater, completionListener) -> completeWith( - completionListener, - () -> { - assert streamFactory == null : streamFactory; - bytesRead.addAndGet(length); - progressUpdater.accept(length); - } - ), + (channel, channelPos, streamFactory, relativePos, length, progressUpdater) -> { + assert streamFactory == null : streamFactory; + bytesRead.addAndGet(length); + 
progressUpdater.accept(length); + }, bulkExecutor, listener ); @@ -1044,16 +1010,13 @@ public void execute(Runnable command) { cacheKey, randomIntBetween(0, 10), randomLongBetween(1L, regionSize), - (channel, channelPos, streamFactory, relativePos, length, progressUpdater, completionListener) -> completeWith( - completionListener, - () -> { - throw new AssertionError("should not be executed"); - } - ), + (channel, channelPos, streamFactory, relativePos, length, progressUpdater) -> { + throw new AssertionError("should not be executed"); + }, bulkExecutor, future ); - assertThat("Listener is immediately completionListener", future.isDone(), is(true)); + assertThat("Listener is immediately completed", future.isDone(), is(true)); assertThat("Region already exists in cache", future.get(), is(false)); } { @@ -1069,14 +1032,11 @@ public void execute(Runnable command) { cacheKey, 0, blobLength, - (channel, channelPos, ignore, relativePos, length, progressUpdater, completionListener) -> completeWith( - completionListener, - () -> { - assert ignore == null : ignore; - bytesRead.addAndGet(length); - progressUpdater.accept(length); - } - ), + (channel, channelPos, ignore, relativePos, length, progressUpdater) -> { + assert ignore == null : ignore; + bytesRead.addAndGet(length); + progressUpdater.accept(length); + }, bulkExecutor, future ); @@ -1150,15 +1110,12 @@ public void execute(Runnable command) { region, range, blobLength, - (channel, channelPos, streamFactory, relativePos, length, progressUpdater, completionListener) -> completeWith( - completionListener, - () -> { - assertThat(range.start() + relativePos, equalTo(cacheService.getRegionStart(region) + regionRange.start())); - assertThat(channelPos, equalTo(Math.toIntExact(regionRange.start()))); - assertThat(length, equalTo(Math.toIntExact(regionRange.length()))); - bytesCopied.addAndGet(length); - } - ), + (channel, channelPos, streamFactory, relativePos, length, progressUpdater) -> { + assertThat(range.start() + relativePos, equalTo(cacheService.getRegionStart(region) + regionRange.start())); + assertThat(channelPos, equalTo(Math.toIntExact(regionRange.start()))); + assertThat(length, equalTo(Math.toIntExact(regionRange.length()))); + bytesCopied.addAndGet(length); + }, bulkExecutor, future ); @@ -1193,10 +1150,7 @@ public void execute(Runnable command) { region, ByteRange.of(0L, blobLength), blobLength, - (channel, channelPos, streamFactory, relativePos, length, progressUpdater, completionListener) -> completeWith( - completionListener, - () -> bytesCopied.addAndGet(length) - ), + (channel, channelPos, streamFactory, relativePos, length, progressUpdater) -> bytesCopied.addAndGet(length), bulkExecutor, listener ); @@ -1219,16 +1173,13 @@ public void execute(Runnable command) { randomIntBetween(0, 10), ByteRange.of(0L, blobLength), blobLength, - (channel, channelPos, streamFactory, relativePos, length, progressUpdater, completionListener) -> completeWith( - completionListener, - () -> { - throw new AssertionError("should not be executed"); - } - ), + (channel, channelPos, streamFactory, relativePos, length, progressUpdater) -> { + throw new AssertionError("should not be executed"); + }, bulkExecutor, future ); - assertThat("Listener is immediately completionListener", future.isDone(), is(true)); + assertThat("Listener is immediately completed", future.isDone(), is(true)); assertThat("Region already exists in cache", future.get(), is(false)); } { @@ -1245,10 +1196,7 @@ public void execute(Runnable command) { 0, ByteRange.of(0L, blobLength), 
blobLength, - (channel, channelPos, streamFactory, relativePos, length, progressUpdater, completionListener) -> completeWith( - completionListener, - () -> bytesCopied.addAndGet(length) - ), + (channel, channelPos, streamFactory, relativePos, length, progressUpdater) -> bytesCopied.addAndGet(length), bulkExecutor, future ); @@ -1289,18 +1237,10 @@ public void testPopulate() throws Exception { var entry = cacheService.get(cacheKey, blobLength, 0); AtomicLong bytesWritten = new AtomicLong(0L); final PlainActionFuture future1 = new PlainActionFuture<>(); - entry.populate( - ByteRange.of(0, regionSize - 1), - (channel, channelPos, streamFactory, relativePos, length, progressUpdater, completionListener) -> completeWith( - completionListener, - () -> { - bytesWritten.addAndGet(length); - progressUpdater.accept(length); - } - ), - taskQueue.getThreadPool().generic(), - future1 - ); + entry.populate(ByteRange.of(0, regionSize - 1), (channel, channelPos, streamFactory, relativePos, length, progressUpdater) -> { + bytesWritten.addAndGet(length); + progressUpdater.accept(length); + }, taskQueue.getThreadPool().generic(), future1); assertThat(future1.isDone(), is(false)); assertThat(taskQueue.hasRunnableTasks(), is(true)); @@ -1308,34 +1248,18 @@ public void testPopulate() throws Exception { // start populating the second region entry = cacheService.get(cacheKey, blobLength, 1); final PlainActionFuture future2 = new PlainActionFuture<>(); - entry.populate( - ByteRange.of(0, regionSize - 1), - (channel, channelPos, streamFactory, relativePos, length, progressUpdater, completionListener) -> completeWith( - completionListener, - () -> { - bytesWritten.addAndGet(length); - progressUpdater.accept(length); - } - ), - taskQueue.getThreadPool().generic(), - future2 - ); + entry.populate(ByteRange.of(0, regionSize - 1), (channel, channelPos, streamFactory, relativePos, length, progressUpdater) -> { + bytesWritten.addAndGet(length); + progressUpdater.accept(length); + }, taskQueue.getThreadPool().generic(), future2); // start populating again the first region, listener should be called immediately entry = cacheService.get(cacheKey, blobLength, 0); final PlainActionFuture future3 = new PlainActionFuture<>(); - entry.populate( - ByteRange.of(0, regionSize - 1), - (channel, channelPos, streamFactory, relativePos, length, progressUpdater, completionListener) -> completeWith( - completionListener, - () -> { - bytesWritten.addAndGet(length); - progressUpdater.accept(length); - } - ), - taskQueue.getThreadPool().generic(), - future3 - ); + entry.populate(ByteRange.of(0, regionSize - 1), (channel, channelPos, streamFactory, relativePos, length, progressUpdater) -> { + bytesWritten.addAndGet(length); + progressUpdater.accept(length); + }, taskQueue.getThreadPool().generic(), future3); assertThat(future3.isDone(), is(true)); var written = future3.get(10L, TimeUnit.SECONDS); @@ -1453,10 +1377,7 @@ public void testSharedSourceInputStreamFactory() throws Exception { range, range, (channel, channelPos, relativePos, length) -> length, - (channel, channelPos, streamFactory, relativePos, length, progressUpdater, completionListener) -> completeWith( - completionListener, - () -> progressUpdater.accept(length) - ), + (channel, channelPos, streamFactory, relativePos, length, progressUpdater) -> progressUpdater.accept(length), EsExecutors.DIRECT_EXECUTOR_SERVICE, future ); @@ -1473,8 +1394,8 @@ public void testSharedSourceInputStreamFactory() throws Exception { final var factoryClosed = new AtomicBoolean(false); final var 
dummyStreamFactory = new SourceInputStreamFactory() { @Override - public void create(int relativePos, ActionListener listener) { - listener.onResponse(null); + public InputStream create(int relativePos) { + return null; } @Override @@ -1499,20 +1420,17 @@ public void fillCacheRange( SourceInputStreamFactory streamFactory, int relativePos, int length, - IntConsumer progressUpdater, - ActionListener completion + IntConsumer progressUpdater ) throws IOException { - completeWith(completion, () -> { - if (invocationCounter.incrementAndGet() == 1) { - final Thread witness = invocationThread.compareAndExchange(null, Thread.currentThread()); - assertThat(witness, nullValue()); - } else { - assertThat(invocationThread.get(), sameInstance(Thread.currentThread())); - } - assertThat(streamFactory, sameInstance(dummyStreamFactory)); - assertThat(position.getAndSet(relativePos), lessThan(relativePos)); - progressUpdater.accept(length); - }); + if (invocationCounter.incrementAndGet() == 1) { + final Thread witness = invocationThread.compareAndExchange(null, Thread.currentThread()); + assertThat(witness, nullValue()); + } else { + assertThat(invocationThread.get(), sameInstance(Thread.currentThread())); + } + assertThat(streamFactory, sameInstance(dummyStreamFactory)); + assertThat(position.getAndSet(relativePos), lessThan(relativePos)); + progressUpdater.accept(length); } }; diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/FrozenIndexInput.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/FrozenIndexInput.java index d7cf22a05981f..56efc72f2f6f7 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/FrozenIndexInput.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/FrozenIndexInput.java @@ -11,7 +11,6 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.blobcache.BlobCacheUtils; import org.elasticsearch.blobcache.common.ByteBufferReference; import org.elasticsearch.blobcache.common.ByteRange; @@ -147,38 +146,32 @@ private void readWithoutBlobCacheSlow(ByteBuffer b, long position, int length) t final int read = SharedBytes.readCacheFile(channel, pos, relativePos, len, byteBufferReference); stats.addCachedBytesRead(read); return read; - }, - (channel, channelPos, streamFactory, relativePos, len, progressUpdater, completionListener) -> ActionListener.completeWith( - completionListener, - () -> { - assert streamFactory == null : streamFactory; - final long startTimeNanos = stats.currentTimeNanos(); - try (InputStream input = openInputStreamFromBlobStore(rangeToWrite.start() + relativePos, len)) { - assert ThreadPool.assertCurrentThreadPool(SearchableSnapshots.CACHE_FETCH_ASYNC_THREAD_POOL_NAME); - logger.trace( - "{}: writing channel {} pos {} length {} (details: {})", - fileInfo.physicalName(), - channelPos, - relativePos, - len, - cacheFile - ); - SharedBytes.copyToCacheFileAligned( - channel, - input, - channelPos, - relativePos, - len, - progressUpdater, - writeBuffer.get().clear() - ); - final long endTimeNanos = stats.currentTimeNanos(); - stats.addCachedBytesWritten(len, endTimeNanos - startTimeNanos); - return null; - } - } - ) - ); + }, (channel, channelPos, streamFactory, relativePos, len, 
progressUpdater) -> { + assert streamFactory == null : streamFactory; + final long startTimeNanos = stats.currentTimeNanos(); + try (InputStream input = openInputStreamFromBlobStore(rangeToWrite.start() + relativePos, len)) { + assert ThreadPool.assertCurrentThreadPool(SearchableSnapshots.CACHE_FETCH_ASYNC_THREAD_POOL_NAME); + logger.trace( + "{}: writing channel {} pos {} length {} (details: {})", + fileInfo.physicalName(), + channelPos, + relativePos, + len, + cacheFile + ); + SharedBytes.copyToCacheFileAligned( + channel, + input, + channelPos, + relativePos, + len, + progressUpdater, + writeBuffer.get().clear() + ); + final long endTimeNanos = stats.currentTimeNanos(); + stats.addCachedBytesWritten(len, endTimeNanos - startTimeNanos); + } + }); assert bytesRead == length : bytesRead + " vs " + length; byteBufferReference.finish(bytesRead); } finally { From 22ead8d106cdccbe51b12fd2ba1c03004658837c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Fri, 26 Jul 2024 12:02:47 +0200 Subject: [PATCH 043/105] [DOCS] Documents automatic text chunking behavior for semantic text. (#111331) --- docs/reference/mapping/types/semantic-text.asciidoc | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/reference/mapping/types/semantic-text.asciidoc b/docs/reference/mapping/types/semantic-text.asciidoc index 6ee30e6b9f831..ece22fc08b00f 100644 --- a/docs/reference/mapping/types/semantic-text.asciidoc +++ b/docs/reference/mapping/types/semantic-text.asciidoc @@ -65,6 +65,9 @@ To allow for large amounts of text to be used in semantic search, `semantic_text Each chunk will include the text subpassage and the corresponding embedding generated from it. When querying, the individual passages will be automatically searched for each document, and the most relevant passage will be used to compute a score. +Documents are split into 250-word sections with a 100-word overlap so that each section shares 100 words with the previous section. +This overlap ensures continuity and prevents vital contextual information in the input text from being lost by a hard break. + [discrete] [[semantic-text-structure]] From 1f741331aff3d097b48edcd1674cdaf18c992f9e Mon Sep 17 00:00:00 2001 From: David Turner Date: Fri, 26 Jul 2024 11:07:02 +0100 Subject: [PATCH 044/105] Add `andThen` override which ignores result (#111328) It's a reasonably common pattern for some steps in a `SubscribableListener` chain to ignore the result of the previous step. Today you have to include a dummy `ignored` argument to the `BiConsumer`. This commit adds a new `andThen` override which accepts a unary `Consumer`, avoiding the need for this unnecessary argument. 
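For context, a self-contained sketch of the pattern this change enables; the `forkAction*` helpers are hypothetical stand-ins mirroring the javadoc example added below:

[source,java]
----
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.SubscribableListener;

class AndThenSketch {
    // Hypothetical async steps, as in the javadoc example in this patch.
    static void forkAction1(String args1, ActionListener<Void> l) { l.onResponse(null); }
    static void forkAction2(String args2, ActionListener<String> l) { l.onResponse("done"); }

    static void run(ActionListener<String> finalListener) {
        SubscribableListener
            .<Void>newForked(l1 -> forkAction1("args1", l1))
            // New unary overload: step 1's result is discarded, with no dummy
            // "ignored" second argument needed.
            .<String>andThen(l2 -> forkAction2("args2", l2))
            .addListener(finalListener);
    }
}
----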
--- .../repositories/s3/S3BlobContainer.java | 2 +- .../snapshots/ConcurrentSnapshotsIT.java | 2 +- .../snapshots/SnapshotShutdownIT.java | 2 +- .../snapshots/SnapshotStressTestsIT.java | 2 +- .../elasticsearch/action/ActionListener.java | 7 ++++ .../action/ActionListenerImplementations.java | 28 +++++++++++++ .../action/support/SubscribableListener.java | 26 +++++++++++++ .../index/shard/StoreRecovery.java | 10 ++--- .../recovery/PeerRecoveryTargetService.java | 2 +- .../recovery/RecoverySourceHandler.java | 7 +--- .../repositories/RepositoriesService.java | 4 +- .../blobstore/BlobStoreRepository.java | 2 +- .../support/SubscribableListenerTests.java | 39 +++++++++++++++++++ .../snapshots/SnapshotResiliencyTests.java | 18 ++++----- .../TransportGetTrainedModelsStatsAction.java | 2 +- 15 files changed, 125 insertions(+), 28 deletions(-) diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java index b5fc37e859b9b..3e2249bf82bb6 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java @@ -644,7 +644,7 @@ void run(BytesReference expected, BytesReference updated, ActionListenerandThen((l, ignored) -> getRegister(purpose, rawKey, l)) + .andThen(l -> getRegister(purpose, rawKey, l)) // Step 5: Perform the compare-and-swap by completing our upload iff the witnessed value matches the expected value. diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java index 836bd26f08eee..71616abf0dcfa 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java @@ -2209,7 +2209,7 @@ public void testDeleteIndexWithOutOfOrderFinalization() { .anyMatch(e -> e.snapshot().getSnapshotId().getName().equals("snapshot-with-index-1") && e.state().completed()) ) // execute the index deletion _directly on the master_ so it happens before the snapshot finalization executes - .andThen((l, ignored) -> masterDeleteIndexService.deleteIndices(new DeleteIndexClusterStateUpdateRequest(l.map(r -> { + .andThen(l -> masterDeleteIndexService.deleteIndices(new DeleteIndexClusterStateUpdateRequest(l.map(r -> { assertTrue(r.isAcknowledged()); return null; })).indices(new Index[] { internalCluster().clusterService().state().metadata().index(indexToDelete).getIndex() }) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotShutdownIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotShutdownIT.java index a45471f273732..2d1e16dc64273 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotShutdownIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotShutdownIT.java @@ -483,7 +483,7 @@ private static void putShutdownForRemovalMetadata(ClusterService clusterService, SubscribableListener .newForked(l -> putShutdownMetadata(clusterService, shutdownMetadata, nodeName, l)) - .andThen((l, ignored) -> flushMasterQueue(clusterService, l)) + .andThen(l -> flushMasterQueue(clusterService, l)) .addListener(listener); } diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStressTestsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStressTestsIT.java index b8b6dcb25b557..ca1b93502ade1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStressTestsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStressTestsIT.java @@ -1257,7 +1257,7 @@ public void clusterStateProcessed(ClusterState initialState, ClusterState newSta ) .andThen( - (l, ignored) -> clusterService.submitUnbatchedStateUpdateTask( + l -> clusterService.submitUnbatchedStateUpdateTask( "unmark [" + node + "] for removal", new ClusterStateUpdateTask() { @Override diff --git a/server/src/main/java/org/elasticsearch/action/ActionListener.java b/server/src/main/java/org/elasticsearch/action/ActionListener.java index ec01d88cb5e6e..f3fa1dd2e105f 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionListener.java +++ b/server/src/main/java/org/elasticsearch/action/ActionListener.java @@ -187,6 +187,13 @@ default ActionListener delegateFailureAndWrap(CheckedBiConsumer(this, bc); } + /** + * Same as {@link #delegateFailureAndWrap(CheckedBiConsumer)} except that the response is ignored and not passed to the delegate. + */ + default ActionListener delegateFailureIgnoreResponseAndWrap(CheckedConsumer, ? extends Exception> c) { + return new ActionListenerImplementations.ResponseDroppingActionListener<>(this, c); + } + /** * Creates a listener which releases the given resource on completion (whether success or failure) */ diff --git a/server/src/main/java/org/elasticsearch/action/ActionListenerImplementations.java b/server/src/main/java/org/elasticsearch/action/ActionListenerImplementations.java index bf4f2dcc2d8db..93f6ac6e49d41 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionListenerImplementations.java +++ b/server/src/main/java/org/elasticsearch/action/ActionListenerImplementations.java @@ -9,6 +9,7 @@ package org.elasticsearch.action; import org.elasticsearch.common.CheckedBiConsumer; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.core.CheckedRunnable; import org.elasticsearch.core.Releasable; @@ -254,6 +255,33 @@ public String toString() { } } + /** + * The same as {@link ResponseWrappingActionListener} except that the response is dropped + */ + static final class ResponseDroppingActionListener extends DelegatingActionListener { + + private final CheckedConsumer, ? extends Exception> consumer; + + ResponseDroppingActionListener(ActionListener delegate, CheckedConsumer, ? 
extends Exception> consumer) { + super(delegate); + this.consumer = consumer; + } + + @Override + public void onResponse(T ignored) { + try { + consumer.accept(delegate); + } catch (Exception e) { + onFailure(e); + } + } + + @Override + public String toString() { + return super.toString() + "/" + consumer; + } + } + static final class RunAfterActionListener extends DelegatingActionListener { private final Runnable runAfter; diff --git a/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java b/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java index 41949c7ce3c22..6a2673e3276fc 100644 --- a/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java +++ b/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java @@ -410,6 +410,32 @@ public void complete(ActionListener listener) { } } + /** + * Creates and returns a new {@link SubscribableListener} {@code L} and subscribes {@code nextStep} to this listener such that if this + * listener is completed successfully then the result is discarded and {@code nextStep} is invoked with argument {@code L}. If this + * listener is completed with exception {@code E} then so is {@code L}. + *
<p>
+ * This can be used to construct a sequence of async actions, each ignoring the result of the previous ones:
+ * <pre>
+ * l.andThen(l1 -> forkAction1(args1, l1)).andThen(l2 -> forkAction2(args2, l2)).addListener(finalListener);
+ * </pre>
+ * After creating this chain, completing {@code l} with a successful response will call {@code forkAction1}, which will on completion
+ * call {@code forkAction2}, which will in turn pass its response to {@code finalListener}. A failure of any step will bypass the
+ * remaining steps and ultimately fail {@code finalListener}.
+ * <p>
+ * The threading of the {@code nextStep} callback is the same as for listeners added with {@link #addListener}: if this listener is
+ * already complete then {@code nextStep} is invoked on the thread calling {@link #andThen} and in its thread context, but if this
+ * listener is incomplete then {@code nextStep} is invoked on the completing thread and in its thread context. In other words, if you
+ * want to ensure that {@code nextStep} is invoked using a particular executor, then you must do both of:
+ * <ul>
+ * <li>Ensure that this {@link SubscribableListener} is always completed using that executor, and</li>
+ * <li>Invoke {@link #andThen} using that executor.</li>
+ * </ul>
+ */ + public SubscribableListener andThen(CheckedConsumer, ? extends Exception> nextStep) { + return newForked(l -> addListener(l.delegateFailureIgnoreResponseAndWrap(nextStep))); + } + /** * Creates and returns a new {@link SubscribableListener} {@code L} and subscribes {@code nextStep} to this listener such that if this * listener is completed successfully with result {@code R} then {@code nextStep} is invoked with arguments {@code L} and {@code R}. If diff --git a/server/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java b/server/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java index 22a7249ec7237..d9866220558be 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java +++ b/server/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java @@ -419,7 +419,7 @@ private void internalRecoverFromStore(IndexShard indexShard, ActionListenerandThen((l, ignored) -> { + .andThen(l -> { final RecoveryState recoveryState = indexShard.recoveryState(); final boolean indexShouldExists = recoveryState.getRecoverySource().getType() != RecoverySource.Type.EMPTY_STORE; indexShard.prepareForIndexRecovery(); @@ -490,7 +490,7 @@ private void internalRecoverFromStore(IndexShard indexShard, ActionListenerandThen((l, ignored) -> { + .andThen(l -> { indexShard.getEngine().fillSeqNoGaps(indexShard.getPendingPrimaryTerm()); indexShard.finalizeRecovery(); indexShard.postRecovery("post recovery from shard_store", l); @@ -536,7 +536,7 @@ record ShardAndIndexIds(IndexId indexId, ShardId shardId) {} .newForked(indexShard::preRecovery) - .andThen((shardAndIndexIdsListener, ignored) -> { + .andThen(shardAndIndexIdsListener -> { final RecoveryState.Translog translogState = indexShard.recoveryState().getTranslog(); if (restoreSource == null) { throw new IndexShardRestoreFailedException(shardId, "empty restore source"); @@ -585,7 +585,7 @@ record ShardAndIndexIds(IndexId indexId, ShardId shardId) {} ); }) - .andThen((l, ignored) -> { + .andThen(l -> { indexShard.getIndexEventListener().afterFilesRestoredFromRepository(indexShard); final Store store = indexShard.store(); bootstrap(indexShard, store); @@ -594,7 +594,7 @@ record ShardAndIndexIds(IndexId indexId, ShardId shardId) {} indexShard.openEngineAndRecoverFromTranslog(l); }) - .andThen((l, ignored) -> { + .andThen(l -> { indexShard.getEngine().fillSeqNoGaps(indexShard.getPendingPrimaryTerm()); indexShard.finalizeRecovery(); indexShard.postRecovery("restore done", l); diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java index ac618ac9308c4..ff35bc75c0a9d 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java @@ -380,7 +380,7 @@ record StartRecoveryRequestToSend(StartRecoveryRequest startRecoveryRequest, Str // run pre-recovery activities .newForked(indexShard::preRecovery) // recover the shard as far as possible based on data held locally - .andThen((l, v) -> { + .andThen(l -> { logger.trace("{} preparing shard for peer recovery", recoveryTarget.shardId()); indexShard.prepareForIndexRecovery(); if (indexShard.indexSettings().getIndexMetadata().isSearchableSnapshot()) { diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java 
index df2a9d16ebd6a..0f00f48561b75 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java @@ -700,7 +700,7 @@ void run(ActionListener listener) { .newForked(this::sendShardRecoveryPlanFileInfo) // instruct the target to recover files from snapshot, possibly updating the plan on failure .>andThen( - (l, ignored) -> recoverSnapshotFiles(shardRecoveryPlan, l.delegateResponse((recoverSnapshotFilesListener, e) -> { + l -> recoverSnapshotFiles(shardRecoveryPlan, l.delegateResponse((recoverSnapshotFilesListener, e) -> { if (shardRecoveryPlan.canRecoverSnapshotFilesFromSourceNode() == false && e instanceof CancellableThreads.ExecutionCancelledException == false) { shardRecoveryPlan = shardRecoveryPlan.getFallbackPlan(); @@ -731,10 +731,7 @@ void run(ActionListener listener) { }) // create a retention lease .andThen( - (createRetentionLeaseListener, ignored) -> createRetentionLease( - shardRecoveryPlan.getStartingSeqNo(), - createRetentionLeaseListener - ) + createRetentionLeaseListener -> createRetentionLease(shardRecoveryPlan.getStartingSeqNo(), createRetentionLeaseListener) ) // run cleanFiles, renaming temp files, removing surplus ones, creating an empty translog and so on .andThen((finalRecoveryPlanListener, retentionLease) -> { diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java b/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java index c63be88215655..ae90283b5030a 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java @@ -166,7 +166,7 @@ record RegisterRepositoryTaskResult(AcknowledgedResponse ackResponse, boolean ch // When publication has completed (and all acks received or timed out) then verify the repository. // (if acks timed out then acknowledgementStep completes before the master processes this cluster state, hence why we have // to wait for the publication to be complete too) - .andThen((clusterUpdateStep, ignored) -> { + .andThen(clusterUpdateStep -> { final ListenableFuture acknowledgementStep = new ListenableFuture<>(); final ListenableFuture publicationStep = new ListenableFuture<>(); // Boolean==changed. 
submitUnbatchedTask( @@ -221,7 +221,7 @@ public void clusterStateProcessed(ClusterState oldState, ClusterState newState) }) // When verification has completed, get the repository data for the first time .andThen( - (getRepositoryDataStep, ignored) -> threadPool.generic() + getRepositoryDataStep -> threadPool.generic() .execute( ActionRunnable.wrap( getRepositoryDataStep, diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 96fcf0512cbff..ddef1e1b808fe 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -1066,7 +1066,7 @@ private void runWithUniqueShardMetadataNaming(SnapshotDeleteListener listener) { // First write the new shard state metadata (without the removed snapshots) and compute deletion targets .newForked(this::writeUpdatedShardMetadataAndComputeDeletes) - .andThen((l, ignored) -> { + .andThen(l -> { // Once we have put the new shard-level metadata into place, we can update the repository metadata as follows: // 1. Remove the snapshots from the list of existing snapshots // 2. Update the index shard generations of all updated shard folders diff --git a/server/src/test/java/org/elasticsearch/action/support/SubscribableListenerTests.java b/server/src/test/java/org/elasticsearch/action/support/SubscribableListenerTests.java index 808d539646550..08357e7e5805a 100644 --- a/server/src/test/java/org/elasticsearch/action/support/SubscribableListenerTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/SubscribableListenerTests.java @@ -526,6 +526,45 @@ private static void assertComplete(SubscribableListener listener, @Nullab } } + public void testAndThenDropResultSuccess() { + final var initialListener = new SubscribableListener<>(); + final var forked = new AtomicReference>(); + + final var chainedListener = initialListener.andThen(forked::set); + assertNull(forked.get()); + + final var o1 = new Object(); + initialListener.onResponse(o1); + assertSame(chainedListener, forked.get()); + assertFalse(chainedListener.isDone()); + } + + public void testAndThenDropResultThrowException() { + final var initialListener = new SubscribableListener<>(); + final var forked = new AtomicReference>(); + + final var chainedListener = initialListener.andThen(l -> { + forked.set(l); + throw new ElasticsearchException("simulated"); + }); + assertNull(forked.get()); + + final var o1 = new Object(); + initialListener.onResponse(o1); + assertSame(chainedListener, forked.get()); + assertComplete(chainedListener, "simulated"); + } + + public void testAndThenDropResultFailure() { + final var initialListener = new SubscribableListener<>(); + + final var chainedListener = initialListener.andThen(l -> fail("should not be called")); + assertFalse(chainedListener.isDone()); + + initialListener.onFailure(new ElasticsearchException("simulated")); + assertComplete(chainedListener, "simulated"); + } + public void testAndThenApplySuccess() throws Exception { final var initialListener = new SubscribableListener<>(); final var result = new AtomicReference<>(); diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index 54051f8311967..8dcb5ce355849 100644 --- 
a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -1353,7 +1353,7 @@ public TransportRequestHandler interceptHandler( if (randomBoolean()) { final var snapshotName = "snapshot-" + i; testListener = testListener.andThen( - (stepListener, v) -> scheduleNow( + stepListener -> scheduleNow( ActionRunnable.wrap( stepListener, l -> client.admin() @@ -1367,7 +1367,7 @@ public TransportRequestHandler interceptHandler( ); } else { final var cloneName = "clone-" + i; - testListener = testListener.andThen((stepListener, v) -> scheduleNow(ActionRunnable.wrap(stepListener, l -> { + testListener = testListener.andThen(stepListener -> scheduleNow(ActionRunnable.wrap(stepListener, l -> { // The clone API only responds when the clone is complete, but we only want to wait until the clone starts so we watch // the cluster state instead. ClusterServiceUtils.addTemporaryStateListener( @@ -1390,7 +1390,7 @@ public TransportRequestHandler interceptHandler( } } - testListener = testListener.andThen((l, ignored) -> scheduleNow(() -> { + testListener = testListener.andThen(l -> scheduleNow(() -> { // Once all snapshots & clones have started, drop the data node and wait for all snapshot activity to complete testClusterNodes.disconnectNode(testClusterNodes.randomDataNodeSafe()); ClusterServiceUtils.addTemporaryStateListener(masterClusterService, cs -> SnapshotsInProgress.get(cs).isEmpty()).addListener(l); @@ -1444,7 +1444,7 @@ public void testFullSnapshotUnassignedShards() { // Take the snapshot to check the reaction to having unassigned shards .andThen( - (l, ignored) -> client().admin() + l -> client().admin() .cluster() .prepareCreateSnapshot(TEST_REQUEST_TIMEOUT, repoName, randomIdentifier()) .setWaitForCompletion(randomBoolean()) @@ -1495,7 +1495,7 @@ public void testSnapshotNameAlreadyInUseExceptionLogging() { final var testListener = createRepoAndIndex(repoName, "index", between(1, 2)) // take snapshot once .andThen( - (l, ignored) -> client().admin() + l -> client().admin() .cluster() .prepareCreateSnapshot(TEST_REQUEST_TIMEOUT, repoName, snapshotName) .setWaitForCompletion(true) @@ -1503,7 +1503,7 @@ public void testSnapshotNameAlreadyInUseExceptionLogging() { ) // take snapshot again .andThen( - (l, ignored) -> client().admin() + l -> client().admin() .cluster() .prepareCreateSnapshot(TEST_REQUEST_TIMEOUT, repoName, snapshotName) .setWaitForCompletion(randomBoolean()) @@ -1522,7 +1522,7 @@ public void onFailure(Exception e) { ) // attempt to clone snapshot .andThen( - (l, ignored) -> client().admin() + l -> client().admin() .cluster() .prepareCloneSnapshot(TEST_REQUEST_TIMEOUT, repoName, snapshotName, snapshotName) .setIndices("*") @@ -1581,7 +1581,7 @@ public void testIndexNotFoundExceptionLogging() { ) // take snapshot of index that does not exist .andThen( - (l, ignored) -> client().admin() + l -> client().admin() .cluster() .prepareCreateSnapshot(TEST_REQUEST_TIMEOUT, repoName, randomIdentifier()) .setIndices(indexName) @@ -1633,7 +1633,7 @@ public void testIllegalArgumentExceptionLogging() { ) // attempt to take snapshot with illegal config ('none' is allowed as a feature state iff it's the only one in the list) .andThen( - (l, ignored) -> client().admin() + l -> client().admin() .cluster() .prepareCreateSnapshot(TEST_REQUEST_TIMEOUT, repoName, randomIdentifier()) .setFeatureStates("none", "none") diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsAction.java index 7c282d88aebfd..e9461ce371aa5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsAction.java @@ -159,7 +159,7 @@ protected void doExecute( .andThenAccept(tuple -> responseBuilder.setExpandedModelIdsWithAliases(tuple.v2()).setTotalModelCount(tuple.v1())) .andThen( - (l, ignored) -> executeAsyncWithOrigin( + l -> executeAsyncWithOrigin( client, ML_ORIGIN, TransportNodesStatsAction.TYPE, From d04f5c4e1029c2a7002663b02ec3a3bd32d2d6a6 Mon Sep 17 00:00:00 2001 From: Pius Date: Fri, 26 Jul 2024 03:09:53 -0700 Subject: [PATCH 045/105] [DOCS] Clarify that inference ID cannot match model ID (#111310) * Clarify that inference ID cannot match model ID * Update service-elasticsearch.asciidoc --- docs/reference/inference/service-elasticsearch.asciidoc | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/docs/reference/inference/service-elasticsearch.asciidoc b/docs/reference/inference/service-elasticsearch.asciidoc index b568a4691a4bd..6fb0b4a38d0ef 100644 --- a/docs/reference/inference/service-elasticsearch.asciidoc +++ b/docs/reference/inference/service-elasticsearch.asciidoc @@ -123,16 +123,17 @@ The following example shows how to create an {infer} endpoint called [source,console] ------------------------------------------------------------ -PUT _inference/text_embedding/my-msmarco-minilm-model +PUT _inference/text_embedding/my-msmarco-minilm-model <1> { "service": "elasticsearch", "service_settings": { "num_allocations": 1, "num_threads": 1, - "model_id": "msmarco-MiniLM-L12-cos-v5" <1> + "model_id": "msmarco-MiniLM-L12-cos-v5" <2> } } ------------------------------------------------------------ // TEST[skip:TBD] -<1> The `model_id` must be the ID of a text embedding model which has already been +<1> Provide a unique identifier for the inference endpoint. The `inference_id` must be unique and must not match the `model_id`. +<2> The `model_id` must be the ID of a text embedding model which has already been {ml-docs}/ml-nlp-import-model.html#ml-nlp-import-script[uploaded through Eland]. 
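A note on the patch above that adds `delegateFailureIgnoreResponseAndWrap` and the single-argument `SubscribableListener.andThen` overload: the mechanical change throughout that patch is to drop the unused `ignored` parameter from `(l, ignored) -> ...` lambdas wherever a step discards the previous step's result. A minimal, self-contained sketch of both shapes follows; the class name and the inline `onResponse` calls are stand-ins for real forked async work, not taken from the patch itself:

    import org.elasticsearch.action.ActionListener;
    import org.elasticsearch.action.support.SubscribableListener;

    public class AndThenChainSketch {
        public static void main(String[] args) {
            SubscribableListener
                .<String>newForked(l -> l.onResponse("step one"))
                // Two-argument overload: the previous result arrives but is unused.
                .<String>andThen((l, ignored) -> l.onResponse("step two"))
                // New single-argument overload: the unused result is dropped explicitly.
                .<String>andThen(l -> l.onResponse("step three"))
                .addListener(ActionListener.wrap(
                    r -> System.out.println("chain completed: " + r),
                    e -> System.out.println("chain failed: " + e)
                ));
        }
    }

As the new javadoc describes, a failure at any step bypasses the remaining steps and fails the final listener.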
From 257f2390ac3d799c86c30b2ee21ce5ff0ac14ec2 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Fri, 26 Jul 2024 12:22:26 +0200 Subject: [PATCH 046/105] EQL: fix validation of TEXT fields with case insensitive comparison (#111238) --- docs/changelog/111238.yaml | 6 +++ .../rest-api-spec/test/eql/60_no_exact.yml | 37 +++++++++++++++++++ .../InsensitiveBinaryComparison.java | 5 +++ 3 files changed, 48 insertions(+) create mode 100644 docs/changelog/111238.yaml create mode 100644 x-pack/plugin/eql/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/eql/60_no_exact.yml diff --git a/docs/changelog/111238.yaml b/docs/changelog/111238.yaml new file mode 100644 index 0000000000000..b918b754ff595 --- /dev/null +++ b/docs/changelog/111238.yaml @@ -0,0 +1,6 @@ +pr: 111238 +summary: Fix validation of TEXT fields with case insensitive comparison +area: EQL +type: bug +issues: + - 111235 diff --git a/x-pack/plugin/eql/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/eql/60_no_exact.yml b/x-pack/plugin/eql/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/eql/60_no_exact.yml new file mode 100644 index 0000000000000..133587561ec3f --- /dev/null +++ b/x-pack/plugin/eql/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/eql/60_no_exact.yml @@ -0,0 +1,37 @@ +--- +setup: + - do: + indices.create: + index: eql_test + body: + mappings: + properties: + some_text: + type: text + - do: + bulk: + refresh: true + body: + - index: + _index: eql_test + _id: "1" + - event: + - category: process + "@timestamp": 2020-02-03T12:34:56Z + user: SYSTEM + id: 123 + valid: false + some_text: foo + + +--- + +"Case insensitive match on text field": + - do: + catch: "bad_request" + eql.search: + index: eql_test + body: + query: 'process where some_text: "foo"' + - match: { error.root_cause.0.type: "verification_exception" } + - match: { error.root_cause.0.reason: "Found 1 problem\nline 1:15: [:] cannot operate on first argument field of data type [text]: No keyword/multi-field defined exact matches for [some_text]; define one or use MATCH/QUERY instead" } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/predicate/operator/comparison/InsensitiveBinaryComparison.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/predicate/operator/comparison/InsensitiveBinaryComparison.java index 0a486b77ea839..6873543ab4ec4 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/predicate/operator/comparison/InsensitiveBinaryComparison.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/predicate/operator/comparison/InsensitiveBinaryComparison.java @@ -51,6 +51,11 @@ protected TypeResolution resolveInputType(Expression e, ParamOrdinal paramOrdina resolution.message(), regularOperatorSymbol() ); + return new TypeResolution(message); + } + resolution = TypeResolutions.isExact(e, op, paramOrdinal); + if (resolution.unresolved()) { + String message = LoggerMessageFormat.format(null, "{}", resolution.message()); resolution = new TypeResolution(message); } return resolution; From 4880205eeef8dd87b53be708269ef40eb86d037f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= Date: Fri, 26 Jul 2024 13:58:06 +0200 Subject: [PATCH 047/105] Passing correct executor (#111305) With https://github.com/elastic/elasticsearch/pull/100895 we changed the executor for TransporActions to be explicitly passed; while changing `TransportYieldsContinuationsAction` used in `Netty4ChunkedContinuationsIT`, I passed down the 
direct executor by mistake, which broke the continuation tests. Closes #111283 --- .../http/netty4/Netty4ChunkedContinuationsIT.java | 2 +- muted-tests.yml | 3 --- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4ChunkedContinuationsIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4ChunkedContinuationsIT.java index 46684faf9fb66..60c8431c9e466 100644 --- a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4ChunkedContinuationsIT.java +++ b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4ChunkedContinuationsIT.java @@ -435,7 +435,7 @@ public static class TransportYieldsContinuationsAction extends TransportAction Date: Fri, 26 Jul 2024 14:11:01 +0200 Subject: [PATCH 048/105] ESQL: Added Median and MedianAbsoluteDeviation aggregations tests and kibana docs (#111231) --- .../functions/aggregation-functions.asciidoc | 8 +- .../esql/functions/appendix/median.asciidoc | 7 ++ .../median_absolute_deviation.asciidoc | 7 ++ .../functions/description/median.asciidoc | 7 ++ .../median_absolute_deviation.asciidoc | 7 ++ .../esql/functions/examples/median.asciidoc | 22 ++++ .../median_absolute_deviation.asciidoc | 22 ++++ .../functions/kibana/definition/median.json | 49 +++++++++ .../definition/median_absolute_deviation.json | 49 +++++++++ .../esql/functions/kibana/docs/median.md | 12 ++ .../kibana/docs/median_absolute_deviation.md | 14 +++ .../esql/functions/layout/median.asciidoc | 16 +++ .../layout/median_absolute_deviation.asciidoc | 16 +++ docs/reference/esql/functions/median.asciidoc | 52 --------- .../esql/functions/parameters/median.asciidoc | 6 + .../median_absolute_deviation.asciidoc | 6 + .../esql/functions/signature/median.svg | 1 + .../signature/median_absolute_deviation.svg | 1 + .../esql/functions/types/median.asciidoc | 11 ++ .../types/median_absolute_deviation.asciidoc | 11 ++ .../src/main/resources/meta.csv-spec | 12 +- .../expression/function/aggregate/Median.java | 24 +++- .../aggregate/MedianAbsoluteDeviation.java | 31 +++++- .../MedianAbsoluteDeviationTests.java | 69 ++++++++++++ .../function/aggregate/MedianTests.java | 103 ++++++++++++++++++ 25 files changed, 495 insertions(+), 68 deletions(-) create mode 100644 docs/reference/esql/functions/appendix/median.asciidoc create mode 100644 docs/reference/esql/functions/appendix/median_absolute_deviation.asciidoc create mode 100644 docs/reference/esql/functions/description/median.asciidoc create mode 100644 docs/reference/esql/functions/description/median_absolute_deviation.asciidoc create mode 100644 docs/reference/esql/functions/examples/median.asciidoc create mode 100644 docs/reference/esql/functions/examples/median_absolute_deviation.asciidoc create mode 100644 docs/reference/esql/functions/kibana/definition/median.json create mode 100644 docs/reference/esql/functions/kibana/definition/median_absolute_deviation.json create mode 100644 docs/reference/esql/functions/kibana/docs/median.md create mode 100644 docs/reference/esql/functions/kibana/docs/median_absolute_deviation.md create mode 100644 docs/reference/esql/functions/layout/median.asciidoc create mode 100644 docs/reference/esql/functions/layout/median_absolute_deviation.asciidoc delete mode 100644 docs/reference/esql/functions/median.asciidoc create mode 100644 docs/reference/esql/functions/parameters/median.asciidoc create mode 100644 
docs/reference/esql/functions/parameters/median_absolute_deviation.asciidoc create mode 100644 docs/reference/esql/functions/signature/median.svg create mode 100644 docs/reference/esql/functions/signature/median_absolute_deviation.svg create mode 100644 docs/reference/esql/functions/types/median.asciidoc create mode 100644 docs/reference/esql/functions/types/median_absolute_deviation.asciidoc create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviationTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianTests.java diff --git a/docs/reference/esql/functions/aggregation-functions.asciidoc b/docs/reference/esql/functions/aggregation-functions.asciidoc index 083f0eee792cd..821b109741a0a 100644 --- a/docs/reference/esql/functions/aggregation-functions.asciidoc +++ b/docs/reference/esql/functions/aggregation-functions.asciidoc @@ -12,8 +12,8 @@ The <> command supports these aggregate functions: * <> * <> * <> -* <> -* <> +* <> +* <> * <> * <> * experimental:[] <> @@ -25,10 +25,10 @@ The <> command supports these aggregate functions: include::count.asciidoc[] include::count-distinct.asciidoc[] -include::median.asciidoc[] -include::median-absolute-deviation.asciidoc[] include::layout/avg.asciidoc[] include::layout/max.asciidoc[] +include::layout/median.asciidoc[] +include::layout/median_absolute_deviation.asciidoc[] include::layout/min.asciidoc[] include::layout/percentile.asciidoc[] include::layout/st_centroid_agg.asciidoc[] diff --git a/docs/reference/esql/functions/appendix/median.asciidoc b/docs/reference/esql/functions/appendix/median.asciidoc new file mode 100644 index 0000000000000..929a4ed0dae2c --- /dev/null +++ b/docs/reference/esql/functions/appendix/median.asciidoc @@ -0,0 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[WARNING] +==== +`MEDIAN` is also {wikipedia}/Nondeterministic_algorithm[non-deterministic]. +This means you can get slightly different results using the same data. +==== diff --git a/docs/reference/esql/functions/appendix/median_absolute_deviation.asciidoc b/docs/reference/esql/functions/appendix/median_absolute_deviation.asciidoc new file mode 100644 index 0000000000000..a4f96c800946b --- /dev/null +++ b/docs/reference/esql/functions/appendix/median_absolute_deviation.asciidoc @@ -0,0 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[WARNING] +==== +`MEDIAN_ABSOLUTE_DEVIATION` is also {wikipedia}/Nondeterministic_algorithm[non-deterministic]. +This means you can get slightly different results using the same data. +==== diff --git a/docs/reference/esql/functions/description/median.asciidoc b/docs/reference/esql/functions/description/median.asciidoc new file mode 100644 index 0000000000000..ff3b7b32ed15e --- /dev/null +++ b/docs/reference/esql/functions/description/median.asciidoc @@ -0,0 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +The value that is greater than half of all values and less than half of all values, also known as the 50% <>. + +NOTE: Like <>, `MEDIAN` is <>. 
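Between these two generated description files it is worth pinning down the exact definitions they document: the median is the 50th percentile, and the median absolute deviation described next is `median(|median(X) - X|)`. Below is a plain-Java sketch of the exact computations, for intuition only; the class and method names are illustrative, and the actual ES|QL aggregations are approximate (TDigest-based), as the NOTE lines say, so they can return slightly different results:

    import java.util.Arrays;

    public class MedianSketch {
        // Exact median: middle value, or mean of the two middle values.
        static double median(double[] values) {
            double[] sorted = values.clone();
            Arrays.sort(sorted);
            int n = sorted.length;
            return n % 2 == 1 ? sorted[n / 2] : (sorted[n / 2 - 1] + sorted[n / 2]) / 2.0;
        }

        // median(|median(X) - X|), the definition quoted in the MAD docs below.
        static double medianAbsoluteDeviation(double[] values) {
            double m = median(values);
            double[] deviations = new double[values.length];
            for (int i = 0; i < values.length; i++) {
                deviations[i] = Math.abs(m - values[i]);
            }
            return median(deviations);
        }

        public static void main(String[] args) {
            double[] x = { 1, 2, 4, 8, 16 };
            System.out.println(median(x));                  // 4.0
            System.out.println(medianAbsoluteDeviation(x)); // 3.0
        }
    }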
diff --git a/docs/reference/esql/functions/description/median_absolute_deviation.asciidoc b/docs/reference/esql/functions/description/median_absolute_deviation.asciidoc new file mode 100644 index 0000000000000..1a363920dd422 --- /dev/null +++ b/docs/reference/esql/functions/description/median_absolute_deviation.asciidoc @@ -0,0 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns the median absolute deviation, a measure of variability. It is a robust statistic, meaning that it is useful for describing data that may have outliers, or may not be normally distributed. For such data it can be more descriptive than standard deviation. It is calculated as the median of each data point's deviation from the median of the entire sample. That is, for a random variable `X`, the median absolute deviation is `median(|median(X) - X|)`. + +NOTE: Like <>, `MEDIAN_ABSOLUTE_DEVIATION` is <>. diff --git a/docs/reference/esql/functions/examples/median.asciidoc b/docs/reference/esql/functions/examples/median.asciidoc new file mode 100644 index 0000000000000..cb6248dcff148 --- /dev/null +++ b/docs/reference/esql/functions/examples/median.asciidoc @@ -0,0 +1,22 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Examples* + +[source.merge.styled,esql] +---- +include::{esql-specs}/stats_percentile.csv-spec[tag=median] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/stats_percentile.csv-spec[tag=median-result] +|=== +The expression can use inline functions. For example, to calculate the median of the maximum values of a multivalued column, first use `MV_MAX` to get the maximum value per row, and use the result with the `MEDIAN` function +[source.merge.styled,esql] +---- +include::{esql-specs}/stats_percentile.csv-spec[tag=docsStatsMedianNestedExpression] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/stats_percentile.csv-spec[tag=docsStatsMedianNestedExpression-result] +|=== + diff --git a/docs/reference/esql/functions/examples/median_absolute_deviation.asciidoc b/docs/reference/esql/functions/examples/median_absolute_deviation.asciidoc new file mode 100644 index 0000000000000..20891126c20fb --- /dev/null +++ b/docs/reference/esql/functions/examples/median_absolute_deviation.asciidoc @@ -0,0 +1,22 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Examples* + +[source.merge.styled,esql] +---- +include::{esql-specs}/stats_percentile.csv-spec[tag=median-absolute-deviation] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/stats_percentile.csv-spec[tag=median-absolute-deviation-result] +|=== +The expression can use inline functions. 
For example, to calculate the the median absolute deviation of the maximum values of a multivalued column, first use `MV_MAX` to get the maximum value per row, and use the result with the `MEDIAN_ABSOLUTE_DEVIATION` function +[source.merge.styled,esql] +---- +include::{esql-specs}/stats_percentile.csv-spec[tag=docsStatsMADNestedExpression] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/stats_percentile.csv-spec[tag=docsStatsMADNestedExpression-result] +|=== + diff --git a/docs/reference/esql/functions/kibana/definition/median.json b/docs/reference/esql/functions/kibana/definition/median.json new file mode 100644 index 0000000000000..4887a4497e813 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/median.json @@ -0,0 +1,49 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "agg", + "name" : "median", + "description" : "The value that is greater than half of all values and less than half of all values, also known as the 50% <>.", + "note" : "Like <>, `MEDIAN` is <>.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "FROM employees\n| STATS MEDIAN(salary), PERCENTILE(salary, 50)", + "FROM employees\n| STATS median_max_salary_change = MEDIAN(MV_MAX(salary_change))" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/median_absolute_deviation.json b/docs/reference/esql/functions/kibana/definition/median_absolute_deviation.json new file mode 100644 index 0000000000000..4a8b1cd30611f --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/median_absolute_deviation.json @@ -0,0 +1,49 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "agg", + "name" : "median_absolute_deviation", + "description" : "Returns the median absolute deviation, a measure of variability. It is a robust statistic, meaning that it is useful for describing data that may have outliers, or may not be normally distributed. For such data it can be more descriptive than standard deviation.\n\nIt is calculated as the median of each data point's deviation from the median of the entire sample. 
That is, for a random variable `X`, the median absolute deviation is `median(|median(X) - X|)`.", + "note" : "Like <>, `MEDIAN_ABSOLUTE_DEVIATION` is <>.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "FROM employees\n| STATS MEDIAN(salary), MEDIAN_ABSOLUTE_DEVIATION(salary)", + "FROM employees\n| STATS m_a_d_max_salary_change = MEDIAN_ABSOLUTE_DEVIATION(MV_MAX(salary_change))" + ] +} diff --git a/docs/reference/esql/functions/kibana/docs/median.md b/docs/reference/esql/functions/kibana/docs/median.md new file mode 100644 index 0000000000000..7a4370b4d2551 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/median.md @@ -0,0 +1,12 @@ + + +### MEDIAN +The value that is greater than half of all values and less than half of all values, also known as the 50% <>. + +``` +FROM employees +| STATS MEDIAN(salary), PERCENTILE(salary, 50) +``` +Note: Like <>, `MEDIAN` is <>. diff --git a/docs/reference/esql/functions/kibana/docs/median_absolute_deviation.md b/docs/reference/esql/functions/kibana/docs/median_absolute_deviation.md new file mode 100644 index 0000000000000..8db113deb2c49 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/median_absolute_deviation.md @@ -0,0 +1,14 @@ + + +### MEDIAN_ABSOLUTE_DEVIATION +Returns the median absolute deviation, a measure of variability. It is a robust statistic, meaning that it is useful for describing data that may have outliers, or may not be normally distributed. For such data it can be more descriptive than standard deviation. + +It is calculated as the median of each data point's deviation from the median of the entire sample. That is, for a random variable `X`, the median absolute deviation is `median(|median(X) - X|)`. + +``` +FROM employees +| STATS MEDIAN(salary), MEDIAN_ABSOLUTE_DEVIATION(salary) +``` +Note: Like <>, `MEDIAN_ABSOLUTE_DEVIATION` is <>. diff --git a/docs/reference/esql/functions/layout/median.asciidoc b/docs/reference/esql/functions/layout/median.asciidoc new file mode 100644 index 0000000000000..c03e73523983d --- /dev/null +++ b/docs/reference/esql/functions/layout/median.asciidoc @@ -0,0 +1,16 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-median]] +=== `MEDIAN` + +*Syntax* + +[.text-center] +image::esql/functions/signature/median.svg[Embedded,opts=inline] + +include::../parameters/median.asciidoc[] +include::../description/median.asciidoc[] +include::../types/median.asciidoc[] +include::../examples/median.asciidoc[] +include::../appendix/median.asciidoc[] diff --git a/docs/reference/esql/functions/layout/median_absolute_deviation.asciidoc b/docs/reference/esql/functions/layout/median_absolute_deviation.asciidoc new file mode 100644 index 0000000000000..b558e6f11d9d0 --- /dev/null +++ b/docs/reference/esql/functions/layout/median_absolute_deviation.asciidoc @@ -0,0 +1,16 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +[discrete] +[[esql-median_absolute_deviation]] +=== `MEDIAN_ABSOLUTE_DEVIATION` + +*Syntax* + +[.text-center] +image::esql/functions/signature/median_absolute_deviation.svg[Embedded,opts=inline] + +include::../parameters/median_absolute_deviation.asciidoc[] +include::../description/median_absolute_deviation.asciidoc[] +include::../types/median_absolute_deviation.asciidoc[] +include::../examples/median_absolute_deviation.asciidoc[] +include::../appendix/median_absolute_deviation.asciidoc[] diff --git a/docs/reference/esql/functions/median.asciidoc b/docs/reference/esql/functions/median.asciidoc deleted file mode 100644 index 2f7d70775e38e..0000000000000 --- a/docs/reference/esql/functions/median.asciidoc +++ /dev/null @@ -1,52 +0,0 @@ -[discrete] -[[esql-agg-median]] -=== `MEDIAN` - -*Syntax* - -[source,esql] ----- -MEDIAN(expression) ----- - -*Parameters* - -`expression`:: -Expression from which to return the median value. - -*Description* - -Returns the value that is greater than half of all values and less than half of -all values, also known as the 50% <>. - -NOTE: Like <>, `MEDIAN` is <>. - -[WARNING] -==== -`MEDIAN` is also {wikipedia}/Nondeterministic_algorithm[non-deterministic]. -This means you can get slightly different results using the same data. -==== - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/stats_percentile.csv-spec[tag=median] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/stats_percentile.csv-spec[tag=median-result] -|=== - -The expression can use inline functions. For example, to calculate the median of -the maximum values of a multivalued column, first use `MV_MAX` to get the -maximum value per row, and use the result with the `MEDIAN` function: - -[source.merge.styled,esql] ----- -include::{esql-specs}/stats_percentile.csv-spec[tag=docsStatsMedianNestedExpression] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/stats_percentile.csv-spec[tag=docsStatsMedianNestedExpression-result] -|=== diff --git a/docs/reference/esql/functions/parameters/median.asciidoc b/docs/reference/esql/functions/parameters/median.asciidoc new file mode 100644 index 0000000000000..91c56709d182a --- /dev/null +++ b/docs/reference/esql/functions/parameters/median.asciidoc @@ -0,0 +1,6 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Parameters* + +`number`:: + diff --git a/docs/reference/esql/functions/parameters/median_absolute_deviation.asciidoc b/docs/reference/esql/functions/parameters/median_absolute_deviation.asciidoc new file mode 100644 index 0000000000000..91c56709d182a --- /dev/null +++ b/docs/reference/esql/functions/parameters/median_absolute_deviation.asciidoc @@ -0,0 +1,6 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Parameters* + +`number`:: + diff --git a/docs/reference/esql/functions/signature/median.svg b/docs/reference/esql/functions/signature/median.svg new file mode 100644 index 0000000000000..c61b3a9e77817 --- /dev/null +++ b/docs/reference/esql/functions/signature/median.svg @@ -0,0 +1 @@ +MEDIAN(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/median_absolute_deviation.svg b/docs/reference/esql/functions/signature/median_absolute_deviation.svg new file mode 100644 index 0000000000000..bcf01de52ac12 --- /dev/null +++ b/docs/reference/esql/functions/signature/median_absolute_deviation.svg @@ -0,0 +1 @@ +MEDIAN_ABSOLUTE_DEVIATION(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/types/median.asciidoc b/docs/reference/esql/functions/types/median.asciidoc new file mode 100644 index 0000000000000..273dae4af76c2 --- /dev/null +++ b/docs/reference/esql/functions/types/median.asciidoc @@ -0,0 +1,11 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +number | result +double | double +integer | double +long | double +|=== diff --git a/docs/reference/esql/functions/types/median_absolute_deviation.asciidoc b/docs/reference/esql/functions/types/median_absolute_deviation.asciidoc new file mode 100644 index 0000000000000..273dae4af76c2 --- /dev/null +++ b/docs/reference/esql/functions/types/median_absolute_deviation.asciidoc @@ -0,0 +1,11 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +number | result +double | double +integer | double +long | double +|=== diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index cb073cbc35b61..c036e04bc8ba3 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -40,8 +40,8 @@ double e() "double log10(number:double|integer|long|unsigned_long)" "keyword|text ltrim(string:keyword|text)" "boolean|double|integer|long|date|ip max(field:boolean|double|integer|long|date|ip)" -"double|integer|long median(number:double|integer|long)" -"double|integer|long median_absolute_deviation(number:double|integer|long)" +"double median(number:double|integer|long)" +"double median_absolute_deviation(number:double|integer|long)" "boolean|double|integer|long|date|ip min(field:boolean|double|integer|long|date|ip)" "boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version mv_append(field1:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version, field2:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version)" "double mv_avg(number:double|integer|long|unsigned_long)" @@ -283,8 +283,8 @@ log |Returns the logarithm of a value to a base. The input can be any log10 |Returns the logarithm of a value to base 10. The input can be any numeric value, the return value is always a double. Logs of 0 and negative numbers return `null` as well as a warning. ltrim |Removes leading whitespaces from a string. max |The maximum value of a field. 
-median |The value that is greater than half of all values and less than half of all values. -median_absolut|The median absolute deviation, a measure of variability. +median |The value that is greater than half of all values and less than half of all values, also known as the 50% <>. +median_absolut|"Returns the median absolute deviation, a measure of variability. It is a robust statistic, meaning that it is useful for describing data that may have outliers, or may not be normally distributed. For such data it can be more descriptive than standard deviation. It is calculated as the median of each data point's deviation from the median of the entire sample. That is, for a random variable `X`, the median absolute deviation is `median(|median(X) - X|)`." min |The minimum value of a field. mv_append |Concatenates values of two multi-value fields. mv_avg |Converts a multivalued field into a single valued field containing the average of all of the values. @@ -406,8 +406,8 @@ log |double log10 |double |false |false |false ltrim |"keyword|text" |false |false |false max |"boolean|double|integer|long|date|ip" |false |false |true -median |"double|integer|long" |false |false |true -median_absolut|"double|integer|long" |false |false |true +median |double |false |false |true +median_absolut|double |false |false |true min |"boolean|double|integer|long|date|ip" |false |false |true mv_append |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version" |[false, false] |false |false mv_avg |double |false |false |false diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Median.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Median.java index 36207df331e47..348fef577c934 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Median.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Median.java @@ -16,6 +16,7 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.SurrogateExpression; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDouble; @@ -32,9 +33,26 @@ public class Median extends AggregateFunction implements SurrogateExpression { // TODO: Add the compression parameter @FunctionInfo( - returnType = { "double", "integer", "long" }, - description = "The value that is greater than half of all values and less than half of all values.", - isAggregation = true + returnType = "double", + description = "The value that is greater than half of all values and less than half of all values, " + + "also known as the 50% <>.", + note = "Like <>, `MEDIAN` is <>.", + appendix = """ + [WARNING] + ==== + `MEDIAN` is also {wikipedia}/Nondeterministic_algorithm[non-deterministic]. + This means you can get slightly different results using the same data. + ====""", + isAggregation = true, + examples = { + @Example(file = "stats_percentile", tag = "median"), + @Example( + description = "The expression can use inline functions. 
For example, to calculate the median of " + + "the maximum values of a multivalued column, first use `MV_MAX` to get the " + + "maximum value per row, and use the result with the `MEDIAN` function", + file = "stats_percentile", + tag = "docsStatsMedianNestedExpression" + ), } ) public Median(Source source, @Param(name = "number", type = { "double", "integer", "long" }) Expression field) { super(source, field); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviation.java index 23d55942cc72f..46661e96b1d48 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviation.java @@ -16,6 +16,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -31,9 +32,33 @@ public class MedianAbsoluteDeviation extends NumericAggregate { // TODO: Add parameter @FunctionInfo( - returnType = { "double", "integer", "long" }, - description = "The median absolute deviation, a measure of variability.", - isAggregation = true + returnType = "double", + description = "Returns the median absolute deviation, a measure of variability. It is a robust " + + "statistic, meaning that it is useful for describing data that may have outliers, " + + "or may not be normally distributed. For such data it can be more descriptive " + + "than standard deviation." + + "\n\n" + + "It is calculated as the median of each data point's deviation from the median of " + + "the entire sample. That is, for a random variable `X`, the median absolute " + + "deviation is `median(|median(X) - X|)`.", + note = "Like <>, `MEDIAN_ABSOLUTE_DEVIATION` is <>.", + appendix = """ + [WARNING] + ==== + `MEDIAN_ABSOLUTE_DEVIATION` is also {wikipedia}/Nondeterministic_algorithm[non-deterministic]. + This means you can get slightly different results using the same data. + ====""", + isAggregation = true, + examples = { + @Example(file = "stats_percentile", tag = "median-absolute-deviation"), + @Example( + description = "The expression can use inline functions. 
For example, to calculate the the " + + "median absolute deviation of the maximum values of a multivalued column, first " + + "use `MV_MAX` to get the maximum value per row, and use the result with the " + + "`MEDIAN_ABSOLUTE_DEVIATION` function", + file = "stats_percentile", + tag = "docsStatsMADNestedExpression" + ), } ) public MedianAbsoluteDeviation(Source source, @Param(name = "number", type = { "double", "integer", "long" }) Expression field) { super(source, field); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviationTests.java new file mode 100644 index 0000000000000..2ba3345c95bdc --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviationTests.java @@ -0,0 +1,69 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.aggregate; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.search.aggregations.metrics.InternalMedianAbsoluteDeviation; +import org.elasticsearch.search.aggregations.metrics.TDigestState; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.AbstractAggregationTestCase; +import org.elasticsearch.xpack.esql.expression.function.MultiRowTestCaseSupplier; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; + +import java.util.List; +import java.util.function.Supplier; +import java.util.stream.Stream; + +import static org.hamcrest.Matchers.equalTo; + +public class MedianAbsoluteDeviationTests extends AbstractAggregationTestCase { + public MedianAbsoluteDeviationTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + var suppliers = Stream.of( + MultiRowTestCaseSupplier.intCases(1, 1000, Integer.MIN_VALUE, Integer.MAX_VALUE, true), + MultiRowTestCaseSupplier.longCases(1, 1000, Long.MIN_VALUE, Long.MAX_VALUE, true), + MultiRowTestCaseSupplier.doubleCases(1, 1000, -Double.MAX_VALUE, Double.MAX_VALUE, true) + ).flatMap(List::stream).map(MedianAbsoluteDeviationTests::makeSupplier).toList(); + + return parameterSuppliersFromTypedDataWithDefaultChecks(suppliers); + } + + @Override + protected Expression build(Source source, List args) { + return new MedianAbsoluteDeviation(source, args.get(0)); + } + + private static TestCaseSupplier makeSupplier(TestCaseSupplier.TypedDataSupplier fieldSupplier) { + return new TestCaseSupplier(List.of(fieldSupplier.type()), () -> { + var fieldTypedData = fieldSupplier.get(); + + var digest = TDigestState.create(1000); + + for (var value : fieldTypedData.multiRowData()) { + digest.add(((Number) value).doubleValue()); + } + + var expected = digest.size() == 0 ? 
null : InternalMedianAbsoluteDeviation.computeMedianAbsoluteDeviation(digest); + + return new TestCaseSupplier.TestCase( + List.of(fieldTypedData), + "MedianAbsoluteDeviation[number=Attribute[channel=0]]", + DataType.DOUBLE, + equalTo(expected) + ); + }); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianTests.java new file mode 100644 index 0000000000000..b94045a9c732e --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianTests.java @@ -0,0 +1,103 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.aggregate; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.search.aggregations.metrics.TDigestState; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.AbstractAggregationTestCase; +import org.elasticsearch.xpack.esql.expression.function.MultiRowTestCaseSupplier; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Supplier; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static org.hamcrest.Matchers.equalTo; + +public class MedianTests extends AbstractAggregationTestCase { + public MedianTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + var suppliers = Stream.of( + MultiRowTestCaseSupplier.intCases(1, 1000, Integer.MIN_VALUE, Integer.MAX_VALUE, true), + MultiRowTestCaseSupplier.longCases(1, 1000, Long.MIN_VALUE, Long.MAX_VALUE, true), + MultiRowTestCaseSupplier.doubleCases(1, 1000, -Double.MAX_VALUE, Double.MAX_VALUE, true) + ).flatMap(List::stream).map(MedianTests::makeSupplier).collect(Collectors.toCollection(ArrayList::new)); + + suppliers.addAll( + List.of( + // Folding + new TestCaseSupplier( + List.of(DataType.INTEGER), + () -> new TestCaseSupplier.TestCase( + List.of(TestCaseSupplier.TypedData.multiRow(List.of(200), DataType.INTEGER, "number")), + "Median[field=Attribute[channel=0]]", + DataType.DOUBLE, + equalTo(200.) + ) + ), + new TestCaseSupplier( + List.of(DataType.LONG), + () -> new TestCaseSupplier.TestCase( + List.of(TestCaseSupplier.TypedData.multiRow(List.of(200L), DataType.LONG, "number")), + "Median[field=Attribute[channel=0]]", + DataType.DOUBLE, + equalTo(200.) + ) + ), + new TestCaseSupplier( + List.of(DataType.DOUBLE), + () -> new TestCaseSupplier.TestCase( + List.of(TestCaseSupplier.TypedData.multiRow(List.of(200.), DataType.DOUBLE, "number")), + "Median[field=Attribute[channel=0]]", + DataType.DOUBLE, + equalTo(200.) 
+ ) + ) + ) + ); + + return parameterSuppliersFromTypedDataWithDefaultChecks(suppliers); + } + + @Override + protected Expression build(Source source, List args) { + return new Median(source, args.get(0)); + } + + private static TestCaseSupplier makeSupplier(TestCaseSupplier.TypedDataSupplier fieldSupplier) { + return new TestCaseSupplier(List.of(fieldSupplier.type()), () -> { + var fieldTypedData = fieldSupplier.get(); + + var digest = TDigestState.create(1000); + + for (var value : fieldTypedData.multiRowData()) { + digest.add(((Number) value).doubleValue()); + } + + var expected = digest.size() == 0 ? null : digest.quantile(0.5); + + return new TestCaseSupplier.TestCase( + List.of(fieldTypedData), + "Median[number=Attribute[channel=0]]", + DataType.DOUBLE, + equalTo(expected) + ); + }); + } +} From 86c1be442180b65204c35e392f67a2f9257697fb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= Date: Fri, 26 Jul 2024 15:01:43 +0200 Subject: [PATCH 049/105] Add availableCodecs to the CodecProvider interface (#111335) --- .../java/org/elasticsearch/index/codec/CodecProvider.java | 6 +++++- .../java/org/elasticsearch/index/codec/CodecService.java | 2 +- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/codec/CodecProvider.java b/server/src/main/java/org/elasticsearch/index/codec/CodecProvider.java index 277c2a578fa2c..5b1a3bce0080b 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/CodecProvider.java +++ b/server/src/main/java/org/elasticsearch/index/codec/CodecProvider.java @@ -13,7 +13,11 @@ /** * Abstracts codec lookup by name, to make CodecService extensible. */ -@FunctionalInterface public interface CodecProvider { Codec codec(String name); + + /** + * Returns all registered available codec names. + */ + String[] availableCodecs(); } diff --git a/server/src/main/java/org/elasticsearch/index/codec/CodecService.java b/server/src/main/java/org/elasticsearch/index/codec/CodecService.java index f95aeada762f1..ef8d130f86f1e 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/CodecService.java +++ b/server/src/main/java/org/elasticsearch/index/codec/CodecService.java @@ -87,8 +87,8 @@ public Codec codec(String name) { /** * Returns all registered available codec names. - * Public visibility for tests. */ + @Override public String[] availableCodecs() { return codecs.keySet().toArray(new String[0]); } From b40ee929ee825d8b00ba542d1c4d484d31b5ee69 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 26 Jul 2024 09:25:32 -0400 Subject: [PATCH 050/105] ESQL: Reenable upgrade test (#111342) Maybe it's to do with replicas? Everyone else has replicas. 
Closes #111222 --- muted-tests.yml | 3 --- .../org/elasticsearch/xpack/restart/FullClusterRestartIT.java | 2 +- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index c079e344c1b03..b251972e2c9fd 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -102,9 +102,6 @@ tests: - class: org.elasticsearch.xpack.security.authc.oidc.OpenIdConnectAuthIT method: testAuthenticateWithImplicitFlow issue: https://github.com/elastic/elasticsearch/issues/111191 -- class: org.elasticsearch.xpack.restart.FullClusterRestartIT - method: testDisableFieldNameField {cluster=UPGRADED} - issue: https://github.com/elastic/elasticsearch/issues/111222 - class: org.elasticsearch.repositories.azure.AzureBlobContainerRetriesTests method: testReadNonexistentBlobThrowsNoSuchFileException issue: https://github.com/elastic/elasticsearch/issues/111233 diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index 9de999a0616e5..1cdf83b659093 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -1027,7 +1027,7 @@ public void testDisableFieldNameField() throws IOException { { "settings": { "index": { - "number_of_replicas": 0 + "number_of_replicas": 1 } }, "mappings": { From 56f5dfb9a052fd262c9d58ef05410a7ed5584511 Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Fri, 26 Jul 2024 14:47:43 +0100 Subject: [PATCH 051/105] [DOCS] Update retriever reranker options (#111337) * [DOCS] Update retriever reranker options * Fix typo --- docs/reference/search/retriever.asciidoc | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/docs/reference/search/retriever.asciidoc b/docs/reference/search/retriever.asciidoc index ed39ac786880b..b86339b905631 100644 --- a/docs/reference/search/retriever.asciidoc +++ b/docs/reference/search/retriever.asciidoc @@ -213,7 +213,13 @@ The `text_similarity_reranker` is a type of retriever that enhances search resul To use `text_similarity_reranker` you must first set up a `rerank` task using the <>. The `rerank` task should be set up with a machine learning model that can compute text similarity. -Currently you can integrate directly with the Cohere Rerank endpoint using the <> task, or upload a model to {es} <>. + +Currently you can: + +* Integrate directly with the <> using the `rerank` task type +* Integrate directly with the <> using the `rerank` task type +* Upload a model to {es} with {eland-docs}/machine-learning.html#ml-nlp-pytorch[Eland] +** Then set up an <> with the `rerank` task type ===== Parameters From 33a238f9d4500fcd75acc63118fea8893a6d1aa1 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 26 Jul 2024 09:57:57 -0400 Subject: [PATCH 052/105] ESQL: Speed up test (#111347) This speeds up a test that was timing out in CI by sending it less data. Bigger data is fun but smaller data still proves things here. 
Closes #111318
---
 muted-tests.yml                                               | 3 ---
 .../compute/lucene/LuceneQueryExpressionEvaluatorTests.java   | 2 +-
 2 files changed, 1 insertion(+), 4 deletions(-)

diff --git a/muted-tests.yml b/muted-tests.yml
index b251972e2c9fd..c06c4bd127be6 100644
--- a/muted-tests.yml
+++ b/muted-tests.yml
@@ -120,9 +120,6 @@ tests:
 - class: org.elasticsearch.xpack.core.ml.job.config.DetectionRuleTests
   method: testEqualsAndHashcode
   issue: https://github.com/elastic/elasticsearch/issues/111308
-- class: org.elasticsearch.compute.lucene.LuceneQueryExpressionEvaluatorTests
-  method: testTermsQueryShuffled
-  issue: https://github.com/elastic/elasticsearch/issues/111318
 - class: org.elasticsearch.xpack.ml.integration.DatafeedJobsRestIT
   issue: https://github.com/elastic/elasticsearch/issues/111319

diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java
index cfac6adfd3cda..beca522878358 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java
@@ -231,7 +231,7 @@ private <T> T withReader(Set<BytesRef> values, CheckedFunction<DirectoryReader, T, IOException> run)
     private Set<BytesRef> values() {
-        int maxNumDocs = between(10, 10_000);
+        int maxNumDocs = between(10, 1_000);
         int keyLength = randomIntBetween(1, 10);
         Set<BytesRef> values = new HashSet<>();
         for (int i = 0; i < maxNumDocs; i++) {

From 8df1c504bbb52b756438efddba9d6c44fc96121e Mon Sep 17 00:00:00 2001
From: Ioana Tagirta
Date: Fri, 26 Jul 2024 16:03:46 +0200
Subject: [PATCH 053/105] Search in ES|QL: Add MATCH operator (#110971)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* Grammar changes

* Push match

* Add query push down integration tests

* Add verifier tests

* Address review comments

* Update docs/changelog/110971.yaml

* Change integration tests after review comments

* Fix typo and add comment for Verifier checks

* Add CSV tests

* Fix generated changelog

* Add yml tests and fix verifier test

* Use capabilities instead of node features

* Enable capability on snapshot builds only

* Update x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java

Co-authored-by: Iván Cea Fontenla

---------

Co-authored-by: Iván Cea Fontenla
---
 docs/changelog/110971.yaml                    |    5 +
 .../main/resources/match-operator.csv-spec    |   68 +
 .../xpack/esql/plugin/MatchOperatorIT.java    |  224 +++
 .../esql/src/main/antlr/EsqlBaseLexer.g4      |    5 +-
 .../esql/src/main/antlr/EsqlBaseLexer.tokens  |  232 +--
 .../esql/src/main/antlr/EsqlBaseParser.g4     |    5 +
 .../esql/src/main/antlr/EsqlBaseParser.tokens |  232 +--
 .../xpack/esql/action/EsqlCapabilities.java   |    7 +-
 .../xpack/esql/analysis/Verifier.java         |   52 +
 .../optimizer/LocalPhysicalPlanOptimizer.java |    3 +
 .../xpack/esql/parser/EsqlBaseLexer.interp    |   17 +-
 .../xpack/esql/parser/EsqlBaseLexer.java      | 1737 +++++++++--------
 .../xpack/esql/parser/EsqlBaseParser.interp   |   13 +-
 .../xpack/esql/parser/EsqlBaseParser.java     | 1630 ++++++++--------
 .../parser/EsqlBaseParserBaseListener.java    |   24 +
 .../parser/EsqlBaseParserBaseVisitor.java     |   14 +
 .../esql/parser/EsqlBaseParserListener.java   |   22 +
 .../esql/parser/EsqlBaseParserVisitor.java    |   13 +
 .../xpack/esql/parser/ExpressionBuilder.java  |   15 +
 .../elasticsearch/xpack/esql/CsvTests.java    |    4 +
 .../xpack/esql/analysis/VerifierTests.java    |   31 +
.../LocalPhysicalPlanOptimizerTests.java | 70 + .../test/esql/180_match_operator.yml | 196 ++ 23 files changed, 2739 insertions(+), 1880 deletions(-) create mode 100644 docs/changelog/110971.yaml create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-operator.csv-spec create mode 100644 x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java create mode 100644 x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/180_match_operator.yml diff --git a/docs/changelog/110971.yaml b/docs/changelog/110971.yaml new file mode 100644 index 0000000000000..3579f77dc0d1d --- /dev/null +++ b/docs/changelog/110971.yaml @@ -0,0 +1,5 @@ +pr: 110971 +summary: "Search in ES|QL: Add MATCH operator" +area: ES|QL +type: feature +issues: [] diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-operator.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-operator.csv-spec new file mode 100644 index 0000000000000..574f27b8c1fed --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-operator.csv-spec @@ -0,0 +1,68 @@ +############################################### +# Tests for MATCH operator +# + +singleMatchWithTextField +required_capability: match_operator +from airports | where name match "london" | keep abbrev, name | sort abbrev; + +abbrev:keyword | name:text +LGW | London Gatwick +LHR | London Heathrow +LTN | London Luton +; + +singleMatchWithKeywordField +required_capability: match_operator +from airports | where abbrev match "LTN" | keep abbrev, name | sort abbrev; + +abbrev:keyword | name:text +LTN | London Luton +; + +multipleMatch +required_capability: match_operator +from airports | where name match "london" or name match "liverpool "| keep abbrev, name | sort abbrev; + +abbrev:keyword | name:text +LGW | London Gatwick +LHR | London Heathrow +LPL | Liverpool John Lennon +LTN | London Luton +; + +multipleWhereWithMatch +required_capability: match_operator +from airports | where name match "john" | WHERE name match "St" | keep abbrev, name | sort abbrev; + +abbrev:keyword | name:text +YXJ | Fort St. John (N. Peace) +; + +combinedMatchWithFunctions +required_capability: match_operator +from airports +| where name match "john" AND country match "Canada" AND scalerank > 5 +| where length(name) > 10 +| keep abbrev, name, country, scalerank +| sort abbrev +; + +abbrev:keyword | name:text | country:keyword | scalerank: integer +YHM | John C. Munro Hamilton Int'l | Canada | 8 +YXJ | Fort St. John (N. Peace) | Canada | 8 +; + +matchWithStats +required_capability: match_operator +from airports +| where name match "john" AND scalerank > 5 +| where length(name) > 10 +| stats count(*) BY type +| sort type +; + +count(*): long | type:keyword +1 | major +2 | mid +; diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java new file mode 100644 index 0000000000000..6eef27fcd04b8 --- /dev/null +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java @@ -0,0 +1,224 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */
+
+package org.elasticsearch.xpack.esql.plugin;
+
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.action.index.IndexRequest;
+import org.elasticsearch.action.support.WriteRequest;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.xpack.esql.VerificationException;
+import org.elasticsearch.xpack.esql.action.AbstractEsqlIntegTestCase;
+import org.elasticsearch.xpack.esql.action.ColumnInfoImpl;
+import org.elasticsearch.xpack.esql.core.type.DataType;
+import org.junit.Before;
+
+import java.util.List;
+
+import static org.elasticsearch.test.ListMatcher.matchesList;
+import static org.elasticsearch.test.MapMatcher.assertMap;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
+import static org.elasticsearch.xpack.esql.EsqlTestUtils.getValuesList;
+import static org.hamcrest.CoreMatchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
+
+//@TestLogging(value = "org.elasticsearch.xpack.esql:TRACE,org.elasticsearch.compute:TRACE", reason = "debug")
+public class MatchOperatorIT extends AbstractEsqlIntegTestCase {
+
+    @Before
+    public void setupIndex() {
+        createAndPopulateIndex();
+    }
+
+    public void testSimpleWhereMatch() {
+        var query = """
+            FROM test
+            | WHERE content MATCH "fox"
+            | KEEP id
+            | SORT id
+            """;
+
+        try (var resp = run(query)) {
+            assertThat(resp.columns().stream().map(ColumnInfoImpl::name).toList(), equalTo(List.of("id")));
+            assertThat(resp.columns().stream().map(ColumnInfoImpl::type).map(DataType::toString).toList(), equalTo(List.of("INTEGER")));
+            // values
+            List<List<Object>> values = getValuesList(resp);
+            assertMap(values, matchesList().item(List.of(1)).item(List.of(6)));
+        }
+    }
+
+    public void testCombinedWhereMatch() {
+        var query = """
+            FROM test
+            | WHERE content MATCH "fox" AND id > 5
+            | KEEP id
+            | SORT id
+            """;
+
+        try (var resp = run(query)) {
+            assertThat(resp.columns().stream().map(ColumnInfoImpl::name).toList(), equalTo(List.of(("id"))));
+            assertThat(resp.columns().stream().map(ColumnInfoImpl::type).map(DataType::toString).toList(), equalTo(List.of(("INTEGER"))));
+            // values
+            List<List<Object>> values = getValuesList(resp);
+            assertMap(values, matchesList().item(List.of(6)));
+        }
+    }
+
+    public void testMultipleMatch() {
+        var query = """
+            FROM test
+            | WHERE content MATCH "fox" OR content MATCH "brown"
+            | KEEP id
+            | SORT id
+            """;
+
+        try (var resp = run(query)) {
+            assertThat(resp.columns().stream().map(ColumnInfoImpl::name).toList(), equalTo(List.of(("id"))));
+            assertThat(resp.columns().stream().map(ColumnInfoImpl::type).map(DataType::toString).toList(), equalTo(List.of(("INTEGER"))));
+            // values
+            List<List<Object>> values = getValuesList(resp);
+            assertThat(values.size(), equalTo(5));
+            assertMap(values, matchesList().item(List.of(1)).item(List.of(2)).item(List.of(3)).item(List.of(4)).item(List.of(6)));
+        }
+    }
+
+    public void testMultipleWhereMatch() {
+        var query = """
+            FROM test
+            | WHERE content MATCH "fox" OR content MATCH "brown"
+            | EVAL summary = CONCAT("document with id: ", to_str(id), "and content: ", content)
+            | SORT summary
+            | LIMIT 4
+            | WHERE content MATCH "brown fox"
+            | KEEP id
+            """;
+
+        // TODO: this should not raise an error;
+        var error = expectThrows(ElasticsearchException.class, () -> run(query));
+        assertThat(error.getMessage(), containsString("Unsupported expression [content MATCH \"brown fox\"]"));
+    }
+
+    public void testNotWhereMatch() {
+        var query = """
+            FROM test
+            | WHERE NOT content MATCH "brown fox"
+            | KEEP id
+            | SORT id
+            """;
+
+        try (var resp = run(query)) {
+            assertThat(resp.columns().stream().map(ColumnInfoImpl::name).toList(), equalTo(List.of(("id"))));
+            assertThat(resp.columns().stream().map(ColumnInfoImpl::type).map(DataType::toString).toList(), equalTo(List.of(("INTEGER"))));
+            // values
+            List<List<Object>> values = getValuesList(resp);
+            assertMap(values, matchesList().item(List.of(5)));
+        }
+    }
+
+    public void testNonExistingColumn() {
+        var query = """
+            FROM test
+            | WHERE something MATCH "fox"
+            """;
+
+        var error = expectThrows(VerificationException.class, () -> run(query));
+        assertThat(error.getMessage(), containsString("Unknown column [something]"));
+    }
+
+    public void testWhereMatchEvalColumn() {
+        var query = """
+            FROM test
+            | EVAL upper_content = to_upper(content)
+            | WHERE upper_content MATCH "FOX"
+            | KEEP id
+            """;
+
+        var error = expectThrows(VerificationException.class, () -> run(query));
+        assertThat(error.getMessage(), containsString("MATCH requires a mapped index field, found [upper_content]"));
+    }
+
+    public void testWhereMatchOverWrittenColumn() {
+        var query = """
+            FROM test
+            | DROP content
+            | EVAL content = CONCAT("document with ID ", to_str(id))
+            | WHERE content MATCH "document"
+            """;
+
+        var error = expectThrows(VerificationException.class, () -> run(query));
+        assertThat(error.getMessage(), containsString("MATCH requires a mapped index field, found [content]"));
+    }
+
+    public void testWhereMatchAfterStats() {
+        var query = """
+            FROM test
+            | STATS count(*)
+            | WHERE content match "fox"
+            """;
+
+        var error = expectThrows(VerificationException.class, () -> run(query));
+        assertThat(error.getMessage(), containsString("Unknown column [content]"));
+    }
+
+    public void testWhereMatchWithFunctions() {
+        var query = """
+            FROM test
+            | WHERE content MATCH "fox" OR to_upper(content) == "FOX"
+            """;
+        var error = expectThrows(ElasticsearchException.class, () -> run(query));
+        assertThat(error.getMessage(), containsString(" Invalid condition using MATCH"));
+    }
+
+    public void testWhereMatchWithRow() {
+        var query = """
+            ROW content = "a brown fox"
+            | WHERE content MATCH "fox"
+            """;
+
+        var error = expectThrows(ElasticsearchException.class, () -> run(query));
+        assertThat(error.getMessage(), containsString("MATCH requires a mapped index field, found [content]"));
+    }
+
+    public void testMatchWithinEval() {
+        var query = """
+            FROM test
+            | EVAL matches_query = content MATCH "fox"
+            """;
+
+        var error = expectThrows(VerificationException.class, () -> run(query));
+        assertThat(error.getMessage(), containsString("EVAL does not support MATCH expressions"));
+    }
+
+    public void testMatchWithNonTextField() {
+        var query = """
+            FROM test
+            | WHERE id MATCH "fox"
+            """;
+
+        var error = expectThrows(VerificationException.class, () -> run(query));
+        assertThat(error.getMessage(), containsString(" MATCH requires a text or keyword field, but [id] has type [integer]"));
+    }
+
+    private void createAndPopulateIndex() {
+        var indexName = "test";
+        var client = client().admin().indices();
+        var CreateRequest = client.prepareCreate(indexName)
+            .setSettings(Settings.builder().put("index.number_of_shards", 1))
+            .setMapping("id", "type=integer", "content", "type=text");
+        assertAcked(CreateRequest);
+        client().prepareBulk()
+            .add(new IndexRequest(indexName).id("1").source("id", 1, "content", "This is a brown fox"))
+            .add(new IndexRequest(indexName).id("2").source("id", 2, "content", "This is a brown dog"))
+            .add(new IndexRequest(indexName).id("3").source("id", 3, "content", "This dog is really brown"))
+
.add(new IndexRequest(indexName).id("4").source("id", 4, "content", "The dog is brown but this document is very very long")) + .add(new IndexRequest(indexName).id("5").source("id", 5, "content", "There is also a white cat")) + .add(new IndexRequest(indexName).id("6").source("id", 6, "content", "The quick brown fox jumps over the lazy dog")) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .get(); + ensureYellow(indexName); + } +} diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 index 25b5cae1acdd0..93f93dbac905b 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 @@ -130,11 +130,12 @@ DESC : 'desc'; DOT : '.'; FALSE : 'false'; FIRST : 'first'; -LAST : 'last'; -LP : '('; IN: 'in'; IS: 'is'; +LAST : 'last'; LIKE: 'like'; +LP : '('; +MATCH: 'match'; NOT : 'not'; NULL : 'null'; NULLS : 'nulls'; diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens index 63eb3a86419a3..4cac289a33634 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens @@ -40,88 +40,89 @@ DESC=39 DOT=40 FALSE=41 FIRST=42 -LAST=43 -LP=44 -IN=45 -IS=46 -LIKE=47 -NOT=48 -NULL=49 -NULLS=50 -OR=51 -PARAM=52 -RLIKE=53 -RP=54 -TRUE=55 -EQ=56 -CIEQ=57 -NEQ=58 -LT=59 -LTE=60 -GT=61 -GTE=62 -PLUS=63 -MINUS=64 -ASTERISK=65 -SLASH=66 -PERCENT=67 -NAMED_OR_POSITIONAL_PARAM=68 -OPENING_BRACKET=69 -CLOSING_BRACKET=70 -UNQUOTED_IDENTIFIER=71 -QUOTED_IDENTIFIER=72 -EXPR_LINE_COMMENT=73 -EXPR_MULTILINE_COMMENT=74 -EXPR_WS=75 -METADATA=76 -FROM_LINE_COMMENT=77 -FROM_MULTILINE_COMMENT=78 -FROM_WS=79 -ID_PATTERN=80 -PROJECT_LINE_COMMENT=81 -PROJECT_MULTILINE_COMMENT=82 -PROJECT_WS=83 -AS=84 -RENAME_LINE_COMMENT=85 -RENAME_MULTILINE_COMMENT=86 -RENAME_WS=87 -ON=88 -WITH=89 -ENRICH_POLICY_NAME=90 -ENRICH_LINE_COMMENT=91 -ENRICH_MULTILINE_COMMENT=92 -ENRICH_WS=93 -ENRICH_FIELD_LINE_COMMENT=94 -ENRICH_FIELD_MULTILINE_COMMENT=95 -ENRICH_FIELD_WS=96 -LOOKUP_LINE_COMMENT=97 -LOOKUP_MULTILINE_COMMENT=98 -LOOKUP_WS=99 -LOOKUP_FIELD_LINE_COMMENT=100 -LOOKUP_FIELD_MULTILINE_COMMENT=101 -LOOKUP_FIELD_WS=102 -MVEXPAND_LINE_COMMENT=103 -MVEXPAND_MULTILINE_COMMENT=104 -MVEXPAND_WS=105 -INFO=106 -SHOW_LINE_COMMENT=107 -SHOW_MULTILINE_COMMENT=108 -SHOW_WS=109 -FUNCTIONS=110 -META_LINE_COMMENT=111 -META_MULTILINE_COMMENT=112 -META_WS=113 -COLON=114 -SETTING=115 -SETTING_LINE_COMMENT=116 -SETTTING_MULTILINE_COMMENT=117 -SETTING_WS=118 -METRICS_LINE_COMMENT=119 -METRICS_MULTILINE_COMMENT=120 -METRICS_WS=121 -CLOSING_METRICS_LINE_COMMENT=122 -CLOSING_METRICS_MULTILINE_COMMENT=123 -CLOSING_METRICS_WS=124 +IN=43 +IS=44 +LAST=45 +LIKE=46 +LP=47 +MATCH=48 +NOT=49 +NULL=50 +NULLS=51 +OR=52 +PARAM=53 +RLIKE=54 +RP=55 +TRUE=56 +EQ=57 +CIEQ=58 +NEQ=59 +LT=60 +LTE=61 +GT=62 +GTE=63 +PLUS=64 +MINUS=65 +ASTERISK=66 +SLASH=67 +PERCENT=68 +NAMED_OR_POSITIONAL_PARAM=69 +OPENING_BRACKET=70 +CLOSING_BRACKET=71 +UNQUOTED_IDENTIFIER=72 +QUOTED_IDENTIFIER=73 +EXPR_LINE_COMMENT=74 +EXPR_MULTILINE_COMMENT=75 +EXPR_WS=76 +METADATA=77 +FROM_LINE_COMMENT=78 +FROM_MULTILINE_COMMENT=79 +FROM_WS=80 +ID_PATTERN=81 +PROJECT_LINE_COMMENT=82 +PROJECT_MULTILINE_COMMENT=83 +PROJECT_WS=84 +AS=85 +RENAME_LINE_COMMENT=86 +RENAME_MULTILINE_COMMENT=87 +RENAME_WS=88 +ON=89 +WITH=90 +ENRICH_POLICY_NAME=91 +ENRICH_LINE_COMMENT=92 +ENRICH_MULTILINE_COMMENT=93 +ENRICH_WS=94 +ENRICH_FIELD_LINE_COMMENT=95 
+ENRICH_FIELD_MULTILINE_COMMENT=96 +ENRICH_FIELD_WS=97 +LOOKUP_LINE_COMMENT=98 +LOOKUP_MULTILINE_COMMENT=99 +LOOKUP_WS=100 +LOOKUP_FIELD_LINE_COMMENT=101 +LOOKUP_FIELD_MULTILINE_COMMENT=102 +LOOKUP_FIELD_WS=103 +MVEXPAND_LINE_COMMENT=104 +MVEXPAND_MULTILINE_COMMENT=105 +MVEXPAND_WS=106 +INFO=107 +SHOW_LINE_COMMENT=108 +SHOW_MULTILINE_COMMENT=109 +SHOW_WS=110 +FUNCTIONS=111 +META_LINE_COMMENT=112 +META_MULTILINE_COMMENT=113 +META_WS=114 +COLON=115 +SETTING=116 +SETTING_LINE_COMMENT=117 +SETTTING_MULTILINE_COMMENT=118 +SETTING_WS=119 +METRICS_LINE_COMMENT=120 +METRICS_MULTILINE_COMMENT=121 +METRICS_WS=122 +CLOSING_METRICS_LINE_COMMENT=123 +CLOSING_METRICS_MULTILINE_COMMENT=124 +CLOSING_METRICS_WS=125 'dissect'=1 'drop'=2 'enrich'=3 @@ -153,36 +154,37 @@ CLOSING_METRICS_WS=124 '.'=40 'false'=41 'first'=42 -'last'=43 -'('=44 -'in'=45 -'is'=46 -'like'=47 -'not'=48 -'null'=49 -'nulls'=50 -'or'=51 -'?'=52 -'rlike'=53 -')'=54 -'true'=55 -'=='=56 -'=~'=57 -'!='=58 -'<'=59 -'<='=60 -'>'=61 -'>='=62 -'+'=63 -'-'=64 -'*'=65 -'/'=66 -'%'=67 -']'=70 -'metadata'=76 -'as'=84 -'on'=88 -'with'=89 -'info'=106 -'functions'=110 -':'=114 +'in'=43 +'is'=44 +'last'=45 +'like'=46 +'('=47 +'match'=48 +'not'=49 +'null'=50 +'nulls'=51 +'or'=52 +'?'=53 +'rlike'=54 +')'=55 +'true'=56 +'=='=57 +'=~'=58 +'!='=59 +'<'=60 +'<='=61 +'>'=62 +'>='=63 +'+'=64 +'-'=65 +'*'=66 +'/'=67 +'%'=68 +']'=71 +'metadata'=77 +'as'=85 +'on'=89 +'with'=90 +'info'=107 +'functions'=111 +':'=115 diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 index 89059822d367b..3a45155c0bc38 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 @@ -53,6 +53,7 @@ booleanExpression : NOT booleanExpression #logicalNot | valueExpression #booleanDefault | regexBooleanExpression #regexExpression + | matchBooleanExpression #matchExpression | left=booleanExpression operator=AND right=booleanExpression #logicalBinary | left=booleanExpression operator=OR right=booleanExpression #logicalBinary | valueExpression (NOT)? IN LP valueExpression (COMMA valueExpression)* RP #logicalIn @@ -64,6 +65,10 @@ regexBooleanExpression | valueExpression (NOT)? 
kind=RLIKE pattern=string ; +matchBooleanExpression + : qualifiedName MATCH queryString=string + ; + valueExpression : operatorExpression #valueExpressionDefault | left=operatorExpression comparisonOperator right=operatorExpression #comparison diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens index 63eb3a86419a3..4cac289a33634 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens @@ -40,88 +40,89 @@ DESC=39 DOT=40 FALSE=41 FIRST=42 -LAST=43 -LP=44 -IN=45 -IS=46 -LIKE=47 -NOT=48 -NULL=49 -NULLS=50 -OR=51 -PARAM=52 -RLIKE=53 -RP=54 -TRUE=55 -EQ=56 -CIEQ=57 -NEQ=58 -LT=59 -LTE=60 -GT=61 -GTE=62 -PLUS=63 -MINUS=64 -ASTERISK=65 -SLASH=66 -PERCENT=67 -NAMED_OR_POSITIONAL_PARAM=68 -OPENING_BRACKET=69 -CLOSING_BRACKET=70 -UNQUOTED_IDENTIFIER=71 -QUOTED_IDENTIFIER=72 -EXPR_LINE_COMMENT=73 -EXPR_MULTILINE_COMMENT=74 -EXPR_WS=75 -METADATA=76 -FROM_LINE_COMMENT=77 -FROM_MULTILINE_COMMENT=78 -FROM_WS=79 -ID_PATTERN=80 -PROJECT_LINE_COMMENT=81 -PROJECT_MULTILINE_COMMENT=82 -PROJECT_WS=83 -AS=84 -RENAME_LINE_COMMENT=85 -RENAME_MULTILINE_COMMENT=86 -RENAME_WS=87 -ON=88 -WITH=89 -ENRICH_POLICY_NAME=90 -ENRICH_LINE_COMMENT=91 -ENRICH_MULTILINE_COMMENT=92 -ENRICH_WS=93 -ENRICH_FIELD_LINE_COMMENT=94 -ENRICH_FIELD_MULTILINE_COMMENT=95 -ENRICH_FIELD_WS=96 -LOOKUP_LINE_COMMENT=97 -LOOKUP_MULTILINE_COMMENT=98 -LOOKUP_WS=99 -LOOKUP_FIELD_LINE_COMMENT=100 -LOOKUP_FIELD_MULTILINE_COMMENT=101 -LOOKUP_FIELD_WS=102 -MVEXPAND_LINE_COMMENT=103 -MVEXPAND_MULTILINE_COMMENT=104 -MVEXPAND_WS=105 -INFO=106 -SHOW_LINE_COMMENT=107 -SHOW_MULTILINE_COMMENT=108 -SHOW_WS=109 -FUNCTIONS=110 -META_LINE_COMMENT=111 -META_MULTILINE_COMMENT=112 -META_WS=113 -COLON=114 -SETTING=115 -SETTING_LINE_COMMENT=116 -SETTTING_MULTILINE_COMMENT=117 -SETTING_WS=118 -METRICS_LINE_COMMENT=119 -METRICS_MULTILINE_COMMENT=120 -METRICS_WS=121 -CLOSING_METRICS_LINE_COMMENT=122 -CLOSING_METRICS_MULTILINE_COMMENT=123 -CLOSING_METRICS_WS=124 +IN=43 +IS=44 +LAST=45 +LIKE=46 +LP=47 +MATCH=48 +NOT=49 +NULL=50 +NULLS=51 +OR=52 +PARAM=53 +RLIKE=54 +RP=55 +TRUE=56 +EQ=57 +CIEQ=58 +NEQ=59 +LT=60 +LTE=61 +GT=62 +GTE=63 +PLUS=64 +MINUS=65 +ASTERISK=66 +SLASH=67 +PERCENT=68 +NAMED_OR_POSITIONAL_PARAM=69 +OPENING_BRACKET=70 +CLOSING_BRACKET=71 +UNQUOTED_IDENTIFIER=72 +QUOTED_IDENTIFIER=73 +EXPR_LINE_COMMENT=74 +EXPR_MULTILINE_COMMENT=75 +EXPR_WS=76 +METADATA=77 +FROM_LINE_COMMENT=78 +FROM_MULTILINE_COMMENT=79 +FROM_WS=80 +ID_PATTERN=81 +PROJECT_LINE_COMMENT=82 +PROJECT_MULTILINE_COMMENT=83 +PROJECT_WS=84 +AS=85 +RENAME_LINE_COMMENT=86 +RENAME_MULTILINE_COMMENT=87 +RENAME_WS=88 +ON=89 +WITH=90 +ENRICH_POLICY_NAME=91 +ENRICH_LINE_COMMENT=92 +ENRICH_MULTILINE_COMMENT=93 +ENRICH_WS=94 +ENRICH_FIELD_LINE_COMMENT=95 +ENRICH_FIELD_MULTILINE_COMMENT=96 +ENRICH_FIELD_WS=97 +LOOKUP_LINE_COMMENT=98 +LOOKUP_MULTILINE_COMMENT=99 +LOOKUP_WS=100 +LOOKUP_FIELD_LINE_COMMENT=101 +LOOKUP_FIELD_MULTILINE_COMMENT=102 +LOOKUP_FIELD_WS=103 +MVEXPAND_LINE_COMMENT=104 +MVEXPAND_MULTILINE_COMMENT=105 +MVEXPAND_WS=106 +INFO=107 +SHOW_LINE_COMMENT=108 +SHOW_MULTILINE_COMMENT=109 +SHOW_WS=110 +FUNCTIONS=111 +META_LINE_COMMENT=112 +META_MULTILINE_COMMENT=113 +META_WS=114 +COLON=115 +SETTING=116 +SETTING_LINE_COMMENT=117 +SETTTING_MULTILINE_COMMENT=118 +SETTING_WS=119 +METRICS_LINE_COMMENT=120 +METRICS_MULTILINE_COMMENT=121 +METRICS_WS=122 +CLOSING_METRICS_LINE_COMMENT=123 +CLOSING_METRICS_MULTILINE_COMMENT=124 +CLOSING_METRICS_WS=125 'dissect'=1 
'drop'=2 'enrich'=3 @@ -153,36 +154,37 @@ CLOSING_METRICS_WS=124 '.'=40 'false'=41 'first'=42 -'last'=43 -'('=44 -'in'=45 -'is'=46 -'like'=47 -'not'=48 -'null'=49 -'nulls'=50 -'or'=51 -'?'=52 -'rlike'=53 -')'=54 -'true'=55 -'=='=56 -'=~'=57 -'!='=58 -'<'=59 -'<='=60 -'>'=61 -'>='=62 -'+'=63 -'-'=64 -'*'=65 -'/'=66 -'%'=67 -']'=70 -'metadata'=76 -'as'=84 -'on'=88 -'with'=89 -'info'=106 -'functions'=110 -':'=114 +'in'=43 +'is'=44 +'last'=45 +'like'=46 +'('=47 +'match'=48 +'not'=49 +'null'=50 +'nulls'=51 +'or'=52 +'?'=53 +'rlike'=54 +')'=55 +'true'=56 +'=='=57 +'=~'=58 +'!='=59 +'<'=60 +'<='=61 +'>'=62 +'>='=63 +'+'=64 +'-'=65 +'*'=66 +'/'=67 +'%'=68 +']'=71 +'metadata'=77 +'as'=85 +'on'=89 +'with'=90 +'info'=107 +'functions'=111 +':'=115 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index e6142e8161d44..76cf95494f7ca 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -181,7 +181,12 @@ public enum Cap { * Make attributes of GROK/DISSECT adjustable and fix a shadowing bug when pushing them down past PROJECT. * https://github.com/elastic/elasticsearch/issues/108008 */ - FIXED_PUSHDOWN_PAST_PROJECT; + FIXED_PUSHDOWN_PAST_PROJECT, + + /** + * Support for match operator + */ + MATCH_OPERATOR(true); private final boolean snapshotOnly; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index 8d1b96570c7a5..19d089cc4cca3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.analysis; +import org.elasticsearch.common.logging.LoggerMessageFormat; import org.elasticsearch.xpack.esql.common.Failure; import org.elasticsearch.xpack.esql.core.capabilities.Unresolvable; import org.elasticsearch.xpack.esql.core.expression.Alias; @@ -19,8 +20,10 @@ import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.expression.predicate.BinaryOperator; +import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.MatchQueryPredicate; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.core.util.Holder; import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; import org.elasticsearch.xpack.esql.expression.function.aggregate.Rate; @@ -55,6 +58,7 @@ import static org.elasticsearch.xpack.esql.common.Failure.fail; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; +import static org.elasticsearch.xpack.esql.optimizer.LocalPhysicalPlanOptimizer.PushFiltersToSource.canPushToSource; /** * This class is part of the planner. Responsible for failing impossible queries with a human-readable error message. 
In particular, this
@@ -170,6 +174,8 @@ else if (p instanceof Lookup lookup) {
             checkOperationsOnUnsignedLong(p, failures);
             checkBinaryComparison(p, failures);
             checkForSortOnSpatialTypes(p, failures);
+
+            checkFilterMatchConditions(p, failures);
         });
         checkRemoteEnrich(plan, failures);
 
@@ -388,6 +394,11 @@ private static void checkEvalFields(LogicalPlan p, Set<Failure> failures) {
                     failures.add(fail(af, "aggregate function [{}] not allowed outside STATS command", af.sourceText()));
                 }
             });
+            // check no MATCH expressions are used
+            field.forEachDown(
+                MatchQueryPredicate.class,
+                mqp -> { failures.add(fail(mqp, "EVAL does not support MATCH expressions")); }
+            );
         });
     }
 }
@@ -573,4 +584,45 @@ private static void checkRemoteEnrich(LogicalPlan plan, Set<Failure> failures) {
             }
         });
     }
+
+    /**
+     * Currently any filter condition using MATCH needs to be pushed down to the Lucene query.
+     * Conditions that use a combination of MATCH and ES|QL functions (e.g. `title MATCH "anna" OR DATE_EXTRACT("year", date) > 2010)
+     * cannot be pushed down to Lucene.
+     * Another condition is for MATCH to use index fields that have been mapped as text or keyword.
+     * We are using canPushToSource at the Verifier level because we want to detect any condition that cannot be pushed down
+     * early in the execution, rather than fail at the compute engine level.
+     * In the future we will be able to handle MATCH at the compute and we will no longer need these checks.
+     */
+    private static void checkFilterMatchConditions(LogicalPlan plan, Set<Failure> failures) {
+        if (plan instanceof Filter f) {
+            Expression condition = f.condition();
+
+            Holder<Boolean> hasMatch = new Holder<>(false);
+            condition.forEachDown(MatchQueryPredicate.class, mqp -> {
+                hasMatch.set(true);
+                var field = mqp.field();
+                if (field instanceof FieldAttribute == false) {
+                    failures.add(fail(mqp, "MATCH requires a mapped index field, found [" + field.sourceText() + "]"));
+                }
+
+                if (DataType.isString(field.dataType()) == false) {
+                    var message = LoggerMessageFormat.format(
+                        null,
+                        "MATCH requires a text or keyword field, but [{}] has type [{}]",
+                        field.sourceText(),
+                        field.dataType().esType()
+                    );
+                    failures.add(fail(mqp, message));
+                }
+            });
+
+            if (canPushToSource(condition, x -> false)) {
+                return;
+            }
+            if (hasMatch.get()) {
+                failures.add(fail(condition, "Invalid condition using MATCH"));
+            }
+        }
+    }
 }
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java
index b9be9b7ad029e..672055b1d64f8 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java
@@ -31,6 +31,7 @@
 import org.elasticsearch.xpack.esql.core.expression.TypedAttribute;
 import org.elasticsearch.xpack.esql.core.expression.function.scalar.UnaryScalarFunction;
 import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates;
+import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.MatchQueryPredicate;
 import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And;
 import org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogic;
 import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not;
@@ -300,6 +301,8 @@ public static boolean canPushToSource(Expression exp, Predicate<FieldAttribute> hasIdenticalDelegate) {
                 && Expressions.foldable(cidrMatch.matches());
         } else if (exp
instanceof SpatialRelatesFunction bc) { return bc.canPushToSource(LocalPhysicalPlanOptimizer::isAggregatable); + } else if (exp instanceof MatchQueryPredicate mqp) { + return mqp.field() instanceof FieldAttribute && DataType.isString(mqp.field().dataType()); } return false; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp index f5484f3e7070f..380b7a994ba87 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp @@ -42,11 +42,12 @@ null '.' 'false' 'first' -'last' -'(' 'in' 'is' +'last' 'like' +'(' +'match' 'not' 'null' 'nulls' @@ -169,11 +170,12 @@ DESC DOT FALSE FIRST -LAST -LP IN IS +LAST LIKE +LP +MATCH NOT NULL NULLS @@ -308,11 +310,12 @@ DESC DOT FALSE FIRST -LAST -LP IN IS +LAST LIKE +LP +MATCH NOT NULL NULLS @@ -471,4 +474,4 @@ METRICS_MODE CLOSING_METRICS_MODE atn: -[4, 0, 124, 1450, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 
7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 2, 190, 7, 190, 2, 191, 7, 191, 2, 192, 7, 192, 2, 193, 7, 193, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 4, 20, 575, 8, 20, 11, 20, 12, 20, 576, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 5, 21, 585, 8, 21, 10, 21, 12, 21, 588, 9, 21, 1, 21, 3, 21, 591, 8, 21, 1, 21, 3, 21, 594, 8, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 603, 8, 22, 10, 22, 12, 22, 606, 9, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 4, 23, 614, 8, 23, 11, 23, 12, 23, 615, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 3, 24, 623, 8, 24, 1, 25, 4, 25, 626, 8, 25, 11, 25, 12, 25, 627, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 1, 29, 1, 29, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1, 35, 1, 35, 1, 36, 1, 36, 3, 36, 667, 8, 36, 1, 36, 4, 36, 670, 8, 36, 11, 36, 12, 36, 671, 1, 37, 1, 37, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 3, 39, 681, 8, 39, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 3, 41, 688, 8, 41, 1, 42, 1, 42, 1, 42, 5, 42, 693, 8, 42, 10, 42, 12, 42, 696, 9, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 5, 42, 704, 8, 42, 10, 42, 12, 42, 707, 9, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 3, 42, 714, 8, 42, 1, 42, 3, 42, 717, 8, 42, 3, 42, 719, 8, 42, 1, 43, 4, 43, 722, 8, 43, 11, 43, 12, 43, 723, 1, 44, 4, 44, 727, 8, 44, 11, 44, 12, 44, 728, 1, 44, 1, 44, 5, 44, 733, 8, 44, 10, 44, 12, 44, 736, 9, 44, 1, 44, 1, 44, 4, 44, 740, 8, 44, 11, 44, 12, 44, 741, 1, 44, 4, 44, 745, 8, 44, 11, 44, 12, 44, 746, 1, 44, 1, 44, 5, 44, 751, 8, 44, 10, 44, 12, 44, 754, 9, 44, 3, 44, 756, 8, 44, 1, 44, 1, 44, 1, 44, 1, 44, 4, 44, 762, 8, 44, 11, 44, 12, 44, 763, 1, 44, 1, 44, 3, 44, 768, 8, 44, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 57, 
1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 67, 1, 67, 1, 67, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 69, 1, 70, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 74, 1, 75, 1, 75, 1, 76, 1, 76, 1, 77, 1, 77, 1, 78, 1, 78, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 5, 80, 890, 8, 80, 10, 80, 12, 80, 893, 9, 80, 1, 80, 1, 80, 4, 80, 897, 8, 80, 11, 80, 12, 80, 898, 3, 80, 901, 8, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 5, 83, 915, 8, 83, 10, 83, 12, 83, 918, 9, 83, 1, 83, 1, 83, 3, 83, 922, 8, 83, 1, 83, 4, 83, 925, 8, 83, 11, 83, 12, 83, 926, 3, 83, 929, 8, 83, 1, 84, 1, 84, 4, 84, 933, 8, 84, 11, 84, 12, 84, 934, 1, 84, 1, 84, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 1, 102, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 3, 104, 1024, 8, 104, 1, 105, 1, 105, 3, 105, 1028, 8, 105, 1, 105, 5, 105, 1031, 8, 105, 10, 105, 12, 105, 1034, 9, 105, 1, 105, 1, 105, 3, 105, 1038, 8, 105, 1, 105, 4, 105, 1041, 8, 105, 11, 105, 12, 105, 1042, 3, 105, 1045, 8, 105, 1, 106, 1, 106, 4, 106, 1049, 8, 106, 11, 106, 12, 106, 1050, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 124, 4, 124, 1126, 8, 124, 11, 124, 12, 124, 1127, 1, 124, 1, 124, 3, 124, 1132, 8, 124, 1, 124, 4, 124, 1135, 8, 124, 11, 124, 12, 124, 1136, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 
1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 160, 1, 160, 1, 161, 1, 161, 1, 161, 1, 161, 1, 162, 1, 162, 1, 162, 1, 162, 1, 163, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 168, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 169, 1, 169, 1, 169, 1, 169, 1, 169, 1, 169, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 173, 1, 174, 1, 174, 1, 175, 1, 175, 1, 175, 1, 175, 1, 175, 4, 175, 1359, 8, 175, 11, 175, 12, 175, 1360, 1, 176, 1, 176, 1, 176, 1, 176, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 180, 1, 180, 1, 181, 1, 181, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 182, 1, 183, 1, 183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 1, 185, 1, 185, 1, 185, 1, 185, 1, 185, 1, 185, 1, 186, 1, 186, 1, 186, 1, 186, 1, 186, 1, 186, 1, 187, 1, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 1, 190, 1, 190, 1, 190, 1, 190, 1, 190, 1, 190, 1, 191, 1, 191, 1, 191, 1, 191, 1, 191, 1, 191, 1, 192, 1, 192, 1, 192, 1, 192, 1, 192, 1, 192, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 2, 604, 705, 0, 194, 16, 1, 18, 2, 20, 3, 22, 4, 24, 5, 26, 6, 28, 7, 30, 8, 32, 9, 34, 10, 36, 11, 38, 12, 40, 13, 42, 14, 44, 15, 46, 16, 48, 17, 50, 18, 52, 19, 54, 20, 56, 21, 58, 22, 60, 23, 62, 24, 64, 0, 66, 25, 68, 0, 70, 0, 72, 26, 74, 27, 76, 28, 78, 29, 80, 0, 82, 0, 84, 0, 86, 0, 88, 0, 90, 0, 92, 0, 94, 0, 96, 0, 98, 0, 100, 30, 102, 31, 104, 32, 106, 33, 108, 34, 110, 35, 112, 36, 114, 37, 116, 38, 118, 39, 120, 40, 122, 41, 124, 42, 126, 43, 128, 44, 130, 45, 132, 46, 134, 47, 136, 48, 138, 49, 140, 50, 142, 51, 144, 52, 146, 53, 148, 54, 150, 55, 152, 56, 154, 57, 156, 58, 158, 59, 160, 60, 162, 61, 164, 62, 166, 63, 168, 64, 170, 65, 172, 66, 174, 67, 176, 68, 178, 69, 180, 70, 182, 71, 184, 0, 186, 72, 188, 73, 190, 74, 192, 75, 194, 0, 196, 0, 198, 0, 200, 0, 202, 0, 204, 0, 206, 76, 208, 0, 210, 0, 212, 77, 214, 78, 216, 79, 218, 0, 220, 0, 222, 0, 224, 0, 226, 0, 228, 80, 230, 81, 232, 82, 234, 83, 236, 0, 238, 0, 240, 0, 242, 0, 244, 84, 246, 0, 248, 85, 250, 86, 252, 87, 254, 0, 256, 0, 258, 88, 260, 89, 262, 0, 264, 90, 266, 0, 268, 91, 270, 92, 272, 93, 274, 0, 276, 0, 278, 0, 280, 0, 282, 0, 284, 0, 286, 0, 288, 94, 290, 95, 292, 96, 294, 0, 296, 0, 298, 0, 300, 0, 302, 0, 304, 0, 306, 0, 308, 97, 310, 98, 312, 99, 314, 0, 316, 0, 318, 0, 320, 0, 322, 100, 324, 101, 326, 102, 328, 0, 330, 0, 332, 0, 334, 0, 336, 103, 338, 104, 340, 105, 342, 0, 344, 106, 346, 107, 348, 108, 350, 109, 352, 0, 354, 110, 356, 111, 358, 112, 360, 113, 362, 0, 364, 114, 366, 115, 368, 116, 370, 117, 372, 118, 374, 0, 376, 0, 378, 0, 380, 119, 382, 120, 384, 121, 386, 0, 388, 0, 390, 122, 392, 123, 394, 124, 396, 0, 398, 0, 400, 0, 402, 0, 16, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 13, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 11, 0, 9, 10, 13, 13, 32, 32, 34, 34, 44, 44, 47, 47, 58, 58, 61, 61, 91, 91, 93, 93, 124, 124, 2, 0, 42, 42, 47, 47, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 5, 0, 34, 34, 92, 92, 110, 
110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 69, 69, 101, 101, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1476, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 0, 56, 1, 0, 0, 0, 0, 58, 1, 0, 0, 0, 0, 60, 1, 0, 0, 0, 0, 62, 1, 0, 0, 0, 0, 66, 1, 0, 0, 0, 1, 68, 1, 0, 0, 0, 1, 70, 1, 0, 0, 0, 1, 72, 1, 0, 0, 0, 1, 74, 1, 0, 0, 0, 1, 76, 1, 0, 0, 0, 2, 78, 1, 0, 0, 0, 2, 100, 1, 0, 0, 0, 2, 102, 1, 0, 0, 0, 2, 104, 1, 0, 0, 0, 2, 106, 1, 0, 0, 0, 2, 108, 1, 0, 0, 0, 2, 110, 1, 0, 0, 0, 2, 112, 1, 0, 0, 0, 2, 114, 1, 0, 0, 0, 2, 116, 1, 0, 0, 0, 2, 118, 1, 0, 0, 0, 2, 120, 1, 0, 0, 0, 2, 122, 1, 0, 0, 0, 2, 124, 1, 0, 0, 0, 2, 126, 1, 0, 0, 0, 2, 128, 1, 0, 0, 0, 2, 130, 1, 0, 0, 0, 2, 132, 1, 0, 0, 0, 2, 134, 1, 0, 0, 0, 2, 136, 1, 0, 0, 0, 2, 138, 1, 0, 0, 0, 2, 140, 1, 0, 0, 0, 2, 142, 1, 0, 0, 0, 2, 144, 1, 0, 0, 0, 2, 146, 1, 0, 0, 0, 2, 148, 1, 0, 0, 0, 2, 150, 1, 0, 0, 0, 2, 152, 1, 0, 0, 0, 2, 154, 1, 0, 0, 0, 2, 156, 1, 0, 0, 0, 2, 158, 1, 0, 0, 0, 2, 160, 1, 0, 0, 0, 2, 162, 1, 0, 0, 0, 2, 164, 1, 0, 0, 0, 2, 166, 1, 0, 0, 0, 2, 168, 1, 0, 0, 0, 2, 170, 1, 0, 0, 0, 2, 172, 1, 0, 0, 0, 2, 174, 1, 0, 0, 0, 2, 176, 1, 0, 0, 0, 2, 178, 1, 0, 0, 0, 2, 180, 1, 0, 0, 0, 2, 182, 1, 0, 0, 0, 2, 186, 1, 0, 0, 0, 2, 188, 1, 0, 0, 0, 2, 190, 1, 0, 0, 0, 2, 192, 1, 0, 0, 0, 3, 194, 1, 0, 0, 0, 3, 196, 1, 0, 0, 0, 3, 198, 1, 0, 0, 0, 3, 200, 1, 0, 0, 0, 3, 202, 1, 0, 0, 0, 3, 204, 1, 0, 0, 0, 3, 206, 1, 0, 0, 0, 3, 208, 1, 0, 0, 0, 3, 210, 1, 0, 0, 0, 3, 212, 1, 0, 0, 0, 3, 214, 1, 0, 0, 0, 3, 216, 1, 0, 0, 0, 4, 218, 1, 0, 0, 0, 4, 220, 1, 0, 0, 0, 4, 222, 1, 0, 0, 0, 4, 228, 1, 0, 0, 0, 4, 230, 1, 0, 0, 0, 4, 232, 1, 0, 0, 0, 4, 234, 1, 0, 0, 0, 5, 236, 1, 0, 0, 0, 5, 238, 1, 0, 0, 0, 5, 240, 1, 0, 0, 0, 5, 242, 1, 0, 0, 0, 5, 244, 1, 0, 0, 0, 5, 246, 1, 0, 0, 0, 5, 248, 1, 0, 0, 0, 5, 250, 1, 0, 0, 0, 5, 252, 1, 0, 0, 0, 6, 254, 1, 0, 0, 0, 6, 256, 1, 0, 0, 0, 6, 258, 1, 0, 0, 0, 6, 260, 1, 0, 0, 0, 6, 264, 1, 0, 0, 0, 6, 266, 1, 0, 0, 0, 6, 268, 1, 0, 0, 0, 6, 270, 1, 0, 0, 0, 6, 272, 1, 0, 0, 0, 7, 274, 1, 0, 0, 0, 7, 276, 1, 0, 0, 0, 7, 278, 1, 0, 0, 0, 7, 280, 1, 0, 0, 0, 7, 282, 1, 0, 0, 0, 7, 284, 1, 0, 0, 0, 7, 286, 1, 0, 0, 0, 7, 288, 1, 0, 0, 0, 7, 290, 1, 0, 0, 0, 7, 292, 1, 0, 0, 0, 8, 294, 1, 0, 0, 0, 8, 296, 1, 0, 0, 0, 8, 298, 1, 0, 0, 0, 8, 300, 1, 0, 0, 0, 8, 302, 1, 0, 0, 0, 8, 304, 1, 0, 0, 0, 8, 306, 1, 0, 0, 0, 8, 308, 1, 0, 0, 0, 8, 310, 1, 0, 0, 0, 8, 312, 1, 0, 0, 0, 9, 314, 1, 0, 0, 0, 9, 316, 1, 0, 0, 0, 9, 318, 1, 0, 0, 0, 9, 320, 1, 0, 0, 0, 9, 322, 1, 0, 0, 0, 9, 324, 1, 0, 0, 0, 9, 326, 1, 0, 0, 0, 10, 328, 1, 0, 0, 0, 10, 330, 1, 0, 0, 0, 10, 332, 1, 0, 0, 0, 10, 334, 1, 0, 0, 0, 10, 336, 1, 0, 0, 0, 10, 338, 1, 0, 0, 0, 10, 340, 1, 0, 0, 0, 11, 342, 1, 0, 0, 0, 11, 344, 1, 0, 0, 0, 11, 346, 1, 0, 0, 0, 11, 348, 1, 0, 0, 0, 11, 350, 1, 0, 0, 0, 12, 352, 1, 0, 0, 0, 12, 354, 1, 0, 0, 0, 12, 356, 1, 0, 0, 0, 12, 358, 1, 0, 0, 0, 12, 360, 1, 0, 0, 0, 13, 362, 1, 0, 0, 0, 13, 364, 1, 0, 0, 0, 13, 366, 1, 0, 0, 0, 13, 368, 1, 0, 0, 0, 13, 370, 1, 0, 0, 0, 13, 372, 1, 0, 0, 0, 14, 374, 1, 0, 0, 0, 14, 376, 1, 0, 0, 0, 14, 378, 1, 0, 
0, 0, 14, 380, 1, 0, 0, 0, 14, 382, 1, 0, 0, 0, 14, 384, 1, 0, 0, 0, 15, 386, 1, 0, 0, 0, 15, 388, 1, 0, 0, 0, 15, 390, 1, 0, 0, 0, 15, 392, 1, 0, 0, 0, 15, 394, 1, 0, 0, 0, 15, 396, 1, 0, 0, 0, 15, 398, 1, 0, 0, 0, 15, 400, 1, 0, 0, 0, 15, 402, 1, 0, 0, 0, 16, 404, 1, 0, 0, 0, 18, 414, 1, 0, 0, 0, 20, 421, 1, 0, 0, 0, 22, 430, 1, 0, 0, 0, 24, 437, 1, 0, 0, 0, 26, 447, 1, 0, 0, 0, 28, 454, 1, 0, 0, 0, 30, 461, 1, 0, 0, 0, 32, 475, 1, 0, 0, 0, 34, 482, 1, 0, 0, 0, 36, 490, 1, 0, 0, 0, 38, 499, 1, 0, 0, 0, 40, 506, 1, 0, 0, 0, 42, 516, 1, 0, 0, 0, 44, 528, 1, 0, 0, 0, 46, 537, 1, 0, 0, 0, 48, 543, 1, 0, 0, 0, 50, 550, 1, 0, 0, 0, 52, 557, 1, 0, 0, 0, 54, 565, 1, 0, 0, 0, 56, 574, 1, 0, 0, 0, 58, 580, 1, 0, 0, 0, 60, 597, 1, 0, 0, 0, 62, 613, 1, 0, 0, 0, 64, 622, 1, 0, 0, 0, 66, 625, 1, 0, 0, 0, 68, 629, 1, 0, 0, 0, 70, 634, 1, 0, 0, 0, 72, 639, 1, 0, 0, 0, 74, 643, 1, 0, 0, 0, 76, 647, 1, 0, 0, 0, 78, 651, 1, 0, 0, 0, 80, 655, 1, 0, 0, 0, 82, 657, 1, 0, 0, 0, 84, 659, 1, 0, 0, 0, 86, 662, 1, 0, 0, 0, 88, 664, 1, 0, 0, 0, 90, 673, 1, 0, 0, 0, 92, 675, 1, 0, 0, 0, 94, 680, 1, 0, 0, 0, 96, 682, 1, 0, 0, 0, 98, 687, 1, 0, 0, 0, 100, 718, 1, 0, 0, 0, 102, 721, 1, 0, 0, 0, 104, 767, 1, 0, 0, 0, 106, 769, 1, 0, 0, 0, 108, 772, 1, 0, 0, 0, 110, 776, 1, 0, 0, 0, 112, 780, 1, 0, 0, 0, 114, 782, 1, 0, 0, 0, 116, 785, 1, 0, 0, 0, 118, 787, 1, 0, 0, 0, 120, 792, 1, 0, 0, 0, 122, 794, 1, 0, 0, 0, 124, 800, 1, 0, 0, 0, 126, 806, 1, 0, 0, 0, 128, 811, 1, 0, 0, 0, 130, 813, 1, 0, 0, 0, 132, 816, 1, 0, 0, 0, 134, 819, 1, 0, 0, 0, 136, 824, 1, 0, 0, 0, 138, 828, 1, 0, 0, 0, 140, 833, 1, 0, 0, 0, 142, 839, 1, 0, 0, 0, 144, 842, 1, 0, 0, 0, 146, 844, 1, 0, 0, 0, 148, 850, 1, 0, 0, 0, 150, 852, 1, 0, 0, 0, 152, 857, 1, 0, 0, 0, 154, 860, 1, 0, 0, 0, 156, 863, 1, 0, 0, 0, 158, 866, 1, 0, 0, 0, 160, 868, 1, 0, 0, 0, 162, 871, 1, 0, 0, 0, 164, 873, 1, 0, 0, 0, 166, 876, 1, 0, 0, 0, 168, 878, 1, 0, 0, 0, 170, 880, 1, 0, 0, 0, 172, 882, 1, 0, 0, 0, 174, 884, 1, 0, 0, 0, 176, 900, 1, 0, 0, 0, 178, 902, 1, 0, 0, 0, 180, 907, 1, 0, 0, 0, 182, 928, 1, 0, 0, 0, 184, 930, 1, 0, 0, 0, 186, 938, 1, 0, 0, 0, 188, 940, 1, 0, 0, 0, 190, 944, 1, 0, 0, 0, 192, 948, 1, 0, 0, 0, 194, 952, 1, 0, 0, 0, 196, 957, 1, 0, 0, 0, 198, 961, 1, 0, 0, 0, 200, 965, 1, 0, 0, 0, 202, 969, 1, 0, 0, 0, 204, 973, 1, 0, 0, 0, 206, 977, 1, 0, 0, 0, 208, 986, 1, 0, 0, 0, 210, 990, 1, 0, 0, 0, 212, 994, 1, 0, 0, 0, 214, 998, 1, 0, 0, 0, 216, 1002, 1, 0, 0, 0, 218, 1006, 1, 0, 0, 0, 220, 1011, 1, 0, 0, 0, 222, 1015, 1, 0, 0, 0, 224, 1023, 1, 0, 0, 0, 226, 1044, 1, 0, 0, 0, 228, 1048, 1, 0, 0, 0, 230, 1052, 1, 0, 0, 0, 232, 1056, 1, 0, 0, 0, 234, 1060, 1, 0, 0, 0, 236, 1064, 1, 0, 0, 0, 238, 1069, 1, 0, 0, 0, 240, 1073, 1, 0, 0, 0, 242, 1077, 1, 0, 0, 0, 244, 1081, 1, 0, 0, 0, 246, 1084, 1, 0, 0, 0, 248, 1088, 1, 0, 0, 0, 250, 1092, 1, 0, 0, 0, 252, 1096, 1, 0, 0, 0, 254, 1100, 1, 0, 0, 0, 256, 1105, 1, 0, 0, 0, 258, 1110, 1, 0, 0, 0, 260, 1115, 1, 0, 0, 0, 262, 1122, 1, 0, 0, 0, 264, 1131, 1, 0, 0, 0, 266, 1138, 1, 0, 0, 0, 268, 1142, 1, 0, 0, 0, 270, 1146, 1, 0, 0, 0, 272, 1150, 1, 0, 0, 0, 274, 1154, 1, 0, 0, 0, 276, 1160, 1, 0, 0, 0, 278, 1164, 1, 0, 0, 0, 280, 1168, 1, 0, 0, 0, 282, 1172, 1, 0, 0, 0, 284, 1176, 1, 0, 0, 0, 286, 1180, 1, 0, 0, 0, 288, 1184, 1, 0, 0, 0, 290, 1188, 1, 0, 0, 0, 292, 1192, 1, 0, 0, 0, 294, 1196, 1, 0, 0, 0, 296, 1201, 1, 0, 0, 0, 298, 1205, 1, 0, 0, 0, 300, 1209, 1, 0, 0, 0, 302, 1213, 1, 0, 0, 0, 304, 1218, 1, 0, 0, 0, 306, 1222, 1, 0, 0, 0, 308, 1226, 1, 0, 0, 0, 310, 1230, 1, 0, 0, 0, 312, 1234, 1, 0, 0, 0, 314, 1238, 
1, 0, 0, 0, 316, 1244, 1, 0, 0, 0, 318, 1248, 1, 0, 0, 0, 320, 1252, 1, 0, 0, 0, 322, 1256, 1, 0, 0, 0, 324, 1260, 1, 0, 0, 0, 326, 1264, 1, 0, 0, 0, 328, 1268, 1, 0, 0, 0, 330, 1273, 1, 0, 0, 0, 332, 1277, 1, 0, 0, 0, 334, 1281, 1, 0, 0, 0, 336, 1285, 1, 0, 0, 0, 338, 1289, 1, 0, 0, 0, 340, 1293, 1, 0, 0, 0, 342, 1297, 1, 0, 0, 0, 344, 1302, 1, 0, 0, 0, 346, 1307, 1, 0, 0, 0, 348, 1311, 1, 0, 0, 0, 350, 1315, 1, 0, 0, 0, 352, 1319, 1, 0, 0, 0, 354, 1324, 1, 0, 0, 0, 356, 1334, 1, 0, 0, 0, 358, 1338, 1, 0, 0, 0, 360, 1342, 1, 0, 0, 0, 362, 1346, 1, 0, 0, 0, 364, 1351, 1, 0, 0, 0, 366, 1358, 1, 0, 0, 0, 368, 1362, 1, 0, 0, 0, 370, 1366, 1, 0, 0, 0, 372, 1370, 1, 0, 0, 0, 374, 1374, 1, 0, 0, 0, 376, 1379, 1, 0, 0, 0, 378, 1385, 1, 0, 0, 0, 380, 1391, 1, 0, 0, 0, 382, 1395, 1, 0, 0, 0, 384, 1399, 1, 0, 0, 0, 386, 1403, 1, 0, 0, 0, 388, 1409, 1, 0, 0, 0, 390, 1415, 1, 0, 0, 0, 392, 1419, 1, 0, 0, 0, 394, 1423, 1, 0, 0, 0, 396, 1427, 1, 0, 0, 0, 398, 1433, 1, 0, 0, 0, 400, 1439, 1, 0, 0, 0, 402, 1445, 1, 0, 0, 0, 404, 405, 5, 100, 0, 0, 405, 406, 5, 105, 0, 0, 406, 407, 5, 115, 0, 0, 407, 408, 5, 115, 0, 0, 408, 409, 5, 101, 0, 0, 409, 410, 5, 99, 0, 0, 410, 411, 5, 116, 0, 0, 411, 412, 1, 0, 0, 0, 412, 413, 6, 0, 0, 0, 413, 17, 1, 0, 0, 0, 414, 415, 5, 100, 0, 0, 415, 416, 5, 114, 0, 0, 416, 417, 5, 111, 0, 0, 417, 418, 5, 112, 0, 0, 418, 419, 1, 0, 0, 0, 419, 420, 6, 1, 1, 0, 420, 19, 1, 0, 0, 0, 421, 422, 5, 101, 0, 0, 422, 423, 5, 110, 0, 0, 423, 424, 5, 114, 0, 0, 424, 425, 5, 105, 0, 0, 425, 426, 5, 99, 0, 0, 426, 427, 5, 104, 0, 0, 427, 428, 1, 0, 0, 0, 428, 429, 6, 2, 2, 0, 429, 21, 1, 0, 0, 0, 430, 431, 5, 101, 0, 0, 431, 432, 5, 118, 0, 0, 432, 433, 5, 97, 0, 0, 433, 434, 5, 108, 0, 0, 434, 435, 1, 0, 0, 0, 435, 436, 6, 3, 0, 0, 436, 23, 1, 0, 0, 0, 437, 438, 5, 101, 0, 0, 438, 439, 5, 120, 0, 0, 439, 440, 5, 112, 0, 0, 440, 441, 5, 108, 0, 0, 441, 442, 5, 97, 0, 0, 442, 443, 5, 105, 0, 0, 443, 444, 5, 110, 0, 0, 444, 445, 1, 0, 0, 0, 445, 446, 6, 4, 3, 0, 446, 25, 1, 0, 0, 0, 447, 448, 5, 102, 0, 0, 448, 449, 5, 114, 0, 0, 449, 450, 5, 111, 0, 0, 450, 451, 5, 109, 0, 0, 451, 452, 1, 0, 0, 0, 452, 453, 6, 5, 4, 0, 453, 27, 1, 0, 0, 0, 454, 455, 5, 103, 0, 0, 455, 456, 5, 114, 0, 0, 456, 457, 5, 111, 0, 0, 457, 458, 5, 107, 0, 0, 458, 459, 1, 0, 0, 0, 459, 460, 6, 6, 0, 0, 460, 29, 1, 0, 0, 0, 461, 462, 5, 105, 0, 0, 462, 463, 5, 110, 0, 0, 463, 464, 5, 108, 0, 0, 464, 465, 5, 105, 0, 0, 465, 466, 5, 110, 0, 0, 466, 467, 5, 101, 0, 0, 467, 468, 5, 115, 0, 0, 468, 469, 5, 116, 0, 0, 469, 470, 5, 97, 0, 0, 470, 471, 5, 116, 0, 0, 471, 472, 5, 115, 0, 0, 472, 473, 1, 0, 0, 0, 473, 474, 6, 7, 0, 0, 474, 31, 1, 0, 0, 0, 475, 476, 5, 107, 0, 0, 476, 477, 5, 101, 0, 0, 477, 478, 5, 101, 0, 0, 478, 479, 5, 112, 0, 0, 479, 480, 1, 0, 0, 0, 480, 481, 6, 8, 1, 0, 481, 33, 1, 0, 0, 0, 482, 483, 5, 108, 0, 0, 483, 484, 5, 105, 0, 0, 484, 485, 5, 109, 0, 0, 485, 486, 5, 105, 0, 0, 486, 487, 5, 116, 0, 0, 487, 488, 1, 0, 0, 0, 488, 489, 6, 9, 0, 0, 489, 35, 1, 0, 0, 0, 490, 491, 5, 108, 0, 0, 491, 492, 5, 111, 0, 0, 492, 493, 5, 111, 0, 0, 493, 494, 5, 107, 0, 0, 494, 495, 5, 117, 0, 0, 495, 496, 5, 112, 0, 0, 496, 497, 1, 0, 0, 0, 497, 498, 6, 10, 5, 0, 498, 37, 1, 0, 0, 0, 499, 500, 5, 109, 0, 0, 500, 501, 5, 101, 0, 0, 501, 502, 5, 116, 0, 0, 502, 503, 5, 97, 0, 0, 503, 504, 1, 0, 0, 0, 504, 505, 6, 11, 6, 0, 505, 39, 1, 0, 0, 0, 506, 507, 5, 109, 0, 0, 507, 508, 5, 101, 0, 0, 508, 509, 5, 116, 0, 0, 509, 510, 5, 114, 0, 0, 510, 511, 5, 105, 0, 0, 511, 512, 5, 99, 0, 0, 512, 513, 5, 115, 0, 0, 
513, 514, 1, 0, 0, 0, 514, 515, 6, 12, 7, 0, 515, 41, 1, 0, 0, 0, 516, 517, 5, 109, 0, 0, 517, 518, 5, 118, 0, 0, 518, 519, 5, 95, 0, 0, 519, 520, 5, 101, 0, 0, 520, 521, 5, 120, 0, 0, 521, 522, 5, 112, 0, 0, 522, 523, 5, 97, 0, 0, 523, 524, 5, 110, 0, 0, 524, 525, 5, 100, 0, 0, 525, 526, 1, 0, 0, 0, 526, 527, 6, 13, 8, 0, 527, 43, 1, 0, 0, 0, 528, 529, 5, 114, 0, 0, 529, 530, 5, 101, 0, 0, 530, 531, 5, 110, 0, 0, 531, 532, 5, 97, 0, 0, 532, 533, 5, 109, 0, 0, 533, 534, 5, 101, 0, 0, 534, 535, 1, 0, 0, 0, 535, 536, 6, 14, 9, 0, 536, 45, 1, 0, 0, 0, 537, 538, 5, 114, 0, 0, 538, 539, 5, 111, 0, 0, 539, 540, 5, 119, 0, 0, 540, 541, 1, 0, 0, 0, 541, 542, 6, 15, 0, 0, 542, 47, 1, 0, 0, 0, 543, 544, 5, 115, 0, 0, 544, 545, 5, 104, 0, 0, 545, 546, 5, 111, 0, 0, 546, 547, 5, 119, 0, 0, 547, 548, 1, 0, 0, 0, 548, 549, 6, 16, 10, 0, 549, 49, 1, 0, 0, 0, 550, 551, 5, 115, 0, 0, 551, 552, 5, 111, 0, 0, 552, 553, 5, 114, 0, 0, 553, 554, 5, 116, 0, 0, 554, 555, 1, 0, 0, 0, 555, 556, 6, 17, 0, 0, 556, 51, 1, 0, 0, 0, 557, 558, 5, 115, 0, 0, 558, 559, 5, 116, 0, 0, 559, 560, 5, 97, 0, 0, 560, 561, 5, 116, 0, 0, 561, 562, 5, 115, 0, 0, 562, 563, 1, 0, 0, 0, 563, 564, 6, 18, 0, 0, 564, 53, 1, 0, 0, 0, 565, 566, 5, 119, 0, 0, 566, 567, 5, 104, 0, 0, 567, 568, 5, 101, 0, 0, 568, 569, 5, 114, 0, 0, 569, 570, 5, 101, 0, 0, 570, 571, 1, 0, 0, 0, 571, 572, 6, 19, 0, 0, 572, 55, 1, 0, 0, 0, 573, 575, 8, 0, 0, 0, 574, 573, 1, 0, 0, 0, 575, 576, 1, 0, 0, 0, 576, 574, 1, 0, 0, 0, 576, 577, 1, 0, 0, 0, 577, 578, 1, 0, 0, 0, 578, 579, 6, 20, 0, 0, 579, 57, 1, 0, 0, 0, 580, 581, 5, 47, 0, 0, 581, 582, 5, 47, 0, 0, 582, 586, 1, 0, 0, 0, 583, 585, 8, 1, 0, 0, 584, 583, 1, 0, 0, 0, 585, 588, 1, 0, 0, 0, 586, 584, 1, 0, 0, 0, 586, 587, 1, 0, 0, 0, 587, 590, 1, 0, 0, 0, 588, 586, 1, 0, 0, 0, 589, 591, 5, 13, 0, 0, 590, 589, 1, 0, 0, 0, 590, 591, 1, 0, 0, 0, 591, 593, 1, 0, 0, 0, 592, 594, 5, 10, 0, 0, 593, 592, 1, 0, 0, 0, 593, 594, 1, 0, 0, 0, 594, 595, 1, 0, 0, 0, 595, 596, 6, 21, 11, 0, 596, 59, 1, 0, 0, 0, 597, 598, 5, 47, 0, 0, 598, 599, 5, 42, 0, 0, 599, 604, 1, 0, 0, 0, 600, 603, 3, 60, 22, 0, 601, 603, 9, 0, 0, 0, 602, 600, 1, 0, 0, 0, 602, 601, 1, 0, 0, 0, 603, 606, 1, 0, 0, 0, 604, 605, 1, 0, 0, 0, 604, 602, 1, 0, 0, 0, 605, 607, 1, 0, 0, 0, 606, 604, 1, 0, 0, 0, 607, 608, 5, 42, 0, 0, 608, 609, 5, 47, 0, 0, 609, 610, 1, 0, 0, 0, 610, 611, 6, 22, 11, 0, 611, 61, 1, 0, 0, 0, 612, 614, 7, 2, 0, 0, 613, 612, 1, 0, 0, 0, 614, 615, 1, 0, 0, 0, 615, 613, 1, 0, 0, 0, 615, 616, 1, 0, 0, 0, 616, 617, 1, 0, 0, 0, 617, 618, 6, 23, 11, 0, 618, 63, 1, 0, 0, 0, 619, 623, 8, 3, 0, 0, 620, 621, 5, 47, 0, 0, 621, 623, 8, 4, 0, 0, 622, 619, 1, 0, 0, 0, 622, 620, 1, 0, 0, 0, 623, 65, 1, 0, 0, 0, 624, 626, 3, 64, 24, 0, 625, 624, 1, 0, 0, 0, 626, 627, 1, 0, 0, 0, 627, 625, 1, 0, 0, 0, 627, 628, 1, 0, 0, 0, 628, 67, 1, 0, 0, 0, 629, 630, 3, 178, 81, 0, 630, 631, 1, 0, 0, 0, 631, 632, 6, 26, 12, 0, 632, 633, 6, 26, 13, 0, 633, 69, 1, 0, 0, 0, 634, 635, 3, 78, 31, 0, 635, 636, 1, 0, 0, 0, 636, 637, 6, 27, 14, 0, 637, 638, 6, 27, 15, 0, 638, 71, 1, 0, 0, 0, 639, 640, 3, 62, 23, 0, 640, 641, 1, 0, 0, 0, 641, 642, 6, 28, 11, 0, 642, 73, 1, 0, 0, 0, 643, 644, 3, 58, 21, 0, 644, 645, 1, 0, 0, 0, 645, 646, 6, 29, 11, 0, 646, 75, 1, 0, 0, 0, 647, 648, 3, 60, 22, 0, 648, 649, 1, 0, 0, 0, 649, 650, 6, 30, 11, 0, 650, 77, 1, 0, 0, 0, 651, 652, 5, 124, 0, 0, 652, 653, 1, 0, 0, 0, 653, 654, 6, 31, 15, 0, 654, 79, 1, 0, 0, 0, 655, 656, 7, 5, 0, 0, 656, 81, 1, 0, 0, 0, 657, 658, 7, 6, 0, 0, 658, 83, 1, 0, 0, 0, 659, 660, 5, 92, 0, 0, 660, 661, 7, 7, 
0, 0, 661, 85, 1, 0, 0, 0, 662, 663, 8, 8, 0, 0, 663, 87, 1, 0, 0, 0, 664, 666, 7, 9, 0, 0, 665, 667, 7, 10, 0, 0, 666, 665, 1, 0, 0, 0, 666, 667, 1, 0, 0, 0, 667, 669, 1, 0, 0, 0, 668, 670, 3, 80, 32, 0, 669, 668, 1, 0, 0, 0, 670, 671, 1, 0, 0, 0, 671, 669, 1, 0, 0, 0, 671, 672, 1, 0, 0, 0, 672, 89, 1, 0, 0, 0, 673, 674, 5, 64, 0, 0, 674, 91, 1, 0, 0, 0, 675, 676, 5, 96, 0, 0, 676, 93, 1, 0, 0, 0, 677, 681, 8, 11, 0, 0, 678, 679, 5, 96, 0, 0, 679, 681, 5, 96, 0, 0, 680, 677, 1, 0, 0, 0, 680, 678, 1, 0, 0, 0, 681, 95, 1, 0, 0, 0, 682, 683, 5, 95, 0, 0, 683, 97, 1, 0, 0, 0, 684, 688, 3, 82, 33, 0, 685, 688, 3, 80, 32, 0, 686, 688, 3, 96, 40, 0, 687, 684, 1, 0, 0, 0, 687, 685, 1, 0, 0, 0, 687, 686, 1, 0, 0, 0, 688, 99, 1, 0, 0, 0, 689, 694, 5, 34, 0, 0, 690, 693, 3, 84, 34, 0, 691, 693, 3, 86, 35, 0, 692, 690, 1, 0, 0, 0, 692, 691, 1, 0, 0, 0, 693, 696, 1, 0, 0, 0, 694, 692, 1, 0, 0, 0, 694, 695, 1, 0, 0, 0, 695, 697, 1, 0, 0, 0, 696, 694, 1, 0, 0, 0, 697, 719, 5, 34, 0, 0, 698, 699, 5, 34, 0, 0, 699, 700, 5, 34, 0, 0, 700, 701, 5, 34, 0, 0, 701, 705, 1, 0, 0, 0, 702, 704, 8, 1, 0, 0, 703, 702, 1, 0, 0, 0, 704, 707, 1, 0, 0, 0, 705, 706, 1, 0, 0, 0, 705, 703, 1, 0, 0, 0, 706, 708, 1, 0, 0, 0, 707, 705, 1, 0, 0, 0, 708, 709, 5, 34, 0, 0, 709, 710, 5, 34, 0, 0, 710, 711, 5, 34, 0, 0, 711, 713, 1, 0, 0, 0, 712, 714, 5, 34, 0, 0, 713, 712, 1, 0, 0, 0, 713, 714, 1, 0, 0, 0, 714, 716, 1, 0, 0, 0, 715, 717, 5, 34, 0, 0, 716, 715, 1, 0, 0, 0, 716, 717, 1, 0, 0, 0, 717, 719, 1, 0, 0, 0, 718, 689, 1, 0, 0, 0, 718, 698, 1, 0, 0, 0, 719, 101, 1, 0, 0, 0, 720, 722, 3, 80, 32, 0, 721, 720, 1, 0, 0, 0, 722, 723, 1, 0, 0, 0, 723, 721, 1, 0, 0, 0, 723, 724, 1, 0, 0, 0, 724, 103, 1, 0, 0, 0, 725, 727, 3, 80, 32, 0, 726, 725, 1, 0, 0, 0, 727, 728, 1, 0, 0, 0, 728, 726, 1, 0, 0, 0, 728, 729, 1, 0, 0, 0, 729, 730, 1, 0, 0, 0, 730, 734, 3, 120, 52, 0, 731, 733, 3, 80, 32, 0, 732, 731, 1, 0, 0, 0, 733, 736, 1, 0, 0, 0, 734, 732, 1, 0, 0, 0, 734, 735, 1, 0, 0, 0, 735, 768, 1, 0, 0, 0, 736, 734, 1, 0, 0, 0, 737, 739, 3, 120, 52, 0, 738, 740, 3, 80, 32, 0, 739, 738, 1, 0, 0, 0, 740, 741, 1, 0, 0, 0, 741, 739, 1, 0, 0, 0, 741, 742, 1, 0, 0, 0, 742, 768, 1, 0, 0, 0, 743, 745, 3, 80, 32, 0, 744, 743, 1, 0, 0, 0, 745, 746, 1, 0, 0, 0, 746, 744, 1, 0, 0, 0, 746, 747, 1, 0, 0, 0, 747, 755, 1, 0, 0, 0, 748, 752, 3, 120, 52, 0, 749, 751, 3, 80, 32, 0, 750, 749, 1, 0, 0, 0, 751, 754, 1, 0, 0, 0, 752, 750, 1, 0, 0, 0, 752, 753, 1, 0, 0, 0, 753, 756, 1, 0, 0, 0, 754, 752, 1, 0, 0, 0, 755, 748, 1, 0, 0, 0, 755, 756, 1, 0, 0, 0, 756, 757, 1, 0, 0, 0, 757, 758, 3, 88, 36, 0, 758, 768, 1, 0, 0, 0, 759, 761, 3, 120, 52, 0, 760, 762, 3, 80, 32, 0, 761, 760, 1, 0, 0, 0, 762, 763, 1, 0, 0, 0, 763, 761, 1, 0, 0, 0, 763, 764, 1, 0, 0, 0, 764, 765, 1, 0, 0, 0, 765, 766, 3, 88, 36, 0, 766, 768, 1, 0, 0, 0, 767, 726, 1, 0, 0, 0, 767, 737, 1, 0, 0, 0, 767, 744, 1, 0, 0, 0, 767, 759, 1, 0, 0, 0, 768, 105, 1, 0, 0, 0, 769, 770, 5, 98, 0, 0, 770, 771, 5, 121, 0, 0, 771, 107, 1, 0, 0, 0, 772, 773, 5, 97, 0, 0, 773, 774, 5, 110, 0, 0, 774, 775, 5, 100, 0, 0, 775, 109, 1, 0, 0, 0, 776, 777, 5, 97, 0, 0, 777, 778, 5, 115, 0, 0, 778, 779, 5, 99, 0, 0, 779, 111, 1, 0, 0, 0, 780, 781, 5, 61, 0, 0, 781, 113, 1, 0, 0, 0, 782, 783, 5, 58, 0, 0, 783, 784, 5, 58, 0, 0, 784, 115, 1, 0, 0, 0, 785, 786, 5, 44, 0, 0, 786, 117, 1, 0, 0, 0, 787, 788, 5, 100, 0, 0, 788, 789, 5, 101, 0, 0, 789, 790, 5, 115, 0, 0, 790, 791, 5, 99, 0, 0, 791, 119, 1, 0, 0, 0, 792, 793, 5, 46, 0, 0, 793, 121, 1, 0, 0, 0, 794, 795, 5, 102, 0, 0, 795, 796, 5, 97, 0, 0, 796, 797, 5, 
108, 0, 0, 797, 798, 5, 115, 0, 0, 798, 799, 5, 101, 0, 0, 799, 123, 1, 0, 0, 0, 800, 801, 5, 102, 0, 0, 801, 802, 5, 105, 0, 0, 802, 803, 5, 114, 0, 0, 803, 804, 5, 115, 0, 0, 804, 805, 5, 116, 0, 0, 805, 125, 1, 0, 0, 0, 806, 807, 5, 108, 0, 0, 807, 808, 5, 97, 0, 0, 808, 809, 5, 115, 0, 0, 809, 810, 5, 116, 0, 0, 810, 127, 1, 0, 0, 0, 811, 812, 5, 40, 0, 0, 812, 129, 1, 0, 0, 0, 813, 814, 5, 105, 0, 0, 814, 815, 5, 110, 0, 0, 815, 131, 1, 0, 0, 0, 816, 817, 5, 105, 0, 0, 817, 818, 5, 115, 0, 0, 818, 133, 1, 0, 0, 0, 819, 820, 5, 108, 0, 0, 820, 821, 5, 105, 0, 0, 821, 822, 5, 107, 0, 0, 822, 823, 5, 101, 0, 0, 823, 135, 1, 0, 0, 0, 824, 825, 5, 110, 0, 0, 825, 826, 5, 111, 0, 0, 826, 827, 5, 116, 0, 0, 827, 137, 1, 0, 0, 0, 828, 829, 5, 110, 0, 0, 829, 830, 5, 117, 0, 0, 830, 831, 5, 108, 0, 0, 831, 832, 5, 108, 0, 0, 832, 139, 1, 0, 0, 0, 833, 834, 5, 110, 0, 0, 834, 835, 5, 117, 0, 0, 835, 836, 5, 108, 0, 0, 836, 837, 5, 108, 0, 0, 837, 838, 5, 115, 0, 0, 838, 141, 1, 0, 0, 0, 839, 840, 5, 111, 0, 0, 840, 841, 5, 114, 0, 0, 841, 143, 1, 0, 0, 0, 842, 843, 5, 63, 0, 0, 843, 145, 1, 0, 0, 0, 844, 845, 5, 114, 0, 0, 845, 846, 5, 108, 0, 0, 846, 847, 5, 105, 0, 0, 847, 848, 5, 107, 0, 0, 848, 849, 5, 101, 0, 0, 849, 147, 1, 0, 0, 0, 850, 851, 5, 41, 0, 0, 851, 149, 1, 0, 0, 0, 852, 853, 5, 116, 0, 0, 853, 854, 5, 114, 0, 0, 854, 855, 5, 117, 0, 0, 855, 856, 5, 101, 0, 0, 856, 151, 1, 0, 0, 0, 857, 858, 5, 61, 0, 0, 858, 859, 5, 61, 0, 0, 859, 153, 1, 0, 0, 0, 860, 861, 5, 61, 0, 0, 861, 862, 5, 126, 0, 0, 862, 155, 1, 0, 0, 0, 863, 864, 5, 33, 0, 0, 864, 865, 5, 61, 0, 0, 865, 157, 1, 0, 0, 0, 866, 867, 5, 60, 0, 0, 867, 159, 1, 0, 0, 0, 868, 869, 5, 60, 0, 0, 869, 870, 5, 61, 0, 0, 870, 161, 1, 0, 0, 0, 871, 872, 5, 62, 0, 0, 872, 163, 1, 0, 0, 0, 873, 874, 5, 62, 0, 0, 874, 875, 5, 61, 0, 0, 875, 165, 1, 0, 0, 0, 876, 877, 5, 43, 0, 0, 877, 167, 1, 0, 0, 0, 878, 879, 5, 45, 0, 0, 879, 169, 1, 0, 0, 0, 880, 881, 5, 42, 0, 0, 881, 171, 1, 0, 0, 0, 882, 883, 5, 47, 0, 0, 883, 173, 1, 0, 0, 0, 884, 885, 5, 37, 0, 0, 885, 175, 1, 0, 0, 0, 886, 887, 3, 144, 64, 0, 887, 891, 3, 82, 33, 0, 888, 890, 3, 98, 41, 0, 889, 888, 1, 0, 0, 0, 890, 893, 1, 0, 0, 0, 891, 889, 1, 0, 0, 0, 891, 892, 1, 0, 0, 0, 892, 901, 1, 0, 0, 0, 893, 891, 1, 0, 0, 0, 894, 896, 3, 144, 64, 0, 895, 897, 3, 80, 32, 0, 896, 895, 1, 0, 0, 0, 897, 898, 1, 0, 0, 0, 898, 896, 1, 0, 0, 0, 898, 899, 1, 0, 0, 0, 899, 901, 1, 0, 0, 0, 900, 886, 1, 0, 0, 0, 900, 894, 1, 0, 0, 0, 901, 177, 1, 0, 0, 0, 902, 903, 5, 91, 0, 0, 903, 904, 1, 0, 0, 0, 904, 905, 6, 81, 0, 0, 905, 906, 6, 81, 0, 0, 906, 179, 1, 0, 0, 0, 907, 908, 5, 93, 0, 0, 908, 909, 1, 0, 0, 0, 909, 910, 6, 82, 15, 0, 910, 911, 6, 82, 15, 0, 911, 181, 1, 0, 0, 0, 912, 916, 3, 82, 33, 0, 913, 915, 3, 98, 41, 0, 914, 913, 1, 0, 0, 0, 915, 918, 1, 0, 0, 0, 916, 914, 1, 0, 0, 0, 916, 917, 1, 0, 0, 0, 917, 929, 1, 0, 0, 0, 918, 916, 1, 0, 0, 0, 919, 922, 3, 96, 40, 0, 920, 922, 3, 90, 37, 0, 921, 919, 1, 0, 0, 0, 921, 920, 1, 0, 0, 0, 922, 924, 1, 0, 0, 0, 923, 925, 3, 98, 41, 0, 924, 923, 1, 0, 0, 0, 925, 926, 1, 0, 0, 0, 926, 924, 1, 0, 0, 0, 926, 927, 1, 0, 0, 0, 927, 929, 1, 0, 0, 0, 928, 912, 1, 0, 0, 0, 928, 921, 1, 0, 0, 0, 929, 183, 1, 0, 0, 0, 930, 932, 3, 92, 38, 0, 931, 933, 3, 94, 39, 0, 932, 931, 1, 0, 0, 0, 933, 934, 1, 0, 0, 0, 934, 932, 1, 0, 0, 0, 934, 935, 1, 0, 0, 0, 935, 936, 1, 0, 0, 0, 936, 937, 3, 92, 38, 0, 937, 185, 1, 0, 0, 0, 938, 939, 3, 184, 84, 0, 939, 187, 1, 0, 0, 0, 940, 941, 3, 58, 21, 0, 941, 942, 1, 0, 0, 0, 942, 943, 6, 86, 11, 0, 943, 
189, 1, 0, 0, 0, 944, 945, 3, 60, 22, 0, 945, 946, 1, 0, 0, 0, 946, 947, 6, 87, 11, 0, 947, 191, 1, 0, 0, 0, 948, 949, 3, 62, 23, 0, 949, 950, 1, 0, 0, 0, 950, 951, 6, 88, 11, 0, 951, 193, 1, 0, 0, 0, 952, 953, 3, 78, 31, 0, 953, 954, 1, 0, 0, 0, 954, 955, 6, 89, 14, 0, 955, 956, 6, 89, 15, 0, 956, 195, 1, 0, 0, 0, 957, 958, 3, 178, 81, 0, 958, 959, 1, 0, 0, 0, 959, 960, 6, 90, 12, 0, 960, 197, 1, 0, 0, 0, 961, 962, 3, 180, 82, 0, 962, 963, 1, 0, 0, 0, 963, 964, 6, 91, 16, 0, 964, 199, 1, 0, 0, 0, 965, 966, 3, 364, 174, 0, 966, 967, 1, 0, 0, 0, 967, 968, 6, 92, 17, 0, 968, 201, 1, 0, 0, 0, 969, 970, 3, 116, 50, 0, 970, 971, 1, 0, 0, 0, 971, 972, 6, 93, 18, 0, 972, 203, 1, 0, 0, 0, 973, 974, 3, 112, 48, 0, 974, 975, 1, 0, 0, 0, 975, 976, 6, 94, 19, 0, 976, 205, 1, 0, 0, 0, 977, 978, 5, 109, 0, 0, 978, 979, 5, 101, 0, 0, 979, 980, 5, 116, 0, 0, 980, 981, 5, 97, 0, 0, 981, 982, 5, 100, 0, 0, 982, 983, 5, 97, 0, 0, 983, 984, 5, 116, 0, 0, 984, 985, 5, 97, 0, 0, 985, 207, 1, 0, 0, 0, 986, 987, 3, 66, 25, 0, 987, 988, 1, 0, 0, 0, 988, 989, 6, 96, 20, 0, 989, 209, 1, 0, 0, 0, 990, 991, 3, 100, 42, 0, 991, 992, 1, 0, 0, 0, 992, 993, 6, 97, 21, 0, 993, 211, 1, 0, 0, 0, 994, 995, 3, 58, 21, 0, 995, 996, 1, 0, 0, 0, 996, 997, 6, 98, 11, 0, 997, 213, 1, 0, 0, 0, 998, 999, 3, 60, 22, 0, 999, 1000, 1, 0, 0, 0, 1000, 1001, 6, 99, 11, 0, 1001, 215, 1, 0, 0, 0, 1002, 1003, 3, 62, 23, 0, 1003, 1004, 1, 0, 0, 0, 1004, 1005, 6, 100, 11, 0, 1005, 217, 1, 0, 0, 0, 1006, 1007, 3, 78, 31, 0, 1007, 1008, 1, 0, 0, 0, 1008, 1009, 6, 101, 14, 0, 1009, 1010, 6, 101, 15, 0, 1010, 219, 1, 0, 0, 0, 1011, 1012, 3, 120, 52, 0, 1012, 1013, 1, 0, 0, 0, 1013, 1014, 6, 102, 22, 0, 1014, 221, 1, 0, 0, 0, 1015, 1016, 3, 116, 50, 0, 1016, 1017, 1, 0, 0, 0, 1017, 1018, 6, 103, 18, 0, 1018, 223, 1, 0, 0, 0, 1019, 1024, 3, 82, 33, 0, 1020, 1024, 3, 80, 32, 0, 1021, 1024, 3, 96, 40, 0, 1022, 1024, 3, 170, 77, 0, 1023, 1019, 1, 0, 0, 0, 1023, 1020, 1, 0, 0, 0, 1023, 1021, 1, 0, 0, 0, 1023, 1022, 1, 0, 0, 0, 1024, 225, 1, 0, 0, 0, 1025, 1028, 3, 82, 33, 0, 1026, 1028, 3, 170, 77, 0, 1027, 1025, 1, 0, 0, 0, 1027, 1026, 1, 0, 0, 0, 1028, 1032, 1, 0, 0, 0, 1029, 1031, 3, 224, 104, 0, 1030, 1029, 1, 0, 0, 0, 1031, 1034, 1, 0, 0, 0, 1032, 1030, 1, 0, 0, 0, 1032, 1033, 1, 0, 0, 0, 1033, 1045, 1, 0, 0, 0, 1034, 1032, 1, 0, 0, 0, 1035, 1038, 3, 96, 40, 0, 1036, 1038, 3, 90, 37, 0, 1037, 1035, 1, 0, 0, 0, 1037, 1036, 1, 0, 0, 0, 1038, 1040, 1, 0, 0, 0, 1039, 1041, 3, 224, 104, 0, 1040, 1039, 1, 0, 0, 0, 1041, 1042, 1, 0, 0, 0, 1042, 1040, 1, 0, 0, 0, 1042, 1043, 1, 0, 0, 0, 1043, 1045, 1, 0, 0, 0, 1044, 1027, 1, 0, 0, 0, 1044, 1037, 1, 0, 0, 0, 1045, 227, 1, 0, 0, 0, 1046, 1049, 3, 226, 105, 0, 1047, 1049, 3, 184, 84, 0, 1048, 1046, 1, 0, 0, 0, 1048, 1047, 1, 0, 0, 0, 1049, 1050, 1, 0, 0, 0, 1050, 1048, 1, 0, 0, 0, 1050, 1051, 1, 0, 0, 0, 1051, 229, 1, 0, 0, 0, 1052, 1053, 3, 58, 21, 0, 1053, 1054, 1, 0, 0, 0, 1054, 1055, 6, 107, 11, 0, 1055, 231, 1, 0, 0, 0, 1056, 1057, 3, 60, 22, 0, 1057, 1058, 1, 0, 0, 0, 1058, 1059, 6, 108, 11, 0, 1059, 233, 1, 0, 0, 0, 1060, 1061, 3, 62, 23, 0, 1061, 1062, 1, 0, 0, 0, 1062, 1063, 6, 109, 11, 0, 1063, 235, 1, 0, 0, 0, 1064, 1065, 3, 78, 31, 0, 1065, 1066, 1, 0, 0, 0, 1066, 1067, 6, 110, 14, 0, 1067, 1068, 6, 110, 15, 0, 1068, 237, 1, 0, 0, 0, 1069, 1070, 3, 112, 48, 0, 1070, 1071, 1, 0, 0, 0, 1071, 1072, 6, 111, 19, 0, 1072, 239, 1, 0, 0, 0, 1073, 1074, 3, 116, 50, 0, 1074, 1075, 1, 0, 0, 0, 1075, 1076, 6, 112, 18, 0, 1076, 241, 1, 0, 0, 0, 1077, 1078, 3, 120, 52, 0, 1078, 1079, 1, 0, 0, 0, 1079, 1080, 6, 
113, 22, 0, 1080, 243, 1, 0, 0, 0, 1081, 1082, 5, 97, 0, 0, 1082, 1083, 5, 115, 0, 0, 1083, 245, 1, 0, 0, 0, 1084, 1085, 3, 228, 106, 0, 1085, 1086, 1, 0, 0, 0, 1086, 1087, 6, 115, 23, 0, 1087, 247, 1, 0, 0, 0, 1088, 1089, 3, 58, 21, 0, 1089, 1090, 1, 0, 0, 0, 1090, 1091, 6, 116, 11, 0, 1091, 249, 1, 0, 0, 0, 1092, 1093, 3, 60, 22, 0, 1093, 1094, 1, 0, 0, 0, 1094, 1095, 6, 117, 11, 0, 1095, 251, 1, 0, 0, 0, 1096, 1097, 3, 62, 23, 0, 1097, 1098, 1, 0, 0, 0, 1098, 1099, 6, 118, 11, 0, 1099, 253, 1, 0, 0, 0, 1100, 1101, 3, 78, 31, 0, 1101, 1102, 1, 0, 0, 0, 1102, 1103, 6, 119, 14, 0, 1103, 1104, 6, 119, 15, 0, 1104, 255, 1, 0, 0, 0, 1105, 1106, 3, 178, 81, 0, 1106, 1107, 1, 0, 0, 0, 1107, 1108, 6, 120, 12, 0, 1108, 1109, 6, 120, 24, 0, 1109, 257, 1, 0, 0, 0, 1110, 1111, 5, 111, 0, 0, 1111, 1112, 5, 110, 0, 0, 1112, 1113, 1, 0, 0, 0, 1113, 1114, 6, 121, 25, 0, 1114, 259, 1, 0, 0, 0, 1115, 1116, 5, 119, 0, 0, 1116, 1117, 5, 105, 0, 0, 1117, 1118, 5, 116, 0, 0, 1118, 1119, 5, 104, 0, 0, 1119, 1120, 1, 0, 0, 0, 1120, 1121, 6, 122, 25, 0, 1121, 261, 1, 0, 0, 0, 1122, 1123, 8, 12, 0, 0, 1123, 263, 1, 0, 0, 0, 1124, 1126, 3, 262, 123, 0, 1125, 1124, 1, 0, 0, 0, 1126, 1127, 1, 0, 0, 0, 1127, 1125, 1, 0, 0, 0, 1127, 1128, 1, 0, 0, 0, 1128, 1129, 1, 0, 0, 0, 1129, 1130, 3, 364, 174, 0, 1130, 1132, 1, 0, 0, 0, 1131, 1125, 1, 0, 0, 0, 1131, 1132, 1, 0, 0, 0, 1132, 1134, 1, 0, 0, 0, 1133, 1135, 3, 262, 123, 0, 1134, 1133, 1, 0, 0, 0, 1135, 1136, 1, 0, 0, 0, 1136, 1134, 1, 0, 0, 0, 1136, 1137, 1, 0, 0, 0, 1137, 265, 1, 0, 0, 0, 1138, 1139, 3, 264, 124, 0, 1139, 1140, 1, 0, 0, 0, 1140, 1141, 6, 125, 26, 0, 1141, 267, 1, 0, 0, 0, 1142, 1143, 3, 58, 21, 0, 1143, 1144, 1, 0, 0, 0, 1144, 1145, 6, 126, 11, 0, 1145, 269, 1, 0, 0, 0, 1146, 1147, 3, 60, 22, 0, 1147, 1148, 1, 0, 0, 0, 1148, 1149, 6, 127, 11, 0, 1149, 271, 1, 0, 0, 0, 1150, 1151, 3, 62, 23, 0, 1151, 1152, 1, 0, 0, 0, 1152, 1153, 6, 128, 11, 0, 1153, 273, 1, 0, 0, 0, 1154, 1155, 3, 78, 31, 0, 1155, 1156, 1, 0, 0, 0, 1156, 1157, 6, 129, 14, 0, 1157, 1158, 6, 129, 15, 0, 1158, 1159, 6, 129, 15, 0, 1159, 275, 1, 0, 0, 0, 1160, 1161, 3, 112, 48, 0, 1161, 1162, 1, 0, 0, 0, 1162, 1163, 6, 130, 19, 0, 1163, 277, 1, 0, 0, 0, 1164, 1165, 3, 116, 50, 0, 1165, 1166, 1, 0, 0, 0, 1166, 1167, 6, 131, 18, 0, 1167, 279, 1, 0, 0, 0, 1168, 1169, 3, 120, 52, 0, 1169, 1170, 1, 0, 0, 0, 1170, 1171, 6, 132, 22, 0, 1171, 281, 1, 0, 0, 0, 1172, 1173, 3, 260, 122, 0, 1173, 1174, 1, 0, 0, 0, 1174, 1175, 6, 133, 27, 0, 1175, 283, 1, 0, 0, 0, 1176, 1177, 3, 228, 106, 0, 1177, 1178, 1, 0, 0, 0, 1178, 1179, 6, 134, 23, 0, 1179, 285, 1, 0, 0, 0, 1180, 1181, 3, 186, 85, 0, 1181, 1182, 1, 0, 0, 0, 1182, 1183, 6, 135, 28, 0, 1183, 287, 1, 0, 0, 0, 1184, 1185, 3, 58, 21, 0, 1185, 1186, 1, 0, 0, 0, 1186, 1187, 6, 136, 11, 0, 1187, 289, 1, 0, 0, 0, 1188, 1189, 3, 60, 22, 0, 1189, 1190, 1, 0, 0, 0, 1190, 1191, 6, 137, 11, 0, 1191, 291, 1, 0, 0, 0, 1192, 1193, 3, 62, 23, 0, 1193, 1194, 1, 0, 0, 0, 1194, 1195, 6, 138, 11, 0, 1195, 293, 1, 0, 0, 0, 1196, 1197, 3, 78, 31, 0, 1197, 1198, 1, 0, 0, 0, 1198, 1199, 6, 139, 14, 0, 1199, 1200, 6, 139, 15, 0, 1200, 295, 1, 0, 0, 0, 1201, 1202, 3, 364, 174, 0, 1202, 1203, 1, 0, 0, 0, 1203, 1204, 6, 140, 17, 0, 1204, 297, 1, 0, 0, 0, 1205, 1206, 3, 116, 50, 0, 1206, 1207, 1, 0, 0, 0, 1207, 1208, 6, 141, 18, 0, 1208, 299, 1, 0, 0, 0, 1209, 1210, 3, 120, 52, 0, 1210, 1211, 1, 0, 0, 0, 1211, 1212, 6, 142, 22, 0, 1212, 301, 1, 0, 0, 0, 1213, 1214, 3, 258, 121, 0, 1214, 1215, 1, 0, 0, 0, 1215, 1216, 6, 143, 29, 0, 1216, 1217, 6, 143, 30, 0, 1217, 303, 1, 0, 
0, 0, 1218, 1219, 3, 66, 25, 0, 1219, 1220, 1, 0, 0, 0, 1220, 1221, 6, 144, 20, 0, 1221, 305, 1, 0, 0, 0, 1222, 1223, 3, 100, 42, 0, 1223, 1224, 1, 0, 0, 0, 1224, 1225, 6, 145, 21, 0, 1225, 307, 1, 0, 0, 0, 1226, 1227, 3, 58, 21, 0, 1227, 1228, 1, 0, 0, 0, 1228, 1229, 6, 146, 11, 0, 1229, 309, 1, 0, 0, 0, 1230, 1231, 3, 60, 22, 0, 1231, 1232, 1, 0, 0, 0, 1232, 1233, 6, 147, 11, 0, 1233, 311, 1, 0, 0, 0, 1234, 1235, 3, 62, 23, 0, 1235, 1236, 1, 0, 0, 0, 1236, 1237, 6, 148, 11, 0, 1237, 313, 1, 0, 0, 0, 1238, 1239, 3, 78, 31, 0, 1239, 1240, 1, 0, 0, 0, 1240, 1241, 6, 149, 14, 0, 1241, 1242, 6, 149, 15, 0, 1242, 1243, 6, 149, 15, 0, 1243, 315, 1, 0, 0, 0, 1244, 1245, 3, 116, 50, 0, 1245, 1246, 1, 0, 0, 0, 1246, 1247, 6, 150, 18, 0, 1247, 317, 1, 0, 0, 0, 1248, 1249, 3, 120, 52, 0, 1249, 1250, 1, 0, 0, 0, 1250, 1251, 6, 151, 22, 0, 1251, 319, 1, 0, 0, 0, 1252, 1253, 3, 228, 106, 0, 1253, 1254, 1, 0, 0, 0, 1254, 1255, 6, 152, 23, 0, 1255, 321, 1, 0, 0, 0, 1256, 1257, 3, 58, 21, 0, 1257, 1258, 1, 0, 0, 0, 1258, 1259, 6, 153, 11, 0, 1259, 323, 1, 0, 0, 0, 1260, 1261, 3, 60, 22, 0, 1261, 1262, 1, 0, 0, 0, 1262, 1263, 6, 154, 11, 0, 1263, 325, 1, 0, 0, 0, 1264, 1265, 3, 62, 23, 0, 1265, 1266, 1, 0, 0, 0, 1266, 1267, 6, 155, 11, 0, 1267, 327, 1, 0, 0, 0, 1268, 1269, 3, 78, 31, 0, 1269, 1270, 1, 0, 0, 0, 1270, 1271, 6, 156, 14, 0, 1271, 1272, 6, 156, 15, 0, 1272, 329, 1, 0, 0, 0, 1273, 1274, 3, 120, 52, 0, 1274, 1275, 1, 0, 0, 0, 1275, 1276, 6, 157, 22, 0, 1276, 331, 1, 0, 0, 0, 1277, 1278, 3, 186, 85, 0, 1278, 1279, 1, 0, 0, 0, 1279, 1280, 6, 158, 28, 0, 1280, 333, 1, 0, 0, 0, 1281, 1282, 3, 182, 83, 0, 1282, 1283, 1, 0, 0, 0, 1283, 1284, 6, 159, 31, 0, 1284, 335, 1, 0, 0, 0, 1285, 1286, 3, 58, 21, 0, 1286, 1287, 1, 0, 0, 0, 1287, 1288, 6, 160, 11, 0, 1288, 337, 1, 0, 0, 0, 1289, 1290, 3, 60, 22, 0, 1290, 1291, 1, 0, 0, 0, 1291, 1292, 6, 161, 11, 0, 1292, 339, 1, 0, 0, 0, 1293, 1294, 3, 62, 23, 0, 1294, 1295, 1, 0, 0, 0, 1295, 1296, 6, 162, 11, 0, 1296, 341, 1, 0, 0, 0, 1297, 1298, 3, 78, 31, 0, 1298, 1299, 1, 0, 0, 0, 1299, 1300, 6, 163, 14, 0, 1300, 1301, 6, 163, 15, 0, 1301, 343, 1, 0, 0, 0, 1302, 1303, 5, 105, 0, 0, 1303, 1304, 5, 110, 0, 0, 1304, 1305, 5, 102, 0, 0, 1305, 1306, 5, 111, 0, 0, 1306, 345, 1, 0, 0, 0, 1307, 1308, 3, 58, 21, 0, 1308, 1309, 1, 0, 0, 0, 1309, 1310, 6, 165, 11, 0, 1310, 347, 1, 0, 0, 0, 1311, 1312, 3, 60, 22, 0, 1312, 1313, 1, 0, 0, 0, 1313, 1314, 6, 166, 11, 0, 1314, 349, 1, 0, 0, 0, 1315, 1316, 3, 62, 23, 0, 1316, 1317, 1, 0, 0, 0, 1317, 1318, 6, 167, 11, 0, 1318, 351, 1, 0, 0, 0, 1319, 1320, 3, 78, 31, 0, 1320, 1321, 1, 0, 0, 0, 1321, 1322, 6, 168, 14, 0, 1322, 1323, 6, 168, 15, 0, 1323, 353, 1, 0, 0, 0, 1324, 1325, 5, 102, 0, 0, 1325, 1326, 5, 117, 0, 0, 1326, 1327, 5, 110, 0, 0, 1327, 1328, 5, 99, 0, 0, 1328, 1329, 5, 116, 0, 0, 1329, 1330, 5, 105, 0, 0, 1330, 1331, 5, 111, 0, 0, 1331, 1332, 5, 110, 0, 0, 1332, 1333, 5, 115, 0, 0, 1333, 355, 1, 0, 0, 0, 1334, 1335, 3, 58, 21, 0, 1335, 1336, 1, 0, 0, 0, 1336, 1337, 6, 170, 11, 0, 1337, 357, 1, 0, 0, 0, 1338, 1339, 3, 60, 22, 0, 1339, 1340, 1, 0, 0, 0, 1340, 1341, 6, 171, 11, 0, 1341, 359, 1, 0, 0, 0, 1342, 1343, 3, 62, 23, 0, 1343, 1344, 1, 0, 0, 0, 1344, 1345, 6, 172, 11, 0, 1345, 361, 1, 0, 0, 0, 1346, 1347, 3, 180, 82, 0, 1347, 1348, 1, 0, 0, 0, 1348, 1349, 6, 173, 16, 0, 1349, 1350, 6, 173, 15, 0, 1350, 363, 1, 0, 0, 0, 1351, 1352, 5, 58, 0, 0, 1352, 365, 1, 0, 0, 0, 1353, 1359, 3, 90, 37, 0, 1354, 1359, 3, 80, 32, 0, 1355, 1359, 3, 120, 52, 0, 1356, 1359, 3, 82, 33, 0, 1357, 1359, 3, 96, 40, 0, 1358, 1353, 1, 
0, 0, 0, 1358, 1354, 1, 0, 0, 0, 1358, 1355, 1, 0, 0, 0, 1358, 1356, 1, 0, 0, 0, 1358, 1357, 1, 0, 0, 0, 1359, 1360, 1, 0, 0, 0, 1360, 1358, 1, 0, 0, 0, 1360, 1361, 1, 0, 0, 0, 1361, 367, 1, 0, 0, 0, 1362, 1363, 3, 58, 21, 0, 1363, 1364, 1, 0, 0, 0, 1364, 1365, 6, 176, 11, 0, 1365, 369, 1, 0, 0, 0, 1366, 1367, 3, 60, 22, 0, 1367, 1368, 1, 0, 0, 0, 1368, 1369, 6, 177, 11, 0, 1369, 371, 1, 0, 0, 0, 1370, 1371, 3, 62, 23, 0, 1371, 1372, 1, 0, 0, 0, 1372, 1373, 6, 178, 11, 0, 1373, 373, 1, 0, 0, 0, 1374, 1375, 3, 78, 31, 0, 1375, 1376, 1, 0, 0, 0, 1376, 1377, 6, 179, 14, 0, 1377, 1378, 6, 179, 15, 0, 1378, 375, 1, 0, 0, 0, 1379, 1380, 3, 66, 25, 0, 1380, 1381, 1, 0, 0, 0, 1381, 1382, 6, 180, 20, 0, 1382, 1383, 6, 180, 15, 0, 1383, 1384, 6, 180, 32, 0, 1384, 377, 1, 0, 0, 0, 1385, 1386, 3, 100, 42, 0, 1386, 1387, 1, 0, 0, 0, 1387, 1388, 6, 181, 21, 0, 1388, 1389, 6, 181, 15, 0, 1389, 1390, 6, 181, 32, 0, 1390, 379, 1, 0, 0, 0, 1391, 1392, 3, 58, 21, 0, 1392, 1393, 1, 0, 0, 0, 1393, 1394, 6, 182, 11, 0, 1394, 381, 1, 0, 0, 0, 1395, 1396, 3, 60, 22, 0, 1396, 1397, 1, 0, 0, 0, 1397, 1398, 6, 183, 11, 0, 1398, 383, 1, 0, 0, 0, 1399, 1400, 3, 62, 23, 0, 1400, 1401, 1, 0, 0, 0, 1401, 1402, 6, 184, 11, 0, 1402, 385, 1, 0, 0, 0, 1403, 1404, 3, 364, 174, 0, 1404, 1405, 1, 0, 0, 0, 1405, 1406, 6, 185, 17, 0, 1406, 1407, 6, 185, 15, 0, 1407, 1408, 6, 185, 7, 0, 1408, 387, 1, 0, 0, 0, 1409, 1410, 3, 116, 50, 0, 1410, 1411, 1, 0, 0, 0, 1411, 1412, 6, 186, 18, 0, 1412, 1413, 6, 186, 15, 0, 1413, 1414, 6, 186, 7, 0, 1414, 389, 1, 0, 0, 0, 1415, 1416, 3, 58, 21, 0, 1416, 1417, 1, 0, 0, 0, 1417, 1418, 6, 187, 11, 0, 1418, 391, 1, 0, 0, 0, 1419, 1420, 3, 60, 22, 0, 1420, 1421, 1, 0, 0, 0, 1421, 1422, 6, 188, 11, 0, 1422, 393, 1, 0, 0, 0, 1423, 1424, 3, 62, 23, 0, 1424, 1425, 1, 0, 0, 0, 1425, 1426, 6, 189, 11, 0, 1426, 395, 1, 0, 0, 0, 1427, 1428, 3, 186, 85, 0, 1428, 1429, 1, 0, 0, 0, 1429, 1430, 6, 190, 15, 0, 1430, 1431, 6, 190, 0, 0, 1431, 1432, 6, 190, 28, 0, 1432, 397, 1, 0, 0, 0, 1433, 1434, 3, 182, 83, 0, 1434, 1435, 1, 0, 0, 0, 1435, 1436, 6, 191, 15, 0, 1436, 1437, 6, 191, 0, 0, 1437, 1438, 6, 191, 31, 0, 1438, 399, 1, 0, 0, 0, 1439, 1440, 3, 106, 45, 0, 1440, 1441, 1, 0, 0, 0, 1441, 1442, 6, 192, 15, 0, 1442, 1443, 6, 192, 0, 0, 1443, 1444, 6, 192, 33, 0, 1444, 401, 1, 0, 0, 0, 1445, 1446, 3, 78, 31, 0, 1446, 1447, 1, 0, 0, 0, 1447, 1448, 6, 193, 14, 0, 1448, 1449, 6, 193, 15, 0, 1449, 403, 1, 0, 0, 0, 65, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 576, 586, 590, 593, 602, 604, 615, 622, 627, 666, 671, 680, 687, 692, 694, 705, 713, 716, 718, 723, 728, 734, 741, 746, 752, 755, 763, 767, 891, 898, 900, 916, 921, 926, 928, 934, 1023, 1027, 1032, 1037, 1042, 1044, 1048, 1050, 1127, 1131, 1136, 1358, 1360, 34, 5, 2, 0, 5, 4, 0, 5, 6, 0, 5, 1, 0, 5, 3, 0, 5, 8, 0, 5, 12, 0, 5, 14, 0, 5, 10, 0, 5, 5, 0, 5, 11, 0, 0, 1, 0, 7, 69, 0, 5, 0, 0, 7, 29, 0, 4, 0, 0, 7, 70, 0, 7, 114, 0, 7, 38, 0, 7, 36, 0, 7, 25, 0, 7, 30, 0, 7, 40, 0, 7, 80, 0, 5, 13, 0, 5, 7, 0, 7, 90, 0, 7, 89, 0, 7, 72, 0, 7, 88, 0, 5, 9, 0, 7, 71, 0, 5, 15, 0, 7, 33, 0] \ No newline at end of file +[4, 0, 125, 1458, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 
23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 2, 190, 7, 190, 2, 191, 7, 191, 2, 192, 7, 192, 2, 193, 7, 193, 2, 194, 7, 194, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 
17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 4, 20, 577, 8, 20, 11, 20, 12, 20, 578, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 5, 21, 587, 8, 21, 10, 21, 12, 21, 590, 9, 21, 1, 21, 3, 21, 593, 8, 21, 1, 21, 3, 21, 596, 8, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 605, 8, 22, 10, 22, 12, 22, 608, 9, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 4, 23, 616, 8, 23, 11, 23, 12, 23, 617, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 3, 24, 625, 8, 24, 1, 25, 4, 25, 628, 8, 25, 11, 25, 12, 25, 629, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 1, 29, 1, 29, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1, 35, 1, 35, 1, 36, 1, 36, 3, 36, 669, 8, 36, 1, 36, 4, 36, 672, 8, 36, 11, 36, 12, 36, 673, 1, 37, 1, 37, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 3, 39, 683, 8, 39, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 3, 41, 690, 8, 41, 1, 42, 1, 42, 1, 42, 5, 42, 695, 8, 42, 10, 42, 12, 42, 698, 9, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 5, 42, 706, 8, 42, 10, 42, 12, 42, 709, 9, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 3, 42, 716, 8, 42, 1, 42, 3, 42, 719, 8, 42, 3, 42, 721, 8, 42, 1, 43, 4, 43, 724, 8, 43, 11, 43, 12, 43, 725, 1, 44, 4, 44, 729, 8, 44, 11, 44, 12, 44, 730, 1, 44, 1, 44, 5, 44, 735, 8, 44, 10, 44, 12, 44, 738, 9, 44, 1, 44, 1, 44, 4, 44, 742, 8, 44, 11, 44, 12, 44, 743, 1, 44, 4, 44, 747, 8, 44, 11, 44, 12, 44, 748, 1, 44, 1, 44, 5, 44, 753, 8, 44, 10, 44, 12, 44, 756, 9, 44, 3, 44, 758, 8, 44, 1, 44, 1, 44, 1, 44, 1, 44, 4, 44, 764, 8, 44, 11, 44, 12, 44, 765, 1, 44, 1, 44, 3, 44, 770, 8, 44, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 69, 1, 70, 1, 70, 1, 70, 1, 71, 1, 71, 1, 71, 1, 72, 1, 72, 1, 73, 1, 73, 1, 73, 1, 74, 1, 74, 1, 75, 1, 75, 1, 75, 1, 76, 1, 76, 1, 77, 1, 77, 1, 78, 1, 78, 1, 79, 1, 79, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 5, 81, 898, 8, 81, 10, 81, 12, 81, 901, 9, 81, 1, 81, 1, 81, 4, 81, 905, 8, 81, 11, 81, 12, 81, 906, 3, 81, 909, 8, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 5, 84, 923, 8, 84, 10, 84, 12, 84, 926, 9, 84, 1, 84, 1, 84, 3, 84, 930, 8, 84, 1, 84, 4, 84, 933, 8, 84, 11, 84, 12, 84, 934, 3, 84, 937, 8, 84, 1, 85, 1, 85, 4, 85, 941, 8, 85, 11, 85, 12, 85, 942, 1, 85, 1, 85, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 96, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 1, 102, 
1, 102, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 3, 105, 1032, 8, 105, 1, 106, 1, 106, 3, 106, 1036, 8, 106, 1, 106, 5, 106, 1039, 8, 106, 10, 106, 12, 106, 1042, 9, 106, 1, 106, 1, 106, 3, 106, 1046, 8, 106, 1, 106, 4, 106, 1049, 8, 106, 11, 106, 12, 106, 1050, 3, 106, 1053, 8, 106, 1, 107, 1, 107, 4, 107, 1057, 8, 107, 11, 107, 12, 107, 1058, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 125, 4, 125, 1134, 8, 125, 11, 125, 12, 125, 1135, 1, 125, 1, 125, 3, 125, 1140, 8, 125, 1, 125, 4, 125, 1143, 8, 125, 11, 125, 12, 125, 1144, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 160, 1, 160, 1, 161, 1, 161, 1, 161, 1, 161, 1, 162, 1, 162, 1, 162, 1, 162, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 169, 1, 170, 1, 170, 1, 170, 1, 170, 1, 170, 1, 170, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 174, 1, 174, 1, 174, 1, 174, 1, 174, 1, 175, 1, 175, 1, 176, 1, 176, 1, 176, 1, 176, 1, 176, 4, 176, 1367, 8, 176, 11, 176, 12, 176, 1368, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 180, 1, 181, 1, 181, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 182, 1, 182, 1, 182, 1, 183, 1, 183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 1, 185, 1, 185, 1, 185, 1, 185, 1, 186, 1, 186, 1, 186, 1, 186, 1, 186, 1, 186, 1, 187, 1, 187, 1, 187, 1, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 1, 190, 1, 190, 1, 190, 1, 190, 1, 191, 1, 191, 1, 191, 1, 191, 1, 191, 1, 191, 1, 192, 1, 192, 1, 192, 1, 192, 1, 192, 1, 192, 1, 193, 
1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 2, 606, 707, 0, 195, 16, 1, 18, 2, 20, 3, 22, 4, 24, 5, 26, 6, 28, 7, 30, 8, 32, 9, 34, 10, 36, 11, 38, 12, 40, 13, 42, 14, 44, 15, 46, 16, 48, 17, 50, 18, 52, 19, 54, 20, 56, 21, 58, 22, 60, 23, 62, 24, 64, 0, 66, 25, 68, 0, 70, 0, 72, 26, 74, 27, 76, 28, 78, 29, 80, 0, 82, 0, 84, 0, 86, 0, 88, 0, 90, 0, 92, 0, 94, 0, 96, 0, 98, 0, 100, 30, 102, 31, 104, 32, 106, 33, 108, 34, 110, 35, 112, 36, 114, 37, 116, 38, 118, 39, 120, 40, 122, 41, 124, 42, 126, 43, 128, 44, 130, 45, 132, 46, 134, 47, 136, 48, 138, 49, 140, 50, 142, 51, 144, 52, 146, 53, 148, 54, 150, 55, 152, 56, 154, 57, 156, 58, 158, 59, 160, 60, 162, 61, 164, 62, 166, 63, 168, 64, 170, 65, 172, 66, 174, 67, 176, 68, 178, 69, 180, 70, 182, 71, 184, 72, 186, 0, 188, 73, 190, 74, 192, 75, 194, 76, 196, 0, 198, 0, 200, 0, 202, 0, 204, 0, 206, 0, 208, 77, 210, 0, 212, 0, 214, 78, 216, 79, 218, 80, 220, 0, 222, 0, 224, 0, 226, 0, 228, 0, 230, 81, 232, 82, 234, 83, 236, 84, 238, 0, 240, 0, 242, 0, 244, 0, 246, 85, 248, 0, 250, 86, 252, 87, 254, 88, 256, 0, 258, 0, 260, 89, 262, 90, 264, 0, 266, 91, 268, 0, 270, 92, 272, 93, 274, 94, 276, 0, 278, 0, 280, 0, 282, 0, 284, 0, 286, 0, 288, 0, 290, 95, 292, 96, 294, 97, 296, 0, 298, 0, 300, 0, 302, 0, 304, 0, 306, 0, 308, 0, 310, 98, 312, 99, 314, 100, 316, 0, 318, 0, 320, 0, 322, 0, 324, 101, 326, 102, 328, 103, 330, 0, 332, 0, 334, 0, 336, 0, 338, 104, 340, 105, 342, 106, 344, 0, 346, 107, 348, 108, 350, 109, 352, 110, 354, 0, 356, 111, 358, 112, 360, 113, 362, 114, 364, 0, 366, 115, 368, 116, 370, 117, 372, 118, 374, 119, 376, 0, 378, 0, 380, 0, 382, 120, 384, 121, 386, 122, 388, 0, 390, 0, 392, 123, 394, 124, 396, 125, 398, 0, 400, 0, 402, 0, 404, 0, 16, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 13, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 11, 0, 9, 10, 13, 13, 32, 32, 34, 34, 44, 44, 47, 47, 58, 58, 61, 61, 91, 91, 93, 93, 124, 124, 2, 0, 42, 42, 47, 47, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 5, 0, 34, 34, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 69, 69, 101, 101, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1484, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 0, 56, 1, 0, 0, 0, 0, 58, 1, 0, 0, 0, 0, 60, 1, 0, 0, 0, 0, 62, 1, 0, 0, 0, 0, 66, 1, 0, 0, 0, 1, 68, 1, 0, 0, 0, 1, 70, 1, 0, 0, 0, 1, 72, 1, 0, 0, 0, 1, 74, 1, 0, 0, 0, 1, 76, 1, 0, 0, 0, 2, 78, 1, 0, 0, 0, 2, 100, 1, 0, 0, 0, 2, 102, 1, 0, 0, 0, 2, 104, 1, 0, 0, 0, 2, 106, 1, 0, 0, 0, 2, 108, 1, 0, 0, 0, 2, 110, 1, 0, 0, 0, 2, 112, 1, 0, 0, 0, 2, 114, 1, 0, 0, 0, 2, 116, 1, 0, 0, 0, 2, 118, 1, 0, 0, 0, 2, 120, 1, 0, 0, 0, 2, 122, 1, 0, 0, 0, 2, 124, 1, 0, 0, 0, 2, 126, 1, 0, 0, 0, 2, 128, 1, 0, 0, 0, 2, 130, 1, 0, 0, 0, 2, 132, 1, 0, 0, 0, 2, 134, 1, 0, 0, 0, 2, 136, 1, 0, 0, 0, 2, 138, 1, 0, 0, 0, 2, 140, 1, 0, 0, 0, 2, 142, 1, 0, 0, 0, 2, 144, 1, 0, 0, 0, 2, 146, 1, 0, 0, 0, 2, 148, 1, 0, 0, 0, 2, 150, 1, 0, 0, 0, 2, 152, 1, 0, 0, 0, 2, 154, 1, 0, 0, 0, 2, 156, 1, 0, 0, 0, 2, 158, 1, 0, 0, 0, 2, 160, 1, 0, 0, 0, 2, 162, 1, 
0, 0, 0, 2, 164, 1, 0, 0, 0, 2, 166, 1, 0, 0, 0, 2, 168, 1, 0, 0, 0, 2, 170, 1, 0, 0, 0, 2, 172, 1, 0, 0, 0, 2, 174, 1, 0, 0, 0, 2, 176, 1, 0, 0, 0, 2, 178, 1, 0, 0, 0, 2, 180, 1, 0, 0, 0, 2, 182, 1, 0, 0, 0, 2, 184, 1, 0, 0, 0, 2, 188, 1, 0, 0, 0, 2, 190, 1, 0, 0, 0, 2, 192, 1, 0, 0, 0, 2, 194, 1, 0, 0, 0, 3, 196, 1, 0, 0, 0, 3, 198, 1, 0, 0, 0, 3, 200, 1, 0, 0, 0, 3, 202, 1, 0, 0, 0, 3, 204, 1, 0, 0, 0, 3, 206, 1, 0, 0, 0, 3, 208, 1, 0, 0, 0, 3, 210, 1, 0, 0, 0, 3, 212, 1, 0, 0, 0, 3, 214, 1, 0, 0, 0, 3, 216, 1, 0, 0, 0, 3, 218, 1, 0, 0, 0, 4, 220, 1, 0, 0, 0, 4, 222, 1, 0, 0, 0, 4, 224, 1, 0, 0, 0, 4, 230, 1, 0, 0, 0, 4, 232, 1, 0, 0, 0, 4, 234, 1, 0, 0, 0, 4, 236, 1, 0, 0, 0, 5, 238, 1, 0, 0, 0, 5, 240, 1, 0, 0, 0, 5, 242, 1, 0, 0, 0, 5, 244, 1, 0, 0, 0, 5, 246, 1, 0, 0, 0, 5, 248, 1, 0, 0, 0, 5, 250, 1, 0, 0, 0, 5, 252, 1, 0, 0, 0, 5, 254, 1, 0, 0, 0, 6, 256, 1, 0, 0, 0, 6, 258, 1, 0, 0, 0, 6, 260, 1, 0, 0, 0, 6, 262, 1, 0, 0, 0, 6, 266, 1, 0, 0, 0, 6, 268, 1, 0, 0, 0, 6, 270, 1, 0, 0, 0, 6, 272, 1, 0, 0, 0, 6, 274, 1, 0, 0, 0, 7, 276, 1, 0, 0, 0, 7, 278, 1, 0, 0, 0, 7, 280, 1, 0, 0, 0, 7, 282, 1, 0, 0, 0, 7, 284, 1, 0, 0, 0, 7, 286, 1, 0, 0, 0, 7, 288, 1, 0, 0, 0, 7, 290, 1, 0, 0, 0, 7, 292, 1, 0, 0, 0, 7, 294, 1, 0, 0, 0, 8, 296, 1, 0, 0, 0, 8, 298, 1, 0, 0, 0, 8, 300, 1, 0, 0, 0, 8, 302, 1, 0, 0, 0, 8, 304, 1, 0, 0, 0, 8, 306, 1, 0, 0, 0, 8, 308, 1, 0, 0, 0, 8, 310, 1, 0, 0, 0, 8, 312, 1, 0, 0, 0, 8, 314, 1, 0, 0, 0, 9, 316, 1, 0, 0, 0, 9, 318, 1, 0, 0, 0, 9, 320, 1, 0, 0, 0, 9, 322, 1, 0, 0, 0, 9, 324, 1, 0, 0, 0, 9, 326, 1, 0, 0, 0, 9, 328, 1, 0, 0, 0, 10, 330, 1, 0, 0, 0, 10, 332, 1, 0, 0, 0, 10, 334, 1, 0, 0, 0, 10, 336, 1, 0, 0, 0, 10, 338, 1, 0, 0, 0, 10, 340, 1, 0, 0, 0, 10, 342, 1, 0, 0, 0, 11, 344, 1, 0, 0, 0, 11, 346, 1, 0, 0, 0, 11, 348, 1, 0, 0, 0, 11, 350, 1, 0, 0, 0, 11, 352, 1, 0, 0, 0, 12, 354, 1, 0, 0, 0, 12, 356, 1, 0, 0, 0, 12, 358, 1, 0, 0, 0, 12, 360, 1, 0, 0, 0, 12, 362, 1, 0, 0, 0, 13, 364, 1, 0, 0, 0, 13, 366, 1, 0, 0, 0, 13, 368, 1, 0, 0, 0, 13, 370, 1, 0, 0, 0, 13, 372, 1, 0, 0, 0, 13, 374, 1, 0, 0, 0, 14, 376, 1, 0, 0, 0, 14, 378, 1, 0, 0, 0, 14, 380, 1, 0, 0, 0, 14, 382, 1, 0, 0, 0, 14, 384, 1, 0, 0, 0, 14, 386, 1, 0, 0, 0, 15, 388, 1, 0, 0, 0, 15, 390, 1, 0, 0, 0, 15, 392, 1, 0, 0, 0, 15, 394, 1, 0, 0, 0, 15, 396, 1, 0, 0, 0, 15, 398, 1, 0, 0, 0, 15, 400, 1, 0, 0, 0, 15, 402, 1, 0, 0, 0, 15, 404, 1, 0, 0, 0, 16, 406, 1, 0, 0, 0, 18, 416, 1, 0, 0, 0, 20, 423, 1, 0, 0, 0, 22, 432, 1, 0, 0, 0, 24, 439, 1, 0, 0, 0, 26, 449, 1, 0, 0, 0, 28, 456, 1, 0, 0, 0, 30, 463, 1, 0, 0, 0, 32, 477, 1, 0, 0, 0, 34, 484, 1, 0, 0, 0, 36, 492, 1, 0, 0, 0, 38, 501, 1, 0, 0, 0, 40, 508, 1, 0, 0, 0, 42, 518, 1, 0, 0, 0, 44, 530, 1, 0, 0, 0, 46, 539, 1, 0, 0, 0, 48, 545, 1, 0, 0, 0, 50, 552, 1, 0, 0, 0, 52, 559, 1, 0, 0, 0, 54, 567, 1, 0, 0, 0, 56, 576, 1, 0, 0, 0, 58, 582, 1, 0, 0, 0, 60, 599, 1, 0, 0, 0, 62, 615, 1, 0, 0, 0, 64, 624, 1, 0, 0, 0, 66, 627, 1, 0, 0, 0, 68, 631, 1, 0, 0, 0, 70, 636, 1, 0, 0, 0, 72, 641, 1, 0, 0, 0, 74, 645, 1, 0, 0, 0, 76, 649, 1, 0, 0, 0, 78, 653, 1, 0, 0, 0, 80, 657, 1, 0, 0, 0, 82, 659, 1, 0, 0, 0, 84, 661, 1, 0, 0, 0, 86, 664, 1, 0, 0, 0, 88, 666, 1, 0, 0, 0, 90, 675, 1, 0, 0, 0, 92, 677, 1, 0, 0, 0, 94, 682, 1, 0, 0, 0, 96, 684, 1, 0, 0, 0, 98, 689, 1, 0, 0, 0, 100, 720, 1, 0, 0, 0, 102, 723, 1, 0, 0, 0, 104, 769, 1, 0, 0, 0, 106, 771, 1, 0, 0, 0, 108, 774, 1, 0, 0, 0, 110, 778, 1, 0, 0, 0, 112, 782, 1, 0, 0, 0, 114, 784, 1, 0, 0, 0, 116, 787, 1, 0, 0, 0, 118, 789, 1, 0, 0, 0, 120, 794, 1, 0, 0, 0, 122, 796, 1, 0, 0, 0, 124, 802, 1, 0, 0, 0, 
126, 808, 1, 0, 0, 0, 128, 811, 1, 0, 0, 0, 130, 814, 1, 0, 0, 0, 132, 819, 1, 0, 0, 0, 134, 824, 1, 0, 0, 0, 136, 826, 1, 0, 0, 0, 138, 832, 1, 0, 0, 0, 140, 836, 1, 0, 0, 0, 142, 841, 1, 0, 0, 0, 144, 847, 1, 0, 0, 0, 146, 850, 1, 0, 0, 0, 148, 852, 1, 0, 0, 0, 150, 858, 1, 0, 0, 0, 152, 860, 1, 0, 0, 0, 154, 865, 1, 0, 0, 0, 156, 868, 1, 0, 0, 0, 158, 871, 1, 0, 0, 0, 160, 874, 1, 0, 0, 0, 162, 876, 1, 0, 0, 0, 164, 879, 1, 0, 0, 0, 166, 881, 1, 0, 0, 0, 168, 884, 1, 0, 0, 0, 170, 886, 1, 0, 0, 0, 172, 888, 1, 0, 0, 0, 174, 890, 1, 0, 0, 0, 176, 892, 1, 0, 0, 0, 178, 908, 1, 0, 0, 0, 180, 910, 1, 0, 0, 0, 182, 915, 1, 0, 0, 0, 184, 936, 1, 0, 0, 0, 186, 938, 1, 0, 0, 0, 188, 946, 1, 0, 0, 0, 190, 948, 1, 0, 0, 0, 192, 952, 1, 0, 0, 0, 194, 956, 1, 0, 0, 0, 196, 960, 1, 0, 0, 0, 198, 965, 1, 0, 0, 0, 200, 969, 1, 0, 0, 0, 202, 973, 1, 0, 0, 0, 204, 977, 1, 0, 0, 0, 206, 981, 1, 0, 0, 0, 208, 985, 1, 0, 0, 0, 210, 994, 1, 0, 0, 0, 212, 998, 1, 0, 0, 0, 214, 1002, 1, 0, 0, 0, 216, 1006, 1, 0, 0, 0, 218, 1010, 1, 0, 0, 0, 220, 1014, 1, 0, 0, 0, 222, 1019, 1, 0, 0, 0, 224, 1023, 1, 0, 0, 0, 226, 1031, 1, 0, 0, 0, 228, 1052, 1, 0, 0, 0, 230, 1056, 1, 0, 0, 0, 232, 1060, 1, 0, 0, 0, 234, 1064, 1, 0, 0, 0, 236, 1068, 1, 0, 0, 0, 238, 1072, 1, 0, 0, 0, 240, 1077, 1, 0, 0, 0, 242, 1081, 1, 0, 0, 0, 244, 1085, 1, 0, 0, 0, 246, 1089, 1, 0, 0, 0, 248, 1092, 1, 0, 0, 0, 250, 1096, 1, 0, 0, 0, 252, 1100, 1, 0, 0, 0, 254, 1104, 1, 0, 0, 0, 256, 1108, 1, 0, 0, 0, 258, 1113, 1, 0, 0, 0, 260, 1118, 1, 0, 0, 0, 262, 1123, 1, 0, 0, 0, 264, 1130, 1, 0, 0, 0, 266, 1139, 1, 0, 0, 0, 268, 1146, 1, 0, 0, 0, 270, 1150, 1, 0, 0, 0, 272, 1154, 1, 0, 0, 0, 274, 1158, 1, 0, 0, 0, 276, 1162, 1, 0, 0, 0, 278, 1168, 1, 0, 0, 0, 280, 1172, 1, 0, 0, 0, 282, 1176, 1, 0, 0, 0, 284, 1180, 1, 0, 0, 0, 286, 1184, 1, 0, 0, 0, 288, 1188, 1, 0, 0, 0, 290, 1192, 1, 0, 0, 0, 292, 1196, 1, 0, 0, 0, 294, 1200, 1, 0, 0, 0, 296, 1204, 1, 0, 0, 0, 298, 1209, 1, 0, 0, 0, 300, 1213, 1, 0, 0, 0, 302, 1217, 1, 0, 0, 0, 304, 1221, 1, 0, 0, 0, 306, 1226, 1, 0, 0, 0, 308, 1230, 1, 0, 0, 0, 310, 1234, 1, 0, 0, 0, 312, 1238, 1, 0, 0, 0, 314, 1242, 1, 0, 0, 0, 316, 1246, 1, 0, 0, 0, 318, 1252, 1, 0, 0, 0, 320, 1256, 1, 0, 0, 0, 322, 1260, 1, 0, 0, 0, 324, 1264, 1, 0, 0, 0, 326, 1268, 1, 0, 0, 0, 328, 1272, 1, 0, 0, 0, 330, 1276, 1, 0, 0, 0, 332, 1281, 1, 0, 0, 0, 334, 1285, 1, 0, 0, 0, 336, 1289, 1, 0, 0, 0, 338, 1293, 1, 0, 0, 0, 340, 1297, 1, 0, 0, 0, 342, 1301, 1, 0, 0, 0, 344, 1305, 1, 0, 0, 0, 346, 1310, 1, 0, 0, 0, 348, 1315, 1, 0, 0, 0, 350, 1319, 1, 0, 0, 0, 352, 1323, 1, 0, 0, 0, 354, 1327, 1, 0, 0, 0, 356, 1332, 1, 0, 0, 0, 358, 1342, 1, 0, 0, 0, 360, 1346, 1, 0, 0, 0, 362, 1350, 1, 0, 0, 0, 364, 1354, 1, 0, 0, 0, 366, 1359, 1, 0, 0, 0, 368, 1366, 1, 0, 0, 0, 370, 1370, 1, 0, 0, 0, 372, 1374, 1, 0, 0, 0, 374, 1378, 1, 0, 0, 0, 376, 1382, 1, 0, 0, 0, 378, 1387, 1, 0, 0, 0, 380, 1393, 1, 0, 0, 0, 382, 1399, 1, 0, 0, 0, 384, 1403, 1, 0, 0, 0, 386, 1407, 1, 0, 0, 0, 388, 1411, 1, 0, 0, 0, 390, 1417, 1, 0, 0, 0, 392, 1423, 1, 0, 0, 0, 394, 1427, 1, 0, 0, 0, 396, 1431, 1, 0, 0, 0, 398, 1435, 1, 0, 0, 0, 400, 1441, 1, 0, 0, 0, 402, 1447, 1, 0, 0, 0, 404, 1453, 1, 0, 0, 0, 406, 407, 5, 100, 0, 0, 407, 408, 5, 105, 0, 0, 408, 409, 5, 115, 0, 0, 409, 410, 5, 115, 0, 0, 410, 411, 5, 101, 0, 0, 411, 412, 5, 99, 0, 0, 412, 413, 5, 116, 0, 0, 413, 414, 1, 0, 0, 0, 414, 415, 6, 0, 0, 0, 415, 17, 1, 0, 0, 0, 416, 417, 5, 100, 0, 0, 417, 418, 5, 114, 0, 0, 418, 419, 5, 111, 0, 0, 419, 420, 5, 112, 0, 0, 420, 421, 1, 0, 0, 0, 421, 422, 6, 1, 1, 0, 422, 
19, 1, 0, 0, 0, 423, 424, 5, 101, 0, 0, 424, 425, 5, 110, 0, 0, 425, 426, 5, 114, 0, 0, 426, 427, 5, 105, 0, 0, 427, 428, 5, 99, 0, 0, 428, 429, 5, 104, 0, 0, 429, 430, 1, 0, 0, 0, 430, 431, 6, 2, 2, 0, 431, 21, 1, 0, 0, 0, 432, 433, 5, 101, 0, 0, 433, 434, 5, 118, 0, 0, 434, 435, 5, 97, 0, 0, 435, 436, 5, 108, 0, 0, 436, 437, 1, 0, 0, 0, 437, 438, 6, 3, 0, 0, 438, 23, 1, 0, 0, 0, 439, 440, 5, 101, 0, 0, 440, 441, 5, 120, 0, 0, 441, 442, 5, 112, 0, 0, 442, 443, 5, 108, 0, 0, 443, 444, 5, 97, 0, 0, 444, 445, 5, 105, 0, 0, 445, 446, 5, 110, 0, 0, 446, 447, 1, 0, 0, 0, 447, 448, 6, 4, 3, 0, 448, 25, 1, 0, 0, 0, 449, 450, 5, 102, 0, 0, 450, 451, 5, 114, 0, 0, 451, 452, 5, 111, 0, 0, 452, 453, 5, 109, 0, 0, 453, 454, 1, 0, 0, 0, 454, 455, 6, 5, 4, 0, 455, 27, 1, 0, 0, 0, 456, 457, 5, 103, 0, 0, 457, 458, 5, 114, 0, 0, 458, 459, 5, 111, 0, 0, 459, 460, 5, 107, 0, 0, 460, 461, 1, 0, 0, 0, 461, 462, 6, 6, 0, 0, 462, 29, 1, 0, 0, 0, 463, 464, 5, 105, 0, 0, 464, 465, 5, 110, 0, 0, 465, 466, 5, 108, 0, 0, 466, 467, 5, 105, 0, 0, 467, 468, 5, 110, 0, 0, 468, 469, 5, 101, 0, 0, 469, 470, 5, 115, 0, 0, 470, 471, 5, 116, 0, 0, 471, 472, 5, 97, 0, 0, 472, 473, 5, 116, 0, 0, 473, 474, 5, 115, 0, 0, 474, 475, 1, 0, 0, 0, 475, 476, 6, 7, 0, 0, 476, 31, 1, 0, 0, 0, 477, 478, 5, 107, 0, 0, 478, 479, 5, 101, 0, 0, 479, 480, 5, 101, 0, 0, 480, 481, 5, 112, 0, 0, 481, 482, 1, 0, 0, 0, 482, 483, 6, 8, 1, 0, 483, 33, 1, 0, 0, 0, 484, 485, 5, 108, 0, 0, 485, 486, 5, 105, 0, 0, 486, 487, 5, 109, 0, 0, 487, 488, 5, 105, 0, 0, 488, 489, 5, 116, 0, 0, 489, 490, 1, 0, 0, 0, 490, 491, 6, 9, 0, 0, 491, 35, 1, 0, 0, 0, 492, 493, 5, 108, 0, 0, 493, 494, 5, 111, 0, 0, 494, 495, 5, 111, 0, 0, 495, 496, 5, 107, 0, 0, 496, 497, 5, 117, 0, 0, 497, 498, 5, 112, 0, 0, 498, 499, 1, 0, 0, 0, 499, 500, 6, 10, 5, 0, 500, 37, 1, 0, 0, 0, 501, 502, 5, 109, 0, 0, 502, 503, 5, 101, 0, 0, 503, 504, 5, 116, 0, 0, 504, 505, 5, 97, 0, 0, 505, 506, 1, 0, 0, 0, 506, 507, 6, 11, 6, 0, 507, 39, 1, 0, 0, 0, 508, 509, 5, 109, 0, 0, 509, 510, 5, 101, 0, 0, 510, 511, 5, 116, 0, 0, 511, 512, 5, 114, 0, 0, 512, 513, 5, 105, 0, 0, 513, 514, 5, 99, 0, 0, 514, 515, 5, 115, 0, 0, 515, 516, 1, 0, 0, 0, 516, 517, 6, 12, 7, 0, 517, 41, 1, 0, 0, 0, 518, 519, 5, 109, 0, 0, 519, 520, 5, 118, 0, 0, 520, 521, 5, 95, 0, 0, 521, 522, 5, 101, 0, 0, 522, 523, 5, 120, 0, 0, 523, 524, 5, 112, 0, 0, 524, 525, 5, 97, 0, 0, 525, 526, 5, 110, 0, 0, 526, 527, 5, 100, 0, 0, 527, 528, 1, 0, 0, 0, 528, 529, 6, 13, 8, 0, 529, 43, 1, 0, 0, 0, 530, 531, 5, 114, 0, 0, 531, 532, 5, 101, 0, 0, 532, 533, 5, 110, 0, 0, 533, 534, 5, 97, 0, 0, 534, 535, 5, 109, 0, 0, 535, 536, 5, 101, 0, 0, 536, 537, 1, 0, 0, 0, 537, 538, 6, 14, 9, 0, 538, 45, 1, 0, 0, 0, 539, 540, 5, 114, 0, 0, 540, 541, 5, 111, 0, 0, 541, 542, 5, 119, 0, 0, 542, 543, 1, 0, 0, 0, 543, 544, 6, 15, 0, 0, 544, 47, 1, 0, 0, 0, 545, 546, 5, 115, 0, 0, 546, 547, 5, 104, 0, 0, 547, 548, 5, 111, 0, 0, 548, 549, 5, 119, 0, 0, 549, 550, 1, 0, 0, 0, 550, 551, 6, 16, 10, 0, 551, 49, 1, 0, 0, 0, 552, 553, 5, 115, 0, 0, 553, 554, 5, 111, 0, 0, 554, 555, 5, 114, 0, 0, 555, 556, 5, 116, 0, 0, 556, 557, 1, 0, 0, 0, 557, 558, 6, 17, 0, 0, 558, 51, 1, 0, 0, 0, 559, 560, 5, 115, 0, 0, 560, 561, 5, 116, 0, 0, 561, 562, 5, 97, 0, 0, 562, 563, 5, 116, 0, 0, 563, 564, 5, 115, 0, 0, 564, 565, 1, 0, 0, 0, 565, 566, 6, 18, 0, 0, 566, 53, 1, 0, 0, 0, 567, 568, 5, 119, 0, 0, 568, 569, 5, 104, 0, 0, 569, 570, 5, 101, 0, 0, 570, 571, 5, 114, 0, 0, 571, 572, 5, 101, 0, 0, 572, 573, 1, 0, 0, 0, 573, 574, 6, 19, 0, 0, 574, 55, 1, 0, 0, 0, 575, 577, 8, 0, 
0, 0, 576, 575, 1, 0, 0, 0, 577, 578, 1, 0, 0, 0, 578, 576, 1, 0, 0, 0, 578, 579, 1, 0, 0, 0, 579, 580, 1, 0, 0, 0, 580, 581, 6, 20, 0, 0, 581, 57, 1, 0, 0, 0, 582, 583, 5, 47, 0, 0, 583, 584, 5, 47, 0, 0, 584, 588, 1, 0, 0, 0, 585, 587, 8, 1, 0, 0, 586, 585, 1, 0, 0, 0, 587, 590, 1, 0, 0, 0, 588, 586, 1, 0, 0, 0, 588, 589, 1, 0, 0, 0, 589, 592, 1, 0, 0, 0, 590, 588, 1, 0, 0, 0, 591, 593, 5, 13, 0, 0, 592, 591, 1, 0, 0, 0, 592, 593, 1, 0, 0, 0, 593, 595, 1, 0, 0, 0, 594, 596, 5, 10, 0, 0, 595, 594, 1, 0, 0, 0, 595, 596, 1, 0, 0, 0, 596, 597, 1, 0, 0, 0, 597, 598, 6, 21, 11, 0, 598, 59, 1, 0, 0, 0, 599, 600, 5, 47, 0, 0, 600, 601, 5, 42, 0, 0, 601, 606, 1, 0, 0, 0, 602, 605, 3, 60, 22, 0, 603, 605, 9, 0, 0, 0, 604, 602, 1, 0, 0, 0, 604, 603, 1, 0, 0, 0, 605, 608, 1, 0, 0, 0, 606, 607, 1, 0, 0, 0, 606, 604, 1, 0, 0, 0, 607, 609, 1, 0, 0, 0, 608, 606, 1, 0, 0, 0, 609, 610, 5, 42, 0, 0, 610, 611, 5, 47, 0, 0, 611, 612, 1, 0, 0, 0, 612, 613, 6, 22, 11, 0, 613, 61, 1, 0, 0, 0, 614, 616, 7, 2, 0, 0, 615, 614, 1, 0, 0, 0, 616, 617, 1, 0, 0, 0, 617, 615, 1, 0, 0, 0, 617, 618, 1, 0, 0, 0, 618, 619, 1, 0, 0, 0, 619, 620, 6, 23, 11, 0, 620, 63, 1, 0, 0, 0, 621, 625, 8, 3, 0, 0, 622, 623, 5, 47, 0, 0, 623, 625, 8, 4, 0, 0, 624, 621, 1, 0, 0, 0, 624, 622, 1, 0, 0, 0, 625, 65, 1, 0, 0, 0, 626, 628, 3, 64, 24, 0, 627, 626, 1, 0, 0, 0, 628, 629, 1, 0, 0, 0, 629, 627, 1, 0, 0, 0, 629, 630, 1, 0, 0, 0, 630, 67, 1, 0, 0, 0, 631, 632, 3, 180, 82, 0, 632, 633, 1, 0, 0, 0, 633, 634, 6, 26, 12, 0, 634, 635, 6, 26, 13, 0, 635, 69, 1, 0, 0, 0, 636, 637, 3, 78, 31, 0, 637, 638, 1, 0, 0, 0, 638, 639, 6, 27, 14, 0, 639, 640, 6, 27, 15, 0, 640, 71, 1, 0, 0, 0, 641, 642, 3, 62, 23, 0, 642, 643, 1, 0, 0, 0, 643, 644, 6, 28, 11, 0, 644, 73, 1, 0, 0, 0, 645, 646, 3, 58, 21, 0, 646, 647, 1, 0, 0, 0, 647, 648, 6, 29, 11, 0, 648, 75, 1, 0, 0, 0, 649, 650, 3, 60, 22, 0, 650, 651, 1, 0, 0, 0, 651, 652, 6, 30, 11, 0, 652, 77, 1, 0, 0, 0, 653, 654, 5, 124, 0, 0, 654, 655, 1, 0, 0, 0, 655, 656, 6, 31, 15, 0, 656, 79, 1, 0, 0, 0, 657, 658, 7, 5, 0, 0, 658, 81, 1, 0, 0, 0, 659, 660, 7, 6, 0, 0, 660, 83, 1, 0, 0, 0, 661, 662, 5, 92, 0, 0, 662, 663, 7, 7, 0, 0, 663, 85, 1, 0, 0, 0, 664, 665, 8, 8, 0, 0, 665, 87, 1, 0, 0, 0, 666, 668, 7, 9, 0, 0, 667, 669, 7, 10, 0, 0, 668, 667, 1, 0, 0, 0, 668, 669, 1, 0, 0, 0, 669, 671, 1, 0, 0, 0, 670, 672, 3, 80, 32, 0, 671, 670, 1, 0, 0, 0, 672, 673, 1, 0, 0, 0, 673, 671, 1, 0, 0, 0, 673, 674, 1, 0, 0, 0, 674, 89, 1, 0, 0, 0, 675, 676, 5, 64, 0, 0, 676, 91, 1, 0, 0, 0, 677, 678, 5, 96, 0, 0, 678, 93, 1, 0, 0, 0, 679, 683, 8, 11, 0, 0, 680, 681, 5, 96, 0, 0, 681, 683, 5, 96, 0, 0, 682, 679, 1, 0, 0, 0, 682, 680, 1, 0, 0, 0, 683, 95, 1, 0, 0, 0, 684, 685, 5, 95, 0, 0, 685, 97, 1, 0, 0, 0, 686, 690, 3, 82, 33, 0, 687, 690, 3, 80, 32, 0, 688, 690, 3, 96, 40, 0, 689, 686, 1, 0, 0, 0, 689, 687, 1, 0, 0, 0, 689, 688, 1, 0, 0, 0, 690, 99, 1, 0, 0, 0, 691, 696, 5, 34, 0, 0, 692, 695, 3, 84, 34, 0, 693, 695, 3, 86, 35, 0, 694, 692, 1, 0, 0, 0, 694, 693, 1, 0, 0, 0, 695, 698, 1, 0, 0, 0, 696, 694, 1, 0, 0, 0, 696, 697, 1, 0, 0, 0, 697, 699, 1, 0, 0, 0, 698, 696, 1, 0, 0, 0, 699, 721, 5, 34, 0, 0, 700, 701, 5, 34, 0, 0, 701, 702, 5, 34, 0, 0, 702, 703, 5, 34, 0, 0, 703, 707, 1, 0, 0, 0, 704, 706, 8, 1, 0, 0, 705, 704, 1, 0, 0, 0, 706, 709, 1, 0, 0, 0, 707, 708, 1, 0, 0, 0, 707, 705, 1, 0, 0, 0, 708, 710, 1, 0, 0, 0, 709, 707, 1, 0, 0, 0, 710, 711, 5, 34, 0, 0, 711, 712, 5, 34, 0, 0, 712, 713, 5, 34, 0, 0, 713, 715, 1, 0, 0, 0, 714, 716, 5, 34, 0, 0, 715, 714, 1, 0, 0, 0, 715, 716, 1, 0, 0, 0, 716, 718, 1, 0, 
0, 0, 717, 719, 5, 34, 0, 0, 718, 717, 1, 0, 0, 0, 718, 719, 1, 0, 0, 0, 719, 721, 1, 0, 0, 0, 720, 691, 1, 0, 0, 0, 720, 700, 1, 0, 0, 0, 721, 101, 1, 0, 0, 0, 722, 724, 3, 80, 32, 0, 723, 722, 1, 0, 0, 0, 724, 725, 1, 0, 0, 0, 725, 723, 1, 0, 0, 0, 725, 726, 1, 0, 0, 0, 726, 103, 1, 0, 0, 0, 727, 729, 3, 80, 32, 0, 728, 727, 1, 0, 0, 0, 729, 730, 1, 0, 0, 0, 730, 728, 1, 0, 0, 0, 730, 731, 1, 0, 0, 0, 731, 732, 1, 0, 0, 0, 732, 736, 3, 120, 52, 0, 733, 735, 3, 80, 32, 0, 734, 733, 1, 0, 0, 0, 735, 738, 1, 0, 0, 0, 736, 734, 1, 0, 0, 0, 736, 737, 1, 0, 0, 0, 737, 770, 1, 0, 0, 0, 738, 736, 1, 0, 0, 0, 739, 741, 3, 120, 52, 0, 740, 742, 3, 80, 32, 0, 741, 740, 1, 0, 0, 0, 742, 743, 1, 0, 0, 0, 743, 741, 1, 0, 0, 0, 743, 744, 1, 0, 0, 0, 744, 770, 1, 0, 0, 0, 745, 747, 3, 80, 32, 0, 746, 745, 1, 0, 0, 0, 747, 748, 1, 0, 0, 0, 748, 746, 1, 0, 0, 0, 748, 749, 1, 0, 0, 0, 749, 757, 1, 0, 0, 0, 750, 754, 3, 120, 52, 0, 751, 753, 3, 80, 32, 0, 752, 751, 1, 0, 0, 0, 753, 756, 1, 0, 0, 0, 754, 752, 1, 0, 0, 0, 754, 755, 1, 0, 0, 0, 755, 758, 1, 0, 0, 0, 756, 754, 1, 0, 0, 0, 757, 750, 1, 0, 0, 0, 757, 758, 1, 0, 0, 0, 758, 759, 1, 0, 0, 0, 759, 760, 3, 88, 36, 0, 760, 770, 1, 0, 0, 0, 761, 763, 3, 120, 52, 0, 762, 764, 3, 80, 32, 0, 763, 762, 1, 0, 0, 0, 764, 765, 1, 0, 0, 0, 765, 763, 1, 0, 0, 0, 765, 766, 1, 0, 0, 0, 766, 767, 1, 0, 0, 0, 767, 768, 3, 88, 36, 0, 768, 770, 1, 0, 0, 0, 769, 728, 1, 0, 0, 0, 769, 739, 1, 0, 0, 0, 769, 746, 1, 0, 0, 0, 769, 761, 1, 0, 0, 0, 770, 105, 1, 0, 0, 0, 771, 772, 5, 98, 0, 0, 772, 773, 5, 121, 0, 0, 773, 107, 1, 0, 0, 0, 774, 775, 5, 97, 0, 0, 775, 776, 5, 110, 0, 0, 776, 777, 5, 100, 0, 0, 777, 109, 1, 0, 0, 0, 778, 779, 5, 97, 0, 0, 779, 780, 5, 115, 0, 0, 780, 781, 5, 99, 0, 0, 781, 111, 1, 0, 0, 0, 782, 783, 5, 61, 0, 0, 783, 113, 1, 0, 0, 0, 784, 785, 5, 58, 0, 0, 785, 786, 5, 58, 0, 0, 786, 115, 1, 0, 0, 0, 787, 788, 5, 44, 0, 0, 788, 117, 1, 0, 0, 0, 789, 790, 5, 100, 0, 0, 790, 791, 5, 101, 0, 0, 791, 792, 5, 115, 0, 0, 792, 793, 5, 99, 0, 0, 793, 119, 1, 0, 0, 0, 794, 795, 5, 46, 0, 0, 795, 121, 1, 0, 0, 0, 796, 797, 5, 102, 0, 0, 797, 798, 5, 97, 0, 0, 798, 799, 5, 108, 0, 0, 799, 800, 5, 115, 0, 0, 800, 801, 5, 101, 0, 0, 801, 123, 1, 0, 0, 0, 802, 803, 5, 102, 0, 0, 803, 804, 5, 105, 0, 0, 804, 805, 5, 114, 0, 0, 805, 806, 5, 115, 0, 0, 806, 807, 5, 116, 0, 0, 807, 125, 1, 0, 0, 0, 808, 809, 5, 105, 0, 0, 809, 810, 5, 110, 0, 0, 810, 127, 1, 0, 0, 0, 811, 812, 5, 105, 0, 0, 812, 813, 5, 115, 0, 0, 813, 129, 1, 0, 0, 0, 814, 815, 5, 108, 0, 0, 815, 816, 5, 97, 0, 0, 816, 817, 5, 115, 0, 0, 817, 818, 5, 116, 0, 0, 818, 131, 1, 0, 0, 0, 819, 820, 5, 108, 0, 0, 820, 821, 5, 105, 0, 0, 821, 822, 5, 107, 0, 0, 822, 823, 5, 101, 0, 0, 823, 133, 1, 0, 0, 0, 824, 825, 5, 40, 0, 0, 825, 135, 1, 0, 0, 0, 826, 827, 5, 109, 0, 0, 827, 828, 5, 97, 0, 0, 828, 829, 5, 116, 0, 0, 829, 830, 5, 99, 0, 0, 830, 831, 5, 104, 0, 0, 831, 137, 1, 0, 0, 0, 832, 833, 5, 110, 0, 0, 833, 834, 5, 111, 0, 0, 834, 835, 5, 116, 0, 0, 835, 139, 1, 0, 0, 0, 836, 837, 5, 110, 0, 0, 837, 838, 5, 117, 0, 0, 838, 839, 5, 108, 0, 0, 839, 840, 5, 108, 0, 0, 840, 141, 1, 0, 0, 0, 841, 842, 5, 110, 0, 0, 842, 843, 5, 117, 0, 0, 843, 844, 5, 108, 0, 0, 844, 845, 5, 108, 0, 0, 845, 846, 5, 115, 0, 0, 846, 143, 1, 0, 0, 0, 847, 848, 5, 111, 0, 0, 848, 849, 5, 114, 0, 0, 849, 145, 1, 0, 0, 0, 850, 851, 5, 63, 0, 0, 851, 147, 1, 0, 0, 0, 852, 853, 5, 114, 0, 0, 853, 854, 5, 108, 0, 0, 854, 855, 5, 105, 0, 0, 855, 856, 5, 107, 0, 0, 856, 857, 5, 101, 0, 0, 857, 149, 1, 0, 0, 0, 858, 859, 5, 
41, 0, 0, 859, 151, 1, 0, 0, 0, 860, 861, 5, 116, 0, 0, 861, 862, 5, 114, 0, 0, 862, 863, 5, 117, 0, 0, 863, 864, 5, 101, 0, 0, 864, 153, 1, 0, 0, 0, 865, 866, 5, 61, 0, 0, 866, 867, 5, 61, 0, 0, 867, 155, 1, 0, 0, 0, 868, 869, 5, 61, 0, 0, 869, 870, 5, 126, 0, 0, 870, 157, 1, 0, 0, 0, 871, 872, 5, 33, 0, 0, 872, 873, 5, 61, 0, 0, 873, 159, 1, 0, 0, 0, 874, 875, 5, 60, 0, 0, 875, 161, 1, 0, 0, 0, 876, 877, 5, 60, 0, 0, 877, 878, 5, 61, 0, 0, 878, 163, 1, 0, 0, 0, 879, 880, 5, 62, 0, 0, 880, 165, 1, 0, 0, 0, 881, 882, 5, 62, 0, 0, 882, 883, 5, 61, 0, 0, 883, 167, 1, 0, 0, 0, 884, 885, 5, 43, 0, 0, 885, 169, 1, 0, 0, 0, 886, 887, 5, 45, 0, 0, 887, 171, 1, 0, 0, 0, 888, 889, 5, 42, 0, 0, 889, 173, 1, 0, 0, 0, 890, 891, 5, 47, 0, 0, 891, 175, 1, 0, 0, 0, 892, 893, 5, 37, 0, 0, 893, 177, 1, 0, 0, 0, 894, 895, 3, 146, 65, 0, 895, 899, 3, 82, 33, 0, 896, 898, 3, 98, 41, 0, 897, 896, 1, 0, 0, 0, 898, 901, 1, 0, 0, 0, 899, 897, 1, 0, 0, 0, 899, 900, 1, 0, 0, 0, 900, 909, 1, 0, 0, 0, 901, 899, 1, 0, 0, 0, 902, 904, 3, 146, 65, 0, 903, 905, 3, 80, 32, 0, 904, 903, 1, 0, 0, 0, 905, 906, 1, 0, 0, 0, 906, 904, 1, 0, 0, 0, 906, 907, 1, 0, 0, 0, 907, 909, 1, 0, 0, 0, 908, 894, 1, 0, 0, 0, 908, 902, 1, 0, 0, 0, 909, 179, 1, 0, 0, 0, 910, 911, 5, 91, 0, 0, 911, 912, 1, 0, 0, 0, 912, 913, 6, 82, 0, 0, 913, 914, 6, 82, 0, 0, 914, 181, 1, 0, 0, 0, 915, 916, 5, 93, 0, 0, 916, 917, 1, 0, 0, 0, 917, 918, 6, 83, 15, 0, 918, 919, 6, 83, 15, 0, 919, 183, 1, 0, 0, 0, 920, 924, 3, 82, 33, 0, 921, 923, 3, 98, 41, 0, 922, 921, 1, 0, 0, 0, 923, 926, 1, 0, 0, 0, 924, 922, 1, 0, 0, 0, 924, 925, 1, 0, 0, 0, 925, 937, 1, 0, 0, 0, 926, 924, 1, 0, 0, 0, 927, 930, 3, 96, 40, 0, 928, 930, 3, 90, 37, 0, 929, 927, 1, 0, 0, 0, 929, 928, 1, 0, 0, 0, 930, 932, 1, 0, 0, 0, 931, 933, 3, 98, 41, 0, 932, 931, 1, 0, 0, 0, 933, 934, 1, 0, 0, 0, 934, 932, 1, 0, 0, 0, 934, 935, 1, 0, 0, 0, 935, 937, 1, 0, 0, 0, 936, 920, 1, 0, 0, 0, 936, 929, 1, 0, 0, 0, 937, 185, 1, 0, 0, 0, 938, 940, 3, 92, 38, 0, 939, 941, 3, 94, 39, 0, 940, 939, 1, 0, 0, 0, 941, 942, 1, 0, 0, 0, 942, 940, 1, 0, 0, 0, 942, 943, 1, 0, 0, 0, 943, 944, 1, 0, 0, 0, 944, 945, 3, 92, 38, 0, 945, 187, 1, 0, 0, 0, 946, 947, 3, 186, 85, 0, 947, 189, 1, 0, 0, 0, 948, 949, 3, 58, 21, 0, 949, 950, 1, 0, 0, 0, 950, 951, 6, 87, 11, 0, 951, 191, 1, 0, 0, 0, 952, 953, 3, 60, 22, 0, 953, 954, 1, 0, 0, 0, 954, 955, 6, 88, 11, 0, 955, 193, 1, 0, 0, 0, 956, 957, 3, 62, 23, 0, 957, 958, 1, 0, 0, 0, 958, 959, 6, 89, 11, 0, 959, 195, 1, 0, 0, 0, 960, 961, 3, 78, 31, 0, 961, 962, 1, 0, 0, 0, 962, 963, 6, 90, 14, 0, 963, 964, 6, 90, 15, 0, 964, 197, 1, 0, 0, 0, 965, 966, 3, 180, 82, 0, 966, 967, 1, 0, 0, 0, 967, 968, 6, 91, 12, 0, 968, 199, 1, 0, 0, 0, 969, 970, 3, 182, 83, 0, 970, 971, 1, 0, 0, 0, 971, 972, 6, 92, 16, 0, 972, 201, 1, 0, 0, 0, 973, 974, 3, 366, 175, 0, 974, 975, 1, 0, 0, 0, 975, 976, 6, 93, 17, 0, 976, 203, 1, 0, 0, 0, 977, 978, 3, 116, 50, 0, 978, 979, 1, 0, 0, 0, 979, 980, 6, 94, 18, 0, 980, 205, 1, 0, 0, 0, 981, 982, 3, 112, 48, 0, 982, 983, 1, 0, 0, 0, 983, 984, 6, 95, 19, 0, 984, 207, 1, 0, 0, 0, 985, 986, 5, 109, 0, 0, 986, 987, 5, 101, 0, 0, 987, 988, 5, 116, 0, 0, 988, 989, 5, 97, 0, 0, 989, 990, 5, 100, 0, 0, 990, 991, 5, 97, 0, 0, 991, 992, 5, 116, 0, 0, 992, 993, 5, 97, 0, 0, 993, 209, 1, 0, 0, 0, 994, 995, 3, 66, 25, 0, 995, 996, 1, 0, 0, 0, 996, 997, 6, 97, 20, 0, 997, 211, 1, 0, 0, 0, 998, 999, 3, 100, 42, 0, 999, 1000, 1, 0, 0, 0, 1000, 1001, 6, 98, 21, 0, 1001, 213, 1, 0, 0, 0, 1002, 1003, 3, 58, 21, 0, 1003, 1004, 1, 0, 0, 0, 1004, 1005, 6, 99, 11, 0, 1005, 215, 
1, 0, 0, 0, 1006, 1007, 3, 60, 22, 0, 1007, 1008, 1, 0, 0, 0, 1008, 1009, 6, 100, 11, 0, 1009, 217, 1, 0, 0, 0, 1010, 1011, 3, 62, 23, 0, 1011, 1012, 1, 0, 0, 0, 1012, 1013, 6, 101, 11, 0, 1013, 219, 1, 0, 0, 0, 1014, 1015, 3, 78, 31, 0, 1015, 1016, 1, 0, 0, 0, 1016, 1017, 6, 102, 14, 0, 1017, 1018, 6, 102, 15, 0, 1018, 221, 1, 0, 0, 0, 1019, 1020, 3, 120, 52, 0, 1020, 1021, 1, 0, 0, 0, 1021, 1022, 6, 103, 22, 0, 1022, 223, 1, 0, 0, 0, 1023, 1024, 3, 116, 50, 0, 1024, 1025, 1, 0, 0, 0, 1025, 1026, 6, 104, 18, 0, 1026, 225, 1, 0, 0, 0, 1027, 1032, 3, 82, 33, 0, 1028, 1032, 3, 80, 32, 0, 1029, 1032, 3, 96, 40, 0, 1030, 1032, 3, 172, 78, 0, 1031, 1027, 1, 0, 0, 0, 1031, 1028, 1, 0, 0, 0, 1031, 1029, 1, 0, 0, 0, 1031, 1030, 1, 0, 0, 0, 1032, 227, 1, 0, 0, 0, 1033, 1036, 3, 82, 33, 0, 1034, 1036, 3, 172, 78, 0, 1035, 1033, 1, 0, 0, 0, 1035, 1034, 1, 0, 0, 0, 1036, 1040, 1, 0, 0, 0, 1037, 1039, 3, 226, 105, 0, 1038, 1037, 1, 0, 0, 0, 1039, 1042, 1, 0, 0, 0, 1040, 1038, 1, 0, 0, 0, 1040, 1041, 1, 0, 0, 0, 1041, 1053, 1, 0, 0, 0, 1042, 1040, 1, 0, 0, 0, 1043, 1046, 3, 96, 40, 0, 1044, 1046, 3, 90, 37, 0, 1045, 1043, 1, 0, 0, 0, 1045, 1044, 1, 0, 0, 0, 1046, 1048, 1, 0, 0, 0, 1047, 1049, 3, 226, 105, 0, 1048, 1047, 1, 0, 0, 0, 1049, 1050, 1, 0, 0, 0, 1050, 1048, 1, 0, 0, 0, 1050, 1051, 1, 0, 0, 0, 1051, 1053, 1, 0, 0, 0, 1052, 1035, 1, 0, 0, 0, 1052, 1045, 1, 0, 0, 0, 1053, 229, 1, 0, 0, 0, 1054, 1057, 3, 228, 106, 0, 1055, 1057, 3, 186, 85, 0, 1056, 1054, 1, 0, 0, 0, 1056, 1055, 1, 0, 0, 0, 1057, 1058, 1, 0, 0, 0, 1058, 1056, 1, 0, 0, 0, 1058, 1059, 1, 0, 0, 0, 1059, 231, 1, 0, 0, 0, 1060, 1061, 3, 58, 21, 0, 1061, 1062, 1, 0, 0, 0, 1062, 1063, 6, 108, 11, 0, 1063, 233, 1, 0, 0, 0, 1064, 1065, 3, 60, 22, 0, 1065, 1066, 1, 0, 0, 0, 1066, 1067, 6, 109, 11, 0, 1067, 235, 1, 0, 0, 0, 1068, 1069, 3, 62, 23, 0, 1069, 1070, 1, 0, 0, 0, 1070, 1071, 6, 110, 11, 0, 1071, 237, 1, 0, 0, 0, 1072, 1073, 3, 78, 31, 0, 1073, 1074, 1, 0, 0, 0, 1074, 1075, 6, 111, 14, 0, 1075, 1076, 6, 111, 15, 0, 1076, 239, 1, 0, 0, 0, 1077, 1078, 3, 112, 48, 0, 1078, 1079, 1, 0, 0, 0, 1079, 1080, 6, 112, 19, 0, 1080, 241, 1, 0, 0, 0, 1081, 1082, 3, 116, 50, 0, 1082, 1083, 1, 0, 0, 0, 1083, 1084, 6, 113, 18, 0, 1084, 243, 1, 0, 0, 0, 1085, 1086, 3, 120, 52, 0, 1086, 1087, 1, 0, 0, 0, 1087, 1088, 6, 114, 22, 0, 1088, 245, 1, 0, 0, 0, 1089, 1090, 5, 97, 0, 0, 1090, 1091, 5, 115, 0, 0, 1091, 247, 1, 0, 0, 0, 1092, 1093, 3, 230, 107, 0, 1093, 1094, 1, 0, 0, 0, 1094, 1095, 6, 116, 23, 0, 1095, 249, 1, 0, 0, 0, 1096, 1097, 3, 58, 21, 0, 1097, 1098, 1, 0, 0, 0, 1098, 1099, 6, 117, 11, 0, 1099, 251, 1, 0, 0, 0, 1100, 1101, 3, 60, 22, 0, 1101, 1102, 1, 0, 0, 0, 1102, 1103, 6, 118, 11, 0, 1103, 253, 1, 0, 0, 0, 1104, 1105, 3, 62, 23, 0, 1105, 1106, 1, 0, 0, 0, 1106, 1107, 6, 119, 11, 0, 1107, 255, 1, 0, 0, 0, 1108, 1109, 3, 78, 31, 0, 1109, 1110, 1, 0, 0, 0, 1110, 1111, 6, 120, 14, 0, 1111, 1112, 6, 120, 15, 0, 1112, 257, 1, 0, 0, 0, 1113, 1114, 3, 180, 82, 0, 1114, 1115, 1, 0, 0, 0, 1115, 1116, 6, 121, 12, 0, 1116, 1117, 6, 121, 24, 0, 1117, 259, 1, 0, 0, 0, 1118, 1119, 5, 111, 0, 0, 1119, 1120, 5, 110, 0, 0, 1120, 1121, 1, 0, 0, 0, 1121, 1122, 6, 122, 25, 0, 1122, 261, 1, 0, 0, 0, 1123, 1124, 5, 119, 0, 0, 1124, 1125, 5, 105, 0, 0, 1125, 1126, 5, 116, 0, 0, 1126, 1127, 5, 104, 0, 0, 1127, 1128, 1, 0, 0, 0, 1128, 1129, 6, 123, 25, 0, 1129, 263, 1, 0, 0, 0, 1130, 1131, 8, 12, 0, 0, 1131, 265, 1, 0, 0, 0, 1132, 1134, 3, 264, 124, 0, 1133, 1132, 1, 0, 0, 0, 1134, 1135, 1, 0, 0, 0, 1135, 1133, 1, 0, 0, 0, 1135, 1136, 1, 0, 0, 0, 1136, 1137, 1, 
0, 0, 0, 1137, 1138, 3, 366, 175, 0, 1138, 1140, 1, 0, 0, 0, 1139, 1133, 1, 0, 0, 0, 1139, 1140, 1, 0, 0, 0, 1140, 1142, 1, 0, 0, 0, 1141, 1143, 3, 264, 124, 0, 1142, 1141, 1, 0, 0, 0, 1143, 1144, 1, 0, 0, 0, 1144, 1142, 1, 0, 0, 0, 1144, 1145, 1, 0, 0, 0, 1145, 267, 1, 0, 0, 0, 1146, 1147, 3, 266, 125, 0, 1147, 1148, 1, 0, 0, 0, 1148, 1149, 6, 126, 26, 0, 1149, 269, 1, 0, 0, 0, 1150, 1151, 3, 58, 21, 0, 1151, 1152, 1, 0, 0, 0, 1152, 1153, 6, 127, 11, 0, 1153, 271, 1, 0, 0, 0, 1154, 1155, 3, 60, 22, 0, 1155, 1156, 1, 0, 0, 0, 1156, 1157, 6, 128, 11, 0, 1157, 273, 1, 0, 0, 0, 1158, 1159, 3, 62, 23, 0, 1159, 1160, 1, 0, 0, 0, 1160, 1161, 6, 129, 11, 0, 1161, 275, 1, 0, 0, 0, 1162, 1163, 3, 78, 31, 0, 1163, 1164, 1, 0, 0, 0, 1164, 1165, 6, 130, 14, 0, 1165, 1166, 6, 130, 15, 0, 1166, 1167, 6, 130, 15, 0, 1167, 277, 1, 0, 0, 0, 1168, 1169, 3, 112, 48, 0, 1169, 1170, 1, 0, 0, 0, 1170, 1171, 6, 131, 19, 0, 1171, 279, 1, 0, 0, 0, 1172, 1173, 3, 116, 50, 0, 1173, 1174, 1, 0, 0, 0, 1174, 1175, 6, 132, 18, 0, 1175, 281, 1, 0, 0, 0, 1176, 1177, 3, 120, 52, 0, 1177, 1178, 1, 0, 0, 0, 1178, 1179, 6, 133, 22, 0, 1179, 283, 1, 0, 0, 0, 1180, 1181, 3, 262, 123, 0, 1181, 1182, 1, 0, 0, 0, 1182, 1183, 6, 134, 27, 0, 1183, 285, 1, 0, 0, 0, 1184, 1185, 3, 230, 107, 0, 1185, 1186, 1, 0, 0, 0, 1186, 1187, 6, 135, 23, 0, 1187, 287, 1, 0, 0, 0, 1188, 1189, 3, 188, 86, 0, 1189, 1190, 1, 0, 0, 0, 1190, 1191, 6, 136, 28, 0, 1191, 289, 1, 0, 0, 0, 1192, 1193, 3, 58, 21, 0, 1193, 1194, 1, 0, 0, 0, 1194, 1195, 6, 137, 11, 0, 1195, 291, 1, 0, 0, 0, 1196, 1197, 3, 60, 22, 0, 1197, 1198, 1, 0, 0, 0, 1198, 1199, 6, 138, 11, 0, 1199, 293, 1, 0, 0, 0, 1200, 1201, 3, 62, 23, 0, 1201, 1202, 1, 0, 0, 0, 1202, 1203, 6, 139, 11, 0, 1203, 295, 1, 0, 0, 0, 1204, 1205, 3, 78, 31, 0, 1205, 1206, 1, 0, 0, 0, 1206, 1207, 6, 140, 14, 0, 1207, 1208, 6, 140, 15, 0, 1208, 297, 1, 0, 0, 0, 1209, 1210, 3, 366, 175, 0, 1210, 1211, 1, 0, 0, 0, 1211, 1212, 6, 141, 17, 0, 1212, 299, 1, 0, 0, 0, 1213, 1214, 3, 116, 50, 0, 1214, 1215, 1, 0, 0, 0, 1215, 1216, 6, 142, 18, 0, 1216, 301, 1, 0, 0, 0, 1217, 1218, 3, 120, 52, 0, 1218, 1219, 1, 0, 0, 0, 1219, 1220, 6, 143, 22, 0, 1220, 303, 1, 0, 0, 0, 1221, 1222, 3, 260, 122, 0, 1222, 1223, 1, 0, 0, 0, 1223, 1224, 6, 144, 29, 0, 1224, 1225, 6, 144, 30, 0, 1225, 305, 1, 0, 0, 0, 1226, 1227, 3, 66, 25, 0, 1227, 1228, 1, 0, 0, 0, 1228, 1229, 6, 145, 20, 0, 1229, 307, 1, 0, 0, 0, 1230, 1231, 3, 100, 42, 0, 1231, 1232, 1, 0, 0, 0, 1232, 1233, 6, 146, 21, 0, 1233, 309, 1, 0, 0, 0, 1234, 1235, 3, 58, 21, 0, 1235, 1236, 1, 0, 0, 0, 1236, 1237, 6, 147, 11, 0, 1237, 311, 1, 0, 0, 0, 1238, 1239, 3, 60, 22, 0, 1239, 1240, 1, 0, 0, 0, 1240, 1241, 6, 148, 11, 0, 1241, 313, 1, 0, 0, 0, 1242, 1243, 3, 62, 23, 0, 1243, 1244, 1, 0, 0, 0, 1244, 1245, 6, 149, 11, 0, 1245, 315, 1, 0, 0, 0, 1246, 1247, 3, 78, 31, 0, 1247, 1248, 1, 0, 0, 0, 1248, 1249, 6, 150, 14, 0, 1249, 1250, 6, 150, 15, 0, 1250, 1251, 6, 150, 15, 0, 1251, 317, 1, 0, 0, 0, 1252, 1253, 3, 116, 50, 0, 1253, 1254, 1, 0, 0, 0, 1254, 1255, 6, 151, 18, 0, 1255, 319, 1, 0, 0, 0, 1256, 1257, 3, 120, 52, 0, 1257, 1258, 1, 0, 0, 0, 1258, 1259, 6, 152, 22, 0, 1259, 321, 1, 0, 0, 0, 1260, 1261, 3, 230, 107, 0, 1261, 1262, 1, 0, 0, 0, 1262, 1263, 6, 153, 23, 0, 1263, 323, 1, 0, 0, 0, 1264, 1265, 3, 58, 21, 0, 1265, 1266, 1, 0, 0, 0, 1266, 1267, 6, 154, 11, 0, 1267, 325, 1, 0, 0, 0, 1268, 1269, 3, 60, 22, 0, 1269, 1270, 1, 0, 0, 0, 1270, 1271, 6, 155, 11, 0, 1271, 327, 1, 0, 0, 0, 1272, 1273, 3, 62, 23, 0, 1273, 1274, 1, 0, 0, 0, 1274, 1275, 6, 156, 11, 0, 1275, 329, 1, 
0, 0, 0, 1276, 1277, 3, 78, 31, 0, 1277, 1278, 1, 0, 0, 0, 1278, 1279, 6, 157, 14, 0, 1279, 1280, 6, 157, 15, 0, 1280, 331, 1, 0, 0, 0, 1281, 1282, 3, 120, 52, 0, 1282, 1283, 1, 0, 0, 0, 1283, 1284, 6, 158, 22, 0, 1284, 333, 1, 0, 0, 0, 1285, 1286, 3, 188, 86, 0, 1286, 1287, 1, 0, 0, 0, 1287, 1288, 6, 159, 28, 0, 1288, 335, 1, 0, 0, 0, 1289, 1290, 3, 184, 84, 0, 1290, 1291, 1, 0, 0, 0, 1291, 1292, 6, 160, 31, 0, 1292, 337, 1, 0, 0, 0, 1293, 1294, 3, 58, 21, 0, 1294, 1295, 1, 0, 0, 0, 1295, 1296, 6, 161, 11, 0, 1296, 339, 1, 0, 0, 0, 1297, 1298, 3, 60, 22, 0, 1298, 1299, 1, 0, 0, 0, 1299, 1300, 6, 162, 11, 0, 1300, 341, 1, 0, 0, 0, 1301, 1302, 3, 62, 23, 0, 1302, 1303, 1, 0, 0, 0, 1303, 1304, 6, 163, 11, 0, 1304, 343, 1, 0, 0, 0, 1305, 1306, 3, 78, 31, 0, 1306, 1307, 1, 0, 0, 0, 1307, 1308, 6, 164, 14, 0, 1308, 1309, 6, 164, 15, 0, 1309, 345, 1, 0, 0, 0, 1310, 1311, 5, 105, 0, 0, 1311, 1312, 5, 110, 0, 0, 1312, 1313, 5, 102, 0, 0, 1313, 1314, 5, 111, 0, 0, 1314, 347, 1, 0, 0, 0, 1315, 1316, 3, 58, 21, 0, 1316, 1317, 1, 0, 0, 0, 1317, 1318, 6, 166, 11, 0, 1318, 349, 1, 0, 0, 0, 1319, 1320, 3, 60, 22, 0, 1320, 1321, 1, 0, 0, 0, 1321, 1322, 6, 167, 11, 0, 1322, 351, 1, 0, 0, 0, 1323, 1324, 3, 62, 23, 0, 1324, 1325, 1, 0, 0, 0, 1325, 1326, 6, 168, 11, 0, 1326, 353, 1, 0, 0, 0, 1327, 1328, 3, 78, 31, 0, 1328, 1329, 1, 0, 0, 0, 1329, 1330, 6, 169, 14, 0, 1330, 1331, 6, 169, 15, 0, 1331, 355, 1, 0, 0, 0, 1332, 1333, 5, 102, 0, 0, 1333, 1334, 5, 117, 0, 0, 1334, 1335, 5, 110, 0, 0, 1335, 1336, 5, 99, 0, 0, 1336, 1337, 5, 116, 0, 0, 1337, 1338, 5, 105, 0, 0, 1338, 1339, 5, 111, 0, 0, 1339, 1340, 5, 110, 0, 0, 1340, 1341, 5, 115, 0, 0, 1341, 357, 1, 0, 0, 0, 1342, 1343, 3, 58, 21, 0, 1343, 1344, 1, 0, 0, 0, 1344, 1345, 6, 171, 11, 0, 1345, 359, 1, 0, 0, 0, 1346, 1347, 3, 60, 22, 0, 1347, 1348, 1, 0, 0, 0, 1348, 1349, 6, 172, 11, 0, 1349, 361, 1, 0, 0, 0, 1350, 1351, 3, 62, 23, 0, 1351, 1352, 1, 0, 0, 0, 1352, 1353, 6, 173, 11, 0, 1353, 363, 1, 0, 0, 0, 1354, 1355, 3, 182, 83, 0, 1355, 1356, 1, 0, 0, 0, 1356, 1357, 6, 174, 16, 0, 1357, 1358, 6, 174, 15, 0, 1358, 365, 1, 0, 0, 0, 1359, 1360, 5, 58, 0, 0, 1360, 367, 1, 0, 0, 0, 1361, 1367, 3, 90, 37, 0, 1362, 1367, 3, 80, 32, 0, 1363, 1367, 3, 120, 52, 0, 1364, 1367, 3, 82, 33, 0, 1365, 1367, 3, 96, 40, 0, 1366, 1361, 1, 0, 0, 0, 1366, 1362, 1, 0, 0, 0, 1366, 1363, 1, 0, 0, 0, 1366, 1364, 1, 0, 0, 0, 1366, 1365, 1, 0, 0, 0, 1367, 1368, 1, 0, 0, 0, 1368, 1366, 1, 0, 0, 0, 1368, 1369, 1, 0, 0, 0, 1369, 369, 1, 0, 0, 0, 1370, 1371, 3, 58, 21, 0, 1371, 1372, 1, 0, 0, 0, 1372, 1373, 6, 177, 11, 0, 1373, 371, 1, 0, 0, 0, 1374, 1375, 3, 60, 22, 0, 1375, 1376, 1, 0, 0, 0, 1376, 1377, 6, 178, 11, 0, 1377, 373, 1, 0, 0, 0, 1378, 1379, 3, 62, 23, 0, 1379, 1380, 1, 0, 0, 0, 1380, 1381, 6, 179, 11, 0, 1381, 375, 1, 0, 0, 0, 1382, 1383, 3, 78, 31, 0, 1383, 1384, 1, 0, 0, 0, 1384, 1385, 6, 180, 14, 0, 1385, 1386, 6, 180, 15, 0, 1386, 377, 1, 0, 0, 0, 1387, 1388, 3, 66, 25, 0, 1388, 1389, 1, 0, 0, 0, 1389, 1390, 6, 181, 20, 0, 1390, 1391, 6, 181, 15, 0, 1391, 1392, 6, 181, 32, 0, 1392, 379, 1, 0, 0, 0, 1393, 1394, 3, 100, 42, 0, 1394, 1395, 1, 0, 0, 0, 1395, 1396, 6, 182, 21, 0, 1396, 1397, 6, 182, 15, 0, 1397, 1398, 6, 182, 32, 0, 1398, 381, 1, 0, 0, 0, 1399, 1400, 3, 58, 21, 0, 1400, 1401, 1, 0, 0, 0, 1401, 1402, 6, 183, 11, 0, 1402, 383, 1, 0, 0, 0, 1403, 1404, 3, 60, 22, 0, 1404, 1405, 1, 0, 0, 0, 1405, 1406, 6, 184, 11, 0, 1406, 385, 1, 0, 0, 0, 1407, 1408, 3, 62, 23, 0, 1408, 1409, 1, 0, 0, 0, 1409, 1410, 6, 185, 11, 0, 1410, 387, 1, 0, 0, 0, 1411, 1412, 3, 366, 
175, 0, 1412, 1413, 1, 0, 0, 0, 1413, 1414, 6, 186, 17, 0, 1414, 1415, 6, 186, 15, 0, 1415, 1416, 6, 186, 7, 0, 1416, 389, 1, 0, 0, 0, 1417, 1418, 3, 116, 50, 0, 1418, 1419, 1, 0, 0, 0, 1419, 1420, 6, 187, 18, 0, 1420, 1421, 6, 187, 15, 0, 1421, 1422, 6, 187, 7, 0, 1422, 391, 1, 0, 0, 0, 1423, 1424, 3, 58, 21, 0, 1424, 1425, 1, 0, 0, 0, 1425, 1426, 6, 188, 11, 0, 1426, 393, 1, 0, 0, 0, 1427, 1428, 3, 60, 22, 0, 1428, 1429, 1, 0, 0, 0, 1429, 1430, 6, 189, 11, 0, 1430, 395, 1, 0, 0, 0, 1431, 1432, 3, 62, 23, 0, 1432, 1433, 1, 0, 0, 0, 1433, 1434, 6, 190, 11, 0, 1434, 397, 1, 0, 0, 0, 1435, 1436, 3, 188, 86, 0, 1436, 1437, 1, 0, 0, 0, 1437, 1438, 6, 191, 15, 0, 1438, 1439, 6, 191, 0, 0, 1439, 1440, 6, 191, 28, 0, 1440, 399, 1, 0, 0, 0, 1441, 1442, 3, 184, 84, 0, 1442, 1443, 1, 0, 0, 0, 1443, 1444, 6, 192, 15, 0, 1444, 1445, 6, 192, 0, 0, 1445, 1446, 6, 192, 31, 0, 1446, 401, 1, 0, 0, 0, 1447, 1448, 3, 106, 45, 0, 1448, 1449, 1, 0, 0, 0, 1449, 1450, 6, 193, 15, 0, 1450, 1451, 6, 193, 0, 0, 1451, 1452, 6, 193, 33, 0, 1452, 403, 1, 0, 0, 0, 1453, 1454, 3, 78, 31, 0, 1454, 1455, 1, 0, 0, 0, 1455, 1456, 6, 194, 14, 0, 1456, 1457, 6, 194, 15, 0, 1457, 405, 1, 0, 0, 0, 65, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 578, 588, 592, 595, 604, 606, 617, 624, 629, 668, 673, 682, 689, 694, 696, 707, 715, 718, 720, 725, 730, 736, 743, 748, 754, 757, 765, 769, 899, 906, 908, 924, 929, 934, 936, 942, 1031, 1035, 1040, 1045, 1050, 1052, 1056, 1058, 1135, 1139, 1144, 1366, 1368, 34, 5, 2, 0, 5, 4, 0, 5, 6, 0, 5, 1, 0, 5, 3, 0, 5, 8, 0, 5, 12, 0, 5, 14, 0, 5, 10, 0, 5, 5, 0, 5, 11, 0, 0, 1, 0, 7, 70, 0, 5, 0, 0, 7, 29, 0, 4, 0, 0, 7, 71, 0, 7, 115, 0, 7, 38, 0, 7, 36, 0, 7, 25, 0, 7, 30, 0, 7, 40, 0, 7, 81, 0, 5, 13, 0, 5, 7, 0, 7, 91, 0, 7, 90, 0, 7, 73, 0, 7, 89, 0, 5, 9, 0, 7, 72, 0, 5, 15, 0, 7, 33, 0] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java index de837d1764791..f92830a27eb03 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java @@ -23,25 +23,26 @@ public class EsqlBaseLexer extends Lexer { MULTILINE_COMMENT=23, WS=24, UNQUOTED_SOURCE=25, EXPLAIN_WS=26, EXPLAIN_LINE_COMMENT=27, EXPLAIN_MULTILINE_COMMENT=28, PIPE=29, QUOTED_STRING=30, INTEGER_LITERAL=31, DECIMAL_LITERAL=32, BY=33, AND=34, ASC=35, ASSIGN=36, CAST_OP=37, COMMA=38, - DESC=39, DOT=40, FALSE=41, FIRST=42, LAST=43, LP=44, IN=45, IS=46, LIKE=47, - NOT=48, NULL=49, NULLS=50, OR=51, PARAM=52, RLIKE=53, RP=54, TRUE=55, - EQ=56, CIEQ=57, NEQ=58, LT=59, LTE=60, GT=61, GTE=62, PLUS=63, MINUS=64, - ASTERISK=65, SLASH=66, PERCENT=67, NAMED_OR_POSITIONAL_PARAM=68, OPENING_BRACKET=69, - CLOSING_BRACKET=70, UNQUOTED_IDENTIFIER=71, QUOTED_IDENTIFIER=72, EXPR_LINE_COMMENT=73, - EXPR_MULTILINE_COMMENT=74, EXPR_WS=75, METADATA=76, FROM_LINE_COMMENT=77, - FROM_MULTILINE_COMMENT=78, FROM_WS=79, ID_PATTERN=80, PROJECT_LINE_COMMENT=81, - PROJECT_MULTILINE_COMMENT=82, PROJECT_WS=83, AS=84, RENAME_LINE_COMMENT=85, - RENAME_MULTILINE_COMMENT=86, RENAME_WS=87, ON=88, WITH=89, ENRICH_POLICY_NAME=90, - ENRICH_LINE_COMMENT=91, ENRICH_MULTILINE_COMMENT=92, ENRICH_WS=93, ENRICH_FIELD_LINE_COMMENT=94, - ENRICH_FIELD_MULTILINE_COMMENT=95, ENRICH_FIELD_WS=96, LOOKUP_LINE_COMMENT=97, - LOOKUP_MULTILINE_COMMENT=98, LOOKUP_WS=99, 
LOOKUP_FIELD_LINE_COMMENT=100, - LOOKUP_FIELD_MULTILINE_COMMENT=101, LOOKUP_FIELD_WS=102, MVEXPAND_LINE_COMMENT=103, - MVEXPAND_MULTILINE_COMMENT=104, MVEXPAND_WS=105, INFO=106, SHOW_LINE_COMMENT=107, - SHOW_MULTILINE_COMMENT=108, SHOW_WS=109, FUNCTIONS=110, META_LINE_COMMENT=111, - META_MULTILINE_COMMENT=112, META_WS=113, COLON=114, SETTING=115, SETTING_LINE_COMMENT=116, - SETTTING_MULTILINE_COMMENT=117, SETTING_WS=118, METRICS_LINE_COMMENT=119, - METRICS_MULTILINE_COMMENT=120, METRICS_WS=121, CLOSING_METRICS_LINE_COMMENT=122, - CLOSING_METRICS_MULTILINE_COMMENT=123, CLOSING_METRICS_WS=124; + DESC=39, DOT=40, FALSE=41, FIRST=42, IN=43, IS=44, LAST=45, LIKE=46, LP=47, + MATCH=48, NOT=49, NULL=50, NULLS=51, OR=52, PARAM=53, RLIKE=54, RP=55, + TRUE=56, EQ=57, CIEQ=58, NEQ=59, LT=60, LTE=61, GT=62, GTE=63, PLUS=64, + MINUS=65, ASTERISK=66, SLASH=67, PERCENT=68, NAMED_OR_POSITIONAL_PARAM=69, + OPENING_BRACKET=70, CLOSING_BRACKET=71, UNQUOTED_IDENTIFIER=72, QUOTED_IDENTIFIER=73, + EXPR_LINE_COMMENT=74, EXPR_MULTILINE_COMMENT=75, EXPR_WS=76, METADATA=77, + FROM_LINE_COMMENT=78, FROM_MULTILINE_COMMENT=79, FROM_WS=80, ID_PATTERN=81, + PROJECT_LINE_COMMENT=82, PROJECT_MULTILINE_COMMENT=83, PROJECT_WS=84, + AS=85, RENAME_LINE_COMMENT=86, RENAME_MULTILINE_COMMENT=87, RENAME_WS=88, + ON=89, WITH=90, ENRICH_POLICY_NAME=91, ENRICH_LINE_COMMENT=92, ENRICH_MULTILINE_COMMENT=93, + ENRICH_WS=94, ENRICH_FIELD_LINE_COMMENT=95, ENRICH_FIELD_MULTILINE_COMMENT=96, + ENRICH_FIELD_WS=97, LOOKUP_LINE_COMMENT=98, LOOKUP_MULTILINE_COMMENT=99, + LOOKUP_WS=100, LOOKUP_FIELD_LINE_COMMENT=101, LOOKUP_FIELD_MULTILINE_COMMENT=102, + LOOKUP_FIELD_WS=103, MVEXPAND_LINE_COMMENT=104, MVEXPAND_MULTILINE_COMMENT=105, + MVEXPAND_WS=106, INFO=107, SHOW_LINE_COMMENT=108, SHOW_MULTILINE_COMMENT=109, + SHOW_WS=110, FUNCTIONS=111, META_LINE_COMMENT=112, META_MULTILINE_COMMENT=113, + META_WS=114, COLON=115, SETTING=116, SETTING_LINE_COMMENT=117, SETTTING_MULTILINE_COMMENT=118, + SETTING_WS=119, METRICS_LINE_COMMENT=120, METRICS_MULTILINE_COMMENT=121, + METRICS_WS=122, CLOSING_METRICS_LINE_COMMENT=123, CLOSING_METRICS_MULTILINE_COMMENT=124, + CLOSING_METRICS_WS=125; public static final int EXPLAIN_MODE=1, EXPRESSION_MODE=2, FROM_MODE=3, PROJECT_MODE=4, RENAME_MODE=5, ENRICH_MODE=6, ENRICH_FIELD_MODE=7, LOOKUP_MODE=8, LOOKUP_FIELD_MODE=9, @@ -69,8 +70,8 @@ private static String[] makeRuleNames() { "UNESCAPED_CHARS", "EXPONENT", "ASPERAND", "BACKQUOTE", "BACKQUOTE_BLOCK", "UNDERSCORE", "UNQUOTED_ID_BODY", "QUOTED_STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", "COMMA", - "DESC", "DOT", "FALSE", "FIRST", "LAST", "LP", "IN", "IS", "LIKE", "NOT", - "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", + "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", "LP", "MATCH", + "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_ID", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", @@ -112,11 +113,11 @@ private static String[] makeLiteralNames() { "'metrics'", "'mv_expand'", "'rename'", "'row'", "'show'", "'sort'", "'stats'", "'where'", null, null, null, null, null, null, null, null, "'|'", null, null, null, "'by'", "'and'", "'asc'", "'='", "'::'", "','", - "'desc'", "'.'", "'false'", "'first'", "'last'", "'('", "'in'", "'is'", - "'like'", "'not'", "'null'", "'nulls'", "'or'", "'?'", 
"'rlike'", "')'", - "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", "'>='", "'+'", - "'-'", "'*'", "'/'", "'%'", null, null, "']'", null, null, null, null, - null, "'metadata'", null, null, null, null, null, null, null, "'as'", + "'desc'", "'.'", "'false'", "'first'", "'in'", "'is'", "'last'", "'like'", + "'('", "'match'", "'not'", "'null'", "'nulls'", "'or'", "'?'", "'rlike'", + "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", "'>='", + "'+'", "'-'", "'*'", "'/'", "'%'", null, null, "']'", null, null, null, + null, null, "'metadata'", null, null, null, null, null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, "'info'", null, null, null, "'functions'", null, null, null, "':'" @@ -131,8 +132,8 @@ private static String[] makeSymbolicNames() { "MULTILINE_COMMENT", "WS", "UNQUOTED_SOURCE", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", "PIPE", "QUOTED_STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", "COMMA", - "DESC", "DOT", "FALSE", "FIRST", "LAST", "LP", "IN", "IS", "LIKE", "NOT", - "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", + "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", "LP", "MATCH", + "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", @@ -212,7 +213,7 @@ public EsqlBaseLexer(CharStream input) { public ATN getATN() { return _ATN; } public static final String _serializedATN = - "\u0004\u0000|\u05aa\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ + "\u0004\u0000}\u05b2\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ @@ -267,874 +268,880 @@ public EsqlBaseLexer(CharStream input) { "\u0002\u00b7\u0007\u00b7\u0002\u00b8\u0007\u00b8\u0002\u00b9\u0007\u00b9"+ "\u0002\u00ba\u0007\u00ba\u0002\u00bb\u0007\u00bb\u0002\u00bc\u0007\u00bc"+ "\u0002\u00bd\u0007\u00bd\u0002\u00be\u0007\u00be\u0002\u00bf\u0007\u00bf"+ - "\u0002\u00c0\u0007\u00c0\u0002\u00c1\u0007\u00c1\u0001\u0000\u0001\u0000"+ + "\u0002\u00c0\u0007\u00c0\u0002\u00c1\u0007\u00c1\u0002\u00c2\u0007\u00c2"+ "\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000"+ - "\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0002\u0001\u0002\u0001\u0002"+ + "\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001"+ + "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0002"+ "\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002"+ - "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ - "\u0001\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004"+ - "\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005"+ - "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006"+ - 
"\u0001\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007"+ + "\u0001\u0002\u0001\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ + "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0004\u0001\u0004\u0001\u0004"+ + "\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004"+ + "\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006"+ + "\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0007\u0001\u0007\u0001\u0007"+ "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007"+ - "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001"+ - "\b\u0001\b\u0001\b\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ - "\t\u0001\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ - "\n\u0001\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b"+ - "\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001"+ - "\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001\r\u0001"+ - "\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\u000e"+ - "\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e"+ - "\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f"+ - "\u0001\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010"+ - "\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011"+ - "\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0012\u0001\u0012"+ + "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001"+ + "\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\t\u0001\t\u0001\t\u0001"+ + "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ + "\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001\u000b"+ + "\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001"+ + "\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\r\u0001"+ + "\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001"+ + "\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e"+ + "\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f"+ + "\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u0010\u0001\u0010"+ + "\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0011"+ + "\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011"+ "\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012"+ - "\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013"+ - "\u0001\u0013\u0001\u0013\u0001\u0014\u0004\u0014\u023f\b\u0014\u000b\u0014"+ - "\f\u0014\u0240\u0001\u0014\u0001\u0014\u0001\u0015\u0001\u0015\u0001\u0015"+ - "\u0001\u0015\u0005\u0015\u0249\b\u0015\n\u0015\f\u0015\u024c\t\u0015\u0001"+ - "\u0015\u0003\u0015\u024f\b\u0015\u0001\u0015\u0003\u0015\u0252\b\u0015"+ - "\u0001\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016"+ - "\u0001\u0016\u0005\u0016\u025b\b\u0016\n\u0016\f\u0016\u025e\t\u0016\u0001"+ - "\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0017\u0004"+ - "\u0017\u0266\b\u0017\u000b\u0017\f\u0017\u0267\u0001\u0017\u0001\u0017"+ - "\u0001\u0018\u0001\u0018\u0001\u0018\u0003\u0018\u026f\b\u0018\u0001\u0019"+ - "\u0004\u0019\u0272\b\u0019\u000b\u0019\f\u0019\u0273\u0001\u001a\u0001"+ - "\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001b\u0001\u001b\u0001"+ - 
"\u001b\u0001\u001b\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001c\u0001"+ - "\u001c\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001e\u0001"+ - "\u001e\u0001\u001e\u0001\u001e\u0001\u001f\u0001\u001f\u0001\u001f\u0001"+ - "\u001f\u0001 \u0001 \u0001!\u0001!\u0001\"\u0001\"\u0001\"\u0001#\u0001"+ - "#\u0001$\u0001$\u0003$\u029b\b$\u0001$\u0004$\u029e\b$\u000b$\f$\u029f"+ - "\u0001%\u0001%\u0001&\u0001&\u0001\'\u0001\'\u0001\'\u0003\'\u02a9\b\'"+ - "\u0001(\u0001(\u0001)\u0001)\u0001)\u0003)\u02b0\b)\u0001*\u0001*\u0001"+ - "*\u0005*\u02b5\b*\n*\f*\u02b8\t*\u0001*\u0001*\u0001*\u0001*\u0001*\u0001"+ - "*\u0005*\u02c0\b*\n*\f*\u02c3\t*\u0001*\u0001*\u0001*\u0001*\u0001*\u0003"+ - "*\u02ca\b*\u0001*\u0003*\u02cd\b*\u0003*\u02cf\b*\u0001+\u0004+\u02d2"+ - "\b+\u000b+\f+\u02d3\u0001,\u0004,\u02d7\b,\u000b,\f,\u02d8\u0001,\u0001"+ - ",\u0005,\u02dd\b,\n,\f,\u02e0\t,\u0001,\u0001,\u0004,\u02e4\b,\u000b,"+ - "\f,\u02e5\u0001,\u0004,\u02e9\b,\u000b,\f,\u02ea\u0001,\u0001,\u0005,"+ - "\u02ef\b,\n,\f,\u02f2\t,\u0003,\u02f4\b,\u0001,\u0001,\u0001,\u0001,\u0004"+ - ",\u02fa\b,\u000b,\f,\u02fb\u0001,\u0001,\u0003,\u0300\b,\u0001-\u0001"+ - "-\u0001-\u0001.\u0001.\u0001.\u0001.\u0001/\u0001/\u0001/\u0001/\u0001"+ - "0\u00010\u00011\u00011\u00011\u00012\u00012\u00013\u00013\u00013\u0001"+ - "3\u00013\u00014\u00014\u00015\u00015\u00015\u00015\u00015\u00015\u0001"+ - "6\u00016\u00016\u00016\u00016\u00016\u00017\u00017\u00017\u00017\u0001"+ - "7\u00018\u00018\u00019\u00019\u00019\u0001:\u0001:\u0001:\u0001;\u0001"+ - ";\u0001;\u0001;\u0001;\u0001<\u0001<\u0001<\u0001<\u0001=\u0001=\u0001"+ - "=\u0001=\u0001=\u0001>\u0001>\u0001>\u0001>\u0001>\u0001>\u0001?\u0001"+ - "?\u0001?\u0001@\u0001@\u0001A\u0001A\u0001A\u0001A\u0001A\u0001A\u0001"+ - "B\u0001B\u0001C\u0001C\u0001C\u0001C\u0001C\u0001D\u0001D\u0001D\u0001"+ - "E\u0001E\u0001E\u0001F\u0001F\u0001F\u0001G\u0001G\u0001H\u0001H\u0001"+ - "H\u0001I\u0001I\u0001J\u0001J\u0001J\u0001K\u0001K\u0001L\u0001L\u0001"+ - "M\u0001M\u0001N\u0001N\u0001O\u0001O\u0001P\u0001P\u0001P\u0005P\u037a"+ - "\bP\nP\fP\u037d\tP\u0001P\u0001P\u0004P\u0381\bP\u000bP\fP\u0382\u0003"+ - "P\u0385\bP\u0001Q\u0001Q\u0001Q\u0001Q\u0001Q\u0001R\u0001R\u0001R\u0001"+ - "R\u0001R\u0001S\u0001S\u0005S\u0393\bS\nS\fS\u0396\tS\u0001S\u0001S\u0003"+ - "S\u039a\bS\u0001S\u0004S\u039d\bS\u000bS\fS\u039e\u0003S\u03a1\bS\u0001"+ - "T\u0001T\u0004T\u03a5\bT\u000bT\fT\u03a6\u0001T\u0001T\u0001U\u0001U\u0001"+ - "V\u0001V\u0001V\u0001V\u0001W\u0001W\u0001W\u0001W\u0001X\u0001X\u0001"+ - "X\u0001X\u0001Y\u0001Y\u0001Y\u0001Y\u0001Y\u0001Z\u0001Z\u0001Z\u0001"+ - "Z\u0001[\u0001[\u0001[\u0001[\u0001\\\u0001\\\u0001\\\u0001\\\u0001]\u0001"+ - "]\u0001]\u0001]\u0001^\u0001^\u0001^\u0001^\u0001_\u0001_\u0001_\u0001"+ - "_\u0001_\u0001_\u0001_\u0001_\u0001_\u0001`\u0001`\u0001`\u0001`\u0001"+ - "a\u0001a\u0001a\u0001a\u0001b\u0001b\u0001b\u0001b\u0001c\u0001c\u0001"+ - "c\u0001c\u0001d\u0001d\u0001d\u0001d\u0001e\u0001e\u0001e\u0001e\u0001"+ - "e\u0001f\u0001f\u0001f\u0001f\u0001g\u0001g\u0001g\u0001g\u0001h\u0001"+ - "h\u0001h\u0001h\u0003h\u0400\bh\u0001i\u0001i\u0003i\u0404\bi\u0001i\u0005"+ - "i\u0407\bi\ni\fi\u040a\ti\u0001i\u0001i\u0003i\u040e\bi\u0001i\u0004i"+ - "\u0411\bi\u000bi\fi\u0412\u0003i\u0415\bi\u0001j\u0001j\u0004j\u0419\b"+ - "j\u000bj\fj\u041a\u0001k\u0001k\u0001k\u0001k\u0001l\u0001l\u0001l\u0001"+ - "l\u0001m\u0001m\u0001m\u0001m\u0001n\u0001n\u0001n\u0001n\u0001n\u0001"+ - "o\u0001o\u0001o\u0001o\u0001p\u0001p\u0001p\u0001p\u0001q\u0001q\u0001"+ - 
"q\u0001q\u0001r\u0001r\u0001r\u0001s\u0001s\u0001s\u0001s\u0001t\u0001"+ - "t\u0001t\u0001t\u0001u\u0001u\u0001u\u0001u\u0001v\u0001v\u0001v\u0001"+ - "v\u0001w\u0001w\u0001w\u0001w\u0001w\u0001x\u0001x\u0001x\u0001x\u0001"+ - "x\u0001y\u0001y\u0001y\u0001y\u0001y\u0001z\u0001z\u0001z\u0001z\u0001"+ - "z\u0001z\u0001z\u0001{\u0001{\u0001|\u0004|\u0466\b|\u000b|\f|\u0467\u0001"+ - "|\u0001|\u0003|\u046c\b|\u0001|\u0004|\u046f\b|\u000b|\f|\u0470\u0001"+ - "}\u0001}\u0001}\u0001}\u0001~\u0001~\u0001~\u0001~\u0001\u007f\u0001\u007f"+ + "\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013"+ + "\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0014\u0004\u0014"+ + "\u0241\b\u0014\u000b\u0014\f\u0014\u0242\u0001\u0014\u0001\u0014\u0001"+ + "\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0005\u0015\u024b\b\u0015\n"+ + "\u0015\f\u0015\u024e\t\u0015\u0001\u0015\u0003\u0015\u0251\b\u0015\u0001"+ + "\u0015\u0003\u0015\u0254\b\u0015\u0001\u0015\u0001\u0015\u0001\u0016\u0001"+ + "\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0005\u0016\u025d\b\u0016\n"+ + "\u0016\f\u0016\u0260\t\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001"+ + "\u0016\u0001\u0016\u0001\u0017\u0004\u0017\u0268\b\u0017\u000b\u0017\f"+ + "\u0017\u0269\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018\u0001\u0018"+ + "\u0003\u0018\u0271\b\u0018\u0001\u0019\u0004\u0019\u0274\b\u0019\u000b"+ + "\u0019\f\u0019\u0275\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001"+ + "\u001a\u0001\u001b\u0001\u001b\u0001\u001b\u0001\u001b\u0001\u001b\u0001"+ + "\u001c\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001d\u0001\u001d\u0001"+ + "\u001d\u0001\u001d\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001e\u0001"+ + "\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001 \u0001 \u0001!\u0001"+ + "!\u0001\"\u0001\"\u0001\"\u0001#\u0001#\u0001$\u0001$\u0003$\u029d\b$"+ + "\u0001$\u0004$\u02a0\b$\u000b$\f$\u02a1\u0001%\u0001%\u0001&\u0001&\u0001"+ + "\'\u0001\'\u0001\'\u0003\'\u02ab\b\'\u0001(\u0001(\u0001)\u0001)\u0001"+ + ")\u0003)\u02b2\b)\u0001*\u0001*\u0001*\u0005*\u02b7\b*\n*\f*\u02ba\t*"+ + "\u0001*\u0001*\u0001*\u0001*\u0001*\u0001*\u0005*\u02c2\b*\n*\f*\u02c5"+ + "\t*\u0001*\u0001*\u0001*\u0001*\u0001*\u0003*\u02cc\b*\u0001*\u0003*\u02cf"+ + "\b*\u0003*\u02d1\b*\u0001+\u0004+\u02d4\b+\u000b+\f+\u02d5\u0001,\u0004"+ + ",\u02d9\b,\u000b,\f,\u02da\u0001,\u0001,\u0005,\u02df\b,\n,\f,\u02e2\t"+ + ",\u0001,\u0001,\u0004,\u02e6\b,\u000b,\f,\u02e7\u0001,\u0004,\u02eb\b"+ + ",\u000b,\f,\u02ec\u0001,\u0001,\u0005,\u02f1\b,\n,\f,\u02f4\t,\u0003,"+ + "\u02f6\b,\u0001,\u0001,\u0001,\u0001,\u0004,\u02fc\b,\u000b,\f,\u02fd"+ + "\u0001,\u0001,\u0003,\u0302\b,\u0001-\u0001-\u0001-\u0001.\u0001.\u0001"+ + ".\u0001.\u0001/\u0001/\u0001/\u0001/\u00010\u00010\u00011\u00011\u0001"+ + "1\u00012\u00012\u00013\u00013\u00013\u00013\u00013\u00014\u00014\u0001"+ + "5\u00015\u00015\u00015\u00015\u00015\u00016\u00016\u00016\u00016\u0001"+ + "6\u00016\u00017\u00017\u00017\u00018\u00018\u00018\u00019\u00019\u0001"+ + "9\u00019\u00019\u0001:\u0001:\u0001:\u0001:\u0001:\u0001;\u0001;\u0001"+ + "<\u0001<\u0001<\u0001<\u0001<\u0001<\u0001=\u0001=\u0001=\u0001=\u0001"+ + ">\u0001>\u0001>\u0001>\u0001>\u0001?\u0001?\u0001?\u0001?\u0001?\u0001"+ + "?\u0001@\u0001@\u0001@\u0001A\u0001A\u0001B\u0001B\u0001B\u0001B\u0001"+ + "B\u0001B\u0001C\u0001C\u0001D\u0001D\u0001D\u0001D\u0001D\u0001E\u0001"+ + "E\u0001E\u0001F\u0001F\u0001F\u0001G\u0001G\u0001G\u0001H\u0001H\u0001"+ + "I\u0001I\u0001I\u0001J\u0001J\u0001K\u0001K\u0001K\u0001L\u0001L\u0001"+ + 
"M\u0001M\u0001N\u0001N\u0001O\u0001O\u0001P\u0001P\u0001Q\u0001Q\u0001"+ + "Q\u0005Q\u0382\bQ\nQ\fQ\u0385\tQ\u0001Q\u0001Q\u0004Q\u0389\bQ\u000bQ"+ + "\fQ\u038a\u0003Q\u038d\bQ\u0001R\u0001R\u0001R\u0001R\u0001R\u0001S\u0001"+ + "S\u0001S\u0001S\u0001S\u0001T\u0001T\u0005T\u039b\bT\nT\fT\u039e\tT\u0001"+ + "T\u0001T\u0003T\u03a2\bT\u0001T\u0004T\u03a5\bT\u000bT\fT\u03a6\u0003"+ + "T\u03a9\bT\u0001U\u0001U\u0004U\u03ad\bU\u000bU\fU\u03ae\u0001U\u0001"+ + "U\u0001V\u0001V\u0001W\u0001W\u0001W\u0001W\u0001X\u0001X\u0001X\u0001"+ + "X\u0001Y\u0001Y\u0001Y\u0001Y\u0001Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001"+ + "[\u0001[\u0001[\u0001[\u0001\\\u0001\\\u0001\\\u0001\\\u0001]\u0001]\u0001"+ + "]\u0001]\u0001^\u0001^\u0001^\u0001^\u0001_\u0001_\u0001_\u0001_\u0001"+ + "`\u0001`\u0001`\u0001`\u0001`\u0001`\u0001`\u0001`\u0001`\u0001a\u0001"+ + "a\u0001a\u0001a\u0001b\u0001b\u0001b\u0001b\u0001c\u0001c\u0001c\u0001"+ + "c\u0001d\u0001d\u0001d\u0001d\u0001e\u0001e\u0001e\u0001e\u0001f\u0001"+ + "f\u0001f\u0001f\u0001f\u0001g\u0001g\u0001g\u0001g\u0001h\u0001h\u0001"+ + "h\u0001h\u0001i\u0001i\u0001i\u0001i\u0003i\u0408\bi\u0001j\u0001j\u0003"+ + "j\u040c\bj\u0001j\u0005j\u040f\bj\nj\fj\u0412\tj\u0001j\u0001j\u0003j"+ + "\u0416\bj\u0001j\u0004j\u0419\bj\u000bj\fj\u041a\u0003j\u041d\bj\u0001"+ + "k\u0001k\u0004k\u0421\bk\u000bk\fk\u0422\u0001l\u0001l\u0001l\u0001l\u0001"+ + "m\u0001m\u0001m\u0001m\u0001n\u0001n\u0001n\u0001n\u0001o\u0001o\u0001"+ + "o\u0001o\u0001o\u0001p\u0001p\u0001p\u0001p\u0001q\u0001q\u0001q\u0001"+ + "q\u0001r\u0001r\u0001r\u0001r\u0001s\u0001s\u0001s\u0001t\u0001t\u0001"+ + "t\u0001t\u0001u\u0001u\u0001u\u0001u\u0001v\u0001v\u0001v\u0001v\u0001"+ + "w\u0001w\u0001w\u0001w\u0001x\u0001x\u0001x\u0001x\u0001x\u0001y\u0001"+ + "y\u0001y\u0001y\u0001y\u0001z\u0001z\u0001z\u0001z\u0001z\u0001{\u0001"+ + "{\u0001{\u0001{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001}\u0004}\u046e"+ + "\b}\u000b}\f}\u046f\u0001}\u0001}\u0003}\u0474\b}\u0001}\u0004}\u0477"+ + "\b}\u000b}\f}\u0478\u0001~\u0001~\u0001~\u0001~\u0001\u007f\u0001\u007f"+ "\u0001\u007f\u0001\u007f\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0080"+ - "\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0081"+ + "\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0082\u0001\u0082"+ "\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0083\u0001\u0083"+ "\u0001\u0083\u0001\u0083\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0084"+ "\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0086\u0001\u0086"+ "\u0001\u0086\u0001\u0086\u0001\u0087\u0001\u0087\u0001\u0087\u0001\u0087"+ "\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0089\u0001\u0089"+ "\u0001\u0089\u0001\u0089\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008a"+ - "\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008c"+ + "\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008c\u0001\u008c"+ "\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008d\u0001\u008d\u0001\u008d"+ "\u0001\u008d\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008f"+ - "\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u0090\u0001\u0090"+ + "\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u0090\u0001\u0090\u0001\u0090"+ "\u0001\u0090\u0001\u0090\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0091"+ "\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0093\u0001\u0093"+ "\u0001\u0093\u0001\u0093\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0094"+ - "\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095"+ + 
"\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0096\u0001\u0096"+ "\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0097\u0001\u0097"+ "\u0001\u0097\u0001\u0097\u0001\u0098\u0001\u0098\u0001\u0098\u0001\u0098"+ "\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u009a\u0001\u009a"+ "\u0001\u009a\u0001\u009a\u0001\u009b\u0001\u009b\u0001\u009b\u0001\u009b"+ - "\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009d"+ + "\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009d\u0001\u009d"+ "\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009e\u0001\u009e\u0001\u009e"+ "\u0001\u009e\u0001\u009f\u0001\u009f\u0001\u009f\u0001\u009f\u0001\u00a0"+ "\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a1\u0001\u00a1\u0001\u00a1"+ "\u0001\u00a1\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a3"+ - "\u0001\u00a3\u0001\u00a3\u0001\u00a3\u0001\u00a3\u0001\u00a4\u0001\u00a4"+ - "\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a5\u0001\u00a5\u0001\u00a5"+ + "\u0001\u00a3\u0001\u00a3\u0001\u00a3\u0001\u00a4\u0001\u00a4\u0001\u00a4"+ + "\u0001\u00a4\u0001\u00a4\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a5"+ "\u0001\u00a5\u0001\u00a6\u0001\u00a6\u0001\u00a6\u0001\u00a6\u0001\u00a7"+ "\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a8\u0001\u00a8\u0001\u00a8"+ - "\u0001\u00a8\u0001\u00a8\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00a9"+ - "\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00a9"+ + "\u0001\u00a8\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00a9"+ + "\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00aa"+ "\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00ab\u0001\u00ab"+ "\u0001\u00ab\u0001\u00ab\u0001\u00ac\u0001\u00ac\u0001\u00ac\u0001\u00ac"+ - "\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ae"+ - "\u0001\u00ae\u0001\u00af\u0001\u00af\u0001\u00af\u0001\u00af\u0001\u00af"+ - "\u0004\u00af\u054f\b\u00af\u000b\u00af\f\u00af\u0550\u0001\u00b0\u0001"+ - "\u00b0\u0001\u00b0\u0001\u00b0\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001"+ - "\u00b1\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b3\u0001"+ - "\u00b3\u0001\u00b3\u0001\u00b3\u0001\u00b3\u0001\u00b4\u0001\u00b4\u0001"+ - "\u00b4\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001\u00b5\u0001\u00b5\u0001"+ - "\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b6\u0001\u00b6\u0001"+ - "\u00b6\u0001\u00b6\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001"+ - "\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b9\u0001\u00b9\u0001"+ - "\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00ba\u0001\u00ba\u0001"+ - "\u00ba\u0001\u00ba\u0001\u00ba\u0001\u00ba\u0001\u00bb\u0001\u00bb\u0001"+ - "\u00bb\u0001\u00bb\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001"+ - "\u00bd\u0001\u00bd\u0001\u00bd\u0001\u00bd\u0001\u00be\u0001\u00be\u0001"+ - "\u00be\u0001\u00be\u0001\u00be\u0001\u00be\u0001\u00bf\u0001\u00bf\u0001"+ - "\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00c0\u0001\u00c0\u0001"+ - "\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c1\u0001\u00c1\u0001"+ - "\u00c1\u0001\u00c1\u0001\u00c1\u0002\u025c\u02c1\u0000\u00c2\u0010\u0001"+ - "\u0012\u0002\u0014\u0003\u0016\u0004\u0018\u0005\u001a\u0006\u001c\u0007"+ - "\u001e\b \t\"\n$\u000b&\f(\r*\u000e,\u000f.\u00100\u00112\u00124\u0013"+ - "6\u00148\u0015:\u0016<\u0017>\u0018@\u0000B\u0019D\u0000F\u0000H\u001a"+ - "J\u001bL\u001cN\u001dP\u0000R\u0000T\u0000V\u0000X\u0000Z\u0000\\\u0000"+ - "^\u0000`\u0000b\u0000d\u001ef\u001fh 
j!l\"n#p$r%t&v\'x(z)|*~+\u0080,\u0082"+ - "-\u0084.\u0086/\u00880\u008a1\u008c2\u008e3\u00904\u00925\u00946\u0096"+ - "7\u00988\u009a9\u009c:\u009e;\u00a0<\u00a2=\u00a4>\u00a6?\u00a8@\u00aa"+ - "A\u00acB\u00aeC\u00b0D\u00b2E\u00b4F\u00b6G\u00b8\u0000\u00baH\u00bcI"+ - "\u00beJ\u00c0K\u00c2\u0000\u00c4\u0000\u00c6\u0000\u00c8\u0000\u00ca\u0000"+ - "\u00cc\u0000\u00ceL\u00d0\u0000\u00d2\u0000\u00d4M\u00d6N\u00d8O\u00da"+ - "\u0000\u00dc\u0000\u00de\u0000\u00e0\u0000\u00e2\u0000\u00e4P\u00e6Q\u00e8"+ - "R\u00eaS\u00ec\u0000\u00ee\u0000\u00f0\u0000\u00f2\u0000\u00f4T\u00f6"+ - "\u0000\u00f8U\u00faV\u00fcW\u00fe\u0000\u0100\u0000\u0102X\u0104Y\u0106"+ - "\u0000\u0108Z\u010a\u0000\u010c[\u010e\\\u0110]\u0112\u0000\u0114\u0000"+ - "\u0116\u0000\u0118\u0000\u011a\u0000\u011c\u0000\u011e\u0000\u0120^\u0122"+ - "_\u0124`\u0126\u0000\u0128\u0000\u012a\u0000\u012c\u0000\u012e\u0000\u0130"+ - "\u0000\u0132\u0000\u0134a\u0136b\u0138c\u013a\u0000\u013c\u0000\u013e"+ - "\u0000\u0140\u0000\u0142d\u0144e\u0146f\u0148\u0000\u014a\u0000\u014c"+ - "\u0000\u014e\u0000\u0150g\u0152h\u0154i\u0156\u0000\u0158j\u015ak\u015c"+ - "l\u015em\u0160\u0000\u0162n\u0164o\u0166p\u0168q\u016a\u0000\u016cr\u016e"+ - "s\u0170t\u0172u\u0174v\u0176\u0000\u0178\u0000\u017a\u0000\u017cw\u017e"+ - "x\u0180y\u0182\u0000\u0184\u0000\u0186z\u0188{\u018a|\u018c\u0000\u018e"+ - "\u0000\u0190\u0000\u0192\u0000\u0010\u0000\u0001\u0002\u0003\u0004\u0005"+ - "\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f\r\u0006\u0000\t\n\r\r //[[]"+ - "]\u0002\u0000\n\n\r\r\u0003\u0000\t\n\r\r \u000b\u0000\t\n\r\r \"\""+ - ",,//::==[[]]||\u0002\u0000**//\u0001\u000009\u0002\u0000AZaz\u0005\u0000"+ - "\"\"\\\\nnrrtt\u0004\u0000\n\n\r\r\"\"\\\\\u0002\u0000EEee\u0002\u0000"+ - "++--\u0001\u0000``\u000b\u0000\t\n\r\r \"#,,//::<<>?\\\\||\u05c4\u0000"+ - "\u0010\u0001\u0000\u0000\u0000\u0000\u0012\u0001\u0000\u0000\u0000\u0000"+ - "\u0014\u0001\u0000\u0000\u0000\u0000\u0016\u0001\u0000\u0000\u0000\u0000"+ - "\u0018\u0001\u0000\u0000\u0000\u0000\u001a\u0001\u0000\u0000\u0000\u0000"+ - "\u001c\u0001\u0000\u0000\u0000\u0000\u001e\u0001\u0000\u0000\u0000\u0000"+ - " \u0001\u0000\u0000\u0000\u0000\"\u0001\u0000\u0000\u0000\u0000$\u0001"+ - "\u0000\u0000\u0000\u0000&\u0001\u0000\u0000\u0000\u0000(\u0001\u0000\u0000"+ - "\u0000\u0000*\u0001\u0000\u0000\u0000\u0000,\u0001\u0000\u0000\u0000\u0000"+ - ".\u0001\u0000\u0000\u0000\u00000\u0001\u0000\u0000\u0000\u00002\u0001"+ - "\u0000\u0000\u0000\u00004\u0001\u0000\u0000\u0000\u00006\u0001\u0000\u0000"+ - "\u0000\u00008\u0001\u0000\u0000\u0000\u0000:\u0001\u0000\u0000\u0000\u0000"+ - "<\u0001\u0000\u0000\u0000\u0000>\u0001\u0000\u0000\u0000\u0000B\u0001"+ - "\u0000\u0000\u0000\u0001D\u0001\u0000\u0000\u0000\u0001F\u0001\u0000\u0000"+ - "\u0000\u0001H\u0001\u0000\u0000\u0000\u0001J\u0001\u0000\u0000\u0000\u0001"+ - "L\u0001\u0000\u0000\u0000\u0002N\u0001\u0000\u0000\u0000\u0002d\u0001"+ - "\u0000\u0000\u0000\u0002f\u0001\u0000\u0000\u0000\u0002h\u0001\u0000\u0000"+ - "\u0000\u0002j\u0001\u0000\u0000\u0000\u0002l\u0001\u0000\u0000\u0000\u0002"+ - "n\u0001\u0000\u0000\u0000\u0002p\u0001\u0000\u0000\u0000\u0002r\u0001"+ - "\u0000\u0000\u0000\u0002t\u0001\u0000\u0000\u0000\u0002v\u0001\u0000\u0000"+ - "\u0000\u0002x\u0001\u0000\u0000\u0000\u0002z\u0001\u0000\u0000\u0000\u0002"+ - "|\u0001\u0000\u0000\u0000\u0002~\u0001\u0000\u0000\u0000\u0002\u0080\u0001"+ - "\u0000\u0000\u0000\u0002\u0082\u0001\u0000\u0000\u0000\u0002\u0084\u0001"+ - "\u0000\u0000\u0000\u0002\u0086\u0001\u0000\u0000\u0000\u0002\u0088\u0001"+ - 
"\u0000\u0000\u0000\u0002\u008a\u0001\u0000\u0000\u0000\u0002\u008c\u0001"+ - "\u0000\u0000\u0000\u0002\u008e\u0001\u0000\u0000\u0000\u0002\u0090\u0001"+ - "\u0000\u0000\u0000\u0002\u0092\u0001\u0000\u0000\u0000\u0002\u0094\u0001"+ - "\u0000\u0000\u0000\u0002\u0096\u0001\u0000\u0000\u0000\u0002\u0098\u0001"+ - "\u0000\u0000\u0000\u0002\u009a\u0001\u0000\u0000\u0000\u0002\u009c\u0001"+ - "\u0000\u0000\u0000\u0002\u009e\u0001\u0000\u0000\u0000\u0002\u00a0\u0001"+ - "\u0000\u0000\u0000\u0002\u00a2\u0001\u0000\u0000\u0000\u0002\u00a4\u0001"+ - "\u0000\u0000\u0000\u0002\u00a6\u0001\u0000\u0000\u0000\u0002\u00a8\u0001"+ - "\u0000\u0000\u0000\u0002\u00aa\u0001\u0000\u0000\u0000\u0002\u00ac\u0001"+ - "\u0000\u0000\u0000\u0002\u00ae\u0001\u0000\u0000\u0000\u0002\u00b0\u0001"+ - "\u0000\u0000\u0000\u0002\u00b2\u0001\u0000\u0000\u0000\u0002\u00b4\u0001"+ - "\u0000\u0000\u0000\u0002\u00b6\u0001\u0000\u0000\u0000\u0002\u00ba\u0001"+ - "\u0000\u0000\u0000\u0002\u00bc\u0001\u0000\u0000\u0000\u0002\u00be\u0001"+ - "\u0000\u0000\u0000\u0002\u00c0\u0001\u0000\u0000\u0000\u0003\u00c2\u0001"+ - "\u0000\u0000\u0000\u0003\u00c4\u0001\u0000\u0000\u0000\u0003\u00c6\u0001"+ - "\u0000\u0000\u0000\u0003\u00c8\u0001\u0000\u0000\u0000\u0003\u00ca\u0001"+ - "\u0000\u0000\u0000\u0003\u00cc\u0001\u0000\u0000\u0000\u0003\u00ce\u0001"+ - "\u0000\u0000\u0000\u0003\u00d0\u0001\u0000\u0000\u0000\u0003\u00d2\u0001"+ - "\u0000\u0000\u0000\u0003\u00d4\u0001\u0000\u0000\u0000\u0003\u00d6\u0001"+ - "\u0000\u0000\u0000\u0003\u00d8\u0001\u0000\u0000\u0000\u0004\u00da\u0001"+ - "\u0000\u0000\u0000\u0004\u00dc\u0001\u0000\u0000\u0000\u0004\u00de\u0001"+ - "\u0000\u0000\u0000\u0004\u00e4\u0001\u0000\u0000\u0000\u0004\u00e6\u0001"+ - "\u0000\u0000\u0000\u0004\u00e8\u0001\u0000\u0000\u0000\u0004\u00ea\u0001"+ - "\u0000\u0000\u0000\u0005\u00ec\u0001\u0000\u0000\u0000\u0005\u00ee\u0001"+ - "\u0000\u0000\u0000\u0005\u00f0\u0001\u0000\u0000\u0000\u0005\u00f2\u0001"+ - "\u0000\u0000\u0000\u0005\u00f4\u0001\u0000\u0000\u0000\u0005\u00f6\u0001"+ - "\u0000\u0000\u0000\u0005\u00f8\u0001\u0000\u0000\u0000\u0005\u00fa\u0001"+ - "\u0000\u0000\u0000\u0005\u00fc\u0001\u0000\u0000\u0000\u0006\u00fe\u0001"+ - "\u0000\u0000\u0000\u0006\u0100\u0001\u0000\u0000\u0000\u0006\u0102\u0001"+ - "\u0000\u0000\u0000\u0006\u0104\u0001\u0000\u0000\u0000\u0006\u0108\u0001"+ - "\u0000\u0000\u0000\u0006\u010a\u0001\u0000\u0000\u0000\u0006\u010c\u0001"+ - "\u0000\u0000\u0000\u0006\u010e\u0001\u0000\u0000\u0000\u0006\u0110\u0001"+ - "\u0000\u0000\u0000\u0007\u0112\u0001\u0000\u0000\u0000\u0007\u0114\u0001"+ - "\u0000\u0000\u0000\u0007\u0116\u0001\u0000\u0000\u0000\u0007\u0118\u0001"+ - "\u0000\u0000\u0000\u0007\u011a\u0001\u0000\u0000\u0000\u0007\u011c\u0001"+ - "\u0000\u0000\u0000\u0007\u011e\u0001\u0000\u0000\u0000\u0007\u0120\u0001"+ - "\u0000\u0000\u0000\u0007\u0122\u0001\u0000\u0000\u0000\u0007\u0124\u0001"+ - "\u0000\u0000\u0000\b\u0126\u0001\u0000\u0000\u0000\b\u0128\u0001\u0000"+ - "\u0000\u0000\b\u012a\u0001\u0000\u0000\u0000\b\u012c\u0001\u0000\u0000"+ - "\u0000\b\u012e\u0001\u0000\u0000\u0000\b\u0130\u0001\u0000\u0000\u0000"+ - "\b\u0132\u0001\u0000\u0000\u0000\b\u0134\u0001\u0000\u0000\u0000\b\u0136"+ - "\u0001\u0000\u0000\u0000\b\u0138\u0001\u0000\u0000\u0000\t\u013a\u0001"+ - "\u0000\u0000\u0000\t\u013c\u0001\u0000\u0000\u0000\t\u013e\u0001\u0000"+ - "\u0000\u0000\t\u0140\u0001\u0000\u0000\u0000\t\u0142\u0001\u0000\u0000"+ - "\u0000\t\u0144\u0001\u0000\u0000\u0000\t\u0146\u0001\u0000\u0000\u0000"+ - 
"\n\u0148\u0001\u0000\u0000\u0000\n\u014a\u0001\u0000\u0000\u0000\n\u014c"+ - "\u0001\u0000\u0000\u0000\n\u014e\u0001\u0000\u0000\u0000\n\u0150\u0001"+ - "\u0000\u0000\u0000\n\u0152\u0001\u0000\u0000\u0000\n\u0154\u0001\u0000"+ - "\u0000\u0000\u000b\u0156\u0001\u0000\u0000\u0000\u000b\u0158\u0001\u0000"+ - "\u0000\u0000\u000b\u015a\u0001\u0000\u0000\u0000\u000b\u015c\u0001\u0000"+ - "\u0000\u0000\u000b\u015e\u0001\u0000\u0000\u0000\f\u0160\u0001\u0000\u0000"+ - "\u0000\f\u0162\u0001\u0000\u0000\u0000\f\u0164\u0001\u0000\u0000\u0000"+ - "\f\u0166\u0001\u0000\u0000\u0000\f\u0168\u0001\u0000\u0000\u0000\r\u016a"+ - "\u0001\u0000\u0000\u0000\r\u016c\u0001\u0000\u0000\u0000\r\u016e\u0001"+ - "\u0000\u0000\u0000\r\u0170\u0001\u0000\u0000\u0000\r\u0172\u0001\u0000"+ - "\u0000\u0000\r\u0174\u0001\u0000\u0000\u0000\u000e\u0176\u0001\u0000\u0000"+ - "\u0000\u000e\u0178\u0001\u0000\u0000\u0000\u000e\u017a\u0001\u0000\u0000"+ - "\u0000\u000e\u017c\u0001\u0000\u0000\u0000\u000e\u017e\u0001\u0000\u0000"+ - "\u0000\u000e\u0180\u0001\u0000\u0000\u0000\u000f\u0182\u0001\u0000\u0000"+ - "\u0000\u000f\u0184\u0001\u0000\u0000\u0000\u000f\u0186\u0001\u0000\u0000"+ - "\u0000\u000f\u0188\u0001\u0000\u0000\u0000\u000f\u018a\u0001\u0000\u0000"+ - "\u0000\u000f\u018c\u0001\u0000\u0000\u0000\u000f\u018e\u0001\u0000\u0000"+ - "\u0000\u000f\u0190\u0001\u0000\u0000\u0000\u000f\u0192\u0001\u0000\u0000"+ - "\u0000\u0010\u0194\u0001\u0000\u0000\u0000\u0012\u019e\u0001\u0000\u0000"+ - "\u0000\u0014\u01a5\u0001\u0000\u0000\u0000\u0016\u01ae\u0001\u0000\u0000"+ - "\u0000\u0018\u01b5\u0001\u0000\u0000\u0000\u001a\u01bf\u0001\u0000\u0000"+ - "\u0000\u001c\u01c6\u0001\u0000\u0000\u0000\u001e\u01cd\u0001\u0000\u0000"+ - "\u0000 \u01db\u0001\u0000\u0000\u0000\"\u01e2\u0001\u0000\u0000\u0000"+ - "$\u01ea\u0001\u0000\u0000\u0000&\u01f3\u0001\u0000\u0000\u0000(\u01fa"+ - "\u0001\u0000\u0000\u0000*\u0204\u0001\u0000\u0000\u0000,\u0210\u0001\u0000"+ - "\u0000\u0000.\u0219\u0001\u0000\u0000\u00000\u021f\u0001\u0000\u0000\u0000"+ - "2\u0226\u0001\u0000\u0000\u00004\u022d\u0001\u0000\u0000\u00006\u0235"+ - "\u0001\u0000\u0000\u00008\u023e\u0001\u0000\u0000\u0000:\u0244\u0001\u0000"+ - "\u0000\u0000<\u0255\u0001\u0000\u0000\u0000>\u0265\u0001\u0000\u0000\u0000"+ - "@\u026e\u0001\u0000\u0000\u0000B\u0271\u0001\u0000\u0000\u0000D\u0275"+ - "\u0001\u0000\u0000\u0000F\u027a\u0001\u0000\u0000\u0000H\u027f\u0001\u0000"+ - "\u0000\u0000J\u0283\u0001\u0000\u0000\u0000L\u0287\u0001\u0000\u0000\u0000"+ - "N\u028b\u0001\u0000\u0000\u0000P\u028f\u0001\u0000\u0000\u0000R\u0291"+ - "\u0001\u0000\u0000\u0000T\u0293\u0001\u0000\u0000\u0000V\u0296\u0001\u0000"+ - "\u0000\u0000X\u0298\u0001\u0000\u0000\u0000Z\u02a1\u0001\u0000\u0000\u0000"+ - "\\\u02a3\u0001\u0000\u0000\u0000^\u02a8\u0001\u0000\u0000\u0000`\u02aa"+ - "\u0001\u0000\u0000\u0000b\u02af\u0001\u0000\u0000\u0000d\u02ce\u0001\u0000"+ - "\u0000\u0000f\u02d1\u0001\u0000\u0000\u0000h\u02ff\u0001\u0000\u0000\u0000"+ - "j\u0301\u0001\u0000\u0000\u0000l\u0304\u0001\u0000\u0000\u0000n\u0308"+ - "\u0001\u0000\u0000\u0000p\u030c\u0001\u0000\u0000\u0000r\u030e\u0001\u0000"+ - "\u0000\u0000t\u0311\u0001\u0000\u0000\u0000v\u0313\u0001\u0000\u0000\u0000"+ - "x\u0318\u0001\u0000\u0000\u0000z\u031a\u0001\u0000\u0000\u0000|\u0320"+ - "\u0001\u0000\u0000\u0000~\u0326\u0001\u0000\u0000\u0000\u0080\u032b\u0001"+ - "\u0000\u0000\u0000\u0082\u032d\u0001\u0000\u0000\u0000\u0084\u0330\u0001"+ - "\u0000\u0000\u0000\u0086\u0333\u0001\u0000\u0000\u0000\u0088\u0338\u0001"+ - 
"\u0000\u0000\u0000\u008a\u033c\u0001\u0000\u0000\u0000\u008c\u0341\u0001"+ - "\u0000\u0000\u0000\u008e\u0347\u0001\u0000\u0000\u0000\u0090\u034a\u0001"+ - "\u0000\u0000\u0000\u0092\u034c\u0001\u0000\u0000\u0000\u0094\u0352\u0001"+ - "\u0000\u0000\u0000\u0096\u0354\u0001\u0000\u0000\u0000\u0098\u0359\u0001"+ - "\u0000\u0000\u0000\u009a\u035c\u0001\u0000\u0000\u0000\u009c\u035f\u0001"+ - "\u0000\u0000\u0000\u009e\u0362\u0001\u0000\u0000\u0000\u00a0\u0364\u0001"+ - "\u0000\u0000\u0000\u00a2\u0367\u0001\u0000\u0000\u0000\u00a4\u0369\u0001"+ - "\u0000\u0000\u0000\u00a6\u036c\u0001\u0000\u0000\u0000\u00a8\u036e\u0001"+ - "\u0000\u0000\u0000\u00aa\u0370\u0001\u0000\u0000\u0000\u00ac\u0372\u0001"+ - "\u0000\u0000\u0000\u00ae\u0374\u0001\u0000\u0000\u0000\u00b0\u0384\u0001"+ - "\u0000\u0000\u0000\u00b2\u0386\u0001\u0000\u0000\u0000\u00b4\u038b\u0001"+ - "\u0000\u0000\u0000\u00b6\u03a0\u0001\u0000\u0000\u0000\u00b8\u03a2\u0001"+ - "\u0000\u0000\u0000\u00ba\u03aa\u0001\u0000\u0000\u0000\u00bc\u03ac\u0001"+ - "\u0000\u0000\u0000\u00be\u03b0\u0001\u0000\u0000\u0000\u00c0\u03b4\u0001"+ - "\u0000\u0000\u0000\u00c2\u03b8\u0001\u0000\u0000\u0000\u00c4\u03bd\u0001"+ - "\u0000\u0000\u0000\u00c6\u03c1\u0001\u0000\u0000\u0000\u00c8\u03c5\u0001"+ - "\u0000\u0000\u0000\u00ca\u03c9\u0001\u0000\u0000\u0000\u00cc\u03cd\u0001"+ - "\u0000\u0000\u0000\u00ce\u03d1\u0001\u0000\u0000\u0000\u00d0\u03da\u0001"+ - "\u0000\u0000\u0000\u00d2\u03de\u0001\u0000\u0000\u0000\u00d4\u03e2\u0001"+ - "\u0000\u0000\u0000\u00d6\u03e6\u0001\u0000\u0000\u0000\u00d8\u03ea\u0001"+ - "\u0000\u0000\u0000\u00da\u03ee\u0001\u0000\u0000\u0000\u00dc\u03f3\u0001"+ - "\u0000\u0000\u0000\u00de\u03f7\u0001\u0000\u0000\u0000\u00e0\u03ff\u0001"+ - "\u0000\u0000\u0000\u00e2\u0414\u0001\u0000\u0000\u0000\u00e4\u0418\u0001"+ - "\u0000\u0000\u0000\u00e6\u041c\u0001\u0000\u0000\u0000\u00e8\u0420\u0001"+ - "\u0000\u0000\u0000\u00ea\u0424\u0001\u0000\u0000\u0000\u00ec\u0428\u0001"+ - "\u0000\u0000\u0000\u00ee\u042d\u0001\u0000\u0000\u0000\u00f0\u0431\u0001"+ - "\u0000\u0000\u0000\u00f2\u0435\u0001\u0000\u0000\u0000\u00f4\u0439\u0001"+ - "\u0000\u0000\u0000\u00f6\u043c\u0001\u0000\u0000\u0000\u00f8\u0440\u0001"+ - "\u0000\u0000\u0000\u00fa\u0444\u0001\u0000\u0000\u0000\u00fc\u0448\u0001"+ - "\u0000\u0000\u0000\u00fe\u044c\u0001\u0000\u0000\u0000\u0100\u0451\u0001"+ - "\u0000\u0000\u0000\u0102\u0456\u0001\u0000\u0000\u0000\u0104\u045b\u0001"+ - "\u0000\u0000\u0000\u0106\u0462\u0001\u0000\u0000\u0000\u0108\u046b\u0001"+ - "\u0000\u0000\u0000\u010a\u0472\u0001\u0000\u0000\u0000\u010c\u0476\u0001"+ - "\u0000\u0000\u0000\u010e\u047a\u0001\u0000\u0000\u0000\u0110\u047e\u0001"+ - "\u0000\u0000\u0000\u0112\u0482\u0001\u0000\u0000\u0000\u0114\u0488\u0001"+ - "\u0000\u0000\u0000\u0116\u048c\u0001\u0000\u0000\u0000\u0118\u0490\u0001"+ - "\u0000\u0000\u0000\u011a\u0494\u0001\u0000\u0000\u0000\u011c\u0498\u0001"+ - "\u0000\u0000\u0000\u011e\u049c\u0001\u0000\u0000\u0000\u0120\u04a0\u0001"+ - "\u0000\u0000\u0000\u0122\u04a4\u0001\u0000\u0000\u0000\u0124\u04a8\u0001"+ - "\u0000\u0000\u0000\u0126\u04ac\u0001\u0000\u0000\u0000\u0128\u04b1\u0001"+ - "\u0000\u0000\u0000\u012a\u04b5\u0001\u0000\u0000\u0000\u012c\u04b9\u0001"+ - "\u0000\u0000\u0000\u012e\u04bd\u0001\u0000\u0000\u0000\u0130\u04c2\u0001"+ - "\u0000\u0000\u0000\u0132\u04c6\u0001\u0000\u0000\u0000\u0134\u04ca\u0001"+ - "\u0000\u0000\u0000\u0136\u04ce\u0001\u0000\u0000\u0000\u0138\u04d2\u0001"+ - "\u0000\u0000\u0000\u013a\u04d6\u0001\u0000\u0000\u0000\u013c\u04dc\u0001"+ - 
"\u0000\u0000\u0000\u013e\u04e0\u0001\u0000\u0000\u0000\u0140\u04e4\u0001"+ - "\u0000\u0000\u0000\u0142\u04e8\u0001\u0000\u0000\u0000\u0144\u04ec\u0001"+ - "\u0000\u0000\u0000\u0146\u04f0\u0001\u0000\u0000\u0000\u0148\u04f4\u0001"+ - "\u0000\u0000\u0000\u014a\u04f9\u0001\u0000\u0000\u0000\u014c\u04fd\u0001"+ - "\u0000\u0000\u0000\u014e\u0501\u0001\u0000\u0000\u0000\u0150\u0505\u0001"+ - "\u0000\u0000\u0000\u0152\u0509\u0001\u0000\u0000\u0000\u0154\u050d\u0001"+ - "\u0000\u0000\u0000\u0156\u0511\u0001\u0000\u0000\u0000\u0158\u0516\u0001"+ - "\u0000\u0000\u0000\u015a\u051b\u0001\u0000\u0000\u0000\u015c\u051f\u0001"+ - "\u0000\u0000\u0000\u015e\u0523\u0001\u0000\u0000\u0000\u0160\u0527\u0001"+ - "\u0000\u0000\u0000\u0162\u052c\u0001\u0000\u0000\u0000\u0164\u0536\u0001"+ - "\u0000\u0000\u0000\u0166\u053a\u0001\u0000\u0000\u0000\u0168\u053e\u0001"+ - "\u0000\u0000\u0000\u016a\u0542\u0001\u0000\u0000\u0000\u016c\u0547\u0001"+ - "\u0000\u0000\u0000\u016e\u054e\u0001\u0000\u0000\u0000\u0170\u0552\u0001"+ - "\u0000\u0000\u0000\u0172\u0556\u0001\u0000\u0000\u0000\u0174\u055a\u0001"+ - "\u0000\u0000\u0000\u0176\u055e\u0001\u0000\u0000\u0000\u0178\u0563\u0001"+ - "\u0000\u0000\u0000\u017a\u0569\u0001\u0000\u0000\u0000\u017c\u056f\u0001"+ - "\u0000\u0000\u0000\u017e\u0573\u0001\u0000\u0000\u0000\u0180\u0577\u0001"+ - "\u0000\u0000\u0000\u0182\u057b\u0001\u0000\u0000\u0000\u0184\u0581\u0001"+ - "\u0000\u0000\u0000\u0186\u0587\u0001\u0000\u0000\u0000\u0188\u058b\u0001"+ - "\u0000\u0000\u0000\u018a\u058f\u0001\u0000\u0000\u0000\u018c\u0593\u0001"+ - "\u0000\u0000\u0000\u018e\u0599\u0001\u0000\u0000\u0000\u0190\u059f\u0001"+ - "\u0000\u0000\u0000\u0192\u05a5\u0001\u0000\u0000\u0000\u0194\u0195\u0005"+ - "d\u0000\u0000\u0195\u0196\u0005i\u0000\u0000\u0196\u0197\u0005s\u0000"+ - "\u0000\u0197\u0198\u0005s\u0000\u0000\u0198\u0199\u0005e\u0000\u0000\u0199"+ - "\u019a\u0005c\u0000\u0000\u019a\u019b\u0005t\u0000\u0000\u019b\u019c\u0001"+ - "\u0000\u0000\u0000\u019c\u019d\u0006\u0000\u0000\u0000\u019d\u0011\u0001"+ - "\u0000\u0000\u0000\u019e\u019f\u0005d\u0000\u0000\u019f\u01a0\u0005r\u0000"+ - "\u0000\u01a0\u01a1\u0005o\u0000\u0000\u01a1\u01a2\u0005p\u0000\u0000\u01a2"+ - "\u01a3\u0001\u0000\u0000\u0000\u01a3\u01a4\u0006\u0001\u0001\u0000\u01a4"+ - "\u0013\u0001\u0000\u0000\u0000\u01a5\u01a6\u0005e\u0000\u0000\u01a6\u01a7"+ - "\u0005n\u0000\u0000\u01a7\u01a8\u0005r\u0000\u0000\u01a8\u01a9\u0005i"+ - "\u0000\u0000\u01a9\u01aa\u0005c\u0000\u0000\u01aa\u01ab\u0005h\u0000\u0000"+ - "\u01ab\u01ac\u0001\u0000\u0000\u0000\u01ac\u01ad\u0006\u0002\u0002\u0000"+ - "\u01ad\u0015\u0001\u0000\u0000\u0000\u01ae\u01af\u0005e\u0000\u0000\u01af"+ - "\u01b0\u0005v\u0000\u0000\u01b0\u01b1\u0005a\u0000\u0000\u01b1\u01b2\u0005"+ - "l\u0000\u0000\u01b2\u01b3\u0001\u0000\u0000\u0000\u01b3\u01b4\u0006\u0003"+ - "\u0000\u0000\u01b4\u0017\u0001\u0000\u0000\u0000\u01b5\u01b6\u0005e\u0000"+ - "\u0000\u01b6\u01b7\u0005x\u0000\u0000\u01b7\u01b8\u0005p\u0000\u0000\u01b8"+ - "\u01b9\u0005l\u0000\u0000\u01b9\u01ba\u0005a\u0000\u0000\u01ba\u01bb\u0005"+ - "i\u0000\u0000\u01bb\u01bc\u0005n\u0000\u0000\u01bc\u01bd\u0001\u0000\u0000"+ - "\u0000\u01bd\u01be\u0006\u0004\u0003\u0000\u01be\u0019\u0001\u0000\u0000"+ - "\u0000\u01bf\u01c0\u0005f\u0000\u0000\u01c0\u01c1\u0005r\u0000\u0000\u01c1"+ - "\u01c2\u0005o\u0000\u0000\u01c2\u01c3\u0005m\u0000\u0000\u01c3\u01c4\u0001"+ - "\u0000\u0000\u0000\u01c4\u01c5\u0006\u0005\u0004\u0000\u01c5\u001b\u0001"+ - "\u0000\u0000\u0000\u01c6\u01c7\u0005g\u0000\u0000\u01c7\u01c8\u0005r\u0000"+ - 
"\u0000\u01c8\u01c9\u0005o\u0000\u0000\u01c9\u01ca\u0005k\u0000\u0000\u01ca"+ - "\u01cb\u0001\u0000\u0000\u0000\u01cb\u01cc\u0006\u0006\u0000\u0000\u01cc"+ - "\u001d\u0001\u0000\u0000\u0000\u01cd\u01ce\u0005i\u0000\u0000\u01ce\u01cf"+ - "\u0005n\u0000\u0000\u01cf\u01d0\u0005l\u0000\u0000\u01d0\u01d1\u0005i"+ - "\u0000\u0000\u01d1\u01d2\u0005n\u0000\u0000\u01d2\u01d3\u0005e\u0000\u0000"+ - "\u01d3\u01d4\u0005s\u0000\u0000\u01d4\u01d5\u0005t\u0000\u0000\u01d5\u01d6"+ - "\u0005a\u0000\u0000\u01d6\u01d7\u0005t\u0000\u0000\u01d7\u01d8\u0005s"+ - "\u0000\u0000\u01d8\u01d9\u0001\u0000\u0000\u0000\u01d9\u01da\u0006\u0007"+ - "\u0000\u0000\u01da\u001f\u0001\u0000\u0000\u0000\u01db\u01dc\u0005k\u0000"+ - "\u0000\u01dc\u01dd\u0005e\u0000\u0000\u01dd\u01de\u0005e\u0000\u0000\u01de"+ - "\u01df\u0005p\u0000\u0000\u01df\u01e0\u0001\u0000\u0000\u0000\u01e0\u01e1"+ - "\u0006\b\u0001\u0000\u01e1!\u0001\u0000\u0000\u0000\u01e2\u01e3\u0005"+ - "l\u0000\u0000\u01e3\u01e4\u0005i\u0000\u0000\u01e4\u01e5\u0005m\u0000"+ - "\u0000\u01e5\u01e6\u0005i\u0000\u0000\u01e6\u01e7\u0005t\u0000\u0000\u01e7"+ - "\u01e8\u0001\u0000\u0000\u0000\u01e8\u01e9\u0006\t\u0000\u0000\u01e9#"+ - "\u0001\u0000\u0000\u0000\u01ea\u01eb\u0005l\u0000\u0000\u01eb\u01ec\u0005"+ - "o\u0000\u0000\u01ec\u01ed\u0005o\u0000\u0000\u01ed\u01ee\u0005k\u0000"+ - "\u0000\u01ee\u01ef\u0005u\u0000\u0000\u01ef\u01f0\u0005p\u0000\u0000\u01f0"+ - "\u01f1\u0001\u0000\u0000\u0000\u01f1\u01f2\u0006\n\u0005\u0000\u01f2%"+ - "\u0001\u0000\u0000\u0000\u01f3\u01f4\u0005m\u0000\u0000\u01f4\u01f5\u0005"+ - "e\u0000\u0000\u01f5\u01f6\u0005t\u0000\u0000\u01f6\u01f7\u0005a\u0000"+ - "\u0000\u01f7\u01f8\u0001\u0000\u0000\u0000\u01f8\u01f9\u0006\u000b\u0006"+ - "\u0000\u01f9\'\u0001\u0000\u0000\u0000\u01fa\u01fb\u0005m\u0000\u0000"+ - "\u01fb\u01fc\u0005e\u0000\u0000\u01fc\u01fd\u0005t\u0000\u0000\u01fd\u01fe"+ - "\u0005r\u0000\u0000\u01fe\u01ff\u0005i\u0000\u0000\u01ff\u0200\u0005c"+ - "\u0000\u0000\u0200\u0201\u0005s\u0000\u0000\u0201\u0202\u0001\u0000\u0000"+ - "\u0000\u0202\u0203\u0006\f\u0007\u0000\u0203)\u0001\u0000\u0000\u0000"+ - "\u0204\u0205\u0005m\u0000\u0000\u0205\u0206\u0005v\u0000\u0000\u0206\u0207"+ - "\u0005_\u0000\u0000\u0207\u0208\u0005e\u0000\u0000\u0208\u0209\u0005x"+ - "\u0000\u0000\u0209\u020a\u0005p\u0000\u0000\u020a\u020b\u0005a\u0000\u0000"+ - "\u020b\u020c\u0005n\u0000\u0000\u020c\u020d\u0005d\u0000\u0000\u020d\u020e"+ - "\u0001\u0000\u0000\u0000\u020e\u020f\u0006\r\b\u0000\u020f+\u0001\u0000"+ - "\u0000\u0000\u0210\u0211\u0005r\u0000\u0000\u0211\u0212\u0005e\u0000\u0000"+ - "\u0212\u0213\u0005n\u0000\u0000\u0213\u0214\u0005a\u0000\u0000\u0214\u0215"+ - "\u0005m\u0000\u0000\u0215\u0216\u0005e\u0000\u0000\u0216\u0217\u0001\u0000"+ - "\u0000\u0000\u0217\u0218\u0006\u000e\t\u0000\u0218-\u0001\u0000\u0000"+ - "\u0000\u0219\u021a\u0005r\u0000\u0000\u021a\u021b\u0005o\u0000\u0000\u021b"+ - "\u021c\u0005w\u0000\u0000\u021c\u021d\u0001\u0000\u0000\u0000\u021d\u021e"+ - "\u0006\u000f\u0000\u0000\u021e/\u0001\u0000\u0000\u0000\u021f\u0220\u0005"+ - "s\u0000\u0000\u0220\u0221\u0005h\u0000\u0000\u0221\u0222\u0005o\u0000"+ - "\u0000\u0222\u0223\u0005w\u0000\u0000\u0223\u0224\u0001\u0000\u0000\u0000"+ - "\u0224\u0225\u0006\u0010\n\u0000\u02251\u0001\u0000\u0000\u0000\u0226"+ - "\u0227\u0005s\u0000\u0000\u0227\u0228\u0005o\u0000\u0000\u0228\u0229\u0005"+ - "r\u0000\u0000\u0229\u022a\u0005t\u0000\u0000\u022a\u022b\u0001\u0000\u0000"+ - "\u0000\u022b\u022c\u0006\u0011\u0000\u0000\u022c3\u0001\u0000\u0000\u0000"+ - 
"\u022d\u022e\u0005s\u0000\u0000\u022e\u022f\u0005t\u0000\u0000\u022f\u0230"+ - "\u0005a\u0000\u0000\u0230\u0231\u0005t\u0000\u0000\u0231\u0232\u0005s"+ - "\u0000\u0000\u0232\u0233\u0001\u0000\u0000\u0000\u0233\u0234\u0006\u0012"+ - "\u0000\u0000\u02345\u0001\u0000\u0000\u0000\u0235\u0236\u0005w\u0000\u0000"+ - "\u0236\u0237\u0005h\u0000\u0000\u0237\u0238\u0005e\u0000\u0000\u0238\u0239"+ - "\u0005r\u0000\u0000\u0239\u023a\u0005e\u0000\u0000\u023a\u023b\u0001\u0000"+ - "\u0000\u0000\u023b\u023c\u0006\u0013\u0000\u0000\u023c7\u0001\u0000\u0000"+ - "\u0000\u023d\u023f\b\u0000\u0000\u0000\u023e\u023d\u0001\u0000\u0000\u0000"+ - "\u023f\u0240\u0001\u0000\u0000\u0000\u0240\u023e\u0001\u0000\u0000\u0000"+ - "\u0240\u0241\u0001\u0000\u0000\u0000\u0241\u0242\u0001\u0000\u0000\u0000"+ - "\u0242\u0243\u0006\u0014\u0000\u0000\u02439\u0001\u0000\u0000\u0000\u0244"+ - "\u0245\u0005/\u0000\u0000\u0245\u0246\u0005/\u0000\u0000\u0246\u024a\u0001"+ - "\u0000\u0000\u0000\u0247\u0249\b\u0001\u0000\u0000\u0248\u0247\u0001\u0000"+ - "\u0000\u0000\u0249\u024c\u0001\u0000\u0000\u0000\u024a\u0248\u0001\u0000"+ - "\u0000\u0000\u024a\u024b\u0001\u0000\u0000\u0000\u024b\u024e\u0001\u0000"+ - "\u0000\u0000\u024c\u024a\u0001\u0000\u0000\u0000\u024d\u024f\u0005\r\u0000"+ - "\u0000\u024e\u024d\u0001\u0000\u0000\u0000\u024e\u024f\u0001\u0000\u0000"+ - "\u0000\u024f\u0251\u0001\u0000\u0000\u0000\u0250\u0252\u0005\n\u0000\u0000"+ - "\u0251\u0250\u0001\u0000\u0000\u0000\u0251\u0252\u0001\u0000\u0000\u0000"+ - "\u0252\u0253\u0001\u0000\u0000\u0000\u0253\u0254\u0006\u0015\u000b\u0000"+ - "\u0254;\u0001\u0000\u0000\u0000\u0255\u0256\u0005/\u0000\u0000\u0256\u0257"+ - "\u0005*\u0000\u0000\u0257\u025c\u0001\u0000\u0000\u0000\u0258\u025b\u0003"+ - "<\u0016\u0000\u0259\u025b\t\u0000\u0000\u0000\u025a\u0258\u0001\u0000"+ - "\u0000\u0000\u025a\u0259\u0001\u0000\u0000\u0000\u025b\u025e\u0001\u0000"+ - "\u0000\u0000\u025c\u025d\u0001\u0000\u0000\u0000\u025c\u025a\u0001\u0000"+ - "\u0000\u0000\u025d\u025f\u0001\u0000\u0000\u0000\u025e\u025c\u0001\u0000"+ - "\u0000\u0000\u025f\u0260\u0005*\u0000\u0000\u0260\u0261\u0005/\u0000\u0000"+ - "\u0261\u0262\u0001\u0000\u0000\u0000\u0262\u0263\u0006\u0016\u000b\u0000"+ - "\u0263=\u0001\u0000\u0000\u0000\u0264\u0266\u0007\u0002\u0000\u0000\u0265"+ - "\u0264\u0001\u0000\u0000\u0000\u0266\u0267\u0001\u0000\u0000\u0000\u0267"+ - "\u0265\u0001\u0000\u0000\u0000\u0267\u0268\u0001\u0000\u0000\u0000\u0268"+ - "\u0269\u0001\u0000\u0000\u0000\u0269\u026a\u0006\u0017\u000b\u0000\u026a"+ - "?\u0001\u0000\u0000\u0000\u026b\u026f\b\u0003\u0000\u0000\u026c\u026d"+ - "\u0005/\u0000\u0000\u026d\u026f\b\u0004\u0000\u0000\u026e\u026b\u0001"+ - "\u0000\u0000\u0000\u026e\u026c\u0001\u0000\u0000\u0000\u026fA\u0001\u0000"+ - "\u0000\u0000\u0270\u0272\u0003@\u0018\u0000\u0271\u0270\u0001\u0000\u0000"+ - "\u0000\u0272\u0273\u0001\u0000\u0000\u0000\u0273\u0271\u0001\u0000\u0000"+ - "\u0000\u0273\u0274\u0001\u0000\u0000\u0000\u0274C\u0001\u0000\u0000\u0000"+ - "\u0275\u0276\u0003\u00b2Q\u0000\u0276\u0277\u0001\u0000\u0000\u0000\u0277"+ - "\u0278\u0006\u001a\f\u0000\u0278\u0279\u0006\u001a\r\u0000\u0279E\u0001"+ - "\u0000\u0000\u0000\u027a\u027b\u0003N\u001f\u0000\u027b\u027c\u0001\u0000"+ - "\u0000\u0000\u027c\u027d\u0006\u001b\u000e\u0000\u027d\u027e\u0006\u001b"+ - "\u000f\u0000\u027eG\u0001\u0000\u0000\u0000\u027f\u0280\u0003>\u0017\u0000"+ - "\u0280\u0281\u0001\u0000\u0000\u0000\u0281\u0282\u0006\u001c\u000b\u0000"+ - "\u0282I\u0001\u0000\u0000\u0000\u0283\u0284\u0003:\u0015\u0000\u0284\u0285"+ - 
"\u0001\u0000\u0000\u0000\u0285\u0286\u0006\u001d\u000b\u0000\u0286K\u0001"+ - "\u0000\u0000\u0000\u0287\u0288\u0003<\u0016\u0000\u0288\u0289\u0001\u0000"+ - "\u0000\u0000\u0289\u028a\u0006\u001e\u000b\u0000\u028aM\u0001\u0000\u0000"+ - "\u0000\u028b\u028c\u0005|\u0000\u0000\u028c\u028d\u0001\u0000\u0000\u0000"+ - "\u028d\u028e\u0006\u001f\u000f\u0000\u028eO\u0001\u0000\u0000\u0000\u028f"+ - "\u0290\u0007\u0005\u0000\u0000\u0290Q\u0001\u0000\u0000\u0000\u0291\u0292"+ - "\u0007\u0006\u0000\u0000\u0292S\u0001\u0000\u0000\u0000\u0293\u0294\u0005"+ - "\\\u0000\u0000\u0294\u0295\u0007\u0007\u0000\u0000\u0295U\u0001\u0000"+ - "\u0000\u0000\u0296\u0297\b\b\u0000\u0000\u0297W\u0001\u0000\u0000\u0000"+ - "\u0298\u029a\u0007\t\u0000\u0000\u0299\u029b\u0007\n\u0000\u0000\u029a"+ - "\u0299\u0001\u0000\u0000\u0000\u029a\u029b\u0001\u0000\u0000\u0000\u029b"+ - "\u029d\u0001\u0000\u0000\u0000\u029c\u029e\u0003P \u0000\u029d\u029c\u0001"+ - "\u0000\u0000\u0000\u029e\u029f\u0001\u0000\u0000\u0000\u029f\u029d\u0001"+ - "\u0000\u0000\u0000\u029f\u02a0\u0001\u0000\u0000\u0000\u02a0Y\u0001\u0000"+ - "\u0000\u0000\u02a1\u02a2\u0005@\u0000\u0000\u02a2[\u0001\u0000\u0000\u0000"+ - "\u02a3\u02a4\u0005`\u0000\u0000\u02a4]\u0001\u0000\u0000\u0000\u02a5\u02a9"+ - "\b\u000b\u0000\u0000\u02a6\u02a7\u0005`\u0000\u0000\u02a7\u02a9\u0005"+ - "`\u0000\u0000\u02a8\u02a5\u0001\u0000\u0000\u0000\u02a8\u02a6\u0001\u0000"+ - "\u0000\u0000\u02a9_\u0001\u0000\u0000\u0000\u02aa\u02ab\u0005_\u0000\u0000"+ - "\u02aba\u0001\u0000\u0000\u0000\u02ac\u02b0\u0003R!\u0000\u02ad\u02b0"+ - "\u0003P \u0000\u02ae\u02b0\u0003`(\u0000\u02af\u02ac\u0001\u0000\u0000"+ - "\u0000\u02af\u02ad\u0001\u0000\u0000\u0000\u02af\u02ae\u0001\u0000\u0000"+ - "\u0000\u02b0c\u0001\u0000\u0000\u0000\u02b1\u02b6\u0005\"\u0000\u0000"+ - "\u02b2\u02b5\u0003T\"\u0000\u02b3\u02b5\u0003V#\u0000\u02b4\u02b2\u0001"+ - "\u0000\u0000\u0000\u02b4\u02b3\u0001\u0000\u0000\u0000\u02b5\u02b8\u0001"+ - "\u0000\u0000\u0000\u02b6\u02b4\u0001\u0000\u0000\u0000\u02b6\u02b7\u0001"+ - "\u0000\u0000\u0000\u02b7\u02b9\u0001\u0000\u0000\u0000\u02b8\u02b6\u0001"+ - "\u0000\u0000\u0000\u02b9\u02cf\u0005\"\u0000\u0000\u02ba\u02bb\u0005\""+ - "\u0000\u0000\u02bb\u02bc\u0005\"\u0000\u0000\u02bc\u02bd\u0005\"\u0000"+ - "\u0000\u02bd\u02c1\u0001\u0000\u0000\u0000\u02be\u02c0\b\u0001\u0000\u0000"+ - "\u02bf\u02be\u0001\u0000\u0000\u0000\u02c0\u02c3\u0001\u0000\u0000\u0000"+ - "\u02c1\u02c2\u0001\u0000\u0000\u0000\u02c1\u02bf\u0001\u0000\u0000\u0000"+ - "\u02c2\u02c4\u0001\u0000\u0000\u0000\u02c3\u02c1\u0001\u0000\u0000\u0000"+ - "\u02c4\u02c5\u0005\"\u0000\u0000\u02c5\u02c6\u0005\"\u0000\u0000\u02c6"+ - "\u02c7\u0005\"\u0000\u0000\u02c7\u02c9\u0001\u0000\u0000\u0000\u02c8\u02ca"+ - "\u0005\"\u0000\u0000\u02c9\u02c8\u0001\u0000\u0000\u0000\u02c9\u02ca\u0001"+ - "\u0000\u0000\u0000\u02ca\u02cc\u0001\u0000\u0000\u0000\u02cb\u02cd\u0005"+ - "\"\u0000\u0000\u02cc\u02cb\u0001\u0000\u0000\u0000\u02cc\u02cd\u0001\u0000"+ - "\u0000\u0000\u02cd\u02cf\u0001\u0000\u0000\u0000\u02ce\u02b1\u0001\u0000"+ - "\u0000\u0000\u02ce\u02ba\u0001\u0000\u0000\u0000\u02cfe\u0001\u0000\u0000"+ - "\u0000\u02d0\u02d2\u0003P \u0000\u02d1\u02d0\u0001\u0000\u0000\u0000\u02d2"+ - "\u02d3\u0001\u0000\u0000\u0000\u02d3\u02d1\u0001\u0000\u0000\u0000\u02d3"+ - "\u02d4\u0001\u0000\u0000\u0000\u02d4g\u0001\u0000\u0000\u0000\u02d5\u02d7"+ - "\u0003P \u0000\u02d6\u02d5\u0001\u0000\u0000\u0000\u02d7\u02d8\u0001\u0000"+ - "\u0000\u0000\u02d8\u02d6\u0001\u0000\u0000\u0000\u02d8\u02d9\u0001\u0000"+ - 
"\u0000\u0000\u02d9\u02da\u0001\u0000\u0000\u0000\u02da\u02de\u0003x4\u0000"+ - "\u02db\u02dd\u0003P \u0000\u02dc\u02db\u0001\u0000\u0000\u0000\u02dd\u02e0"+ - "\u0001\u0000\u0000\u0000\u02de\u02dc\u0001\u0000\u0000\u0000\u02de\u02df"+ - "\u0001\u0000\u0000\u0000\u02df\u0300\u0001\u0000\u0000\u0000\u02e0\u02de"+ - "\u0001\u0000\u0000\u0000\u02e1\u02e3\u0003x4\u0000\u02e2\u02e4\u0003P"+ - " \u0000\u02e3\u02e2\u0001\u0000\u0000\u0000\u02e4\u02e5\u0001\u0000\u0000"+ - "\u0000\u02e5\u02e3\u0001\u0000\u0000\u0000\u02e5\u02e6\u0001\u0000\u0000"+ - "\u0000\u02e6\u0300\u0001\u0000\u0000\u0000\u02e7\u02e9\u0003P \u0000\u02e8"+ - "\u02e7\u0001\u0000\u0000\u0000\u02e9\u02ea\u0001\u0000\u0000\u0000\u02ea"+ - "\u02e8\u0001\u0000\u0000\u0000\u02ea\u02eb\u0001\u0000\u0000\u0000\u02eb"+ - "\u02f3\u0001\u0000\u0000\u0000\u02ec\u02f0\u0003x4\u0000\u02ed\u02ef\u0003"+ - "P \u0000\u02ee\u02ed\u0001\u0000\u0000\u0000\u02ef\u02f2\u0001\u0000\u0000"+ - "\u0000\u02f0\u02ee\u0001\u0000\u0000\u0000\u02f0\u02f1\u0001\u0000\u0000"+ + "\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ae\u0001\u00ae"+ + "\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00af\u0001\u00af\u0001\u00b0"+ + "\u0001\u00b0\u0001\u00b0\u0001\u00b0\u0001\u00b0\u0004\u00b0\u0557\b\u00b0"+ + "\u000b\u00b0\f\u00b0\u0558\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b1"+ + "\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b3\u0001\u00b3"+ + "\u0001\u00b3\u0001\u00b3\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001\u00b4"+ + "\u0001\u00b4\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b5"+ + "\u0001\u00b5\u0001\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b6"+ + "\u0001\u00b6\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001\u00b8"+ + "\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b9\u0001\u00b9\u0001\u00b9"+ + "\u0001\u00b9\u0001\u00ba\u0001\u00ba\u0001\u00ba\u0001\u00ba\u0001\u00ba"+ + "\u0001\u00ba\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bb"+ + "\u0001\u00bb\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bd"+ + "\u0001\u00bd\u0001\u00bd\u0001\u00bd\u0001\u00be\u0001\u00be\u0001\u00be"+ + "\u0001\u00be\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf"+ + "\u0001\u00bf\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c0"+ + "\u0001\u00c0\u0001\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c1"+ + "\u0001\u00c1\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c2"+ + "\u0002\u025e\u02c3\u0000\u00c3\u0010\u0001\u0012\u0002\u0014\u0003\u0016"+ + "\u0004\u0018\u0005\u001a\u0006\u001c\u0007\u001e\b \t\"\n$\u000b&\f(\r"+ + "*\u000e,\u000f.\u00100\u00112\u00124\u00136\u00148\u0015:\u0016<\u0017"+ + ">\u0018@\u0000B\u0019D\u0000F\u0000H\u001aJ\u001bL\u001cN\u001dP\u0000"+ + "R\u0000T\u0000V\u0000X\u0000Z\u0000\\\u0000^\u0000`\u0000b\u0000d\u001e"+ + "f\u001fh j!l\"n#p$r%t&v\'x(z)|*~+\u0080,\u0082-\u0084.\u0086/\u00880\u008a"+ + "1\u008c2\u008e3\u00904\u00925\u00946\u00967\u00988\u009a9\u009c:\u009e"+ + ";\u00a0<\u00a2=\u00a4>\u00a6?\u00a8@\u00aaA\u00acB\u00aeC\u00b0D\u00b2"+ + "E\u00b4F\u00b6G\u00b8H\u00ba\u0000\u00bcI\u00beJ\u00c0K\u00c2L\u00c4\u0000"+ + "\u00c6\u0000\u00c8\u0000\u00ca\u0000\u00cc\u0000\u00ce\u0000\u00d0M\u00d2"+ + "\u0000\u00d4\u0000\u00d6N\u00d8O\u00daP\u00dc\u0000\u00de\u0000\u00e0"+ + "\u0000\u00e2\u0000\u00e4\u0000\u00e6Q\u00e8R\u00eaS\u00ecT\u00ee\u0000"+ + "\u00f0\u0000\u00f2\u0000\u00f4\u0000\u00f6U\u00f8\u0000\u00faV\u00fcW"+ + "\u00feX\u0100\u0000\u0102\u0000\u0104Y\u0106Z\u0108\u0000\u010a[\u010c"+ + 
"\u0000\u010e\\\u0110]\u0112^\u0114\u0000\u0116\u0000\u0118\u0000\u011a"+ + "\u0000\u011c\u0000\u011e\u0000\u0120\u0000\u0122_\u0124`\u0126a\u0128"+ + "\u0000\u012a\u0000\u012c\u0000\u012e\u0000\u0130\u0000\u0132\u0000\u0134"+ + "\u0000\u0136b\u0138c\u013ad\u013c\u0000\u013e\u0000\u0140\u0000\u0142"+ + "\u0000\u0144e\u0146f\u0148g\u014a\u0000\u014c\u0000\u014e\u0000\u0150"+ + "\u0000\u0152h\u0154i\u0156j\u0158\u0000\u015ak\u015cl\u015em\u0160n\u0162"+ + "\u0000\u0164o\u0166p\u0168q\u016ar\u016c\u0000\u016es\u0170t\u0172u\u0174"+ + "v\u0176w\u0178\u0000\u017a\u0000\u017c\u0000\u017ex\u0180y\u0182z\u0184"+ + "\u0000\u0186\u0000\u0188{\u018a|\u018c}\u018e\u0000\u0190\u0000\u0192"+ + "\u0000\u0194\u0000\u0010\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007"+ + "\b\t\n\u000b\f\r\u000e\u000f\r\u0006\u0000\t\n\r\r //[[]]\u0002\u0000"+ + "\n\n\r\r\u0003\u0000\t\n\r\r \u000b\u0000\t\n\r\r \"\",,//::==[[]]|"+ + "|\u0002\u0000**//\u0001\u000009\u0002\u0000AZaz\u0005\u0000\"\"\\\\nn"+ + "rrtt\u0004\u0000\n\n\r\r\"\"\\\\\u0002\u0000EEee\u0002\u0000++--\u0001"+ + "\u0000``\u000b\u0000\t\n\r\r \"#,,//::<<>?\\\\||\u05cc\u0000\u0010\u0001"+ + "\u0000\u0000\u0000\u0000\u0012\u0001\u0000\u0000\u0000\u0000\u0014\u0001"+ + "\u0000\u0000\u0000\u0000\u0016\u0001\u0000\u0000\u0000\u0000\u0018\u0001"+ + "\u0000\u0000\u0000\u0000\u001a\u0001\u0000\u0000\u0000\u0000\u001c\u0001"+ + "\u0000\u0000\u0000\u0000\u001e\u0001\u0000\u0000\u0000\u0000 \u0001\u0000"+ + "\u0000\u0000\u0000\"\u0001\u0000\u0000\u0000\u0000$\u0001\u0000\u0000"+ + "\u0000\u0000&\u0001\u0000\u0000\u0000\u0000(\u0001\u0000\u0000\u0000\u0000"+ + "*\u0001\u0000\u0000\u0000\u0000,\u0001\u0000\u0000\u0000\u0000.\u0001"+ + "\u0000\u0000\u0000\u00000\u0001\u0000\u0000\u0000\u00002\u0001\u0000\u0000"+ + "\u0000\u00004\u0001\u0000\u0000\u0000\u00006\u0001\u0000\u0000\u0000\u0000"+ + "8\u0001\u0000\u0000\u0000\u0000:\u0001\u0000\u0000\u0000\u0000<\u0001"+ + "\u0000\u0000\u0000\u0000>\u0001\u0000\u0000\u0000\u0000B\u0001\u0000\u0000"+ + "\u0000\u0001D\u0001\u0000\u0000\u0000\u0001F\u0001\u0000\u0000\u0000\u0001"+ + "H\u0001\u0000\u0000\u0000\u0001J\u0001\u0000\u0000\u0000\u0001L\u0001"+ + "\u0000\u0000\u0000\u0002N\u0001\u0000\u0000\u0000\u0002d\u0001\u0000\u0000"+ + "\u0000\u0002f\u0001\u0000\u0000\u0000\u0002h\u0001\u0000\u0000\u0000\u0002"+ + "j\u0001\u0000\u0000\u0000\u0002l\u0001\u0000\u0000\u0000\u0002n\u0001"+ + "\u0000\u0000\u0000\u0002p\u0001\u0000\u0000\u0000\u0002r\u0001\u0000\u0000"+ + "\u0000\u0002t\u0001\u0000\u0000\u0000\u0002v\u0001\u0000\u0000\u0000\u0002"+ + "x\u0001\u0000\u0000\u0000\u0002z\u0001\u0000\u0000\u0000\u0002|\u0001"+ + "\u0000\u0000\u0000\u0002~\u0001\u0000\u0000\u0000\u0002\u0080\u0001\u0000"+ + "\u0000\u0000\u0002\u0082\u0001\u0000\u0000\u0000\u0002\u0084\u0001\u0000"+ + "\u0000\u0000\u0002\u0086\u0001\u0000\u0000\u0000\u0002\u0088\u0001\u0000"+ + "\u0000\u0000\u0002\u008a\u0001\u0000\u0000\u0000\u0002\u008c\u0001\u0000"+ + "\u0000\u0000\u0002\u008e\u0001\u0000\u0000\u0000\u0002\u0090\u0001\u0000"+ + "\u0000\u0000\u0002\u0092\u0001\u0000\u0000\u0000\u0002\u0094\u0001\u0000"+ + "\u0000\u0000\u0002\u0096\u0001\u0000\u0000\u0000\u0002\u0098\u0001\u0000"+ + "\u0000\u0000\u0002\u009a\u0001\u0000\u0000\u0000\u0002\u009c\u0001\u0000"+ + "\u0000\u0000\u0002\u009e\u0001\u0000\u0000\u0000\u0002\u00a0\u0001\u0000"+ + "\u0000\u0000\u0002\u00a2\u0001\u0000\u0000\u0000\u0002\u00a4\u0001\u0000"+ + "\u0000\u0000\u0002\u00a6\u0001\u0000\u0000\u0000\u0002\u00a8\u0001\u0000"+ + 
"\u0000\u0000\u0002\u00aa\u0001\u0000\u0000\u0000\u0002\u00ac\u0001\u0000"+ + "\u0000\u0000\u0002\u00ae\u0001\u0000\u0000\u0000\u0002\u00b0\u0001\u0000"+ + "\u0000\u0000\u0002\u00b2\u0001\u0000\u0000\u0000\u0002\u00b4\u0001\u0000"+ + "\u0000\u0000\u0002\u00b6\u0001\u0000\u0000\u0000\u0002\u00b8\u0001\u0000"+ + "\u0000\u0000\u0002\u00bc\u0001\u0000\u0000\u0000\u0002\u00be\u0001\u0000"+ + "\u0000\u0000\u0002\u00c0\u0001\u0000\u0000\u0000\u0002\u00c2\u0001\u0000"+ + "\u0000\u0000\u0003\u00c4\u0001\u0000\u0000\u0000\u0003\u00c6\u0001\u0000"+ + "\u0000\u0000\u0003\u00c8\u0001\u0000\u0000\u0000\u0003\u00ca\u0001\u0000"+ + "\u0000\u0000\u0003\u00cc\u0001\u0000\u0000\u0000\u0003\u00ce\u0001\u0000"+ + "\u0000\u0000\u0003\u00d0\u0001\u0000\u0000\u0000\u0003\u00d2\u0001\u0000"+ + "\u0000\u0000\u0003\u00d4\u0001\u0000\u0000\u0000\u0003\u00d6\u0001\u0000"+ + "\u0000\u0000\u0003\u00d8\u0001\u0000\u0000\u0000\u0003\u00da\u0001\u0000"+ + "\u0000\u0000\u0004\u00dc\u0001\u0000\u0000\u0000\u0004\u00de\u0001\u0000"+ + "\u0000\u0000\u0004\u00e0\u0001\u0000\u0000\u0000\u0004\u00e6\u0001\u0000"+ + "\u0000\u0000\u0004\u00e8\u0001\u0000\u0000\u0000\u0004\u00ea\u0001\u0000"+ + "\u0000\u0000\u0004\u00ec\u0001\u0000\u0000\u0000\u0005\u00ee\u0001\u0000"+ + "\u0000\u0000\u0005\u00f0\u0001\u0000\u0000\u0000\u0005\u00f2\u0001\u0000"+ + "\u0000\u0000\u0005\u00f4\u0001\u0000\u0000\u0000\u0005\u00f6\u0001\u0000"+ + "\u0000\u0000\u0005\u00f8\u0001\u0000\u0000\u0000\u0005\u00fa\u0001\u0000"+ + "\u0000\u0000\u0005\u00fc\u0001\u0000\u0000\u0000\u0005\u00fe\u0001\u0000"+ + "\u0000\u0000\u0006\u0100\u0001\u0000\u0000\u0000\u0006\u0102\u0001\u0000"+ + "\u0000\u0000\u0006\u0104\u0001\u0000\u0000\u0000\u0006\u0106\u0001\u0000"+ + "\u0000\u0000\u0006\u010a\u0001\u0000\u0000\u0000\u0006\u010c\u0001\u0000"+ + "\u0000\u0000\u0006\u010e\u0001\u0000\u0000\u0000\u0006\u0110\u0001\u0000"+ + "\u0000\u0000\u0006\u0112\u0001\u0000\u0000\u0000\u0007\u0114\u0001\u0000"+ + "\u0000\u0000\u0007\u0116\u0001\u0000\u0000\u0000\u0007\u0118\u0001\u0000"+ + "\u0000\u0000\u0007\u011a\u0001\u0000\u0000\u0000\u0007\u011c\u0001\u0000"+ + "\u0000\u0000\u0007\u011e\u0001\u0000\u0000\u0000\u0007\u0120\u0001\u0000"+ + "\u0000\u0000\u0007\u0122\u0001\u0000\u0000\u0000\u0007\u0124\u0001\u0000"+ + "\u0000\u0000\u0007\u0126\u0001\u0000\u0000\u0000\b\u0128\u0001\u0000\u0000"+ + "\u0000\b\u012a\u0001\u0000\u0000\u0000\b\u012c\u0001\u0000\u0000\u0000"+ + "\b\u012e\u0001\u0000\u0000\u0000\b\u0130\u0001\u0000\u0000\u0000\b\u0132"+ + "\u0001\u0000\u0000\u0000\b\u0134\u0001\u0000\u0000\u0000\b\u0136\u0001"+ + "\u0000\u0000\u0000\b\u0138\u0001\u0000\u0000\u0000\b\u013a\u0001\u0000"+ + "\u0000\u0000\t\u013c\u0001\u0000\u0000\u0000\t\u013e\u0001\u0000\u0000"+ + "\u0000\t\u0140\u0001\u0000\u0000\u0000\t\u0142\u0001\u0000\u0000\u0000"+ + "\t\u0144\u0001\u0000\u0000\u0000\t\u0146\u0001\u0000\u0000\u0000\t\u0148"+ + "\u0001\u0000\u0000\u0000\n\u014a\u0001\u0000\u0000\u0000\n\u014c\u0001"+ + "\u0000\u0000\u0000\n\u014e\u0001\u0000\u0000\u0000\n\u0150\u0001\u0000"+ + "\u0000\u0000\n\u0152\u0001\u0000\u0000\u0000\n\u0154\u0001\u0000\u0000"+ + "\u0000\n\u0156\u0001\u0000\u0000\u0000\u000b\u0158\u0001\u0000\u0000\u0000"+ + "\u000b\u015a\u0001\u0000\u0000\u0000\u000b\u015c\u0001\u0000\u0000\u0000"+ + "\u000b\u015e\u0001\u0000\u0000\u0000\u000b\u0160\u0001\u0000\u0000\u0000"+ + "\f\u0162\u0001\u0000\u0000\u0000\f\u0164\u0001\u0000\u0000\u0000\f\u0166"+ + "\u0001\u0000\u0000\u0000\f\u0168\u0001\u0000\u0000\u0000\f\u016a\u0001"+ + 
"\u0000\u0000\u0000\r\u016c\u0001\u0000\u0000\u0000\r\u016e\u0001\u0000"+ + "\u0000\u0000\r\u0170\u0001\u0000\u0000\u0000\r\u0172\u0001\u0000\u0000"+ + "\u0000\r\u0174\u0001\u0000\u0000\u0000\r\u0176\u0001\u0000\u0000\u0000"+ + "\u000e\u0178\u0001\u0000\u0000\u0000\u000e\u017a\u0001\u0000\u0000\u0000"+ + "\u000e\u017c\u0001\u0000\u0000\u0000\u000e\u017e\u0001\u0000\u0000\u0000"+ + "\u000e\u0180\u0001\u0000\u0000\u0000\u000e\u0182\u0001\u0000\u0000\u0000"+ + "\u000f\u0184\u0001\u0000\u0000\u0000\u000f\u0186\u0001\u0000\u0000\u0000"+ + "\u000f\u0188\u0001\u0000\u0000\u0000\u000f\u018a\u0001\u0000\u0000\u0000"+ + "\u000f\u018c\u0001\u0000\u0000\u0000\u000f\u018e\u0001\u0000\u0000\u0000"+ + "\u000f\u0190\u0001\u0000\u0000\u0000\u000f\u0192\u0001\u0000\u0000\u0000"+ + "\u000f\u0194\u0001\u0000\u0000\u0000\u0010\u0196\u0001\u0000\u0000\u0000"+ + "\u0012\u01a0\u0001\u0000\u0000\u0000\u0014\u01a7\u0001\u0000\u0000\u0000"+ + "\u0016\u01b0\u0001\u0000\u0000\u0000\u0018\u01b7\u0001\u0000\u0000\u0000"+ + "\u001a\u01c1\u0001\u0000\u0000\u0000\u001c\u01c8\u0001\u0000\u0000\u0000"+ + "\u001e\u01cf\u0001\u0000\u0000\u0000 \u01dd\u0001\u0000\u0000\u0000\""+ + "\u01e4\u0001\u0000\u0000\u0000$\u01ec\u0001\u0000\u0000\u0000&\u01f5\u0001"+ + "\u0000\u0000\u0000(\u01fc\u0001\u0000\u0000\u0000*\u0206\u0001\u0000\u0000"+ + "\u0000,\u0212\u0001\u0000\u0000\u0000.\u021b\u0001\u0000\u0000\u00000"+ + "\u0221\u0001\u0000\u0000\u00002\u0228\u0001\u0000\u0000\u00004\u022f\u0001"+ + "\u0000\u0000\u00006\u0237\u0001\u0000\u0000\u00008\u0240\u0001\u0000\u0000"+ + "\u0000:\u0246\u0001\u0000\u0000\u0000<\u0257\u0001\u0000\u0000\u0000>"+ + "\u0267\u0001\u0000\u0000\u0000@\u0270\u0001\u0000\u0000\u0000B\u0273\u0001"+ + "\u0000\u0000\u0000D\u0277\u0001\u0000\u0000\u0000F\u027c\u0001\u0000\u0000"+ + "\u0000H\u0281\u0001\u0000\u0000\u0000J\u0285\u0001\u0000\u0000\u0000L"+ + "\u0289\u0001\u0000\u0000\u0000N\u028d\u0001\u0000\u0000\u0000P\u0291\u0001"+ + "\u0000\u0000\u0000R\u0293\u0001\u0000\u0000\u0000T\u0295\u0001\u0000\u0000"+ + "\u0000V\u0298\u0001\u0000\u0000\u0000X\u029a\u0001\u0000\u0000\u0000Z"+ + "\u02a3\u0001\u0000\u0000\u0000\\\u02a5\u0001\u0000\u0000\u0000^\u02aa"+ + "\u0001\u0000\u0000\u0000`\u02ac\u0001\u0000\u0000\u0000b\u02b1\u0001\u0000"+ + "\u0000\u0000d\u02d0\u0001\u0000\u0000\u0000f\u02d3\u0001\u0000\u0000\u0000"+ + "h\u0301\u0001\u0000\u0000\u0000j\u0303\u0001\u0000\u0000\u0000l\u0306"+ + "\u0001\u0000\u0000\u0000n\u030a\u0001\u0000\u0000\u0000p\u030e\u0001\u0000"+ + "\u0000\u0000r\u0310\u0001\u0000\u0000\u0000t\u0313\u0001\u0000\u0000\u0000"+ + "v\u0315\u0001\u0000\u0000\u0000x\u031a\u0001\u0000\u0000\u0000z\u031c"+ + "\u0001\u0000\u0000\u0000|\u0322\u0001\u0000\u0000\u0000~\u0328\u0001\u0000"+ + "\u0000\u0000\u0080\u032b\u0001\u0000\u0000\u0000\u0082\u032e\u0001\u0000"+ + "\u0000\u0000\u0084\u0333\u0001\u0000\u0000\u0000\u0086\u0338\u0001\u0000"+ + "\u0000\u0000\u0088\u033a\u0001\u0000\u0000\u0000\u008a\u0340\u0001\u0000"+ + "\u0000\u0000\u008c\u0344\u0001\u0000\u0000\u0000\u008e\u0349\u0001\u0000"+ + "\u0000\u0000\u0090\u034f\u0001\u0000\u0000\u0000\u0092\u0352\u0001\u0000"+ + "\u0000\u0000\u0094\u0354\u0001\u0000\u0000\u0000\u0096\u035a\u0001\u0000"+ + "\u0000\u0000\u0098\u035c\u0001\u0000\u0000\u0000\u009a\u0361\u0001\u0000"+ + "\u0000\u0000\u009c\u0364\u0001\u0000\u0000\u0000\u009e\u0367\u0001\u0000"+ + "\u0000\u0000\u00a0\u036a\u0001\u0000\u0000\u0000\u00a2\u036c\u0001\u0000"+ + "\u0000\u0000\u00a4\u036f\u0001\u0000\u0000\u0000\u00a6\u0371\u0001\u0000"+ + 
"\u0000\u0000\u00a8\u0374\u0001\u0000\u0000\u0000\u00aa\u0376\u0001\u0000"+ + "\u0000\u0000\u00ac\u0378\u0001\u0000\u0000\u0000\u00ae\u037a\u0001\u0000"+ + "\u0000\u0000\u00b0\u037c\u0001\u0000\u0000\u0000\u00b2\u038c\u0001\u0000"+ + "\u0000\u0000\u00b4\u038e\u0001\u0000\u0000\u0000\u00b6\u0393\u0001\u0000"+ + "\u0000\u0000\u00b8\u03a8\u0001\u0000\u0000\u0000\u00ba\u03aa\u0001\u0000"+ + "\u0000\u0000\u00bc\u03b2\u0001\u0000\u0000\u0000\u00be\u03b4\u0001\u0000"+ + "\u0000\u0000\u00c0\u03b8\u0001\u0000\u0000\u0000\u00c2\u03bc\u0001\u0000"+ + "\u0000\u0000\u00c4\u03c0\u0001\u0000\u0000\u0000\u00c6\u03c5\u0001\u0000"+ + "\u0000\u0000\u00c8\u03c9\u0001\u0000\u0000\u0000\u00ca\u03cd\u0001\u0000"+ + "\u0000\u0000\u00cc\u03d1\u0001\u0000\u0000\u0000\u00ce\u03d5\u0001\u0000"+ + "\u0000\u0000\u00d0\u03d9\u0001\u0000\u0000\u0000\u00d2\u03e2\u0001\u0000"+ + "\u0000\u0000\u00d4\u03e6\u0001\u0000\u0000\u0000\u00d6\u03ea\u0001\u0000"+ + "\u0000\u0000\u00d8\u03ee\u0001\u0000\u0000\u0000\u00da\u03f2\u0001\u0000"+ + "\u0000\u0000\u00dc\u03f6\u0001\u0000\u0000\u0000\u00de\u03fb\u0001\u0000"+ + "\u0000\u0000\u00e0\u03ff\u0001\u0000\u0000\u0000\u00e2\u0407\u0001\u0000"+ + "\u0000\u0000\u00e4\u041c\u0001\u0000\u0000\u0000\u00e6\u0420\u0001\u0000"+ + "\u0000\u0000\u00e8\u0424\u0001\u0000\u0000\u0000\u00ea\u0428\u0001\u0000"+ + "\u0000\u0000\u00ec\u042c\u0001\u0000\u0000\u0000\u00ee\u0430\u0001\u0000"+ + "\u0000\u0000\u00f0\u0435\u0001\u0000\u0000\u0000\u00f2\u0439\u0001\u0000"+ + "\u0000\u0000\u00f4\u043d\u0001\u0000\u0000\u0000\u00f6\u0441\u0001\u0000"+ + "\u0000\u0000\u00f8\u0444\u0001\u0000\u0000\u0000\u00fa\u0448\u0001\u0000"+ + "\u0000\u0000\u00fc\u044c\u0001\u0000\u0000\u0000\u00fe\u0450\u0001\u0000"+ + "\u0000\u0000\u0100\u0454\u0001\u0000\u0000\u0000\u0102\u0459\u0001\u0000"+ + "\u0000\u0000\u0104\u045e\u0001\u0000\u0000\u0000\u0106\u0463\u0001\u0000"+ + "\u0000\u0000\u0108\u046a\u0001\u0000\u0000\u0000\u010a\u0473\u0001\u0000"+ + "\u0000\u0000\u010c\u047a\u0001\u0000\u0000\u0000\u010e\u047e\u0001\u0000"+ + "\u0000\u0000\u0110\u0482\u0001\u0000\u0000\u0000\u0112\u0486\u0001\u0000"+ + "\u0000\u0000\u0114\u048a\u0001\u0000\u0000\u0000\u0116\u0490\u0001\u0000"+ + "\u0000\u0000\u0118\u0494\u0001\u0000\u0000\u0000\u011a\u0498\u0001\u0000"+ + "\u0000\u0000\u011c\u049c\u0001\u0000\u0000\u0000\u011e\u04a0\u0001\u0000"+ + "\u0000\u0000\u0120\u04a4\u0001\u0000\u0000\u0000\u0122\u04a8\u0001\u0000"+ + "\u0000\u0000\u0124\u04ac\u0001\u0000\u0000\u0000\u0126\u04b0\u0001\u0000"+ + "\u0000\u0000\u0128\u04b4\u0001\u0000\u0000\u0000\u012a\u04b9\u0001\u0000"+ + "\u0000\u0000\u012c\u04bd\u0001\u0000\u0000\u0000\u012e\u04c1\u0001\u0000"+ + "\u0000\u0000\u0130\u04c5\u0001\u0000\u0000\u0000\u0132\u04ca\u0001\u0000"+ + "\u0000\u0000\u0134\u04ce\u0001\u0000\u0000\u0000\u0136\u04d2\u0001\u0000"+ + "\u0000\u0000\u0138\u04d6\u0001\u0000\u0000\u0000\u013a\u04da\u0001\u0000"+ + "\u0000\u0000\u013c\u04de\u0001\u0000\u0000\u0000\u013e\u04e4\u0001\u0000"+ + "\u0000\u0000\u0140\u04e8\u0001\u0000\u0000\u0000\u0142\u04ec\u0001\u0000"+ + "\u0000\u0000\u0144\u04f0\u0001\u0000\u0000\u0000\u0146\u04f4\u0001\u0000"+ + "\u0000\u0000\u0148\u04f8\u0001\u0000\u0000\u0000\u014a\u04fc\u0001\u0000"+ + "\u0000\u0000\u014c\u0501\u0001\u0000\u0000\u0000\u014e\u0505\u0001\u0000"+ + "\u0000\u0000\u0150\u0509\u0001\u0000\u0000\u0000\u0152\u050d\u0001\u0000"+ + "\u0000\u0000\u0154\u0511\u0001\u0000\u0000\u0000\u0156\u0515\u0001\u0000"+ + "\u0000\u0000\u0158\u0519\u0001\u0000\u0000\u0000\u015a\u051e\u0001\u0000"+ + 
"\u0000\u0000\u015c\u0523\u0001\u0000\u0000\u0000\u015e\u0527\u0001\u0000"+ + "\u0000\u0000\u0160\u052b\u0001\u0000\u0000\u0000\u0162\u052f\u0001\u0000"+ + "\u0000\u0000\u0164\u0534\u0001\u0000\u0000\u0000\u0166\u053e\u0001\u0000"+ + "\u0000\u0000\u0168\u0542\u0001\u0000\u0000\u0000\u016a\u0546\u0001\u0000"+ + "\u0000\u0000\u016c\u054a\u0001\u0000\u0000\u0000\u016e\u054f\u0001\u0000"+ + "\u0000\u0000\u0170\u0556\u0001\u0000\u0000\u0000\u0172\u055a\u0001\u0000"+ + "\u0000\u0000\u0174\u055e\u0001\u0000\u0000\u0000\u0176\u0562\u0001\u0000"+ + "\u0000\u0000\u0178\u0566\u0001\u0000\u0000\u0000\u017a\u056b\u0001\u0000"+ + "\u0000\u0000\u017c\u0571\u0001\u0000\u0000\u0000\u017e\u0577\u0001\u0000"+ + "\u0000\u0000\u0180\u057b\u0001\u0000\u0000\u0000\u0182\u057f\u0001\u0000"+ + "\u0000\u0000\u0184\u0583\u0001\u0000\u0000\u0000\u0186\u0589\u0001\u0000"+ + "\u0000\u0000\u0188\u058f\u0001\u0000\u0000\u0000\u018a\u0593\u0001\u0000"+ + "\u0000\u0000\u018c\u0597\u0001\u0000\u0000\u0000\u018e\u059b\u0001\u0000"+ + "\u0000\u0000\u0190\u05a1\u0001\u0000\u0000\u0000\u0192\u05a7\u0001\u0000"+ + "\u0000\u0000\u0194\u05ad\u0001\u0000\u0000\u0000\u0196\u0197\u0005d\u0000"+ + "\u0000\u0197\u0198\u0005i\u0000\u0000\u0198\u0199\u0005s\u0000\u0000\u0199"+ + "\u019a\u0005s\u0000\u0000\u019a\u019b\u0005e\u0000\u0000\u019b\u019c\u0005"+ + "c\u0000\u0000\u019c\u019d\u0005t\u0000\u0000\u019d\u019e\u0001\u0000\u0000"+ + "\u0000\u019e\u019f\u0006\u0000\u0000\u0000\u019f\u0011\u0001\u0000\u0000"+ + "\u0000\u01a0\u01a1\u0005d\u0000\u0000\u01a1\u01a2\u0005r\u0000\u0000\u01a2"+ + "\u01a3\u0005o\u0000\u0000\u01a3\u01a4\u0005p\u0000\u0000\u01a4\u01a5\u0001"+ + "\u0000\u0000\u0000\u01a5\u01a6\u0006\u0001\u0001\u0000\u01a6\u0013\u0001"+ + "\u0000\u0000\u0000\u01a7\u01a8\u0005e\u0000\u0000\u01a8\u01a9\u0005n\u0000"+ + "\u0000\u01a9\u01aa\u0005r\u0000\u0000\u01aa\u01ab\u0005i\u0000\u0000\u01ab"+ + "\u01ac\u0005c\u0000\u0000\u01ac\u01ad\u0005h\u0000\u0000\u01ad\u01ae\u0001"+ + "\u0000\u0000\u0000\u01ae\u01af\u0006\u0002\u0002\u0000\u01af\u0015\u0001"+ + "\u0000\u0000\u0000\u01b0\u01b1\u0005e\u0000\u0000\u01b1\u01b2\u0005v\u0000"+ + "\u0000\u01b2\u01b3\u0005a\u0000\u0000\u01b3\u01b4\u0005l\u0000\u0000\u01b4"+ + "\u01b5\u0001\u0000\u0000\u0000\u01b5\u01b6\u0006\u0003\u0000\u0000\u01b6"+ + "\u0017\u0001\u0000\u0000\u0000\u01b7\u01b8\u0005e\u0000\u0000\u01b8\u01b9"+ + "\u0005x\u0000\u0000\u01b9\u01ba\u0005p\u0000\u0000\u01ba\u01bb\u0005l"+ + "\u0000\u0000\u01bb\u01bc\u0005a\u0000\u0000\u01bc\u01bd\u0005i\u0000\u0000"+ + "\u01bd\u01be\u0005n\u0000\u0000\u01be\u01bf\u0001\u0000\u0000\u0000\u01bf"+ + "\u01c0\u0006\u0004\u0003\u0000\u01c0\u0019\u0001\u0000\u0000\u0000\u01c1"+ + "\u01c2\u0005f\u0000\u0000\u01c2\u01c3\u0005r\u0000\u0000\u01c3\u01c4\u0005"+ + "o\u0000\u0000\u01c4\u01c5\u0005m\u0000\u0000\u01c5\u01c6\u0001\u0000\u0000"+ + "\u0000\u01c6\u01c7\u0006\u0005\u0004\u0000\u01c7\u001b\u0001\u0000\u0000"+ + "\u0000\u01c8\u01c9\u0005g\u0000\u0000\u01c9\u01ca\u0005r\u0000\u0000\u01ca"+ + "\u01cb\u0005o\u0000\u0000\u01cb\u01cc\u0005k\u0000\u0000\u01cc\u01cd\u0001"+ + "\u0000\u0000\u0000\u01cd\u01ce\u0006\u0006\u0000\u0000\u01ce\u001d\u0001"+ + "\u0000\u0000\u0000\u01cf\u01d0\u0005i\u0000\u0000\u01d0\u01d1\u0005n\u0000"+ + "\u0000\u01d1\u01d2\u0005l\u0000\u0000\u01d2\u01d3\u0005i\u0000\u0000\u01d3"+ + "\u01d4\u0005n\u0000\u0000\u01d4\u01d5\u0005e\u0000\u0000\u01d5\u01d6\u0005"+ + "s\u0000\u0000\u01d6\u01d7\u0005t\u0000\u0000\u01d7\u01d8\u0005a\u0000"+ + "\u0000\u01d8\u01d9\u0005t\u0000\u0000\u01d9\u01da\u0005s\u0000\u0000\u01da"+ + 
"\u01db\u0001\u0000\u0000\u0000\u01db\u01dc\u0006\u0007\u0000\u0000\u01dc"+ + "\u001f\u0001\u0000\u0000\u0000\u01dd\u01de\u0005k\u0000\u0000\u01de\u01df"+ + "\u0005e\u0000\u0000\u01df\u01e0\u0005e\u0000\u0000\u01e0\u01e1\u0005p"+ + "\u0000\u0000\u01e1\u01e2\u0001\u0000\u0000\u0000\u01e2\u01e3\u0006\b\u0001"+ + "\u0000\u01e3!\u0001\u0000\u0000\u0000\u01e4\u01e5\u0005l\u0000\u0000\u01e5"+ + "\u01e6\u0005i\u0000\u0000\u01e6\u01e7\u0005m\u0000\u0000\u01e7\u01e8\u0005"+ + "i\u0000\u0000\u01e8\u01e9\u0005t\u0000\u0000\u01e9\u01ea\u0001\u0000\u0000"+ + "\u0000\u01ea\u01eb\u0006\t\u0000\u0000\u01eb#\u0001\u0000\u0000\u0000"+ + "\u01ec\u01ed\u0005l\u0000\u0000\u01ed\u01ee\u0005o\u0000\u0000\u01ee\u01ef"+ + "\u0005o\u0000\u0000\u01ef\u01f0\u0005k\u0000\u0000\u01f0\u01f1\u0005u"+ + "\u0000\u0000\u01f1\u01f2\u0005p\u0000\u0000\u01f2\u01f3\u0001\u0000\u0000"+ + "\u0000\u01f3\u01f4\u0006\n\u0005\u0000\u01f4%\u0001\u0000\u0000\u0000"+ + "\u01f5\u01f6\u0005m\u0000\u0000\u01f6\u01f7\u0005e\u0000\u0000\u01f7\u01f8"+ + "\u0005t\u0000\u0000\u01f8\u01f9\u0005a\u0000\u0000\u01f9\u01fa\u0001\u0000"+ + "\u0000\u0000\u01fa\u01fb\u0006\u000b\u0006\u0000\u01fb\'\u0001\u0000\u0000"+ + "\u0000\u01fc\u01fd\u0005m\u0000\u0000\u01fd\u01fe\u0005e\u0000\u0000\u01fe"+ + "\u01ff\u0005t\u0000\u0000\u01ff\u0200\u0005r\u0000\u0000\u0200\u0201\u0005"+ + "i\u0000\u0000\u0201\u0202\u0005c\u0000\u0000\u0202\u0203\u0005s\u0000"+ + "\u0000\u0203\u0204\u0001\u0000\u0000\u0000\u0204\u0205\u0006\f\u0007\u0000"+ + "\u0205)\u0001\u0000\u0000\u0000\u0206\u0207\u0005m\u0000\u0000\u0207\u0208"+ + "\u0005v\u0000\u0000\u0208\u0209\u0005_\u0000\u0000\u0209\u020a\u0005e"+ + "\u0000\u0000\u020a\u020b\u0005x\u0000\u0000\u020b\u020c\u0005p\u0000\u0000"+ + "\u020c\u020d\u0005a\u0000\u0000\u020d\u020e\u0005n\u0000\u0000\u020e\u020f"+ + "\u0005d\u0000\u0000\u020f\u0210\u0001\u0000\u0000\u0000\u0210\u0211\u0006"+ + "\r\b\u0000\u0211+\u0001\u0000\u0000\u0000\u0212\u0213\u0005r\u0000\u0000"+ + "\u0213\u0214\u0005e\u0000\u0000\u0214\u0215\u0005n\u0000\u0000\u0215\u0216"+ + "\u0005a\u0000\u0000\u0216\u0217\u0005m\u0000\u0000\u0217\u0218\u0005e"+ + "\u0000\u0000\u0218\u0219\u0001\u0000\u0000\u0000\u0219\u021a\u0006\u000e"+ + "\t\u0000\u021a-\u0001\u0000\u0000\u0000\u021b\u021c\u0005r\u0000\u0000"+ + "\u021c\u021d\u0005o\u0000\u0000\u021d\u021e\u0005w\u0000\u0000\u021e\u021f"+ + "\u0001\u0000\u0000\u0000\u021f\u0220\u0006\u000f\u0000\u0000\u0220/\u0001"+ + "\u0000\u0000\u0000\u0221\u0222\u0005s\u0000\u0000\u0222\u0223\u0005h\u0000"+ + "\u0000\u0223\u0224\u0005o\u0000\u0000\u0224\u0225\u0005w\u0000\u0000\u0225"+ + "\u0226\u0001\u0000\u0000\u0000\u0226\u0227\u0006\u0010\n\u0000\u02271"+ + "\u0001\u0000\u0000\u0000\u0228\u0229\u0005s\u0000\u0000\u0229\u022a\u0005"+ + "o\u0000\u0000\u022a\u022b\u0005r\u0000\u0000\u022b\u022c\u0005t\u0000"+ + "\u0000\u022c\u022d\u0001\u0000\u0000\u0000\u022d\u022e\u0006\u0011\u0000"+ + "\u0000\u022e3\u0001\u0000\u0000\u0000\u022f\u0230\u0005s\u0000\u0000\u0230"+ + "\u0231\u0005t\u0000\u0000\u0231\u0232\u0005a\u0000\u0000\u0232\u0233\u0005"+ + "t\u0000\u0000\u0233\u0234\u0005s\u0000\u0000\u0234\u0235\u0001\u0000\u0000"+ + "\u0000\u0235\u0236\u0006\u0012\u0000\u0000\u02365\u0001\u0000\u0000\u0000"+ + "\u0237\u0238\u0005w\u0000\u0000\u0238\u0239\u0005h\u0000\u0000\u0239\u023a"+ + "\u0005e\u0000\u0000\u023a\u023b\u0005r\u0000\u0000\u023b\u023c\u0005e"+ + "\u0000\u0000\u023c\u023d\u0001\u0000\u0000\u0000\u023d\u023e\u0006\u0013"+ + "\u0000\u0000\u023e7\u0001\u0000\u0000\u0000\u023f\u0241\b\u0000\u0000"+ + 
"\u0000\u0240\u023f\u0001\u0000\u0000\u0000\u0241\u0242\u0001\u0000\u0000"+ + "\u0000\u0242\u0240\u0001\u0000\u0000\u0000\u0242\u0243\u0001\u0000\u0000"+ + "\u0000\u0243\u0244\u0001\u0000\u0000\u0000\u0244\u0245\u0006\u0014\u0000"+ + "\u0000\u02459\u0001\u0000\u0000\u0000\u0246\u0247\u0005/\u0000\u0000\u0247"+ + "\u0248\u0005/\u0000\u0000\u0248\u024c\u0001\u0000\u0000\u0000\u0249\u024b"+ + "\b\u0001\u0000\u0000\u024a\u0249\u0001\u0000\u0000\u0000\u024b\u024e\u0001"+ + "\u0000\u0000\u0000\u024c\u024a\u0001\u0000\u0000\u0000\u024c\u024d\u0001"+ + "\u0000\u0000\u0000\u024d\u0250\u0001\u0000\u0000\u0000\u024e\u024c\u0001"+ + "\u0000\u0000\u0000\u024f\u0251\u0005\r\u0000\u0000\u0250\u024f\u0001\u0000"+ + "\u0000\u0000\u0250\u0251\u0001\u0000\u0000\u0000\u0251\u0253\u0001\u0000"+ + "\u0000\u0000\u0252\u0254\u0005\n\u0000\u0000\u0253\u0252\u0001\u0000\u0000"+ + "\u0000\u0253\u0254\u0001\u0000\u0000\u0000\u0254\u0255\u0001\u0000\u0000"+ + "\u0000\u0255\u0256\u0006\u0015\u000b\u0000\u0256;\u0001\u0000\u0000\u0000"+ + "\u0257\u0258\u0005/\u0000\u0000\u0258\u0259\u0005*\u0000\u0000\u0259\u025e"+ + "\u0001\u0000\u0000\u0000\u025a\u025d\u0003<\u0016\u0000\u025b\u025d\t"+ + "\u0000\u0000\u0000\u025c\u025a\u0001\u0000\u0000\u0000\u025c\u025b\u0001"+ + "\u0000\u0000\u0000\u025d\u0260\u0001\u0000\u0000\u0000\u025e\u025f\u0001"+ + "\u0000\u0000\u0000\u025e\u025c\u0001\u0000\u0000\u0000\u025f\u0261\u0001"+ + "\u0000\u0000\u0000\u0260\u025e\u0001\u0000\u0000\u0000\u0261\u0262\u0005"+ + "*\u0000\u0000\u0262\u0263\u0005/\u0000\u0000\u0263\u0264\u0001\u0000\u0000"+ + "\u0000\u0264\u0265\u0006\u0016\u000b\u0000\u0265=\u0001\u0000\u0000\u0000"+ + "\u0266\u0268\u0007\u0002\u0000\u0000\u0267\u0266\u0001\u0000\u0000\u0000"+ + "\u0268\u0269\u0001\u0000\u0000\u0000\u0269\u0267\u0001\u0000\u0000\u0000"+ + "\u0269\u026a\u0001\u0000\u0000\u0000\u026a\u026b\u0001\u0000\u0000\u0000"+ + "\u026b\u026c\u0006\u0017\u000b\u0000\u026c?\u0001\u0000\u0000\u0000\u026d"+ + "\u0271\b\u0003\u0000\u0000\u026e\u026f\u0005/\u0000\u0000\u026f\u0271"+ + "\b\u0004\u0000\u0000\u0270\u026d\u0001\u0000\u0000\u0000\u0270\u026e\u0001"+ + "\u0000\u0000\u0000\u0271A\u0001\u0000\u0000\u0000\u0272\u0274\u0003@\u0018"+ + "\u0000\u0273\u0272\u0001\u0000\u0000\u0000\u0274\u0275\u0001\u0000\u0000"+ + "\u0000\u0275\u0273\u0001\u0000\u0000\u0000\u0275\u0276\u0001\u0000\u0000"+ + "\u0000\u0276C\u0001\u0000\u0000\u0000\u0277\u0278\u0003\u00b4R\u0000\u0278"+ + "\u0279\u0001\u0000\u0000\u0000\u0279\u027a\u0006\u001a\f\u0000\u027a\u027b"+ + "\u0006\u001a\r\u0000\u027bE\u0001\u0000\u0000\u0000\u027c\u027d\u0003"+ + "N\u001f\u0000\u027d\u027e\u0001\u0000\u0000\u0000\u027e\u027f\u0006\u001b"+ + "\u000e\u0000\u027f\u0280\u0006\u001b\u000f\u0000\u0280G\u0001\u0000\u0000"+ + "\u0000\u0281\u0282\u0003>\u0017\u0000\u0282\u0283\u0001\u0000\u0000\u0000"+ + "\u0283\u0284\u0006\u001c\u000b\u0000\u0284I\u0001\u0000\u0000\u0000\u0285"+ + "\u0286\u0003:\u0015\u0000\u0286\u0287\u0001\u0000\u0000\u0000\u0287\u0288"+ + "\u0006\u001d\u000b\u0000\u0288K\u0001\u0000\u0000\u0000\u0289\u028a\u0003"+ + "<\u0016\u0000\u028a\u028b\u0001\u0000\u0000\u0000\u028b\u028c\u0006\u001e"+ + "\u000b\u0000\u028cM\u0001\u0000\u0000\u0000\u028d\u028e\u0005|\u0000\u0000"+ + "\u028e\u028f\u0001\u0000\u0000\u0000\u028f\u0290\u0006\u001f\u000f\u0000"+ + "\u0290O\u0001\u0000\u0000\u0000\u0291\u0292\u0007\u0005\u0000\u0000\u0292"+ + "Q\u0001\u0000\u0000\u0000\u0293\u0294\u0007\u0006\u0000\u0000\u0294S\u0001"+ + "\u0000\u0000\u0000\u0295\u0296\u0005\\\u0000\u0000\u0296\u0297\u0007\u0007"+ + 
"\u0000\u0000\u0297U\u0001\u0000\u0000\u0000\u0298\u0299\b\b\u0000\u0000"+ + "\u0299W\u0001\u0000\u0000\u0000\u029a\u029c\u0007\t\u0000\u0000\u029b"+ + "\u029d\u0007\n\u0000\u0000\u029c\u029b\u0001\u0000\u0000\u0000\u029c\u029d"+ + "\u0001\u0000\u0000\u0000\u029d\u029f\u0001\u0000\u0000\u0000\u029e\u02a0"+ + "\u0003P \u0000\u029f\u029e\u0001\u0000\u0000\u0000\u02a0\u02a1\u0001\u0000"+ + "\u0000\u0000\u02a1\u029f\u0001\u0000\u0000\u0000\u02a1\u02a2\u0001\u0000"+ + "\u0000\u0000\u02a2Y\u0001\u0000\u0000\u0000\u02a3\u02a4\u0005@\u0000\u0000"+ + "\u02a4[\u0001\u0000\u0000\u0000\u02a5\u02a6\u0005`\u0000\u0000\u02a6]"+ + "\u0001\u0000\u0000\u0000\u02a7\u02ab\b\u000b\u0000\u0000\u02a8\u02a9\u0005"+ + "`\u0000\u0000\u02a9\u02ab\u0005`\u0000\u0000\u02aa\u02a7\u0001\u0000\u0000"+ + "\u0000\u02aa\u02a8\u0001\u0000\u0000\u0000\u02ab_\u0001\u0000\u0000\u0000"+ + "\u02ac\u02ad\u0005_\u0000\u0000\u02ada\u0001\u0000\u0000\u0000\u02ae\u02b2"+ + "\u0003R!\u0000\u02af\u02b2\u0003P \u0000\u02b0\u02b2\u0003`(\u0000\u02b1"+ + "\u02ae\u0001\u0000\u0000\u0000\u02b1\u02af\u0001\u0000\u0000\u0000\u02b1"+ + "\u02b0\u0001\u0000\u0000\u0000\u02b2c\u0001\u0000\u0000\u0000\u02b3\u02b8"+ + "\u0005\"\u0000\u0000\u02b4\u02b7\u0003T\"\u0000\u02b5\u02b7\u0003V#\u0000"+ + "\u02b6\u02b4\u0001\u0000\u0000\u0000\u02b6\u02b5\u0001\u0000\u0000\u0000"+ + "\u02b7\u02ba\u0001\u0000\u0000\u0000\u02b8\u02b6\u0001\u0000\u0000\u0000"+ + "\u02b8\u02b9\u0001\u0000\u0000\u0000\u02b9\u02bb\u0001\u0000\u0000\u0000"+ + "\u02ba\u02b8\u0001\u0000\u0000\u0000\u02bb\u02d1\u0005\"\u0000\u0000\u02bc"+ + "\u02bd\u0005\"\u0000\u0000\u02bd\u02be\u0005\"\u0000\u0000\u02be\u02bf"+ + "\u0005\"\u0000\u0000\u02bf\u02c3\u0001\u0000\u0000\u0000\u02c0\u02c2\b"+ + "\u0001\u0000\u0000\u02c1\u02c0\u0001\u0000\u0000\u0000\u02c2\u02c5\u0001"+ + "\u0000\u0000\u0000\u02c3\u02c4\u0001\u0000\u0000\u0000\u02c3\u02c1\u0001"+ + "\u0000\u0000\u0000\u02c4\u02c6\u0001\u0000\u0000\u0000\u02c5\u02c3\u0001"+ + "\u0000\u0000\u0000\u02c6\u02c7\u0005\"\u0000\u0000\u02c7\u02c8\u0005\""+ + "\u0000\u0000\u02c8\u02c9\u0005\"\u0000\u0000\u02c9\u02cb\u0001\u0000\u0000"+ + "\u0000\u02ca\u02cc\u0005\"\u0000\u0000\u02cb\u02ca\u0001\u0000\u0000\u0000"+ + "\u02cb\u02cc\u0001\u0000\u0000\u0000\u02cc\u02ce\u0001\u0000\u0000\u0000"+ + "\u02cd\u02cf\u0005\"\u0000\u0000\u02ce\u02cd\u0001\u0000\u0000\u0000\u02ce"+ + "\u02cf\u0001\u0000\u0000\u0000\u02cf\u02d1\u0001\u0000\u0000\u0000\u02d0"+ + "\u02b3\u0001\u0000\u0000\u0000\u02d0\u02bc\u0001\u0000\u0000\u0000\u02d1"+ + "e\u0001\u0000\u0000\u0000\u02d2\u02d4\u0003P \u0000\u02d3\u02d2\u0001"+ + "\u0000\u0000\u0000\u02d4\u02d5\u0001\u0000\u0000\u0000\u02d5\u02d3\u0001"+ + "\u0000\u0000\u0000\u02d5\u02d6\u0001\u0000\u0000\u0000\u02d6g\u0001\u0000"+ + "\u0000\u0000\u02d7\u02d9\u0003P \u0000\u02d8\u02d7\u0001\u0000\u0000\u0000"+ + "\u02d9\u02da\u0001\u0000\u0000\u0000\u02da\u02d8\u0001\u0000\u0000\u0000"+ + "\u02da\u02db\u0001\u0000\u0000\u0000\u02db\u02dc\u0001\u0000\u0000\u0000"+ + "\u02dc\u02e0\u0003x4\u0000\u02dd\u02df\u0003P \u0000\u02de\u02dd\u0001"+ + "\u0000\u0000\u0000\u02df\u02e2\u0001\u0000\u0000\u0000\u02e0\u02de\u0001"+ + "\u0000\u0000\u0000\u02e0\u02e1\u0001\u0000\u0000\u0000\u02e1\u0302\u0001"+ + "\u0000\u0000\u0000\u02e2\u02e0\u0001\u0000\u0000\u0000\u02e3\u02e5\u0003"+ + "x4\u0000\u02e4\u02e6\u0003P \u0000\u02e5\u02e4\u0001\u0000\u0000\u0000"+ + "\u02e6\u02e7\u0001\u0000\u0000\u0000\u02e7\u02e5\u0001\u0000\u0000\u0000"+ + "\u02e7\u02e8\u0001\u0000\u0000\u0000\u02e8\u0302\u0001\u0000\u0000\u0000"+ + "\u02e9\u02eb\u0003P 
\u0000\u02ea\u02e9\u0001\u0000\u0000\u0000\u02eb\u02ec"+ + "\u0001\u0000\u0000\u0000\u02ec\u02ea\u0001\u0000\u0000\u0000\u02ec\u02ed"+ + "\u0001\u0000\u0000\u0000\u02ed\u02f5\u0001\u0000\u0000\u0000\u02ee\u02f2"+ + "\u0003x4\u0000\u02ef\u02f1\u0003P \u0000\u02f0\u02ef\u0001\u0000\u0000"+ "\u0000\u02f1\u02f4\u0001\u0000\u0000\u0000\u02f2\u02f0\u0001\u0000\u0000"+ - "\u0000\u02f3\u02ec\u0001\u0000\u0000\u0000\u02f3\u02f4\u0001\u0000\u0000"+ - "\u0000\u02f4\u02f5\u0001\u0000\u0000\u0000\u02f5\u02f6\u0003X$\u0000\u02f6"+ - "\u0300\u0001\u0000\u0000\u0000\u02f7\u02f9\u0003x4\u0000\u02f8\u02fa\u0003"+ - "P \u0000\u02f9\u02f8\u0001\u0000\u0000\u0000\u02fa\u02fb\u0001\u0000\u0000"+ - "\u0000\u02fb\u02f9\u0001\u0000\u0000\u0000\u02fb\u02fc\u0001\u0000\u0000"+ - "\u0000\u02fc\u02fd\u0001\u0000\u0000\u0000\u02fd\u02fe\u0003X$\u0000\u02fe"+ - "\u0300\u0001\u0000\u0000\u0000\u02ff\u02d6\u0001\u0000\u0000\u0000\u02ff"+ - "\u02e1\u0001\u0000\u0000\u0000\u02ff\u02e8\u0001\u0000\u0000\u0000\u02ff"+ - "\u02f7\u0001\u0000\u0000\u0000\u0300i\u0001\u0000\u0000\u0000\u0301\u0302"+ - "\u0005b\u0000\u0000\u0302\u0303\u0005y\u0000\u0000\u0303k\u0001\u0000"+ - "\u0000\u0000\u0304\u0305\u0005a\u0000\u0000\u0305\u0306\u0005n\u0000\u0000"+ - "\u0306\u0307\u0005d\u0000\u0000\u0307m\u0001\u0000\u0000\u0000\u0308\u0309"+ - "\u0005a\u0000\u0000\u0309\u030a\u0005s\u0000\u0000\u030a\u030b\u0005c"+ - "\u0000\u0000\u030bo\u0001\u0000\u0000\u0000\u030c\u030d\u0005=\u0000\u0000"+ - "\u030dq\u0001\u0000\u0000\u0000\u030e\u030f\u0005:\u0000\u0000\u030f\u0310"+ - "\u0005:\u0000\u0000\u0310s\u0001\u0000\u0000\u0000\u0311\u0312\u0005,"+ - "\u0000\u0000\u0312u\u0001\u0000\u0000\u0000\u0313\u0314\u0005d\u0000\u0000"+ - "\u0314\u0315\u0005e\u0000\u0000\u0315\u0316\u0005s\u0000\u0000\u0316\u0317"+ - "\u0005c\u0000\u0000\u0317w\u0001\u0000\u0000\u0000\u0318\u0319\u0005."+ - "\u0000\u0000\u0319y\u0001\u0000\u0000\u0000\u031a\u031b\u0005f\u0000\u0000"+ - "\u031b\u031c\u0005a\u0000\u0000\u031c\u031d\u0005l\u0000\u0000\u031d\u031e"+ - "\u0005s\u0000\u0000\u031e\u031f\u0005e\u0000\u0000\u031f{\u0001\u0000"+ - "\u0000\u0000\u0320\u0321\u0005f\u0000\u0000\u0321\u0322\u0005i\u0000\u0000"+ - "\u0322\u0323\u0005r\u0000\u0000\u0323\u0324\u0005s\u0000\u0000\u0324\u0325"+ - "\u0005t\u0000\u0000\u0325}\u0001\u0000\u0000\u0000\u0326\u0327\u0005l"+ - "\u0000\u0000\u0327\u0328\u0005a\u0000\u0000\u0328\u0329\u0005s\u0000\u0000"+ - "\u0329\u032a\u0005t\u0000\u0000\u032a\u007f\u0001\u0000\u0000\u0000\u032b"+ - "\u032c\u0005(\u0000\u0000\u032c\u0081\u0001\u0000\u0000\u0000\u032d\u032e"+ - "\u0005i\u0000\u0000\u032e\u032f\u0005n\u0000\u0000\u032f\u0083\u0001\u0000"+ - "\u0000\u0000\u0330\u0331\u0005i\u0000\u0000\u0331\u0332\u0005s\u0000\u0000"+ - "\u0332\u0085\u0001\u0000\u0000\u0000\u0333\u0334\u0005l\u0000\u0000\u0334"+ - "\u0335\u0005i\u0000\u0000\u0335\u0336\u0005k\u0000\u0000\u0336\u0337\u0005"+ - "e\u0000\u0000\u0337\u0087\u0001\u0000\u0000\u0000\u0338\u0339\u0005n\u0000"+ - "\u0000\u0339\u033a\u0005o\u0000\u0000\u033a\u033b\u0005t\u0000\u0000\u033b"+ - "\u0089\u0001\u0000\u0000\u0000\u033c\u033d\u0005n\u0000\u0000\u033d\u033e"+ - "\u0005u\u0000\u0000\u033e\u033f\u0005l\u0000\u0000\u033f\u0340\u0005l"+ - "\u0000\u0000\u0340\u008b\u0001\u0000\u0000\u0000\u0341\u0342\u0005n\u0000"+ - "\u0000\u0342\u0343\u0005u\u0000\u0000\u0343\u0344\u0005l\u0000\u0000\u0344"+ - "\u0345\u0005l\u0000\u0000\u0345\u0346\u0005s\u0000\u0000\u0346\u008d\u0001"+ - "\u0000\u0000\u0000\u0347\u0348\u0005o\u0000\u0000\u0348\u0349\u0005r\u0000"+ - 
"\u0000\u0349\u008f\u0001\u0000\u0000\u0000\u034a\u034b\u0005?\u0000\u0000"+ - "\u034b\u0091\u0001\u0000\u0000\u0000\u034c\u034d\u0005r\u0000\u0000\u034d"+ - "\u034e\u0005l\u0000\u0000\u034e\u034f\u0005i\u0000\u0000\u034f\u0350\u0005"+ - "k\u0000\u0000\u0350\u0351\u0005e\u0000\u0000\u0351\u0093\u0001\u0000\u0000"+ - "\u0000\u0352\u0353\u0005)\u0000\u0000\u0353\u0095\u0001\u0000\u0000\u0000"+ - "\u0354\u0355\u0005t\u0000\u0000\u0355\u0356\u0005r\u0000\u0000\u0356\u0357"+ - "\u0005u\u0000\u0000\u0357\u0358\u0005e\u0000\u0000\u0358\u0097\u0001\u0000"+ - "\u0000\u0000\u0359\u035a\u0005=\u0000\u0000\u035a\u035b\u0005=\u0000\u0000"+ - "\u035b\u0099\u0001\u0000\u0000\u0000\u035c\u035d\u0005=\u0000\u0000\u035d"+ - "\u035e\u0005~\u0000\u0000\u035e\u009b\u0001\u0000\u0000\u0000\u035f\u0360"+ - "\u0005!\u0000\u0000\u0360\u0361\u0005=\u0000\u0000\u0361\u009d\u0001\u0000"+ - "\u0000\u0000\u0362\u0363\u0005<\u0000\u0000\u0363\u009f\u0001\u0000\u0000"+ - "\u0000\u0364\u0365\u0005<\u0000\u0000\u0365\u0366\u0005=\u0000\u0000\u0366"+ - "\u00a1\u0001\u0000\u0000\u0000\u0367\u0368\u0005>\u0000\u0000\u0368\u00a3"+ - "\u0001\u0000\u0000\u0000\u0369\u036a\u0005>\u0000\u0000\u036a\u036b\u0005"+ - "=\u0000\u0000\u036b\u00a5\u0001\u0000\u0000\u0000\u036c\u036d\u0005+\u0000"+ - "\u0000\u036d\u00a7\u0001\u0000\u0000\u0000\u036e\u036f\u0005-\u0000\u0000"+ - "\u036f\u00a9\u0001\u0000\u0000\u0000\u0370\u0371\u0005*\u0000\u0000\u0371"+ - "\u00ab\u0001\u0000\u0000\u0000\u0372\u0373\u0005/\u0000\u0000\u0373\u00ad"+ - "\u0001\u0000\u0000\u0000\u0374\u0375\u0005%\u0000\u0000\u0375\u00af\u0001"+ - "\u0000\u0000\u0000\u0376\u0377\u0003\u0090@\u0000\u0377\u037b\u0003R!"+ - "\u0000\u0378\u037a\u0003b)\u0000\u0379\u0378\u0001\u0000\u0000\u0000\u037a"+ - "\u037d\u0001\u0000\u0000\u0000\u037b\u0379\u0001\u0000\u0000\u0000\u037b"+ - "\u037c\u0001\u0000\u0000\u0000\u037c\u0385\u0001\u0000\u0000\u0000\u037d"+ - "\u037b\u0001\u0000\u0000\u0000\u037e\u0380\u0003\u0090@\u0000\u037f\u0381"+ - "\u0003P \u0000\u0380\u037f\u0001\u0000\u0000\u0000\u0381\u0382\u0001\u0000"+ - "\u0000\u0000\u0382\u0380\u0001\u0000\u0000\u0000\u0382\u0383\u0001\u0000"+ - "\u0000\u0000\u0383\u0385\u0001\u0000\u0000\u0000\u0384\u0376\u0001\u0000"+ - "\u0000\u0000\u0384\u037e\u0001\u0000\u0000\u0000\u0385\u00b1\u0001\u0000"+ - "\u0000\u0000\u0386\u0387\u0005[\u0000\u0000\u0387\u0388\u0001\u0000\u0000"+ - "\u0000\u0388\u0389\u0006Q\u0000\u0000\u0389\u038a\u0006Q\u0000\u0000\u038a"+ - "\u00b3\u0001\u0000\u0000\u0000\u038b\u038c\u0005]\u0000\u0000\u038c\u038d"+ - "\u0001\u0000\u0000\u0000\u038d\u038e\u0006R\u000f\u0000\u038e\u038f\u0006"+ - "R\u000f\u0000\u038f\u00b5\u0001\u0000\u0000\u0000\u0390\u0394\u0003R!"+ - "\u0000\u0391\u0393\u0003b)\u0000\u0392\u0391\u0001\u0000\u0000\u0000\u0393"+ - "\u0396\u0001\u0000\u0000\u0000\u0394\u0392\u0001\u0000\u0000\u0000\u0394"+ - "\u0395\u0001\u0000\u0000\u0000\u0395\u03a1\u0001\u0000\u0000\u0000\u0396"+ - "\u0394\u0001\u0000\u0000\u0000\u0397\u039a\u0003`(\u0000\u0398\u039a\u0003"+ - "Z%\u0000\u0399\u0397\u0001\u0000\u0000\u0000\u0399\u0398\u0001\u0000\u0000"+ - "\u0000\u039a\u039c\u0001\u0000\u0000\u0000\u039b\u039d\u0003b)\u0000\u039c"+ - "\u039b\u0001\u0000\u0000\u0000\u039d\u039e\u0001\u0000\u0000\u0000\u039e"+ - "\u039c\u0001\u0000\u0000\u0000\u039e\u039f\u0001\u0000\u0000\u0000\u039f"+ - "\u03a1\u0001\u0000\u0000\u0000\u03a0\u0390\u0001\u0000\u0000\u0000\u03a0"+ - "\u0399\u0001\u0000\u0000\u0000\u03a1\u00b7\u0001\u0000\u0000\u0000\u03a2"+ - "\u03a4\u0003\\&\u0000\u03a3\u03a5\u0003^\'\u0000\u03a4\u03a3\u0001\u0000"+ - 
"\u0000\u0000\u03a5\u03a6\u0001\u0000\u0000\u0000\u03a6\u03a4\u0001\u0000"+ - "\u0000\u0000\u03a6\u03a7\u0001\u0000\u0000\u0000\u03a7\u03a8\u0001\u0000"+ - "\u0000\u0000\u03a8\u03a9\u0003\\&\u0000\u03a9\u00b9\u0001\u0000\u0000"+ - "\u0000\u03aa\u03ab\u0003\u00b8T\u0000\u03ab\u00bb\u0001\u0000\u0000\u0000"+ - "\u03ac\u03ad\u0003:\u0015\u0000\u03ad\u03ae\u0001\u0000\u0000\u0000\u03ae"+ - "\u03af\u0006V\u000b\u0000\u03af\u00bd\u0001\u0000\u0000\u0000\u03b0\u03b1"+ - "\u0003<\u0016\u0000\u03b1\u03b2\u0001\u0000\u0000\u0000\u03b2\u03b3\u0006"+ - "W\u000b\u0000\u03b3\u00bf\u0001\u0000\u0000\u0000\u03b4\u03b5\u0003>\u0017"+ - "\u0000\u03b5\u03b6\u0001\u0000\u0000\u0000\u03b6\u03b7\u0006X\u000b\u0000"+ - "\u03b7\u00c1\u0001\u0000\u0000\u0000\u03b8\u03b9\u0003N\u001f\u0000\u03b9"+ - "\u03ba\u0001\u0000\u0000\u0000\u03ba\u03bb\u0006Y\u000e\u0000\u03bb\u03bc"+ - "\u0006Y\u000f\u0000\u03bc\u00c3\u0001\u0000\u0000\u0000\u03bd\u03be\u0003"+ - "\u00b2Q\u0000\u03be\u03bf\u0001\u0000\u0000\u0000\u03bf\u03c0\u0006Z\f"+ - "\u0000\u03c0\u00c5\u0001\u0000\u0000\u0000\u03c1\u03c2\u0003\u00b4R\u0000"+ - "\u03c2\u03c3\u0001\u0000\u0000\u0000\u03c3\u03c4\u0006[\u0010\u0000\u03c4"+ - "\u00c7\u0001\u0000\u0000\u0000\u03c5\u03c6\u0003\u016c\u00ae\u0000\u03c6"+ - "\u03c7\u0001\u0000\u0000\u0000\u03c7\u03c8\u0006\\\u0011\u0000\u03c8\u00c9"+ - "\u0001\u0000\u0000\u0000\u03c9\u03ca\u0003t2\u0000\u03ca\u03cb\u0001\u0000"+ - "\u0000\u0000\u03cb\u03cc\u0006]\u0012\u0000\u03cc\u00cb\u0001\u0000\u0000"+ - "\u0000\u03cd\u03ce\u0003p0\u0000\u03ce\u03cf\u0001\u0000\u0000\u0000\u03cf"+ - "\u03d0\u0006^\u0013\u0000\u03d0\u00cd\u0001\u0000\u0000\u0000\u03d1\u03d2"+ - "\u0005m\u0000\u0000\u03d2\u03d3\u0005e\u0000\u0000\u03d3\u03d4\u0005t"+ - "\u0000\u0000\u03d4\u03d5\u0005a\u0000\u0000\u03d5\u03d6\u0005d\u0000\u0000"+ - "\u03d6\u03d7\u0005a\u0000\u0000\u03d7\u03d8\u0005t\u0000\u0000\u03d8\u03d9"+ - "\u0005a\u0000\u0000\u03d9\u00cf\u0001\u0000\u0000\u0000\u03da\u03db\u0003"+ - "B\u0019\u0000\u03db\u03dc\u0001\u0000\u0000\u0000\u03dc\u03dd\u0006`\u0014"+ - "\u0000\u03dd\u00d1\u0001\u0000\u0000\u0000\u03de\u03df\u0003d*\u0000\u03df"+ - "\u03e0\u0001\u0000\u0000\u0000\u03e0\u03e1\u0006a\u0015\u0000\u03e1\u00d3"+ - "\u0001\u0000\u0000\u0000\u03e2\u03e3\u0003:\u0015\u0000\u03e3\u03e4\u0001"+ - "\u0000\u0000\u0000\u03e4\u03e5\u0006b\u000b\u0000\u03e5\u00d5\u0001\u0000"+ - "\u0000\u0000\u03e6\u03e7\u0003<\u0016\u0000\u03e7\u03e8\u0001\u0000\u0000"+ - "\u0000\u03e8\u03e9\u0006c\u000b\u0000\u03e9\u00d7\u0001\u0000\u0000\u0000"+ - "\u03ea\u03eb\u0003>\u0017\u0000\u03eb\u03ec\u0001\u0000\u0000\u0000\u03ec"+ - "\u03ed\u0006d\u000b\u0000\u03ed\u00d9\u0001\u0000\u0000\u0000\u03ee\u03ef"+ - "\u0003N\u001f\u0000\u03ef\u03f0\u0001\u0000\u0000\u0000\u03f0\u03f1\u0006"+ - "e\u000e\u0000\u03f1\u03f2\u0006e\u000f\u0000\u03f2\u00db\u0001\u0000\u0000"+ - "\u0000\u03f3\u03f4\u0003x4\u0000\u03f4\u03f5\u0001\u0000\u0000\u0000\u03f5"+ - "\u03f6\u0006f\u0016\u0000\u03f6\u00dd\u0001\u0000\u0000\u0000\u03f7\u03f8"+ - "\u0003t2\u0000\u03f8\u03f9\u0001\u0000\u0000\u0000\u03f9\u03fa\u0006g"+ - "\u0012\u0000\u03fa\u00df\u0001\u0000\u0000\u0000\u03fb\u0400\u0003R!\u0000"+ - "\u03fc\u0400\u0003P \u0000\u03fd\u0400\u0003`(\u0000\u03fe\u0400\u0003"+ - "\u00aaM\u0000\u03ff\u03fb\u0001\u0000\u0000\u0000\u03ff\u03fc\u0001\u0000"+ - "\u0000\u0000\u03ff\u03fd\u0001\u0000\u0000\u0000\u03ff\u03fe\u0001\u0000"+ - "\u0000\u0000\u0400\u00e1\u0001\u0000\u0000\u0000\u0401\u0404\u0003R!\u0000"+ - "\u0402\u0404\u0003\u00aaM\u0000\u0403\u0401\u0001\u0000\u0000\u0000\u0403"+ - 
"\u0402\u0001\u0000\u0000\u0000\u0404\u0408\u0001\u0000\u0000\u0000\u0405"+ - "\u0407\u0003\u00e0h\u0000\u0406\u0405\u0001\u0000\u0000\u0000\u0407\u040a"+ - "\u0001\u0000\u0000\u0000\u0408\u0406\u0001\u0000\u0000\u0000\u0408\u0409"+ - "\u0001\u0000\u0000\u0000\u0409\u0415\u0001\u0000\u0000\u0000\u040a\u0408"+ - "\u0001\u0000\u0000\u0000\u040b\u040e\u0003`(\u0000\u040c\u040e\u0003Z"+ - "%\u0000\u040d\u040b\u0001\u0000\u0000\u0000\u040d\u040c\u0001\u0000\u0000"+ - "\u0000\u040e\u0410\u0001\u0000\u0000\u0000\u040f\u0411\u0003\u00e0h\u0000"+ - "\u0410\u040f\u0001\u0000\u0000\u0000\u0411\u0412\u0001\u0000\u0000\u0000"+ - "\u0412\u0410\u0001\u0000\u0000\u0000\u0412\u0413\u0001\u0000\u0000\u0000"+ - "\u0413\u0415\u0001\u0000\u0000\u0000\u0414\u0403\u0001\u0000\u0000\u0000"+ - "\u0414\u040d\u0001\u0000\u0000\u0000\u0415\u00e3\u0001\u0000\u0000\u0000"+ - "\u0416\u0419\u0003\u00e2i\u0000\u0417\u0419\u0003\u00b8T\u0000\u0418\u0416"+ - "\u0001\u0000\u0000\u0000\u0418\u0417\u0001\u0000\u0000\u0000\u0419\u041a"+ - "\u0001\u0000\u0000\u0000\u041a\u0418\u0001\u0000\u0000\u0000\u041a\u041b"+ - "\u0001\u0000\u0000\u0000\u041b\u00e5\u0001\u0000\u0000\u0000\u041c\u041d"+ - "\u0003:\u0015\u0000\u041d\u041e\u0001\u0000\u0000\u0000\u041e\u041f\u0006"+ - "k\u000b\u0000\u041f\u00e7\u0001\u0000\u0000\u0000\u0420\u0421\u0003<\u0016"+ - "\u0000\u0421\u0422\u0001\u0000\u0000\u0000\u0422\u0423\u0006l\u000b\u0000"+ - "\u0423\u00e9\u0001\u0000\u0000\u0000\u0424\u0425\u0003>\u0017\u0000\u0425"+ - "\u0426\u0001\u0000\u0000\u0000\u0426\u0427\u0006m\u000b\u0000\u0427\u00eb"+ - "\u0001\u0000\u0000\u0000\u0428\u0429\u0003N\u001f\u0000\u0429\u042a\u0001"+ - "\u0000\u0000\u0000\u042a\u042b\u0006n\u000e\u0000\u042b\u042c\u0006n\u000f"+ - "\u0000\u042c\u00ed\u0001\u0000\u0000\u0000\u042d\u042e\u0003p0\u0000\u042e"+ - "\u042f\u0001\u0000\u0000\u0000\u042f\u0430\u0006o\u0013\u0000\u0430\u00ef"+ - "\u0001\u0000\u0000\u0000\u0431\u0432\u0003t2\u0000\u0432\u0433\u0001\u0000"+ - "\u0000\u0000\u0433\u0434\u0006p\u0012\u0000\u0434\u00f1\u0001\u0000\u0000"+ - "\u0000\u0435\u0436\u0003x4\u0000\u0436\u0437\u0001\u0000\u0000\u0000\u0437"+ - "\u0438\u0006q\u0016\u0000\u0438\u00f3\u0001\u0000\u0000\u0000\u0439\u043a"+ - "\u0005a\u0000\u0000\u043a\u043b\u0005s\u0000\u0000\u043b\u00f5\u0001\u0000"+ - "\u0000\u0000\u043c\u043d\u0003\u00e4j\u0000\u043d\u043e\u0001\u0000\u0000"+ - "\u0000\u043e\u043f\u0006s\u0017\u0000\u043f\u00f7\u0001\u0000\u0000\u0000"+ - "\u0440\u0441\u0003:\u0015\u0000\u0441\u0442\u0001\u0000\u0000\u0000\u0442"+ - "\u0443\u0006t\u000b\u0000\u0443\u00f9\u0001\u0000\u0000\u0000\u0444\u0445"+ - "\u0003<\u0016\u0000\u0445\u0446\u0001\u0000\u0000\u0000\u0446\u0447\u0006"+ - "u\u000b\u0000\u0447\u00fb\u0001\u0000\u0000\u0000\u0448\u0449\u0003>\u0017"+ - "\u0000\u0449\u044a\u0001\u0000\u0000\u0000\u044a\u044b\u0006v\u000b\u0000"+ - "\u044b\u00fd\u0001\u0000\u0000\u0000\u044c\u044d\u0003N\u001f\u0000\u044d"+ - "\u044e\u0001\u0000\u0000\u0000\u044e\u044f\u0006w\u000e\u0000\u044f\u0450"+ - "\u0006w\u000f\u0000\u0450\u00ff\u0001\u0000\u0000\u0000\u0451\u0452\u0003"+ - "\u00b2Q\u0000\u0452\u0453\u0001\u0000\u0000\u0000\u0453\u0454\u0006x\f"+ - "\u0000\u0454\u0455\u0006x\u0018\u0000\u0455\u0101\u0001\u0000\u0000\u0000"+ - "\u0456\u0457\u0005o\u0000\u0000\u0457\u0458\u0005n\u0000\u0000\u0458\u0459"+ - "\u0001\u0000\u0000\u0000\u0459\u045a\u0006y\u0019\u0000\u045a\u0103\u0001"+ - "\u0000\u0000\u0000\u045b\u045c\u0005w\u0000\u0000\u045c\u045d\u0005i\u0000"+ - "\u0000\u045d\u045e\u0005t\u0000\u0000\u045e\u045f\u0005h\u0000\u0000\u045f"+ - 
"\u0460\u0001\u0000\u0000\u0000\u0460\u0461\u0006z\u0019\u0000\u0461\u0105"+ - "\u0001\u0000\u0000\u0000\u0462\u0463\b\f\u0000\u0000\u0463\u0107\u0001"+ - "\u0000\u0000\u0000\u0464\u0466\u0003\u0106{\u0000\u0465\u0464\u0001\u0000"+ - "\u0000\u0000\u0466\u0467\u0001\u0000\u0000\u0000\u0467\u0465\u0001\u0000"+ - "\u0000\u0000\u0467\u0468\u0001\u0000\u0000\u0000\u0468\u0469\u0001\u0000"+ - "\u0000\u0000\u0469\u046a\u0003\u016c\u00ae\u0000\u046a\u046c\u0001\u0000"+ - "\u0000\u0000\u046b\u0465\u0001\u0000\u0000\u0000\u046b\u046c\u0001\u0000"+ - "\u0000\u0000\u046c\u046e\u0001\u0000\u0000\u0000\u046d\u046f\u0003\u0106"+ - "{\u0000\u046e\u046d\u0001\u0000\u0000\u0000\u046f\u0470\u0001\u0000\u0000"+ - "\u0000\u0470\u046e\u0001\u0000\u0000\u0000\u0470\u0471\u0001\u0000\u0000"+ - "\u0000\u0471\u0109\u0001\u0000\u0000\u0000\u0472\u0473\u0003\u0108|\u0000"+ - "\u0473\u0474\u0001\u0000\u0000\u0000\u0474\u0475\u0006}\u001a\u0000\u0475"+ - "\u010b\u0001\u0000\u0000\u0000\u0476\u0477\u0003:\u0015\u0000\u0477\u0478"+ - "\u0001\u0000\u0000\u0000\u0478\u0479\u0006~\u000b\u0000\u0479\u010d\u0001"+ - "\u0000\u0000\u0000\u047a\u047b\u0003<\u0016\u0000\u047b\u047c\u0001\u0000"+ - "\u0000\u0000\u047c\u047d\u0006\u007f\u000b\u0000\u047d\u010f\u0001\u0000"+ - "\u0000\u0000\u047e\u047f\u0003>\u0017\u0000\u047f\u0480\u0001\u0000\u0000"+ - "\u0000\u0480\u0481\u0006\u0080\u000b\u0000\u0481\u0111\u0001\u0000\u0000"+ - "\u0000\u0482\u0483\u0003N\u001f\u0000\u0483\u0484\u0001\u0000\u0000\u0000"+ - "\u0484\u0485\u0006\u0081\u000e\u0000\u0485\u0486\u0006\u0081\u000f\u0000"+ - "\u0486\u0487\u0006\u0081\u000f\u0000\u0487\u0113\u0001\u0000\u0000\u0000"+ - "\u0488\u0489\u0003p0\u0000\u0489\u048a\u0001\u0000\u0000\u0000\u048a\u048b"+ - "\u0006\u0082\u0013\u0000\u048b\u0115\u0001\u0000\u0000\u0000\u048c\u048d"+ - "\u0003t2\u0000\u048d\u048e\u0001\u0000\u0000\u0000\u048e\u048f\u0006\u0083"+ - "\u0012\u0000\u048f\u0117\u0001\u0000\u0000\u0000\u0490\u0491\u0003x4\u0000"+ - "\u0491\u0492\u0001\u0000\u0000\u0000\u0492\u0493\u0006\u0084\u0016\u0000"+ - "\u0493\u0119\u0001\u0000\u0000\u0000\u0494\u0495\u0003\u0104z\u0000\u0495"+ - "\u0496\u0001\u0000\u0000\u0000\u0496\u0497\u0006\u0085\u001b\u0000\u0497"+ - "\u011b\u0001\u0000\u0000\u0000\u0498\u0499\u0003\u00e4j\u0000\u0499\u049a"+ - "\u0001\u0000\u0000\u0000\u049a\u049b\u0006\u0086\u0017\u0000\u049b\u011d"+ - "\u0001\u0000\u0000\u0000\u049c\u049d\u0003\u00baU\u0000\u049d\u049e\u0001"+ - "\u0000\u0000\u0000\u049e\u049f\u0006\u0087\u001c\u0000\u049f\u011f\u0001"+ - "\u0000\u0000\u0000\u04a0\u04a1\u0003:\u0015\u0000\u04a1\u04a2\u0001\u0000"+ - "\u0000\u0000\u04a2\u04a3\u0006\u0088\u000b\u0000\u04a3\u0121\u0001\u0000"+ - "\u0000\u0000\u04a4\u04a5\u0003<\u0016\u0000\u04a5\u04a6\u0001\u0000\u0000"+ - "\u0000\u04a6\u04a7\u0006\u0089\u000b\u0000\u04a7\u0123\u0001\u0000\u0000"+ - "\u0000\u04a8\u04a9\u0003>\u0017\u0000\u04a9\u04aa\u0001\u0000\u0000\u0000"+ - "\u04aa\u04ab\u0006\u008a\u000b\u0000\u04ab\u0125\u0001\u0000\u0000\u0000"+ - "\u04ac\u04ad\u0003N\u001f\u0000\u04ad\u04ae\u0001\u0000\u0000\u0000\u04ae"+ - "\u04af\u0006\u008b\u000e\u0000\u04af\u04b0\u0006\u008b\u000f\u0000\u04b0"+ - "\u0127\u0001\u0000\u0000\u0000\u04b1\u04b2\u0003\u016c\u00ae\u0000\u04b2"+ - "\u04b3\u0001\u0000\u0000\u0000\u04b3\u04b4\u0006\u008c\u0011\u0000\u04b4"+ - "\u0129\u0001\u0000\u0000\u0000\u04b5\u04b6\u0003t2\u0000\u04b6\u04b7\u0001"+ - "\u0000\u0000\u0000\u04b7\u04b8\u0006\u008d\u0012\u0000\u04b8\u012b\u0001"+ - "\u0000\u0000\u0000\u04b9\u04ba\u0003x4\u0000\u04ba\u04bb\u0001\u0000\u0000"+ - 
"\u0000\u04bb\u04bc\u0006\u008e\u0016\u0000\u04bc\u012d\u0001\u0000\u0000"+ - "\u0000\u04bd\u04be\u0003\u0102y\u0000\u04be\u04bf\u0001\u0000\u0000\u0000"+ - "\u04bf\u04c0\u0006\u008f\u001d\u0000\u04c0\u04c1\u0006\u008f\u001e\u0000"+ - "\u04c1\u012f\u0001\u0000\u0000\u0000\u04c2\u04c3\u0003B\u0019\u0000\u04c3"+ - "\u04c4\u0001\u0000\u0000\u0000\u04c4\u04c5\u0006\u0090\u0014\u0000\u04c5"+ - "\u0131\u0001\u0000\u0000\u0000\u04c6\u04c7\u0003d*\u0000\u04c7\u04c8\u0001"+ - "\u0000\u0000\u0000\u04c8\u04c9\u0006\u0091\u0015\u0000\u04c9\u0133\u0001"+ - "\u0000\u0000\u0000\u04ca\u04cb\u0003:\u0015\u0000\u04cb\u04cc\u0001\u0000"+ - "\u0000\u0000\u04cc\u04cd\u0006\u0092\u000b\u0000\u04cd\u0135\u0001\u0000"+ - "\u0000\u0000\u04ce\u04cf\u0003<\u0016\u0000\u04cf\u04d0\u0001\u0000\u0000"+ - "\u0000\u04d0\u04d1\u0006\u0093\u000b\u0000\u04d1\u0137\u0001\u0000\u0000"+ - "\u0000\u04d2\u04d3\u0003>\u0017\u0000\u04d3\u04d4\u0001\u0000\u0000\u0000"+ - "\u04d4\u04d5\u0006\u0094\u000b\u0000\u04d5\u0139\u0001\u0000\u0000\u0000"+ - "\u04d6\u04d7\u0003N\u001f\u0000\u04d7\u04d8\u0001\u0000\u0000\u0000\u04d8"+ - "\u04d9\u0006\u0095\u000e\u0000\u04d9\u04da\u0006\u0095\u000f\u0000\u04da"+ - "\u04db\u0006\u0095\u000f\u0000\u04db\u013b\u0001\u0000\u0000\u0000\u04dc"+ - "\u04dd\u0003t2\u0000\u04dd\u04de\u0001\u0000\u0000\u0000\u04de\u04df\u0006"+ - "\u0096\u0012\u0000\u04df\u013d\u0001\u0000\u0000\u0000\u04e0\u04e1\u0003"+ - "x4\u0000\u04e1\u04e2\u0001\u0000\u0000\u0000\u04e2\u04e3\u0006\u0097\u0016"+ - "\u0000\u04e3\u013f\u0001\u0000\u0000\u0000\u04e4\u04e5\u0003\u00e4j\u0000"+ - "\u04e5\u04e6\u0001\u0000\u0000\u0000\u04e6\u04e7\u0006\u0098\u0017\u0000"+ - "\u04e7\u0141\u0001\u0000\u0000\u0000\u04e8\u04e9\u0003:\u0015\u0000\u04e9"+ - "\u04ea\u0001\u0000\u0000\u0000\u04ea\u04eb\u0006\u0099\u000b\u0000\u04eb"+ - "\u0143\u0001\u0000\u0000\u0000\u04ec\u04ed\u0003<\u0016\u0000\u04ed\u04ee"+ - "\u0001\u0000\u0000\u0000\u04ee\u04ef\u0006\u009a\u000b\u0000\u04ef\u0145"+ - "\u0001\u0000\u0000\u0000\u04f0\u04f1\u0003>\u0017\u0000\u04f1\u04f2\u0001"+ - "\u0000\u0000\u0000\u04f2\u04f3\u0006\u009b\u000b\u0000\u04f3\u0147\u0001"+ - "\u0000\u0000\u0000\u04f4\u04f5\u0003N\u001f\u0000\u04f5\u04f6\u0001\u0000"+ - "\u0000\u0000\u04f6\u04f7\u0006\u009c\u000e\u0000\u04f7\u04f8\u0006\u009c"+ - "\u000f\u0000\u04f8\u0149\u0001\u0000\u0000\u0000\u04f9\u04fa\u0003x4\u0000"+ - "\u04fa\u04fb\u0001\u0000\u0000\u0000\u04fb\u04fc\u0006\u009d\u0016\u0000"+ - "\u04fc\u014b\u0001\u0000\u0000\u0000\u04fd\u04fe\u0003\u00baU\u0000\u04fe"+ - "\u04ff\u0001\u0000\u0000\u0000\u04ff\u0500\u0006\u009e\u001c\u0000\u0500"+ - "\u014d\u0001\u0000\u0000\u0000\u0501\u0502\u0003\u00b6S\u0000\u0502\u0503"+ - "\u0001\u0000\u0000\u0000\u0503\u0504\u0006\u009f\u001f\u0000\u0504\u014f"+ - "\u0001\u0000\u0000\u0000\u0505\u0506\u0003:\u0015\u0000\u0506\u0507\u0001"+ - "\u0000\u0000\u0000\u0507\u0508\u0006\u00a0\u000b\u0000\u0508\u0151\u0001"+ - "\u0000\u0000\u0000\u0509\u050a\u0003<\u0016\u0000\u050a\u050b\u0001\u0000"+ - "\u0000\u0000\u050b\u050c\u0006\u00a1\u000b\u0000\u050c\u0153\u0001\u0000"+ - "\u0000\u0000\u050d\u050e\u0003>\u0017\u0000\u050e\u050f\u0001\u0000\u0000"+ - "\u0000\u050f\u0510\u0006\u00a2\u000b\u0000\u0510\u0155\u0001\u0000\u0000"+ - "\u0000\u0511\u0512\u0003N\u001f\u0000\u0512\u0513\u0001\u0000\u0000\u0000"+ - "\u0513\u0514\u0006\u00a3\u000e\u0000\u0514\u0515\u0006\u00a3\u000f\u0000"+ - "\u0515\u0157\u0001\u0000\u0000\u0000\u0516\u0517\u0005i\u0000\u0000\u0517"+ - "\u0518\u0005n\u0000\u0000\u0518\u0519\u0005f\u0000\u0000\u0519\u051a\u0005"+ - 
"o\u0000\u0000\u051a\u0159\u0001\u0000\u0000\u0000\u051b\u051c\u0003:\u0015"+ - "\u0000\u051c\u051d\u0001\u0000\u0000\u0000\u051d\u051e\u0006\u00a5\u000b"+ - "\u0000\u051e\u015b\u0001\u0000\u0000\u0000\u051f\u0520\u0003<\u0016\u0000"+ - "\u0520\u0521\u0001\u0000\u0000\u0000\u0521\u0522\u0006\u00a6\u000b\u0000"+ - "\u0522\u015d\u0001\u0000\u0000\u0000\u0523\u0524\u0003>\u0017\u0000\u0524"+ - "\u0525\u0001\u0000\u0000\u0000\u0525\u0526\u0006\u00a7\u000b\u0000\u0526"+ - "\u015f\u0001\u0000\u0000\u0000\u0527\u0528\u0003N\u001f\u0000\u0528\u0529"+ - "\u0001\u0000\u0000\u0000\u0529\u052a\u0006\u00a8\u000e\u0000\u052a\u052b"+ - "\u0006\u00a8\u000f\u0000\u052b\u0161\u0001\u0000\u0000\u0000\u052c\u052d"+ - "\u0005f\u0000\u0000\u052d\u052e\u0005u\u0000\u0000\u052e\u052f\u0005n"+ - "\u0000\u0000\u052f\u0530\u0005c\u0000\u0000\u0530\u0531\u0005t\u0000\u0000"+ - "\u0531\u0532\u0005i\u0000\u0000\u0532\u0533\u0005o\u0000\u0000\u0533\u0534"+ - "\u0005n\u0000\u0000\u0534\u0535\u0005s\u0000\u0000\u0535\u0163\u0001\u0000"+ - "\u0000\u0000\u0536\u0537\u0003:\u0015\u0000\u0537\u0538\u0001\u0000\u0000"+ - "\u0000\u0538\u0539\u0006\u00aa\u000b\u0000\u0539\u0165\u0001\u0000\u0000"+ - "\u0000\u053a\u053b\u0003<\u0016\u0000\u053b\u053c\u0001\u0000\u0000\u0000"+ - "\u053c\u053d\u0006\u00ab\u000b\u0000\u053d\u0167\u0001\u0000\u0000\u0000"+ - "\u053e\u053f\u0003>\u0017\u0000\u053f\u0540\u0001\u0000\u0000\u0000\u0540"+ - "\u0541\u0006\u00ac\u000b\u0000\u0541\u0169\u0001\u0000\u0000\u0000\u0542"+ - "\u0543\u0003\u00b4R\u0000\u0543\u0544\u0001\u0000\u0000\u0000\u0544\u0545"+ - "\u0006\u00ad\u0010\u0000\u0545\u0546\u0006\u00ad\u000f\u0000\u0546\u016b"+ - "\u0001\u0000\u0000\u0000\u0547\u0548\u0005:\u0000\u0000\u0548\u016d\u0001"+ - "\u0000\u0000\u0000\u0549\u054f\u0003Z%\u0000\u054a\u054f\u0003P \u0000"+ - "\u054b\u054f\u0003x4\u0000\u054c\u054f\u0003R!\u0000\u054d\u054f\u0003"+ - "`(\u0000\u054e\u0549\u0001\u0000\u0000\u0000\u054e\u054a\u0001\u0000\u0000"+ - "\u0000\u054e\u054b\u0001\u0000\u0000\u0000\u054e\u054c\u0001\u0000\u0000"+ - "\u0000\u054e\u054d\u0001\u0000\u0000\u0000\u054f\u0550\u0001\u0000\u0000"+ - "\u0000\u0550\u054e\u0001\u0000\u0000\u0000\u0550\u0551\u0001\u0000\u0000"+ - "\u0000\u0551\u016f\u0001\u0000\u0000\u0000\u0552\u0553\u0003:\u0015\u0000"+ - "\u0553\u0554\u0001\u0000\u0000\u0000\u0554\u0555\u0006\u00b0\u000b\u0000"+ - "\u0555\u0171\u0001\u0000\u0000\u0000\u0556\u0557\u0003<\u0016\u0000\u0557"+ - "\u0558\u0001\u0000\u0000\u0000\u0558\u0559\u0006\u00b1\u000b\u0000\u0559"+ - "\u0173\u0001\u0000\u0000\u0000\u055a\u055b\u0003>\u0017\u0000\u055b\u055c"+ - "\u0001\u0000\u0000\u0000\u055c\u055d\u0006\u00b2\u000b\u0000\u055d\u0175"+ - "\u0001\u0000\u0000\u0000\u055e\u055f\u0003N\u001f\u0000\u055f\u0560\u0001"+ - "\u0000\u0000\u0000\u0560\u0561\u0006\u00b3\u000e\u0000\u0561\u0562\u0006"+ - "\u00b3\u000f\u0000\u0562\u0177\u0001\u0000\u0000\u0000\u0563\u0564\u0003"+ - "B\u0019\u0000\u0564\u0565\u0001\u0000\u0000\u0000\u0565\u0566\u0006\u00b4"+ - "\u0014\u0000\u0566\u0567\u0006\u00b4\u000f\u0000\u0567\u0568\u0006\u00b4"+ - " \u0000\u0568\u0179\u0001\u0000\u0000\u0000\u0569\u056a\u0003d*\u0000"+ - "\u056a\u056b\u0001\u0000\u0000\u0000\u056b\u056c\u0006\u00b5\u0015\u0000"+ - "\u056c\u056d\u0006\u00b5\u000f\u0000\u056d\u056e\u0006\u00b5 \u0000\u056e"+ - "\u017b\u0001\u0000\u0000\u0000\u056f\u0570\u0003:\u0015\u0000\u0570\u0571"+ - "\u0001\u0000\u0000\u0000\u0571\u0572\u0006\u00b6\u000b\u0000\u0572\u017d"+ - "\u0001\u0000\u0000\u0000\u0573\u0574\u0003<\u0016\u0000\u0574\u0575\u0001"+ - 
"\u0000\u0000\u0000\u0575\u0576\u0006\u00b7\u000b\u0000\u0576\u017f\u0001"+ - "\u0000\u0000\u0000\u0577\u0578\u0003>\u0017\u0000\u0578\u0579\u0001\u0000"+ - "\u0000\u0000\u0579\u057a\u0006\u00b8\u000b\u0000\u057a\u0181\u0001\u0000"+ - "\u0000\u0000\u057b\u057c\u0003\u016c\u00ae\u0000\u057c\u057d\u0001\u0000"+ - "\u0000\u0000\u057d\u057e\u0006\u00b9\u0011\u0000\u057e\u057f\u0006\u00b9"+ - "\u000f\u0000\u057f\u0580\u0006\u00b9\u0007\u0000\u0580\u0183\u0001\u0000"+ - "\u0000\u0000\u0581\u0582\u0003t2\u0000\u0582\u0583\u0001\u0000\u0000\u0000"+ - "\u0583\u0584\u0006\u00ba\u0012\u0000\u0584\u0585\u0006\u00ba\u000f\u0000"+ - "\u0585\u0586\u0006\u00ba\u0007\u0000\u0586\u0185\u0001\u0000\u0000\u0000"+ - "\u0587\u0588\u0003:\u0015\u0000\u0588\u0589\u0001\u0000\u0000\u0000\u0589"+ - "\u058a\u0006\u00bb\u000b\u0000\u058a\u0187\u0001\u0000\u0000\u0000\u058b"+ - "\u058c\u0003<\u0016\u0000\u058c\u058d\u0001\u0000\u0000\u0000\u058d\u058e"+ - "\u0006\u00bc\u000b\u0000\u058e\u0189\u0001\u0000\u0000\u0000\u058f\u0590"+ - "\u0003>\u0017\u0000\u0590\u0591\u0001\u0000\u0000\u0000\u0591\u0592\u0006"+ - "\u00bd\u000b\u0000\u0592\u018b\u0001\u0000\u0000\u0000\u0593\u0594\u0003"+ - "\u00baU\u0000\u0594\u0595\u0001\u0000\u0000\u0000\u0595\u0596\u0006\u00be"+ - "\u000f\u0000\u0596\u0597\u0006\u00be\u0000\u0000\u0597\u0598\u0006\u00be"+ - "\u001c\u0000\u0598\u018d\u0001\u0000\u0000\u0000\u0599\u059a\u0003\u00b6"+ - "S\u0000\u059a\u059b\u0001\u0000\u0000\u0000\u059b\u059c\u0006\u00bf\u000f"+ - "\u0000\u059c\u059d\u0006\u00bf\u0000\u0000\u059d\u059e\u0006\u00bf\u001f"+ - "\u0000\u059e\u018f\u0001\u0000\u0000\u0000\u059f\u05a0\u0003j-\u0000\u05a0"+ - "\u05a1\u0001\u0000\u0000\u0000\u05a1\u05a2\u0006\u00c0\u000f\u0000\u05a2"+ - "\u05a3\u0006\u00c0\u0000\u0000\u05a3\u05a4\u0006\u00c0!\u0000\u05a4\u0191"+ - "\u0001\u0000\u0000\u0000\u05a5\u05a6\u0003N\u001f\u0000\u05a6\u05a7\u0001"+ - "\u0000\u0000\u0000\u05a7\u05a8\u0006\u00c1\u000e\u0000\u05a8\u05a9\u0006"+ - "\u00c1\u000f\u0000\u05a9\u0193\u0001\u0000\u0000\u0000A\u0000\u0001\u0002"+ - "\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f\u0240\u024a"+ - "\u024e\u0251\u025a\u025c\u0267\u026e\u0273\u029a\u029f\u02a8\u02af\u02b4"+ - "\u02b6\u02c1\u02c9\u02cc\u02ce\u02d3\u02d8\u02de\u02e5\u02ea\u02f0\u02f3"+ - "\u02fb\u02ff\u037b\u0382\u0384\u0394\u0399\u039e\u03a0\u03a6\u03ff\u0403"+ - "\u0408\u040d\u0412\u0414\u0418\u041a\u0467\u046b\u0470\u054e\u0550\"\u0005"+ + "\u0000\u02f2\u02f3\u0001\u0000\u0000\u0000\u02f3\u02f6\u0001\u0000\u0000"+ + "\u0000\u02f4\u02f2\u0001\u0000\u0000\u0000\u02f5\u02ee\u0001\u0000\u0000"+ + "\u0000\u02f5\u02f6\u0001\u0000\u0000\u0000\u02f6\u02f7\u0001\u0000\u0000"+ + "\u0000\u02f7\u02f8\u0003X$\u0000\u02f8\u0302\u0001\u0000\u0000\u0000\u02f9"+ + "\u02fb\u0003x4\u0000\u02fa\u02fc\u0003P \u0000\u02fb\u02fa\u0001\u0000"+ + "\u0000\u0000\u02fc\u02fd\u0001\u0000\u0000\u0000\u02fd\u02fb\u0001\u0000"+ + "\u0000\u0000\u02fd\u02fe\u0001\u0000\u0000\u0000\u02fe\u02ff\u0001\u0000"+ + "\u0000\u0000\u02ff\u0300\u0003X$\u0000\u0300\u0302\u0001\u0000\u0000\u0000"+ + "\u0301\u02d8\u0001\u0000\u0000\u0000\u0301\u02e3\u0001\u0000\u0000\u0000"+ + "\u0301\u02ea\u0001\u0000\u0000\u0000\u0301\u02f9\u0001\u0000\u0000\u0000"+ + "\u0302i\u0001\u0000\u0000\u0000\u0303\u0304\u0005b\u0000\u0000\u0304\u0305"+ + "\u0005y\u0000\u0000\u0305k\u0001\u0000\u0000\u0000\u0306\u0307\u0005a"+ + "\u0000\u0000\u0307\u0308\u0005n\u0000\u0000\u0308\u0309\u0005d\u0000\u0000"+ + "\u0309m\u0001\u0000\u0000\u0000\u030a\u030b\u0005a\u0000\u0000\u030b\u030c"+ + 
"\u0005s\u0000\u0000\u030c\u030d\u0005c\u0000\u0000\u030do\u0001\u0000"+ + "\u0000\u0000\u030e\u030f\u0005=\u0000\u0000\u030fq\u0001\u0000\u0000\u0000"+ + "\u0310\u0311\u0005:\u0000\u0000\u0311\u0312\u0005:\u0000\u0000\u0312s"+ + "\u0001\u0000\u0000\u0000\u0313\u0314\u0005,\u0000\u0000\u0314u\u0001\u0000"+ + "\u0000\u0000\u0315\u0316\u0005d\u0000\u0000\u0316\u0317\u0005e\u0000\u0000"+ + "\u0317\u0318\u0005s\u0000\u0000\u0318\u0319\u0005c\u0000\u0000\u0319w"+ + "\u0001\u0000\u0000\u0000\u031a\u031b\u0005.\u0000\u0000\u031by\u0001\u0000"+ + "\u0000\u0000\u031c\u031d\u0005f\u0000\u0000\u031d\u031e\u0005a\u0000\u0000"+ + "\u031e\u031f\u0005l\u0000\u0000\u031f\u0320\u0005s\u0000\u0000\u0320\u0321"+ + "\u0005e\u0000\u0000\u0321{\u0001\u0000\u0000\u0000\u0322\u0323\u0005f"+ + "\u0000\u0000\u0323\u0324\u0005i\u0000\u0000\u0324\u0325\u0005r\u0000\u0000"+ + "\u0325\u0326\u0005s\u0000\u0000\u0326\u0327\u0005t\u0000\u0000\u0327}"+ + "\u0001\u0000\u0000\u0000\u0328\u0329\u0005i\u0000\u0000\u0329\u032a\u0005"+ + "n\u0000\u0000\u032a\u007f\u0001\u0000\u0000\u0000\u032b\u032c\u0005i\u0000"+ + "\u0000\u032c\u032d\u0005s\u0000\u0000\u032d\u0081\u0001\u0000\u0000\u0000"+ + "\u032e\u032f\u0005l\u0000\u0000\u032f\u0330\u0005a\u0000\u0000\u0330\u0331"+ + "\u0005s\u0000\u0000\u0331\u0332\u0005t\u0000\u0000\u0332\u0083\u0001\u0000"+ + "\u0000\u0000\u0333\u0334\u0005l\u0000\u0000\u0334\u0335\u0005i\u0000\u0000"+ + "\u0335\u0336\u0005k\u0000\u0000\u0336\u0337\u0005e\u0000\u0000\u0337\u0085"+ + "\u0001\u0000\u0000\u0000\u0338\u0339\u0005(\u0000\u0000\u0339\u0087\u0001"+ + "\u0000\u0000\u0000\u033a\u033b\u0005m\u0000\u0000\u033b\u033c\u0005a\u0000"+ + "\u0000\u033c\u033d\u0005t\u0000\u0000\u033d\u033e\u0005c\u0000\u0000\u033e"+ + "\u033f\u0005h\u0000\u0000\u033f\u0089\u0001\u0000\u0000\u0000\u0340\u0341"+ + "\u0005n\u0000\u0000\u0341\u0342\u0005o\u0000\u0000\u0342\u0343\u0005t"+ + "\u0000\u0000\u0343\u008b\u0001\u0000\u0000\u0000\u0344\u0345\u0005n\u0000"+ + "\u0000\u0345\u0346\u0005u\u0000\u0000\u0346\u0347\u0005l\u0000\u0000\u0347"+ + "\u0348\u0005l\u0000\u0000\u0348\u008d\u0001\u0000\u0000\u0000\u0349\u034a"+ + "\u0005n\u0000\u0000\u034a\u034b\u0005u\u0000\u0000\u034b\u034c\u0005l"+ + "\u0000\u0000\u034c\u034d\u0005l\u0000\u0000\u034d\u034e\u0005s\u0000\u0000"+ + "\u034e\u008f\u0001\u0000\u0000\u0000\u034f\u0350\u0005o\u0000\u0000\u0350"+ + "\u0351\u0005r\u0000\u0000\u0351\u0091\u0001\u0000\u0000\u0000\u0352\u0353"+ + "\u0005?\u0000\u0000\u0353\u0093\u0001\u0000\u0000\u0000\u0354\u0355\u0005"+ + "r\u0000\u0000\u0355\u0356\u0005l\u0000\u0000\u0356\u0357\u0005i\u0000"+ + "\u0000\u0357\u0358\u0005k\u0000\u0000\u0358\u0359\u0005e\u0000\u0000\u0359"+ + "\u0095\u0001\u0000\u0000\u0000\u035a\u035b\u0005)\u0000\u0000\u035b\u0097"+ + "\u0001\u0000\u0000\u0000\u035c\u035d\u0005t\u0000\u0000\u035d\u035e\u0005"+ + "r\u0000\u0000\u035e\u035f\u0005u\u0000\u0000\u035f\u0360\u0005e\u0000"+ + "\u0000\u0360\u0099\u0001\u0000\u0000\u0000\u0361\u0362\u0005=\u0000\u0000"+ + "\u0362\u0363\u0005=\u0000\u0000\u0363\u009b\u0001\u0000\u0000\u0000\u0364"+ + "\u0365\u0005=\u0000\u0000\u0365\u0366\u0005~\u0000\u0000\u0366\u009d\u0001"+ + "\u0000\u0000\u0000\u0367\u0368\u0005!\u0000\u0000\u0368\u0369\u0005=\u0000"+ + "\u0000\u0369\u009f\u0001\u0000\u0000\u0000\u036a\u036b\u0005<\u0000\u0000"+ + "\u036b\u00a1\u0001\u0000\u0000\u0000\u036c\u036d\u0005<\u0000\u0000\u036d"+ + "\u036e\u0005=\u0000\u0000\u036e\u00a3\u0001\u0000\u0000\u0000\u036f\u0370"+ + "\u0005>\u0000\u0000\u0370\u00a5\u0001\u0000\u0000\u0000\u0371\u0372\u0005"+ + 
">\u0000\u0000\u0372\u0373\u0005=\u0000\u0000\u0373\u00a7\u0001\u0000\u0000"+ + "\u0000\u0374\u0375\u0005+\u0000\u0000\u0375\u00a9\u0001\u0000\u0000\u0000"+ + "\u0376\u0377\u0005-\u0000\u0000\u0377\u00ab\u0001\u0000\u0000\u0000\u0378"+ + "\u0379\u0005*\u0000\u0000\u0379\u00ad\u0001\u0000\u0000\u0000\u037a\u037b"+ + "\u0005/\u0000\u0000\u037b\u00af\u0001\u0000\u0000\u0000\u037c\u037d\u0005"+ + "%\u0000\u0000\u037d\u00b1\u0001\u0000\u0000\u0000\u037e\u037f\u0003\u0092"+ + "A\u0000\u037f\u0383\u0003R!\u0000\u0380\u0382\u0003b)\u0000\u0381\u0380"+ + "\u0001\u0000\u0000\u0000\u0382\u0385\u0001\u0000\u0000\u0000\u0383\u0381"+ + "\u0001\u0000\u0000\u0000\u0383\u0384\u0001\u0000\u0000\u0000\u0384\u038d"+ + "\u0001\u0000\u0000\u0000\u0385\u0383\u0001\u0000\u0000\u0000\u0386\u0388"+ + "\u0003\u0092A\u0000\u0387\u0389\u0003P \u0000\u0388\u0387\u0001\u0000"+ + "\u0000\u0000\u0389\u038a\u0001\u0000\u0000\u0000\u038a\u0388\u0001\u0000"+ + "\u0000\u0000\u038a\u038b\u0001\u0000\u0000\u0000\u038b\u038d\u0001\u0000"+ + "\u0000\u0000\u038c\u037e\u0001\u0000\u0000\u0000\u038c\u0386\u0001\u0000"+ + "\u0000\u0000\u038d\u00b3\u0001\u0000\u0000\u0000\u038e\u038f\u0005[\u0000"+ + "\u0000\u038f\u0390\u0001\u0000\u0000\u0000\u0390\u0391\u0006R\u0000\u0000"+ + "\u0391\u0392\u0006R\u0000\u0000\u0392\u00b5\u0001\u0000\u0000\u0000\u0393"+ + "\u0394\u0005]\u0000\u0000\u0394\u0395\u0001\u0000\u0000\u0000\u0395\u0396"+ + "\u0006S\u000f\u0000\u0396\u0397\u0006S\u000f\u0000\u0397\u00b7\u0001\u0000"+ + "\u0000\u0000\u0398\u039c\u0003R!\u0000\u0399\u039b\u0003b)\u0000\u039a"+ + "\u0399\u0001\u0000\u0000\u0000\u039b\u039e\u0001\u0000\u0000\u0000\u039c"+ + "\u039a\u0001\u0000\u0000\u0000\u039c\u039d\u0001\u0000\u0000\u0000\u039d"+ + "\u03a9\u0001\u0000\u0000\u0000\u039e\u039c\u0001\u0000\u0000\u0000\u039f"+ + "\u03a2\u0003`(\u0000\u03a0\u03a2\u0003Z%\u0000\u03a1\u039f\u0001\u0000"+ + "\u0000\u0000\u03a1\u03a0\u0001\u0000\u0000\u0000\u03a2\u03a4\u0001\u0000"+ + "\u0000\u0000\u03a3\u03a5\u0003b)\u0000\u03a4\u03a3\u0001\u0000\u0000\u0000"+ + "\u03a5\u03a6\u0001\u0000\u0000\u0000\u03a6\u03a4\u0001\u0000\u0000\u0000"+ + "\u03a6\u03a7\u0001\u0000\u0000\u0000\u03a7\u03a9\u0001\u0000\u0000\u0000"+ + "\u03a8\u0398\u0001\u0000\u0000\u0000\u03a8\u03a1\u0001\u0000\u0000\u0000"+ + "\u03a9\u00b9\u0001\u0000\u0000\u0000\u03aa\u03ac\u0003\\&\u0000\u03ab"+ + "\u03ad\u0003^\'\u0000\u03ac\u03ab\u0001\u0000\u0000\u0000\u03ad\u03ae"+ + "\u0001\u0000\u0000\u0000\u03ae\u03ac\u0001\u0000\u0000\u0000\u03ae\u03af"+ + "\u0001\u0000\u0000\u0000\u03af\u03b0\u0001\u0000\u0000\u0000\u03b0\u03b1"+ + "\u0003\\&\u0000\u03b1\u00bb\u0001\u0000\u0000\u0000\u03b2\u03b3\u0003"+ + "\u00baU\u0000\u03b3\u00bd\u0001\u0000\u0000\u0000\u03b4\u03b5\u0003:\u0015"+ + "\u0000\u03b5\u03b6\u0001\u0000\u0000\u0000\u03b6\u03b7\u0006W\u000b\u0000"+ + "\u03b7\u00bf\u0001\u0000\u0000\u0000\u03b8\u03b9\u0003<\u0016\u0000\u03b9"+ + "\u03ba\u0001\u0000\u0000\u0000\u03ba\u03bb\u0006X\u000b\u0000\u03bb\u00c1"+ + "\u0001\u0000\u0000\u0000\u03bc\u03bd\u0003>\u0017\u0000\u03bd\u03be\u0001"+ + "\u0000\u0000\u0000\u03be\u03bf\u0006Y\u000b\u0000\u03bf\u00c3\u0001\u0000"+ + "\u0000\u0000\u03c0\u03c1\u0003N\u001f\u0000\u03c1\u03c2\u0001\u0000\u0000"+ + "\u0000\u03c2\u03c3\u0006Z\u000e\u0000\u03c3\u03c4\u0006Z\u000f\u0000\u03c4"+ + "\u00c5\u0001\u0000\u0000\u0000\u03c5\u03c6\u0003\u00b4R\u0000\u03c6\u03c7"+ + "\u0001\u0000\u0000\u0000\u03c7\u03c8\u0006[\f\u0000\u03c8\u00c7\u0001"+ + "\u0000\u0000\u0000\u03c9\u03ca\u0003\u00b6S\u0000\u03ca\u03cb\u0001\u0000"+ + 
"\u0000\u0000\u03cb\u03cc\u0006\\\u0010\u0000\u03cc\u00c9\u0001\u0000\u0000"+ + "\u0000\u03cd\u03ce\u0003\u016e\u00af\u0000\u03ce\u03cf\u0001\u0000\u0000"+ + "\u0000\u03cf\u03d0\u0006]\u0011\u0000\u03d0\u00cb\u0001\u0000\u0000\u0000"+ + "\u03d1\u03d2\u0003t2\u0000\u03d2\u03d3\u0001\u0000\u0000\u0000\u03d3\u03d4"+ + "\u0006^\u0012\u0000\u03d4\u00cd\u0001\u0000\u0000\u0000\u03d5\u03d6\u0003"+ + "p0\u0000\u03d6\u03d7\u0001\u0000\u0000\u0000\u03d7\u03d8\u0006_\u0013"+ + "\u0000\u03d8\u00cf\u0001\u0000\u0000\u0000\u03d9\u03da\u0005m\u0000\u0000"+ + "\u03da\u03db\u0005e\u0000\u0000\u03db\u03dc\u0005t\u0000\u0000\u03dc\u03dd"+ + "\u0005a\u0000\u0000\u03dd\u03de\u0005d\u0000\u0000\u03de\u03df\u0005a"+ + "\u0000\u0000\u03df\u03e0\u0005t\u0000\u0000\u03e0\u03e1\u0005a\u0000\u0000"+ + "\u03e1\u00d1\u0001\u0000\u0000\u0000\u03e2\u03e3\u0003B\u0019\u0000\u03e3"+ + "\u03e4\u0001\u0000\u0000\u0000\u03e4\u03e5\u0006a\u0014\u0000\u03e5\u00d3"+ + "\u0001\u0000\u0000\u0000\u03e6\u03e7\u0003d*\u0000\u03e7\u03e8\u0001\u0000"+ + "\u0000\u0000\u03e8\u03e9\u0006b\u0015\u0000\u03e9\u00d5\u0001\u0000\u0000"+ + "\u0000\u03ea\u03eb\u0003:\u0015\u0000\u03eb\u03ec\u0001\u0000\u0000\u0000"+ + "\u03ec\u03ed\u0006c\u000b\u0000\u03ed\u00d7\u0001\u0000\u0000\u0000\u03ee"+ + "\u03ef\u0003<\u0016\u0000\u03ef\u03f0\u0001\u0000\u0000\u0000\u03f0\u03f1"+ + "\u0006d\u000b\u0000\u03f1\u00d9\u0001\u0000\u0000\u0000\u03f2\u03f3\u0003"+ + ">\u0017\u0000\u03f3\u03f4\u0001\u0000\u0000\u0000\u03f4\u03f5\u0006e\u000b"+ + "\u0000\u03f5\u00db\u0001\u0000\u0000\u0000\u03f6\u03f7\u0003N\u001f\u0000"+ + "\u03f7\u03f8\u0001\u0000\u0000\u0000\u03f8\u03f9\u0006f\u000e\u0000\u03f9"+ + "\u03fa\u0006f\u000f\u0000\u03fa\u00dd\u0001\u0000\u0000\u0000\u03fb\u03fc"+ + "\u0003x4\u0000\u03fc\u03fd\u0001\u0000\u0000\u0000\u03fd\u03fe\u0006g"+ + "\u0016\u0000\u03fe\u00df\u0001\u0000\u0000\u0000\u03ff\u0400\u0003t2\u0000"+ + "\u0400\u0401\u0001\u0000\u0000\u0000\u0401\u0402\u0006h\u0012\u0000\u0402"+ + "\u00e1\u0001\u0000\u0000\u0000\u0403\u0408\u0003R!\u0000\u0404\u0408\u0003"+ + "P \u0000\u0405\u0408\u0003`(\u0000\u0406\u0408\u0003\u00acN\u0000\u0407"+ + "\u0403\u0001\u0000\u0000\u0000\u0407\u0404\u0001\u0000\u0000\u0000\u0407"+ + "\u0405\u0001\u0000\u0000\u0000\u0407\u0406\u0001\u0000\u0000\u0000\u0408"+ + "\u00e3\u0001\u0000\u0000\u0000\u0409\u040c\u0003R!\u0000\u040a\u040c\u0003"+ + "\u00acN\u0000\u040b\u0409\u0001\u0000\u0000\u0000\u040b\u040a\u0001\u0000"+ + "\u0000\u0000\u040c\u0410\u0001\u0000\u0000\u0000\u040d\u040f\u0003\u00e2"+ + "i\u0000\u040e\u040d\u0001\u0000\u0000\u0000\u040f\u0412\u0001\u0000\u0000"+ + "\u0000\u0410\u040e\u0001\u0000\u0000\u0000\u0410\u0411\u0001\u0000\u0000"+ + "\u0000\u0411\u041d\u0001\u0000\u0000\u0000\u0412\u0410\u0001\u0000\u0000"+ + "\u0000\u0413\u0416\u0003`(\u0000\u0414\u0416\u0003Z%\u0000\u0415\u0413"+ + "\u0001\u0000\u0000\u0000\u0415\u0414\u0001\u0000\u0000\u0000\u0416\u0418"+ + "\u0001\u0000\u0000\u0000\u0417\u0419\u0003\u00e2i\u0000\u0418\u0417\u0001"+ + "\u0000\u0000\u0000\u0419\u041a\u0001\u0000\u0000\u0000\u041a\u0418\u0001"+ + "\u0000\u0000\u0000\u041a\u041b\u0001\u0000\u0000\u0000\u041b\u041d\u0001"+ + "\u0000\u0000\u0000\u041c\u040b\u0001\u0000\u0000\u0000\u041c\u0415\u0001"+ + "\u0000\u0000\u0000\u041d\u00e5\u0001\u0000\u0000\u0000\u041e\u0421\u0003"+ + "\u00e4j\u0000\u041f\u0421\u0003\u00baU\u0000\u0420\u041e\u0001\u0000\u0000"+ + "\u0000\u0420\u041f\u0001\u0000\u0000\u0000\u0421\u0422\u0001\u0000\u0000"+ + "\u0000\u0422\u0420\u0001\u0000\u0000\u0000\u0422\u0423\u0001\u0000\u0000"+ + 
"\u0000\u0423\u00e7\u0001\u0000\u0000\u0000\u0424\u0425\u0003:\u0015\u0000"+ + "\u0425\u0426\u0001\u0000\u0000\u0000\u0426\u0427\u0006l\u000b\u0000\u0427"+ + "\u00e9\u0001\u0000\u0000\u0000\u0428\u0429\u0003<\u0016\u0000\u0429\u042a"+ + "\u0001\u0000\u0000\u0000\u042a\u042b\u0006m\u000b\u0000\u042b\u00eb\u0001"+ + "\u0000\u0000\u0000\u042c\u042d\u0003>\u0017\u0000\u042d\u042e\u0001\u0000"+ + "\u0000\u0000\u042e\u042f\u0006n\u000b\u0000\u042f\u00ed\u0001\u0000\u0000"+ + "\u0000\u0430\u0431\u0003N\u001f\u0000\u0431\u0432\u0001\u0000\u0000\u0000"+ + "\u0432\u0433\u0006o\u000e\u0000\u0433\u0434\u0006o\u000f\u0000\u0434\u00ef"+ + "\u0001\u0000\u0000\u0000\u0435\u0436\u0003p0\u0000\u0436\u0437\u0001\u0000"+ + "\u0000\u0000\u0437\u0438\u0006p\u0013\u0000\u0438\u00f1\u0001\u0000\u0000"+ + "\u0000\u0439\u043a\u0003t2\u0000\u043a\u043b\u0001\u0000\u0000\u0000\u043b"+ + "\u043c\u0006q\u0012\u0000\u043c\u00f3\u0001\u0000\u0000\u0000\u043d\u043e"+ + "\u0003x4\u0000\u043e\u043f\u0001\u0000\u0000\u0000\u043f\u0440\u0006r"+ + "\u0016\u0000\u0440\u00f5\u0001\u0000\u0000\u0000\u0441\u0442\u0005a\u0000"+ + "\u0000\u0442\u0443\u0005s\u0000\u0000\u0443\u00f7\u0001\u0000\u0000\u0000"+ + "\u0444\u0445\u0003\u00e6k\u0000\u0445\u0446\u0001\u0000\u0000\u0000\u0446"+ + "\u0447\u0006t\u0017\u0000\u0447\u00f9\u0001\u0000\u0000\u0000\u0448\u0449"+ + "\u0003:\u0015\u0000\u0449\u044a\u0001\u0000\u0000\u0000\u044a\u044b\u0006"+ + "u\u000b\u0000\u044b\u00fb\u0001\u0000\u0000\u0000\u044c\u044d\u0003<\u0016"+ + "\u0000\u044d\u044e\u0001\u0000\u0000\u0000\u044e\u044f\u0006v\u000b\u0000"+ + "\u044f\u00fd\u0001\u0000\u0000\u0000\u0450\u0451\u0003>\u0017\u0000\u0451"+ + "\u0452\u0001\u0000\u0000\u0000\u0452\u0453\u0006w\u000b\u0000\u0453\u00ff"+ + "\u0001\u0000\u0000\u0000\u0454\u0455\u0003N\u001f\u0000\u0455\u0456\u0001"+ + "\u0000\u0000\u0000\u0456\u0457\u0006x\u000e\u0000\u0457\u0458\u0006x\u000f"+ + "\u0000\u0458\u0101\u0001\u0000\u0000\u0000\u0459\u045a\u0003\u00b4R\u0000"+ + "\u045a\u045b\u0001\u0000\u0000\u0000\u045b\u045c\u0006y\f\u0000\u045c"+ + "\u045d\u0006y\u0018\u0000\u045d\u0103\u0001\u0000\u0000\u0000\u045e\u045f"+ + "\u0005o\u0000\u0000\u045f\u0460\u0005n\u0000\u0000\u0460\u0461\u0001\u0000"+ + "\u0000\u0000\u0461\u0462\u0006z\u0019\u0000\u0462\u0105\u0001\u0000\u0000"+ + "\u0000\u0463\u0464\u0005w\u0000\u0000\u0464\u0465\u0005i\u0000\u0000\u0465"+ + "\u0466\u0005t\u0000\u0000\u0466\u0467\u0005h\u0000\u0000\u0467\u0468\u0001"+ + "\u0000\u0000\u0000\u0468\u0469\u0006{\u0019\u0000\u0469\u0107\u0001\u0000"+ + "\u0000\u0000\u046a\u046b\b\f\u0000\u0000\u046b\u0109\u0001\u0000\u0000"+ + "\u0000\u046c\u046e\u0003\u0108|\u0000\u046d\u046c\u0001\u0000\u0000\u0000"+ + "\u046e\u046f\u0001\u0000\u0000\u0000\u046f\u046d\u0001\u0000\u0000\u0000"+ + "\u046f\u0470\u0001\u0000\u0000\u0000\u0470\u0471\u0001\u0000\u0000\u0000"+ + "\u0471\u0472\u0003\u016e\u00af\u0000\u0472\u0474\u0001\u0000\u0000\u0000"+ + "\u0473\u046d\u0001\u0000\u0000\u0000\u0473\u0474\u0001\u0000\u0000\u0000"+ + "\u0474\u0476\u0001\u0000\u0000\u0000\u0475\u0477\u0003\u0108|\u0000\u0476"+ + "\u0475\u0001\u0000\u0000\u0000\u0477\u0478\u0001\u0000\u0000\u0000\u0478"+ + "\u0476\u0001\u0000\u0000\u0000\u0478\u0479\u0001\u0000\u0000\u0000\u0479"+ + "\u010b\u0001\u0000\u0000\u0000\u047a\u047b\u0003\u010a}\u0000\u047b\u047c"+ + "\u0001\u0000\u0000\u0000\u047c\u047d\u0006~\u001a\u0000\u047d\u010d\u0001"+ + "\u0000\u0000\u0000\u047e\u047f\u0003:\u0015\u0000\u047f\u0480\u0001\u0000"+ + "\u0000\u0000\u0480\u0481\u0006\u007f\u000b\u0000\u0481\u010f\u0001\u0000"+ + 
"\u0000\u0000\u0482\u0483\u0003<\u0016\u0000\u0483\u0484\u0001\u0000\u0000"+ + "\u0000\u0484\u0485\u0006\u0080\u000b\u0000\u0485\u0111\u0001\u0000\u0000"+ + "\u0000\u0486\u0487\u0003>\u0017\u0000\u0487\u0488\u0001\u0000\u0000\u0000"+ + "\u0488\u0489\u0006\u0081\u000b\u0000\u0489\u0113\u0001\u0000\u0000\u0000"+ + "\u048a\u048b\u0003N\u001f\u0000\u048b\u048c\u0001\u0000\u0000\u0000\u048c"+ + "\u048d\u0006\u0082\u000e\u0000\u048d\u048e\u0006\u0082\u000f\u0000\u048e"+ + "\u048f\u0006\u0082\u000f\u0000\u048f\u0115\u0001\u0000\u0000\u0000\u0490"+ + "\u0491\u0003p0\u0000\u0491\u0492\u0001\u0000\u0000\u0000\u0492\u0493\u0006"+ + "\u0083\u0013\u0000\u0493\u0117\u0001\u0000\u0000\u0000\u0494\u0495\u0003"+ + "t2\u0000\u0495\u0496\u0001\u0000\u0000\u0000\u0496\u0497\u0006\u0084\u0012"+ + "\u0000\u0497\u0119\u0001\u0000\u0000\u0000\u0498\u0499\u0003x4\u0000\u0499"+ + "\u049a\u0001\u0000\u0000\u0000\u049a\u049b\u0006\u0085\u0016\u0000\u049b"+ + "\u011b\u0001\u0000\u0000\u0000\u049c\u049d\u0003\u0106{\u0000\u049d\u049e"+ + "\u0001\u0000\u0000\u0000\u049e\u049f\u0006\u0086\u001b\u0000\u049f\u011d"+ + "\u0001\u0000\u0000\u0000\u04a0\u04a1\u0003\u00e6k\u0000\u04a1\u04a2\u0001"+ + "\u0000\u0000\u0000\u04a2\u04a3\u0006\u0087\u0017\u0000\u04a3\u011f\u0001"+ + "\u0000\u0000\u0000\u04a4\u04a5\u0003\u00bcV\u0000\u04a5\u04a6\u0001\u0000"+ + "\u0000\u0000\u04a6\u04a7\u0006\u0088\u001c\u0000\u04a7\u0121\u0001\u0000"+ + "\u0000\u0000\u04a8\u04a9\u0003:\u0015\u0000\u04a9\u04aa\u0001\u0000\u0000"+ + "\u0000\u04aa\u04ab\u0006\u0089\u000b\u0000\u04ab\u0123\u0001\u0000\u0000"+ + "\u0000\u04ac\u04ad\u0003<\u0016\u0000\u04ad\u04ae\u0001\u0000\u0000\u0000"+ + "\u04ae\u04af\u0006\u008a\u000b\u0000\u04af\u0125\u0001\u0000\u0000\u0000"+ + "\u04b0\u04b1\u0003>\u0017\u0000\u04b1\u04b2\u0001\u0000\u0000\u0000\u04b2"+ + "\u04b3\u0006\u008b\u000b\u0000\u04b3\u0127\u0001\u0000\u0000\u0000\u04b4"+ + "\u04b5\u0003N\u001f\u0000\u04b5\u04b6\u0001\u0000\u0000\u0000\u04b6\u04b7"+ + "\u0006\u008c\u000e\u0000\u04b7\u04b8\u0006\u008c\u000f\u0000\u04b8\u0129"+ + "\u0001\u0000\u0000\u0000\u04b9\u04ba\u0003\u016e\u00af\u0000\u04ba\u04bb"+ + "\u0001\u0000\u0000\u0000\u04bb\u04bc\u0006\u008d\u0011\u0000\u04bc\u012b"+ + "\u0001\u0000\u0000\u0000\u04bd\u04be\u0003t2\u0000\u04be\u04bf\u0001\u0000"+ + "\u0000\u0000\u04bf\u04c0\u0006\u008e\u0012\u0000\u04c0\u012d\u0001\u0000"+ + "\u0000\u0000\u04c1\u04c2\u0003x4\u0000\u04c2\u04c3\u0001\u0000\u0000\u0000"+ + "\u04c3\u04c4\u0006\u008f\u0016\u0000\u04c4\u012f\u0001\u0000\u0000\u0000"+ + "\u04c5\u04c6\u0003\u0104z\u0000\u04c6\u04c7\u0001\u0000\u0000\u0000\u04c7"+ + "\u04c8\u0006\u0090\u001d\u0000\u04c8\u04c9\u0006\u0090\u001e\u0000\u04c9"+ + "\u0131\u0001\u0000\u0000\u0000\u04ca\u04cb\u0003B\u0019\u0000\u04cb\u04cc"+ + "\u0001\u0000\u0000\u0000\u04cc\u04cd\u0006\u0091\u0014\u0000\u04cd\u0133"+ + "\u0001\u0000\u0000\u0000\u04ce\u04cf\u0003d*\u0000\u04cf\u04d0\u0001\u0000"+ + "\u0000\u0000\u04d0\u04d1\u0006\u0092\u0015\u0000\u04d1\u0135\u0001\u0000"+ + "\u0000\u0000\u04d2\u04d3\u0003:\u0015\u0000\u04d3\u04d4\u0001\u0000\u0000"+ + "\u0000\u04d4\u04d5\u0006\u0093\u000b\u0000\u04d5\u0137\u0001\u0000\u0000"+ + "\u0000\u04d6\u04d7\u0003<\u0016\u0000\u04d7\u04d8\u0001\u0000\u0000\u0000"+ + "\u04d8\u04d9\u0006\u0094\u000b\u0000\u04d9\u0139\u0001\u0000\u0000\u0000"+ + "\u04da\u04db\u0003>\u0017\u0000\u04db\u04dc\u0001\u0000\u0000\u0000\u04dc"+ + "\u04dd\u0006\u0095\u000b\u0000\u04dd\u013b\u0001\u0000\u0000\u0000\u04de"+ + "\u04df\u0003N\u001f\u0000\u04df\u04e0\u0001\u0000\u0000\u0000\u04e0\u04e1"+ + 
"\u0006\u0096\u000e\u0000\u04e1\u04e2\u0006\u0096\u000f\u0000\u04e2\u04e3"+ + "\u0006\u0096\u000f\u0000\u04e3\u013d\u0001\u0000\u0000\u0000\u04e4\u04e5"+ + "\u0003t2\u0000\u04e5\u04e6\u0001\u0000\u0000\u0000\u04e6\u04e7\u0006\u0097"+ + "\u0012\u0000\u04e7\u013f\u0001\u0000\u0000\u0000\u04e8\u04e9\u0003x4\u0000"+ + "\u04e9\u04ea\u0001\u0000\u0000\u0000\u04ea\u04eb\u0006\u0098\u0016\u0000"+ + "\u04eb\u0141\u0001\u0000\u0000\u0000\u04ec\u04ed\u0003\u00e6k\u0000\u04ed"+ + "\u04ee\u0001\u0000\u0000\u0000\u04ee\u04ef\u0006\u0099\u0017\u0000\u04ef"+ + "\u0143\u0001\u0000\u0000\u0000\u04f0\u04f1\u0003:\u0015\u0000\u04f1\u04f2"+ + "\u0001\u0000\u0000\u0000\u04f2\u04f3\u0006\u009a\u000b\u0000\u04f3\u0145"+ + "\u0001\u0000\u0000\u0000\u04f4\u04f5\u0003<\u0016\u0000\u04f5\u04f6\u0001"+ + "\u0000\u0000\u0000\u04f6\u04f7\u0006\u009b\u000b\u0000\u04f7\u0147\u0001"+ + "\u0000\u0000\u0000\u04f8\u04f9\u0003>\u0017\u0000\u04f9\u04fa\u0001\u0000"+ + "\u0000\u0000\u04fa\u04fb\u0006\u009c\u000b\u0000\u04fb\u0149\u0001\u0000"+ + "\u0000\u0000\u04fc\u04fd\u0003N\u001f\u0000\u04fd\u04fe\u0001\u0000\u0000"+ + "\u0000\u04fe\u04ff\u0006\u009d\u000e\u0000\u04ff\u0500\u0006\u009d\u000f"+ + "\u0000\u0500\u014b\u0001\u0000\u0000\u0000\u0501\u0502\u0003x4\u0000\u0502"+ + "\u0503\u0001\u0000\u0000\u0000\u0503\u0504\u0006\u009e\u0016\u0000\u0504"+ + "\u014d\u0001\u0000\u0000\u0000\u0505\u0506\u0003\u00bcV\u0000\u0506\u0507"+ + "\u0001\u0000\u0000\u0000\u0507\u0508\u0006\u009f\u001c\u0000\u0508\u014f"+ + "\u0001\u0000\u0000\u0000\u0509\u050a\u0003\u00b8T\u0000\u050a\u050b\u0001"+ + "\u0000\u0000\u0000\u050b\u050c\u0006\u00a0\u001f\u0000\u050c\u0151\u0001"+ + "\u0000\u0000\u0000\u050d\u050e\u0003:\u0015\u0000\u050e\u050f\u0001\u0000"+ + "\u0000\u0000\u050f\u0510\u0006\u00a1\u000b\u0000\u0510\u0153\u0001\u0000"+ + "\u0000\u0000\u0511\u0512\u0003<\u0016\u0000\u0512\u0513\u0001\u0000\u0000"+ + "\u0000\u0513\u0514\u0006\u00a2\u000b\u0000\u0514\u0155\u0001\u0000\u0000"+ + "\u0000\u0515\u0516\u0003>\u0017\u0000\u0516\u0517\u0001\u0000\u0000\u0000"+ + "\u0517\u0518\u0006\u00a3\u000b\u0000\u0518\u0157\u0001\u0000\u0000\u0000"+ + "\u0519\u051a\u0003N\u001f\u0000\u051a\u051b\u0001\u0000\u0000\u0000\u051b"+ + "\u051c\u0006\u00a4\u000e\u0000\u051c\u051d\u0006\u00a4\u000f\u0000\u051d"+ + "\u0159\u0001\u0000\u0000\u0000\u051e\u051f\u0005i\u0000\u0000\u051f\u0520"+ + "\u0005n\u0000\u0000\u0520\u0521\u0005f\u0000\u0000\u0521\u0522\u0005o"+ + "\u0000\u0000\u0522\u015b\u0001\u0000\u0000\u0000\u0523\u0524\u0003:\u0015"+ + "\u0000\u0524\u0525\u0001\u0000\u0000\u0000\u0525\u0526\u0006\u00a6\u000b"+ + "\u0000\u0526\u015d\u0001\u0000\u0000\u0000\u0527\u0528\u0003<\u0016\u0000"+ + "\u0528\u0529\u0001\u0000\u0000\u0000\u0529\u052a\u0006\u00a7\u000b\u0000"+ + "\u052a\u015f\u0001\u0000\u0000\u0000\u052b\u052c\u0003>\u0017\u0000\u052c"+ + "\u052d\u0001\u0000\u0000\u0000\u052d\u052e\u0006\u00a8\u000b\u0000\u052e"+ + "\u0161\u0001\u0000\u0000\u0000\u052f\u0530\u0003N\u001f\u0000\u0530\u0531"+ + "\u0001\u0000\u0000\u0000\u0531\u0532\u0006\u00a9\u000e\u0000\u0532\u0533"+ + "\u0006\u00a9\u000f\u0000\u0533\u0163\u0001\u0000\u0000\u0000\u0534\u0535"+ + "\u0005f\u0000\u0000\u0535\u0536\u0005u\u0000\u0000\u0536\u0537\u0005n"+ + "\u0000\u0000\u0537\u0538\u0005c\u0000\u0000\u0538\u0539\u0005t\u0000\u0000"+ + "\u0539\u053a\u0005i\u0000\u0000\u053a\u053b\u0005o\u0000\u0000\u053b\u053c"+ + "\u0005n\u0000\u0000\u053c\u053d\u0005s\u0000\u0000\u053d\u0165\u0001\u0000"+ + "\u0000\u0000\u053e\u053f\u0003:\u0015\u0000\u053f\u0540\u0001\u0000\u0000"+ + 
"\u0000\u0540\u0541\u0006\u00ab\u000b\u0000\u0541\u0167\u0001\u0000\u0000"+ + "\u0000\u0542\u0543\u0003<\u0016\u0000\u0543\u0544\u0001\u0000\u0000\u0000"+ + "\u0544\u0545\u0006\u00ac\u000b\u0000\u0545\u0169\u0001\u0000\u0000\u0000"+ + "\u0546\u0547\u0003>\u0017\u0000\u0547\u0548\u0001\u0000\u0000\u0000\u0548"+ + "\u0549\u0006\u00ad\u000b\u0000\u0549\u016b\u0001\u0000\u0000\u0000\u054a"+ + "\u054b\u0003\u00b6S\u0000\u054b\u054c\u0001\u0000\u0000\u0000\u054c\u054d"+ + "\u0006\u00ae\u0010\u0000\u054d\u054e\u0006\u00ae\u000f\u0000\u054e\u016d"+ + "\u0001\u0000\u0000\u0000\u054f\u0550\u0005:\u0000\u0000\u0550\u016f\u0001"+ + "\u0000\u0000\u0000\u0551\u0557\u0003Z%\u0000\u0552\u0557\u0003P \u0000"+ + "\u0553\u0557\u0003x4\u0000\u0554\u0557\u0003R!\u0000\u0555\u0557\u0003"+ + "`(\u0000\u0556\u0551\u0001\u0000\u0000\u0000\u0556\u0552\u0001\u0000\u0000"+ + "\u0000\u0556\u0553\u0001\u0000\u0000\u0000\u0556\u0554\u0001\u0000\u0000"+ + "\u0000\u0556\u0555\u0001\u0000\u0000\u0000\u0557\u0558\u0001\u0000\u0000"+ + "\u0000\u0558\u0556\u0001\u0000\u0000\u0000\u0558\u0559\u0001\u0000\u0000"+ + "\u0000\u0559\u0171\u0001\u0000\u0000\u0000\u055a\u055b\u0003:\u0015\u0000"+ + "\u055b\u055c\u0001\u0000\u0000\u0000\u055c\u055d\u0006\u00b1\u000b\u0000"+ + "\u055d\u0173\u0001\u0000\u0000\u0000\u055e\u055f\u0003<\u0016\u0000\u055f"+ + "\u0560\u0001\u0000\u0000\u0000\u0560\u0561\u0006\u00b2\u000b\u0000\u0561"+ + "\u0175\u0001\u0000\u0000\u0000\u0562\u0563\u0003>\u0017\u0000\u0563\u0564"+ + "\u0001\u0000\u0000\u0000\u0564\u0565\u0006\u00b3\u000b\u0000\u0565\u0177"+ + "\u0001\u0000\u0000\u0000\u0566\u0567\u0003N\u001f\u0000\u0567\u0568\u0001"+ + "\u0000\u0000\u0000\u0568\u0569\u0006\u00b4\u000e\u0000\u0569\u056a\u0006"+ + "\u00b4\u000f\u0000\u056a\u0179\u0001\u0000\u0000\u0000\u056b\u056c\u0003"+ + "B\u0019\u0000\u056c\u056d\u0001\u0000\u0000\u0000\u056d\u056e\u0006\u00b5"+ + "\u0014\u0000\u056e\u056f\u0006\u00b5\u000f\u0000\u056f\u0570\u0006\u00b5"+ + " \u0000\u0570\u017b\u0001\u0000\u0000\u0000\u0571\u0572\u0003d*\u0000"+ + "\u0572\u0573\u0001\u0000\u0000\u0000\u0573\u0574\u0006\u00b6\u0015\u0000"+ + "\u0574\u0575\u0006\u00b6\u000f\u0000\u0575\u0576\u0006\u00b6 \u0000\u0576"+ + "\u017d\u0001\u0000\u0000\u0000\u0577\u0578\u0003:\u0015\u0000\u0578\u0579"+ + "\u0001\u0000\u0000\u0000\u0579\u057a\u0006\u00b7\u000b\u0000\u057a\u017f"+ + "\u0001\u0000\u0000\u0000\u057b\u057c\u0003<\u0016\u0000\u057c\u057d\u0001"+ + "\u0000\u0000\u0000\u057d\u057e\u0006\u00b8\u000b\u0000\u057e\u0181\u0001"+ + "\u0000\u0000\u0000\u057f\u0580\u0003>\u0017\u0000\u0580\u0581\u0001\u0000"+ + "\u0000\u0000\u0581\u0582\u0006\u00b9\u000b\u0000\u0582\u0183\u0001\u0000"+ + "\u0000\u0000\u0583\u0584\u0003\u016e\u00af\u0000\u0584\u0585\u0001\u0000"+ + "\u0000\u0000\u0585\u0586\u0006\u00ba\u0011\u0000\u0586\u0587\u0006\u00ba"+ + "\u000f\u0000\u0587\u0588\u0006\u00ba\u0007\u0000\u0588\u0185\u0001\u0000"+ + "\u0000\u0000\u0589\u058a\u0003t2\u0000\u058a\u058b\u0001\u0000\u0000\u0000"+ + "\u058b\u058c\u0006\u00bb\u0012\u0000\u058c\u058d\u0006\u00bb\u000f\u0000"+ + "\u058d\u058e\u0006\u00bb\u0007\u0000\u058e\u0187\u0001\u0000\u0000\u0000"+ + "\u058f\u0590\u0003:\u0015\u0000\u0590\u0591\u0001\u0000\u0000\u0000\u0591"+ + "\u0592\u0006\u00bc\u000b\u0000\u0592\u0189\u0001\u0000\u0000\u0000\u0593"+ + "\u0594\u0003<\u0016\u0000\u0594\u0595\u0001\u0000\u0000\u0000\u0595\u0596"+ + "\u0006\u00bd\u000b\u0000\u0596\u018b\u0001\u0000\u0000\u0000\u0597\u0598"+ + "\u0003>\u0017\u0000\u0598\u0599\u0001\u0000\u0000\u0000\u0599\u059a\u0006"+ + 
"\u00be\u000b\u0000\u059a\u018d\u0001\u0000\u0000\u0000\u059b\u059c\u0003"+ + "\u00bcV\u0000\u059c\u059d\u0001\u0000\u0000\u0000\u059d\u059e\u0006\u00bf"+ + "\u000f\u0000\u059e\u059f\u0006\u00bf\u0000\u0000\u059f\u05a0\u0006\u00bf"+ + "\u001c\u0000\u05a0\u018f\u0001\u0000\u0000\u0000\u05a1\u05a2\u0003\u00b8"+ + "T\u0000\u05a2\u05a3\u0001\u0000\u0000\u0000\u05a3\u05a4\u0006\u00c0\u000f"+ + "\u0000\u05a4\u05a5\u0006\u00c0\u0000\u0000\u05a5\u05a6\u0006\u00c0\u001f"+ + "\u0000\u05a6\u0191\u0001\u0000\u0000\u0000\u05a7\u05a8\u0003j-\u0000\u05a8"+ + "\u05a9\u0001\u0000\u0000\u0000\u05a9\u05aa\u0006\u00c1\u000f\u0000\u05aa"+ + "\u05ab\u0006\u00c1\u0000\u0000\u05ab\u05ac\u0006\u00c1!\u0000\u05ac\u0193"+ + "\u0001\u0000\u0000\u0000\u05ad\u05ae\u0003N\u001f\u0000\u05ae\u05af\u0001"+ + "\u0000\u0000\u0000\u05af\u05b0\u0006\u00c2\u000e\u0000\u05b0\u05b1\u0006"+ + "\u00c2\u000f\u0000\u05b1\u0195\u0001\u0000\u0000\u0000A\u0000\u0001\u0002"+ + "\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f\u0242\u024c"+ + "\u0250\u0253\u025c\u025e\u0269\u0270\u0275\u029c\u02a1\u02aa\u02b1\u02b6"+ + "\u02b8\u02c3\u02cb\u02ce\u02d0\u02d5\u02da\u02e0\u02e7\u02ec\u02f2\u02f5"+ + "\u02fd\u0301\u0383\u038a\u038c\u039c\u03a1\u03a6\u03a8\u03ae\u0407\u040b"+ + "\u0410\u0415\u041a\u041c\u0420\u0422\u046f\u0473\u0478\u0556\u0558\"\u0005"+ "\u0002\u0000\u0005\u0004\u0000\u0005\u0006\u0000\u0005\u0001\u0000\u0005"+ "\u0003\u0000\u0005\b\u0000\u0005\f\u0000\u0005\u000e\u0000\u0005\n\u0000"+ - "\u0005\u0005\u0000\u0005\u000b\u0000\u0000\u0001\u0000\u0007E\u0000\u0005"+ - "\u0000\u0000\u0007\u001d\u0000\u0004\u0000\u0000\u0007F\u0000\u0007r\u0000"+ + "\u0005\u0005\u0000\u0005\u000b\u0000\u0000\u0001\u0000\u0007F\u0000\u0005"+ + "\u0000\u0000\u0007\u001d\u0000\u0004\u0000\u0000\u0007G\u0000\u0007s\u0000"+ "\u0007&\u0000\u0007$\u0000\u0007\u0019\u0000\u0007\u001e\u0000\u0007("+ - "\u0000\u0007P\u0000\u0005\r\u0000\u0005\u0007\u0000\u0007Z\u0000\u0007"+ - "Y\u0000\u0007H\u0000\u0007X\u0000\u0005\t\u0000\u0007G\u0000\u0005\u000f"+ + "\u0000\u0007Q\u0000\u0005\r\u0000\u0005\u0007\u0000\u0007[\u0000\u0007"+ + "Z\u0000\u0007I\u0000\u0007Y\u0000\u0005\t\u0000\u0007H\u0000\u0005\u000f"+ "\u0000\u0007!\u0000"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp index 6c5edef9e98f0..b52db660b55ee 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp @@ -42,11 +42,12 @@ null '.' 
'false' 'first' -'last' -'(' 'in' 'is' +'last' 'like' +'(' +'match' 'not' 'null' 'nulls' @@ -169,11 +170,12 @@ DESC DOT FALSE FIRST -LAST -LP IN IS +LAST LIKE +LP +MATCH NOT NULL NULLS @@ -260,6 +262,7 @@ processingCommand whereCommand booleanExpression regexBooleanExpression +matchBooleanExpression valueExpression operatorExpression primaryExpression @@ -314,4 +317,4 @@ lookupCommand atn: -[4, 1, 124, 567, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 126, 8, 1, 10, 1, 12, 1, 129, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 137, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 153, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 165, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 172, 8, 5, 10, 5, 12, 5, 175, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 182, 8, 5, 1, 5, 1, 5, 3, 5, 186, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 194, 8, 5, 10, 5, 12, 5, 197, 9, 5, 1, 6, 1, 6, 3, 6, 201, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 208, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 213, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 220, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 226, 8, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 5, 8, 234, 8, 8, 10, 8, 12, 8, 237, 9, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 247, 8, 9, 1, 9, 1, 9, 1, 9, 5, 9, 252, 8, 9, 10, 9, 12, 9, 255, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 5, 10, 263, 8, 10, 10, 10, 12, 10, 266, 9, 10, 3, 10, 268, 8, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 5, 13, 280, 8, 13, 10, 13, 12, 13, 283, 9, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 3, 14, 290, 8, 14, 1, 15, 1, 15, 1, 15, 1, 15, 5, 15, 296, 8, 15, 10, 15, 12, 15, 299, 9, 15, 1, 15, 3, 15, 302, 8, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 3, 16, 309, 8, 16, 1, 17, 1, 17, 1, 18, 1, 18, 1, 19, 1, 19, 3, 19, 317, 8, 19, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 323, 8, 20, 10, 20, 12, 20, 326, 9, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 336, 8, 22, 10, 22, 12, 22, 339, 9, 22, 1, 22, 3, 22, 342, 8, 22, 1, 22, 1, 22, 3, 22, 346, 8, 22, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 3, 24, 353, 8, 24, 1, 24, 1, 24, 3, 24, 357, 8, 24, 1, 25, 1, 25, 1, 25, 1, 25, 3, 25, 363, 8, 25, 1, 26, 1, 26, 1, 26, 5, 26, 368, 8, 26, 10, 26, 12, 26, 371, 9, 26, 1, 27, 1, 27, 1, 27, 5, 27, 376, 8, 27, 10, 27, 12, 27, 379, 9, 27, 1, 28, 1, 28, 1, 28, 5, 28, 384, 8, 28, 10, 28, 12, 28, 387, 9, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 5, 31, 406, 8, 31, 10, 31, 12, 31, 409, 9, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 5, 31, 417, 8, 31, 10, 31, 12, 31, 420, 9, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 5, 31, 428, 8, 31, 10, 31, 
12, 31, 431, 9, 31, 1, 31, 1, 31, 3, 31, 435, 8, 31, 1, 32, 1, 32, 3, 32, 439, 8, 32, 1, 33, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 448, 8, 34, 10, 34, 12, 34, 451, 9, 34, 1, 35, 1, 35, 3, 35, 455, 8, 35, 1, 35, 1, 35, 3, 35, 459, 8, 35, 1, 36, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 471, 8, 38, 10, 38, 12, 38, 474, 9, 38, 1, 39, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 1, 40, 3, 40, 484, 8, 40, 1, 41, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 5, 43, 496, 8, 43, 10, 43, 12, 43, 499, 9, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 46, 1, 46, 3, 46, 509, 8, 46, 1, 47, 3, 47, 512, 8, 47, 1, 47, 1, 47, 1, 48, 3, 48, 517, 8, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 3, 55, 542, 8, 55, 1, 55, 1, 55, 1, 55, 1, 55, 5, 55, 548, 8, 55, 10, 55, 12, 55, 551, 9, 55, 3, 55, 553, 8, 55, 1, 56, 1, 56, 1, 56, 3, 56, 558, 8, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 0, 4, 2, 10, 16, 18, 58, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 0, 8, 1, 0, 63, 64, 1, 0, 65, 67, 2, 0, 25, 25, 30, 30, 1, 0, 71, 72, 2, 0, 35, 35, 39, 39, 1, 0, 42, 43, 2, 0, 41, 41, 55, 55, 2, 0, 56, 56, 58, 62, 592, 0, 116, 1, 0, 0, 0, 2, 119, 1, 0, 0, 0, 4, 136, 1, 0, 0, 0, 6, 152, 1, 0, 0, 0, 8, 154, 1, 0, 0, 0, 10, 185, 1, 0, 0, 0, 12, 212, 1, 0, 0, 0, 14, 219, 1, 0, 0, 0, 16, 225, 1, 0, 0, 0, 18, 246, 1, 0, 0, 0, 20, 256, 1, 0, 0, 0, 22, 271, 1, 0, 0, 0, 24, 273, 1, 0, 0, 0, 26, 276, 1, 0, 0, 0, 28, 289, 1, 0, 0, 0, 30, 291, 1, 0, 0, 0, 32, 308, 1, 0, 0, 0, 34, 310, 1, 0, 0, 0, 36, 312, 1, 0, 0, 0, 38, 316, 1, 0, 0, 0, 40, 318, 1, 0, 0, 0, 42, 327, 1, 0, 0, 0, 44, 331, 1, 0, 0, 0, 46, 347, 1, 0, 0, 0, 48, 350, 1, 0, 0, 0, 50, 358, 1, 0, 0, 0, 52, 364, 1, 0, 0, 0, 54, 372, 1, 0, 0, 0, 56, 380, 1, 0, 0, 0, 58, 388, 1, 0, 0, 0, 60, 390, 1, 0, 0, 0, 62, 434, 1, 0, 0, 0, 64, 438, 1, 0, 0, 0, 66, 440, 1, 0, 0, 0, 68, 443, 1, 0, 0, 0, 70, 452, 1, 0, 0, 0, 72, 460, 1, 0, 0, 0, 74, 463, 1, 0, 0, 0, 76, 466, 1, 0, 0, 0, 78, 475, 1, 0, 0, 0, 80, 479, 1, 0, 0, 0, 82, 485, 1, 0, 0, 0, 84, 489, 1, 0, 0, 0, 86, 492, 1, 0, 0, 0, 88, 500, 1, 0, 0, 0, 90, 504, 1, 0, 0, 0, 92, 508, 1, 0, 0, 0, 94, 511, 1, 0, 0, 0, 96, 516, 1, 0, 0, 0, 98, 520, 1, 0, 0, 0, 100, 522, 1, 0, 0, 0, 102, 524, 1, 0, 0, 0, 104, 527, 1, 0, 0, 0, 106, 531, 1, 0, 0, 0, 108, 534, 1, 0, 0, 0, 110, 537, 1, 0, 0, 0, 112, 557, 1, 0, 0, 0, 114, 561, 1, 0, 0, 0, 116, 117, 3, 2, 1, 0, 117, 118, 5, 0, 0, 1, 118, 1, 1, 0, 0, 0, 119, 120, 6, 1, -1, 0, 120, 121, 3, 4, 2, 0, 121, 127, 1, 0, 0, 0, 122, 123, 10, 1, 0, 0, 123, 124, 5, 29, 0, 0, 124, 126, 3, 6, 3, 0, 125, 122, 1, 0, 0, 0, 126, 129, 1, 0, 0, 0, 127, 125, 1, 0, 0, 0, 127, 128, 1, 0, 0, 0, 128, 3, 1, 0, 0, 0, 129, 127, 1, 0, 0, 0, 130, 137, 3, 102, 51, 0, 131, 137, 3, 30, 15, 0, 132, 137, 3, 24, 12, 0, 133, 137, 3, 44, 22, 0, 134, 137, 3, 106, 53, 0, 135, 137, 3, 108, 54, 0, 136, 130, 1, 0, 0, 0, 136, 131, 1, 0, 0, 0, 136, 132, 1, 0, 0, 0, 136, 133, 1, 0, 0, 0, 136, 134, 1, 0, 0, 0, 136, 135, 1, 0, 0, 0, 137, 5, 1, 0, 0, 0, 138, 153, 3, 46, 23, 0, 139, 153, 3, 50, 25, 0, 140, 153, 3, 66, 33, 0, 141, 153, 3, 114, 57, 0, 142, 153, 3, 72, 36, 0, 143, 153, 3, 68, 34, 0, 144, 153, 3, 48, 24, 0, 145, 153, 3, 8, 4, 0, 146, 153, 3, 74, 37, 0, 147, 
153, 3, 76, 38, 0, 148, 153, 3, 80, 40, 0, 149, 153, 3, 82, 41, 0, 150, 153, 3, 110, 55, 0, 151, 153, 3, 84, 42, 0, 152, 138, 1, 0, 0, 0, 152, 139, 1, 0, 0, 0, 152, 140, 1, 0, 0, 0, 152, 141, 1, 0, 0, 0, 152, 142, 1, 0, 0, 0, 152, 143, 1, 0, 0, 0, 152, 144, 1, 0, 0, 0, 152, 145, 1, 0, 0, 0, 152, 146, 1, 0, 0, 0, 152, 147, 1, 0, 0, 0, 152, 148, 1, 0, 0, 0, 152, 149, 1, 0, 0, 0, 152, 150, 1, 0, 0, 0, 152, 151, 1, 0, 0, 0, 153, 7, 1, 0, 0, 0, 154, 155, 5, 20, 0, 0, 155, 156, 3, 10, 5, 0, 156, 9, 1, 0, 0, 0, 157, 158, 6, 5, -1, 0, 158, 159, 5, 48, 0, 0, 159, 186, 3, 10, 5, 7, 160, 186, 3, 14, 7, 0, 161, 186, 3, 12, 6, 0, 162, 164, 3, 14, 7, 0, 163, 165, 5, 48, 0, 0, 164, 163, 1, 0, 0, 0, 164, 165, 1, 0, 0, 0, 165, 166, 1, 0, 0, 0, 166, 167, 5, 45, 0, 0, 167, 168, 5, 44, 0, 0, 168, 173, 3, 14, 7, 0, 169, 170, 5, 38, 0, 0, 170, 172, 3, 14, 7, 0, 171, 169, 1, 0, 0, 0, 172, 175, 1, 0, 0, 0, 173, 171, 1, 0, 0, 0, 173, 174, 1, 0, 0, 0, 174, 176, 1, 0, 0, 0, 175, 173, 1, 0, 0, 0, 176, 177, 5, 54, 0, 0, 177, 186, 1, 0, 0, 0, 178, 179, 3, 14, 7, 0, 179, 181, 5, 46, 0, 0, 180, 182, 5, 48, 0, 0, 181, 180, 1, 0, 0, 0, 181, 182, 1, 0, 0, 0, 182, 183, 1, 0, 0, 0, 183, 184, 5, 49, 0, 0, 184, 186, 1, 0, 0, 0, 185, 157, 1, 0, 0, 0, 185, 160, 1, 0, 0, 0, 185, 161, 1, 0, 0, 0, 185, 162, 1, 0, 0, 0, 185, 178, 1, 0, 0, 0, 186, 195, 1, 0, 0, 0, 187, 188, 10, 4, 0, 0, 188, 189, 5, 34, 0, 0, 189, 194, 3, 10, 5, 5, 190, 191, 10, 3, 0, 0, 191, 192, 5, 51, 0, 0, 192, 194, 3, 10, 5, 4, 193, 187, 1, 0, 0, 0, 193, 190, 1, 0, 0, 0, 194, 197, 1, 0, 0, 0, 195, 193, 1, 0, 0, 0, 195, 196, 1, 0, 0, 0, 196, 11, 1, 0, 0, 0, 197, 195, 1, 0, 0, 0, 198, 200, 3, 14, 7, 0, 199, 201, 5, 48, 0, 0, 200, 199, 1, 0, 0, 0, 200, 201, 1, 0, 0, 0, 201, 202, 1, 0, 0, 0, 202, 203, 5, 47, 0, 0, 203, 204, 3, 98, 49, 0, 204, 213, 1, 0, 0, 0, 205, 207, 3, 14, 7, 0, 206, 208, 5, 48, 0, 0, 207, 206, 1, 0, 0, 0, 207, 208, 1, 0, 0, 0, 208, 209, 1, 0, 0, 0, 209, 210, 5, 53, 0, 0, 210, 211, 3, 98, 49, 0, 211, 213, 1, 0, 0, 0, 212, 198, 1, 0, 0, 0, 212, 205, 1, 0, 0, 0, 213, 13, 1, 0, 0, 0, 214, 220, 3, 16, 8, 0, 215, 216, 3, 16, 8, 0, 216, 217, 3, 100, 50, 0, 217, 218, 3, 16, 8, 0, 218, 220, 1, 0, 0, 0, 219, 214, 1, 0, 0, 0, 219, 215, 1, 0, 0, 0, 220, 15, 1, 0, 0, 0, 221, 222, 6, 8, -1, 0, 222, 226, 3, 18, 9, 0, 223, 224, 7, 0, 0, 0, 224, 226, 3, 16, 8, 3, 225, 221, 1, 0, 0, 0, 225, 223, 1, 0, 0, 0, 226, 235, 1, 0, 0, 0, 227, 228, 10, 2, 0, 0, 228, 229, 7, 1, 0, 0, 229, 234, 3, 16, 8, 3, 230, 231, 10, 1, 0, 0, 231, 232, 7, 0, 0, 0, 232, 234, 3, 16, 8, 2, 233, 227, 1, 0, 0, 0, 233, 230, 1, 0, 0, 0, 234, 237, 1, 0, 0, 0, 235, 233, 1, 0, 0, 0, 235, 236, 1, 0, 0, 0, 236, 17, 1, 0, 0, 0, 237, 235, 1, 0, 0, 0, 238, 239, 6, 9, -1, 0, 239, 247, 3, 62, 31, 0, 240, 247, 3, 52, 26, 0, 241, 247, 3, 20, 10, 0, 242, 243, 5, 44, 0, 0, 243, 244, 3, 10, 5, 0, 244, 245, 5, 54, 0, 0, 245, 247, 1, 0, 0, 0, 246, 238, 1, 0, 0, 0, 246, 240, 1, 0, 0, 0, 246, 241, 1, 0, 0, 0, 246, 242, 1, 0, 0, 0, 247, 253, 1, 0, 0, 0, 248, 249, 10, 1, 0, 0, 249, 250, 5, 37, 0, 0, 250, 252, 3, 22, 11, 0, 251, 248, 1, 0, 0, 0, 252, 255, 1, 0, 0, 0, 253, 251, 1, 0, 0, 0, 253, 254, 1, 0, 0, 0, 254, 19, 1, 0, 0, 0, 255, 253, 1, 0, 0, 0, 256, 257, 3, 58, 29, 0, 257, 267, 5, 44, 0, 0, 258, 268, 5, 65, 0, 0, 259, 264, 3, 10, 5, 0, 260, 261, 5, 38, 0, 0, 261, 263, 3, 10, 5, 0, 262, 260, 1, 0, 0, 0, 263, 266, 1, 0, 0, 0, 264, 262, 1, 0, 0, 0, 264, 265, 1, 0, 0, 0, 265, 268, 1, 0, 0, 0, 266, 264, 1, 0, 0, 0, 267, 258, 1, 0, 0, 0, 267, 259, 1, 0, 0, 0, 267, 268, 1, 0, 0, 0, 268, 269, 1, 0, 0, 0, 269, 270, 5, 
54, 0, 0, 270, 21, 1, 0, 0, 0, 271, 272, 3, 58, 29, 0, 272, 23, 1, 0, 0, 0, 273, 274, 5, 16, 0, 0, 274, 275, 3, 26, 13, 0, 275, 25, 1, 0, 0, 0, 276, 281, 3, 28, 14, 0, 277, 278, 5, 38, 0, 0, 278, 280, 3, 28, 14, 0, 279, 277, 1, 0, 0, 0, 280, 283, 1, 0, 0, 0, 281, 279, 1, 0, 0, 0, 281, 282, 1, 0, 0, 0, 282, 27, 1, 0, 0, 0, 283, 281, 1, 0, 0, 0, 284, 290, 3, 10, 5, 0, 285, 286, 3, 52, 26, 0, 286, 287, 5, 36, 0, 0, 287, 288, 3, 10, 5, 0, 288, 290, 1, 0, 0, 0, 289, 284, 1, 0, 0, 0, 289, 285, 1, 0, 0, 0, 290, 29, 1, 0, 0, 0, 291, 292, 5, 6, 0, 0, 292, 297, 3, 32, 16, 0, 293, 294, 5, 38, 0, 0, 294, 296, 3, 32, 16, 0, 295, 293, 1, 0, 0, 0, 296, 299, 1, 0, 0, 0, 297, 295, 1, 0, 0, 0, 297, 298, 1, 0, 0, 0, 298, 301, 1, 0, 0, 0, 299, 297, 1, 0, 0, 0, 300, 302, 3, 38, 19, 0, 301, 300, 1, 0, 0, 0, 301, 302, 1, 0, 0, 0, 302, 31, 1, 0, 0, 0, 303, 304, 3, 34, 17, 0, 304, 305, 5, 114, 0, 0, 305, 306, 3, 36, 18, 0, 306, 309, 1, 0, 0, 0, 307, 309, 3, 36, 18, 0, 308, 303, 1, 0, 0, 0, 308, 307, 1, 0, 0, 0, 309, 33, 1, 0, 0, 0, 310, 311, 5, 25, 0, 0, 311, 35, 1, 0, 0, 0, 312, 313, 7, 2, 0, 0, 313, 37, 1, 0, 0, 0, 314, 317, 3, 40, 20, 0, 315, 317, 3, 42, 21, 0, 316, 314, 1, 0, 0, 0, 316, 315, 1, 0, 0, 0, 317, 39, 1, 0, 0, 0, 318, 319, 5, 76, 0, 0, 319, 324, 5, 25, 0, 0, 320, 321, 5, 38, 0, 0, 321, 323, 5, 25, 0, 0, 322, 320, 1, 0, 0, 0, 323, 326, 1, 0, 0, 0, 324, 322, 1, 0, 0, 0, 324, 325, 1, 0, 0, 0, 325, 41, 1, 0, 0, 0, 326, 324, 1, 0, 0, 0, 327, 328, 5, 69, 0, 0, 328, 329, 3, 40, 20, 0, 329, 330, 5, 70, 0, 0, 330, 43, 1, 0, 0, 0, 331, 332, 5, 13, 0, 0, 332, 337, 3, 32, 16, 0, 333, 334, 5, 38, 0, 0, 334, 336, 3, 32, 16, 0, 335, 333, 1, 0, 0, 0, 336, 339, 1, 0, 0, 0, 337, 335, 1, 0, 0, 0, 337, 338, 1, 0, 0, 0, 338, 341, 1, 0, 0, 0, 339, 337, 1, 0, 0, 0, 340, 342, 3, 26, 13, 0, 341, 340, 1, 0, 0, 0, 341, 342, 1, 0, 0, 0, 342, 345, 1, 0, 0, 0, 343, 344, 5, 33, 0, 0, 344, 346, 3, 26, 13, 0, 345, 343, 1, 0, 0, 0, 345, 346, 1, 0, 0, 0, 346, 45, 1, 0, 0, 0, 347, 348, 5, 4, 0, 0, 348, 349, 3, 26, 13, 0, 349, 47, 1, 0, 0, 0, 350, 352, 5, 19, 0, 0, 351, 353, 3, 26, 13, 0, 352, 351, 1, 0, 0, 0, 352, 353, 1, 0, 0, 0, 353, 356, 1, 0, 0, 0, 354, 355, 5, 33, 0, 0, 355, 357, 3, 26, 13, 0, 356, 354, 1, 0, 0, 0, 356, 357, 1, 0, 0, 0, 357, 49, 1, 0, 0, 0, 358, 359, 5, 8, 0, 0, 359, 362, 3, 26, 13, 0, 360, 361, 5, 33, 0, 0, 361, 363, 3, 26, 13, 0, 362, 360, 1, 0, 0, 0, 362, 363, 1, 0, 0, 0, 363, 51, 1, 0, 0, 0, 364, 369, 3, 58, 29, 0, 365, 366, 5, 40, 0, 0, 366, 368, 3, 58, 29, 0, 367, 365, 1, 0, 0, 0, 368, 371, 1, 0, 0, 0, 369, 367, 1, 0, 0, 0, 369, 370, 1, 0, 0, 0, 370, 53, 1, 0, 0, 0, 371, 369, 1, 0, 0, 0, 372, 377, 3, 60, 30, 0, 373, 374, 5, 40, 0, 0, 374, 376, 3, 60, 30, 0, 375, 373, 1, 0, 0, 0, 376, 379, 1, 0, 0, 0, 377, 375, 1, 0, 0, 0, 377, 378, 1, 0, 0, 0, 378, 55, 1, 0, 0, 0, 379, 377, 1, 0, 0, 0, 380, 385, 3, 54, 27, 0, 381, 382, 5, 38, 0, 0, 382, 384, 3, 54, 27, 0, 383, 381, 1, 0, 0, 0, 384, 387, 1, 0, 0, 0, 385, 383, 1, 0, 0, 0, 385, 386, 1, 0, 0, 0, 386, 57, 1, 0, 0, 0, 387, 385, 1, 0, 0, 0, 388, 389, 7, 3, 0, 0, 389, 59, 1, 0, 0, 0, 390, 391, 5, 80, 0, 0, 391, 61, 1, 0, 0, 0, 392, 435, 5, 49, 0, 0, 393, 394, 3, 96, 48, 0, 394, 395, 5, 71, 0, 0, 395, 435, 1, 0, 0, 0, 396, 435, 3, 94, 47, 0, 397, 435, 3, 96, 48, 0, 398, 435, 3, 90, 45, 0, 399, 435, 3, 64, 32, 0, 400, 435, 3, 98, 49, 0, 401, 402, 5, 69, 0, 0, 402, 407, 3, 92, 46, 0, 403, 404, 5, 38, 0, 0, 404, 406, 3, 92, 46, 0, 405, 403, 1, 0, 0, 0, 406, 409, 1, 0, 0, 0, 407, 405, 1, 0, 0, 0, 407, 408, 1, 0, 0, 0, 408, 410, 1, 0, 0, 0, 409, 407, 1, 0, 0, 0, 410, 
411, 5, 70, 0, 0, 411, 435, 1, 0, 0, 0, 412, 413, 5, 69, 0, 0, 413, 418, 3, 90, 45, 0, 414, 415, 5, 38, 0, 0, 415, 417, 3, 90, 45, 0, 416, 414, 1, 0, 0, 0, 417, 420, 1, 0, 0, 0, 418, 416, 1, 0, 0, 0, 418, 419, 1, 0, 0, 0, 419, 421, 1, 0, 0, 0, 420, 418, 1, 0, 0, 0, 421, 422, 5, 70, 0, 0, 422, 435, 1, 0, 0, 0, 423, 424, 5, 69, 0, 0, 424, 429, 3, 98, 49, 0, 425, 426, 5, 38, 0, 0, 426, 428, 3, 98, 49, 0, 427, 425, 1, 0, 0, 0, 428, 431, 1, 0, 0, 0, 429, 427, 1, 0, 0, 0, 429, 430, 1, 0, 0, 0, 430, 432, 1, 0, 0, 0, 431, 429, 1, 0, 0, 0, 432, 433, 5, 70, 0, 0, 433, 435, 1, 0, 0, 0, 434, 392, 1, 0, 0, 0, 434, 393, 1, 0, 0, 0, 434, 396, 1, 0, 0, 0, 434, 397, 1, 0, 0, 0, 434, 398, 1, 0, 0, 0, 434, 399, 1, 0, 0, 0, 434, 400, 1, 0, 0, 0, 434, 401, 1, 0, 0, 0, 434, 412, 1, 0, 0, 0, 434, 423, 1, 0, 0, 0, 435, 63, 1, 0, 0, 0, 436, 439, 5, 52, 0, 0, 437, 439, 5, 68, 0, 0, 438, 436, 1, 0, 0, 0, 438, 437, 1, 0, 0, 0, 439, 65, 1, 0, 0, 0, 440, 441, 5, 10, 0, 0, 441, 442, 5, 31, 0, 0, 442, 67, 1, 0, 0, 0, 443, 444, 5, 18, 0, 0, 444, 449, 3, 70, 35, 0, 445, 446, 5, 38, 0, 0, 446, 448, 3, 70, 35, 0, 447, 445, 1, 0, 0, 0, 448, 451, 1, 0, 0, 0, 449, 447, 1, 0, 0, 0, 449, 450, 1, 0, 0, 0, 450, 69, 1, 0, 0, 0, 451, 449, 1, 0, 0, 0, 452, 454, 3, 10, 5, 0, 453, 455, 7, 4, 0, 0, 454, 453, 1, 0, 0, 0, 454, 455, 1, 0, 0, 0, 455, 458, 1, 0, 0, 0, 456, 457, 5, 50, 0, 0, 457, 459, 7, 5, 0, 0, 458, 456, 1, 0, 0, 0, 458, 459, 1, 0, 0, 0, 459, 71, 1, 0, 0, 0, 460, 461, 5, 9, 0, 0, 461, 462, 3, 56, 28, 0, 462, 73, 1, 0, 0, 0, 463, 464, 5, 2, 0, 0, 464, 465, 3, 56, 28, 0, 465, 75, 1, 0, 0, 0, 466, 467, 5, 15, 0, 0, 467, 472, 3, 78, 39, 0, 468, 469, 5, 38, 0, 0, 469, 471, 3, 78, 39, 0, 470, 468, 1, 0, 0, 0, 471, 474, 1, 0, 0, 0, 472, 470, 1, 0, 0, 0, 472, 473, 1, 0, 0, 0, 473, 77, 1, 0, 0, 0, 474, 472, 1, 0, 0, 0, 475, 476, 3, 54, 27, 0, 476, 477, 5, 84, 0, 0, 477, 478, 3, 54, 27, 0, 478, 79, 1, 0, 0, 0, 479, 480, 5, 1, 0, 0, 480, 481, 3, 18, 9, 0, 481, 483, 3, 98, 49, 0, 482, 484, 3, 86, 43, 0, 483, 482, 1, 0, 0, 0, 483, 484, 1, 0, 0, 0, 484, 81, 1, 0, 0, 0, 485, 486, 5, 7, 0, 0, 486, 487, 3, 18, 9, 0, 487, 488, 3, 98, 49, 0, 488, 83, 1, 0, 0, 0, 489, 490, 5, 14, 0, 0, 490, 491, 3, 52, 26, 0, 491, 85, 1, 0, 0, 0, 492, 497, 3, 88, 44, 0, 493, 494, 5, 38, 0, 0, 494, 496, 3, 88, 44, 0, 495, 493, 1, 0, 0, 0, 496, 499, 1, 0, 0, 0, 497, 495, 1, 0, 0, 0, 497, 498, 1, 0, 0, 0, 498, 87, 1, 0, 0, 0, 499, 497, 1, 0, 0, 0, 500, 501, 3, 58, 29, 0, 501, 502, 5, 36, 0, 0, 502, 503, 3, 62, 31, 0, 503, 89, 1, 0, 0, 0, 504, 505, 7, 6, 0, 0, 505, 91, 1, 0, 0, 0, 506, 509, 3, 94, 47, 0, 507, 509, 3, 96, 48, 0, 508, 506, 1, 0, 0, 0, 508, 507, 1, 0, 0, 0, 509, 93, 1, 0, 0, 0, 510, 512, 7, 0, 0, 0, 511, 510, 1, 0, 0, 0, 511, 512, 1, 0, 0, 0, 512, 513, 1, 0, 0, 0, 513, 514, 5, 32, 0, 0, 514, 95, 1, 0, 0, 0, 515, 517, 7, 0, 0, 0, 516, 515, 1, 0, 0, 0, 516, 517, 1, 0, 0, 0, 517, 518, 1, 0, 0, 0, 518, 519, 5, 31, 0, 0, 519, 97, 1, 0, 0, 0, 520, 521, 5, 30, 0, 0, 521, 99, 1, 0, 0, 0, 522, 523, 7, 7, 0, 0, 523, 101, 1, 0, 0, 0, 524, 525, 5, 5, 0, 0, 525, 526, 3, 104, 52, 0, 526, 103, 1, 0, 0, 0, 527, 528, 5, 69, 0, 0, 528, 529, 3, 2, 1, 0, 529, 530, 5, 70, 0, 0, 530, 105, 1, 0, 0, 0, 531, 532, 5, 17, 0, 0, 532, 533, 5, 106, 0, 0, 533, 107, 1, 0, 0, 0, 534, 535, 5, 12, 0, 0, 535, 536, 5, 110, 0, 0, 536, 109, 1, 0, 0, 0, 537, 538, 5, 3, 0, 0, 538, 541, 5, 90, 0, 0, 539, 540, 5, 88, 0, 0, 540, 542, 3, 54, 27, 0, 541, 539, 1, 0, 0, 0, 541, 542, 1, 0, 0, 0, 542, 552, 1, 0, 0, 0, 543, 544, 5, 89, 0, 0, 544, 549, 3, 112, 56, 0, 545, 546, 5, 38, 0, 0, 546, 548, 3, 
112, 56, 0, 547, 545, 1, 0, 0, 0, 548, 551, 1, 0, 0, 0, 549, 547, 1, 0, 0, 0, 549, 550, 1, 0, 0, 0, 550, 553, 1, 0, 0, 0, 551, 549, 1, 0, 0, 0, 552, 543, 1, 0, 0, 0, 552, 553, 1, 0, 0, 0, 553, 111, 1, 0, 0, 0, 554, 555, 3, 54, 27, 0, 555, 556, 5, 36, 0, 0, 556, 558, 1, 0, 0, 0, 557, 554, 1, 0, 0, 0, 557, 558, 1, 0, 0, 0, 558, 559, 1, 0, 0, 0, 559, 560, 3, 54, 27, 0, 560, 113, 1, 0, 0, 0, 561, 562, 5, 11, 0, 0, 562, 563, 3, 32, 16, 0, 563, 564, 5, 88, 0, 0, 564, 565, 3, 56, 28, 0, 565, 115, 1, 0, 0, 0, 54, 127, 136, 152, 164, 173, 181, 185, 193, 195, 200, 207, 212, 219, 225, 233, 235, 246, 253, 264, 267, 281, 289, 297, 301, 308, 316, 324, 337, 341, 345, 352, 356, 362, 369, 377, 385, 407, 418, 429, 434, 438, 449, 454, 458, 472, 483, 497, 508, 511, 516, 541, 549, 552, 557] \ No newline at end of file +[4, 1, 125, 574, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 128, 8, 1, 10, 1, 12, 1, 131, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 139, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 155, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 168, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 175, 8, 5, 10, 5, 12, 5, 178, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 185, 8, 5, 1, 5, 1, 5, 3, 5, 189, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 197, 8, 5, 10, 5, 12, 5, 200, 9, 5, 1, 6, 1, 6, 3, 6, 204, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 211, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 216, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 227, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 233, 8, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 241, 8, 9, 10, 9, 12, 9, 244, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 254, 8, 10, 1, 10, 1, 10, 1, 10, 5, 10, 259, 8, 10, 10, 10, 12, 10, 262, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 270, 8, 11, 10, 11, 12, 11, 273, 9, 11, 3, 11, 275, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 5, 14, 287, 8, 14, 10, 14, 12, 14, 290, 9, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 3, 15, 297, 8, 15, 1, 16, 1, 16, 1, 16, 1, 16, 5, 16, 303, 8, 16, 10, 16, 12, 16, 306, 9, 16, 1, 16, 3, 16, 309, 8, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 3, 17, 316, 8, 17, 1, 18, 1, 18, 1, 19, 1, 19, 1, 20, 1, 20, 3, 20, 324, 8, 20, 1, 21, 1, 21, 1, 21, 1, 21, 5, 21, 330, 8, 21, 10, 21, 12, 21, 333, 9, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 5, 23, 343, 8, 23, 10, 23, 12, 23, 346, 9, 23, 1, 23, 3, 23, 349, 8, 23, 1, 23, 1, 23, 3, 23, 353, 8, 23, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 3, 25, 360, 8, 25, 1, 25, 1, 25, 3, 25, 364, 8, 25, 1, 26, 1, 26, 1, 26, 1, 26, 3, 26, 370, 8, 26, 1, 27, 1, 27, 1, 27, 5, 27, 375, 8, 27, 10, 27, 12, 
[serialized ATN for EsqlBaseParser — several thousand comma-separated integers emitted by the ANTLR code generator, with no human-readable content — omitted]
\ No newline at end of file
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java
index afaf57ba1d218..f43506cbbe466 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java
@@ -23,59 +23,61 @@ public class EsqlBaseParser extends Parser {
     MULTILINE_COMMENT=23, WS=24, UNQUOTED_SOURCE=25, EXPLAIN_WS=26, EXPLAIN_LINE_COMMENT=27, EXPLAIN_MULTILINE_COMMENT=28, PIPE=29, QUOTED_STRING=30, INTEGER_LITERAL=31, DECIMAL_LITERAL=32, BY=33, AND=34, ASC=35, ASSIGN=36, CAST_OP=37, COMMA=38,
-    DESC=39, DOT=40, FALSE=41, FIRST=42, LAST=43, LP=44, IN=45, IS=46, LIKE=47,
-    NOT=48, NULL=49, NULLS=50, OR=51, PARAM=52, RLIKE=53, RP=54, TRUE=55,
-    EQ=56, CIEQ=57, NEQ=58, LT=59, LTE=60, GT=61, GTE=62, PLUS=63, MINUS=64,
-    ASTERISK=65, SLASH=66, PERCENT=67, NAMED_OR_POSITIONAL_PARAM=68, OPENING_BRACKET=69,
-    CLOSING_BRACKET=70, UNQUOTED_IDENTIFIER=71, QUOTED_IDENTIFIER=72, EXPR_LINE_COMMENT=73,
-    EXPR_MULTILINE_COMMENT=74, EXPR_WS=75, METADATA=76, FROM_LINE_COMMENT=77,
-    FROM_MULTILINE_COMMENT=78, FROM_WS=79, ID_PATTERN=80, PROJECT_LINE_COMMENT=81,
-    PROJECT_MULTILINE_COMMENT=82, PROJECT_WS=83, AS=84, RENAME_LINE_COMMENT=85,
-    RENAME_MULTILINE_COMMENT=86, RENAME_WS=87, ON=88, WITH=89, ENRICH_POLICY_NAME=90,
-    ENRICH_LINE_COMMENT=91, ENRICH_MULTILINE_COMMENT=92, ENRICH_WS=93, ENRICH_FIELD_LINE_COMMENT=94,
-    ENRICH_FIELD_MULTILINE_COMMENT=95, ENRICH_FIELD_WS=96, LOOKUP_LINE_COMMENT=97,
-    LOOKUP_MULTILINE_COMMENT=98, LOOKUP_WS=99, LOOKUP_FIELD_LINE_COMMENT=100,
-    LOOKUP_FIELD_MULTILINE_COMMENT=101, LOOKUP_FIELD_WS=102, MVEXPAND_LINE_COMMENT=103,
-    MVEXPAND_MULTILINE_COMMENT=104, MVEXPAND_WS=105, INFO=106, SHOW_LINE_COMMENT=107,
-    SHOW_MULTILINE_COMMENT=108, SHOW_WS=109, FUNCTIONS=110, META_LINE_COMMENT=111,
-    META_MULTILINE_COMMENT=112, META_WS=113, COLON=114, SETTING=115, SETTING_LINE_COMMENT=116,
-    SETTTING_MULTILINE_COMMENT=117,
SETTING_WS=118, METRICS_LINE_COMMENT=119, - METRICS_MULTILINE_COMMENT=120, METRICS_WS=121, CLOSING_METRICS_LINE_COMMENT=122, - CLOSING_METRICS_MULTILINE_COMMENT=123, CLOSING_METRICS_WS=124; + DESC=39, DOT=40, FALSE=41, FIRST=42, IN=43, IS=44, LAST=45, LIKE=46, LP=47, + MATCH=48, NOT=49, NULL=50, NULLS=51, OR=52, PARAM=53, RLIKE=54, RP=55, + TRUE=56, EQ=57, CIEQ=58, NEQ=59, LT=60, LTE=61, GT=62, GTE=63, PLUS=64, + MINUS=65, ASTERISK=66, SLASH=67, PERCENT=68, NAMED_OR_POSITIONAL_PARAM=69, + OPENING_BRACKET=70, CLOSING_BRACKET=71, UNQUOTED_IDENTIFIER=72, QUOTED_IDENTIFIER=73, + EXPR_LINE_COMMENT=74, EXPR_MULTILINE_COMMENT=75, EXPR_WS=76, METADATA=77, + FROM_LINE_COMMENT=78, FROM_MULTILINE_COMMENT=79, FROM_WS=80, ID_PATTERN=81, + PROJECT_LINE_COMMENT=82, PROJECT_MULTILINE_COMMENT=83, PROJECT_WS=84, + AS=85, RENAME_LINE_COMMENT=86, RENAME_MULTILINE_COMMENT=87, RENAME_WS=88, + ON=89, WITH=90, ENRICH_POLICY_NAME=91, ENRICH_LINE_COMMENT=92, ENRICH_MULTILINE_COMMENT=93, + ENRICH_WS=94, ENRICH_FIELD_LINE_COMMENT=95, ENRICH_FIELD_MULTILINE_COMMENT=96, + ENRICH_FIELD_WS=97, LOOKUP_LINE_COMMENT=98, LOOKUP_MULTILINE_COMMENT=99, + LOOKUP_WS=100, LOOKUP_FIELD_LINE_COMMENT=101, LOOKUP_FIELD_MULTILINE_COMMENT=102, + LOOKUP_FIELD_WS=103, MVEXPAND_LINE_COMMENT=104, MVEXPAND_MULTILINE_COMMENT=105, + MVEXPAND_WS=106, INFO=107, SHOW_LINE_COMMENT=108, SHOW_MULTILINE_COMMENT=109, + SHOW_WS=110, FUNCTIONS=111, META_LINE_COMMENT=112, META_MULTILINE_COMMENT=113, + META_WS=114, COLON=115, SETTING=116, SETTING_LINE_COMMENT=117, SETTTING_MULTILINE_COMMENT=118, + SETTING_WS=119, METRICS_LINE_COMMENT=120, METRICS_MULTILINE_COMMENT=121, + METRICS_WS=122, CLOSING_METRICS_LINE_COMMENT=123, CLOSING_METRICS_MULTILINE_COMMENT=124, + CLOSING_METRICS_WS=125; public static final int RULE_singleStatement = 0, RULE_query = 1, RULE_sourceCommand = 2, RULE_processingCommand = 3, RULE_whereCommand = 4, RULE_booleanExpression = 5, RULE_regexBooleanExpression = 6, - RULE_valueExpression = 7, RULE_operatorExpression = 8, RULE_primaryExpression = 9, - RULE_functionExpression = 10, RULE_dataType = 11, RULE_rowCommand = 12, - RULE_fields = 13, RULE_field = 14, RULE_fromCommand = 15, RULE_indexPattern = 16, - RULE_clusterString = 17, RULE_indexString = 18, RULE_metadata = 19, RULE_metadataOption = 20, - RULE_deprecated_metadata = 21, RULE_metricsCommand = 22, RULE_evalCommand = 23, - RULE_statsCommand = 24, RULE_inlinestatsCommand = 25, RULE_qualifiedName = 26, - RULE_qualifiedNamePattern = 27, RULE_qualifiedNamePatterns = 28, RULE_identifier = 29, - RULE_identifierPattern = 30, RULE_constant = 31, RULE_params = 32, RULE_limitCommand = 33, - RULE_sortCommand = 34, RULE_orderExpression = 35, RULE_keepCommand = 36, - RULE_dropCommand = 37, RULE_renameCommand = 38, RULE_renameClause = 39, - RULE_dissectCommand = 40, RULE_grokCommand = 41, RULE_mvExpandCommand = 42, - RULE_commandOptions = 43, RULE_commandOption = 44, RULE_booleanValue = 45, - RULE_numericValue = 46, RULE_decimalValue = 47, RULE_integerValue = 48, - RULE_string = 49, RULE_comparisonOperator = 50, RULE_explainCommand = 51, - RULE_subqueryExpression = 52, RULE_showCommand = 53, RULE_metaCommand = 54, - RULE_enrichCommand = 55, RULE_enrichWithClause = 56, RULE_lookupCommand = 57; + RULE_matchBooleanExpression = 7, RULE_valueExpression = 8, RULE_operatorExpression = 9, + RULE_primaryExpression = 10, RULE_functionExpression = 11, RULE_dataType = 12, + RULE_rowCommand = 13, RULE_fields = 14, RULE_field = 15, RULE_fromCommand = 16, + RULE_indexPattern = 17, RULE_clusterString = 18, 
RULE_indexString = 19, + RULE_metadata = 20, RULE_metadataOption = 21, RULE_deprecated_metadata = 22, + RULE_metricsCommand = 23, RULE_evalCommand = 24, RULE_statsCommand = 25, + RULE_inlinestatsCommand = 26, RULE_qualifiedName = 27, RULE_qualifiedNamePattern = 28, + RULE_qualifiedNamePatterns = 29, RULE_identifier = 30, RULE_identifierPattern = 31, + RULE_constant = 32, RULE_params = 33, RULE_limitCommand = 34, RULE_sortCommand = 35, + RULE_orderExpression = 36, RULE_keepCommand = 37, RULE_dropCommand = 38, + RULE_renameCommand = 39, RULE_renameClause = 40, RULE_dissectCommand = 41, + RULE_grokCommand = 42, RULE_mvExpandCommand = 43, RULE_commandOptions = 44, + RULE_commandOption = 45, RULE_booleanValue = 46, RULE_numericValue = 47, + RULE_decimalValue = 48, RULE_integerValue = 49, RULE_string = 50, RULE_comparisonOperator = 51, + RULE_explainCommand = 52, RULE_subqueryExpression = 53, RULE_showCommand = 54, + RULE_metaCommand = 55, RULE_enrichCommand = 56, RULE_enrichWithClause = 57, + RULE_lookupCommand = 58; private static String[] makeRuleNames() { return new String[] { "singleStatement", "query", "sourceCommand", "processingCommand", "whereCommand", - "booleanExpression", "regexBooleanExpression", "valueExpression", "operatorExpression", - "primaryExpression", "functionExpression", "dataType", "rowCommand", - "fields", "field", "fromCommand", "indexPattern", "clusterString", "indexString", - "metadata", "metadataOption", "deprecated_metadata", "metricsCommand", - "evalCommand", "statsCommand", "inlinestatsCommand", "qualifiedName", - "qualifiedNamePattern", "qualifiedNamePatterns", "identifier", "identifierPattern", - "constant", "params", "limitCommand", "sortCommand", "orderExpression", - "keepCommand", "dropCommand", "renameCommand", "renameClause", "dissectCommand", - "grokCommand", "mvExpandCommand", "commandOptions", "commandOption", - "booleanValue", "numericValue", "decimalValue", "integerValue", "string", - "comparisonOperator", "explainCommand", "subqueryExpression", "showCommand", - "metaCommand", "enrichCommand", "enrichWithClause", "lookupCommand" + "booleanExpression", "regexBooleanExpression", "matchBooleanExpression", + "valueExpression", "operatorExpression", "primaryExpression", "functionExpression", + "dataType", "rowCommand", "fields", "field", "fromCommand", "indexPattern", + "clusterString", "indexString", "metadata", "metadataOption", "deprecated_metadata", + "metricsCommand", "evalCommand", "statsCommand", "inlinestatsCommand", + "qualifiedName", "qualifiedNamePattern", "qualifiedNamePatterns", "identifier", + "identifierPattern", "constant", "params", "limitCommand", "sortCommand", + "orderExpression", "keepCommand", "dropCommand", "renameCommand", "renameClause", + "dissectCommand", "grokCommand", "mvExpandCommand", "commandOptions", + "commandOption", "booleanValue", "numericValue", "decimalValue", "integerValue", + "string", "comparisonOperator", "explainCommand", "subqueryExpression", + "showCommand", "metaCommand", "enrichCommand", "enrichWithClause", "lookupCommand" }; } public static final String[] ruleNames = makeRuleNames(); @@ -87,11 +89,11 @@ private static String[] makeLiteralNames() { "'metrics'", "'mv_expand'", "'rename'", "'row'", "'show'", "'sort'", "'stats'", "'where'", null, null, null, null, null, null, null, null, "'|'", null, null, null, "'by'", "'and'", "'asc'", "'='", "'::'", "','", - "'desc'", "'.'", "'false'", "'first'", "'last'", "'('", "'in'", "'is'", - "'like'", "'not'", "'null'", "'nulls'", "'or'", "'?'", "'rlike'", "')'", - 
"'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", "'>='", "'+'", - "'-'", "'*'", "'/'", "'%'", null, null, "']'", null, null, null, null, - null, "'metadata'", null, null, null, null, null, null, null, "'as'", + "'desc'", "'.'", "'false'", "'first'", "'in'", "'is'", "'last'", "'like'", + "'('", "'match'", "'not'", "'null'", "'nulls'", "'or'", "'?'", "'rlike'", + "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", "'>='", + "'+'", "'-'", "'*'", "'/'", "'%'", null, null, "']'", null, null, null, + null, null, "'metadata'", null, null, null, null, null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, "'info'", null, null, null, "'functions'", null, null, null, "':'" @@ -106,8 +108,8 @@ private static String[] makeSymbolicNames() { "MULTILINE_COMMENT", "WS", "UNQUOTED_SOURCE", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", "PIPE", "QUOTED_STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", "COMMA", - "DESC", "DOT", "FALSE", "FIRST", "LAST", "LP", "IN", "IS", "LIKE", "NOT", - "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", + "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", "LP", "MATCH", + "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", @@ -211,9 +213,9 @@ public final SingleStatementContext singleStatement() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(116); + setState(118); query(0); - setState(117); + setState(119); match(EOF); } } @@ -309,11 +311,11 @@ private QueryContext query(int _p) throws RecognitionException { _ctx = _localctx; _prevctx = _localctx; - setState(120); + setState(122); sourceCommand(); } _ctx.stop = _input.LT(-1); - setState(127); + setState(129); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -324,16 +326,16 @@ private QueryContext query(int _p) throws RecognitionException { { _localctx = new CompositeQueryContext(new QueryContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_query); - setState(122); + setState(124); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(123); + setState(125); match(PIPE); - setState(124); + setState(126); processingCommand(); } } } - setState(129); + setState(131); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); } @@ -394,48 +396,48 @@ public final SourceCommandContext sourceCommand() throws RecognitionException { SourceCommandContext _localctx = new SourceCommandContext(_ctx, getState()); enterRule(_localctx, 4, RULE_sourceCommand); try { - setState(136); + setState(138); _errHandler.sync(this); switch (_input.LA(1)) { case EXPLAIN: enterOuterAlt(_localctx, 1); { - setState(130); + setState(132); explainCommand(); } break; case FROM: enterOuterAlt(_localctx, 2); { - setState(131); + setState(133); fromCommand(); } break; case ROW: enterOuterAlt(_localctx, 3); { - setState(132); + setState(134); rowCommand(); } break; case METRICS: enterOuterAlt(_localctx, 4); { - setState(133); + setState(135); metricsCommand(); } 
break; case SHOW: enterOuterAlt(_localctx, 5); { - setState(134); + setState(136); showCommand(); } break; case META: enterOuterAlt(_localctx, 6); { - setState(135); + setState(137); metaCommand(); } break; @@ -522,104 +524,104 @@ public final ProcessingCommandContext processingCommand() throws RecognitionExce ProcessingCommandContext _localctx = new ProcessingCommandContext(_ctx, getState()); enterRule(_localctx, 6, RULE_processingCommand); try { - setState(152); + setState(154); _errHandler.sync(this); switch (_input.LA(1)) { case EVAL: enterOuterAlt(_localctx, 1); { - setState(138); + setState(140); evalCommand(); } break; case INLINESTATS: enterOuterAlt(_localctx, 2); { - setState(139); + setState(141); inlinestatsCommand(); } break; case LIMIT: enterOuterAlt(_localctx, 3); { - setState(140); + setState(142); limitCommand(); } break; case LOOKUP: enterOuterAlt(_localctx, 4); { - setState(141); + setState(143); lookupCommand(); } break; case KEEP: enterOuterAlt(_localctx, 5); { - setState(142); + setState(144); keepCommand(); } break; case SORT: enterOuterAlt(_localctx, 6); { - setState(143); + setState(145); sortCommand(); } break; case STATS: enterOuterAlt(_localctx, 7); { - setState(144); + setState(146); statsCommand(); } break; case WHERE: enterOuterAlt(_localctx, 8); { - setState(145); + setState(147); whereCommand(); } break; case DROP: enterOuterAlt(_localctx, 9); { - setState(146); + setState(148); dropCommand(); } break; case RENAME: enterOuterAlt(_localctx, 10); { - setState(147); + setState(149); renameCommand(); } break; case DISSECT: enterOuterAlt(_localctx, 11); { - setState(148); + setState(150); dissectCommand(); } break; case GROK: enterOuterAlt(_localctx, 12); { - setState(149); + setState(151); grokCommand(); } break; case ENRICH: enterOuterAlt(_localctx, 13); { - setState(150); + setState(152); enrichCommand(); } break; case MV_EXPAND: enterOuterAlt(_localctx, 14); { - setState(151); + setState(153); mvExpandCommand(); } break; @@ -670,9 +672,9 @@ public final WhereCommandContext whereCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(154); + setState(156); match(WHERE); - setState(155); + setState(157); booleanExpression(0); } } @@ -702,6 +704,27 @@ public void copyFrom(BooleanExpressionContext ctx) { } } @SuppressWarnings("CheckReturnValue") + public static class MatchExpressionContext extends BooleanExpressionContext { + public MatchBooleanExpressionContext matchBooleanExpression() { + return getRuleContext(MatchBooleanExpressionContext.class,0); + } + @SuppressWarnings("this-escape") + public MatchExpressionContext(BooleanExpressionContext ctx) { copyFrom(ctx); } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterMatchExpression(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitMatchExpression(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitMatchExpression(this); + else return visitor.visitChildren(this); + } + } + @SuppressWarnings("CheckReturnValue") public static class LogicalNotContext extends BooleanExpressionContext { public TerminalNode NOT() { return getToken(EsqlBaseParser.NOT, 0); } public BooleanExpressionContext booleanExpression() { @@ -867,7 +890,7 @@ private 
BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(185); + setState(188); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,6,_ctx) ) { case 1: @@ -876,10 +899,10 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; - setState(158); + setState(160); match(NOT); - setState(159); - booleanExpression(7); + setState(161); + booleanExpression(8); } break; case 2: @@ -887,7 +910,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new BooleanDefaultContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(160); + setState(162); valueExpression(); } break; @@ -896,79 +919,88 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new RegexExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(161); + setState(163); regexBooleanExpression(); } break; case 4: + { + _localctx = new MatchExpressionContext(_localctx); + _ctx = _localctx; + _prevctx = _localctx; + setState(164); + matchBooleanExpression(); + } + break; + case 5: { _localctx = new LogicalInContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(162); + setState(165); valueExpression(); - setState(164); + setState(167); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(163); + setState(166); match(NOT); } } - setState(166); + setState(169); match(IN); - setState(167); + setState(170); match(LP); - setState(168); + setState(171); valueExpression(); - setState(173); + setState(176); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(169); + setState(172); match(COMMA); - setState(170); + setState(173); valueExpression(); } } - setState(175); + setState(178); _errHandler.sync(this); _la = _input.LA(1); } - setState(176); + setState(179); match(RP); } break; - case 5: + case 6: { _localctx = new IsNullContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(178); + setState(181); valueExpression(); - setState(179); + setState(182); match(IS); - setState(181); + setState(184); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(180); + setState(183); match(NOT); } } - setState(183); + setState(186); match(NULL); } break; } _ctx.stop = _input.LT(-1); - setState(195); + setState(198); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -976,7 +1008,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(193); + setState(196); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,7,_ctx) ) { case 1: @@ -984,11 +1016,11 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(187); + setState(190); if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)"); - setState(188); + setState(191); ((LogicalBinaryContext)_localctx).operator = match(AND); - setState(189); + setState(192); ((LogicalBinaryContext)_localctx).right = 
booleanExpression(5); } break; @@ -997,18 +1029,18 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(190); + setState(193); if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)"); - setState(191); + setState(194); ((LogicalBinaryContext)_localctx).operator = match(OR); - setState(192); + setState(195); ((LogicalBinaryContext)_localctx).right = booleanExpression(4); } break; } } } - setState(197); + setState(200); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); } @@ -1063,48 +1095,48 @@ public final RegexBooleanExpressionContext regexBooleanExpression() throws Recog enterRule(_localctx, 12, RULE_regexBooleanExpression); int _la; try { - setState(212); + setState(215); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,11,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(198); + setState(201); valueExpression(); - setState(200); + setState(203); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(199); + setState(202); match(NOT); } } - setState(202); + setState(205); ((RegexBooleanExpressionContext)_localctx).kind = match(LIKE); - setState(203); + setState(206); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(205); + setState(208); valueExpression(); - setState(207); + setState(210); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(206); + setState(209); match(NOT); } } - setState(209); + setState(212); ((RegexBooleanExpressionContext)_localctx).kind = match(RLIKE); - setState(210); + setState(213); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; @@ -1121,6 +1153,61 @@ public final RegexBooleanExpressionContext regexBooleanExpression() throws Recog return _localctx; } + @SuppressWarnings("CheckReturnValue") + public static class MatchBooleanExpressionContext extends ParserRuleContext { + public StringContext queryString; + public QualifiedNameContext qualifiedName() { + return getRuleContext(QualifiedNameContext.class,0); + } + public TerminalNode MATCH() { return getToken(EsqlBaseParser.MATCH, 0); } + public StringContext string() { + return getRuleContext(StringContext.class,0); + } + @SuppressWarnings("this-escape") + public MatchBooleanExpressionContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_matchBooleanExpression; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterMatchBooleanExpression(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitMatchBooleanExpression(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitMatchBooleanExpression(this); + else return visitor.visitChildren(this); + } + } + + public final MatchBooleanExpressionContext matchBooleanExpression() throws RecognitionException { + MatchBooleanExpressionContext _localctx = new 
MatchBooleanExpressionContext(_ctx, getState()); + enterRule(_localctx, 14, RULE_matchBooleanExpression); + try { + enterOuterAlt(_localctx, 1); + { + setState(217); + qualifiedName(); + setState(218); + match(MATCH); + setState(219); + ((MatchBooleanExpressionContext)_localctx).queryString = string(); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + @SuppressWarnings("CheckReturnValue") public static class ValueExpressionContext extends ParserRuleContext { @SuppressWarnings("this-escape") @@ -1188,16 +1275,16 @@ public T accept(ParseTreeVisitor visitor) { public final ValueExpressionContext valueExpression() throws RecognitionException { ValueExpressionContext _localctx = new ValueExpressionContext(_ctx, getState()); - enterRule(_localctx, 14, RULE_valueExpression); + enterRule(_localctx, 16, RULE_valueExpression); try { - setState(219); + setState(226); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,12,_ctx) ) { case 1: _localctx = new ValueExpressionDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(214); + setState(221); operatorExpression(0); } break; @@ -1205,11 +1292,11 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio _localctx = new ComparisonContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(215); + setState(222); ((ComparisonContext)_localctx).left = operatorExpression(0); - setState(216); + setState(223); comparisonOperator(); - setState(217); + setState(224); ((ComparisonContext)_localctx).right = operatorExpression(0); } break; @@ -1327,14 +1414,14 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE int _parentState = getState(); OperatorExpressionContext _localctx = new OperatorExpressionContext(_ctx, _parentState); OperatorExpressionContext _prevctx = _localctx; - int _startState = 16; - enterRecursionRule(_localctx, 16, RULE_operatorExpression, _p); + int _startState = 18; + enterRecursionRule(_localctx, 18, RULE_operatorExpression, _p); int _la; try { int _alt; enterOuterAlt(_localctx, 1); { - setState(225); + setState(232); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,13,_ctx) ) { case 1: @@ -1343,7 +1430,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _ctx = _localctx; _prevctx = _localctx; - setState(222); + setState(229); primaryExpression(0); } break; @@ -1352,7 +1439,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticUnaryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(223); + setState(230); ((ArithmeticUnaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1363,13 +1450,13 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(224); + setState(231); operatorExpression(3); } break; } _ctx.stop = _input.LT(-1); - setState(235); + setState(242); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1377,7 +1464,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(233); + setState(240); 
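// NOTE: hypothetical usage sketch, not part of this patch. The generated
// matchBooleanExpression() above implements the new grammar rule
// `qualifiedName MATCH string`, i.e. the MATCH predicate this patch adds to
// the ES|QL grammar. Assuming the ANTLR 4 runtime on the classpath and the
// generated EsqlBaseLexer from the same package, the rule could be exercised
// roughly like this (imports: org.antlr.v4.runtime.CharStreams,
// org.antlr.v4.runtime.CommonTokenStream):
//
//     EsqlBaseLexer lexer = new EsqlBaseLexer(
//         CharStreams.fromString("from logs | where message match \"connection reset\""));
//     EsqlBaseParser parser = new EsqlBaseParser(new CommonTokenStream(lexer));
//     // singleStatement() is the start rule (RULE_singleStatement = 0); the
//     // MATCH predicate surfaces as a matchBooleanExpression node in the tree.
//     System.out.println(parser.singleStatement().toStringTree(parser));
//
// The query text is an illustrative guess at the surface syntax, inferred from
// the literal token names ('from', 'where', 'match') declared in this file.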
_errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) { case 1: @@ -1385,12 +1472,12 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(227); + setState(234); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(228); + setState(235); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); - if ( !(((((_la - 65)) & ~0x3f) == 0 && ((1L << (_la - 65)) & 7L) != 0)) ) { + if ( !(((((_la - 66)) & ~0x3f) == 0 && ((1L << (_la - 66)) & 7L) != 0)) ) { ((ArithmeticBinaryContext)_localctx).operator = (Token)_errHandler.recoverInline(this); } else { @@ -1398,7 +1485,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(229); + setState(236); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(3); } break; @@ -1407,9 +1494,9 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(230); + setState(237); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(231); + setState(238); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1420,14 +1507,14 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(232); + setState(239); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(2); } break; } } } - setState(237); + setState(244); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); } @@ -1579,13 +1666,13 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc int _parentState = getState(); PrimaryExpressionContext _localctx = new PrimaryExpressionContext(_ctx, _parentState); PrimaryExpressionContext _prevctx = _localctx; - int _startState = 18; - enterRecursionRule(_localctx, 18, RULE_primaryExpression, _p); + int _startState = 20; + enterRecursionRule(_localctx, 20, RULE_primaryExpression, _p); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(246); + setState(253); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,16,_ctx) ) { case 1: @@ -1594,7 +1681,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; - setState(239); + setState(246); constant(); } break; @@ -1603,7 +1690,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new DereferenceContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(240); + setState(247); qualifiedName(); } break; @@ -1612,7 +1699,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new FunctionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(241); + setState(248); functionExpression(); } break; @@ -1621,17 +1708,17 @@ private PrimaryExpressionContext 
primaryExpression(int _p) throws RecognitionExc _localctx = new ParenthesizedExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(242); + setState(249); match(LP); - setState(243); + setState(250); booleanExpression(0); - setState(244); + setState(251); match(RP); } break; } _ctx.stop = _input.LT(-1); - setState(253); + setState(260); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,17,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1642,16 +1729,16 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc { _localctx = new InlineCastContext(new PrimaryExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_primaryExpression); - setState(248); + setState(255); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(249); + setState(256); match(CAST_OP); - setState(250); + setState(257); dataType(); } } } - setState(255); + setState(262); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,17,_ctx); } @@ -1708,21 +1795,21 @@ public T accept(ParseTreeVisitor visitor) { public final FunctionExpressionContext functionExpression() throws RecognitionException { FunctionExpressionContext _localctx = new FunctionExpressionContext(_ctx, getState()); - enterRule(_localctx, 20, RULE_functionExpression); + enterRule(_localctx, 22, RULE_functionExpression); int _la; try { enterOuterAlt(_localctx, 1); { - setState(256); + setState(263); identifier(); - setState(257); + setState(264); match(LP); - setState(267); + setState(274); _errHandler.sync(this); switch (_input.LA(1)) { case ASTERISK: { - setState(258); + setState(265); match(ASTERISK); } break; @@ -1743,21 +1830,21 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx case QUOTED_IDENTIFIER: { { - setState(259); + setState(266); booleanExpression(0); - setState(264); + setState(271); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(260); + setState(267); match(COMMA); - setState(261); + setState(268); booleanExpression(0); } } - setState(266); + setState(273); _errHandler.sync(this); _la = _input.LA(1); } @@ -1769,7 +1856,7 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx default: break; } - setState(269); + setState(276); match(RP); } } @@ -1822,12 +1909,12 @@ public T accept(ParseTreeVisitor visitor) { public final DataTypeContext dataType() throws RecognitionException { DataTypeContext _localctx = new DataTypeContext(_ctx, getState()); - enterRule(_localctx, 22, RULE_dataType); + enterRule(_localctx, 24, RULE_dataType); try { _localctx = new ToDataTypeContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(271); + setState(278); identifier(); } } @@ -1870,13 +1957,13 @@ public T accept(ParseTreeVisitor visitor) { public final RowCommandContext rowCommand() throws RecognitionException { RowCommandContext _localctx = new RowCommandContext(_ctx, getState()); - enterRule(_localctx, 24, RULE_rowCommand); + enterRule(_localctx, 26, RULE_rowCommand); try { enterOuterAlt(_localctx, 1); { - setState(273); + setState(280); match(ROW); - setState(274); + setState(281); fields(); } } @@ -1925,28 +2012,28 @@ public T accept(ParseTreeVisitor visitor) { public final FieldsContext fields() throws RecognitionException { FieldsContext _localctx = new FieldsContext(_ctx, getState()); - enterRule(_localctx, 26, RULE_fields); + 
enterRule(_localctx, 28, RULE_fields); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(276); + setState(283); field(); - setState(281); + setState(288); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,20,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(277); + setState(284); match(COMMA); - setState(278); + setState(285); field(); } } } - setState(283); + setState(290); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,20,_ctx); } @@ -1994,26 +2081,26 @@ public T accept(ParseTreeVisitor visitor) { public final FieldContext field() throws RecognitionException { FieldContext _localctx = new FieldContext(_ctx, getState()); - enterRule(_localctx, 28, RULE_field); + enterRule(_localctx, 30, RULE_field); try { - setState(289); + setState(296); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,21,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(284); + setState(291); booleanExpression(0); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(285); + setState(292); qualifiedName(); - setState(286); + setState(293); match(ASSIGN); - setState(287); + setState(294); booleanExpression(0); } break; @@ -2068,39 +2155,39 @@ public T accept(ParseTreeVisitor visitor) { public final FromCommandContext fromCommand() throws RecognitionException { FromCommandContext _localctx = new FromCommandContext(_ctx, getState()); - enterRule(_localctx, 30, RULE_fromCommand); + enterRule(_localctx, 32, RULE_fromCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(291); + setState(298); match(FROM); - setState(292); + setState(299); indexPattern(); - setState(297); + setState(304); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,22,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(293); + setState(300); match(COMMA); - setState(294); + setState(301); indexPattern(); } } } - setState(299); + setState(306); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,22,_ctx); } - setState(301); + setState(308); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,23,_ctx) ) { case 1: { - setState(300); + setState(307); metadata(); } break; @@ -2149,26 +2236,26 @@ public T accept(ParseTreeVisitor visitor) { public final IndexPatternContext indexPattern() throws RecognitionException { IndexPatternContext _localctx = new IndexPatternContext(_ctx, getState()); - enterRule(_localctx, 32, RULE_indexPattern); + enterRule(_localctx, 34, RULE_indexPattern); try { - setState(308); + setState(315); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(303); + setState(310); clusterString(); - setState(304); + setState(311); match(COLON); - setState(305); + setState(312); indexString(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(307); + setState(314); indexString(); } break; @@ -2210,11 +2297,11 @@ public T accept(ParseTreeVisitor visitor) { public final ClusterStringContext clusterString() throws RecognitionException { ClusterStringContext _localctx = new ClusterStringContext(_ctx, getState()); - enterRule(_localctx, 34, RULE_clusterString); + enterRule(_localctx, 36, RULE_clusterString); try { enterOuterAlt(_localctx, 1); { - setState(310); + setState(317); match(UNQUOTED_SOURCE); } } @@ -2255,12 +2342,12 @@ public T accept(ParseTreeVisitor visitor) 
{ public final IndexStringContext indexString() throws RecognitionException { IndexStringContext _localctx = new IndexStringContext(_ctx, getState()); - enterRule(_localctx, 36, RULE_indexString); + enterRule(_localctx, 38, RULE_indexString); int _la; try { enterOuterAlt(_localctx, 1); { - setState(312); + setState(319); _la = _input.LA(1); if ( !(_la==UNQUOTED_SOURCE || _la==QUOTED_STRING) ) { _errHandler.recoverInline(this); @@ -2313,22 +2400,22 @@ public T accept(ParseTreeVisitor visitor) { public final MetadataContext metadata() throws RecognitionException { MetadataContext _localctx = new MetadataContext(_ctx, getState()); - enterRule(_localctx, 38, RULE_metadata); + enterRule(_localctx, 40, RULE_metadata); try { - setState(316); + setState(323); _errHandler.sync(this); switch (_input.LA(1)) { case METADATA: enterOuterAlt(_localctx, 1); { - setState(314); + setState(321); metadataOption(); } break; case OPENING_BRACKET: enterOuterAlt(_localctx, 2); { - setState(315); + setState(322); deprecated_metadata(); } break; @@ -2380,30 +2467,30 @@ public T accept(ParseTreeVisitor visitor) { public final MetadataOptionContext metadataOption() throws RecognitionException { MetadataOptionContext _localctx = new MetadataOptionContext(_ctx, getState()); - enterRule(_localctx, 40, RULE_metadataOption); + enterRule(_localctx, 42, RULE_metadataOption); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(318); + setState(325); match(METADATA); - setState(319); + setState(326); match(UNQUOTED_SOURCE); - setState(324); + setState(331); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,26,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(320); + setState(327); match(COMMA); - setState(321); + setState(328); match(UNQUOTED_SOURCE); } } } - setState(326); + setState(333); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,26,_ctx); } @@ -2448,15 +2535,15 @@ public T accept(ParseTreeVisitor visitor) { public final Deprecated_metadataContext deprecated_metadata() throws RecognitionException { Deprecated_metadataContext _localctx = new Deprecated_metadataContext(_ctx, getState()); - enterRule(_localctx, 42, RULE_deprecated_metadata); + enterRule(_localctx, 44, RULE_deprecated_metadata); try { enterOuterAlt(_localctx, 1); { - setState(327); + setState(334); match(OPENING_BRACKET); - setState(328); + setState(335); metadataOption(); - setState(329); + setState(336); match(CLOSING_BRACKET); } } @@ -2515,51 +2602,51 @@ public T accept(ParseTreeVisitor visitor) { public final MetricsCommandContext metricsCommand() throws RecognitionException { MetricsCommandContext _localctx = new MetricsCommandContext(_ctx, getState()); - enterRule(_localctx, 44, RULE_metricsCommand); + enterRule(_localctx, 46, RULE_metricsCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(331); + setState(338); match(METRICS); - setState(332); + setState(339); indexPattern(); - setState(337); + setState(344); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,27,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(333); + setState(340); match(COMMA); - setState(334); + setState(341); indexPattern(); } } } - setState(339); + setState(346); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,27,_ctx); } - setState(341); + setState(348); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,28,_ctx) ) { 
case 1: { - setState(340); + setState(347); ((MetricsCommandContext)_localctx).aggregates = fields(); } break; } - setState(345); + setState(352); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { case 1: { - setState(343); + setState(350); match(BY); - setState(344); + setState(351); ((MetricsCommandContext)_localctx).grouping = fields(); } break; @@ -2605,13 +2692,13 @@ public T accept(ParseTreeVisitor visitor) { public final EvalCommandContext evalCommand() throws RecognitionException { EvalCommandContext _localctx = new EvalCommandContext(_ctx, getState()); - enterRule(_localctx, 46, RULE_evalCommand); + enterRule(_localctx, 48, RULE_evalCommand); try { enterOuterAlt(_localctx, 1); { - setState(347); + setState(354); match(EVAL); - setState(348); + setState(355); fields(); } } @@ -2660,30 +2747,30 @@ public T accept(ParseTreeVisitor visitor) { public final StatsCommandContext statsCommand() throws RecognitionException { StatsCommandContext _localctx = new StatsCommandContext(_ctx, getState()); - enterRule(_localctx, 48, RULE_statsCommand); + enterRule(_localctx, 50, RULE_statsCommand); try { enterOuterAlt(_localctx, 1); { - setState(350); + setState(357); match(STATS); - setState(352); + setState(359); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,30,_ctx) ) { case 1: { - setState(351); + setState(358); ((StatsCommandContext)_localctx).stats = fields(); } break; } - setState(356); + setState(363); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,31,_ctx) ) { case 1: { - setState(354); + setState(361); match(BY); - setState(355); + setState(362); ((StatsCommandContext)_localctx).grouping = fields(); } break; @@ -2735,22 +2822,22 @@ public T accept(ParseTreeVisitor visitor) { public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionException { InlinestatsCommandContext _localctx = new InlinestatsCommandContext(_ctx, getState()); - enterRule(_localctx, 50, RULE_inlinestatsCommand); + enterRule(_localctx, 52, RULE_inlinestatsCommand); try { enterOuterAlt(_localctx, 1); { - setState(358); + setState(365); match(INLINESTATS); - setState(359); + setState(366); ((InlinestatsCommandContext)_localctx).stats = fields(); - setState(362); + setState(369); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,32,_ctx) ) { case 1: { - setState(360); + setState(367); match(BY); - setState(361); + setState(368); ((InlinestatsCommandContext)_localctx).grouping = fields(); } break; @@ -2802,28 +2889,28 @@ public T accept(ParseTreeVisitor visitor) { public final QualifiedNameContext qualifiedName() throws RecognitionException { QualifiedNameContext _localctx = new QualifiedNameContext(_ctx, getState()); - enterRule(_localctx, 52, RULE_qualifiedName); + enterRule(_localctx, 54, RULE_qualifiedName); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(364); + setState(371); identifier(); - setState(369); + setState(376); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,33,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(365); + setState(372); match(DOT); - setState(366); + setState(373); identifier(); } } } - setState(371); + setState(378); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,33,_ctx); } @@ -2874,28 +2961,28 @@ public T accept(ParseTreeVisitor visitor) { public final QualifiedNamePatternContext qualifiedNamePattern() throws RecognitionException { 
QualifiedNamePatternContext _localctx = new QualifiedNamePatternContext(_ctx, getState()); - enterRule(_localctx, 54, RULE_qualifiedNamePattern); + enterRule(_localctx, 56, RULE_qualifiedNamePattern); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(372); + setState(379); identifierPattern(); - setState(377); + setState(384); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,34,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(373); + setState(380); match(DOT); - setState(374); + setState(381); identifierPattern(); } } } - setState(379); + setState(386); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,34,_ctx); } @@ -2946,28 +3033,28 @@ public T accept(ParseTreeVisitor visitor) { public final QualifiedNamePatternsContext qualifiedNamePatterns() throws RecognitionException { QualifiedNamePatternsContext _localctx = new QualifiedNamePatternsContext(_ctx, getState()); - enterRule(_localctx, 56, RULE_qualifiedNamePatterns); + enterRule(_localctx, 58, RULE_qualifiedNamePatterns); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(380); + setState(387); qualifiedNamePattern(); - setState(385); + setState(392); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,35,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(381); + setState(388); match(COMMA); - setState(382); + setState(389); qualifiedNamePattern(); } } } - setState(387); + setState(394); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,35,_ctx); } @@ -3010,12 +3097,12 @@ public T accept(ParseTreeVisitor visitor) { public final IdentifierContext identifier() throws RecognitionException { IdentifierContext _localctx = new IdentifierContext(_ctx, getState()); - enterRule(_localctx, 58, RULE_identifier); + enterRule(_localctx, 60, RULE_identifier); int _la; try { enterOuterAlt(_localctx, 1); { - setState(388); + setState(395); _la = _input.LA(1); if ( !(_la==UNQUOTED_IDENTIFIER || _la==QUOTED_IDENTIFIER) ) { _errHandler.recoverInline(this); @@ -3063,11 +3150,11 @@ public T accept(ParseTreeVisitor visitor) { public final IdentifierPatternContext identifierPattern() throws RecognitionException { IdentifierPatternContext _localctx = new IdentifierPatternContext(_ctx, getState()); - enterRule(_localctx, 60, RULE_identifierPattern); + enterRule(_localctx, 62, RULE_identifierPattern); try { enterOuterAlt(_localctx, 1); { - setState(390); + setState(397); match(ID_PATTERN); } } @@ -3335,17 +3422,17 @@ public T accept(ParseTreeVisitor visitor) { public final ConstantContext constant() throws RecognitionException { ConstantContext _localctx = new ConstantContext(_ctx, getState()); - enterRule(_localctx, 62, RULE_constant); + enterRule(_localctx, 64, RULE_constant); int _la; try { - setState(434); + setState(441); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,39,_ctx) ) { case 1: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(392); + setState(399); match(NULL); } break; @@ -3353,9 +3440,9 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new QualifiedIntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(393); + setState(400); integerValue(); - setState(394); + setState(401); match(UNQUOTED_IDENTIFIER); } break; @@ -3363,7 +3450,7 @@ public final ConstantContext constant() throws RecognitionException 
{ _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(396); + setState(403); decimalValue(); } break; @@ -3371,7 +3458,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(397); + setState(404); integerValue(); } break; @@ -3379,7 +3466,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(398); + setState(405); booleanValue(); } break; @@ -3387,7 +3474,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new InputParamsContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(399); + setState(406); params(); } break; @@ -3395,7 +3482,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(400); + setState(407); string(); } break; @@ -3403,27 +3490,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new NumericArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(401); + setState(408); match(OPENING_BRACKET); - setState(402); + setState(409); numericValue(); - setState(407); + setState(414); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(403); + setState(410); match(COMMA); - setState(404); + setState(411); numericValue(); } } - setState(409); + setState(416); _errHandler.sync(this); _la = _input.LA(1); } - setState(410); + setState(417); match(CLOSING_BRACKET); } break; @@ -3431,27 +3518,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(412); + setState(419); match(OPENING_BRACKET); - setState(413); + setState(420); booleanValue(); - setState(418); + setState(425); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(414); + setState(421); match(COMMA); - setState(415); + setState(422); booleanValue(); } } - setState(420); + setState(427); _errHandler.sync(this); _la = _input.LA(1); } - setState(421); + setState(428); match(CLOSING_BRACKET); } break; @@ -3459,27 +3546,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(423); + setState(430); match(OPENING_BRACKET); - setState(424); + setState(431); string(); - setState(429); + setState(436); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(425); + setState(432); match(COMMA); - setState(426); + setState(433); string(); } } - setState(431); + setState(438); _errHandler.sync(this); _la = _input.LA(1); } - setState(432); + setState(439); match(CLOSING_BRACKET); } break; @@ -3551,16 +3638,16 @@ public T accept(ParseTreeVisitor visitor) { public final ParamsContext params() throws RecognitionException { ParamsContext _localctx = new ParamsContext(_ctx, getState()); - enterRule(_localctx, 64, RULE_params); + enterRule(_localctx, 66, RULE_params); try { - setState(438); + setState(445); _errHandler.sync(this); switch (_input.LA(1)) { case PARAM: _localctx = new InputParamContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(436); + setState(443); match(PARAM); } break; @@ -3568,7 +3655,7 @@ public final ParamsContext params() throws 
RecognitionException { _localctx = new InputNamedOrPositionalParamContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(437); + setState(444); match(NAMED_OR_POSITIONAL_PARAM); } break; @@ -3613,13 +3700,13 @@ public T accept(ParseTreeVisitor visitor) { public final LimitCommandContext limitCommand() throws RecognitionException { LimitCommandContext _localctx = new LimitCommandContext(_ctx, getState()); - enterRule(_localctx, 66, RULE_limitCommand); + enterRule(_localctx, 68, RULE_limitCommand); try { enterOuterAlt(_localctx, 1); { - setState(440); + setState(447); match(LIMIT); - setState(441); + setState(448); match(INTEGER_LITERAL); } } @@ -3669,30 +3756,30 @@ public T accept(ParseTreeVisitor visitor) { public final SortCommandContext sortCommand() throws RecognitionException { SortCommandContext _localctx = new SortCommandContext(_ctx, getState()); - enterRule(_localctx, 68, RULE_sortCommand); + enterRule(_localctx, 70, RULE_sortCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(443); + setState(450); match(SORT); - setState(444); + setState(451); orderExpression(); - setState(449); + setState(456); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,41,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(445); + setState(452); match(COMMA); - setState(446); + setState(453); orderExpression(); } } } - setState(451); + setState(458); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,41,_ctx); } @@ -3743,19 +3830,19 @@ public T accept(ParseTreeVisitor visitor) { public final OrderExpressionContext orderExpression() throws RecognitionException { OrderExpressionContext _localctx = new OrderExpressionContext(_ctx, getState()); - enterRule(_localctx, 70, RULE_orderExpression); + enterRule(_localctx, 72, RULE_orderExpression); int _la; try { enterOuterAlt(_localctx, 1); { - setState(452); + setState(459); booleanExpression(0); - setState(454); + setState(461); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,42,_ctx) ) { case 1: { - setState(453); + setState(460); ((OrderExpressionContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -3769,14 +3856,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio } break; } - setState(458); + setState(465); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,43,_ctx) ) { case 1: { - setState(456); + setState(463); match(NULLS); - setState(457); + setState(464); ((OrderExpressionContext)_localctx).nullOrdering = _input.LT(1); _la = _input.LA(1); if ( !(_la==FIRST || _la==LAST) ) { @@ -3831,13 +3918,13 @@ public T accept(ParseTreeVisitor visitor) { public final KeepCommandContext keepCommand() throws RecognitionException { KeepCommandContext _localctx = new KeepCommandContext(_ctx, getState()); - enterRule(_localctx, 72, RULE_keepCommand); + enterRule(_localctx, 74, RULE_keepCommand); try { enterOuterAlt(_localctx, 1); { - setState(460); + setState(467); match(KEEP); - setState(461); + setState(468); qualifiedNamePatterns(); } } @@ -3880,13 +3967,13 @@ public T accept(ParseTreeVisitor visitor) { public final DropCommandContext dropCommand() throws RecognitionException { DropCommandContext _localctx = new DropCommandContext(_ctx, getState()); - enterRule(_localctx, 74, RULE_dropCommand); + enterRule(_localctx, 76, RULE_dropCommand); try { enterOuterAlt(_localctx, 1); { - setState(463); + setState(470); 
match(DROP); - setState(464); + setState(471); qualifiedNamePatterns(); } } @@ -3936,30 +4023,30 @@ public T accept(ParseTreeVisitor visitor) { public final RenameCommandContext renameCommand() throws RecognitionException { RenameCommandContext _localctx = new RenameCommandContext(_ctx, getState()); - enterRule(_localctx, 76, RULE_renameCommand); + enterRule(_localctx, 78, RULE_renameCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(466); + setState(473); match(RENAME); - setState(467); + setState(474); renameClause(); - setState(472); + setState(479); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,44,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(468); + setState(475); match(COMMA); - setState(469); + setState(476); renameClause(); } } } - setState(474); + setState(481); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,44,_ctx); } @@ -4009,15 +4096,15 @@ public T accept(ParseTreeVisitor visitor) { public final RenameClauseContext renameClause() throws RecognitionException { RenameClauseContext _localctx = new RenameClauseContext(_ctx, getState()); - enterRule(_localctx, 78, RULE_renameClause); + enterRule(_localctx, 80, RULE_renameClause); try { enterOuterAlt(_localctx, 1); { - setState(475); + setState(482); ((RenameClauseContext)_localctx).oldName = qualifiedNamePattern(); - setState(476); + setState(483); match(AS); - setState(477); + setState(484); ((RenameClauseContext)_localctx).newName = qualifiedNamePattern(); } } @@ -4066,22 +4153,22 @@ public T accept(ParseTreeVisitor visitor) { public final DissectCommandContext dissectCommand() throws RecognitionException { DissectCommandContext _localctx = new DissectCommandContext(_ctx, getState()); - enterRule(_localctx, 80, RULE_dissectCommand); + enterRule(_localctx, 82, RULE_dissectCommand); try { enterOuterAlt(_localctx, 1); { - setState(479); + setState(486); match(DISSECT); - setState(480); + setState(487); primaryExpression(0); - setState(481); + setState(488); string(); - setState(483); + setState(490); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,45,_ctx) ) { case 1: { - setState(482); + setState(489); commandOptions(); } break; @@ -4130,15 +4217,15 @@ public T accept(ParseTreeVisitor visitor) { public final GrokCommandContext grokCommand() throws RecognitionException { GrokCommandContext _localctx = new GrokCommandContext(_ctx, getState()); - enterRule(_localctx, 82, RULE_grokCommand); + enterRule(_localctx, 84, RULE_grokCommand); try { enterOuterAlt(_localctx, 1); { - setState(485); + setState(492); match(GROK); - setState(486); + setState(493); primaryExpression(0); - setState(487); + setState(494); string(); } } @@ -4181,13 +4268,13 @@ public T accept(ParseTreeVisitor visitor) { public final MvExpandCommandContext mvExpandCommand() throws RecognitionException { MvExpandCommandContext _localctx = new MvExpandCommandContext(_ctx, getState()); - enterRule(_localctx, 84, RULE_mvExpandCommand); + enterRule(_localctx, 86, RULE_mvExpandCommand); try { enterOuterAlt(_localctx, 1); { - setState(489); + setState(496); match(MV_EXPAND); - setState(490); + setState(497); qualifiedName(); } } @@ -4236,28 +4323,28 @@ public T accept(ParseTreeVisitor visitor) { public final CommandOptionsContext commandOptions() throws RecognitionException { CommandOptionsContext _localctx = new CommandOptionsContext(_ctx, getState()); - enterRule(_localctx, 86, RULE_commandOptions); + 
enterRule(_localctx, 88, RULE_commandOptions); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(492); + setState(499); commandOption(); - setState(497); + setState(504); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,46,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(493); + setState(500); match(COMMA); - setState(494); + setState(501); commandOption(); } } } - setState(499); + setState(506); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,46,_ctx); } @@ -4305,15 +4392,15 @@ public T accept(ParseTreeVisitor visitor) { public final CommandOptionContext commandOption() throws RecognitionException { CommandOptionContext _localctx = new CommandOptionContext(_ctx, getState()); - enterRule(_localctx, 88, RULE_commandOption); + enterRule(_localctx, 90, RULE_commandOption); try { enterOuterAlt(_localctx, 1); { - setState(500); + setState(507); identifier(); - setState(501); + setState(508); match(ASSIGN); - setState(502); + setState(509); constant(); } } @@ -4354,12 +4441,12 @@ public T accept(ParseTreeVisitor visitor) { public final BooleanValueContext booleanValue() throws RecognitionException { BooleanValueContext _localctx = new BooleanValueContext(_ctx, getState()); - enterRule(_localctx, 90, RULE_booleanValue); + enterRule(_localctx, 92, RULE_booleanValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(504); + setState(511); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -4412,22 +4499,22 @@ public T accept(ParseTreeVisitor visitor) { public final NumericValueContext numericValue() throws RecognitionException { NumericValueContext _localctx = new NumericValueContext(_ctx, getState()); - enterRule(_localctx, 92, RULE_numericValue); + enterRule(_localctx, 94, RULE_numericValue); try { - setState(508); + setState(515); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,47,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(506); + setState(513); decimalValue(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(507); + setState(514); integerValue(); } break; @@ -4471,17 +4558,17 @@ public T accept(ParseTreeVisitor visitor) { public final DecimalValueContext decimalValue() throws RecognitionException { DecimalValueContext _localctx = new DecimalValueContext(_ctx, getState()); - enterRule(_localctx, 94, RULE_decimalValue); + enterRule(_localctx, 96, RULE_decimalValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(511); + setState(518); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(510); + setState(517); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4494,7 +4581,7 @@ public final DecimalValueContext decimalValue() throws RecognitionException { } } - setState(513); + setState(520); match(DECIMAL_LITERAL); } } @@ -4536,17 +4623,17 @@ public T accept(ParseTreeVisitor visitor) { public final IntegerValueContext integerValue() throws RecognitionException { IntegerValueContext _localctx = new IntegerValueContext(_ctx, getState()); - enterRule(_localctx, 96, RULE_integerValue); + enterRule(_localctx, 98, RULE_integerValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(516); + setState(523); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(515); + setState(522); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { 
_errHandler.recoverInline(this); @@ -4559,7 +4646,7 @@ public final IntegerValueContext integerValue() throws RecognitionException { } } - setState(518); + setState(525); match(INTEGER_LITERAL); } } @@ -4599,11 +4686,11 @@ public T accept(ParseTreeVisitor visitor) { public final StringContext string() throws RecognitionException { StringContext _localctx = new StringContext(_ctx, getState()); - enterRule(_localctx, 98, RULE_string); + enterRule(_localctx, 100, RULE_string); try { enterOuterAlt(_localctx, 1); { - setState(520); + setState(527); match(QUOTED_STRING); } } @@ -4648,14 +4735,14 @@ public T accept(ParseTreeVisitor visitor) { public final ComparisonOperatorContext comparisonOperator() throws RecognitionException { ComparisonOperatorContext _localctx = new ComparisonOperatorContext(_ctx, getState()); - enterRule(_localctx, 100, RULE_comparisonOperator); + enterRule(_localctx, 102, RULE_comparisonOperator); int _la; try { enterOuterAlt(_localctx, 1); { - setState(522); + setState(529); _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 9007199254740992000L) != 0)) ) { + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & -432345564227567616L) != 0)) ) { _errHandler.recoverInline(this); } else { @@ -4704,13 +4791,13 @@ public T accept(ParseTreeVisitor visitor) { public final ExplainCommandContext explainCommand() throws RecognitionException { ExplainCommandContext _localctx = new ExplainCommandContext(_ctx, getState()); - enterRule(_localctx, 102, RULE_explainCommand); + enterRule(_localctx, 104, RULE_explainCommand); try { enterOuterAlt(_localctx, 1); { - setState(524); + setState(531); match(EXPLAIN); - setState(525); + setState(532); subqueryExpression(); } } @@ -4754,15 +4841,15 @@ public T accept(ParseTreeVisitor visitor) { public final SubqueryExpressionContext subqueryExpression() throws RecognitionException { SubqueryExpressionContext _localctx = new SubqueryExpressionContext(_ctx, getState()); - enterRule(_localctx, 104, RULE_subqueryExpression); + enterRule(_localctx, 106, RULE_subqueryExpression); try { enterOuterAlt(_localctx, 1); { - setState(527); + setState(534); match(OPENING_BRACKET); - setState(528); + setState(535); query(0); - setState(529); + setState(536); match(CLOSING_BRACKET); } } @@ -4814,14 +4901,14 @@ public T accept(ParseTreeVisitor visitor) { public final ShowCommandContext showCommand() throws RecognitionException { ShowCommandContext _localctx = new ShowCommandContext(_ctx, getState()); - enterRule(_localctx, 106, RULE_showCommand); + enterRule(_localctx, 108, RULE_showCommand); try { _localctx = new ShowInfoContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(531); + setState(538); match(SHOW); - setState(532); + setState(539); match(INFO); } } @@ -4873,14 +4960,14 @@ public T accept(ParseTreeVisitor visitor) { public final MetaCommandContext metaCommand() throws RecognitionException { MetaCommandContext _localctx = new MetaCommandContext(_ctx, getState()); - enterRule(_localctx, 108, RULE_metaCommand); + enterRule(_localctx, 110, RULE_metaCommand); try { _localctx = new MetaFunctionsContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(534); + setState(541); match(META); - setState(535); + setState(542); match(FUNCTIONS); } } @@ -4938,51 +5025,51 @@ public T accept(ParseTreeVisitor visitor) { public final EnrichCommandContext enrichCommand() throws RecognitionException { EnrichCommandContext _localctx = new EnrichCommandContext(_ctx, getState()); - enterRule(_localctx, 110, RULE_enrichCommand); + enterRule(_localctx, 
112, RULE_enrichCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(537); + setState(544); match(ENRICH); - setState(538); + setState(545); ((EnrichCommandContext)_localctx).policyName = match(ENRICH_POLICY_NAME); - setState(541); + setState(548); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,50,_ctx) ) { case 1: { - setState(539); + setState(546); match(ON); - setState(540); + setState(547); ((EnrichCommandContext)_localctx).matchField = qualifiedNamePattern(); } break; } - setState(552); + setState(559); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,52,_ctx) ) { case 1: { - setState(543); + setState(550); match(WITH); - setState(544); + setState(551); enrichWithClause(); - setState(549); + setState(556); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,51,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(545); + setState(552); match(COMMA); - setState(546); + setState(553); enrichWithClause(); } } } - setState(551); + setState(558); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,51,_ctx); } @@ -5035,23 +5122,23 @@ public T accept(ParseTreeVisitor visitor) { public final EnrichWithClauseContext enrichWithClause() throws RecognitionException { EnrichWithClauseContext _localctx = new EnrichWithClauseContext(_ctx, getState()); - enterRule(_localctx, 112, RULE_enrichWithClause); + enterRule(_localctx, 114, RULE_enrichWithClause); try { enterOuterAlt(_localctx, 1); { - setState(557); + setState(564); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,53,_ctx) ) { case 1: { - setState(554); + setState(561); ((EnrichWithClauseContext)_localctx).newName = qualifiedNamePattern(); - setState(555); + setState(562); match(ASSIGN); } break; } - setState(559); + setState(566); ((EnrichWithClauseContext)_localctx).enrichField = qualifiedNamePattern(); } } @@ -5100,17 +5187,17 @@ public T accept(ParseTreeVisitor visitor) { public final LookupCommandContext lookupCommand() throws RecognitionException { LookupCommandContext _localctx = new LookupCommandContext(_ctx, getState()); - enterRule(_localctx, 114, RULE_lookupCommand); + enterRule(_localctx, 116, RULE_lookupCommand); try { enterOuterAlt(_localctx, 1); { - setState(561); + setState(568); match(LOOKUP); - setState(562); + setState(569); ((LookupCommandContext)_localctx).tableName = indexPattern(); - setState(563); + setState(570); match(ON); - setState(564); + setState(571); ((LookupCommandContext)_localctx).matchFields = qualifiedNamePatterns(); } } @@ -5131,9 +5218,9 @@ public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { return query_sempred((QueryContext)_localctx, predIndex); case 5: return booleanExpression_sempred((BooleanExpressionContext)_localctx, predIndex); - case 8: - return operatorExpression_sempred((OperatorExpressionContext)_localctx, predIndex); case 9: + return operatorExpression_sempred((OperatorExpressionContext)_localctx, predIndex); + case 10: return primaryExpression_sempred((PrimaryExpressionContext)_localctx, predIndex); } return true; @@ -5172,7 +5259,7 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in } public static final String _serializedATN = - "\u0004\u0001|\u0237\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ + "\u0004\u0001}\u023e\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ 
"\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002"+ "\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002"+ "\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002"+ @@ -5187,351 +5274,354 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in "(\u0007(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002"+ "-\u0007-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u00071\u0002"+ "2\u00072\u00023\u00073\u00024\u00074\u00025\u00075\u00026\u00076\u0002"+ - "7\u00077\u00028\u00078\u00029\u00079\u0001\u0000\u0001\u0000\u0001\u0000"+ - "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - "\u0005\u0001~\b\u0001\n\u0001\f\u0001\u0081\t\u0001\u0001\u0002\u0001"+ - "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0003\u0002\u0089"+ - "\b\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ + "7\u00077\u00028\u00078\u00029\u00079\u0002:\u0007:\u0001\u0000\u0001\u0000"+ + "\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ + "\u0001\u0001\u0005\u0001\u0080\b\u0001\n\u0001\f\u0001\u0083\t\u0001\u0001"+ + "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0003"+ + "\u0002\u008b\b\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ - "\u0003\u0001\u0003\u0001\u0003\u0003\u0003\u0099\b\u0003\u0001\u0004\u0001"+ - "\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00a5\b\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005\u0005\u00ac\b\u0005\n"+ - "\u0005\f\u0005\u00af\t\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0003\u0005\u00b6\b\u0005\u0001\u0005\u0001\u0005\u0003"+ - "\u0005\u00ba\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0005\u0005\u00c2\b\u0005\n\u0005\f\u0005\u00c5\t\u0005"+ - "\u0001\u0006\u0001\u0006\u0003\u0006\u00c9\b\u0006\u0001\u0006\u0001\u0006"+ - "\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006\u00d0\b\u0006\u0001\u0006"+ - "\u0001\u0006\u0001\u0006\u0003\u0006\u00d5\b\u0006\u0001\u0007\u0001\u0007"+ - "\u0001\u0007\u0001\u0007\u0001\u0007\u0003\u0007\u00dc\b\u0007\u0001\b"+ - "\u0001\b\u0001\b\u0001\b\u0003\b\u00e2\b\b\u0001\b\u0001\b\u0001\b\u0001"+ - "\b\u0001\b\u0001\b\u0005\b\u00ea\b\b\n\b\f\b\u00ed\t\b\u0001\t\u0001\t"+ - "\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0003\t\u00f7\b\t\u0001"+ - "\t\u0001\t\u0001\t\u0005\t\u00fc\b\t\n\t\f\t\u00ff\t\t\u0001\n\u0001\n"+ - "\u0001\n\u0001\n\u0001\n\u0001\n\u0005\n\u0107\b\n\n\n\f\n\u010a\t\n\u0003"+ - "\n\u010c\b\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001"+ - "\f\u0001\r\u0001\r\u0001\r\u0005\r\u0118\b\r\n\r\f\r\u011b\t\r\u0001\u000e"+ - "\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0003\u000e\u0122\b\u000e"+ - "\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0005\u000f\u0128\b\u000f"+ - "\n\u000f\f\u000f\u012b\t\u000f\u0001\u000f\u0003\u000f\u012e\b\u000f\u0001"+ - "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0003\u0010\u0135"+ - "\b\u0010\u0001\u0011\u0001\u0011\u0001\u0012\u0001\u0012\u0001\u0013\u0001"+ - "\u0013\u0003\u0013\u013d\b\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001"+ - "\u0014\u0005\u0014\u0143\b\u0014\n\u0014\f\u0014\u0146\t\u0014\u0001\u0015"+ - "\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001\u0016"+ - 
"\u0001\u0016\u0005\u0016\u0150\b\u0016\n\u0016\f\u0016\u0153\t\u0016\u0001"+ - "\u0016\u0003\u0016\u0156\b\u0016\u0001\u0016\u0001\u0016\u0003\u0016\u015a"+ - "\b\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018\u0003"+ - "\u0018\u0161\b\u0018\u0001\u0018\u0001\u0018\u0003\u0018\u0165\b\u0018"+ - "\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0003\u0019\u016b\b\u0019"+ - "\u0001\u001a\u0001\u001a\u0001\u001a\u0005\u001a\u0170\b\u001a\n\u001a"+ - "\f\u001a\u0173\t\u001a\u0001\u001b\u0001\u001b\u0001\u001b\u0005\u001b"+ - "\u0178\b\u001b\n\u001b\f\u001b\u017b\t\u001b\u0001\u001c\u0001\u001c\u0001"+ - "\u001c\u0005\u001c\u0180\b\u001c\n\u001c\f\u001c\u0183\t\u001c\u0001\u001d"+ - "\u0001\u001d\u0001\u001e\u0001\u001e\u0001\u001f\u0001\u001f\u0001\u001f"+ - "\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f"+ - "\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0005\u001f\u0196\b\u001f"+ - "\n\u001f\f\u001f\u0199\t\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001"+ - "\u001f\u0001\u001f\u0001\u001f\u0005\u001f\u01a1\b\u001f\n\u001f\f\u001f"+ - "\u01a4\t\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f"+ - "\u0001\u001f\u0005\u001f\u01ac\b\u001f\n\u001f\f\u001f\u01af\t\u001f\u0001"+ - "\u001f\u0001\u001f\u0003\u001f\u01b3\b\u001f\u0001 \u0001 \u0003 \u01b7"+ - "\b \u0001!\u0001!\u0001!\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01c0"+ - "\b\"\n\"\f\"\u01c3\t\"\u0001#\u0001#\u0003#\u01c7\b#\u0001#\u0001#\u0003"+ - "#\u01cb\b#\u0001$\u0001$\u0001$\u0001%\u0001%\u0001%\u0001&\u0001&\u0001"+ - "&\u0001&\u0005&\u01d7\b&\n&\f&\u01da\t&\u0001\'\u0001\'\u0001\'\u0001"+ - "\'\u0001(\u0001(\u0001(\u0001(\u0003(\u01e4\b(\u0001)\u0001)\u0001)\u0001"+ - ")\u0001*\u0001*\u0001*\u0001+\u0001+\u0001+\u0005+\u01f0\b+\n+\f+\u01f3"+ - "\t+\u0001,\u0001,\u0001,\u0001,\u0001-\u0001-\u0001.\u0001.\u0003.\u01fd"+ - "\b.\u0001/\u0003/\u0200\b/\u0001/\u0001/\u00010\u00030\u0205\b0\u0001"+ - "0\u00010\u00011\u00011\u00012\u00012\u00013\u00013\u00013\u00014\u0001"+ - "4\u00014\u00014\u00015\u00015\u00015\u00016\u00016\u00016\u00017\u0001"+ - "7\u00017\u00017\u00037\u021e\b7\u00017\u00017\u00017\u00017\u00057\u0224"+ - "\b7\n7\f7\u0227\t7\u00037\u0229\b7\u00018\u00018\u00018\u00038\u022e\b"+ - "8\u00018\u00018\u00019\u00019\u00019\u00019\u00019\u00019\u0000\u0004"+ - "\u0002\n\u0010\u0012:\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012"+ + "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0003\u0003\u009b\b\u0003\u0001"+ + "\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00a8"+ + "\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005"+ + "\u0005\u00af\b\u0005\n\u0005\f\u0005\u00b2\t\u0005\u0001\u0005\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00b9\b\u0005\u0001\u0005"+ + "\u0001\u0005\u0003\u0005\u00bd\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0001\u0005\u0005\u0005\u00c5\b\u0005\n\u0005"+ + "\f\u0005\u00c8\t\u0005\u0001\u0006\u0001\u0006\u0003\u0006\u00cc\b\u0006"+ + "\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006"+ + "\u00d3\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006\u00d8\b"+ + "\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001\b"+ + "\u0001\b\u0001\b\u0001\b\u0003\b\u00e3\b\b\u0001\t\u0001\t\u0001\t\u0001"+ + "\t\u0003\t\u00e9\b\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0005"+ + 
"\t\u00f1\b\t\n\t\f\t\u00f4\t\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n"+ + "\u0001\n\u0001\n\u0001\n\u0003\n\u00fe\b\n\u0001\n\u0001\n\u0001\n\u0005"+ + "\n\u0103\b\n\n\n\f\n\u0106\t\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001"+ + "\u000b\u0001\u000b\u0001\u000b\u0005\u000b\u010e\b\u000b\n\u000b\f\u000b"+ + "\u0111\t\u000b\u0003\u000b\u0113\b\u000b\u0001\u000b\u0001\u000b\u0001"+ + "\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e"+ + "\u0005\u000e\u011f\b\u000e\n\u000e\f\u000e\u0122\t\u000e\u0001\u000f\u0001"+ + "\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0003\u000f\u0129\b\u000f\u0001"+ + "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0005\u0010\u012f\b\u0010\n"+ + "\u0010\f\u0010\u0132\t\u0010\u0001\u0010\u0003\u0010\u0135\b\u0010\u0001"+ + "\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0003\u0011\u013c"+ + "\b\u0011\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0014\u0001"+ + "\u0014\u0003\u0014\u0144\b\u0014\u0001\u0015\u0001\u0015\u0001\u0015\u0001"+ + "\u0015\u0005\u0015\u014a\b\u0015\n\u0015\f\u0015\u014d\t\u0015\u0001\u0016"+ + "\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0017\u0001\u0017\u0001\u0017"+ + "\u0001\u0017\u0005\u0017\u0157\b\u0017\n\u0017\f\u0017\u015a\t\u0017\u0001"+ + "\u0017\u0003\u0017\u015d\b\u0017\u0001\u0017\u0001\u0017\u0003\u0017\u0161"+ + "\b\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019\u0003"+ + "\u0019\u0168\b\u0019\u0001\u0019\u0001\u0019\u0003\u0019\u016c\b\u0019"+ + "\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0003\u001a\u0172\b\u001a"+ + "\u0001\u001b\u0001\u001b\u0001\u001b\u0005\u001b\u0177\b\u001b\n\u001b"+ + "\f\u001b\u017a\t\u001b\u0001\u001c\u0001\u001c\u0001\u001c\u0005\u001c"+ + "\u017f\b\u001c\n\u001c\f\u001c\u0182\t\u001c\u0001\u001d\u0001\u001d\u0001"+ + "\u001d\u0005\u001d\u0187\b\u001d\n\u001d\f\u001d\u018a\t\u001d\u0001\u001e"+ + "\u0001\u001e\u0001\u001f\u0001\u001f\u0001 \u0001 \u0001 \u0001 \u0001"+ + " \u0001 \u0001 \u0001 \u0001 \u0001 \u0001 \u0001 \u0001 \u0005 \u019d"+ + "\b \n \f \u01a0\t \u0001 \u0001 \u0001 \u0001 \u0001 \u0001 \u0005 \u01a8"+ + "\b \n \f \u01ab\t \u0001 \u0001 \u0001 \u0001 \u0001 \u0001 \u0005 \u01b3"+ + "\b \n \f \u01b6\t \u0001 \u0001 \u0003 \u01ba\b \u0001!\u0001!\u0003!"+ + "\u01be\b!\u0001\"\u0001\"\u0001\"\u0001#\u0001#\u0001#\u0001#\u0005#\u01c7"+ + "\b#\n#\f#\u01ca\t#\u0001$\u0001$\u0003$\u01ce\b$\u0001$\u0001$\u0003$"+ + "\u01d2\b$\u0001%\u0001%\u0001%\u0001&\u0001&\u0001&\u0001\'\u0001\'\u0001"+ + "\'\u0001\'\u0005\'\u01de\b\'\n\'\f\'\u01e1\t\'\u0001(\u0001(\u0001(\u0001"+ + "(\u0001)\u0001)\u0001)\u0001)\u0003)\u01eb\b)\u0001*\u0001*\u0001*\u0001"+ + "*\u0001+\u0001+\u0001+\u0001,\u0001,\u0001,\u0005,\u01f7\b,\n,\f,\u01fa"+ + "\t,\u0001-\u0001-\u0001-\u0001-\u0001.\u0001.\u0001/\u0001/\u0003/\u0204"+ + "\b/\u00010\u00030\u0207\b0\u00010\u00010\u00011\u00031\u020c\b1\u0001"+ + "1\u00011\u00012\u00012\u00013\u00013\u00014\u00014\u00014\u00015\u0001"+ + "5\u00015\u00015\u00016\u00016\u00016\u00017\u00017\u00017\u00018\u0001"+ + "8\u00018\u00018\u00038\u0225\b8\u00018\u00018\u00018\u00018\u00058\u022b"+ + "\b8\n8\f8\u022e\t8\u00038\u0230\b8\u00019\u00019\u00019\u00039\u0235\b"+ + "9\u00019\u00019\u0001:\u0001:\u0001:\u0001:\u0001:\u0001:\u0000\u0004"+ + "\u0002\n\u0012\u0014;\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012"+ "\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPRTVXZ\\"+ - "^`bdfhjlnpr\u0000\b\u0001\u0000?@\u0001\u0000AC\u0002\u0000\u0019\u0019"+ - 
"\u001e\u001e\u0001\u0000GH\u0002\u0000##\'\'\u0001\u0000*+\u0002\u0000"+ - "))77\u0002\u000088:>\u0250\u0000t\u0001\u0000\u0000\u0000\u0002w\u0001"+ - "\u0000\u0000\u0000\u0004\u0088\u0001\u0000\u0000\u0000\u0006\u0098\u0001"+ - "\u0000\u0000\u0000\b\u009a\u0001\u0000\u0000\u0000\n\u00b9\u0001\u0000"+ - "\u0000\u0000\f\u00d4\u0001\u0000\u0000\u0000\u000e\u00db\u0001\u0000\u0000"+ - "\u0000\u0010\u00e1\u0001\u0000\u0000\u0000\u0012\u00f6\u0001\u0000\u0000"+ - "\u0000\u0014\u0100\u0001\u0000\u0000\u0000\u0016\u010f\u0001\u0000\u0000"+ - "\u0000\u0018\u0111\u0001\u0000\u0000\u0000\u001a\u0114\u0001\u0000\u0000"+ - "\u0000\u001c\u0121\u0001\u0000\u0000\u0000\u001e\u0123\u0001\u0000\u0000"+ - "\u0000 \u0134\u0001\u0000\u0000\u0000\"\u0136\u0001\u0000\u0000\u0000"+ - "$\u0138\u0001\u0000\u0000\u0000&\u013c\u0001\u0000\u0000\u0000(\u013e"+ - "\u0001\u0000\u0000\u0000*\u0147\u0001\u0000\u0000\u0000,\u014b\u0001\u0000"+ - "\u0000\u0000.\u015b\u0001\u0000\u0000\u00000\u015e\u0001\u0000\u0000\u0000"+ - "2\u0166\u0001\u0000\u0000\u00004\u016c\u0001\u0000\u0000\u00006\u0174"+ - "\u0001\u0000\u0000\u00008\u017c\u0001\u0000\u0000\u0000:\u0184\u0001\u0000"+ - "\u0000\u0000<\u0186\u0001\u0000\u0000\u0000>\u01b2\u0001\u0000\u0000\u0000"+ - "@\u01b6\u0001\u0000\u0000\u0000B\u01b8\u0001\u0000\u0000\u0000D\u01bb"+ - "\u0001\u0000\u0000\u0000F\u01c4\u0001\u0000\u0000\u0000H\u01cc\u0001\u0000"+ - "\u0000\u0000J\u01cf\u0001\u0000\u0000\u0000L\u01d2\u0001\u0000\u0000\u0000"+ - "N\u01db\u0001\u0000\u0000\u0000P\u01df\u0001\u0000\u0000\u0000R\u01e5"+ - "\u0001\u0000\u0000\u0000T\u01e9\u0001\u0000\u0000\u0000V\u01ec\u0001\u0000"+ - "\u0000\u0000X\u01f4\u0001\u0000\u0000\u0000Z\u01f8\u0001\u0000\u0000\u0000"+ - "\\\u01fc\u0001\u0000\u0000\u0000^\u01ff\u0001\u0000\u0000\u0000`\u0204"+ - "\u0001\u0000\u0000\u0000b\u0208\u0001\u0000\u0000\u0000d\u020a\u0001\u0000"+ - "\u0000\u0000f\u020c\u0001\u0000\u0000\u0000h\u020f\u0001\u0000\u0000\u0000"+ - "j\u0213\u0001\u0000\u0000\u0000l\u0216\u0001\u0000\u0000\u0000n\u0219"+ - "\u0001\u0000\u0000\u0000p\u022d\u0001\u0000\u0000\u0000r\u0231\u0001\u0000"+ - "\u0000\u0000tu\u0003\u0002\u0001\u0000uv\u0005\u0000\u0000\u0001v\u0001"+ - "\u0001\u0000\u0000\u0000wx\u0006\u0001\uffff\uffff\u0000xy\u0003\u0004"+ - "\u0002\u0000y\u007f\u0001\u0000\u0000\u0000z{\n\u0001\u0000\u0000{|\u0005"+ - "\u001d\u0000\u0000|~\u0003\u0006\u0003\u0000}z\u0001\u0000\u0000\u0000"+ - "~\u0081\u0001\u0000\u0000\u0000\u007f}\u0001\u0000\u0000\u0000\u007f\u0080"+ - "\u0001\u0000\u0000\u0000\u0080\u0003\u0001\u0000\u0000\u0000\u0081\u007f"+ - "\u0001\u0000\u0000\u0000\u0082\u0089\u0003f3\u0000\u0083\u0089\u0003\u001e"+ - "\u000f\u0000\u0084\u0089\u0003\u0018\f\u0000\u0085\u0089\u0003,\u0016"+ - "\u0000\u0086\u0089\u0003j5\u0000\u0087\u0089\u0003l6\u0000\u0088\u0082"+ - "\u0001\u0000\u0000\u0000\u0088\u0083\u0001\u0000\u0000\u0000\u0088\u0084"+ - "\u0001\u0000\u0000\u0000\u0088\u0085\u0001\u0000\u0000\u0000\u0088\u0086"+ - "\u0001\u0000\u0000\u0000\u0088\u0087\u0001\u0000\u0000\u0000\u0089\u0005"+ - "\u0001\u0000\u0000\u0000\u008a\u0099\u0003.\u0017\u0000\u008b\u0099\u0003"+ - "2\u0019\u0000\u008c\u0099\u0003B!\u0000\u008d\u0099\u0003r9\u0000\u008e"+ - "\u0099\u0003H$\u0000\u008f\u0099\u0003D\"\u0000\u0090\u0099\u00030\u0018"+ - "\u0000\u0091\u0099\u0003\b\u0004\u0000\u0092\u0099\u0003J%\u0000\u0093"+ - "\u0099\u0003L&\u0000\u0094\u0099\u0003P(\u0000\u0095\u0099\u0003R)\u0000"+ - "\u0096\u0099\u0003n7\u0000\u0097\u0099\u0003T*\u0000\u0098\u008a\u0001"+ - 
"\u0000\u0000\u0000\u0098\u008b\u0001\u0000\u0000\u0000\u0098\u008c\u0001"+ - "\u0000\u0000\u0000\u0098\u008d\u0001\u0000\u0000\u0000\u0098\u008e\u0001"+ - "\u0000\u0000\u0000\u0098\u008f\u0001\u0000\u0000\u0000\u0098\u0090\u0001"+ - "\u0000\u0000\u0000\u0098\u0091\u0001\u0000\u0000\u0000\u0098\u0092\u0001"+ - "\u0000\u0000\u0000\u0098\u0093\u0001\u0000\u0000\u0000\u0098\u0094\u0001"+ - "\u0000\u0000\u0000\u0098\u0095\u0001\u0000\u0000\u0000\u0098\u0096\u0001"+ - "\u0000\u0000\u0000\u0098\u0097\u0001\u0000\u0000\u0000\u0099\u0007\u0001"+ - "\u0000\u0000\u0000\u009a\u009b\u0005\u0014\u0000\u0000\u009b\u009c\u0003"+ - "\n\u0005\u0000\u009c\t\u0001\u0000\u0000\u0000\u009d\u009e\u0006\u0005"+ - "\uffff\uffff\u0000\u009e\u009f\u00050\u0000\u0000\u009f\u00ba\u0003\n"+ - "\u0005\u0007\u00a0\u00ba\u0003\u000e\u0007\u0000\u00a1\u00ba\u0003\f\u0006"+ - "\u0000\u00a2\u00a4\u0003\u000e\u0007\u0000\u00a3\u00a5\u00050\u0000\u0000"+ - "\u00a4\u00a3\u0001\u0000\u0000\u0000\u00a4\u00a5\u0001\u0000\u0000\u0000"+ - "\u00a5\u00a6\u0001\u0000\u0000\u0000\u00a6\u00a7\u0005-\u0000\u0000\u00a7"+ - "\u00a8\u0005,\u0000\u0000\u00a8\u00ad\u0003\u000e\u0007\u0000\u00a9\u00aa"+ - "\u0005&\u0000\u0000\u00aa\u00ac\u0003\u000e\u0007\u0000\u00ab\u00a9\u0001"+ - "\u0000\u0000\u0000\u00ac\u00af\u0001\u0000\u0000\u0000\u00ad\u00ab\u0001"+ - "\u0000\u0000\u0000\u00ad\u00ae\u0001\u0000\u0000\u0000\u00ae\u00b0\u0001"+ - "\u0000\u0000\u0000\u00af\u00ad\u0001\u0000\u0000\u0000\u00b0\u00b1\u0005"+ - "6\u0000\u0000\u00b1\u00ba\u0001\u0000\u0000\u0000\u00b2\u00b3\u0003\u000e"+ - "\u0007\u0000\u00b3\u00b5\u0005.\u0000\u0000\u00b4\u00b6\u00050\u0000\u0000"+ - "\u00b5\u00b4\u0001\u0000\u0000\u0000\u00b5\u00b6\u0001\u0000\u0000\u0000"+ - "\u00b6\u00b7\u0001\u0000\u0000\u0000\u00b7\u00b8\u00051\u0000\u0000\u00b8"+ - "\u00ba\u0001\u0000\u0000\u0000\u00b9\u009d\u0001\u0000\u0000\u0000\u00b9"+ - "\u00a0\u0001\u0000\u0000\u0000\u00b9\u00a1\u0001\u0000\u0000\u0000\u00b9"+ - "\u00a2\u0001\u0000\u0000\u0000\u00b9\u00b2\u0001\u0000\u0000\u0000\u00ba"+ - "\u00c3\u0001\u0000\u0000\u0000\u00bb\u00bc\n\u0004\u0000\u0000\u00bc\u00bd"+ - "\u0005\"\u0000\u0000\u00bd\u00c2\u0003\n\u0005\u0005\u00be\u00bf\n\u0003"+ - "\u0000\u0000\u00bf\u00c0\u00053\u0000\u0000\u00c0\u00c2\u0003\n\u0005"+ - "\u0004\u00c1\u00bb\u0001\u0000\u0000\u0000\u00c1\u00be\u0001\u0000\u0000"+ - "\u0000\u00c2\u00c5\u0001\u0000\u0000\u0000\u00c3\u00c1\u0001\u0000\u0000"+ - "\u0000\u00c3\u00c4\u0001\u0000\u0000\u0000\u00c4\u000b\u0001\u0000\u0000"+ - "\u0000\u00c5\u00c3\u0001\u0000\u0000\u0000\u00c6\u00c8\u0003\u000e\u0007"+ - "\u0000\u00c7\u00c9\u00050\u0000\u0000\u00c8\u00c7\u0001\u0000\u0000\u0000"+ - "\u00c8\u00c9\u0001\u0000\u0000\u0000\u00c9\u00ca\u0001\u0000\u0000\u0000"+ - "\u00ca\u00cb\u0005/\u0000\u0000\u00cb\u00cc\u0003b1\u0000\u00cc\u00d5"+ - "\u0001\u0000\u0000\u0000\u00cd\u00cf\u0003\u000e\u0007\u0000\u00ce\u00d0"+ - "\u00050\u0000\u0000\u00cf\u00ce\u0001\u0000\u0000\u0000\u00cf\u00d0\u0001"+ - "\u0000\u0000\u0000\u00d0\u00d1\u0001\u0000\u0000\u0000\u00d1\u00d2\u0005"+ - "5\u0000\u0000\u00d2\u00d3\u0003b1\u0000\u00d3\u00d5\u0001\u0000\u0000"+ - "\u0000\u00d4\u00c6\u0001\u0000\u0000\u0000\u00d4\u00cd\u0001\u0000\u0000"+ - "\u0000\u00d5\r\u0001\u0000\u0000\u0000\u00d6\u00dc\u0003\u0010\b\u0000"+ - "\u00d7\u00d8\u0003\u0010\b\u0000\u00d8\u00d9\u0003d2\u0000\u00d9\u00da"+ - "\u0003\u0010\b\u0000\u00da\u00dc\u0001\u0000\u0000\u0000\u00db\u00d6\u0001"+ - "\u0000\u0000\u0000\u00db\u00d7\u0001\u0000\u0000\u0000\u00dc\u000f\u0001"+ - 
"\u0000\u0000\u0000\u00dd\u00de\u0006\b\uffff\uffff\u0000\u00de\u00e2\u0003"+ - "\u0012\t\u0000\u00df\u00e0\u0007\u0000\u0000\u0000\u00e0\u00e2\u0003\u0010"+ - "\b\u0003\u00e1\u00dd\u0001\u0000\u0000\u0000\u00e1\u00df\u0001\u0000\u0000"+ - "\u0000\u00e2\u00eb\u0001\u0000\u0000\u0000\u00e3\u00e4\n\u0002\u0000\u0000"+ - "\u00e4\u00e5\u0007\u0001\u0000\u0000\u00e5\u00ea\u0003\u0010\b\u0003\u00e6"+ - "\u00e7\n\u0001\u0000\u0000\u00e7\u00e8\u0007\u0000\u0000\u0000\u00e8\u00ea"+ - "\u0003\u0010\b\u0002\u00e9\u00e3\u0001\u0000\u0000\u0000\u00e9\u00e6\u0001"+ - "\u0000\u0000\u0000\u00ea\u00ed\u0001\u0000\u0000\u0000\u00eb\u00e9\u0001"+ - "\u0000\u0000\u0000\u00eb\u00ec\u0001\u0000\u0000\u0000\u00ec\u0011\u0001"+ - "\u0000\u0000\u0000\u00ed\u00eb\u0001\u0000\u0000\u0000\u00ee\u00ef\u0006"+ - "\t\uffff\uffff\u0000\u00ef\u00f7\u0003>\u001f\u0000\u00f0\u00f7\u0003"+ - "4\u001a\u0000\u00f1\u00f7\u0003\u0014\n\u0000\u00f2\u00f3\u0005,\u0000"+ - "\u0000\u00f3\u00f4\u0003\n\u0005\u0000\u00f4\u00f5\u00056\u0000\u0000"+ - "\u00f5\u00f7\u0001\u0000\u0000\u0000\u00f6\u00ee\u0001\u0000\u0000\u0000"+ - "\u00f6\u00f0\u0001\u0000\u0000\u0000\u00f6\u00f1\u0001\u0000\u0000\u0000"+ - "\u00f6\u00f2\u0001\u0000\u0000\u0000\u00f7\u00fd\u0001\u0000\u0000\u0000"+ - "\u00f8\u00f9\n\u0001\u0000\u0000\u00f9\u00fa\u0005%\u0000\u0000\u00fa"+ - "\u00fc\u0003\u0016\u000b\u0000\u00fb\u00f8\u0001\u0000\u0000\u0000\u00fc"+ - "\u00ff\u0001\u0000\u0000\u0000\u00fd\u00fb\u0001\u0000\u0000\u0000\u00fd"+ - "\u00fe\u0001\u0000\u0000\u0000\u00fe\u0013\u0001\u0000\u0000\u0000\u00ff"+ - "\u00fd\u0001\u0000\u0000\u0000\u0100\u0101\u0003:\u001d\u0000\u0101\u010b"+ - "\u0005,\u0000\u0000\u0102\u010c\u0005A\u0000\u0000\u0103\u0108\u0003\n"+ - "\u0005\u0000\u0104\u0105\u0005&\u0000\u0000\u0105\u0107\u0003\n\u0005"+ - "\u0000\u0106\u0104\u0001\u0000\u0000\u0000\u0107\u010a\u0001\u0000\u0000"+ - "\u0000\u0108\u0106\u0001\u0000\u0000\u0000\u0108\u0109\u0001\u0000\u0000"+ - "\u0000\u0109\u010c\u0001\u0000\u0000\u0000\u010a\u0108\u0001\u0000\u0000"+ - "\u0000\u010b\u0102\u0001\u0000\u0000\u0000\u010b\u0103\u0001\u0000\u0000"+ - "\u0000\u010b\u010c\u0001\u0000\u0000\u0000\u010c\u010d\u0001\u0000\u0000"+ - "\u0000\u010d\u010e\u00056\u0000\u0000\u010e\u0015\u0001\u0000\u0000\u0000"+ - "\u010f\u0110\u0003:\u001d\u0000\u0110\u0017\u0001\u0000\u0000\u0000\u0111"+ - "\u0112\u0005\u0010\u0000\u0000\u0112\u0113\u0003\u001a\r\u0000\u0113\u0019"+ - "\u0001\u0000\u0000\u0000\u0114\u0119\u0003\u001c\u000e\u0000\u0115\u0116"+ - "\u0005&\u0000\u0000\u0116\u0118\u0003\u001c\u000e\u0000\u0117\u0115\u0001"+ - "\u0000\u0000\u0000\u0118\u011b\u0001\u0000\u0000\u0000\u0119\u0117\u0001"+ - "\u0000\u0000\u0000\u0119\u011a\u0001\u0000\u0000\u0000\u011a\u001b\u0001"+ - "\u0000\u0000\u0000\u011b\u0119\u0001\u0000\u0000\u0000\u011c\u0122\u0003"+ - "\n\u0005\u0000\u011d\u011e\u00034\u001a\u0000\u011e\u011f\u0005$\u0000"+ - "\u0000\u011f\u0120\u0003\n\u0005\u0000\u0120\u0122\u0001\u0000\u0000\u0000"+ - "\u0121\u011c\u0001\u0000\u0000\u0000\u0121\u011d\u0001\u0000\u0000\u0000"+ - "\u0122\u001d\u0001\u0000\u0000\u0000\u0123\u0124\u0005\u0006\u0000\u0000"+ - "\u0124\u0129\u0003 \u0010\u0000\u0125\u0126\u0005&\u0000\u0000\u0126\u0128"+ - "\u0003 \u0010\u0000\u0127\u0125\u0001\u0000\u0000\u0000\u0128\u012b\u0001"+ - "\u0000\u0000\u0000\u0129\u0127\u0001\u0000\u0000\u0000\u0129\u012a\u0001"+ - "\u0000\u0000\u0000\u012a\u012d\u0001\u0000\u0000\u0000\u012b\u0129\u0001"+ - "\u0000\u0000\u0000\u012c\u012e\u0003&\u0013\u0000\u012d\u012c\u0001\u0000"+ - 
"\u0000\u0000\u012d\u012e\u0001\u0000\u0000\u0000\u012e\u001f\u0001\u0000"+ - "\u0000\u0000\u012f\u0130\u0003\"\u0011\u0000\u0130\u0131\u0005r\u0000"+ - "\u0000\u0131\u0132\u0003$\u0012\u0000\u0132\u0135\u0001\u0000\u0000\u0000"+ - "\u0133\u0135\u0003$\u0012\u0000\u0134\u012f\u0001\u0000\u0000\u0000\u0134"+ - "\u0133\u0001\u0000\u0000\u0000\u0135!\u0001\u0000\u0000\u0000\u0136\u0137"+ - "\u0005\u0019\u0000\u0000\u0137#\u0001\u0000\u0000\u0000\u0138\u0139\u0007"+ - "\u0002\u0000\u0000\u0139%\u0001\u0000\u0000\u0000\u013a\u013d\u0003(\u0014"+ - "\u0000\u013b\u013d\u0003*\u0015\u0000\u013c\u013a\u0001\u0000\u0000\u0000"+ - "\u013c\u013b\u0001\u0000\u0000\u0000\u013d\'\u0001\u0000\u0000\u0000\u013e"+ - "\u013f\u0005L\u0000\u0000\u013f\u0144\u0005\u0019\u0000\u0000\u0140\u0141"+ - "\u0005&\u0000\u0000\u0141\u0143\u0005\u0019\u0000\u0000\u0142\u0140\u0001"+ - "\u0000\u0000\u0000\u0143\u0146\u0001\u0000\u0000\u0000\u0144\u0142\u0001"+ - "\u0000\u0000\u0000\u0144\u0145\u0001\u0000\u0000\u0000\u0145)\u0001\u0000"+ - "\u0000\u0000\u0146\u0144\u0001\u0000\u0000\u0000\u0147\u0148\u0005E\u0000"+ - "\u0000\u0148\u0149\u0003(\u0014\u0000\u0149\u014a\u0005F\u0000\u0000\u014a"+ - "+\u0001\u0000\u0000\u0000\u014b\u014c\u0005\r\u0000\u0000\u014c\u0151"+ - "\u0003 \u0010\u0000\u014d\u014e\u0005&\u0000\u0000\u014e\u0150\u0003 "+ - "\u0010\u0000\u014f\u014d\u0001\u0000\u0000\u0000\u0150\u0153\u0001\u0000"+ - "\u0000\u0000\u0151\u014f\u0001\u0000\u0000\u0000\u0151\u0152\u0001\u0000"+ - "\u0000\u0000\u0152\u0155\u0001\u0000\u0000\u0000\u0153\u0151\u0001\u0000"+ - "\u0000\u0000\u0154\u0156\u0003\u001a\r\u0000\u0155\u0154\u0001\u0000\u0000"+ - "\u0000\u0155\u0156\u0001\u0000\u0000\u0000\u0156\u0159\u0001\u0000\u0000"+ - "\u0000\u0157\u0158\u0005!\u0000\u0000\u0158\u015a\u0003\u001a\r\u0000"+ - "\u0159\u0157\u0001\u0000\u0000\u0000\u0159\u015a\u0001\u0000\u0000\u0000"+ - "\u015a-\u0001\u0000\u0000\u0000\u015b\u015c\u0005\u0004\u0000\u0000\u015c"+ - "\u015d\u0003\u001a\r\u0000\u015d/\u0001\u0000\u0000\u0000\u015e\u0160"+ - "\u0005\u0013\u0000\u0000\u015f\u0161\u0003\u001a\r\u0000\u0160\u015f\u0001"+ - "\u0000\u0000\u0000\u0160\u0161\u0001\u0000\u0000\u0000\u0161\u0164\u0001"+ - "\u0000\u0000\u0000\u0162\u0163\u0005!\u0000\u0000\u0163\u0165\u0003\u001a"+ - "\r\u0000\u0164\u0162\u0001\u0000\u0000\u0000\u0164\u0165\u0001\u0000\u0000"+ - "\u0000\u01651\u0001\u0000\u0000\u0000\u0166\u0167\u0005\b\u0000\u0000"+ - "\u0167\u016a\u0003\u001a\r\u0000\u0168\u0169\u0005!\u0000\u0000\u0169"+ - "\u016b\u0003\u001a\r\u0000\u016a\u0168\u0001\u0000\u0000\u0000\u016a\u016b"+ - "\u0001\u0000\u0000\u0000\u016b3\u0001\u0000\u0000\u0000\u016c\u0171\u0003"+ - ":\u001d\u0000\u016d\u016e\u0005(\u0000\u0000\u016e\u0170\u0003:\u001d"+ - "\u0000\u016f\u016d\u0001\u0000\u0000\u0000\u0170\u0173\u0001\u0000\u0000"+ - "\u0000\u0171\u016f\u0001\u0000\u0000\u0000\u0171\u0172\u0001\u0000\u0000"+ - "\u0000\u01725\u0001\u0000\u0000\u0000\u0173\u0171\u0001\u0000\u0000\u0000"+ - "\u0174\u0179\u0003<\u001e\u0000\u0175\u0176\u0005(\u0000\u0000\u0176\u0178"+ - "\u0003<\u001e\u0000\u0177\u0175\u0001\u0000\u0000\u0000\u0178\u017b\u0001"+ - "\u0000\u0000\u0000\u0179\u0177\u0001\u0000\u0000\u0000\u0179\u017a\u0001"+ - "\u0000\u0000\u0000\u017a7\u0001\u0000\u0000\u0000\u017b\u0179\u0001\u0000"+ - "\u0000\u0000\u017c\u0181\u00036\u001b\u0000\u017d\u017e\u0005&\u0000\u0000"+ - "\u017e\u0180\u00036\u001b\u0000\u017f\u017d\u0001\u0000\u0000\u0000\u0180"+ - "\u0183\u0001\u0000\u0000\u0000\u0181\u017f\u0001\u0000\u0000\u0000\u0181"+ - 
"\u0182\u0001\u0000\u0000\u0000\u01829\u0001\u0000\u0000\u0000\u0183\u0181"+ - "\u0001\u0000\u0000\u0000\u0184\u0185\u0007\u0003\u0000\u0000\u0185;\u0001"+ - "\u0000\u0000\u0000\u0186\u0187\u0005P\u0000\u0000\u0187=\u0001\u0000\u0000"+ - "\u0000\u0188\u01b3\u00051\u0000\u0000\u0189\u018a\u0003`0\u0000\u018a"+ - "\u018b\u0005G\u0000\u0000\u018b\u01b3\u0001\u0000\u0000\u0000\u018c\u01b3"+ - "\u0003^/\u0000\u018d\u01b3\u0003`0\u0000\u018e\u01b3\u0003Z-\u0000\u018f"+ - "\u01b3\u0003@ \u0000\u0190\u01b3\u0003b1\u0000\u0191\u0192\u0005E\u0000"+ - "\u0000\u0192\u0197\u0003\\.\u0000\u0193\u0194\u0005&\u0000\u0000\u0194"+ - "\u0196\u0003\\.\u0000\u0195\u0193\u0001\u0000\u0000\u0000\u0196\u0199"+ - "\u0001\u0000\u0000\u0000\u0197\u0195\u0001\u0000\u0000\u0000\u0197\u0198"+ - "\u0001\u0000\u0000\u0000\u0198\u019a\u0001\u0000\u0000\u0000\u0199\u0197"+ - "\u0001\u0000\u0000\u0000\u019a\u019b\u0005F\u0000\u0000\u019b\u01b3\u0001"+ - "\u0000\u0000\u0000\u019c\u019d\u0005E\u0000\u0000\u019d\u01a2\u0003Z-"+ - "\u0000\u019e\u019f\u0005&\u0000\u0000\u019f\u01a1\u0003Z-\u0000\u01a0"+ - "\u019e\u0001\u0000\u0000\u0000\u01a1\u01a4\u0001\u0000\u0000\u0000\u01a2"+ - "\u01a0\u0001\u0000\u0000\u0000\u01a2\u01a3\u0001\u0000\u0000\u0000\u01a3"+ - "\u01a5\u0001\u0000\u0000\u0000\u01a4\u01a2\u0001\u0000\u0000\u0000\u01a5"+ - "\u01a6\u0005F\u0000\u0000\u01a6\u01b3\u0001\u0000\u0000\u0000\u01a7\u01a8"+ - "\u0005E\u0000\u0000\u01a8\u01ad\u0003b1\u0000\u01a9\u01aa\u0005&\u0000"+ - "\u0000\u01aa\u01ac\u0003b1\u0000\u01ab\u01a9\u0001\u0000\u0000\u0000\u01ac"+ - "\u01af\u0001\u0000\u0000\u0000\u01ad\u01ab\u0001\u0000\u0000\u0000\u01ad"+ - "\u01ae\u0001\u0000\u0000\u0000\u01ae\u01b0\u0001\u0000\u0000\u0000\u01af"+ - "\u01ad\u0001\u0000\u0000\u0000\u01b0\u01b1\u0005F\u0000\u0000\u01b1\u01b3"+ - "\u0001\u0000\u0000\u0000\u01b2\u0188\u0001\u0000\u0000\u0000\u01b2\u0189"+ - "\u0001\u0000\u0000\u0000\u01b2\u018c\u0001\u0000\u0000\u0000\u01b2\u018d"+ - "\u0001\u0000\u0000\u0000\u01b2\u018e\u0001\u0000\u0000\u0000\u01b2\u018f"+ - "\u0001\u0000\u0000\u0000\u01b2\u0190\u0001\u0000\u0000\u0000\u01b2\u0191"+ - "\u0001\u0000\u0000\u0000\u01b2\u019c\u0001\u0000\u0000\u0000\u01b2\u01a7"+ - "\u0001\u0000\u0000\u0000\u01b3?\u0001\u0000\u0000\u0000\u01b4\u01b7\u0005"+ - "4\u0000\u0000\u01b5\u01b7\u0005D\u0000\u0000\u01b6\u01b4\u0001\u0000\u0000"+ - "\u0000\u01b6\u01b5\u0001\u0000\u0000\u0000\u01b7A\u0001\u0000\u0000\u0000"+ - "\u01b8\u01b9\u0005\n\u0000\u0000\u01b9\u01ba\u0005\u001f\u0000\u0000\u01ba"+ - "C\u0001\u0000\u0000\u0000\u01bb\u01bc\u0005\u0012\u0000\u0000\u01bc\u01c1"+ - "\u0003F#\u0000\u01bd\u01be\u0005&\u0000\u0000\u01be\u01c0\u0003F#\u0000"+ - "\u01bf\u01bd\u0001\u0000\u0000\u0000\u01c0\u01c3\u0001\u0000\u0000\u0000"+ - "\u01c1\u01bf\u0001\u0000\u0000\u0000\u01c1\u01c2\u0001\u0000\u0000\u0000"+ - "\u01c2E\u0001\u0000\u0000\u0000\u01c3\u01c1\u0001\u0000\u0000\u0000\u01c4"+ - "\u01c6\u0003\n\u0005\u0000\u01c5\u01c7\u0007\u0004\u0000\u0000\u01c6\u01c5"+ - "\u0001\u0000\u0000\u0000\u01c6\u01c7\u0001\u0000\u0000\u0000\u01c7\u01ca"+ - "\u0001\u0000\u0000\u0000\u01c8\u01c9\u00052\u0000\u0000\u01c9\u01cb\u0007"+ - "\u0005\u0000\u0000\u01ca\u01c8\u0001\u0000\u0000\u0000\u01ca\u01cb\u0001"+ - "\u0000\u0000\u0000\u01cbG\u0001\u0000\u0000\u0000\u01cc\u01cd\u0005\t"+ - "\u0000\u0000\u01cd\u01ce\u00038\u001c\u0000\u01ceI\u0001\u0000\u0000\u0000"+ - "\u01cf\u01d0\u0005\u0002\u0000\u0000\u01d0\u01d1\u00038\u001c\u0000\u01d1"+ - "K\u0001\u0000\u0000\u0000\u01d2\u01d3\u0005\u000f\u0000\u0000\u01d3\u01d8"+ - 
"\u0003N\'\u0000\u01d4\u01d5\u0005&\u0000\u0000\u01d5\u01d7\u0003N\'\u0000"+ - "\u01d6\u01d4\u0001\u0000\u0000\u0000\u01d7\u01da\u0001\u0000\u0000\u0000"+ - "\u01d8\u01d6\u0001\u0000\u0000\u0000\u01d8\u01d9\u0001\u0000\u0000\u0000"+ - "\u01d9M\u0001\u0000\u0000\u0000\u01da\u01d8\u0001\u0000\u0000\u0000\u01db"+ - "\u01dc\u00036\u001b\u0000\u01dc\u01dd\u0005T\u0000\u0000\u01dd\u01de\u0003"+ - "6\u001b\u0000\u01deO\u0001\u0000\u0000\u0000\u01df\u01e0\u0005\u0001\u0000"+ - "\u0000\u01e0\u01e1\u0003\u0012\t\u0000\u01e1\u01e3\u0003b1\u0000\u01e2"+ - "\u01e4\u0003V+\u0000\u01e3\u01e2\u0001\u0000\u0000\u0000\u01e3\u01e4\u0001"+ - "\u0000\u0000\u0000\u01e4Q\u0001\u0000\u0000\u0000\u01e5\u01e6\u0005\u0007"+ - "\u0000\u0000\u01e6\u01e7\u0003\u0012\t\u0000\u01e7\u01e8\u0003b1\u0000"+ - "\u01e8S\u0001\u0000\u0000\u0000\u01e9\u01ea\u0005\u000e\u0000\u0000\u01ea"+ - "\u01eb\u00034\u001a\u0000\u01ebU\u0001\u0000\u0000\u0000\u01ec\u01f1\u0003"+ - "X,\u0000\u01ed\u01ee\u0005&\u0000\u0000\u01ee\u01f0\u0003X,\u0000\u01ef"+ - "\u01ed\u0001\u0000\u0000\u0000\u01f0\u01f3\u0001\u0000\u0000\u0000\u01f1"+ - "\u01ef\u0001\u0000\u0000\u0000\u01f1\u01f2\u0001\u0000\u0000\u0000\u01f2"+ - "W\u0001\u0000\u0000\u0000\u01f3\u01f1\u0001\u0000\u0000\u0000\u01f4\u01f5"+ - "\u0003:\u001d\u0000\u01f5\u01f6\u0005$\u0000\u0000\u01f6\u01f7\u0003>"+ - "\u001f\u0000\u01f7Y\u0001\u0000\u0000\u0000\u01f8\u01f9\u0007\u0006\u0000"+ - "\u0000\u01f9[\u0001\u0000\u0000\u0000\u01fa\u01fd\u0003^/\u0000\u01fb"+ - "\u01fd\u0003`0\u0000\u01fc\u01fa\u0001\u0000\u0000\u0000\u01fc\u01fb\u0001"+ - "\u0000\u0000\u0000\u01fd]\u0001\u0000\u0000\u0000\u01fe\u0200\u0007\u0000"+ - "\u0000\u0000\u01ff\u01fe\u0001\u0000\u0000\u0000\u01ff\u0200\u0001\u0000"+ - "\u0000\u0000\u0200\u0201\u0001\u0000\u0000\u0000\u0201\u0202\u0005 \u0000"+ - "\u0000\u0202_\u0001\u0000\u0000\u0000\u0203\u0205\u0007\u0000\u0000\u0000"+ - "\u0204\u0203\u0001\u0000\u0000\u0000\u0204\u0205\u0001\u0000\u0000\u0000"+ - "\u0205\u0206\u0001\u0000\u0000\u0000\u0206\u0207\u0005\u001f\u0000\u0000"+ - "\u0207a\u0001\u0000\u0000\u0000\u0208\u0209\u0005\u001e\u0000\u0000\u0209"+ - "c\u0001\u0000\u0000\u0000\u020a\u020b\u0007\u0007\u0000\u0000\u020be\u0001"+ - "\u0000\u0000\u0000\u020c\u020d\u0005\u0005\u0000\u0000\u020d\u020e\u0003"+ - "h4\u0000\u020eg\u0001\u0000\u0000\u0000\u020f\u0210\u0005E\u0000\u0000"+ - "\u0210\u0211\u0003\u0002\u0001\u0000\u0211\u0212\u0005F\u0000\u0000\u0212"+ - "i\u0001\u0000\u0000\u0000\u0213\u0214\u0005\u0011\u0000\u0000\u0214\u0215"+ - "\u0005j\u0000\u0000\u0215k\u0001\u0000\u0000\u0000\u0216\u0217\u0005\f"+ - "\u0000\u0000\u0217\u0218\u0005n\u0000\u0000\u0218m\u0001\u0000\u0000\u0000"+ - "\u0219\u021a\u0005\u0003\u0000\u0000\u021a\u021d\u0005Z\u0000\u0000\u021b"+ - "\u021c\u0005X\u0000\u0000\u021c\u021e\u00036\u001b\u0000\u021d\u021b\u0001"+ - "\u0000\u0000\u0000\u021d\u021e\u0001\u0000\u0000\u0000\u021e\u0228\u0001"+ - "\u0000\u0000\u0000\u021f\u0220\u0005Y\u0000\u0000\u0220\u0225\u0003p8"+ - "\u0000\u0221\u0222\u0005&\u0000\u0000\u0222\u0224\u0003p8\u0000\u0223"+ - "\u0221\u0001\u0000\u0000\u0000\u0224\u0227\u0001\u0000\u0000\u0000\u0225"+ - "\u0223\u0001\u0000\u0000\u0000\u0225\u0226\u0001\u0000\u0000\u0000\u0226"+ - "\u0229\u0001\u0000\u0000\u0000\u0227\u0225\u0001\u0000\u0000\u0000\u0228"+ - "\u021f\u0001\u0000\u0000\u0000\u0228\u0229\u0001\u0000\u0000\u0000\u0229"+ - "o\u0001\u0000\u0000\u0000\u022a\u022b\u00036\u001b\u0000\u022b\u022c\u0005"+ - "$\u0000\u0000\u022c\u022e\u0001\u0000\u0000\u0000\u022d\u022a\u0001\u0000"+ - 
"\u0000\u0000\u022d\u022e\u0001\u0000\u0000\u0000\u022e\u022f\u0001\u0000"+ - "\u0000\u0000\u022f\u0230\u00036\u001b\u0000\u0230q\u0001\u0000\u0000\u0000"+ - "\u0231\u0232\u0005\u000b\u0000\u0000\u0232\u0233\u0003 \u0010\u0000\u0233"+ - "\u0234\u0005X\u0000\u0000\u0234\u0235\u00038\u001c\u0000\u0235s\u0001"+ - "\u0000\u0000\u00006\u007f\u0088\u0098\u00a4\u00ad\u00b5\u00b9\u00c1\u00c3"+ - "\u00c8\u00cf\u00d4\u00db\u00e1\u00e9\u00eb\u00f6\u00fd\u0108\u010b\u0119"+ - "\u0121\u0129\u012d\u0134\u013c\u0144\u0151\u0155\u0159\u0160\u0164\u016a"+ - "\u0171\u0179\u0181\u0197\u01a2\u01ad\u01b2\u01b6\u01c1\u01c6\u01ca\u01d8"+ - "\u01e3\u01f1\u01fc\u01ff\u0204\u021d\u0225\u0228\u022d"; + "^`bdfhjlnprt\u0000\b\u0001\u0000@A\u0001\u0000BD\u0002\u0000\u0019\u0019"+ + "\u001e\u001e\u0001\u0000HI\u0002\u0000##\'\'\u0002\u0000**--\u0002\u0000"+ + "))88\u0002\u000099;?\u0257\u0000v\u0001\u0000\u0000\u0000\u0002y\u0001"+ + "\u0000\u0000\u0000\u0004\u008a\u0001\u0000\u0000\u0000\u0006\u009a\u0001"+ + "\u0000\u0000\u0000\b\u009c\u0001\u0000\u0000\u0000\n\u00bc\u0001\u0000"+ + "\u0000\u0000\f\u00d7\u0001\u0000\u0000\u0000\u000e\u00d9\u0001\u0000\u0000"+ + "\u0000\u0010\u00e2\u0001\u0000\u0000\u0000\u0012\u00e8\u0001\u0000\u0000"+ + "\u0000\u0014\u00fd\u0001\u0000\u0000\u0000\u0016\u0107\u0001\u0000\u0000"+ + "\u0000\u0018\u0116\u0001\u0000\u0000\u0000\u001a\u0118\u0001\u0000\u0000"+ + "\u0000\u001c\u011b\u0001\u0000\u0000\u0000\u001e\u0128\u0001\u0000\u0000"+ + "\u0000 \u012a\u0001\u0000\u0000\u0000\"\u013b\u0001\u0000\u0000\u0000"+ + "$\u013d\u0001\u0000\u0000\u0000&\u013f\u0001\u0000\u0000\u0000(\u0143"+ + "\u0001\u0000\u0000\u0000*\u0145\u0001\u0000\u0000\u0000,\u014e\u0001\u0000"+ + "\u0000\u0000.\u0152\u0001\u0000\u0000\u00000\u0162\u0001\u0000\u0000\u0000"+ + "2\u0165\u0001\u0000\u0000\u00004\u016d\u0001\u0000\u0000\u00006\u0173"+ + "\u0001\u0000\u0000\u00008\u017b\u0001\u0000\u0000\u0000:\u0183\u0001\u0000"+ + "\u0000\u0000<\u018b\u0001\u0000\u0000\u0000>\u018d\u0001\u0000\u0000\u0000"+ + "@\u01b9\u0001\u0000\u0000\u0000B\u01bd\u0001\u0000\u0000\u0000D\u01bf"+ + "\u0001\u0000\u0000\u0000F\u01c2\u0001\u0000\u0000\u0000H\u01cb\u0001\u0000"+ + "\u0000\u0000J\u01d3\u0001\u0000\u0000\u0000L\u01d6\u0001\u0000\u0000\u0000"+ + "N\u01d9\u0001\u0000\u0000\u0000P\u01e2\u0001\u0000\u0000\u0000R\u01e6"+ + "\u0001\u0000\u0000\u0000T\u01ec\u0001\u0000\u0000\u0000V\u01f0\u0001\u0000"+ + "\u0000\u0000X\u01f3\u0001\u0000\u0000\u0000Z\u01fb\u0001\u0000\u0000\u0000"+ + "\\\u01ff\u0001\u0000\u0000\u0000^\u0203\u0001\u0000\u0000\u0000`\u0206"+ + "\u0001\u0000\u0000\u0000b\u020b\u0001\u0000\u0000\u0000d\u020f\u0001\u0000"+ + "\u0000\u0000f\u0211\u0001\u0000\u0000\u0000h\u0213\u0001\u0000\u0000\u0000"+ + "j\u0216\u0001\u0000\u0000\u0000l\u021a\u0001\u0000\u0000\u0000n\u021d"+ + "\u0001\u0000\u0000\u0000p\u0220\u0001\u0000\u0000\u0000r\u0234\u0001\u0000"+ + "\u0000\u0000t\u0238\u0001\u0000\u0000\u0000vw\u0003\u0002\u0001\u0000"+ + "wx\u0005\u0000\u0000\u0001x\u0001\u0001\u0000\u0000\u0000yz\u0006\u0001"+ + "\uffff\uffff\u0000z{\u0003\u0004\u0002\u0000{\u0081\u0001\u0000\u0000"+ + "\u0000|}\n\u0001\u0000\u0000}~\u0005\u001d\u0000\u0000~\u0080\u0003\u0006"+ + "\u0003\u0000\u007f|\u0001\u0000\u0000\u0000\u0080\u0083\u0001\u0000\u0000"+ + "\u0000\u0081\u007f\u0001\u0000\u0000\u0000\u0081\u0082\u0001\u0000\u0000"+ + "\u0000\u0082\u0003\u0001\u0000\u0000\u0000\u0083\u0081\u0001\u0000\u0000"+ + "\u0000\u0084\u008b\u0003h4\u0000\u0085\u008b\u0003 \u0010\u0000\u0086"+ + 
"\u008b\u0003\u001a\r\u0000\u0087\u008b\u0003.\u0017\u0000\u0088\u008b"+ + "\u0003l6\u0000\u0089\u008b\u0003n7\u0000\u008a\u0084\u0001\u0000\u0000"+ + "\u0000\u008a\u0085\u0001\u0000\u0000\u0000\u008a\u0086\u0001\u0000\u0000"+ + "\u0000\u008a\u0087\u0001\u0000\u0000\u0000\u008a\u0088\u0001\u0000\u0000"+ + "\u0000\u008a\u0089\u0001\u0000\u0000\u0000\u008b\u0005\u0001\u0000\u0000"+ + "\u0000\u008c\u009b\u00030\u0018\u0000\u008d\u009b\u00034\u001a\u0000\u008e"+ + "\u009b\u0003D\"\u0000\u008f\u009b\u0003t:\u0000\u0090\u009b\u0003J%\u0000"+ + "\u0091\u009b\u0003F#\u0000\u0092\u009b\u00032\u0019\u0000\u0093\u009b"+ + "\u0003\b\u0004\u0000\u0094\u009b\u0003L&\u0000\u0095\u009b\u0003N\'\u0000"+ + "\u0096\u009b\u0003R)\u0000\u0097\u009b\u0003T*\u0000\u0098\u009b\u0003"+ + "p8\u0000\u0099\u009b\u0003V+\u0000\u009a\u008c\u0001\u0000\u0000\u0000"+ + "\u009a\u008d\u0001\u0000\u0000\u0000\u009a\u008e\u0001\u0000\u0000\u0000"+ + "\u009a\u008f\u0001\u0000\u0000\u0000\u009a\u0090\u0001\u0000\u0000\u0000"+ + "\u009a\u0091\u0001\u0000\u0000\u0000\u009a\u0092\u0001\u0000\u0000\u0000"+ + "\u009a\u0093\u0001\u0000\u0000\u0000\u009a\u0094\u0001\u0000\u0000\u0000"+ + "\u009a\u0095\u0001\u0000\u0000\u0000\u009a\u0096\u0001\u0000\u0000\u0000"+ + "\u009a\u0097\u0001\u0000\u0000\u0000\u009a\u0098\u0001\u0000\u0000\u0000"+ + "\u009a\u0099\u0001\u0000\u0000\u0000\u009b\u0007\u0001\u0000\u0000\u0000"+ + "\u009c\u009d\u0005\u0014\u0000\u0000\u009d\u009e\u0003\n\u0005\u0000\u009e"+ + "\t\u0001\u0000\u0000\u0000\u009f\u00a0\u0006\u0005\uffff\uffff\u0000\u00a0"+ + "\u00a1\u00051\u0000\u0000\u00a1\u00bd\u0003\n\u0005\b\u00a2\u00bd\u0003"+ + "\u0010\b\u0000\u00a3\u00bd\u0003\f\u0006\u0000\u00a4\u00bd\u0003\u000e"+ + "\u0007\u0000\u00a5\u00a7\u0003\u0010\b\u0000\u00a6\u00a8\u00051\u0000"+ + "\u0000\u00a7\u00a6\u0001\u0000\u0000\u0000\u00a7\u00a8\u0001\u0000\u0000"+ + "\u0000\u00a8\u00a9\u0001\u0000\u0000\u0000\u00a9\u00aa\u0005+\u0000\u0000"+ + "\u00aa\u00ab\u0005/\u0000\u0000\u00ab\u00b0\u0003\u0010\b\u0000\u00ac"+ + "\u00ad\u0005&\u0000\u0000\u00ad\u00af\u0003\u0010\b\u0000\u00ae\u00ac"+ + "\u0001\u0000\u0000\u0000\u00af\u00b2\u0001\u0000\u0000\u0000\u00b0\u00ae"+ + "\u0001\u0000\u0000\u0000\u00b0\u00b1\u0001\u0000\u0000\u0000\u00b1\u00b3"+ + "\u0001\u0000\u0000\u0000\u00b2\u00b0\u0001\u0000\u0000\u0000\u00b3\u00b4"+ + "\u00057\u0000\u0000\u00b4\u00bd\u0001\u0000\u0000\u0000\u00b5\u00b6\u0003"+ + "\u0010\b\u0000\u00b6\u00b8\u0005,\u0000\u0000\u00b7\u00b9\u00051\u0000"+ + "\u0000\u00b8\u00b7\u0001\u0000\u0000\u0000\u00b8\u00b9\u0001\u0000\u0000"+ + "\u0000\u00b9\u00ba\u0001\u0000\u0000\u0000\u00ba\u00bb\u00052\u0000\u0000"+ + "\u00bb\u00bd\u0001\u0000\u0000\u0000\u00bc\u009f\u0001\u0000\u0000\u0000"+ + "\u00bc\u00a2\u0001\u0000\u0000\u0000\u00bc\u00a3\u0001\u0000\u0000\u0000"+ + "\u00bc\u00a4\u0001\u0000\u0000\u0000\u00bc\u00a5\u0001\u0000\u0000\u0000"+ + "\u00bc\u00b5\u0001\u0000\u0000\u0000\u00bd\u00c6\u0001\u0000\u0000\u0000"+ + "\u00be\u00bf\n\u0004\u0000\u0000\u00bf\u00c0\u0005\"\u0000\u0000\u00c0"+ + "\u00c5\u0003\n\u0005\u0005\u00c1\u00c2\n\u0003\u0000\u0000\u00c2\u00c3"+ + "\u00054\u0000\u0000\u00c3\u00c5\u0003\n\u0005\u0004\u00c4\u00be\u0001"+ + "\u0000\u0000\u0000\u00c4\u00c1\u0001\u0000\u0000\u0000\u00c5\u00c8\u0001"+ + "\u0000\u0000\u0000\u00c6\u00c4\u0001\u0000\u0000\u0000\u00c6\u00c7\u0001"+ + "\u0000\u0000\u0000\u00c7\u000b\u0001\u0000\u0000\u0000\u00c8\u00c6\u0001"+ + "\u0000\u0000\u0000\u00c9\u00cb\u0003\u0010\b\u0000\u00ca\u00cc\u00051"+ + 
"\u0000\u0000\u00cb\u00ca\u0001\u0000\u0000\u0000\u00cb\u00cc\u0001\u0000"+ + "\u0000\u0000\u00cc\u00cd\u0001\u0000\u0000\u0000\u00cd\u00ce\u0005.\u0000"+ + "\u0000\u00ce\u00cf\u0003d2\u0000\u00cf\u00d8\u0001\u0000\u0000\u0000\u00d0"+ + "\u00d2\u0003\u0010\b\u0000\u00d1\u00d3\u00051\u0000\u0000\u00d2\u00d1"+ + "\u0001\u0000\u0000\u0000\u00d2\u00d3\u0001\u0000\u0000\u0000\u00d3\u00d4"+ + "\u0001\u0000\u0000\u0000\u00d4\u00d5\u00056\u0000\u0000\u00d5\u00d6\u0003"+ + "d2\u0000\u00d6\u00d8\u0001\u0000\u0000\u0000\u00d7\u00c9\u0001\u0000\u0000"+ + "\u0000\u00d7\u00d0\u0001\u0000\u0000\u0000\u00d8\r\u0001\u0000\u0000\u0000"+ + "\u00d9\u00da\u00036\u001b\u0000\u00da\u00db\u00050\u0000\u0000\u00db\u00dc"+ + "\u0003d2\u0000\u00dc\u000f\u0001\u0000\u0000\u0000\u00dd\u00e3\u0003\u0012"+ + "\t\u0000\u00de\u00df\u0003\u0012\t\u0000\u00df\u00e0\u0003f3\u0000\u00e0"+ + "\u00e1\u0003\u0012\t\u0000\u00e1\u00e3\u0001\u0000\u0000\u0000\u00e2\u00dd"+ + "\u0001\u0000\u0000\u0000\u00e2\u00de\u0001\u0000\u0000\u0000\u00e3\u0011"+ + "\u0001\u0000\u0000\u0000\u00e4\u00e5\u0006\t\uffff\uffff\u0000\u00e5\u00e9"+ + "\u0003\u0014\n\u0000\u00e6\u00e7\u0007\u0000\u0000\u0000\u00e7\u00e9\u0003"+ + "\u0012\t\u0003\u00e8\u00e4\u0001\u0000\u0000\u0000\u00e8\u00e6\u0001\u0000"+ + "\u0000\u0000\u00e9\u00f2\u0001\u0000\u0000\u0000\u00ea\u00eb\n\u0002\u0000"+ + "\u0000\u00eb\u00ec\u0007\u0001\u0000\u0000\u00ec\u00f1\u0003\u0012\t\u0003"+ + "\u00ed\u00ee\n\u0001\u0000\u0000\u00ee\u00ef\u0007\u0000\u0000\u0000\u00ef"+ + "\u00f1\u0003\u0012\t\u0002\u00f0\u00ea\u0001\u0000\u0000\u0000\u00f0\u00ed"+ + "\u0001\u0000\u0000\u0000\u00f1\u00f4\u0001\u0000\u0000\u0000\u00f2\u00f0"+ + "\u0001\u0000\u0000\u0000\u00f2\u00f3\u0001\u0000\u0000\u0000\u00f3\u0013"+ + "\u0001\u0000\u0000\u0000\u00f4\u00f2\u0001\u0000\u0000\u0000\u00f5\u00f6"+ + "\u0006\n\uffff\uffff\u0000\u00f6\u00fe\u0003@ \u0000\u00f7\u00fe\u0003"+ + "6\u001b\u0000\u00f8\u00fe\u0003\u0016\u000b\u0000\u00f9\u00fa\u0005/\u0000"+ + "\u0000\u00fa\u00fb\u0003\n\u0005\u0000\u00fb\u00fc\u00057\u0000\u0000"+ + "\u00fc\u00fe\u0001\u0000\u0000\u0000\u00fd\u00f5\u0001\u0000\u0000\u0000"+ + "\u00fd\u00f7\u0001\u0000\u0000\u0000\u00fd\u00f8\u0001\u0000\u0000\u0000"+ + "\u00fd\u00f9\u0001\u0000\u0000\u0000\u00fe\u0104\u0001\u0000\u0000\u0000"+ + "\u00ff\u0100\n\u0001\u0000\u0000\u0100\u0101\u0005%\u0000\u0000\u0101"+ + "\u0103\u0003\u0018\f\u0000\u0102\u00ff\u0001\u0000\u0000\u0000\u0103\u0106"+ + "\u0001\u0000\u0000\u0000\u0104\u0102\u0001\u0000\u0000\u0000\u0104\u0105"+ + "\u0001\u0000\u0000\u0000\u0105\u0015\u0001\u0000\u0000\u0000\u0106\u0104"+ + "\u0001\u0000\u0000\u0000\u0107\u0108\u0003<\u001e\u0000\u0108\u0112\u0005"+ + "/\u0000\u0000\u0109\u0113\u0005B\u0000\u0000\u010a\u010f\u0003\n\u0005"+ + "\u0000\u010b\u010c\u0005&\u0000\u0000\u010c\u010e\u0003\n\u0005\u0000"+ + "\u010d\u010b\u0001\u0000\u0000\u0000\u010e\u0111\u0001\u0000\u0000\u0000"+ + "\u010f\u010d\u0001\u0000\u0000\u0000\u010f\u0110\u0001\u0000\u0000\u0000"+ + "\u0110\u0113\u0001\u0000\u0000\u0000\u0111\u010f\u0001\u0000\u0000\u0000"+ + "\u0112\u0109\u0001\u0000\u0000\u0000\u0112\u010a\u0001\u0000\u0000\u0000"+ + "\u0112\u0113\u0001\u0000\u0000\u0000\u0113\u0114\u0001\u0000\u0000\u0000"+ + "\u0114\u0115\u00057\u0000\u0000\u0115\u0017\u0001\u0000\u0000\u0000\u0116"+ + "\u0117\u0003<\u001e\u0000\u0117\u0019\u0001\u0000\u0000\u0000\u0118\u0119"+ + "\u0005\u0010\u0000\u0000\u0119\u011a\u0003\u001c\u000e\u0000\u011a\u001b"+ + "\u0001\u0000\u0000\u0000\u011b\u0120\u0003\u001e\u000f\u0000\u011c\u011d"+ + 
"\u0005&\u0000\u0000\u011d\u011f\u0003\u001e\u000f\u0000\u011e\u011c\u0001"+ + "\u0000\u0000\u0000\u011f\u0122\u0001\u0000\u0000\u0000\u0120\u011e\u0001"+ + "\u0000\u0000\u0000\u0120\u0121\u0001\u0000\u0000\u0000\u0121\u001d\u0001"+ + "\u0000\u0000\u0000\u0122\u0120\u0001\u0000\u0000\u0000\u0123\u0129\u0003"+ + "\n\u0005\u0000\u0124\u0125\u00036\u001b\u0000\u0125\u0126\u0005$\u0000"+ + "\u0000\u0126\u0127\u0003\n\u0005\u0000\u0127\u0129\u0001\u0000\u0000\u0000"+ + "\u0128\u0123\u0001\u0000\u0000\u0000\u0128\u0124\u0001\u0000\u0000\u0000"+ + "\u0129\u001f\u0001\u0000\u0000\u0000\u012a\u012b\u0005\u0006\u0000\u0000"+ + "\u012b\u0130\u0003\"\u0011\u0000\u012c\u012d\u0005&\u0000\u0000\u012d"+ + "\u012f\u0003\"\u0011\u0000\u012e\u012c\u0001\u0000\u0000\u0000\u012f\u0132"+ + "\u0001\u0000\u0000\u0000\u0130\u012e\u0001\u0000\u0000\u0000\u0130\u0131"+ + "\u0001\u0000\u0000\u0000\u0131\u0134\u0001\u0000\u0000\u0000\u0132\u0130"+ + "\u0001\u0000\u0000\u0000\u0133\u0135\u0003(\u0014\u0000\u0134\u0133\u0001"+ + "\u0000\u0000\u0000\u0134\u0135\u0001\u0000\u0000\u0000\u0135!\u0001\u0000"+ + "\u0000\u0000\u0136\u0137\u0003$\u0012\u0000\u0137\u0138\u0005s\u0000\u0000"+ + "\u0138\u0139\u0003&\u0013\u0000\u0139\u013c\u0001\u0000\u0000\u0000\u013a"+ + "\u013c\u0003&\u0013\u0000\u013b\u0136\u0001\u0000\u0000\u0000\u013b\u013a"+ + "\u0001\u0000\u0000\u0000\u013c#\u0001\u0000\u0000\u0000\u013d\u013e\u0005"+ + "\u0019\u0000\u0000\u013e%\u0001\u0000\u0000\u0000\u013f\u0140\u0007\u0002"+ + "\u0000\u0000\u0140\'\u0001\u0000\u0000\u0000\u0141\u0144\u0003*\u0015"+ + "\u0000\u0142\u0144\u0003,\u0016\u0000\u0143\u0141\u0001\u0000\u0000\u0000"+ + "\u0143\u0142\u0001\u0000\u0000\u0000\u0144)\u0001\u0000\u0000\u0000\u0145"+ + "\u0146\u0005M\u0000\u0000\u0146\u014b\u0005\u0019\u0000\u0000\u0147\u0148"+ + "\u0005&\u0000\u0000\u0148\u014a\u0005\u0019\u0000\u0000\u0149\u0147\u0001"+ + "\u0000\u0000\u0000\u014a\u014d\u0001\u0000\u0000\u0000\u014b\u0149\u0001"+ + "\u0000\u0000\u0000\u014b\u014c\u0001\u0000\u0000\u0000\u014c+\u0001\u0000"+ + "\u0000\u0000\u014d\u014b\u0001\u0000\u0000\u0000\u014e\u014f\u0005F\u0000"+ + "\u0000\u014f\u0150\u0003*\u0015\u0000\u0150\u0151\u0005G\u0000\u0000\u0151"+ + "-\u0001\u0000\u0000\u0000\u0152\u0153\u0005\r\u0000\u0000\u0153\u0158"+ + "\u0003\"\u0011\u0000\u0154\u0155\u0005&\u0000\u0000\u0155\u0157\u0003"+ + "\"\u0011\u0000\u0156\u0154\u0001\u0000\u0000\u0000\u0157\u015a\u0001\u0000"+ + "\u0000\u0000\u0158\u0156\u0001\u0000\u0000\u0000\u0158\u0159\u0001\u0000"+ + "\u0000\u0000\u0159\u015c\u0001\u0000\u0000\u0000\u015a\u0158\u0001\u0000"+ + "\u0000\u0000\u015b\u015d\u0003\u001c\u000e\u0000\u015c\u015b\u0001\u0000"+ + "\u0000\u0000\u015c\u015d\u0001\u0000\u0000\u0000\u015d\u0160\u0001\u0000"+ + "\u0000\u0000\u015e\u015f\u0005!\u0000\u0000\u015f\u0161\u0003\u001c\u000e"+ + "\u0000\u0160\u015e\u0001\u0000\u0000\u0000\u0160\u0161\u0001\u0000\u0000"+ + "\u0000\u0161/\u0001\u0000\u0000\u0000\u0162\u0163\u0005\u0004\u0000\u0000"+ + "\u0163\u0164\u0003\u001c\u000e\u0000\u01641\u0001\u0000\u0000\u0000\u0165"+ + "\u0167\u0005\u0013\u0000\u0000\u0166\u0168\u0003\u001c\u000e\u0000\u0167"+ + "\u0166\u0001\u0000\u0000\u0000\u0167\u0168\u0001\u0000\u0000\u0000\u0168"+ + "\u016b\u0001\u0000\u0000\u0000\u0169\u016a\u0005!\u0000\u0000\u016a\u016c"+ + "\u0003\u001c\u000e\u0000\u016b\u0169\u0001\u0000\u0000\u0000\u016b\u016c"+ + "\u0001\u0000\u0000\u0000\u016c3\u0001\u0000\u0000\u0000\u016d\u016e\u0005"+ + "\b\u0000\u0000\u016e\u0171\u0003\u001c\u000e\u0000\u016f\u0170\u0005!"+ + 
"\u0000\u0000\u0170\u0172\u0003\u001c\u000e\u0000\u0171\u016f\u0001\u0000"+ + "\u0000\u0000\u0171\u0172\u0001\u0000\u0000\u0000\u01725\u0001\u0000\u0000"+ + "\u0000\u0173\u0178\u0003<\u001e\u0000\u0174\u0175\u0005(\u0000\u0000\u0175"+ + "\u0177\u0003<\u001e\u0000\u0176\u0174\u0001\u0000\u0000\u0000\u0177\u017a"+ + "\u0001\u0000\u0000\u0000\u0178\u0176\u0001\u0000\u0000\u0000\u0178\u0179"+ + "\u0001\u0000\u0000\u0000\u01797\u0001\u0000\u0000\u0000\u017a\u0178\u0001"+ + "\u0000\u0000\u0000\u017b\u0180\u0003>\u001f\u0000\u017c\u017d\u0005(\u0000"+ + "\u0000\u017d\u017f\u0003>\u001f\u0000\u017e\u017c\u0001\u0000\u0000\u0000"+ + "\u017f\u0182\u0001\u0000\u0000\u0000\u0180\u017e\u0001\u0000\u0000\u0000"+ + "\u0180\u0181\u0001\u0000\u0000\u0000\u01819\u0001\u0000\u0000\u0000\u0182"+ + "\u0180\u0001\u0000\u0000\u0000\u0183\u0188\u00038\u001c\u0000\u0184\u0185"+ + "\u0005&\u0000\u0000\u0185\u0187\u00038\u001c\u0000\u0186\u0184\u0001\u0000"+ + "\u0000\u0000\u0187\u018a\u0001\u0000\u0000\u0000\u0188\u0186\u0001\u0000"+ + "\u0000\u0000\u0188\u0189\u0001\u0000\u0000\u0000\u0189;\u0001\u0000\u0000"+ + "\u0000\u018a\u0188\u0001\u0000\u0000\u0000\u018b\u018c\u0007\u0003\u0000"+ + "\u0000\u018c=\u0001\u0000\u0000\u0000\u018d\u018e\u0005Q\u0000\u0000\u018e"+ + "?\u0001\u0000\u0000\u0000\u018f\u01ba\u00052\u0000\u0000\u0190\u0191\u0003"+ + "b1\u0000\u0191\u0192\u0005H\u0000\u0000\u0192\u01ba\u0001\u0000\u0000"+ + "\u0000\u0193\u01ba\u0003`0\u0000\u0194\u01ba\u0003b1\u0000\u0195\u01ba"+ + "\u0003\\.\u0000\u0196\u01ba\u0003B!\u0000\u0197\u01ba\u0003d2\u0000\u0198"+ + "\u0199\u0005F\u0000\u0000\u0199\u019e\u0003^/\u0000\u019a\u019b\u0005"+ + "&\u0000\u0000\u019b\u019d\u0003^/\u0000\u019c\u019a\u0001\u0000\u0000"+ + "\u0000\u019d\u01a0\u0001\u0000\u0000\u0000\u019e\u019c\u0001\u0000\u0000"+ + "\u0000\u019e\u019f\u0001\u0000\u0000\u0000\u019f\u01a1\u0001\u0000\u0000"+ + "\u0000\u01a0\u019e\u0001\u0000\u0000\u0000\u01a1\u01a2\u0005G\u0000\u0000"+ + "\u01a2\u01ba\u0001\u0000\u0000\u0000\u01a3\u01a4\u0005F\u0000\u0000\u01a4"+ + "\u01a9\u0003\\.\u0000\u01a5\u01a6\u0005&\u0000\u0000\u01a6\u01a8\u0003"+ + "\\.\u0000\u01a7\u01a5\u0001\u0000\u0000\u0000\u01a8\u01ab\u0001\u0000"+ + "\u0000\u0000\u01a9\u01a7\u0001\u0000\u0000\u0000\u01a9\u01aa\u0001\u0000"+ + "\u0000\u0000\u01aa\u01ac\u0001\u0000\u0000\u0000\u01ab\u01a9\u0001\u0000"+ + "\u0000\u0000\u01ac\u01ad\u0005G\u0000\u0000\u01ad\u01ba\u0001\u0000\u0000"+ + "\u0000\u01ae\u01af\u0005F\u0000\u0000\u01af\u01b4\u0003d2\u0000\u01b0"+ + "\u01b1\u0005&\u0000\u0000\u01b1\u01b3\u0003d2\u0000\u01b2\u01b0\u0001"+ + "\u0000\u0000\u0000\u01b3\u01b6\u0001\u0000\u0000\u0000\u01b4\u01b2\u0001"+ + "\u0000\u0000\u0000\u01b4\u01b5\u0001\u0000\u0000\u0000\u01b5\u01b7\u0001"+ + "\u0000\u0000\u0000\u01b6\u01b4\u0001\u0000\u0000\u0000\u01b7\u01b8\u0005"+ + "G\u0000\u0000\u01b8\u01ba\u0001\u0000\u0000\u0000\u01b9\u018f\u0001\u0000"+ + "\u0000\u0000\u01b9\u0190\u0001\u0000\u0000\u0000\u01b9\u0193\u0001\u0000"+ + "\u0000\u0000\u01b9\u0194\u0001\u0000\u0000\u0000\u01b9\u0195\u0001\u0000"+ + "\u0000\u0000\u01b9\u0196\u0001\u0000\u0000\u0000\u01b9\u0197\u0001\u0000"+ + "\u0000\u0000\u01b9\u0198\u0001\u0000\u0000\u0000\u01b9\u01a3\u0001\u0000"+ + "\u0000\u0000\u01b9\u01ae\u0001\u0000\u0000\u0000\u01baA\u0001\u0000\u0000"+ + "\u0000\u01bb\u01be\u00055\u0000\u0000\u01bc\u01be\u0005E\u0000\u0000\u01bd"+ + "\u01bb\u0001\u0000\u0000\u0000\u01bd\u01bc\u0001\u0000\u0000\u0000\u01be"+ + "C\u0001\u0000\u0000\u0000\u01bf\u01c0\u0005\n\u0000\u0000\u01c0\u01c1"+ + 
"\u0005\u001f\u0000\u0000\u01c1E\u0001\u0000\u0000\u0000\u01c2\u01c3\u0005"+ + "\u0012\u0000\u0000\u01c3\u01c8\u0003H$\u0000\u01c4\u01c5\u0005&\u0000"+ + "\u0000\u01c5\u01c7\u0003H$\u0000\u01c6\u01c4\u0001\u0000\u0000\u0000\u01c7"+ + "\u01ca\u0001\u0000\u0000\u0000\u01c8\u01c6\u0001\u0000\u0000\u0000\u01c8"+ + "\u01c9\u0001\u0000\u0000\u0000\u01c9G\u0001\u0000\u0000\u0000\u01ca\u01c8"+ + "\u0001\u0000\u0000\u0000\u01cb\u01cd\u0003\n\u0005\u0000\u01cc\u01ce\u0007"+ + "\u0004\u0000\u0000\u01cd\u01cc\u0001\u0000\u0000\u0000\u01cd\u01ce\u0001"+ + "\u0000\u0000\u0000\u01ce\u01d1\u0001\u0000\u0000\u0000\u01cf\u01d0\u0005"+ + "3\u0000\u0000\u01d0\u01d2\u0007\u0005\u0000\u0000\u01d1\u01cf\u0001\u0000"+ + "\u0000\u0000\u01d1\u01d2\u0001\u0000\u0000\u0000\u01d2I\u0001\u0000\u0000"+ + "\u0000\u01d3\u01d4\u0005\t\u0000\u0000\u01d4\u01d5\u0003:\u001d\u0000"+ + "\u01d5K\u0001\u0000\u0000\u0000\u01d6\u01d7\u0005\u0002\u0000\u0000\u01d7"+ + "\u01d8\u0003:\u001d\u0000\u01d8M\u0001\u0000\u0000\u0000\u01d9\u01da\u0005"+ + "\u000f\u0000\u0000\u01da\u01df\u0003P(\u0000\u01db\u01dc\u0005&\u0000"+ + "\u0000\u01dc\u01de\u0003P(\u0000\u01dd\u01db\u0001\u0000\u0000\u0000\u01de"+ + "\u01e1\u0001\u0000\u0000\u0000\u01df\u01dd\u0001\u0000\u0000\u0000\u01df"+ + "\u01e0\u0001\u0000\u0000\u0000\u01e0O\u0001\u0000\u0000\u0000\u01e1\u01df"+ + "\u0001\u0000\u0000\u0000\u01e2\u01e3\u00038\u001c\u0000\u01e3\u01e4\u0005"+ + "U\u0000\u0000\u01e4\u01e5\u00038\u001c\u0000\u01e5Q\u0001\u0000\u0000"+ + "\u0000\u01e6\u01e7\u0005\u0001\u0000\u0000\u01e7\u01e8\u0003\u0014\n\u0000"+ + "\u01e8\u01ea\u0003d2\u0000\u01e9\u01eb\u0003X,\u0000\u01ea\u01e9\u0001"+ + "\u0000\u0000\u0000\u01ea\u01eb\u0001\u0000\u0000\u0000\u01ebS\u0001\u0000"+ + "\u0000\u0000\u01ec\u01ed\u0005\u0007\u0000\u0000\u01ed\u01ee\u0003\u0014"+ + "\n\u0000\u01ee\u01ef\u0003d2\u0000\u01efU\u0001\u0000\u0000\u0000\u01f0"+ + "\u01f1\u0005\u000e\u0000\u0000\u01f1\u01f2\u00036\u001b\u0000\u01f2W\u0001"+ + "\u0000\u0000\u0000\u01f3\u01f8\u0003Z-\u0000\u01f4\u01f5\u0005&\u0000"+ + "\u0000\u01f5\u01f7\u0003Z-\u0000\u01f6\u01f4\u0001\u0000\u0000\u0000\u01f7"+ + "\u01fa\u0001\u0000\u0000\u0000\u01f8\u01f6\u0001\u0000\u0000\u0000\u01f8"+ + "\u01f9\u0001\u0000\u0000\u0000\u01f9Y\u0001\u0000\u0000\u0000\u01fa\u01f8"+ + "\u0001\u0000\u0000\u0000\u01fb\u01fc\u0003<\u001e\u0000\u01fc\u01fd\u0005"+ + "$\u0000\u0000\u01fd\u01fe\u0003@ \u0000\u01fe[\u0001\u0000\u0000\u0000"+ + "\u01ff\u0200\u0007\u0006\u0000\u0000\u0200]\u0001\u0000\u0000\u0000\u0201"+ + "\u0204\u0003`0\u0000\u0202\u0204\u0003b1\u0000\u0203\u0201\u0001\u0000"+ + "\u0000\u0000\u0203\u0202\u0001\u0000\u0000\u0000\u0204_\u0001\u0000\u0000"+ + "\u0000\u0205\u0207\u0007\u0000\u0000\u0000\u0206\u0205\u0001\u0000\u0000"+ + "\u0000\u0206\u0207\u0001\u0000\u0000\u0000\u0207\u0208\u0001\u0000\u0000"+ + "\u0000\u0208\u0209\u0005 \u0000\u0000\u0209a\u0001\u0000\u0000\u0000\u020a"+ + "\u020c\u0007\u0000\u0000\u0000\u020b\u020a\u0001\u0000\u0000\u0000\u020b"+ + "\u020c\u0001\u0000\u0000\u0000\u020c\u020d\u0001\u0000\u0000\u0000\u020d"+ + "\u020e\u0005\u001f\u0000\u0000\u020ec\u0001\u0000\u0000\u0000\u020f\u0210"+ + "\u0005\u001e\u0000\u0000\u0210e\u0001\u0000\u0000\u0000\u0211\u0212\u0007"+ + "\u0007\u0000\u0000\u0212g\u0001\u0000\u0000\u0000\u0213\u0214\u0005\u0005"+ + "\u0000\u0000\u0214\u0215\u0003j5\u0000\u0215i\u0001\u0000\u0000\u0000"+ + "\u0216\u0217\u0005F\u0000\u0000\u0217\u0218\u0003\u0002\u0001\u0000\u0218"+ + "\u0219\u0005G\u0000\u0000\u0219k\u0001\u0000\u0000\u0000\u021a\u021b\u0005"+ + 
"\u0011\u0000\u0000\u021b\u021c\u0005k\u0000\u0000\u021cm\u0001\u0000\u0000"+ + "\u0000\u021d\u021e\u0005\f\u0000\u0000\u021e\u021f\u0005o\u0000\u0000"+ + "\u021fo\u0001\u0000\u0000\u0000\u0220\u0221\u0005\u0003\u0000\u0000\u0221"+ + "\u0224\u0005[\u0000\u0000\u0222\u0223\u0005Y\u0000\u0000\u0223\u0225\u0003"+ + "8\u001c\u0000\u0224\u0222\u0001\u0000\u0000\u0000\u0224\u0225\u0001\u0000"+ + "\u0000\u0000\u0225\u022f\u0001\u0000\u0000\u0000\u0226\u0227\u0005Z\u0000"+ + "\u0000\u0227\u022c\u0003r9\u0000\u0228\u0229\u0005&\u0000\u0000\u0229"+ + "\u022b\u0003r9\u0000\u022a\u0228\u0001\u0000\u0000\u0000\u022b\u022e\u0001"+ + "\u0000\u0000\u0000\u022c\u022a\u0001\u0000\u0000\u0000\u022c\u022d\u0001"+ + "\u0000\u0000\u0000\u022d\u0230\u0001\u0000\u0000\u0000\u022e\u022c\u0001"+ + "\u0000\u0000\u0000\u022f\u0226\u0001\u0000\u0000\u0000\u022f\u0230\u0001"+ + "\u0000\u0000\u0000\u0230q\u0001\u0000\u0000\u0000\u0231\u0232\u00038\u001c"+ + "\u0000\u0232\u0233\u0005$\u0000\u0000\u0233\u0235\u0001\u0000\u0000\u0000"+ + "\u0234\u0231\u0001\u0000\u0000\u0000\u0234\u0235\u0001\u0000\u0000\u0000"+ + "\u0235\u0236\u0001\u0000\u0000\u0000\u0236\u0237\u00038\u001c\u0000\u0237"+ + "s\u0001\u0000\u0000\u0000\u0238\u0239\u0005\u000b\u0000\u0000\u0239\u023a"+ + "\u0003\"\u0011\u0000\u023a\u023b\u0005Y\u0000\u0000\u023b\u023c\u0003"+ + ":\u001d\u0000\u023cu\u0001\u0000\u0000\u00006\u0081\u008a\u009a\u00a7"+ + "\u00b0\u00b8\u00bc\u00c4\u00c6\u00cb\u00d2\u00d7\u00e2\u00e8\u00f0\u00f2"+ + "\u00fd\u0104\u010f\u0112\u0120\u0128\u0130\u0134\u013b\u0143\u014b\u0158"+ + "\u015c\u0160\u0167\u016b\u0171\u0178\u0180\u0188\u019e\u01a9\u01b4\u01b9"+ + "\u01bd\u01c8\u01cd\u01d1\u01df\u01ea\u01f8\u0203\u0206\u020b\u0224\u022c"+ + "\u022f\u0234"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java index c2c682e0eea17..6a8e9abc4af13 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java @@ -84,6 +84,18 @@ public class EsqlBaseParserBaseListener implements EsqlBaseParserListener { *
<p>The default implementation does nothing.</p>
*/ @Override public void exitWhereCommand(EsqlBaseParser.WhereCommandContext ctx) { } + /** + * {@inheritDoc} + * + *
<p>The default implementation does nothing.</p>
+ */ + @Override public void enterMatchExpression(EsqlBaseParser.MatchExpressionContext ctx) { } + /** + * {@inheritDoc} + * + *
<p>The default implementation does nothing.</p>
+ */ + @Override public void exitMatchExpression(EsqlBaseParser.MatchExpressionContext ctx) { } /** * {@inheritDoc} * @@ -168,6 +180,18 @@ public class EsqlBaseParserBaseListener implements EsqlBaseParserListener { *
<p>The default implementation does nothing.</p>
*/ @Override public void exitRegexBooleanExpression(EsqlBaseParser.RegexBooleanExpressionContext ctx) { } + /** + * {@inheritDoc} + * + *
<p>The default implementation does nothing.</p>
+ */ + @Override public void enterMatchBooleanExpression(EsqlBaseParser.MatchBooleanExpressionContext ctx) { } + /** + * {@inheritDoc} + * + *
<p>The default implementation does nothing.</p>
+ */ + @Override public void exitMatchBooleanExpression(EsqlBaseParser.MatchBooleanExpressionContext ctx) { } /** * {@inheritDoc} * diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java index 3b2675d3490a2..647f222e20582 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java @@ -54,6 +54,13 @@ public class EsqlBaseParserBaseVisitor extends AbstractParseTreeVisitor im * {@link #visitChildren} on {@code ctx}.
</p>
*/ @Override public T visitWhereCommand(EsqlBaseParser.WhereCommandContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *
<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override public T visitMatchExpression(EsqlBaseParser.MatchExpressionContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * @@ -103,6 +110,13 @@ public class EsqlBaseParserBaseVisitor extends AbstractParseTreeVisitor im * {@link #visitChildren} on {@code ctx}.
</p>
*/ @Override public T visitRegexBooleanExpression(EsqlBaseParser.RegexBooleanExpressionContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *
<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override public T visitMatchBooleanExpression(EsqlBaseParser.MatchBooleanExpressionContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java index a6420e6fadebd..8361d8a6edd9a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java @@ -71,6 +71,18 @@ public interface EsqlBaseParserListener extends ParseTreeListener { * @param ctx the parse tree */ void exitWhereCommand(EsqlBaseParser.WhereCommandContext ctx); + /** + * Enter a parse tree produced by the {@code matchExpression} + * labeled alternative in {@link EsqlBaseParser#booleanExpression}. + * @param ctx the parse tree + */ + void enterMatchExpression(EsqlBaseParser.MatchExpressionContext ctx); + /** + * Exit a parse tree produced by the {@code matchExpression} + * labeled alternative in {@link EsqlBaseParser#booleanExpression}. + * @param ctx the parse tree + */ + void exitMatchExpression(EsqlBaseParser.MatchExpressionContext ctx); /** * Enter a parse tree produced by the {@code logicalNot} * labeled alternative in {@link EsqlBaseParser#booleanExpression}. @@ -153,6 +165,16 @@ public interface EsqlBaseParserListener extends ParseTreeListener { * @param ctx the parse tree */ void exitRegexBooleanExpression(EsqlBaseParser.RegexBooleanExpressionContext ctx); + /** + * Enter a parse tree produced by {@link EsqlBaseParser#matchBooleanExpression}. + * @param ctx the parse tree + */ + void enterMatchBooleanExpression(EsqlBaseParser.MatchBooleanExpressionContext ctx); + /** + * Exit a parse tree produced by {@link EsqlBaseParser#matchBooleanExpression}. + * @param ctx the parse tree + */ + void exitMatchBooleanExpression(EsqlBaseParser.MatchBooleanExpressionContext ctx); /** * Enter a parse tree produced by the {@code valueExpressionDefault} * labeled alternative in {@link EsqlBaseParser#valueExpression}. diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java index ec84b7234d67e..c514c6722d4cf 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java @@ -48,6 +48,13 @@ public interface EsqlBaseParserVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitWhereCommand(EsqlBaseParser.WhereCommandContext ctx); + /** + * Visit a parse tree produced by the {@code matchExpression} + * labeled alternative in {@link EsqlBaseParser#booleanExpression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitMatchExpression(EsqlBaseParser.MatchExpressionContext ctx); /** * Visit a parse tree produced by the {@code logicalNot} * labeled alternative in {@link EsqlBaseParser#booleanExpression}. @@ -96,6 +103,12 @@ public interface EsqlBaseParserVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitRegexBooleanExpression(EsqlBaseParser.RegexBooleanExpressionContext ctx); + /** + * Visit a parse tree produced by {@link EsqlBaseParser#matchBooleanExpression}. 
+ * @param ctx the parse tree + * @return the visitor result + */ + T visitMatchBooleanExpression(EsqlBaseParser.MatchBooleanExpressionContext ctx); /** * Visit a parse tree produced by the {@code valueExpressionDefault} * labeled alternative in {@link EsqlBaseParser#valueExpression}. diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java index 88279b65d2007..b00424c113c0d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java @@ -15,6 +15,7 @@ import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CharacterRunAutomaton; import org.apache.lucene.util.automaton.Operations; +import org.elasticsearch.Build; import org.elasticsearch.common.Strings; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.xpack.esql.core.InvalidArgumentException; @@ -25,6 +26,7 @@ import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttribute; import org.elasticsearch.xpack.esql.core.expression.UnresolvedStar; +import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.MatchQueryPredicate; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; @@ -762,4 +764,17 @@ QueryParam paramByNameOrPosition(TerminalNode node) { return params.get(nameOrPosition); } } + + @Override + public Expression visitMatchBooleanExpression(EsqlBaseParser.MatchBooleanExpressionContext ctx) { + if (Build.current().isSnapshot() == false) { + throw new ParsingException(source(ctx), "MATCH operator currently requires a snapshot build"); + } + return new MatchQueryPredicate( + source(ctx), + visitQualifiedName(ctx.qualifiedName()), + visitString(ctx.queryString).fold().toString(), + null + ); + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index f5faf3129d883..17c776f5c56d2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -237,6 +237,10 @@ public final void test() throws Throwable { */ assumeFalse("metadata fields aren't supported", testCase.requiredCapabilities.contains(cap(EsqlFeatures.METADATA_FIELDS))); assumeFalse("enrich can't load fields in csv tests", testCase.requiredCapabilities.contains(cap(EsqlFeatures.ENRICH_LOAD))); + assumeFalse( + "can't use match in csv tests", + testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.MATCH_OPERATOR.capabilityName()) + ); assumeFalse("can't load metrics in csv tests", testCase.requiredCapabilities.contains(cap(EsqlFeatures.METRICS_SYNTAX))); assumeFalse( "multiple indices aren't supported", diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index f1ea1387c59e6..08b1ef9f6fef6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -627,6 +627,37 @@ public void testWeightedAvg() { ); } + public void testMatchInsideEval() throws Exception { + assertEquals("1:36: EVAL does not support MATCH expressions", error("row title = \"brown fox\" | eval x = title match \"fox\" ")); + } + + public void testMatchFilter() throws Exception { + assertEquals( + "1:63: MATCH requires a mapped index field, found [name]", + error("from test | eval name = concat(first_name, last_name) | where name match \"Anna\"") + ); + + assertEquals( + "1:19: MATCH requires a text or keyword field, but [salary] has type [integer]", + error("from test | where salary match \"100\"") + ); + + assertEquals( + "1:19: Invalid condition using MATCH", + error("from test | where first_name match \"Anna\" or starts_with(first_name, \"Anne\")") + ); + + assertEquals( + "1:51: Invalid condition using MATCH", + error("from test | eval new_salary = salary + 10 | where first_name match \"Anna\" OR new_salary > 100") + ); + + assertEquals( + "1:45: MATCH requires a mapped index field, found [fn]", + error("from test | rename first_name as fn | where fn match \"Anna\"") + ); + } + private String error(String query) { return error(query, defaultAnalyzer); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java index d1a352589263a..7374ee55b048c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java @@ -16,6 +16,8 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperServiceTestCase; import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.MatchQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.SearchExecutionContext; @@ -43,12 +45,14 @@ import org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec; import org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec.Stat; import org.elasticsearch.xpack.esql.plan.physical.EstimatesRowSize; +import org.elasticsearch.xpack.esql.plan.physical.EvalExec; import org.elasticsearch.xpack.esql.plan.physical.ExchangeExec; import org.elasticsearch.xpack.esql.plan.physical.FieldExtractExec; import org.elasticsearch.xpack.esql.plan.physical.LimitExec; import org.elasticsearch.xpack.esql.plan.physical.LocalSourceExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.plan.physical.ProjectExec; +import org.elasticsearch.xpack.esql.plan.physical.TopNExec; import org.elasticsearch.xpack.esql.planner.FilterTests; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; import org.elasticsearch.xpack.esql.querydsl.query.SingleValueQuery; @@ -746,6 +750,72 @@ public void testMissingFieldsDoNotGetExtracted() { assertThat(Expressions.names(fields), contains("_meta_field", "gender", "job", "job.raw", "languages", "long_noidx")); } + /** + * Expects + * LimitExec[1000[INTEGER]] + * \_ExchangeExec[[],false] + * \_ProjectExec[[_meta_field{f}#9, emp_no{f}#3, first_name{f}#4, gender{f}#5, job{f}#10, job.raw{f}#11, 
languages{f}#6, last_n + * ame{f}#7, long_noidx{f}#12, salary{f}#8]] + * \_FieldExtractExec[_meta_field{f}#9, emp_no{f}#3, first_name{f}#4, gen..] + * \_EsQueryExec[test], indexMode[standard], query[{"match":{"first_name":{"query":"Anna"}}}][_doc{f}#13], limit[1000], sort[] + * estimatedRowSize[324] + */ + public void testSingleMatchFilterPushdown() { + var plan = plannerOptimizer.plan(""" + from test + | where first_name match "Anna" + """); + + var limit = as(plan, LimitExec.class); + var exchange = as(limit.child(), ExchangeExec.class); + var project = as(exchange.child(), ProjectExec.class); + var fieldExtract = as(project.child(), FieldExtractExec.class); + var actualLuceneQuery = as(fieldExtract.child(), EsQueryExec.class).query(); + + var expectedLuceneQuery = new MatchQueryBuilder("first_name", "Anna"); + assertThat(actualLuceneQuery, equalTo(expectedLuceneQuery)); + } + + /** + * Expects + * EvalExec[[CONCAT([65 6d 70 5f 6e 6f 3a 20][KEYWORD],TOSTRING(emp_no{f}#12),[2c 20 6e 61 6d 65 3a 20][KEYWORD],first_nam + * e{f}#13,[20][KEYWORD],last_name{f}#16) AS description]] + * \_TopNExec[[Order[emp_no{f}#12,ASC,LAST]],1000[INTEGER],50] + * \_ExchangeExec[[],false] + * \_ProjectExec[[_meta_field{f}#18, emp_no{f}#12, first_name{f}#13, gender{f}#14, job{f}#19, job.raw{f}#20, languages{f}#15, l + * ast_name{f}#16, long_noidx{f}#21, salary{f}#17]] + * \_FieldExtractExec[_meta_field{f}#18, emp_no{f}#12, first_name{f}#13, ..] + * \_EsQueryExec[test], indexMode[standard], query[{"bool":{"must":[{"bool":{"should":[{"match":{"first_name":{"query":"Anna"}}} + * ,{"match":{"first_name":{"query":"Anneke"}}}],"boost":1.0}},{"esql_single_value":{"field":"emp_no","next":{"range":{"emp_no": + * {"gt":10000,"boost":1.0}}},"source":"emp_no > 10000@4:9"}},{"match":{"last_name":{"query":"Xinglin"}}}],"boost":1.0}}] + * [_doc{f}#22], limit[1000], sort[[FieldSort[field=emp_no{f}#12, direction=ASC, nulls=LAST]]] estimatedRowSize[336] + */ + public void testMultipleMatchFilterPushdown() { + var plan = plannerOptimizer.plan(""" + from test + | where first_name match "Anna" OR first_name match "Anneke" + | sort emp_no + | where emp_no > 10000 + | eval description = concat("emp_no: ", to_str(emp_no), ", name: ", first_name, " ", last_name) + | where last_name match "Xinglin" + """); + + var eval = as(plan, EvalExec.class); + var topNExec = as(eval.child(), TopNExec.class); + var exchange = as(topNExec.child(), ExchangeExec.class); + var project = as(exchange.child(), ProjectExec.class); + var fieldExtract = as(project.child(), FieldExtractExec.class); + var actualLuceneQuery = as(fieldExtract.child(), EsQueryExec.class).query(); + + Source filterSource = new Source(4, 8, "emp_no > 10000"); + var expectedLuceneQuery = new BoolQueryBuilder().must( + new BoolQueryBuilder().should(new MatchQueryBuilder("first_name", "Anna")).should(new MatchQueryBuilder("first_name", "Anneke")) + ) + .must(wrapWithSingleQuery(QueryBuilders.rangeQuery("emp_no").gt(10000), "emp_no", filterSource)) + .must(new MatchQueryBuilder("last_name", "Xinglin")); + assertThat(actualLuceneQuery.toString(), is(expectedLuceneQuery.toString())); + } + private QueryBuilder wrapWithSingleQuery(QueryBuilder inner, String fieldName, Source source) { return FilterTests.singleValueQuery(inner, fieldName, source); } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/180_match_operator.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/180_match_operator.yml new file mode 100644 index 
0000000000000..061fbbd57c2c8 --- /dev/null +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/180_match_operator.yml @@ -0,0 +1,196 @@ +--- +setup: + - requires: + capabilities: + - method: POST + path: /_query + parameters: [ method, path, parameters, capabilities ] + capabilities: [ match_operator ] + reason: "Match operator added in 8.16.0" + test_runner_features: [capabilities, allowed_warnings_regex] + - do: + indices.create: + index: test + body: + mappings: + properties: + content: + type: text + id: + type: integer + - do: + bulk: + index: "test" + refresh: true + body: + - { "index": { } } + - { "content": "This is a brown fox", "id": 1 } + - { "index": { } } + - { "content": "This is a brown dog", "id": 2 } + - { "index": { } } + - { "content": "This dog is really brown", "id": 3 } + - { "index": { } } + - { "content": "The dog is brown but this document is very very long", "id": 4 } + - { "index": { } } + - { "content": "There is also a white cat", "id": 5 } + - { "index": { } } + - { "content": "The quick brown fox jumps over the lazy dog", "id": 6 } + +--- +"simple where match": + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM test | WHERE content MATCH "fox" | KEEP id | SORT id' + + - match: { columns.0.name: "id" } + - match: { columns.0.type: "integer" } + - length: { values: 2 } + - match: { values.0.0: 1 } + - match: { values.1.0: 6 } + +--- +"combined where match": + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM test | WHERE content MATCH "fox" AND id > 5 | KEEP id | SORT id' + + - match: { columns.0.name: "id" } + - match: { columns.0.type: "integer" } + - length: { values: 1 } + - match: { values.0.0: 6 } + +--- +"multiple match": + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM test | WHERE content MATCH "fox" OR content MATCH "brown" | KEEP id | SORT id' + + - match: { columns.0.name: "id" } + - match: { columns.0.type: "integer" } + - length: { values: 5 } + - match: { values.0.0: 1 } + - match: { values.1.0: 2 } + - match: { values.2.0: 3 } + - match: { values.3.0: 4 } + - match: { values.4.0: 6 } + +--- +"not where match": + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM test | WHERE NOT content MATCH "brown fox" | KEEP id | SORT id' + + - match: { columns.0.name: "id" } + - match: { columns.0.type: "integer" } + - length: { values: 1 } + - match: { values.0.0: 5 } + +--- +"match on non existing column": + - do: + catch: bad_request + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM test | WHERE something match "fox"' + + - match: { status: 400 } + - match: { error.type: verification_exception } + - match: { error.reason: "Found 1 problem\nline 1:19: Unknown column [something]" } + +--- +"match on eval column": + - do: + catch: bad_request + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM test | EVAL upper_content = to_upper(content) | WHERE upper_content MATCH "FOX" | KEEP id' + + - match: { status: 400 } + - match: { error.type: verification_exception } + - match: { error.reason: "Found 1 problem\nline 1:60: MATCH requires a mapped index field, found [upper_content]" } + +--- +"match on 
overwritten column": + - do: + catch: bad_request + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM test | DROP content | EVAL content = CONCAT("ID: ", to_str(id)) | WHERE content match "fox"' + + - match: { status: 400 } + - match: { error.type: verification_exception } + - match: { error.reason: "Found 1 problem\nline 1:78: MATCH requires a mapped index field, found [content]" } + +--- +"match after stats": + - do: + catch: bad_request + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM test | STATS count(*) | WHERE content match "fox"' + + - match: { status: 400 } + - match: { error.type: verification_exception } + - match: { error.reason: "Found 1 problem\nline 1:36: Unknown column [content], did you mean [count(*)]?" } + +--- +"match with functions": + - do: + catch: bad_request + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM test | WHERE content MATCH "fox" OR to_upper(content) == "FOX"' + + - match: { status: 400 } + - match: { error.type: verification_exception } + - match: { error.reason: "Found 1 problem\nline 1:19: Invalid condition using MATCH" } + +--- +"match within eval": + - do: + catch: bad_request + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM test | EVAL matches_query = content MATCH "fox"' + + - match: { status: 400 } + - match: { error.type: verification_exception } + - match: { error.reason: "Found 1 problem\nline 1:34: EVAL does not support MATCH expressions" } + +--- +"match with non text field": + - do: + catch: bad_request + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM test | WHERE id MATCH "fox"' + + - match: { status: 400 } + - match: { error.type: verification_exception } + - match: { error.reason: "Found 1 problem\nline 1:19: MATCH requires a text or keyword field, but [id] has type [integer]" } From fa515bd72e757445f4dfd0fa14827d1e7135f8fb Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Fri, 26 Jul 2024 10:23:17 -0400 Subject: [PATCH 054/105] Use native scalar scorer for int8_flat index (#111071) Switches to optionally using the native scorers if they are available for int8_flat indices. 
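The shape of the change is a probe-once-and-fall-back pattern: ask the platform for a native (SIMD) scorer implementation, and keep delegating to the pure-Java Lucene scorer whenever none is available. A minimal sketch of that pattern, for illustration only (`VectorScorer` and `OptionalNativeScorer` are hypothetical stand-ins, not the real `FlatVectorsScorer` or `VectorScorerFactory` APIs):

    import java.util.Optional;

    // Hypothetical stand-in for a flat-vector scorer interface.
    interface VectorScorer {
        float score(int ord); // similarity of the query vector against stored vector #ord
    }

    final class OptionalNativeScorer implements VectorScorer {
        private final VectorScorer delegate;     // pure-Java scorer, always present
        private final VectorScorer nativeScorer; // null when the platform has no native impl

        OptionalNativeScorer(VectorScorer delegate, Optional<VectorScorer> nativeScorer) {
            this.delegate = delegate;
            this.nativeScorer = nativeScorer.orElse(null);
        }

        @Override
        public float score(int ord) {
            // Prefer the native scorer when available, otherwise fall back to the delegate.
            return nativeScorer != null ? nativeScorer.score(ord) : delegate.score(ord);
        }
    }

The VectorScorerFactory.instance().orElse(null) probe in the updated tests below is the concrete version of the same fallback.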
--- docs/changelog/111071.yaml | 5 ++++ .../vectors/ES813Int8FlatVectorFormat.java | 4 +-- .../ES814ScalarQuantizedVectorsFormat.java | 7 +++++ .../vectors/DenseVectorFieldMapperTests.java | 30 +++++++++++++++---- 4 files changed, 37 insertions(+), 9 deletions(-) create mode 100644 docs/changelog/111071.yaml diff --git a/docs/changelog/111071.yaml b/docs/changelog/111071.yaml new file mode 100644 index 0000000000000..5e8ab53db3d03 --- /dev/null +++ b/docs/changelog/111071.yaml @@ -0,0 +1,5 @@ +pr: 111071 +summary: Use native scalar scorer for int8_flat index +area: Vector Search +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormat.java index 701bf5dc98552..420cdbe016552 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormat.java @@ -15,7 +15,6 @@ import org.apache.lucene.codecs.hnsw.FlatVectorsFormat; import org.apache.lucene.codecs.hnsw.FlatVectorsReader; import org.apache.lucene.codecs.hnsw.FlatVectorsWriter; -import org.apache.lucene.codecs.lucene99.Lucene99ScalarQuantizedVectorsFormat; import org.apache.lucene.index.ByteVectorValues; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FloatVectorValues; @@ -45,8 +44,7 @@ public ES813Int8FlatVectorFormat() { */ public ES813Int8FlatVectorFormat(Float confidenceInterval, int bits, boolean compress) { super(NAME); - // TODO can we just switch this to ES814ScalarQuantizedVectorsFormat ? - this.format = new Lucene99ScalarQuantizedVectorsFormat(confidenceInterval, bits, compress); + this.format = new ES814ScalarQuantizedVectorsFormat(confidenceInterval, bits, compress); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java index c4b52d26fc6e7..431015b015779 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java @@ -98,6 +98,8 @@ public String toString() { + bits + ", compressed=" + compress + + ", flatVectorScorer=" + + flatVectorScorer + ", rawVectorFormat=" + rawVectorFormat + ")"; @@ -234,6 +236,11 @@ static final class ESFlatVectorsScorer implements FlatVectorsScorer { factory = VectorScorerFactory.instance().orElse(null); } + @Override + public String toString() { + return "ESFlatVectorsScorer(" + "delegate=" + delegate + ", factory=" + factory + ')'; + } + @Override public RandomVectorScorerSupplier getRandomVectorScorerSupplier(VectorSimilarityFunction sim, RandomAccessVectorValues values) throws IOException { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java index b044308218543..83b8a8fa991c2 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java @@ -48,6 +48,7 @@ import org.elasticsearch.search.lookup.Source; import org.elasticsearch.search.lookup.SourceProvider; import 
org.elasticsearch.search.vectors.VectorData; +import org.elasticsearch.simdvec.VectorScorerFactory; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.XContentBuilder; @@ -1886,16 +1887,21 @@ public void testKnnQuantizedFlatVectorsFormat() throws IOException { assertThat(codec, instanceOf(LegacyPerFieldMapperCodec.class)); knnVectorsFormat = ((LegacyPerFieldMapperCodec) codec).getKnnVectorsFormatForField("field"); } + VectorScorerFactory factory = VectorScorerFactory.instance().orElse(null); String expectedString = "ES813Int8FlatVectorFormat(name=ES813Int8FlatVectorFormat, innerFormat=" - + "Lucene99ScalarQuantizedVectorsFormat(name=Lucene99ScalarQuantizedVectorsFormat," + + "ES814ScalarQuantizedVectorsFormat(name=ES814ScalarQuantizedVectorsFormat," + " confidenceInterval=" + (setConfidenceInterval ? Float.toString(confidenceInterval) : (quantizedFlatFormat.equals("int4_flat") ? "0.0" : null)) + ", bits=" + (quantizedFlatFormat.equals("int4_flat") ? 4 : 7) - + ", compress=" + + ", compressed=" + quantizedFlatFormat.equals("int4_flat") - + ", flatVectorScorer=ScalarQuantizedVectorScorer(nonQuantizedDelegate=DefaultFlatVectorScorer())," - + " rawVectorFormat=Lucene99FlatVectorsFormat(vectorsScorer=DefaultFlatVectorScorer())))"; + + ", flatVectorScorer=ESFlatVectorsScorer(" + + "delegate=ScalarQuantizedVectorScorer(nonQuantizedDelegate=DefaultFlatVectorScorer())" + + ", factory=" + + (factory != null ? factory : "null") + + "), " + + "rawVectorFormat=Lucene99FlatVectorsFormat(vectorsScorer=DefaultFlatVectorScorer())))"; assertEquals(expectedString, knnVectorsFormat.toString()); } } @@ -1932,6 +1938,7 @@ public void testKnnQuantizedHNSWVectorsFormat() throws IOException { assertThat(codec, instanceOf(LegacyPerFieldMapperCodec.class)); knnVectorsFormat = ((LegacyPerFieldMapperCodec) codec).getKnnVectorsFormatForField("field"); } + VectorScorerFactory factory = VectorScorerFactory.instance().orElse(null); String expectedString = "ES814HnswScalarQuantizedVectorsFormat(name=ES814HnswScalarQuantizedVectorsFormat, maxConn=" + m + ", beamWidth=" @@ -1939,7 +1946,12 @@ public void testKnnQuantizedHNSWVectorsFormat() throws IOException { + ", flatVectorFormat=ES814ScalarQuantizedVectorsFormat(" + "name=ES814ScalarQuantizedVectorsFormat, confidenceInterval=" + (setConfidenceInterval ? confidenceInterval : null) - + ", bits=7, compressed=false, rawVectorFormat=Lucene99FlatVectorsFormat(vectorsScorer=DefaultFlatVectorScorer())" + + ", bits=7, compressed=false, " + + "flatVectorScorer=ESFlatVectorsScorer(delegate=ScalarQuantizedVectorScorer(nonQuantizedDelegate=DefaultFlatVectorScorer()), " + + "factory=" + + (factory != null ? 
factory : "null") + + "), " + + "rawVectorFormat=Lucene99FlatVectorsFormat(vectorsScorer=DefaultFlatVectorScorer())" + "))"; assertEquals(expectedString, knnVectorsFormat.toString()); } @@ -1976,6 +1988,7 @@ public void testKnnHalfByteQuantizedHNSWVectorsFormat() throws IOException { assertThat(codec, instanceOf(LegacyPerFieldMapperCodec.class)); knnVectorsFormat = ((LegacyPerFieldMapperCodec) codec).getKnnVectorsFormatForField("field"); } + VectorScorerFactory factory = VectorScorerFactory.instance().orElse(null); String expectedString = "ES814HnswScalarQuantizedVectorsFormat(name=ES814HnswScalarQuantizedVectorsFormat, maxConn=" + m + ", beamWidth=" @@ -1983,7 +1996,12 @@ public void testKnnHalfByteQuantizedHNSWVectorsFormat() throws IOException { + ", flatVectorFormat=ES814ScalarQuantizedVectorsFormat(" + "name=ES814ScalarQuantizedVectorsFormat, confidenceInterval=" + (setConfidenceInterval ? confidenceInterval : 0.0f) - + ", bits=4, compressed=true, rawVectorFormat=Lucene99FlatVectorsFormat(vectorsScorer=DefaultFlatVectorScorer())" + + ", bits=4, compressed=true, " + + "flatVectorScorer=ESFlatVectorsScorer(delegate=ScalarQuantizedVectorScorer(nonQuantizedDelegate=DefaultFlatVectorScorer()), " + + "factory=" + + (factory != null ? factory : "null") + + "), " + + "rawVectorFormat=Lucene99FlatVectorsFormat(vectorsScorer=DefaultFlatVectorScorer())" + "))"; assertEquals(expectedString, knnVectorsFormat.toString()); } From ff3a77ca46327c91558f8e87220447efb84b8c0e Mon Sep 17 00:00:00 2001 From: Carlos Delgado <6339205+carlosdelest@users.noreply.github.com> Date: Fri, 26 Jul 2024 16:45:29 +0200 Subject: [PATCH 055/105] Clarify some semantic_text docs (#111329) --- docs/reference/mapping/types/semantic-text.asciidoc | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/docs/reference/mapping/types/semantic-text.asciidoc b/docs/reference/mapping/types/semantic-text.asciidoc index ece22fc08b00f..87e8a8bc70538 100644 --- a/docs/reference/mapping/types/semantic-text.asciidoc +++ b/docs/reference/mapping/types/semantic-text.asciidoc @@ -52,8 +52,8 @@ Use the <> to create the endpoint. The `inference_id` will not be validated when the mapping is created, but when documents are ingested into the index. When the first document is indexed, the `inference_id` will be used to generate underlying indexing structures for the field. -WARNING: Removing an inference endpoint will cause ingestion of documents and semantic queries to fail on indices that define `semantic_text` fields with that inference endpoint as their `inference_id`. -Please check that inference endpoints are not used in `semantic_text` fields before removal. +WARNING: Removing an {infer} endpoint will cause ingestion of documents and semantic queries to fail on indices that define `semantic_text` fields with that {infer} endpoint as their `inference_id`. +Trying to <> that is used on a `semantic_text` field will result in an error. [discrete] [[auto-text-chunking]] @@ -127,7 +127,8 @@ types and create an ingest pipeline with an [[update-script]] ==== Updates to `semantic_text` fields -Updates that use scripts are not supported when the index contains a `semantic_text` field. +Updates that use scripts are not supported for an index that contains a `semantic_text` field. +Even if the script targets non-`semantic_text` fields, the update will fail when the index contains a `semantic_text` field.
[discrete] From 1a5b008921e9cc2fce58650b69bc4f82899c6f77 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Fri, 26 Jul 2024 16:53:38 +0200 Subject: [PATCH 056/105] [DOCS] Clarifies semantic query behavior on sparse and dense vector fields (#111339) * [DOCS] Clarifies semantic query behavior on sparse and dense vector fields. * [DOCS] Adds a NOTE to the semantic query docs. --- docs/reference/mapping/types/semantic-text.asciidoc | 7 ++++++- docs/reference/query-dsl/semantic-query.asciidoc | 4 ++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/docs/reference/mapping/types/semantic-text.asciidoc b/docs/reference/mapping/types/semantic-text.asciidoc index 87e8a8bc70538..522a0c54c8aad 100644 --- a/docs/reference/mapping/types/semantic-text.asciidoc +++ b/docs/reference/mapping/types/semantic-text.asciidoc @@ -121,7 +121,12 @@ In case you want to customize data indexing, use the <> or <> field types and create an ingest pipeline with an <> to generate the embeddings. -<> walks you through the process. +<> walks you through the process. In +these cases - when you use `sparse_vector` or `dense_vector` field types instead +of the `semantic_text` field type to customize indexing - using the +<> is not supported for querying the +field data. + [discrete] [[update-script]] diff --git a/docs/reference/query-dsl/semantic-query.asciidoc b/docs/reference/query-dsl/semantic-query.asciidoc index d0eb2da95ebc6..22b5e6c5e6aad 100644 --- a/docs/reference/query-dsl/semantic-query.asciidoc +++ b/docs/reference/query-dsl/semantic-query.asciidoc @@ -128,6 +128,10 @@ If you want to fine-tune a search on a `semantic_text` field, you need to know t You can find the task type using the <>, and check the `task_type` associated with the {infer} service. Depending on the `task_type`, use either the <> or the <> query for greater flexibility and customization. +NOTE: While it is possible to use the `sparse_vector` query or the `knn` query +on a `semantic_text` field, it is not supported to use the `semantic_query` on a +`sparse_vector` or `dense_vector` field type. 
+ [discrete] [[search-sparse-inference]] From ce86f2f843bc77eea8ab7b817dfb66ccf25d0546 Mon Sep 17 00:00:00 2001 From: David Turner Date: Fri, 26 Jul 2024 16:52:39 +0100 Subject: [PATCH 057/105] More verbose logging to investigate #111343 (#111356) --- .../blobstore/testkit/RepositoryAnalysisFailureIT.java | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/RepositoryAnalysisFailureIT.java b/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/RepositoryAnalysisFailureIT.java index 2ca5685c83db3..f0d4eaec7c848 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/RepositoryAnalysisFailureIT.java +++ b/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/RepositoryAnalysisFailureIT.java @@ -40,6 +40,7 @@ import org.elasticsearch.repositories.RepositoryVerificationException; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; import org.elasticsearch.snapshots.AbstractSnapshotIntegTestCase; +import org.elasticsearch.test.junit.annotations.TestIssueLogging; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; import org.junit.Before; @@ -416,6 +417,10 @@ public boolean compareAndExchangeReturnsWitness(String key) { ); } + @TestIssueLogging( + issueUrl = "https://github.com/elastic/elasticsearch/issues/111343", + value = "org.elasticsearch.repositories.blobstore.testkit:TRACE" + ) public void testFailsIfEmptyRegisterRejected() { final RepositoryAnalyzeAction.Request request = new RepositoryAnalyzeAction.Request("test-repo"); blobStore.setDisruption(new Disruption() { From 9b1f07aa12a1bae60090ff162e257b36294dbda6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Campinas?= Date: Fri, 26 Jul 2024 18:20:27 +0200 Subject: [PATCH 058/105] GET _cluster/settings with include_defaults returns the expected fallback value if defined in elasticsearch.yml (#110816) * test: add unit test that reproduces #110815 * fix: consider the fallback setting when getting the default value of a setting * doc: add changelog entry --- docs/changelog/110816.yaml | 6 ++++++ .../common/settings/Setting.java | 2 +- .../common/settings/ScopedSettingsTests.java | 20 +++++++++++++++++++ 3 files changed, 27 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/110816.yaml diff --git a/docs/changelog/110816.yaml b/docs/changelog/110816.yaml new file mode 100644 index 0000000000000..bf707376ec9ea --- /dev/null +++ b/docs/changelog/110816.yaml @@ -0,0 +1,6 @@ +pr: 110816 +summary: GET _cluster/settings with include_defaults returns the expected fallback value if defined in elasticsearch.yml +area: Infra/Settings +type: bug +issues: + - 110815 diff --git a/server/src/main/java/org/elasticsearch/common/settings/Setting.java b/server/src/main/java/org/elasticsearch/common/settings/Setting.java index e96de685381eb..ad9f933ec0459 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/server/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -584,7 +584,7 @@ private T get(Settings settings, boolean validate) { */ public void diff(Settings.Builder builder, Settings source, Settings defaultSettings) { if (exists(source) == false) { - if (exists(defaultSettings)) { + if 
(existsOrFallbackExists(defaultSettings)) { // If the setting is only in the defaults, use the value from the defaults builder.put(getKey(), getRaw(defaultSettings)); } else { diff --git a/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java index 94c0b849edf8d..8d25b41619fcd 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java @@ -924,6 +924,26 @@ public void testDiff() throws IOException { assertThat(diff.getAsInt("foo.bar", null), equalTo(1)); } + public void testDiffWithFallbackDefaultSetting() { + final String fallbackSettingName = "fallback"; + final Setting fallbackSetting = Setting.intSetting(fallbackSettingName, 1, Property.Dynamic, Property.NodeScope); + + final String settingName = "setting.with.fallback"; + final Setting dependentSetting = new Setting<>( + settingName, + fallbackSetting, + (s) -> Setting.parseInt(s, 1, settingName), + value -> {}, + Property.Dynamic, + Property.NodeScope + ); + + ClusterSettings settings = new ClusterSettings(Settings.EMPTY, new HashSet<>(Arrays.asList(fallbackSetting, dependentSetting))); + + final Settings diff = settings.diff(Settings.EMPTY, Settings.builder().put(fallbackSettingName, 2).build()); + assertThat(diff.getAsInt(settingName, null), equalTo(2)); + } + public void testDiffWithDependentSettings() { final String dependedSettingName = "this.setting.is.depended.on"; Setting dependedSetting = Setting.intSetting(dependedSettingName, 1, Property.Dynamic, Property.NodeScope); From 7f78fda76f9d79789e829c47271f358047e7157e Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 26 Jul 2024 13:45:09 -0400 Subject: [PATCH 059/105] ESQL: Add skip to test (#111361) This skips some tests in release mode that try to test a feature that's only available in snapshots. 
Closes #111263 Closes #111264 Closes #111287 Closes #111286 --- muted-tests.yml | 6 ------ .../elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java | 2 ++ 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index c06c4bd127be6..2ba65aceff1d3 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -105,12 +105,6 @@ tests: - class: org.elasticsearch.repositories.azure.AzureBlobContainerRetriesTests method: testReadNonexistentBlobThrowsNoSuchFileException issue: https://github.com/elastic/elasticsearch/issues/111233 -- class: org.elasticsearch.xpack.esql.qa.single_node.RestEsqlIT - method: testInlineStatsProfile {SYNC} - issue: https://github.com/elastic/elasticsearch/issues/111263 -- class: org.elasticsearch.xpack.esql.qa.single_node.RestEsqlIT - method: testInlineStatsProfile {ASYNC} - issue: https://github.com/elastic/elasticsearch/issues/111264 - class: org.elasticsearch.action.admin.indices.create.SplitIndexIT method: testSplitIndexPrimaryTerm issue: https://github.com/elastic/elasticsearch/issues/111282 diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java index af872715c2fea..797fc803ed531 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java @@ -284,6 +284,7 @@ public void testTableDuplicateNames() throws IOException { *

*/ public void testInlineStatsNow() throws IOException { + assumeTrue("INLINESTATS only available on snapshots", Build.current().isSnapshot()); indexTestData(); RequestObjectBuilder builder = requestObjectBuilder().query( @@ -369,6 +370,7 @@ public void testProfile() throws IOException { } public void testInlineStatsProfile() throws IOException { + assumeTrue("INLINESTATS only available on snapshots", Build.current().isSnapshot()); indexTestData(); RequestObjectBuilder builder = requestObjectBuilder().query(fromIndex() + " | INLINESTATS AVG(value) | SORT value ASC"); From d9b91738a47c324a0bafd738109669b1a63dfa85 Mon Sep 17 00:00:00 2001 From: Ivan Malutin <137520241+ivamly@users.noreply.github.com> Date: Fri, 26 Jul 2024 20:54:40 +0300 Subject: [PATCH 060/105] Fix floating-point comparison in testRandomSamplerConsistentSeed method (#111348) * Fix floating-point comparison in testRandomSamplerConsistentSeed method * Replace AssertThat with AssertEquals --- .../search/aggregations/bucket/RandomSamplerIT.java | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java index 71402d3e9c1d8..f8ace23057ea6 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java @@ -25,7 +25,6 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; -import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.lessThan; @ESIntegTestCase.SuiteScopeTestCase @@ -93,6 +92,7 @@ public void testRandomSamplerConsistentSeed() { double[] sampleMonotonicValue = new double[1]; double[] sampleNumericValue = new double[1]; long[] sampledDocCount = new long[1]; + double tolerance = 1e-14; // initialize the values assertResponse( prepareSearch("idx").setPreference("shard:0") @@ -123,9 +123,12 @@ public void testRandomSamplerConsistentSeed() { ), response -> { InternalRandomSampler sampler = response.getAggregations().get("sampler"); - assertThat(((Avg) sampler.getAggregations().get("mean_monotonic")).getValue(), equalTo(sampleMonotonicValue[0])); - assertThat(((Avg) sampler.getAggregations().get("mean_numeric")).getValue(), equalTo(sampleNumericValue[0])); - assertThat(sampler.getDocCount(), equalTo(sampledDocCount[0])); + double monotonicValue = ((Avg) sampler.getAggregations().get("mean_monotonic")).getValue(); + double numericValue = ((Avg) sampler.getAggregations().get("mean_numeric")).getValue(); + long docCount = sampler.getDocCount(); + assertEquals(monotonicValue, sampleMonotonicValue[0], tolerance); + assertEquals(numericValue, sampleNumericValue[0], tolerance); + assertEquals(docCount, sampledDocCount[0]); } ); } From 961ee33fc5ab4cb8d4ab296211f7d328d82c45f2 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 26 Jul 2024 14:49:19 -0400 Subject: [PATCH 061/105] ESQL: Fix name clashes in `@Fixed` annotations (#111365) In ESQL functions you can annotate an argument as `@Fixed` and we'll pass it in without modification. 
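As a rough sketch of what such a function looks like (the wrapper class, method, and parameter names here are illustrative, not taken from this patch; `@Evaluator` and `@Fixed` are the real annotations from `org.elasticsearch.compute.ann`):

```java
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.compute.ann.Evaluator;
import org.elasticsearch.compute.ann.Fixed;

public class FormatExample {
    // The generated evaluator stores `formatter` in a field and passes the
    // same instance into every per-row invocation instead of re-evaluating it.
    @Evaluator
    static BytesRef process(long millis, @Fixed DateFormatter formatter) {
        return new BytesRef(formatter.formatMillis(millis));
    }
}
```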
But it could clash in name with something in the generated code - specifically if you name a parameter something like `p`. This fixes the clash by using `this.p` for all fixed parameters. --- x-pack/plugin/esql/build.gradle | 4 +- x-pack/plugin/esql/compute/build.gradle | 2 +- .../compute/gen/EvaluatorImplementer.java | 2 +- .../scalar/convert/FromBase64Evaluator.java | 4 +- .../scalar/convert/ToBase64Evaluator.java | 4 +- .../date/DateDiffConstantEvaluator.java | 4 +- .../date/DateExtractConstantEvaluator.java | 4 +- .../scalar/date/DateExtractEvaluator.java | 4 +- .../date/DateFormatConstantEvaluator.java | 4 +- .../scalar/date/DateFormatEvaluator.java | 4 +- .../date/DateParseConstantEvaluator.java | 4 +- .../scalar/date/DateParseEvaluator.java | 4 +- .../scalar/date/DateTruncEvaluator.java | 4 +- .../function/scalar/date/NowEvaluator.java | 2 +- .../function/scalar/ip/IpPrefixEvaluator.java | 4 +- .../scalar/ip/IpPrefixOnlyV4Evaluator.java | 148 ------------------ .../scalar/math/ExpDoubleEvaluator.java | 108 +++++++++++++ .../function/scalar/math/ExpIntEvaluator.java | 110 +++++++++++++ .../scalar/math/ExpLongEvaluator.java | 110 +++++++++++++ .../scalar/math/ExpUnsignedLongEvaluator.java | 110 +++++++++++++ ...ianPointDocValuesAndConstantEvaluator.java | 4 +- ...nsCartesianSourceAndConstantEvaluator.java | 4 +- ...GeoPointDocValuesAndConstantEvaluator.java | 4 +- ...ContainsGeoSourceAndConstantEvaluator.java | 4 +- ...ianPointDocValuesAndConstantEvaluator.java | 4 +- ...ntCartesianSourceAndConstantEvaluator.java | 4 +- ...GeoPointDocValuesAndConstantEvaluator.java | 4 +- ...DisjointGeoSourceAndConstantEvaluator.java | 4 +- ...ianPointDocValuesAndConstantEvaluator.java | 4 +- ...tsCartesianSourceAndConstantEvaluator.java | 4 +- ...GeoPointDocValuesAndConstantEvaluator.java | 4 +- ...tersectsGeoSourceAndConstantEvaluator.java | 4 +- ...ianPointDocValuesAndConstantEvaluator.java | 4 +- ...inCartesianSourceAndConstantEvaluator.java | 4 +- ...GeoPointDocValuesAndConstantEvaluator.java | 4 +- ...alWithinGeoSourceAndConstantEvaluator.java | 4 +- ...ianPointDocValuesAndConstantEvaluator.java | 4 +- ...ceCartesianSourceAndConstantEvaluator.java | 4 +- ...GeoPointDocValuesAndConstantEvaluator.java | 4 +- ...DistanceGeoSourceAndConstantEvaluator.java | 4 +- .../scalar/string/AutomataMatchEvaluator.java | 4 +- .../scalar/string/ConcatEvaluator.java | 4 +- .../function/scalar/string/LeftEvaluator.java | 4 +- .../string/RepeatConstantEvaluator.java | 4 +- .../scalar/string/RepeatEvaluator.java | 4 +- .../string/ReplaceConstantEvaluator.java | 4 +- .../scalar/string/RightEvaluator.java | 4 +- .../string/SplitSingleByteEvaluator.java | 4 +- .../scalar/string/SplitVariableEvaluator.java | 4 +- .../scalar/string/ToLowerEvaluator.java | 4 +- .../scalar/string/ToUpperEvaluator.java | 4 +- .../arithmetic/AddDatetimesEvaluator.java | 4 +- .../arithmetic/AddDoublesEvaluator.java | 21 ++- .../arithmetic/MulDoublesEvaluator.java | 21 ++- .../arithmetic/SubDatetimesEvaluator.java | 4 +- .../arithmetic/SubDoublesEvaluator.java | 21 ++- .../InsensitiveEqualsConstantEvaluator.java | 4 +- 57 files changed, 581 insertions(+), 258 deletions(-) delete mode 100644 x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefixOnlyV4Evaluator.java create mode 100644 x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpDoubleEvaluator.java create mode 100644 
x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpIntEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpLongEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpUnsignedLongEvaluator.java diff --git a/x-pack/plugin/esql/build.gradle b/x-pack/plugin/esql/build.gradle index 8803fd81147ef..cab5cee146ebe 100644 --- a/x-pack/plugin/esql/build.gradle +++ b/x-pack/plugin/esql/build.gradle @@ -50,9 +50,9 @@ dependencies { } tasks.named("compileJava").configure { - options.compilerArgs.addAll(["-s", "src/main/generated"]) + options.compilerArgs.addAll(["-s", "$projectDir/src/main/generated"]) // IntelliJ sticks generated files here and we can't stop it.... - exclude { it.file.toString().contains("src/main/generated-src/generated") } + exclude { it.file.toString().contains("$projectDir/src/main/generated-src/generated") } } interface Injected { diff --git a/x-pack/plugin/esql/compute/build.gradle b/x-pack/plugin/esql/compute/build.gradle index ac053bdb827dc..d31a7e629003e 100644 --- a/x-pack/plugin/esql/compute/build.gradle +++ b/x-pack/plugin/esql/compute/build.gradle @@ -17,7 +17,7 @@ dependencies { } tasks.named("compileJava").configure { - options.compilerArgs.addAll(["-s", "src/main/generated"]) + options.compilerArgs.addAll(["-s", "$projectDir/src/main/generated"]) } tasks.named('checkstyleMain').configure { diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java index e1456328e7f64..629a45574ebbb 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java @@ -784,7 +784,7 @@ public void unpackValues(MethodSpec.Builder builder, boolean blockStyle) { @Override public void buildInvocation(StringBuilder pattern, List<Object> args, boolean blockStyle) { - pattern.append("$L"); + pattern.append("this.$L"); args.add(name); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64Evaluator.java index f4704dc7c7e27..6ae51ca82d9a1 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64Evaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64Evaluator.java @@ -67,7 +67,7 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock fieldBlock) { result.appendNull(); continue position; } - result.appendBytesRef(FromBase64.process(fieldBlock.getBytesRef(fieldBlock.getFirstValueIndex(p), fieldScratch), oScratch)); + result.appendBytesRef(FromBase64.process(fieldBlock.getBytesRef(fieldBlock.getFirstValueIndex(p), fieldScratch), this.oScratch)); } return result.build(); } @@ -77,7 +77,7 @@ public BytesRefVector eval(int positionCount, BytesRefVector fieldVector) { try(BytesRefVector.Builder result = driverContext.blockFactory().newBytesRefVectorBuilder(positionCount)) { BytesRef fieldScratch = new BytesRef(); position: for (int p = 0; p <
positionCount; p++) { - result.appendBytesRef(FromBase64.process(fieldVector.getBytesRef(p, fieldScratch), oScratch)); + result.appendBytesRef(FromBase64.process(fieldVector.getBytesRef(p, fieldScratch), this.oScratch)); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64Evaluator.java index eb0c483c7485d..3c102b655d235 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64Evaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64Evaluator.java @@ -69,7 +69,7 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock fieldBlock) { continue position; } try { - result.appendBytesRef(ToBase64.process(fieldBlock.getBytesRef(fieldBlock.getFirstValueIndex(p), fieldScratch), oScratch)); + result.appendBytesRef(ToBase64.process(fieldBlock.getBytesRef(fieldBlock.getFirstValueIndex(p), fieldScratch), this.oScratch)); } catch (ArithmeticException e) { warnings.registerException(e); result.appendNull(); @@ -84,7 +84,7 @@ public BytesRefBlock eval(int positionCount, BytesRefVector fieldVector) { BytesRef fieldScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { try { - result.appendBytesRef(ToBase64.process(fieldVector.getBytesRef(p, fieldScratch), oScratch)); + result.appendBytesRef(ToBase64.process(fieldVector.getBytesRef(p, fieldScratch), this.oScratch)); } catch (ArithmeticException e) { warnings.registerException(e); result.appendNull(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantEvaluator.java index fe54f8f5f9e12..dfef24cb556ea 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantEvaluator.java @@ -88,7 +88,7 @@ public IntBlock eval(int positionCount, LongBlock startTimestampBlock, continue position; } try { - result.appendInt(DateDiff.process(datePartFieldUnit, startTimestampBlock.getLong(startTimestampBlock.getFirstValueIndex(p)), endTimestampBlock.getLong(endTimestampBlock.getFirstValueIndex(p)))); + result.appendInt(DateDiff.process(this.datePartFieldUnit, startTimestampBlock.getLong(startTimestampBlock.getFirstValueIndex(p)), endTimestampBlock.getLong(endTimestampBlock.getFirstValueIndex(p)))); } catch (IllegalArgumentException | InvalidArgumentException e) { warnings.registerException(e); result.appendNull(); @@ -103,7 +103,7 @@ public IntBlock eval(int positionCount, LongVector startTimestampVector, try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { try { - result.appendInt(DateDiff.process(datePartFieldUnit, startTimestampVector.getLong(p), endTimestampVector.getLong(p))); + result.appendInt(DateDiff.process(this.datePartFieldUnit, startTimestampVector.getLong(p), endTimestampVector.getLong(p))); } catch (IllegalArgumentException | InvalidArgumentException e) { 
warnings.registerException(e); result.appendNull(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantEvaluator.java index abff711e5c19a..d72ffe77ac914 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantEvaluator.java @@ -68,7 +68,7 @@ public LongBlock eval(int positionCount, LongBlock valueBlock) { result.appendNull(); continue position; } - result.appendLong(DateExtract.process(valueBlock.getLong(valueBlock.getFirstValueIndex(p)), chronoField, zone)); + result.appendLong(DateExtract.process(valueBlock.getLong(valueBlock.getFirstValueIndex(p)), this.chronoField, this.zone)); } return result.build(); } @@ -77,7 +77,7 @@ public LongBlock eval(int positionCount, LongBlock valueBlock) { public LongVector eval(int positionCount, LongVector valueVector) { try(LongVector.FixedBuilder result = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendLong(p, DateExtract.process(valueVector.getLong(p), chronoField, zone)); + result.appendLong(p, DateExtract.process(valueVector.getLong(p), this.chronoField, this.zone)); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractEvaluator.java index e2c77cd2718c4..8812eba051336 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractEvaluator.java @@ -89,7 +89,7 @@ public LongBlock eval(int positionCount, LongBlock valueBlock, BytesRefBlock chr continue position; } try { - result.appendLong(DateExtract.process(valueBlock.getLong(valueBlock.getFirstValueIndex(p)), chronoFieldBlock.getBytesRef(chronoFieldBlock.getFirstValueIndex(p), chronoFieldScratch), zone)); + result.appendLong(DateExtract.process(valueBlock.getLong(valueBlock.getFirstValueIndex(p)), chronoFieldBlock.getBytesRef(chronoFieldBlock.getFirstValueIndex(p), chronoFieldScratch), this.zone)); } catch (IllegalArgumentException e) { warnings.registerException(e); result.appendNull(); @@ -105,7 +105,7 @@ public LongBlock eval(int positionCount, LongVector valueVector, BytesRef chronoFieldScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { try { - result.appendLong(DateExtract.process(valueVector.getLong(p), chronoFieldVector.getBytesRef(p, chronoFieldScratch), zone)); + result.appendLong(DateExtract.process(valueVector.getLong(p), chronoFieldVector.getBytesRef(p, chronoFieldScratch), this.zone)); } catch (IllegalArgumentException e) { warnings.registerException(e); result.appendNull(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatConstantEvaluator.java 
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatConstantEvaluator.java index 770230e3a5a71..4731c888cef77 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatConstantEvaluator.java @@ -66,7 +66,7 @@ public BytesRefBlock eval(int positionCount, LongBlock valBlock) { result.appendNull(); continue position; } - result.appendBytesRef(DateFormat.process(valBlock.getLong(valBlock.getFirstValueIndex(p)), formatter)); + result.appendBytesRef(DateFormat.process(valBlock.getLong(valBlock.getFirstValueIndex(p)), this.formatter)); } return result.build(); } @@ -75,7 +75,7 @@ public BytesRefBlock eval(int positionCount, LongBlock valBlock) { public BytesRefVector eval(int positionCount, LongVector valVector) { try(BytesRefVector.Builder result = driverContext.blockFactory().newBytesRefVectorBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendBytesRef(DateFormat.process(valVector.getLong(p), formatter)); + result.appendBytesRef(DateFormat.process(valVector.getLong(p), this.formatter)); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatEvaluator.java index 0ac3f5c327169..77633797de02d 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatEvaluator.java @@ -88,7 +88,7 @@ public BytesRefBlock eval(int positionCount, LongBlock valBlock, BytesRefBlock f result.appendNull(); continue position; } - result.appendBytesRef(DateFormat.process(valBlock.getLong(valBlock.getFirstValueIndex(p)), formatterBlock.getBytesRef(formatterBlock.getFirstValueIndex(p), formatterScratch), locale)); + result.appendBytesRef(DateFormat.process(valBlock.getLong(valBlock.getFirstValueIndex(p)), formatterBlock.getBytesRef(formatterBlock.getFirstValueIndex(p), formatterScratch), this.locale)); } return result.build(); } @@ -99,7 +99,7 @@ public BytesRefVector eval(int positionCount, LongVector valVector, try(BytesRefVector.Builder result = driverContext.blockFactory().newBytesRefVectorBuilder(positionCount)) { BytesRef formatterScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - result.appendBytesRef(DateFormat.process(valVector.getLong(p), formatterVector.getBytesRef(p, formatterScratch), locale)); + result.appendBytesRef(DateFormat.process(valVector.getLong(p), formatterVector.getBytesRef(p, formatterScratch), this.locale)); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseConstantEvaluator.java index c08c1a54f90ba..470ffab52920a 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseConstantEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseConstantEvaluator.java @@ -68,7 +68,7 @@ public LongBlock eval(int positionCount, BytesRefBlock valBlock) { continue position; } try { - result.appendLong(DateParse.process(valBlock.getBytesRef(valBlock.getFirstValueIndex(p), valScratch), formatter)); + result.appendLong(DateParse.process(valBlock.getBytesRef(valBlock.getFirstValueIndex(p), valScratch), this.formatter)); } catch (IllegalArgumentException e) { warnings.registerException(e); result.appendNull(); @@ -83,7 +83,7 @@ public LongBlock eval(int positionCount, BytesRefVector valVector) { BytesRef valScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { try { - result.appendLong(DateParse.process(valVector.getBytesRef(p, valScratch), formatter)); + result.appendLong(DateParse.process(valVector.getBytesRef(p, valScratch), this.formatter)); } catch (IllegalArgumentException e) { warnings.registerException(e); result.appendNull(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseEvaluator.java index a28a3feb1c9b6..edfe6c39949c3 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseEvaluator.java @@ -89,7 +89,7 @@ public LongBlock eval(int positionCount, BytesRefBlock valBlock, BytesRefBlock f continue position; } try { - result.appendLong(DateParse.process(valBlock.getBytesRef(valBlock.getFirstValueIndex(p), valScratch), formatterBlock.getBytesRef(formatterBlock.getFirstValueIndex(p), formatterScratch), zoneId)); + result.appendLong(DateParse.process(valBlock.getBytesRef(valBlock.getFirstValueIndex(p), valScratch), formatterBlock.getBytesRef(formatterBlock.getFirstValueIndex(p), formatterScratch), this.zoneId)); } catch (IllegalArgumentException e) { warnings.registerException(e); result.appendNull(); @@ -106,7 +106,7 @@ public LongBlock eval(int positionCount, BytesRefVector valVector, BytesRef formatterScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { try { - result.appendLong(DateParse.process(valVector.getBytesRef(p, valScratch), formatterVector.getBytesRef(p, formatterScratch), zoneId)); + result.appendLong(DateParse.process(valVector.getBytesRef(p, valScratch), formatterVector.getBytesRef(p, formatterScratch), this.zoneId)); } catch (IllegalArgumentException e) { warnings.registerException(e); result.appendNull(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncEvaluator.java index b72203ce0de35..b8bc45296a60a 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncEvaluator.java @@ -64,7 +64,7 @@ public LongBlock eval(int positionCount, LongBlock fieldValBlock) { result.appendNull(); continue position; } - result.appendLong(DateTrunc.process(fieldValBlock.getLong(fieldValBlock.getFirstValueIndex(p)), 
rounding)); + result.appendLong(DateTrunc.process(fieldValBlock.getLong(fieldValBlock.getFirstValueIndex(p)), this.rounding)); } return result.build(); } @@ -73,7 +73,7 @@ public LongBlock eval(int positionCount, LongBlock fieldValBlock) { public LongVector eval(int positionCount, LongVector fieldValVector) { try(LongVector.FixedBuilder result = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendLong(p, DateTrunc.process(fieldValVector.getLong(p), rounding)); + result.appendLong(p, DateTrunc.process(fieldValVector.getLong(p), this.rounding)); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java index 1894d19d7b082..db1d5f8fe01ca 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java @@ -39,7 +39,7 @@ public Block eval(Page page) { public LongVector eval(int positionCount) { try(LongVector.FixedBuilder result = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendLong(p, Now.process(now)); + result.appendLong(p, Now.process(this.now)); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefixEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefixEvaluator.java index 57427f87b76f7..0347fd2e1065c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefixEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefixEvaluator.java @@ -112,7 +112,7 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock ipBlock, IntBlock pre continue position; } try { - result.appendBytesRef(IpPrefix.process(ipBlock.getBytesRef(ipBlock.getFirstValueIndex(p), ipScratch), prefixLengthV4Block.getInt(prefixLengthV4Block.getFirstValueIndex(p)), prefixLengthV6Block.getInt(prefixLengthV6Block.getFirstValueIndex(p)), scratch)); + result.appendBytesRef(IpPrefix.process(ipBlock.getBytesRef(ipBlock.getFirstValueIndex(p), ipScratch), prefixLengthV4Block.getInt(prefixLengthV4Block.getFirstValueIndex(p)), prefixLengthV6Block.getInt(prefixLengthV6Block.getFirstValueIndex(p)), this.scratch)); } catch (IllegalArgumentException e) { warnings.registerException(e); result.appendNull(); @@ -128,7 +128,7 @@ public BytesRefBlock eval(int positionCount, BytesRefVector ipVector, BytesRef ipScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { try { - result.appendBytesRef(IpPrefix.process(ipVector.getBytesRef(p, ipScratch), prefixLengthV4Vector.getInt(p), prefixLengthV6Vector.getInt(p), scratch)); + result.appendBytesRef(IpPrefix.process(ipVector.getBytesRef(p, ipScratch), prefixLengthV4Vector.getInt(p), prefixLengthV6Vector.getInt(p), this.scratch)); } catch (IllegalArgumentException e) { warnings.registerException(e); result.appendNull(); diff --git 
a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefixOnlyV4Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefixOnlyV4Evaluator.java deleted file mode 100644 index a6cb7c7f9b687..0000000000000 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefixOnlyV4Evaluator.java +++ /dev/null @@ -1,148 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.xpack.esql.expression.function.scalar.ip; - -import java.lang.IllegalArgumentException; -import java.lang.Override; -import java.lang.String; -import java.util.function.Function; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.compute.operator.EvalOperator; -import org.elasticsearch.core.Releasables; -import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.expression.function.Warnings; - -/** - * {@link EvalOperator.ExpressionEvaluator} implementation for {@link IpPrefix}. - * This class is generated. Do not edit it. - */ -public final class IpPrefixOnlyV4Evaluator implements EvalOperator.ExpressionEvaluator { - private final Warnings warnings; - - private final EvalOperator.ExpressionEvaluator ip; - - private final EvalOperator.ExpressionEvaluator prefixLengthV4; - - private final BytesRef scratch; - - private final DriverContext driverContext; - - public IpPrefixOnlyV4Evaluator(Source source, EvalOperator.ExpressionEvaluator ip, - EvalOperator.ExpressionEvaluator prefixLengthV4, BytesRef scratch, - DriverContext driverContext) { - this.warnings = new Warnings(source); - this.ip = ip; - this.prefixLengthV4 = prefixLengthV4; - this.scratch = scratch; - this.driverContext = driverContext; - } - - @Override - public Block eval(Page page) { - try (BytesRefBlock ipBlock = (BytesRefBlock) ip.eval(page)) { - try (IntBlock prefixLengthV4Block = (IntBlock) prefixLengthV4.eval(page)) { - BytesRefVector ipVector = ipBlock.asVector(); - if (ipVector == null) { - return eval(page.getPositionCount(), ipBlock, prefixLengthV4Block); - } - IntVector prefixLengthV4Vector = prefixLengthV4Block.asVector(); - if (prefixLengthV4Vector == null) { - return eval(page.getPositionCount(), ipBlock, prefixLengthV4Block); - } - return eval(page.getPositionCount(), ipVector, prefixLengthV4Vector).asBlock(); - } - } - } - - public BytesRefBlock eval(int positionCount, BytesRefBlock ipBlock, - IntBlock prefixLengthV4Block) { - try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { - BytesRef ipScratch = new BytesRef(); - position: for (int p = 0; p < positionCount; p++) { - if (ipBlock.isNull(p)) { - result.appendNull(); - continue position; - } - if (ipBlock.getValueCount(p) != 1) { - if (ipBlock.getValueCount(p) > 1) { - warnings.registerException(new IllegalArgumentException("single-value function encountered 
multi-value")); - } - result.appendNull(); - continue position; - } - if (prefixLengthV4Block.isNull(p)) { - result.appendNull(); - continue position; - } - if (prefixLengthV4Block.getValueCount(p) != 1) { - if (prefixLengthV4Block.getValueCount(p) > 1) { - warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } - result.appendNull(); - continue position; - } - result.appendBytesRef(IpPrefix.process(ipBlock.getBytesRef(ipBlock.getFirstValueIndex(p), ipScratch), prefixLengthV4Block.getInt(prefixLengthV4Block.getFirstValueIndex(p)), scratch)); - } - return result.build(); - } - } - - public BytesRefVector eval(int positionCount, BytesRefVector ipVector, - IntVector prefixLengthV4Vector) { - try(BytesRefVector.Builder result = driverContext.blockFactory().newBytesRefVectorBuilder(positionCount)) { - BytesRef ipScratch = new BytesRef(); - position: for (int p = 0; p < positionCount; p++) { - result.appendBytesRef(IpPrefix.process(ipVector.getBytesRef(p, ipScratch), prefixLengthV4Vector.getInt(p), scratch)); - } - return result.build(); - } - } - - @Override - public String toString() { - return "IpPrefixOnlyV4Evaluator[" + "ip=" + ip + ", prefixLengthV4=" + prefixLengthV4 + "]"; - } - - @Override - public void close() { - Releasables.closeExpectNoException(ip, prefixLengthV4); - } - - static class Factory implements EvalOperator.ExpressionEvaluator.Factory { - private final Source source; - - private final EvalOperator.ExpressionEvaluator.Factory ip; - - private final EvalOperator.ExpressionEvaluator.Factory prefixLengthV4; - - private final Function scratch; - - public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory ip, - EvalOperator.ExpressionEvaluator.Factory prefixLengthV4, - Function scratch) { - this.source = source; - this.ip = ip; - this.prefixLengthV4 = prefixLengthV4; - this.scratch = scratch; - } - - @Override - public IpPrefixOnlyV4Evaluator get(DriverContext context) { - return new IpPrefixOnlyV4Evaluator(source, ip.get(context), prefixLengthV4.get(context), scratch.apply(context), context); - } - - @Override - public String toString() { - return "IpPrefixOnlyV4Evaluator[" + "ip=" + ip + ", prefixLengthV4=" + prefixLengthV4 + "]"; - } - } -} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpDoubleEvaluator.java new file mode 100644 index 0000000000000..37114256774df --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpDoubleEvaluator.java @@ -0,0 +1,108 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.expression.function.Warnings; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Exp}. + * This class is generated. Do not edit it. + */ +public final class ExpDoubleEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator val; + + private final DriverContext driverContext; + + public ExpDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.val = val; + this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); + } + + @Override + public Block eval(Page page) { + try (DoubleBlock valBlock = (DoubleBlock) val.eval(page)) { + DoubleVector valVector = valBlock.asVector(); + if (valVector == null) { + return eval(page.getPositionCount(), valBlock); + } + return eval(page.getPositionCount(), valVector).asBlock(); + } + } + + public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { + try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendDouble(Exp.process(valBlock.getDouble(valBlock.getFirstValueIndex(p)))); + } + return result.build(); + } + } + + public DoubleVector eval(int positionCount, DoubleVector valVector) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + result.appendDouble(p, Exp.process(valVector.getDouble(p))); + } + return result.build(); + } + } + + @Override + public String toString() { + return "ExpDoubleEvaluator[" + "val=" + val + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(val); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory val; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; + this.val = val; + } + + @Override + public ExpDoubleEvaluator get(DriverContext context) { + return new ExpDoubleEvaluator(source, val.get(context), context); + } + + @Override + public String toString() { + return "ExpDoubleEvaluator[" + "val=" + val + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpIntEvaluator.java 
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpIntEvaluator.java new file mode 100644 index 0000000000000..5559453f97f39 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpIntEvaluator.java @@ -0,0 +1,110 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.expression.function.Warnings; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Exp}. + * This class is generated. Do not edit it. + */ +public final class ExpIntEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator val; + + private final DriverContext driverContext; + + public ExpIntEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.val = val; + this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); + } + + @Override + public Block eval(Page page) { + try (IntBlock valBlock = (IntBlock) val.eval(page)) { + IntVector valVector = valBlock.asVector(); + if (valVector == null) { + return eval(page.getPositionCount(), valBlock); + } + return eval(page.getPositionCount(), valVector).asBlock(); + } + } + + public DoubleBlock eval(int positionCount, IntBlock valBlock) { + try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendDouble(Exp.process(valBlock.getInt(valBlock.getFirstValueIndex(p)))); + } + return result.build(); + } + } + + public DoubleVector eval(int positionCount, IntVector valVector) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + result.appendDouble(p, Exp.process(valVector.getInt(p))); + } + return result.build(); + } + } + + @Override + public String toString() { + return "ExpIntEvaluator[" + "val=" + val + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(val); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final 
EvalOperator.ExpressionEvaluator.Factory val; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; + this.val = val; + } + + @Override + public ExpIntEvaluator get(DriverContext context) { + return new ExpIntEvaluator(source, val.get(context), context); + } + + @Override + public String toString() { + return "ExpIntEvaluator[" + "val=" + val + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpLongEvaluator.java new file mode 100644 index 0000000000000..2642628088a27 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpLongEvaluator.java @@ -0,0 +1,110 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.expression.function.Warnings; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Exp}. + * This class is generated. Do not edit it. 
+ */ +public final class ExpLongEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator val; + + private final DriverContext driverContext; + + public ExpLongEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.val = val; + this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); + } + + @Override + public Block eval(Page page) { + try (LongBlock valBlock = (LongBlock) val.eval(page)) { + LongVector valVector = valBlock.asVector(); + if (valVector == null) { + return eval(page.getPositionCount(), valBlock); + } + return eval(page.getPositionCount(), valVector).asBlock(); + } + } + + public DoubleBlock eval(int positionCount, LongBlock valBlock) { + try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendDouble(Exp.process(valBlock.getLong(valBlock.getFirstValueIndex(p)))); + } + return result.build(); + } + } + + public DoubleVector eval(int positionCount, LongVector valVector) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + result.appendDouble(p, Exp.process(valVector.getLong(p))); + } + return result.build(); + } + } + + @Override + public String toString() { + return "ExpLongEvaluator[" + "val=" + val + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(val); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory val; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; + this.val = val; + } + + @Override + public ExpLongEvaluator get(DriverContext context) { + return new ExpLongEvaluator(source, val.get(context), context); + } + + @Override + public String toString() { + return "ExpLongEvaluator[" + "val=" + val + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpUnsignedLongEvaluator.java new file mode 100644 index 0000000000000..52506d5cd30e8 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpUnsignedLongEvaluator.java @@ -0,0 +1,110 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.expression.function.Warnings; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Exp}. + * This class is generated. Do not edit it. + */ +public final class ExpUnsignedLongEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator val; + + private final DriverContext driverContext; + + public ExpUnsignedLongEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.val = val; + this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); + } + + @Override + public Block eval(Page page) { + try (LongBlock valBlock = (LongBlock) val.eval(page)) { + LongVector valVector = valBlock.asVector(); + if (valVector == null) { + return eval(page.getPositionCount(), valBlock); + } + return eval(page.getPositionCount(), valVector).asBlock(); + } + } + + public DoubleBlock eval(int positionCount, LongBlock valBlock) { + try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendDouble(Exp.processUnsignedLong(valBlock.getLong(valBlock.getFirstValueIndex(p)))); + } + return result.build(); + } + } + + public DoubleVector eval(int positionCount, LongVector valVector) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + result.appendDouble(p, Exp.processUnsignedLong(valVector.getLong(p))); + } + return result.build(); + } + } + + @Override + public String toString() { + return "ExpUnsignedLongEvaluator[" + "val=" + val + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(val); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory val; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; + this.val = val; + } + + @Override + public ExpUnsignedLongEvaluator get(DriverContext context) { + return new ExpUnsignedLongEvaluator(source, val.get(context), context); + } + + @Override + public String toString() { + return "ExpUnsignedLongEvaluator[" + "val=" + val + "]"; + } + } +} diff --git 
a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndConstantEvaluator.java index b5a5634bc0fd1..30f5ef487f612 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndConstantEvaluator.java @@ -67,7 +67,7 @@ public BooleanBlock eval(int positionCount, LongBlock leftValueBlock) { continue position; } try { - result.appendBoolean(SpatialContains.processCartesianPointDocValuesAndConstant(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValue)); + result.appendBoolean(SpatialContains.processCartesianPointDocValuesAndConstant(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), this.rightValue)); } catch (IllegalArgumentException e) { warnings.registerException(e); result.appendNull(); @@ -81,7 +81,7 @@ public BooleanBlock eval(int positionCount, LongVector leftValueVector) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { try { - result.appendBoolean(SpatialContains.processCartesianPointDocValuesAndConstant(leftValueVector.getLong(p), rightValue)); + result.appendBoolean(SpatialContains.processCartesianPointDocValuesAndConstant(leftValueVector.getLong(p), this.rightValue)); } catch (IllegalArgumentException e) { warnings.registerException(e); result.appendNull(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndConstantEvaluator.java index 3e2de0ebd397a..9205f1faaa3d0 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndConstantEvaluator.java @@ -70,7 +70,7 @@ public BooleanBlock eval(int positionCount, BytesRefBlock leftValueBlock) { continue position; } try { - result.appendBoolean(SpatialContains.processCartesianSourceAndConstant(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValue)); + result.appendBoolean(SpatialContains.processCartesianSourceAndConstant(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), this.rightValue)); } catch (IllegalArgumentException | IOException e) { warnings.registerException(e); result.appendNull(); @@ -85,7 +85,7 @@ public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector) { BytesRef leftValueScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { try { - result.appendBoolean(SpatialContains.processCartesianSourceAndConstant(leftValueVector.getBytesRef(p, leftValueScratch), rightValue)); + 
result.appendBoolean(SpatialContains.processCartesianSourceAndConstant(leftValueVector.getBytesRef(p, leftValueScratch), this.rightValue)); } catch (IllegalArgumentException | IOException e) { warnings.registerException(e); result.appendNull(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndConstantEvaluator.java index f345c135747e7..a76ccaadfc4bf 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndConstantEvaluator.java @@ -67,7 +67,7 @@ public BooleanBlock eval(int positionCount, LongBlock leftValueBlock) { continue position; } try { - result.appendBoolean(SpatialContains.processGeoPointDocValuesAndConstant(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValue)); + result.appendBoolean(SpatialContains.processGeoPointDocValuesAndConstant(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), this.rightValue)); } catch (IllegalArgumentException e) { warnings.registerException(e); result.appendNull(); @@ -81,7 +81,7 @@ public BooleanBlock eval(int positionCount, LongVector leftValueVector) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { try { - result.appendBoolean(SpatialContains.processGeoPointDocValuesAndConstant(leftValueVector.getLong(p), rightValue)); + result.appendBoolean(SpatialContains.processGeoPointDocValuesAndConstant(leftValueVector.getLong(p), this.rightValue)); } catch (IllegalArgumentException e) { warnings.registerException(e); result.appendNull(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndConstantEvaluator.java index f2316c17db1ec..adb2e2ea0ced2 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndConstantEvaluator.java @@ -70,7 +70,7 @@ public BooleanBlock eval(int positionCount, BytesRefBlock leftValueBlock) { continue position; } try { - result.appendBoolean(SpatialContains.processGeoSourceAndConstant(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValue)); + result.appendBoolean(SpatialContains.processGeoSourceAndConstant(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), this.rightValue)); } catch (IllegalArgumentException | IOException e) { warnings.registerException(e); result.appendNull(); @@ -85,7 +85,7 @@ public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector) { BytesRef leftValueScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { try { - 
result.appendBoolean(SpatialContains.processGeoSourceAndConstant(leftValueVector.getBytesRef(p, leftValueScratch), rightValue)); + result.appendBoolean(SpatialContains.processGeoSourceAndConstant(leftValueVector.getBytesRef(p, leftValueScratch), this.rightValue)); } catch (IllegalArgumentException | IOException e) { warnings.registerException(e); result.appendNull(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndConstantEvaluator.java index 3c46f859c80f8..3a50c1cc1f717 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndConstantEvaluator.java @@ -67,7 +67,7 @@ public BooleanBlock eval(int positionCount, LongBlock leftValueBlock) { continue position; } try { - result.appendBoolean(SpatialDisjoint.processCartesianPointDocValuesAndConstant(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValue)); + result.appendBoolean(SpatialDisjoint.processCartesianPointDocValuesAndConstant(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), this.rightValue)); } catch (IllegalArgumentException e) { warnings.registerException(e); result.appendNull(); @@ -81,7 +81,7 @@ public BooleanBlock eval(int positionCount, LongVector leftValueVector) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { try { - result.appendBoolean(SpatialDisjoint.processCartesianPointDocValuesAndConstant(leftValueVector.getLong(p), rightValue)); + result.appendBoolean(SpatialDisjoint.processCartesianPointDocValuesAndConstant(leftValueVector.getLong(p), this.rightValue)); } catch (IllegalArgumentException e) { warnings.registerException(e); result.appendNull(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndConstantEvaluator.java index 6e5becc402135..64dcf096e0dd8 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndConstantEvaluator.java @@ -70,7 +70,7 @@ public BooleanBlock eval(int positionCount, BytesRefBlock leftValueBlock) { continue position; } try { - result.appendBoolean(SpatialDisjoint.processCartesianSourceAndConstant(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValue)); + result.appendBoolean(SpatialDisjoint.processCartesianSourceAndConstant(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), this.rightValue)); } catch (IllegalArgumentException | IOException e) { warnings.registerException(e); result.appendNull(); @@ -85,7 +85,7 @@ public BooleanBlock eval(int 
positionCount, BytesRefVector leftValueVector) { BytesRef leftValueScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { try { - result.appendBoolean(SpatialDisjoint.processCartesianSourceAndConstant(leftValueVector.getBytesRef(p, leftValueScratch), rightValue)); + result.appendBoolean(SpatialDisjoint.processCartesianSourceAndConstant(leftValueVector.getBytesRef(p, leftValueScratch), this.rightValue)); } catch (IllegalArgumentException | IOException e) { warnings.registerException(e); result.appendNull(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndConstantEvaluator.java index 6fa2d7a6dd639..73912f2f4c5de 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndConstantEvaluator.java @@ -67,7 +67,7 @@ public BooleanBlock eval(int positionCount, LongBlock leftValueBlock) { continue position; } try { - result.appendBoolean(SpatialDisjoint.processGeoPointDocValuesAndConstant(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValue)); + result.appendBoolean(SpatialDisjoint.processGeoPointDocValuesAndConstant(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), this.rightValue)); } catch (IllegalArgumentException e) { warnings.registerException(e); result.appendNull(); @@ -81,7 +81,7 @@ public BooleanBlock eval(int positionCount, LongVector leftValueVector) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { try { - result.appendBoolean(SpatialDisjoint.processGeoPointDocValuesAndConstant(leftValueVector.getLong(p), rightValue)); + result.appendBoolean(SpatialDisjoint.processGeoPointDocValuesAndConstant(leftValueVector.getLong(p), this.rightValue)); } catch (IllegalArgumentException e) { warnings.registerException(e); result.appendNull(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndConstantEvaluator.java index c3930168ae594..1cd5e8504c7cf 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndConstantEvaluator.java @@ -70,7 +70,7 @@ public BooleanBlock eval(int positionCount, BytesRefBlock leftValueBlock) { continue position; } try { - result.appendBoolean(SpatialDisjoint.processGeoSourceAndConstant(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValue)); + result.appendBoolean(SpatialDisjoint.processGeoSourceAndConstant(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), this.rightValue)); } catch (IllegalArgumentException | IOException e) { 
warnings.registerException(e); result.appendNull(); @@ -85,7 +85,7 @@ public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector) { BytesRef leftValueScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { try { - result.appendBoolean(SpatialDisjoint.processGeoSourceAndConstant(leftValueVector.getBytesRef(p, leftValueScratch), rightValue)); + result.appendBoolean(SpatialDisjoint.processGeoSourceAndConstant(leftValueVector.getBytesRef(p, leftValueScratch), this.rightValue)); } catch (IllegalArgumentException | IOException e) { warnings.registerException(e); result.appendNull(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator.java index 56912e3233a4c..38386aa7d51f1 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator.java @@ -67,7 +67,7 @@ public BooleanBlock eval(int positionCount, LongBlock leftValueBlock) { continue position; } try { - result.appendBoolean(SpatialIntersects.processCartesianPointDocValuesAndConstant(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValue)); + result.appendBoolean(SpatialIntersects.processCartesianPointDocValuesAndConstant(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), this.rightValue)); } catch (IllegalArgumentException e) { warnings.registerException(e); result.appendNull(); @@ -81,7 +81,7 @@ public BooleanBlock eval(int positionCount, LongVector leftValueVector) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { try { - result.appendBoolean(SpatialIntersects.processCartesianPointDocValuesAndConstant(leftValueVector.getLong(p), rightValue)); + result.appendBoolean(SpatialIntersects.processCartesianPointDocValuesAndConstant(leftValueVector.getLong(p), this.rightValue)); } catch (IllegalArgumentException e) { warnings.registerException(e); result.appendNull(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndConstantEvaluator.java index 26c4abdc51ecf..70fd91ed0a1ab 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndConstantEvaluator.java @@ -70,7 +70,7 @@ public BooleanBlock eval(int positionCount, BytesRefBlock leftValueBlock) { continue position; } try { - result.appendBoolean(SpatialIntersects.processCartesianSourceAndConstant(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValue)); + 
result.appendBoolean(SpatialIntersects.processCartesianSourceAndConstant(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), this.rightValue)); } catch (IllegalArgumentException | IOException e) { warnings.registerException(e); result.appendNull(); @@ -85,7 +85,7 @@ public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector) { BytesRef leftValueScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { try { - result.appendBoolean(SpatialIntersects.processCartesianSourceAndConstant(leftValueVector.getBytesRef(p, leftValueScratch), rightValue)); + result.appendBoolean(SpatialIntersects.processCartesianSourceAndConstant(leftValueVector.getBytesRef(p, leftValueScratch), this.rightValue)); } catch (IllegalArgumentException | IOException e) { warnings.registerException(e); result.appendNull(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndConstantEvaluator.java index 405d013a77f5a..f6c16997bedc3 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndConstantEvaluator.java @@ -67,7 +67,7 @@ public BooleanBlock eval(int positionCount, LongBlock leftValueBlock) { continue position; } try { - result.appendBoolean(SpatialIntersects.processGeoPointDocValuesAndConstant(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValue)); + result.appendBoolean(SpatialIntersects.processGeoPointDocValuesAndConstant(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), this.rightValue)); } catch (IllegalArgumentException e) { warnings.registerException(e); result.appendNull(); @@ -81,7 +81,7 @@ public BooleanBlock eval(int positionCount, LongVector leftValueVector) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { try { - result.appendBoolean(SpatialIntersects.processGeoPointDocValuesAndConstant(leftValueVector.getLong(p), rightValue)); + result.appendBoolean(SpatialIntersects.processGeoPointDocValuesAndConstant(leftValueVector.getLong(p), this.rightValue)); } catch (IllegalArgumentException e) { warnings.registerException(e); result.appendNull(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndConstantEvaluator.java index dea6989a830ab..94ecf64f7252e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndConstantEvaluator.java @@ -70,7 +70,7 @@ public BooleanBlock eval(int positionCount, BytesRefBlock leftValueBlock) { continue position; } try { - 
result.appendBoolean(SpatialIntersects.processGeoSourceAndConstant(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValue)); + result.appendBoolean(SpatialIntersects.processGeoSourceAndConstant(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), this.rightValue)); } catch (IllegalArgumentException | IOException e) { warnings.registerException(e); result.appendNull(); @@ -85,7 +85,7 @@ public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector) { BytesRef leftValueScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { try { - result.appendBoolean(SpatialIntersects.processGeoSourceAndConstant(leftValueVector.getBytesRef(p, leftValueScratch), rightValue)); + result.appendBoolean(SpatialIntersects.processGeoSourceAndConstant(leftValueVector.getBytesRef(p, leftValueScratch), this.rightValue)); } catch (IllegalArgumentException | IOException e) { warnings.registerException(e); result.appendNull(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndConstantEvaluator.java index 5c31df936236b..0619d401154da 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndConstantEvaluator.java @@ -67,7 +67,7 @@ public BooleanBlock eval(int positionCount, LongBlock leftValueBlock) { continue position; } try { - result.appendBoolean(SpatialWithin.processCartesianPointDocValuesAndConstant(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValue)); + result.appendBoolean(SpatialWithin.processCartesianPointDocValuesAndConstant(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), this.rightValue)); } catch (IllegalArgumentException e) { warnings.registerException(e); result.appendNull(); @@ -81,7 +81,7 @@ public BooleanBlock eval(int positionCount, LongVector leftValueVector) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { try { - result.appendBoolean(SpatialWithin.processCartesianPointDocValuesAndConstant(leftValueVector.getLong(p), rightValue)); + result.appendBoolean(SpatialWithin.processCartesianPointDocValuesAndConstant(leftValueVector.getLong(p), this.rightValue)); } catch (IllegalArgumentException e) { warnings.registerException(e); result.appendNull(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndConstantEvaluator.java index 1a1bd4cd64535..82de0c349dc7d 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndConstantEvaluator.java @@ 
-70,7 +70,7 @@ public BooleanBlock eval(int positionCount, BytesRefBlock leftValueBlock) { continue position; } try { - result.appendBoolean(SpatialWithin.processCartesianSourceAndConstant(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValue)); + result.appendBoolean(SpatialWithin.processCartesianSourceAndConstant(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), this.rightValue)); } catch (IllegalArgumentException | IOException e) { warnings.registerException(e); result.appendNull(); @@ -85,7 +85,7 @@ public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector) { BytesRef leftValueScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { try { - result.appendBoolean(SpatialWithin.processCartesianSourceAndConstant(leftValueVector.getBytesRef(p, leftValueScratch), rightValue)); + result.appendBoolean(SpatialWithin.processCartesianSourceAndConstant(leftValueVector.getBytesRef(p, leftValueScratch), this.rightValue)); } catch (IllegalArgumentException | IOException e) { warnings.registerException(e); result.appendNull(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndConstantEvaluator.java index d19182ffb2341..b686e1aec1f4c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndConstantEvaluator.java @@ -67,7 +67,7 @@ public BooleanBlock eval(int positionCount, LongBlock leftValueBlock) { continue position; } try { - result.appendBoolean(SpatialWithin.processGeoPointDocValuesAndConstant(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValue)); + result.appendBoolean(SpatialWithin.processGeoPointDocValuesAndConstant(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), this.rightValue)); } catch (IllegalArgumentException e) { warnings.registerException(e); result.appendNull(); @@ -81,7 +81,7 @@ public BooleanBlock eval(int positionCount, LongVector leftValueVector) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { try { - result.appendBoolean(SpatialWithin.processGeoPointDocValuesAndConstant(leftValueVector.getLong(p), rightValue)); + result.appendBoolean(SpatialWithin.processGeoPointDocValuesAndConstant(leftValueVector.getLong(p), this.rightValue)); } catch (IllegalArgumentException e) { warnings.registerException(e); result.appendNull(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndConstantEvaluator.java index cca5ef92918d8..a5d3d48717531 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndConstantEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndConstantEvaluator.java @@ -70,7 +70,7 @@ public BooleanBlock eval(int positionCount, BytesRefBlock leftValueBlock) { continue position; } try { - result.appendBoolean(SpatialWithin.processGeoSourceAndConstant(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValue)); + result.appendBoolean(SpatialWithin.processGeoSourceAndConstant(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), this.rightValue)); } catch (IllegalArgumentException | IOException e) { warnings.registerException(e); result.appendNull(); @@ -85,7 +85,7 @@ public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector) { BytesRef leftValueScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { try { - result.appendBoolean(SpatialWithin.processGeoSourceAndConstant(leftValueVector.getBytesRef(p, leftValueScratch), rightValue)); + result.appendBoolean(SpatialWithin.processGeoSourceAndConstant(leftValueVector.getBytesRef(p, leftValueScratch), this.rightValue)); } catch (IllegalArgumentException | IOException e) { warnings.registerException(e); result.appendNull(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianPointDocValuesAndConstantEvaluator.java index 21b987f830a2c..01d70b2a53366 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianPointDocValuesAndConstantEvaluator.java @@ -66,7 +66,7 @@ public DoubleBlock eval(int positionCount, LongBlock leftValueBlock) { continue position; } try { - result.appendDouble(StDistance.processCartesianPointDocValuesAndConstant(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValue)); + result.appendDouble(StDistance.processCartesianPointDocValuesAndConstant(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), this.rightValue)); } catch (IllegalArgumentException e) { warnings.registerException(e); result.appendNull(); @@ -80,7 +80,7 @@ public DoubleBlock eval(int positionCount, LongVector leftValueVector) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { try { - result.appendDouble(StDistance.processCartesianPointDocValuesAndConstant(leftValueVector.getLong(p), rightValue)); + result.appendDouble(StDistance.processCartesianPointDocValuesAndConstant(leftValueVector.getLong(p), this.rightValue)); } catch (IllegalArgumentException e) { warnings.registerException(e); result.appendNull(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianSourceAndConstantEvaluator.java index 23416e56788b6..b4f3dbc3df326 100644 --- 
a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianSourceAndConstantEvaluator.java @@ -69,7 +69,7 @@ public DoubleBlock eval(int positionCount, BytesRefBlock leftValueBlock) { continue position; } try { - result.appendDouble(StDistance.processCartesianSourceAndConstant(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValue)); + result.appendDouble(StDistance.processCartesianSourceAndConstant(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), this.rightValue)); } catch (IllegalArgumentException | IOException e) { warnings.registerException(e); result.appendNull(); @@ -84,7 +84,7 @@ public DoubleBlock eval(int positionCount, BytesRefVector leftValueVector) { BytesRef leftValueScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { try { - result.appendDouble(StDistance.processCartesianSourceAndConstant(leftValueVector.getBytesRef(p, leftValueScratch), rightValue)); + result.appendDouble(StDistance.processCartesianSourceAndConstant(leftValueVector.getBytesRef(p, leftValueScratch), this.rightValue)); } catch (IllegalArgumentException | IOException e) { warnings.registerException(e); result.appendNull(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoPointDocValuesAndConstantEvaluator.java index 3f96c8bf20ab7..b0980697e9377 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoPointDocValuesAndConstantEvaluator.java @@ -66,7 +66,7 @@ public DoubleBlock eval(int positionCount, LongBlock leftValueBlock) { continue position; } try { - result.appendDouble(StDistance.processGeoPointDocValuesAndConstant(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValue)); + result.appendDouble(StDistance.processGeoPointDocValuesAndConstant(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), this.rightValue)); } catch (IllegalArgumentException e) { warnings.registerException(e); result.appendNull(); @@ -80,7 +80,7 @@ public DoubleBlock eval(int positionCount, LongVector leftValueVector) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { try { - result.appendDouble(StDistance.processGeoPointDocValuesAndConstant(leftValueVector.getLong(p), rightValue)); + result.appendDouble(StDistance.processGeoPointDocValuesAndConstant(leftValueVector.getLong(p), this.rightValue)); } catch (IllegalArgumentException e) { warnings.registerException(e); result.appendNull(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoSourceAndConstantEvaluator.java index 556444ac8d740..70bcc64e58834 100644 
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoSourceAndConstantEvaluator.java @@ -69,7 +69,7 @@ public DoubleBlock eval(int positionCount, BytesRefBlock leftValueBlock) { continue position; } try { - result.appendDouble(StDistance.processGeoSourceAndConstant(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValue)); + result.appendDouble(StDistance.processGeoSourceAndConstant(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), this.rightValue)); } catch (IllegalArgumentException | IOException e) { warnings.registerException(e); result.appendNull(); @@ -84,7 +84,7 @@ public DoubleBlock eval(int positionCount, BytesRefVector leftValueVector) { BytesRef leftValueScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { try { - result.appendDouble(StDistance.processGeoSourceAndConstant(leftValueVector.getBytesRef(p, leftValueScratch), rightValue)); + result.appendDouble(StDistance.processGeoSourceAndConstant(leftValueVector.getBytesRef(p, leftValueScratch), this.rightValue)); } catch (IllegalArgumentException | IOException e) { warnings.registerException(e); result.appendNull(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatchEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatchEvaluator.java index 21491b4272ea1..96726b310a654 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatchEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatchEvaluator.java @@ -71,7 +71,7 @@ public BooleanBlock eval(int positionCount, BytesRefBlock inputBlock) { result.appendNull(); continue position; } - result.appendBoolean(AutomataMatch.process(inputBlock.getBytesRef(inputBlock.getFirstValueIndex(p), inputScratch), automaton, pattern)); + result.appendBoolean(AutomataMatch.process(inputBlock.getBytesRef(inputBlock.getFirstValueIndex(p), inputScratch), this.automaton, this.pattern)); } return result.build(); } @@ -81,7 +81,7 @@ public BooleanVector eval(int positionCount, BytesRefVector inputVector) { try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { BytesRef inputScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(p, AutomataMatch.process(inputVector.getBytesRef(p, inputScratch), automaton, pattern)); + result.appendBoolean(p, AutomataMatch.process(inputVector.getBytesRef(p, inputScratch), this.automaton, this.pattern)); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java index e73cc58590fc1..955e05ae7445f 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java @@ -87,7 +87,7 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock[] valuesBlocks) { int o = valuesBlocks[i].getFirstValueIndex(p); valuesValues[i] = valuesBlocks[i].getBytesRef(o, valuesScratch[i]); } - result.appendBytesRef(Concat.process(scratch, valuesValues)); + result.appendBytesRef(Concat.process(this.scratch, valuesValues)); } return result.build(); } @@ -105,7 +105,7 @@ public BytesRefVector eval(int positionCount, BytesRefVector[] valuesVectors) { for (int i = 0; i < valuesVectors.length; i++) { valuesValues[i] = valuesVectors[i].getBytesRef(p, valuesScratch[i]); } - result.appendBytesRef(Concat.process(scratch, valuesValues)); + result.appendBytesRef(Concat.process(this.scratch, valuesValues)); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java index 8effe5e6d72cd..d932f4f1ea9b0 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java @@ -93,7 +93,7 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock, IntBlock le result.appendNull(); continue position; } - result.appendBytesRef(Left.process(out, cp, strBlock.getBytesRef(strBlock.getFirstValueIndex(p), strScratch), lengthBlock.getInt(lengthBlock.getFirstValueIndex(p)))); + result.appendBytesRef(Left.process(this.out, this.cp, strBlock.getBytesRef(strBlock.getFirstValueIndex(p), strScratch), lengthBlock.getInt(lengthBlock.getFirstValueIndex(p)))); } return result.build(); } @@ -103,7 +103,7 @@ public BytesRefVector eval(int positionCount, BytesRefVector strVector, IntVecto try(BytesRefVector.Builder result = driverContext.blockFactory().newBytesRefVectorBuilder(positionCount)) { BytesRef strScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - result.appendBytesRef(Left.process(out, cp, strVector.getBytesRef(p, strScratch), lengthVector.getInt(p))); + result.appendBytesRef(Left.process(this.out, this.cp, strVector.getBytesRef(p, strScratch), lengthVector.getInt(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatConstantEvaluator.java index e83c7c7720828..3ba3f40a8ed5d 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatConstantEvaluator.java @@ -71,7 +71,7 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock) { continue position; } try { - result.appendBytesRef(Repeat.processConstantNumber(scratch, strBlock.getBytesRef(strBlock.getFirstValueIndex(p), strScratch), number)); + result.appendBytesRef(Repeat.processConstantNumber(this.scratch, strBlock.getBytesRef(strBlock.getFirstValueIndex(p), strScratch), this.number)); } catch (IllegalArgumentException e) { 
warnings.registerException(e); result.appendNull(); @@ -86,7 +86,7 @@ public BytesRefBlock eval(int positionCount, BytesRefVector strVector) { BytesRef strScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { try { - result.appendBytesRef(Repeat.processConstantNumber(scratch, strVector.getBytesRef(p, strScratch), number)); + result.appendBytesRef(Repeat.processConstantNumber(this.scratch, strVector.getBytesRef(p, strScratch), this.number)); } catch (IllegalArgumentException e) { warnings.registerException(e); result.appendNull(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatEvaluator.java index 3723a35283c4b..313c99ad7c454 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatEvaluator.java @@ -91,7 +91,7 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock, IntBlock nu continue position; } try { - result.appendBytesRef(Repeat.process(scratch, strBlock.getBytesRef(strBlock.getFirstValueIndex(p), strScratch), numberBlock.getInt(numberBlock.getFirstValueIndex(p)))); + result.appendBytesRef(Repeat.process(this.scratch, strBlock.getBytesRef(strBlock.getFirstValueIndex(p), strScratch), numberBlock.getInt(numberBlock.getFirstValueIndex(p)))); } catch (IllegalArgumentException e) { warnings.registerException(e); result.appendNull(); @@ -106,7 +106,7 @@ public BytesRefBlock eval(int positionCount, BytesRefVector strVector, IntVector BytesRef strScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { try { - result.appendBytesRef(Repeat.process(scratch, strVector.getBytesRef(p, strScratch), numberVector.getInt(p))); + result.appendBytesRef(Repeat.process(this.scratch, strVector.getBytesRef(p, strScratch), numberVector.getInt(p))); } catch (IllegalArgumentException e) { warnings.registerException(e); result.appendNull(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceConstantEvaluator.java index 2b898377f59f6..63623b8428fab 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceConstantEvaluator.java @@ -89,7 +89,7 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock, BytesRefBlo continue position; } try { - result.appendBytesRef(Replace.process(strBlock.getBytesRef(strBlock.getFirstValueIndex(p), strScratch), regex, newStrBlock.getBytesRef(newStrBlock.getFirstValueIndex(p), newStrScratch))); + result.appendBytesRef(Replace.process(strBlock.getBytesRef(strBlock.getFirstValueIndex(p), strScratch), this.regex, newStrBlock.getBytesRef(newStrBlock.getFirstValueIndex(p), newStrScratch))); } catch (PatternSyntaxException e) { warnings.registerException(e); result.appendNull(); @@ -106,7 +106,7 @@ public BytesRefBlock eval(int positionCount, BytesRefVector strVector, BytesRef newStrScratch = new BytesRef(); position: for (int p = 0; p < 
positionCount; p++) { try { - result.appendBytesRef(Replace.process(strVector.getBytesRef(p, strScratch), regex, newStrVector.getBytesRef(p, newStrScratch))); + result.appendBytesRef(Replace.process(strVector.getBytesRef(p, strScratch), this.regex, newStrVector.getBytesRef(p, newStrScratch))); } catch (PatternSyntaxException e) { warnings.registerException(e); result.appendNull(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightEvaluator.java index 57cad6c63242d..b100cd592495e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightEvaluator.java @@ -93,7 +93,7 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock, IntBlock le result.appendNull(); continue position; } - result.appendBytesRef(Right.process(out, cp, strBlock.getBytesRef(strBlock.getFirstValueIndex(p), strScratch), lengthBlock.getInt(lengthBlock.getFirstValueIndex(p)))); + result.appendBytesRef(Right.process(this.out, this.cp, strBlock.getBytesRef(strBlock.getFirstValueIndex(p), strScratch), lengthBlock.getInt(lengthBlock.getFirstValueIndex(p)))); } return result.build(); } @@ -103,7 +103,7 @@ public BytesRefVector eval(int positionCount, BytesRefVector strVector, IntVecto try(BytesRefVector.Builder result = driverContext.blockFactory().newBytesRefVectorBuilder(positionCount)) { BytesRef strScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - result.appendBytesRef(Right.process(out, cp, strVector.getBytesRef(p, strScratch), lengthVector.getInt(p))); + result.appendBytesRef(Right.process(this.out, this.cp, strVector.getBytesRef(p, strScratch), lengthVector.getInt(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSingleByteEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSingleByteEvaluator.java index d58b1aee0ee9d..31639b981f84d 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSingleByteEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSingleByteEvaluator.java @@ -69,7 +69,7 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock) { result.appendNull(); continue position; } - Split.process(result, strBlock.getBytesRef(strBlock.getFirstValueIndex(p), strScratch), delim, scratch); + Split.process(result, strBlock.getBytesRef(strBlock.getFirstValueIndex(p), strScratch), this.delim, this.scratch); } return result.build(); } @@ -79,7 +79,7 @@ public BytesRefBlock eval(int positionCount, BytesRefVector strVector) { try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { BytesRef strScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - Split.process(result, strVector.getBytesRef(p, strScratch), delim, scratch); + Split.process(result, strVector.getBytesRef(p, strScratch), this.delim, this.scratch); } return result.build(); } diff --git 
a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitVariableEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitVariableEvaluator.java index 753febd88ca58..bccfa9ab680ea 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitVariableEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitVariableEvaluator.java @@ -87,7 +87,7 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock, BytesRefBlo result.appendNull(); continue position; } - Split.process(result, strBlock.getBytesRef(strBlock.getFirstValueIndex(p), strScratch), delimBlock.getBytesRef(delimBlock.getFirstValueIndex(p), delimScratch), scratch); + Split.process(result, strBlock.getBytesRef(strBlock.getFirstValueIndex(p), strScratch), delimBlock.getBytesRef(delimBlock.getFirstValueIndex(p), delimScratch), this.scratch); } return result.build(); } @@ -99,7 +99,7 @@ public BytesRefBlock eval(int positionCount, BytesRefVector strVector, BytesRef strScratch = new BytesRef(); BytesRef delimScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - Split.process(result, strVector.getBytesRef(p, strScratch), delimVector.getBytesRef(p, delimScratch), scratch); + Split.process(result, strVector.getBytesRef(p, strScratch), delimVector.getBytesRef(p, delimScratch), this.scratch); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLowerEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLowerEvaluator.java index ee30b2b282162..703fb1cae5d8f 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLowerEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLowerEvaluator.java @@ -66,7 +66,7 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock valBlock) { result.appendNull(); continue position; } - result.appendBytesRef(ToLower.process(valBlock.getBytesRef(valBlock.getFirstValueIndex(p), valScratch), locale)); + result.appendBytesRef(ToLower.process(valBlock.getBytesRef(valBlock.getFirstValueIndex(p), valScratch), this.locale)); } return result.build(); } @@ -76,7 +76,7 @@ public BytesRefVector eval(int positionCount, BytesRefVector valVector) { try(BytesRefVector.Builder result = driverContext.blockFactory().newBytesRefVectorBuilder(positionCount)) { BytesRef valScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - result.appendBytesRef(ToLower.process(valVector.getBytesRef(p, valScratch), locale)); + result.appendBytesRef(ToLower.process(valVector.getBytesRef(p, valScratch), this.locale)); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpperEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpperEvaluator.java index cf72804b7e354..2a99af3033b0d 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpperEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpperEvaluator.java @@ -66,7 +66,7 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock valBlock) { result.appendNull(); continue position; } - result.appendBytesRef(ToUpper.process(valBlock.getBytesRef(valBlock.getFirstValueIndex(p), valScratch), locale)); + result.appendBytesRef(ToUpper.process(valBlock.getBytesRef(valBlock.getFirstValueIndex(p), valScratch), this.locale)); } return result.build(); } @@ -76,7 +76,7 @@ public BytesRefVector eval(int positionCount, BytesRefVector valVector) { try(BytesRefVector.Builder result = driverContext.blockFactory().newBytesRefVectorBuilder(positionCount)) { BytesRef valScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - result.appendBytesRef(ToUpper.process(valVector.getBytesRef(p, valScratch), locale)); + result.appendBytesRef(ToUpper.process(valVector.getBytesRef(p, valScratch), this.locale)); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDatetimesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDatetimesEvaluator.java index 44ed1ebebd817..962fb4e2e6819 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDatetimesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDatetimesEvaluator.java @@ -67,7 +67,7 @@ public LongBlock eval(int positionCount, LongBlock datetimeBlock) { continue position; } try { - result.appendLong(Add.processDatetimes(datetimeBlock.getLong(datetimeBlock.getFirstValueIndex(p)), temporalAmount)); + result.appendLong(Add.processDatetimes(datetimeBlock.getLong(datetimeBlock.getFirstValueIndex(p)), this.temporalAmount)); } catch (ArithmeticException | DateTimeException e) { warnings.registerException(e); result.appendNull(); @@ -81,7 +81,7 @@ public LongBlock eval(int positionCount, LongVector datetimeVector) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { try { - result.appendLong(Add.processDatetimes(datetimeVector.getLong(p), temporalAmount)); + result.appendLong(Add.processDatetimes(datetimeVector.getLong(p), this.temporalAmount)); } catch (ArithmeticException | DateTimeException e) { warnings.registerException(e); result.appendNull(); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java index fbf25c5fec393..d6a81f2873e3a 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java @@ -4,6 +4,7 @@ // 2.0. 
 package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic;
 
+import java.lang.ArithmeticException;
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
@@ -50,7 +51,7 @@ public Block eval(Page page) {
       if (rhsVector == null) {
         return eval(page.getPositionCount(), lhsBlock, rhsBlock);
       }
-      return eval(page.getPositionCount(), lhsVector, rhsVector).asBlock();
+      return eval(page.getPositionCount(), lhsVector, rhsVector);
     }
   }
 }
@@ -80,16 +81,26 @@ public DoubleBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhs
           result.appendNull();
           continue position;
         }
-        result.appendDouble(Add.processDoubles(lhsBlock.getDouble(lhsBlock.getFirstValueIndex(p)), rhsBlock.getDouble(rhsBlock.getFirstValueIndex(p))));
+        try {
+          result.appendDouble(Add.processDoubles(lhsBlock.getDouble(lhsBlock.getFirstValueIndex(p)), rhsBlock.getDouble(rhsBlock.getFirstValueIndex(p))));
+        } catch (ArithmeticException e) {
+          warnings.registerException(e);
+          result.appendNull();
+        }
       }
       return result.build();
     }
   }
 
-  public DoubleVector eval(int positionCount, DoubleVector lhsVector, DoubleVector rhsVector) {
-    try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) {
+  public DoubleBlock eval(int positionCount, DoubleVector lhsVector, DoubleVector rhsVector) {
+    try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
-        result.appendDouble(p, Add.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p)));
+        try {
+          result.appendDouble(Add.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p)));
+        } catch (ArithmeticException e) {
+          warnings.registerException(e);
+          result.appendNull();
+        }
      }
      return result.build();
    }
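This AddDoublesEvaluator change, and the identical Mul and Sub changes that follow, move double arithmetic from an infallible dense-vector path to a nullable block path: Add.processDoubles can now throw, and the generated loop converts the exception into a registered warning plus a null result instead of failing the whole query. Below is a minimal self-contained sketch of that pattern; it assumes processDoubles rejects non-finite results (the hunk itself does not show why the exception is thrown) and uses a plain list as the warnings sink, so the names are illustrative, not the generated API:

    import java.util.ArrayList;
    import java.util.List;

    /** Sketch of the "catch, warn, append null" pattern; names are illustrative. */
    class DoubleAddSketch {
      final List<String> warnings = new ArrayList<>(); // stands in for the generated Warnings sink

      // Assumed behavior: throws ArithmeticException when the result is not finite.
      static double processDoubles(double lhs, double rhs) {
        double v = lhs + rhs;
        if (Double.isFinite(v) == false) {
          throw new ArithmeticException("double overflow");
        }
        return v;
      }

      // Mirrors the new vector path: a nullable result (boxed here) instead of a dense vector.
      Double[] eval(double[] lhs, double[] rhs) {
        Double[] result = new Double[lhs.length]; // assumes lhs and rhs have equal length
        for (int p = 0; p < lhs.length; p++) {
          try {
            result[p] = processDoubles(lhs[p], rhs[p]);
          } catch (ArithmeticException e) {
            warnings.add(e.getMessage()); // registerException(e) in the generated code
            result[p] = null;             // appendNull() in the generated code
          }
        }
        return result;
      }
    }

Once any position can be null, the result can no longer be a fixed DoubleVector, which is why the overload's return type changes to DoubleBlock and the .asBlock() call in eval(Page) is dropped.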
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java
index 9f3d5aa6d8b10..9104b9b920a93 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java
@@ -4,6 +4,7 @@ // 2.0.
 
 package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic;
 
+import java.lang.ArithmeticException;
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
@@ -50,7 +51,7 @@ public Block eval(Page page) {
       if (rhsVector == null) {
         return eval(page.getPositionCount(), lhsBlock, rhsBlock);
       }
-      return eval(page.getPositionCount(), lhsVector, rhsVector).asBlock();
+      return eval(page.getPositionCount(), lhsVector, rhsVector);
     }
   }
 }
@@ -80,16 +81,26 @@ public DoubleBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhs
           result.appendNull();
           continue position;
         }
-        result.appendDouble(Mul.processDoubles(lhsBlock.getDouble(lhsBlock.getFirstValueIndex(p)), rhsBlock.getDouble(rhsBlock.getFirstValueIndex(p))));
+        try {
+          result.appendDouble(Mul.processDoubles(lhsBlock.getDouble(lhsBlock.getFirstValueIndex(p)), rhsBlock.getDouble(rhsBlock.getFirstValueIndex(p))));
+        } catch (ArithmeticException e) {
+          warnings.registerException(e);
+          result.appendNull();
+        }
       }
       return result.build();
     }
   }
 
-  public DoubleVector eval(int positionCount, DoubleVector lhsVector, DoubleVector rhsVector) {
-    try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) {
+  public DoubleBlock eval(int positionCount, DoubleVector lhsVector, DoubleVector rhsVector) {
+    try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
-        result.appendDouble(p, Mul.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p)));
+        try {
+          result.appendDouble(Mul.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p)));
+        } catch (ArithmeticException e) {
+          warnings.registerException(e);
+          result.appendNull();
+        }
      }
      return result.build();
    }
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDatetimesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDatetimesEvaluator.java
index f87f3c217e16e..e9b540468fb3c 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDatetimesEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDatetimesEvaluator.java
@@ -67,7 +67,7 @@ public LongBlock eval(int positionCount, LongBlock datetimeBlock) {
           continue position;
         }
         try {
-          result.appendLong(Sub.processDatetimes(datetimeBlock.getLong(datetimeBlock.getFirstValueIndex(p)), temporalAmount));
+          result.appendLong(Sub.processDatetimes(datetimeBlock.getLong(datetimeBlock.getFirstValueIndex(p)), this.temporalAmount));
         } catch (ArithmeticException | DateTimeException e) {
           warnings.registerException(e);
           result.appendNull();
@@ -81,7 +81,7 @@ public LongBlock eval(int positionCount, LongVector datetimeVector) {
     try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
         try {
-          result.appendLong(Sub.processDatetimes(datetimeVector.getLong(p), temporalAmount));
+          result.appendLong(Sub.processDatetimes(datetimeVector.getLong(p), this.temporalAmount));
         } catch (ArithmeticException | DateTimeException e) {
           warnings.registerException(e);
           result.appendNull();
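Besides the arithmetic changes, every evaluator hunk in this commit rewrites bare reads of constructor-captured constants (rightValue, temporalAmount, scratch, out, cp, locale, rhs, and so on) as this.-qualified reads. The patch does not state the motivation, but the usual reason in generated code is to keep field reads unambiguous even if the generator later emits a local or parameter with the same name. An illustrative sketch of the hazard being avoided; the names here are hypothetical, not taken from the generated files:

    /** Illustrative only: why generated code this.-qualifies captured constants. */
    class ShadowingSketch {
      private final int number; // constant captured at construction time

      ShadowingSketch(int number) {
        this.number = number;
      }

      int processAll(int[] rows) {
        int sum = 0;
        for (int row : rows) {
          // If the generator ever emits a local named `number` in this scope,
          // a bare `number` read silently binds to the local, not the field.
          int number = row * 2;        // hypothetical generated temporary
          sum += this.number + number; // this.number stays unambiguous either way
        }
        return sum;
      }
    }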
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java
index 291cb5648e213..c9097145df61f 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java
@@ -4,6 +4,7 @@ // 2.0.
 
 package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic;
 
+import java.lang.ArithmeticException;
 import java.lang.IllegalArgumentException;
 import java.lang.Override;
 import java.lang.String;
@@ -50,7 +51,7 @@ public Block eval(Page page) {
       if (rhsVector == null) {
         return eval(page.getPositionCount(), lhsBlock, rhsBlock);
       }
-      return eval(page.getPositionCount(), lhsVector, rhsVector).asBlock();
+      return eval(page.getPositionCount(), lhsVector, rhsVector);
     }
   }
 }
@@ -80,16 +81,26 @@ public DoubleBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhs
           result.appendNull();
           continue position;
         }
-        result.appendDouble(Sub.processDoubles(lhsBlock.getDouble(lhsBlock.getFirstValueIndex(p)), rhsBlock.getDouble(rhsBlock.getFirstValueIndex(p))));
+        try {
+          result.appendDouble(Sub.processDoubles(lhsBlock.getDouble(lhsBlock.getFirstValueIndex(p)), rhsBlock.getDouble(rhsBlock.getFirstValueIndex(p))));
+        } catch (ArithmeticException e) {
+          warnings.registerException(e);
+          result.appendNull();
+        }
       }
       return result.build();
     }
   }
 
-  public DoubleVector eval(int positionCount, DoubleVector lhsVector, DoubleVector rhsVector) {
-    try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) {
+  public DoubleBlock eval(int positionCount, DoubleVector lhsVector, DoubleVector rhsVector) {
+    try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
-        result.appendDouble(p, Sub.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p)));
+        try {
+          result.appendDouble(Sub.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p)));
+        } catch (ArithmeticException e) {
+          warnings.registerException(e);
+          result.appendNull();
+        }
      }
      return result.build();
    }
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsConstantEvaluator.java
index 4a1737f01a245..b2d177f61ef59 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsConstantEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsConstantEvaluator.java
@@ -68,7 +68,7 @@ public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock) {
           result.appendNull();
           continue position;
         }
-        result.appendBoolean(InsensitiveEquals.processConstant(lhsBlock.getBytesRef(lhsBlock.getFirstValueIndex(p), lhsScratch), rhs));
+        result.appendBoolean(InsensitiveEquals.processConstant(lhsBlock.getBytesRef(lhsBlock.getFirstValueIndex(p), lhsScratch), this.rhs));
       }
       return result.build();
     }
@@ -78,7 +78,7 @@ public BooleanVector eval(int positionCount, BytesRefVector lhsVector) {
     try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) {
       BytesRef lhsScratch = new BytesRef();
       position: for (int p = 0; p < positionCount; p++) {
-        result.appendBoolean(p, InsensitiveEquals.processConstant(lhsVector.getBytesRef(p, lhsScratch), rhs));
+        result.appendBoolean(p, InsensitiveEquals.processConstant(lhsVector.getBytesRef(p, lhsScratch), this.rhs));
       }
       return result.build();
     }
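A small idiom worth noticing in the BytesRef-based hunks above: the scratch object (lhsScratch, leftValueScratch) is allocated once per eval call and handed to every getBytesRef read, so the hot loop performs no per-row allocation. A sketch of that scratch-buffer idiom under stated assumptions; ByteRow is a hypothetical stand-in for the real BytesRefVector API:

    /** Sketch of the scratch-object idiom; ByteRow stands in for the real BytesRefVector API. */
    class ScratchSketch {
      /** Hypothetical row reader: may fill and return `scratch` instead of allocating per call. */
      interface ByteRow {
        byte[] get(int position, byte[] scratch);
      }

      static int totalLength(ByteRow rows, int positionCount) {
        byte[] scratch = new byte[16]; // one allocation per eval, reused for every row
        int total = 0;
        for (int p = 0; p < positionCount; p++) {
          byte[] bytes = rows.get(p, scratch); // may be scratch itself, so don't retain it
          total += bytes.length;
        }
        return total;
      }
    }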
public BooleanVector eval(int positionCount, BytesRefVector lhsVector) { try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { BytesRef lhsScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(p, InsensitiveEquals.processConstant(lhsVector.getBytesRef(p, lhsScratch), rhs)); + result.appendBoolean(p, InsensitiveEquals.processConstant(lhsVector.getBytesRef(p, lhsScratch), this.rhs)); } return result.build(); } From cf480a5c2a0708878832d073e28433efb33374ff Mon Sep 17 00:00:00 2001 From: Max Hniebergall <137079448+maxhniebergall@users.noreply.github.com> Date: Fri, 26 Jul 2024 15:51:33 -0400 Subject: [PATCH 062/105] [Inference API] Replace model_id with inference_id in inference API except when stored (#111366) * Replace model_id with inference_id in inference API except when storing ModelConfigs * Update docs/changelog/111366.yaml * replace missed literals in tests --- docs/changelog/111366.yaml | 6 ++++++ .../inference/ModelConfigurations.java | 18 +++++++++++++++--- .../xpack/inference/InferenceCrudIT.java | 6 +++--- .../inference/MockDenseInferenceServiceIT.java | 2 +- .../MockSparseInferenceServiceIT.java | 6 +++--- .../inference/integration/ModelRegistryIT.java | 4 ++-- .../inference/registry/ModelRegistry.java | 10 ++++++++-- 7 files changed, 38 insertions(+), 14 deletions(-) create mode 100644 docs/changelog/111366.yaml diff --git a/docs/changelog/111366.yaml b/docs/changelog/111366.yaml new file mode 100644 index 0000000000000..9cb127077094f --- /dev/null +++ b/docs/changelog/111366.yaml @@ -0,0 +1,6 @@ +pr: 111366 +summary: "[Inference API] Replace `model_id` with `inference_id` in inference API\ + \ except when stored" +area: Machine Learning +type: bug +issues: [] diff --git a/server/src/main/java/org/elasticsearch/inference/ModelConfigurations.java b/server/src/main/java/org/elasticsearch/inference/ModelConfigurations.java index 4b6f436460fdc..0df0378c4a5f4 100644 --- a/server/src/main/java/org/elasticsearch/inference/ModelConfigurations.java +++ b/server/src/main/java/org/elasticsearch/inference/ModelConfigurations.java @@ -20,7 +20,11 @@ public class ModelConfigurations implements ToFilteredXContentObject, VersionedNamedWriteable { - public static final String MODEL_ID = "model_id"; + // Due to refactoring, we now have different field names for the inference ID when it is serialized and stored to an index vs when it + // is returned as part of a GetInferenceModelAction + public static final String INDEX_ONLY_ID_FIELD_NAME = "model_id"; + public static final String INFERENCE_ID_FIELD_NAME = "inference_id"; + public static final String USE_ID_FOR_INDEX = "for_index"; public static final String SERVICE = "service"; public static final String SERVICE_SETTINGS = "service_settings"; public static final String TASK_SETTINGS = "task_settings"; @@ -119,7 +123,11 @@ public TaskSettings getTaskSettings() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(MODEL_ID, inferenceEntityId); + if (params.paramAsBoolean(USE_ID_FOR_INDEX, false)) { + builder.field(INDEX_ONLY_ID_FIELD_NAME, inferenceEntityId); + } else { + builder.field(INFERENCE_ID_FIELD_NAME, inferenceEntityId); + } builder.field(TaskType.NAME, taskType.toString()); builder.field(SERVICE, service); builder.field(SERVICE_SETTINGS, serviceSettings); @@ -131,7 +139,11 @@ public XContentBuilder toXContent(XContentBuilder builder, 
Params params) throws @Override public XContentBuilder toFilteredXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(MODEL_ID, inferenceEntityId); + if (params.paramAsBoolean(USE_ID_FOR_INDEX, false)) { + builder.field(INDEX_ONLY_ID_FIELD_NAME, inferenceEntityId); + } else { + builder.field(INFERENCE_ID_FIELD_NAME, inferenceEntityId); + } builder.field(TaskType.NAME, taskType.toString()); builder.field(SERVICE, service); builder.field(SERVICE_SETTINGS, serviceSettings.getFilteredXContentObject()); diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java index 242f786e95364..7b897432afa83 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java @@ -49,7 +49,7 @@ public void testGet() throws IOException { var singleModel = getModels("se_model_1", TaskType.SPARSE_EMBEDDING); assertThat(singleModel, hasSize(1)); - assertEquals("se_model_1", singleModel.get(0).get("model_id")); + assertEquals("se_model_1", singleModel.get(0).get("inference_id")); for (int i = 0; i < 5; i++) { deleteModel("se_model_" + i, TaskType.SPARSE_EMBEDDING); @@ -82,7 +82,7 @@ public void testGetModelWithAnyTaskType() throws IOException { String inferenceEntityId = "sparse_embedding_model"; putModel(inferenceEntityId, mockSparseServiceModelConfig(), TaskType.SPARSE_EMBEDDING); var singleModel = getModels(inferenceEntityId, TaskType.ANY); - assertEquals(inferenceEntityId, singleModel.get(0).get("model_id")); + assertEquals(inferenceEntityId, singleModel.get(0).get("inference_id")); assertEquals(TaskType.SPARSE_EMBEDDING.toString(), singleModel.get(0).get("task_type")); } @@ -91,7 +91,7 @@ public void testApisWithoutTaskType() throws IOException { String modelId = "no_task_type_in_url"; putModel(modelId, mockSparseServiceModelConfig(TaskType.SPARSE_EMBEDDING)); var singleModel = getModel(modelId); - assertEquals(modelId, singleModel.get("model_id")); + assertEquals(modelId, singleModel.get("inference_id")); assertEquals(TaskType.SPARSE_EMBEDDING.toString(), singleModel.get("task_type")); var inference = inferOnMockService(modelId, List.of(randomAlphaOfLength(10))); diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockDenseInferenceServiceIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockDenseInferenceServiceIT.java index 833b1fd3673fc..5f6bad5687407 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockDenseInferenceServiceIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockDenseInferenceServiceIT.java @@ -22,7 +22,7 @@ public void testMockService() throws IOException { var model = getModels(inferenceEntityId, TaskType.TEXT_EMBEDDING).get(0); for (var modelMap : List.of(putModel, model)) { - assertEquals(inferenceEntityId, modelMap.get("model_id")); + assertEquals(inferenceEntityId, modelMap.get("inference_id")); assertEquals(TaskType.TEXT_EMBEDDING, 
TaskType.fromString((String) modelMap.get("task_type"))); assertEquals("text_embedding_test_service", modelMap.get("service")); } diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockSparseInferenceServiceIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockSparseInferenceServiceIT.java index 97e0641f37c33..24ba2708f5de4 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockSparseInferenceServiceIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockSparseInferenceServiceIT.java @@ -24,7 +24,7 @@ public void testMockService() throws IOException { var model = getModels(inferenceEntityId, TaskType.SPARSE_EMBEDDING).get(0); for (var modelMap : List.of(putModel, model)) { - assertEquals(inferenceEntityId, modelMap.get("model_id")); + assertEquals(inferenceEntityId, modelMap.get("inference_id")); assertEquals(TaskType.SPARSE_EMBEDDING, TaskType.fromString((String) modelMap.get("task_type"))); assertEquals("test_service", modelMap.get("service")); } @@ -77,7 +77,7 @@ public void testMockService_DoesNotReturnHiddenField_InModelResponses() throws I var model = getModels(inferenceEntityId, TaskType.SPARSE_EMBEDDING).get(0); for (var modelMap : List.of(putModel, model)) { - assertEquals(inferenceEntityId, modelMap.get("model_id")); + assertEquals(inferenceEntityId, modelMap.get("inference_id")); assertThat(modelMap.get("service_settings"), is(Map.of("model", "my_model"))); assertEquals(TaskType.SPARSE_EMBEDDING, TaskType.fromString((String) modelMap.get("task_type"))); assertEquals("test_service", modelMap.get("service")); @@ -95,7 +95,7 @@ public void testMockService_DoesReturnHiddenField_InModelResponses() throws IOEx var model = getModels(inferenceEntityId, TaskType.SPARSE_EMBEDDING).get(0); for (var modelMap : List.of(putModel, model)) { - assertEquals(inferenceEntityId, modelMap.get("model_id")); + assertEquals(inferenceEntityId, modelMap.get("inference_id")); assertThat(modelMap.get("service_settings"), is(Map.of("model", "my_model", "hidden_field", "my_hidden_value"))); assertEquals(TaskType.SPARSE_EMBEDDING, TaskType.fromString((String) modelMap.get("task_type"))); assertEquals("test_service", modelMap.get("service")); diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java index 17037e56b2db3..5157683f2dce9 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java @@ -410,7 +410,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field("unknown_field", "foo"); - builder.field(MODEL_ID, getInferenceEntityId()); + builder.field(INDEX_ONLY_ID_FIELD_NAME, getInferenceEntityId()); builder.field(TaskType.NAME, getTaskType().toString()); builder.field(SERVICE, getService()); builder.field(SERVICE_SETTINGS, getServiceSettings()); @@ -436,7 +436,7 @@ private static class 
ModelWithUnknownField extends ModelConfigurations { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field("unknown_field", "foo"); - builder.field(MODEL_ID, getInferenceEntityId()); + builder.field(INDEX_ONLY_ID_FIELD_NAME, getInferenceEntityId()); builder.field(TaskType.NAME, getTaskType().toString()); builder.field(SERVICE, getService()); builder.field(SERVICE_SETTINGS, getServiceSettings()); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java index ae82264a77a0d..a6e4fcae7169f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java @@ -74,7 +74,10 @@ public static UnparsedModel unparsedModelFromMap(ModelConfigMap modelConfigMap) if (modelConfigMap.config() == null) { throw new ElasticsearchStatusException("Missing config map", RestStatus.BAD_REQUEST); } - String inferenceEntityId = ServiceUtils.removeStringOrThrowIfNull(modelConfigMap.config(), ModelConfigurations.MODEL_ID); + String inferenceEntityId = ServiceUtils.removeStringOrThrowIfNull( + modelConfigMap.config(), + ModelConfigurations.INDEX_ONLY_ID_FIELD_NAME + ); String service = ServiceUtils.removeStringOrThrowIfNull(modelConfigMap.config(), ModelConfigurations.SERVICE); String taskTypeStr = ServiceUtils.removeStringOrThrowIfNull(modelConfigMap.config(), TaskType.NAME); TaskType taskType = TaskType.fromString(taskTypeStr); @@ -375,7 +378,10 @@ public void deleteModel(String inferenceEntityId, ActionListener listen private static IndexRequest createIndexRequest(String docId, String indexName, ToXContentObject body, boolean allowOverwriting) { try (XContentBuilder builder = XContentFactory.jsonBuilder()) { var request = new IndexRequest(indexName); - XContentBuilder source = body.toXContent(builder, ToXContent.EMPTY_PARAMS); + XContentBuilder source = body.toXContent( + builder, + new ToXContent.MapParams(Map.of(ModelConfigurations.USE_ID_FOR_INDEX, Boolean.TRUE.toString())) + ); var operation = allowOverwriting ? 
DocWriteRequest.OpType.INDEX : DocWriteRequest.OpType.CREATE; return request.opType(operation).id(docId).source(source); From 73373a581bf1f9dbf7085f26f196959ea1ea559b Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 27 Jul 2024 06:12:16 +1000 Subject: [PATCH 063/105] Mute org.elasticsearch.xpack.searchablesnapshots.AzureSearchableSnapshotsIT org.elasticsearch.xpack.searchablesnapshots.AzureSearchableSnapshotsIT #111279 --- muted-tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 2ba65aceff1d3..4b875b1454b2b 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -116,6 +116,8 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/111308 - class: org.elasticsearch.xpack.ml.integration.DatafeedJobsRestIT issue: https://github.com/elastic/elasticsearch/issues/111319 +- class: org.elasticsearch.xpack.searchablesnapshots.AzureSearchableSnapshotsIT + issue: https://github.com/elastic/elasticsearch/issues/111279 # Examples: # From 9c1e4b59a48811e0960a08287233327582b18ba6 Mon Sep 17 00:00:00 2001 From: Keith Massey Date: Fri, 26 Jul 2024 15:30:40 -0500 Subject: [PATCH 064/105] Adding support for data streams with a match-all template (#111311) --- docs/changelog/111311.yaml | 6 ++ .../test/data_stream/10_basic.yml | 91 +++++++++++++++++++ .../MetadataIndexTemplateService.java | 7 +- .../MetadataIndexTemplateServiceTests.java | 17 ++++ 4 files changed, 120 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/111311.yaml diff --git a/docs/changelog/111311.yaml b/docs/changelog/111311.yaml new file mode 100644 index 0000000000000..5786e11e885e2 --- /dev/null +++ b/docs/changelog/111311.yaml @@ -0,0 +1,6 @@ +pr: 111311 +summary: Adding support for data streams with a match-all template +area: Data streams +type: bug +issues: + - 111204 diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml index 35e3f38d55c26..39558d12b56cd 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml @@ -1150,3 +1150,94 @@ setup: name: simple-data-stream2 - is_true: acknowledged +--- +"Create data stream with match all template": + - requires: + cluster_features: ["gte_v8.16.0"] + reason: "data streams support for match all templates only supported in 8.16" + + - do: + allowed_warnings: + - "index template [match-all-template] has index patterns [*] matching patterns from existing older templates [.monitoring-logstash,.monitoring-es,.monitoring-beats,.monitoring-alerts-7,.monitoring-kibana] with patterns (.monitoring-logstash => [.monitoring-logstash-7-*],.monitoring-es => [.monitoring-es-7-*],.monitoring-beats => [.monitoring-beats-7-*],.monitoring-alerts-7 => [.monitoring-alerts-7],.monitoring-kibana => [.monitoring-kibana-7-*]); this template [match-all-template] will take precedence during new index creation" + indices.put_index_template: + name: match-all-template + body: + index_patterns: [ "*" ] + priority: 1 + data_stream: {} + + - do: + indices.create_data_stream: + name: match-all-data-stream + - is_true: acknowledged + + - do: + cluster.health: + wait_for_status: green + + - do: + indices.get_data_stream: + name: "*" + - match: { data_streams.0.name: match-all-data-stream } + - match: {
data_streams.0.generation: 1 } - length: { data_streams.0.indices: 1 } - match: { data_streams.0.indices.0.index_name: '/\.ds-match-all-data-stream-(\d{4}\.\d{2}\.\d{2}-)?000001/' } - match: { data_streams.0.status: 'GREEN' } - match: { data_streams.0.template: 'match-all-template' } - match: { data_streams.0.hidden: false } + + - do: + indices.delete_data_stream: + name: match-all-data-stream + - is_true: acknowledged + + - do: + indices.delete_index_template: + name: match-all-template + - is_true: acknowledged + +--- +"Create hidden data stream with match all template": + - requires: + cluster_features: [ "gte_v8.16.0" ] + reason: "data streams support for match all templates only supported in 8.16" + - do: + allowed_warnings: + - "index template [match-all-hidden-template] has index patterns [*] matching patterns from existing older templates [.monitoring-logstash,.monitoring-es,.monitoring-beats,.monitoring-alerts-7,.monitoring-kibana] with patterns (.monitoring-logstash => [.monitoring-logstash-7-*],.monitoring-es => [.monitoring-es-7-*],.monitoring-beats => [.monitoring-beats-7-*],.monitoring-alerts-7 => [.monitoring-alerts-7],.monitoring-kibana => [.monitoring-kibana-7-*]); this template [match-all-hidden-template] will take precedence during new index creation" + indices.put_index_template: + name: match-all-hidden-template + body: + index_patterns: [ "*" ] + priority: 1 + data_stream: + hidden: true + - do: + indices.create_data_stream: + name: match-all-hidden-data-stream + - is_true: acknowledged + + - do: + cluster.health: + wait_for_status: green + + - do: + indices.get_data_stream: + name: "*" + - length: { data_streams: 0 } + + - do: + indices.get_data_stream: + name: ['*'] + expand_wildcards: hidden + - length: { data_streams: 1 } + - match: { data_streams.0.name: match-all-hidden-data-stream } + - match: { data_streams.0.hidden: true } + + - do: + indices.delete_data_stream: + name: match-all-hidden-data-stream + - is_true: acknowledged + + - do: + indices.delete_index_template: + name: match-all-hidden-template diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java index e9658e71f895e..a84c1d4a782f4 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java @@ -1309,7 +1309,12 @@ static List> findV2CandidateTemplates(Met for (Map.Entry entry : metadata.templatesV2().entrySet()) { final String name = entry.getKey(); final ComposableIndexTemplate template = entry.getValue(); - if (isHidden == false) { + /* + * We do not ordinarily return match-all templates for hidden indices. But all backing indices for data streams are hidden, + * and we do want to return even match-all templates for those. Not doing so can result in a situation where a data stream is + * built with a template that none of its indices match.
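+ * For example, the yaml test above creates the data stream match-all-data-stream whose backing index (something like + * .ds-match-all-data-stream-2024.07.26-000001, the date being illustrative) is hidden, yet it must still match the stream's match-all template.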
+ */ + if (isHidden == false || template.getDataStreamTemplate() != null) { final boolean matched = template.indexPatterns().stream().anyMatch(patternMatchPredicate); if (matched) { candidates.add(Tuple.tuple(name, template)); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateServiceTests.java index d9c01953fbaab..eb00b8bf59594 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateServiceTests.java @@ -2158,6 +2158,23 @@ public void testDataStreamsUsingTemplates() throws Exception { MetadataIndexTemplateService.innerRemoveIndexTemplateV2(stateWithTwoTemplates, "logs"); } + public void testDataStreamsUsingMatchAllTemplate() throws Exception { + ClusterState state = ClusterState.EMPTY_STATE; + final MetadataIndexTemplateService service = getMetadataIndexTemplateService(); + + ComposableIndexTemplate template = ComposableIndexTemplate.builder() + .indexPatterns(Collections.singletonList("*")) + .priority(100L) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build(); + final String templateName = "all-data-streams-template"; + state = service.addIndexTemplateV2(state, false, templateName, template); + // When creating a data stream, we'll look for templates. The data stream is not hidden + assertThat(MetadataIndexTemplateService.findV2Template(state.metadata(), "some-data-stream", false), equalTo(templateName)); + // The write index for a data stream will be a hidden index. We need to make sure it matches the same template: + assertThat(MetadataIndexTemplateService.findV2Template(state.metadata(), "some-data-stream", true), equalTo(templateName)); + } + public void testRemovingHigherOrderTemplateOfDataStreamWithMultipleTemplates() throws Exception { ClusterState state = ClusterState.EMPTY_STATE; final MetadataIndexTemplateService service = getMetadataIndexTemplateService(); From 0eba29ea6748963110438f407d5dacd233858c42 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 27 Jul 2024 06:33:51 +1000 Subject: [PATCH 065/105] Mute org.elasticsearch.repositories.azure.RepositoryAzureClientYamlTestSuiteIT org.elasticsearch.repositories.azure.RepositoryAzureClientYamlTestSuiteIT #111345 --- muted-tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 4b875b1454b2b..0d31ecb149c22 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -118,6 +118,8 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/111319 - class: org.elasticsearch.xpack.searchablesnapshots.AzureSearchableSnapshotsIT issue: https://github.com/elastic/elasticsearch/issues/111279 +- class: org.elasticsearch.repositories.azure.RepositoryAzureClientYamlTestSuiteIT + issue: https://github.com/elastic/elasticsearch/issues/111345 # Examples: # From 5be31acee5e6a8bf2043fd0cee1ad0dd3fafc5c4 Mon Sep 17 00:00:00 2001 From: James Baiera Date: Fri, 26 Jul 2024 16:35:13 -0400 Subject: [PATCH 066/105] Fix enrich policy runner exception handling on empty segments response (#111290) * Fix enrich segment action listener exception logic * Update docs/changelog/111290.yaml --- docs/changelog/111290.yaml | 5 + .../segments/IndicesSegmentResponse.java | 2 +- .../xpack/enrich/EnrichPolicyRunner.java | 104 +++++--- 
.../xpack/enrich/EnrichPolicyRunnerTests.java | 251 +++++++++++++++++- 4 files changed, 325 insertions(+), 37 deletions(-) create mode 100644 docs/changelog/111290.yaml diff --git a/docs/changelog/111290.yaml b/docs/changelog/111290.yaml new file mode 100644 index 0000000000000..efcb01a4aedf9 --- /dev/null +++ b/docs/changelog/111290.yaml @@ -0,0 +1,5 @@ +pr: 111290 +summary: Fix enrich policy runner exception handling on empty segments response +area: Ingest Node +type: bug +issues: [] diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponse.java index bd12cfdbc7962..429ebe365bbe1 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponse.java @@ -36,7 +36,7 @@ public class IndicesSegmentResponse extends ChunkedBroadcastResponse { private volatile Map indicesSegments; - IndicesSegmentResponse( + public IndicesSegmentResponse( ShardSegments[] shards, int totalShards, int successfulShards, diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunner.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunner.java index ca00f49100279..810fd03f061ea 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunner.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunner.java @@ -25,11 +25,11 @@ import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.admin.indices.segments.IndexSegments; import org.elasticsearch.action.admin.indices.segments.IndexShardSegments; -import org.elasticsearch.action.admin.indices.segments.IndicesSegmentResponse; import org.elasticsearch.action.admin.indices.segments.IndicesSegmentsRequest; import org.elasticsearch.action.admin.indices.segments.ShardSegments; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; import org.elasticsearch.action.bulk.BulkItemResponse; +import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.FilterClient; import org.elasticsearch.client.internal.OriginSettingClient; @@ -575,48 +575,82 @@ private void refreshEnrichIndex(final String destinationIndexName, final int att protected void ensureSingleSegment(final String destinationIndexName, final int attempt) { enrichOriginClient().admin() .indices() - .segments(new IndicesSegmentsRequest(destinationIndexName), new DelegatingActionListener<>(listener) { - @Override - public void onResponse(IndicesSegmentResponse indicesSegmentResponse) { - IndexSegments indexSegments = indicesSegmentResponse.getIndices().get(destinationIndexName); - if (indexSegments == null) { + .segments(new IndicesSegmentsRequest(destinationIndexName), listener.delegateFailureAndWrap((l, indicesSegmentResponse) -> { + int failedShards = indicesSegmentResponse.getFailedShards(); + if (failedShards > 0) { + // Encountered a problem while querying the segments for the enrich index. Try and surface the problem in the log. + logger.warn( + "Policy [{}]: Encountered [{}] shard level failures while querying the segments for enrich index [{}]. 
" + + "Turn on DEBUG logging for details.", + policyName, + failedShards, + enrichIndexName + ); + if (logger.isDebugEnabled()) { + DefaultShardOperationFailedException[] shardFailures = indicesSegmentResponse.getShardFailures(); + int failureNumber = 1; + String logPrefix = "Policy [" + policyName + "]: Encountered shard failure ["; + String logSuffix = " of " + + shardFailures.length + + "] while querying segments for enrich index [" + + enrichIndexName + + "]. Shard ["; + for (DefaultShardOperationFailedException shardFailure : shardFailures) { + logger.debug( + logPrefix + failureNumber + logSuffix + shardFailure.index() + "][" + shardFailure.shardId() + "]", + shardFailure.getCause() + ); + failureNumber++; + } + } + } + IndexSegments indexSegments = indicesSegmentResponse.getIndices().get(destinationIndexName); + if (indexSegments == null) { + if (indicesSegmentResponse.getShardFailures().length == 0) { throw new ElasticsearchException( "Could not locate segment information for newly created index [{}]", destinationIndexName ); + } else { + DefaultShardOperationFailedException shardFailure = indicesSegmentResponse.getShardFailures()[0]; + throw new ElasticsearchException( + "Could not obtain segment information for newly created index [{}]; shard info [{}][{}]", + shardFailure.getCause(), + destinationIndexName, + shardFailure.index(), + shardFailure.shardId() + ); } - Map indexShards = indexSegments.getShards(); - assert indexShards.size() == 1 : "Expected enrich index to contain only one shard"; - ShardSegments[] shardSegments = indexShards.get(0).shards(); - assert shardSegments.length == 1 : "Expected enrich index to contain no replicas at this point"; - ShardSegments primarySegments = shardSegments[0]; - if (primarySegments.getSegments().size() > 1) { - int nextAttempt = attempt + 1; - if (nextAttempt > maxForceMergeAttempts) { - delegate.onFailure( - new ElasticsearchException( - "Force merging index [{}] attempted [{}] times but did not result in one segment.", - destinationIndexName, - attempt, - maxForceMergeAttempts - ) - ); - } else { - logger.debug( - "Policy [{}]: Force merge result contains more than one segment [{}], retrying (attempt {}/{})", - policyName, - primarySegments.getSegments().size(), - nextAttempt, - maxForceMergeAttempts - ); - forceMergeEnrichIndex(destinationIndexName, nextAttempt); - } + } + Map indexShards = indexSegments.getShards(); + assert indexShards.size() == 1 : "Expected enrich index to contain only one shard"; + ShardSegments[] shardSegments = indexShards.get(0).shards(); + assert shardSegments.length == 1 : "Expected enrich index to contain no replicas at this point"; + ShardSegments primarySegments = shardSegments[0]; + if (primarySegments.getSegments().size() > 1) { + int nextAttempt = attempt + 1; + if (nextAttempt > maxForceMergeAttempts) { + throw new ElasticsearchException( + "Force merging index [{}] attempted [{}] times but did not result in one segment.", + destinationIndexName, + attempt, + maxForceMergeAttempts + ); } else { - // Force merge down to one segment successful - setIndexReadOnly(destinationIndexName); + logger.debug( + "Policy [{}]: Force merge result contains more than one segment [{}], retrying (attempt {}/{})", + policyName, + primarySegments.getSegments().size(), + nextAttempt, + maxForceMergeAttempts + ); + forceMergeEnrichIndex(destinationIndexName, nextAttempt); } + } else { + // Force merge down to one segment successful + setIndexReadOnly(destinationIndexName); } - }); + })); } private void 
setIndexReadOnly(final String destinationIndexName) { diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java index 8ce1e7f350ccb..7ba3b356d6015 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java @@ -33,6 +33,7 @@ import org.elasticsearch.action.admin.indices.settings.put.TransportUpdateSettingsAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.internal.Client; @@ -2048,6 +2049,254 @@ protected void ensureSingleSegment(String destinationIndexName, int attempt) { ensureEnrichIndexIsReadOnly(createdEnrichIndex); } + public void testRunnerWithEmptySegmentsResponse() throws Exception { + final String sourceIndex = "source-index"; + DocWriteResponse indexRequest = client().index(new IndexRequest().index(sourceIndex).id("id").source(""" + { + "field1": "value1", + "field2": 2, + "field3": "ignored", + "field4": "ignored", + "field5": "value5" + }""", XContentType.JSON).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)).actionGet(); + assertEquals(RestStatus.CREATED, indexRequest.status()); + + assertResponse( + client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), + sourceSearchResponse -> { + assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); + assertNotNull(sourceDocMap); + assertThat(sourceDocMap.get("field1"), is(equalTo("value1"))); + assertThat(sourceDocMap.get("field2"), is(equalTo(2))); + assertThat(sourceDocMap.get("field3"), is(equalTo("ignored"))); + assertThat(sourceDocMap.get("field4"), is(equalTo("ignored"))); + assertThat(sourceDocMap.get("field5"), is(equalTo("value5"))); + } + ); + List enrichFields = List.of("field2", "field5"); + EnrichPolicy policy = new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, List.of(sourceIndex), "field1", enrichFields); + String policyName = "test1"; + + final long createTime = randomNonNegativeLong(); + String createdEnrichIndex = ".enrich-test1-" + createTime; + final AtomicReference exception = new AtomicReference<>(); + final CountDownLatch latch = new CountDownLatch(1); + ActionListener listener = createTestListener(latch, exception::set); + ClusterService clusterService = getInstanceFromNode(ClusterService.class); + IndexNameExpressionResolver resolver = getInstanceFromNode(IndexNameExpressionResolver.class); + Task asyncTask = testTaskManager.register("enrich", "policy_execution", new TaskAwareRequest() { + @Override + public void setParentTask(TaskId taskId) {} + + @Override + public void setRequestId(long requestId) {} + + @Override + public TaskId getParentTask() { + return TaskId.EMPTY_TASK_ID; + } + + @Override + public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { + return new ExecuteEnrichPolicyTask(id, type, action, getDescription(), parentTaskId, headers); + } + + @Override + public String getDescription() { + return policyName; 
+ } + }); + ExecuteEnrichPolicyTask task = ((ExecuteEnrichPolicyTask) asyncTask); + // The executor would wrap the listener in order to clean up the task in the + // task manager, but we're just testing the runner, so we make sure to clean + // up after ourselves. + ActionListener wrappedListener = ActionListener.runBefore( + listener, + () -> testTaskManager.unregister(task) + ); + + // Wrap the client so that when we receive the indices segments action, we intercept the request and complete it on another thread + // with an empty segments response. + Client client = new FilterClient(client()) { + @Override + protected void doExecute( + ActionType action, + Request request, + ActionListener listener + ) { + if (action.equals(IndicesSegmentsAction.INSTANCE)) { + testThreadPool.generic().execute(() -> { + @SuppressWarnings("unchecked") + ActionListener castListener = ((ActionListener) listener); + castListener.onResponse(new IndicesSegmentResponse(new ShardSegments[0], 0, 0, 0, List.of())); + }); + } else { + super.doExecute(action, request, listener); + } + } + }; + + EnrichPolicyRunner enrichPolicyRunner = new EnrichPolicyRunner( + policyName, + policy, + task, + wrappedListener, + clusterService, + getInstanceFromNode(IndicesService.class), + client, + resolver, + createdEnrichIndex, + randomIntBetween(1, 10000), + randomIntBetween(3, 10) + ); + + logger.info("Starting policy run"); + enrichPolicyRunner.run(); + if (latch.await(1, TimeUnit.MINUTES) == false) { + fail("Timeout while waiting for runner to complete"); + } + Exception exceptionThrown = exception.get(); + if (exceptionThrown == null) { + fail("Expected exception to be thrown from segment api"); + } + + // Validate exception information + assertThat(exceptionThrown, instanceOf(ElasticsearchException.class)); + assertThat(exceptionThrown.getMessage(), containsString("Could not locate segment information for newly created index")); + } + + public void testRunnerWithShardFailuresInSegmentResponse() throws Exception { + final String sourceIndex = "source-index"; + DocWriteResponse indexRequest = client().index(new IndexRequest().index(sourceIndex).id("id").source(""" + { + "field1": "value1", + "field2": 2, + "field3": "ignored", + "field4": "ignored", + "field5": "value5" + }""", XContentType.JSON).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)).actionGet(); + assertEquals(RestStatus.CREATED, indexRequest.status()); + + assertResponse( + client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), + sourceSearchResponse -> { + assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); + assertNotNull(sourceDocMap); + assertThat(sourceDocMap.get("field1"), is(equalTo("value1"))); + assertThat(sourceDocMap.get("field2"), is(equalTo(2))); + assertThat(sourceDocMap.get("field3"), is(equalTo("ignored"))); + assertThat(sourceDocMap.get("field4"), is(equalTo("ignored"))); + assertThat(sourceDocMap.get("field5"), is(equalTo("value5"))); + } + ); + List enrichFields = List.of("field2", "field5"); + EnrichPolicy policy = new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, List.of(sourceIndex), "field1", enrichFields); + String policyName = "test1"; + + final long createTime = randomNonNegativeLong(); + String createdEnrichIndex = ".enrich-test1-" + createTime; + final AtomicReference exception = new AtomicReference<>(); + final CountDownLatch latch = new CountDownLatch(1); 
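+ // (Harness note: the runner completes asynchronously, so the listener built below stores any failure in the AtomicReference via exception::set and counts down this latch; the test thread later blocks on latch.await(...) before asserting on the captured exception.)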
+ ActionListener listener = createTestListener(latch, exception::set); + ClusterService clusterService = getInstanceFromNode(ClusterService.class); + IndexNameExpressionResolver resolver = getInstanceFromNode(IndexNameExpressionResolver.class); + Task asyncTask = testTaskManager.register("enrich", "policy_execution", new TaskAwareRequest() { + @Override + public void setParentTask(TaskId taskId) {} + + @Override + public void setRequestId(long requestId) {} + + @Override + public TaskId getParentTask() { + return TaskId.EMPTY_TASK_ID; + } + + @Override + public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { + return new ExecuteEnrichPolicyTask(id, type, action, getDescription(), parentTaskId, headers); + } + + @Override + public String getDescription() { + return policyName; + } + }); + ExecuteEnrichPolicyTask task = ((ExecuteEnrichPolicyTask) asyncTask); + // The executor would wrap the listener in order to clean up the task in the + // task manager, but we're just testing the runner, so we make sure to clean + // up after ourselves. + ActionListener wrappedListener = ActionListener.runBefore( + listener, + () -> testTaskManager.unregister(task) + ); + + // Wrap the client so that when we receive the indices segments action, we intercept the request and complete it on another thread + // with a failed segments response. + Client client = new FilterClient(client()) { + @Override + protected void doExecute( + ActionType action, + Request request, + ActionListener listener + ) { + if (action.equals(IndicesSegmentsAction.INSTANCE)) { + testThreadPool.generic().execute(() -> { + @SuppressWarnings("unchecked") + ActionListener castListener = ((ActionListener) listener); + castListener.onResponse( + new IndicesSegmentResponse( + new ShardSegments[0], + 0, + 0, + 3, + List.of( + new DefaultShardOperationFailedException(createdEnrichIndex, 1, new ElasticsearchException("failure1")), + new DefaultShardOperationFailedException(createdEnrichIndex, 2, new ElasticsearchException("failure2")), + new DefaultShardOperationFailedException(createdEnrichIndex, 3, new ElasticsearchException("failure3")) + ) + ) + ); + }); + } else { + super.doExecute(action, request, listener); + } + } + }; + + EnrichPolicyRunner enrichPolicyRunner = new EnrichPolicyRunner( + policyName, + policy, + task, + wrappedListener, + clusterService, + getInstanceFromNode(IndicesService.class), + client, + resolver, + createdEnrichIndex, + randomIntBetween(1, 10000), + randomIntBetween(3, 10) + ); + + logger.info("Starting policy run"); + enrichPolicyRunner.run(); + if (latch.await(1, TimeUnit.MINUTES) == false) { + fail("Timeout while waiting for runner to complete"); + } + Exception exceptionThrown = exception.get(); + if (exceptionThrown == null) { + fail("Expected exception to be thrown from segment api"); + } + + // Validate exception information + assertThat(exceptionThrown, instanceOf(ElasticsearchException.class)); + assertThat(exceptionThrown.getMessage(), containsString("Could not obtain segment information for newly created index")); + assertThat(exceptionThrown.getCause(), instanceOf(ElasticsearchException.class)); + assertThat(exceptionThrown.getCause().getMessage(), containsString("failure1")); + } + public void testRunnerCancel() throws Exception { final String sourceIndex = "source-index"; DocWriteResponse indexRequest = client().index(new IndexRequest().index(sourceIndex).id("id").source(""" @@ -2495,7 +2744,7 @@ private ActionListener createTestListener( final CountDownLatch
latch, final Consumer exceptionConsumer ) { - return new LatchedActionListener<>(ActionListener.wrap((r) -> logger.info("Run complete"), exceptionConsumer), latch); + return new LatchedActionListener<>(ActionListener.wrap((r) -> logger.debug("Run complete"), exceptionConsumer), latch); } private void validateMappingMetadata(Map mapping, String policyName, EnrichPolicy policy) { From ae5a9eec40cd712afb464db14f91711d1d4ab47e Mon Sep 17 00:00:00 2001 From: Keith Massey Date: Fri, 26 Jul 2024 16:34:14 -0500 Subject: [PATCH 067/105] Fixing RolloverIT.testRolloverWithClosedWriteIndex() (#111370) --- muted-tests.yml | 3 --- .../action/admin/indices/rollover/RolloverIT.java | 2 +- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 0d31ecb149c22..14425484939d6 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -44,9 +44,6 @@ tests: - class: "org.elasticsearch.xpack.test.rest.XPackRestIT" issue: "https://github.com/elastic/elasticsearch/issues/109687" method: "test {p0=sql/translate/Translate SQL}" -- class: "org.elasticsearch.action.admin.indices.rollover.RolloverIT" - issue: "https://github.com/elastic/elasticsearch/issues/110034" - method: "testRolloverWithClosedWriteIndex" - class: org.elasticsearch.index.store.FsDirectoryFactoryTests method: testStoreDirectory issue: https://github.com/elastic/elasticsearch/issues/110210 diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java index 4d52383bfc4e1..a568424300e75 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java @@ -665,7 +665,7 @@ public void testRolloverWithClosedWriteIndex() throws Exception { assertAcked(prepareCreate(openNonwriteIndex).addAlias(new Alias(aliasName)).get()); assertAcked(prepareCreate(closedIndex).addAlias(new Alias(aliasName)).get()); assertAcked(prepareCreate(writeIndexPrefix + "000001").addAlias(new Alias(aliasName).writeIndex(true)).get()); - + ensureGreen(openNonwriteIndex, closedIndex, writeIndexPrefix + "000001"); index(closedIndex, null, "{\"foo\": \"bar\"}"); index(aliasName, null, "{\"foo\": \"bar\"}"); index(aliasName, null, "{\"foo\": \"bar\"}"); From 1aa5b2facee01d53f4fd39433e4128f14f7d478b Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Fri, 26 Jul 2024 18:28:44 -0400 Subject: [PATCH 068/105] Fix geoip processor isp_organization_name property and docs (#111372) --- docs/reference/ingest/processors/geoip.asciidoc | 4 ++-- .../java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java | 4 ++-- .../org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/reference/ingest/processors/geoip.asciidoc b/docs/reference/ingest/processors/geoip.asciidoc index 738ac234d6162..230146d483144 100644 --- a/docs/reference/ingest/processors/geoip.asciidoc +++ b/docs/reference/ingest/processors/geoip.asciidoc @@ -64,12 +64,12 @@ depend on what has been found and which properties were configured in `propertie * If the GeoIP2 Domain database is used, then the following fields may be added under the `target_field`: `ip`, and `domain`. The fields actually added depend on what has been found and which properties were configured in `properties`. 
* If the GeoIP2 ISP database is used, then the following fields may be added under the `target_field`: `ip`, `asn`, -`organization_name`, `network`, `isp`, `isp_organization`, `mobile_country_code`, and `mobile_network_code`. The fields actually added +`organization_name`, `network`, `isp`, `isp_organization_name`, `mobile_country_code`, and `mobile_network_code`. The fields actually added depend on what has been found and which properties were configured in `properties`. * If the GeoIP2 Enterprise database is used, then the following fields may be added under the `target_field`: `ip`, `country_iso_code`, `country_name`, `continent_code`, `continent_name`, `region_iso_code`, `region_name`, `city_name`, `timezone`, `location`, `asn`, `organization_name`, `network`, `hosting_provider`, `tor_exit_node`, `anonymous_vpn`, `anonymous`, `public_proxy`, -`residential_proxy`, `domain`, `isp`, `isp_organization`, `mobile_country_code`, `mobile_network_code`, `user_type`, and +`residential_proxy`, `domain`, `isp`, `isp_organization_name`, `mobile_country_code`, `mobile_network_code`, `user_type`, and `connection_type`. The fields actually added depend on what has been found and which properties were configured in `properties`. preview::["Do not use the GeoIP2 Anonymous IP, GeoIP2 Connection Type, GeoIP2 Domain, GeoIP2 ISP, and GeoIP2 Enterprise databases in production environments. This functionality is in technical preview and may be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features."] diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java index e39705a71f56c..82b9e930280b7 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java @@ -592,7 +592,7 @@ private Map retrieveEnterpriseGeoData(GeoIpDatabase geoIpDatabas } case ISP_ORGANIZATION_NAME -> { if (ispOrganization != null) { - geoData.put("isp_organization", ispOrganization); + geoData.put("isp_organization_name", ispOrganization); } } case MOBILE_COUNTRY_CODE -> { @@ -660,7 +660,7 @@ private Map retrieveIspGeoData(GeoIpDatabase geoIpDatabase, Inet } case ISP_ORGANIZATION_NAME -> { if (ispOrganization != null) { - geoData.put("isp_organization", ispOrganization); + geoData.put("isp_organization_name", ispOrganization); } } case MOBILE_COUNTRY_CODE -> { diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java index 6276155d9f083..87d1881a9e743 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java @@ -463,7 +463,7 @@ public void testEnterprise() throws Exception { assertThat(geoData.get("residential_proxy"), equalTo(false)); assertThat(geoData.get("domain"), equalTo("frpt.net")); assertThat(geoData.get("isp"), equalTo("Fairpoint Communications")); - assertThat(geoData.get("isp_organization"), equalTo("Fairpoint Communications")); + assertThat(geoData.get("isp_organization_name"), equalTo("Fairpoint Communications")); assertThat(geoData.get("user_type"), equalTo("residential")); 
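+ // (The isp_organization_name key asserted above tracks the processor output field renamed in GeoIpProcessor.java earlier in this patch.)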
assertThat(geoData.get("connection_type"), equalTo("Cable/DSL")); } @@ -497,7 +497,7 @@ public void testIsp() throws Exception { assertThat(geoData.get("organization_name"), equalTo("CELLCO-PART")); assertThat(geoData.get("network"), equalTo("149.101.100.0/28")); assertThat(geoData.get("isp"), equalTo("Verizon Wireless")); - assertThat(geoData.get("isp_organization"), equalTo("Verizon Wireless")); + assertThat(geoData.get("isp_organization_name"), equalTo("Verizon Wireless")); assertThat(geoData.get("mobile_network_code"), equalTo("004")); assertThat(geoData.get("mobile_country_code"), equalTo("310")); } From 89e31fe7f0b9bbb555e9a74d5d3aff50929e1fa9 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 27 Jul 2024 14:32:28 +1000 Subject: [PATCH 069/105] Mute org.elasticsearch.xpack.repositories.metering.azure.AzureRepositoriesMeteringIT org.elasticsearch.xpack.repositories.metering.azure.AzureRepositoriesMeteringIT #111307 --- muted-tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 14425484939d6..9e501fe442876 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -117,6 +117,8 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/111279 - class: org.elasticsearch.repositories.azure.RepositoryAzureClientYamlTestSuiteIT issue: https://github.com/elastic/elasticsearch/issues/111345 +- class: org.elasticsearch.xpack.repositories.metering.azure.AzureRepositoriesMeteringIT + issue: https://github.com/elastic/elasticsearch/issues/111307 # Examples: # From abc9c4848300ef4c1f1850ed0a706d604fc9430d Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 27 Jul 2024 22:30:54 +1000 Subject: [PATCH 070/105] Mute org.elasticsearch.xpack.snapshotbasedrecoveries.recovery.AzureSnapshotBasedRecoveryIT testRecoveryUsingSnapshots #111377 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 9e501fe442876..99e9ba7f8aa48 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -119,6 +119,9 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/111345 - class: org.elasticsearch.xpack.repositories.metering.azure.AzureRepositoriesMeteringIT issue: https://github.com/elastic/elasticsearch/issues/111307 +- class: org.elasticsearch.xpack.snapshotbasedrecoveries.recovery.AzureSnapshotBasedRecoveryIT + method: testRecoveryUsingSnapshots + issue: https://github.com/elastic/elasticsearch/issues/111377 # Examples: # From 61a051d6456f133895275c81fac42795590ab645 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 27 Jul 2024 22:31:04 +1000 Subject: [PATCH 071/105] Mute org.elasticsearch.repositories.blobstore.testkit.AzureSnapshotRepoTestKitIT testRepositoryAnalysis #111280 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 99e9ba7f8aa48..2fb9f4a46a2b4 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -122,6 +122,9 @@ tests: - class: org.elasticsearch.xpack.snapshotbasedrecoveries.recovery.AzureSnapshotBasedRecoveryIT method: testRecoveryUsingSnapshots issue: https://github.com/elastic/elasticsearch/issues/111377 +- class: org.elasticsearch.repositories.blobstore.testkit.AzureSnapshotRepoTestKitIT + method: testRepositoryAnalysis + issue: https://github.com/elastic/elasticsearch/issues/111280 # Examples: # From 
7c564feab718fb12c4e7c7db202c04471e9310e7 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 27 Jul 2024 23:19:17 +1000 Subject: [PATCH 072/105] Mute org.elasticsearch.xpack.esql.analysis.VerifierTests testMatchFilter #111380 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 2fb9f4a46a2b4..221c638160359 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -125,6 +125,9 @@ tests: - class: org.elasticsearch.repositories.blobstore.testkit.AzureSnapshotRepoTestKitIT method: testRepositoryAnalysis issue: https://github.com/elastic/elasticsearch/issues/111280 +- class: org.elasticsearch.xpack.esql.analysis.VerifierTests + method: testMatchFilter + issue: https://github.com/elastic/elasticsearch/issues/111380 # Examples: # From 213c49141f59b6873211c214d4fec4d0e8284c29 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sun, 28 Jul 2024 01:13:38 +1000 Subject: [PATCH 073/105] Mute org.elasticsearch.xpack.ml.integration.InferenceIngestInputConfigIT testIngestWithInputFields #111383 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 221c638160359..cf0cd07f99e89 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -128,6 +128,9 @@ tests: - class: org.elasticsearch.xpack.esql.analysis.VerifierTests method: testMatchFilter issue: https://github.com/elastic/elasticsearch/issues/111380 +- class: org.elasticsearch.xpack.ml.integration.InferenceIngestInputConfigIT + method: testIngestWithInputFields + issue: https://github.com/elastic/elasticsearch/issues/111383 # Examples: # From c44bf1f7f624a4d984e5d444605712c81bb7ecf9 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sun, 28 Jul 2024 01:13:49 +1000 Subject: [PATCH 074/105] Mute org.elasticsearch.xpack.ml.integration.InferenceIngestInputConfigIT testIngestWithMultipleInputFields #111384 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index cf0cd07f99e89..b4ed662a72f87 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -131,6 +131,9 @@ tests: - class: org.elasticsearch.xpack.ml.integration.InferenceIngestInputConfigIT method: testIngestWithInputFields issue: https://github.com/elastic/elasticsearch/issues/111383 +- class: org.elasticsearch.xpack.ml.integration.InferenceIngestInputConfigIT + method: testIngestWithMultipleInputFields + issue: https://github.com/elastic/elasticsearch/issues/111384 # Examples: # From 32d0418ec24e650db10780417fae68fb6d56a534 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sun, 28 Jul 2024 01:58:04 +1000 Subject: [PATCH 075/105] Mute org.elasticsearch.xpack.restart.FullClusterRestartIT testTransformLegacyTemplateCleanup {cluster=OLD} #111385 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index b4ed662a72f87..4553fb1d78ec2 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -134,6 +134,9 @@ tests: - class: org.elasticsearch.xpack.ml.integration.InferenceIngestInputConfigIT method: testIngestWithMultipleInputFields issue: https://github.com/elastic/elasticsearch/issues/111384 +- class: org.elasticsearch.xpack.restart.FullClusterRestartIT + method: testTransformLegacyTemplateCleanup {cluster=OLD} + issue: 
https://github.com/elastic/elasticsearch/issues/111385 # Examples: # From 67b576cfec504b28bf14e393706fa814b2c0091d Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Mon, 29 Jul 2024 14:44:30 +1000 Subject: [PATCH 076/105] Mute org.elasticsearch.xpack.restart.FullClusterRestartIT testRollupAfterRestart {cluster=OLD} #111392 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 4553fb1d78ec2..368d2d8bb2a21 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -137,6 +137,9 @@ tests: - class: org.elasticsearch.xpack.restart.FullClusterRestartIT method: testTransformLegacyTemplateCleanup {cluster=OLD} issue: https://github.com/elastic/elasticsearch/issues/111385 +- class: org.elasticsearch.xpack.restart.FullClusterRestartIT + method: testRollupAfterRestart {cluster=OLD} + issue: https://github.com/elastic/elasticsearch/issues/111392 # Examples: # From 7db5d40de72f9dbffe307d74c5169d5dcb71ba6c Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Mon, 29 Jul 2024 14:44:58 +1000 Subject: [PATCH 077/105] Mute org.elasticsearch.xpack.restart.FullClusterRestartIT testRollupAfterRestart {cluster=UPGRADED} #111393 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 368d2d8bb2a21..66646f071ec67 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -140,6 +140,9 @@ tests: - class: org.elasticsearch.xpack.restart.FullClusterRestartIT method: testRollupAfterRestart {cluster=OLD} issue: https://github.com/elastic/elasticsearch/issues/111392 +- class: org.elasticsearch.xpack.restart.FullClusterRestartIT + method: testRollupAfterRestart {cluster=UPGRADED} + issue: https://github.com/elastic/elasticsearch/issues/111393 # Examples: # From c74a127f781c2f1de2d448465157188751c47f94 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Mon, 29 Jul 2024 15:31:24 +1000 Subject: [PATCH 078/105] Mute org.elasticsearch.xpack.security.authc.oidc.OpenIdConnectAuthIT testAuthenticateWithCodeFlowAndClientPost #111396 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 66646f071ec67..8ab76d6a4dd2b 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -143,6 +143,9 @@ tests: - class: org.elasticsearch.xpack.restart.FullClusterRestartIT method: testRollupAfterRestart {cluster=UPGRADED} issue: https://github.com/elastic/elasticsearch/issues/111393 +- class: org.elasticsearch.xpack.security.authc.oidc.OpenIdConnectAuthIT + method: testAuthenticateWithCodeFlowAndClientPost + issue: https://github.com/elastic/elasticsearch/issues/111396 # Examples: # From 37b65f1fba6c87c488cc331b8be81dabc14b1955 Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 29 Jul 2024 07:08:22 +0100 Subject: [PATCH 079/105] Remove throttling from repo analyzer tests (#111390) In practice most of the time in these tests is spent on throttling, especially on the read side, and with some seeds this might add up to tens of seconds of delays. The throttling is not relevant to these tests, so this commit removes it. With this change, the analysis now reliably runs quickly enough to use `safeAwait()` and friends to capture the results. 
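As a side note, the unthrottling pattern is identical everywhere it appears in this change. Here is a minimal sketch, assuming only the setting constants visible in the diff below (for these rate-limit settings a value of ByteSizeValue.ZERO means "no throttling"):

    // Sketch: zero out the node-level recovery limit and the repository read/write limits
    Settings unthrottled = Settings.builder()
        .put(INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey(), ByteSizeValue.ZERO) // recovery throttle
        .put(MAX_SNAPSHOT_BYTES_PER_SEC.getKey(), ByteSizeValue.ZERO) // repository write (snapshot) throttle
        .put(MAX_RESTORE_BYTES_PER_SEC.getKey(), ByteSizeValue.ZERO) // repository read (restore) throttle
        .build();

In the actual patch the first setting goes into the node settings and the other two into the repository settings, as shown below.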
Closes #111343 --- .../testkit/RepositoryAnalysisFailureIT.java | 115 ++++++++++++------ .../testkit/RepositoryAnalysisSuccessIT.java | 39 +++++- 2 files changed, 112 insertions(+), 42 deletions(-) diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/RepositoryAnalysisFailureIT.java b/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/RepositoryAnalysisFailureIT.java index f0d4eaec7c848..73a90f247810e 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/RepositoryAnalysisFailureIT.java +++ b/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/RepositoryAnalysisFailureIT.java @@ -22,6 +22,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; @@ -40,7 +41,6 @@ import org.elasticsearch.repositories.RepositoryVerificationException; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; import org.elasticsearch.snapshots.AbstractSnapshotIntegTestCase; -import org.elasticsearch.test.junit.annotations.TestIssueLogging; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; import org.junit.Before; @@ -58,12 +58,14 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; import java.util.stream.Collectors; +import static org.elasticsearch.indices.recovery.RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING; +import static org.elasticsearch.repositories.blobstore.BlobStoreRepository.MAX_RESTORE_BYTES_PER_SEC; +import static org.elasticsearch.repositories.blobstore.BlobStoreRepository.MAX_SNAPSHOT_BYTES_PER_SEC; import static org.elasticsearch.repositories.blobstore.testkit.ContendedRegisterAnalyzeAction.bytesFromLong; import static org.elasticsearch.repositories.blobstore.testkit.ContendedRegisterAnalyzeAction.longFromBytes; import static org.hamcrest.Matchers.allOf; @@ -87,9 +89,26 @@ protected Collection> nodePlugins() { return List.of(TestPlugin.class, LocalStateCompositeXPackPlugin.class, SnapshotRepositoryTestKit.class); } + @Override + protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { + return Settings.builder() + .put(super.nodeSettings(nodeOrdinal, otherSettings)) + // no throttling, so that even analyses which run to completion do not take too long + .put(INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey(), ByteSizeValue.ZERO) + .build(); + } + @Before public void createBlobStore() { - createRepositoryNoVerify("test-repo", TestPlugin.DISRUPTABLE_REPO_TYPE); + createRepository( + "test-repo", + TestPlugin.DISRUPTABLE_REPO_TYPE, + randomRepositorySettings() + // no throttling, so that even analyses which run to completion do not take too long + .put(MAX_SNAPSHOT_BYTES_PER_SEC.getKey(), ByteSizeValue.ZERO) + .put(MAX_RESTORE_BYTES_PER_SEC.getKey(), ByteSizeValue.ZERO), + false 
+ ); blobStore = new DisruptableBlobStore(); for (final RepositoriesService repositoriesService : internalCluster().getInstances(RepositoriesService.class)) { @@ -105,8 +124,7 @@ public void testSuccess() { final RepositoryAnalyzeAction.Request request = new RepositoryAnalyzeAction.Request("test-repo"); request.blobCount(1); request.maxBlobSize(ByteSizeValue.ofBytes(10L)); - - analyseRepository(request); + safeAwait((ActionListener l) -> analyseRepository(request, l)); } public void testFailsOnReadError() { @@ -125,7 +143,8 @@ public byte[] onRead(byte[] actualContents, long position, long length) throws I } }); - final Exception exception = expectThrows(RepositoryVerificationException.class, () -> analyseRepository(request)); + final Exception exception = analyseRepositoryExpectFailure(request); + assertAnalysisFailureMessage(exception.getMessage()); final IOException ioException = (IOException) ExceptionsHelper.unwrap(exception, IOException.class); assert ioException != null : exception; assertThat(ioException.getMessage(), equalTo("simulated")); @@ -149,7 +168,7 @@ public byte[] onRead(byte[] actualContents, long position, long length) { } }); - expectThrows(RepositoryVerificationException.class, () -> analyseRepository(request)); + assertAnalysisFailureMessage(analyseRepositoryExpectFailure(request).getMessage()); } public void testFailsOnChecksumMismatch() { @@ -180,7 +199,7 @@ public byte[] onRead(byte[] actualContents, long position, long length) { } }); - expectThrows(RepositoryVerificationException.class, () -> analyseRepository(request)); + assertAnalysisFailureMessage(analyseRepositoryExpectFailure(request).getMessage()); } public void testFailsOnWriteException() { @@ -201,7 +220,8 @@ public void onWrite() throws IOException { }); - final Exception exception = expectThrows(RepositoryVerificationException.class, () -> analyseRepository(request)); + final Exception exception = analyseRepositoryExpectFailure(request); + assertAnalysisFailureMessage(exception.getMessage()); final IOException ioException = (IOException) ExceptionsHelper.unwrap(exception, IOException.class); assert ioException != null : exception; assertThat(ioException.getMessage(), equalTo("simulated")); @@ -223,7 +243,7 @@ public Map onList(Map actualListing) }); - expectThrows(RepositoryVerificationException.class, () -> analyseRepository(request)); + assertAnalysisFailureMessage(analyseRepositoryExpectFailure(request).getMessage()); } public void testFailsOnListingException() { @@ -243,7 +263,7 @@ public Map onList(Map actualListing) } }); - expectThrows(RepositoryVerificationException.class, () -> analyseRepository(request)); + assertAnalysisFailureMessage(analyseRepositoryExpectFailure(request).getMessage()); } public void testFailsOnDeleteException() { @@ -258,7 +278,7 @@ public void onDelete() throws IOException { } }); - expectThrows(RepositoryVerificationException.class, () -> analyseRepository(request)); + assertAnalysisFailureMessage(analyseRepositoryExpectFailure(request).getMessage()); } public void testFailsOnIncompleteDelete() { @@ -286,7 +306,7 @@ public Map onList(Map actualListing) } }); - expectThrows(RepositoryVerificationException.class, () -> analyseRepository(request)); + assertAnalysisFailureMessage(analyseRepositoryExpectFailure(request).getMessage()); } public void testFailsIfBlobCreatedOnAbort() { @@ -303,12 +323,21 @@ public boolean createBlobOnAbort() { } }); - try { - analyseRepository(request); - assertFalse(writeWasAborted.get()); - } catch (RepositoryVerificationException e) { - 
assertTrue(writeWasAborted.get()); - } + safeAwait((ActionListener l) -> analyseRepository(request, l.delegateResponse((ll, e) -> { + if (ExceptionsHelper.unwrapCause(e) instanceof RepositoryVerificationException repositoryVerificationException) { + assertAnalysisFailureMessage(repositoryVerificationException.getMessage()); + assertTrue( + "did not abort write, so why did the verification fail?", + // clear flag for final assertion + writeWasAborted.compareAndSet(true, false) + ); + ll.onResponse(null); + } else { + ll.onFailure(e); + } + }))); + + assertFalse(writeWasAborted.get()); } public void testFailsIfRegisterIncorrect() { @@ -325,7 +354,7 @@ public BytesReference onContendedCompareAndExchange(BytesRegister register, Byte return register.compareAndExchange(expected, updated); } }); - expectThrows(RepositoryVerificationException.class, () -> analyseRepository(request)); + assertAnalysisFailureMessage(analyseRepositoryExpectFailure(request).getMessage()); } public void testFailsIfRegisterHoldsSpuriousValue() { @@ -355,14 +384,22 @@ public BytesReference onContendedCompareAndExchange(BytesRegister register, Byte return register.compareAndExchange(expected, updated); } }); - try { - analyseRepository(request); - assertFalse(sawSpuriousValue.get()); - } catch (RepositoryVerificationException e) { - if (sawSpuriousValue.get() == false) { - fail(e, "did not see spurious value, so why did the verification fail?"); + + safeAwait((ActionListener l) -> analyseRepository(request, l.delegateResponse((ll, e) -> { + if (ExceptionsHelper.unwrapCause(e) instanceof RepositoryVerificationException repositoryVerificationException) { + assertAnalysisFailureMessage(repositoryVerificationException.getMessage()); + assertTrue( + "did not see spurious value, so why did the verification fail?", + // clear flag for final assertion + sawSpuriousValue.compareAndSet(true, false) + ); + ll.onResponse(null); + } else { + ll.onFailure(e); } - } + }))); + + assertFalse(sawSpuriousValue.get()); } private static void assertAnalysisFailureMessage(String message) { @@ -387,7 +424,7 @@ public boolean compareAndExchangeReturnsWitness(String key) { return isContendedRegisterKey(key) == false; } }); - final var exception = expectThrows(RepositoryVerificationException.class, () -> analyseRepository(request)); + final var exception = analyseRepositoryExpectFailure(request); assertThat( exception.getMessage(), allOf( @@ -409,7 +446,7 @@ public boolean compareAndExchangeReturnsWitness(String key) { return false; } }); - final var exception = expectThrows(RepositoryVerificationException.class, () -> analyseRepository(request)); + final var exception = analyseRepositoryExpectFailure(request); assertAnalysisFailureMessage(exception.getMessage()); assertThat( asInstanceOf(RepositoryVerificationException.class, ExceptionsHelper.unwrapCause(exception.getCause())).getMessage(), @@ -417,10 +454,6 @@ public boolean compareAndExchangeReturnsWitness(String key) { ); } - @TestIssueLogging( - issueUrl = "https://github.com/elastic/elasticsearch/issues/111343", - value = "org.elasticsearch.repositories.blobstore.testkit:TRACE" - ) public void testFailsIfEmptyRegisterRejected() { final RepositoryAnalyzeAction.Request request = new RepositoryAnalyzeAction.Request("test-repo"); blobStore.setDisruption(new Disruption() { @@ -429,7 +462,7 @@ public boolean acceptsEmptyRegister() { return false; } }); - final var exception = expectThrows(RepositoryVerificationException.class, () -> analyseRepository(request)); + final var exception = 
analyseRepositoryExpectFailure(request); assertAnalysisFailureMessage(exception.getMessage()); final var cause = ExceptionsHelper.unwrapCause(exception.getCause()); if (cause instanceof IOException ioException) { @@ -445,8 +478,18 @@ public boolean acceptsEmptyRegister() { } } - private void analyseRepository(RepositoryAnalyzeAction.Request request) { - client().execute(RepositoryAnalyzeAction.INSTANCE, request).actionGet(5L, TimeUnit.MINUTES); + private RepositoryVerificationException analyseRepositoryExpectFailure(RepositoryAnalyzeAction.Request request) { + return asInstanceOf( + RepositoryVerificationException.class, + ExceptionsHelper.unwrapCause(safeAwaitFailure(RepositoryAnalyzeAction.Response.class, l -> analyseRepository(request, l))) + ); + } + + private void analyseRepository(RepositoryAnalyzeAction.Request request, ActionListener listener) { + client().execute(RepositoryAnalyzeAction.INSTANCE, request, listener.delegateFailureAndWrap((l, response) -> { + RepositoryAnalysisSuccessIT.assertNoThrottling(response); + l.onResponse(response); + })); } private static void assertPurpose(OperationPurpose purpose) { diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/RepositoryAnalysisSuccessIT.java b/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/RepositoryAnalysisSuccessIT.java index 47b44f41f72d2..e4d9bf9041b4a 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/RepositoryAnalysisSuccessIT.java +++ b/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/RepositoryAnalysisSuccessIT.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.metadata.RepositoryMetadata; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; @@ -26,7 +27,6 @@ import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.env.Environment; import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.plugins.Plugin; @@ -38,6 +38,7 @@ import org.elasticsearch.repositories.blobstore.BlobStoreRepository; import org.elasticsearch.snapshots.AbstractSnapshotIntegTestCase; import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xcontent.ObjectPath; import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; import org.junit.Before; @@ -51,15 +52,18 @@ import java.util.List; import java.util.Map; import java.util.concurrent.Semaphore; -import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; import java.util.stream.Collectors; +import static org.elasticsearch.indices.recovery.RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING; +import static org.elasticsearch.repositories.blobstore.BlobStoreRepository.MAX_RESTORE_BYTES_PER_SEC; +import static 
org.elasticsearch.repositories.blobstore.BlobStoreRepository.MAX_SNAPSHOT_BYTES_PER_SEC; import static org.elasticsearch.repositories.blobstore.testkit.ContendedRegisterAnalyzeAction.longFromBytes; import static org.elasticsearch.repositories.blobstore.testkit.RepositoryAnalysisFailureIT.isContendedRegisterKey; +import static org.elasticsearch.test.XContentTestUtils.convertToMap; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.equalTo; @@ -83,9 +87,21 @@ protected Collection> nodePlugins() { return List.of(TestPlugin.class, LocalStateCompositeXPackPlugin.class, SnapshotRepositoryTestKit.class); } + @Override + protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { + return Settings.builder() + .put(super.nodeSettings(nodeOrdinal, otherSettings)) + .put(INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey(), ByteSizeValue.ZERO) + .build(); + } + public void testRepositoryAnalysis() { - final Settings.Builder settings = Settings.builder(); + final Settings.Builder settings = Settings.builder() + // no throttling + .put(MAX_SNAPSHOT_BYTES_PER_SEC.getKey(), ByteSizeValue.ZERO) + .put(MAX_RESTORE_BYTES_PER_SEC.getKey(), ByteSizeValue.ZERO); + if (randomBoolean()) { settings.put(BASE_PATH_SETTING_KEY, randomAlphaOfLength(10)); } @@ -136,11 +152,22 @@ public void testRepositoryAnalysis() { blobStore.setMaxTotalBlobSize(request.getMaxTotalDataSize().getBytes()); } - request.timeout(TimeValue.timeValueSeconds(20)); - - client().execute(RepositoryAnalyzeAction.INSTANCE, request).actionGet(30L, TimeUnit.SECONDS); + request.timeout(SAFE_AWAIT_TIMEOUT); + final RepositoryAnalyzeAction.Response response = safeAwait(l -> client().execute(RepositoryAnalyzeAction.INSTANCE, request, l)); assertThat(blobStore.currentPath, nullValue()); + + assertNoThrottling(response); + } + + static void assertNoThrottling(RepositoryAnalyzeAction.Response response) { + try { + final var responseMap = convertToMap(response); + assertEquals(Strings.toString(response), 0, (int) ObjectPath.eval("summary.write.total_throttled_nanos", responseMap)); + assertEquals(Strings.toString(response), 0, (int) ObjectPath.eval("summary.read.total_throttled_nanos", responseMap)); + } catch (IOException e) { + fail(e); + } } public static class TestPlugin extends Plugin implements RepositoryPlugin { From f1671c93064e335996326b40de681bf5346a47a8 Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 29 Jul 2024 08:37:05 +0100 Subject: [PATCH 080/105] Use HTTP for Azure fixture in FIPS tests (#111397) We cannot (easily) override the JVM trust store in FIPS tests. This commit falls back to using HTTP in these cases. 
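Concretely, each affected suite now selects the fixture protocol per JVM and only installs the custom trust store outside FIPS mode. The pattern repeated in the diffs below is roughly (`clusterBuilder` is a stand-in name for the ElasticsearchCluster builder used in each suite):

    // HTTPS with a test trust store normally; plain HTTP in FIPS JVMs, where the
    // trust store cannot (easily) be overridden; no fixture when none is in use.
    AzureHttpFixture.Protocol protocol = USE_FIXTURE
        ? ESTestCase.inFipsJvm() ? AzureHttpFixture.Protocol.HTTP : AzureHttpFixture.Protocol.HTTPS
        : AzureHttpFixture.Protocol.NONE;

    // The trust store system property is likewise skipped in FIPS mode.
    clusterBuilder.systemProperty(
        "javax.net.ssl.trustStore",
        () -> trustStore.getTrustStorePath().toString(),
        s -> USE_FIXTURE && ESTestCase.inFipsJvm() == false
    );
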
Closes #111279 Closes #111345 Closes #111307 Closes #111377 Closes #111280 --- .../azure/RepositoryAzureClientYamlTestSuiteIT.java | 11 +++++++++-- muted-tests.yml | 12 ------------ .../java/org/elasticsearch/test/TestTrustStore.java | 2 ++ .../metering/azure/AzureRepositoriesMeteringIT.java | 11 +++++++++-- .../AzureSearchableSnapshotsIT.java | 5 ++++- .../recovery/AzureSnapshotBasedRecoveryIT.java | 11 +++++++++-- .../testkit/AzureSnapshotRepoTestKitIT.java | 11 +++++++++-- 7 files changed, 42 insertions(+), 21 deletions(-) diff --git a/modules/repository-azure/src/yamlRestTest/java/org/elasticsearch/repositories/azure/RepositoryAzureClientYamlTestSuiteIT.java b/modules/repository-azure/src/yamlRestTest/java/org/elasticsearch/repositories/azure/RepositoryAzureClientYamlTestSuiteIT.java index ba476e754a59e..1fd04e73364ab 100644 --- a/modules/repository-azure/src/yamlRestTest/java/org/elasticsearch/repositories/azure/RepositoryAzureClientYamlTestSuiteIT.java +++ b/modules/repository-azure/src/yamlRestTest/java/org/elasticsearch/repositories/azure/RepositoryAzureClientYamlTestSuiteIT.java @@ -14,6 +14,7 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.core.Booleans; +import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TestTrustStore; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; @@ -30,7 +31,9 @@ public class RepositoryAzureClientYamlTestSuiteIT extends ESClientYamlSuiteTestC private static final String AZURE_TEST_SASTOKEN = System.getProperty("test.azure.sas_token"); private static AzureHttpFixture fixture = new AzureHttpFixture( - USE_FIXTURE ? AzureHttpFixture.Protocol.HTTPS : AzureHttpFixture.Protocol.NONE, + USE_FIXTURE + ? ESTestCase.inFipsJvm() ? 
AzureHttpFixture.Protocol.HTTP : AzureHttpFixture.Protocol.HTTPS + : AzureHttpFixture.Protocol.NONE, AZURE_TEST_ACCOUNT, AZURE_TEST_CONTAINER, AzureHttpFixture.sharedKeyForAccountPredicate(AZURE_TEST_ACCOUNT) @@ -59,7 +62,11 @@ public class RepositoryAzureClientYamlTestSuiteIT extends ESClientYamlSuiteTestC s -> USE_FIXTURE ) .setting("thread_pool.repository_azure.max", () -> String.valueOf(randomIntBetween(1, 10)), s -> USE_FIXTURE) - .systemProperty("javax.net.ssl.trustStore", () -> trustStore.getTrustStorePath().toString(), s -> USE_FIXTURE) + .systemProperty( + "javax.net.ssl.trustStore", + () -> trustStore.getTrustStorePath().toString(), + s -> USE_FIXTURE && ESTestCase.inFipsJvm() == false + ) .build(); @ClassRule(order = 1) diff --git a/muted-tests.yml b/muted-tests.yml index 8ab76d6a4dd2b..9f175d094f016 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -113,18 +113,6 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/111308 - class: org.elasticsearch.xpack.ml.integration.DatafeedJobsRestIT issue: https://github.com/elastic/elasticsearch/issues/111319 -- class: org.elasticsearch.xpack.searchablesnapshots.AzureSearchableSnapshotsIT - issue: https://github.com/elastic/elasticsearch/issues/111279 -- class: org.elasticsearch.repositories.azure.RepositoryAzureClientYamlTestSuiteIT - issue: https://github.com/elastic/elasticsearch/issues/111345 -- class: org.elasticsearch.xpack.repositories.metering.azure.AzureRepositoriesMeteringIT - issue: https://github.com/elastic/elasticsearch/issues/111307 -- class: org.elasticsearch.xpack.snapshotbasedrecoveries.recovery.AzureSnapshotBasedRecoveryIT - method: testRecoveryUsingSnapshots - issue: https://github.com/elastic/elasticsearch/issues/111377 -- class: org.elasticsearch.repositories.blobstore.testkit.AzureSnapshotRepoTestKitIT - method: testRepositoryAnalysis - issue: https://github.com/elastic/elasticsearch/issues/111280 - class: org.elasticsearch.xpack.esql.analysis.VerifierTests method: testMatchFilter issue: https://github.com/elastic/elasticsearch/issues/111380 diff --git a/test/framework/src/main/java/org/elasticsearch/test/TestTrustStore.java b/test/framework/src/main/java/org/elasticsearch/test/TestTrustStore.java index 01069b8fb295c..e17a309dbc9c8 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/TestTrustStore.java +++ b/test/framework/src/main/java/org/elasticsearch/test/TestTrustStore.java @@ -22,6 +22,7 @@ import java.util.Objects; import static org.apache.lucene.tests.util.LuceneTestCase.createTempDir; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; public class TestTrustStore extends ExternalResource { @@ -35,6 +36,7 @@ public TestTrustStore(CheckedSupplier pemStreamSupplie private Path trustStorePath; public Path getTrustStorePath() { + assertFalse("Tests in FIPS mode cannot supply a custom trust store", ESTestCase.inFipsJvm()); return Objects.requireNonNullElseGet(trustStorePath, () -> ESTestCase.fail(null, "trust store not created")); } diff --git a/x-pack/plugin/repositories-metering-api/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/repositories/metering/azure/AzureRepositoriesMeteringIT.java b/x-pack/plugin/repositories-metering-api/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/repositories/metering/azure/AzureRepositoriesMeteringIT.java index 0e7eeb965d1f3..3a66854191088 100644 --- 
a/x-pack/plugin/repositories-metering-api/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/repositories/metering/azure/AzureRepositoriesMeteringIT.java +++ b/x-pack/plugin/repositories-metering-api/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/repositories/metering/azure/AzureRepositoriesMeteringIT.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Booleans; +import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TestTrustStore; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.xpack.repositories.metering.AbstractRepositoriesMeteringAPIRestTestCase; @@ -28,7 +29,9 @@ public class AzureRepositoriesMeteringIT extends AbstractRepositoriesMeteringAPI private static final String AZURE_TEST_SASTOKEN = System.getProperty("test.azure.sas_token"); private static AzureHttpFixture fixture = new AzureHttpFixture( - USE_FIXTURE ? AzureHttpFixture.Protocol.HTTPS : AzureHttpFixture.Protocol.NONE, + USE_FIXTURE + ? ESTestCase.inFipsJvm() ? AzureHttpFixture.Protocol.HTTP : AzureHttpFixture.Protocol.HTTPS + : AzureHttpFixture.Protocol.NONE, AZURE_TEST_ACCOUNT, AZURE_TEST_CONTAINER, AzureHttpFixture.sharedKeyForAccountPredicate(AZURE_TEST_ACCOUNT) @@ -57,7 +60,11 @@ public class AzureRepositoriesMeteringIT extends AbstractRepositoriesMeteringAPI () -> "ignored;DefaultEndpointsProtocol=https;BlobEndpoint=" + fixture.getAddress(), s -> USE_FIXTURE ) - .systemProperty("javax.net.ssl.trustStore", () -> trustStore.getTrustStorePath().toString(), s -> USE_FIXTURE) + .systemProperty( + "javax.net.ssl.trustStore", + () -> trustStore.getTrustStorePath().toString(), + s -> USE_FIXTURE && ESTestCase.inFipsJvm() == false + ) .build(); @ClassRule(order = 1) diff --git a/x-pack/plugin/searchable-snapshots/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/AzureSearchableSnapshotsIT.java b/x-pack/plugin/searchable-snapshots/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/AzureSearchableSnapshotsIT.java index d86632d77b51c..d2cdef121fe40 100644 --- a/x-pack/plugin/searchable-snapshots/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/AzureSearchableSnapshotsIT.java +++ b/x-pack/plugin/searchable-snapshots/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/AzureSearchableSnapshotsIT.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Booleans; +import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TestTrustStore; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.rest.ESRestTestCase; @@ -29,7 +30,9 @@ public class AzureSearchableSnapshotsIT extends AbstractSearchableSnapshotsRestT private static final String AZURE_TEST_SASTOKEN = System.getProperty("test.azure.sas_token"); private static AzureHttpFixture fixture = new AzureHttpFixture( - USE_FIXTURE ? AzureHttpFixture.Protocol.HTTPS : AzureHttpFixture.Protocol.NONE, + USE_FIXTURE + ? ESTestCase.inFipsJvm() ? 
AzureHttpFixture.Protocol.HTTP : AzureHttpFixture.Protocol.HTTPS + : AzureHttpFixture.Protocol.NONE, AZURE_TEST_ACCOUNT, AZURE_TEST_CONTAINER, AzureHttpFixture.sharedKeyForAccountPredicate(AZURE_TEST_ACCOUNT) diff --git a/x-pack/plugin/snapshot-based-recoveries/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/AzureSnapshotBasedRecoveryIT.java b/x-pack/plugin/snapshot-based-recoveries/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/AzureSnapshotBasedRecoveryIT.java index 8895574f85d02..bac69158a860c 100644 --- a/x-pack/plugin/snapshot-based-recoveries/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/AzureSnapshotBasedRecoveryIT.java +++ b/x-pack/plugin/snapshot-based-recoveries/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/AzureSnapshotBasedRecoveryIT.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Booleans; +import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TestTrustStore; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.junit.ClassRule; @@ -28,7 +29,9 @@ public class AzureSnapshotBasedRecoveryIT extends AbstractSnapshotBasedRecoveryR private static final String AZURE_TEST_SASTOKEN = System.getProperty("test.azure.sas_token"); private static AzureHttpFixture fixture = new AzureHttpFixture( - USE_FIXTURE ? AzureHttpFixture.Protocol.HTTPS : AzureHttpFixture.Protocol.NONE, + USE_FIXTURE + ? ESTestCase.inFipsJvm() ? AzureHttpFixture.Protocol.HTTP : AzureHttpFixture.Protocol.HTTPS + : AzureHttpFixture.Protocol.NONE, AZURE_TEST_ACCOUNT, AZURE_TEST_CONTAINER, AzureHttpFixture.sharedKeyForAccountPredicate(AZURE_TEST_ACCOUNT) @@ -59,7 +62,11 @@ public class AzureSnapshotBasedRecoveryIT extends AbstractSnapshotBasedRecoveryR s -> USE_FIXTURE ) .setting("xpack.license.self_generated.type", "trial") - .systemProperty("javax.net.ssl.trustStore", () -> trustStore.getTrustStorePath().toString(), s -> USE_FIXTURE) + .systemProperty( + "javax.net.ssl.trustStore", + () -> trustStore.getTrustStorePath().toString(), + s -> USE_FIXTURE && ESTestCase.inFipsJvm() == false + ) .build(); @ClassRule(order = 1) diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/azure/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/AzureSnapshotRepoTestKitIT.java b/x-pack/plugin/snapshot-repo-test-kit/qa/azure/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/AzureSnapshotRepoTestKitIT.java index 7451f37cd0e40..d0b5fca158472 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/qa/azure/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/AzureSnapshotRepoTestKitIT.java +++ b/x-pack/plugin/snapshot-repo-test-kit/qa/azure/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/AzureSnapshotRepoTestKitIT.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Booleans; +import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TestTrustStore; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.junit.ClassRule; @@ -27,7 +28,9 @@ public class AzureSnapshotRepoTestKitIT extends AbstractSnapshotRepoTestKitRestT private static final String AZURE_TEST_SASTOKEN = System.getProperty("test.azure.sas_token"); private static AzureHttpFixture fixture = new AzureHttpFixture( - USE_FIXTURE ? 
AzureHttpFixture.Protocol.HTTPS : AzureHttpFixture.Protocol.NONE, + USE_FIXTURE + ? ESTestCase.inFipsJvm() ? AzureHttpFixture.Protocol.HTTP : AzureHttpFixture.Protocol.HTTPS + : AzureHttpFixture.Protocol.NONE, AZURE_TEST_ACCOUNT, AZURE_TEST_CONTAINER, AzureHttpFixture.sharedKeyForAccountPredicate(AZURE_TEST_ACCOUNT) @@ -62,7 +65,11 @@ public class AzureSnapshotRepoTestKitIT extends AbstractSnapshotRepoTestKitRestT c.systemProperty("test.repository_test_kit.skip_cas", "true"); } }) - .systemProperty("javax.net.ssl.trustStore", () -> trustStore.getTrustStorePath().toString(), s -> USE_FIXTURE) + .systemProperty( + "javax.net.ssl.trustStore", + () -> trustStore.getTrustStorePath().toString(), + s -> USE_FIXTURE && ESTestCase.inFipsJvm() == false + ) .build(); @ClassRule(order = 1) From 26623f14d8cada4852aa992fc93205a092146578 Mon Sep 17 00:00:00 2001 From: Jan Kuipers <148754765+jan-elastic@users.noreply.github.com> Date: Mon, 29 Jul 2024 10:38:18 +0200 Subject: [PATCH 081/105] Inference autoscaling telemetry (#110630) * Wire MeterRegistry * Allow for collections of values in async APM measurements * Adaptive allocations scaler metrics * Update docs/changelog/110630.yaml * Update 110630.yaml --- docs/changelog/110630.yaml | 5 + .../telemetry/apm/APMMeterRegistry.java | 21 +++- .../metrics/DoubleAsyncCounterAdapter.java | 13 +- .../internal/metrics/DoubleGaugeAdapter.java | 13 +- .../metrics/LongAsyncCounterAdapter.java | 13 +- .../internal/metrics/LongGaugeAdapter.java | 9 +- .../apm/internal/metrics/OtelHelper.java | 29 +++-- .../telemetry/metric/MeterRegistry.java | 96 ++++++++++++-- .../telemetry/RecordingMeterRegistry.java | 36 ++++++ .../xpack/ml/MachineLearning.java | 1 + .../xpack/ml/MlInitializationService.java | 4 +- .../AdaptiveAllocationsScaler.java | 56 ++++++++- .../AdaptiveAllocationsScalerService.java | 119 +++++++++++++++++- .../ml/MlInitializationServiceTests.java | 5 + ...AdaptiveAllocationsScalerServiceTests.java | 4 + 15 files changed, 381 insertions(+), 43 deletions(-) create mode 100644 docs/changelog/110630.yaml diff --git a/docs/changelog/110630.yaml b/docs/changelog/110630.yaml new file mode 100644 index 0000000000000..9bf78e1209753 --- /dev/null +++ b/docs/changelog/110630.yaml @@ -0,0 +1,5 @@ +pr: 110630 +summary: Telemetry for inference adaptive allocations +area: Machine Learning +type: feature +issues: [] diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APMMeterRegistry.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APMMeterRegistry.java index 382fc9417eac0..831e2f19e0126 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APMMeterRegistry.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APMMeterRegistry.java @@ -38,6 +38,7 @@ import org.elasticsearch.telemetry.metric.LongWithAttributes; import org.elasticsearch.telemetry.metric.MeterRegistry; +import java.util.Collection; import java.util.List; import java.util.Map; import java.util.concurrent.locks.ReentrantLock; @@ -96,11 +97,11 @@ public DoubleCounter getDoubleCounter(String name) { } @Override - public DoubleAsyncCounter registerDoubleAsyncCounter( + public DoubleAsyncCounter registerDoublesAsyncCounter( String name, String description, String unit, - Supplier observer + Supplier> observer ) { try (ReleasableLock lock = registerLock.acquire()) { return register(doubleAsynchronousCounters, new DoubleAsyncCounterAdapter(meter, name, description, unit, observer)); @@ -125,7 +126,12 @@ public DoubleUpDownCounter 
getDoubleUpDownCounter(String name) { } @Override - public DoubleGauge registerDoubleGauge(String name, String description, String unit, Supplier observer) { + public DoubleGauge registerDoublesGauge( + String name, + String description, + String unit, + Supplier> observer + ) { try (ReleasableLock lock = registerLock.acquire()) { return register(doubleGauges, new DoubleGaugeAdapter(meter, name, description, unit, observer)); } @@ -156,7 +162,12 @@ public LongCounter registerLongCounter(String name, String description, String u } @Override - public LongAsyncCounter registerLongAsyncCounter(String name, String description, String unit, Supplier observer) { + public LongAsyncCounter registerLongsAsyncCounter( + String name, + String description, + String unit, + Supplier> observer + ) { try (ReleasableLock lock = registerLock.acquire()) { return register(longAsynchronousCounters, new LongAsyncCounterAdapter(meter, name, description, unit, observer)); } @@ -185,7 +196,7 @@ public LongUpDownCounter getLongUpDownCounter(String name) { } @Override - public LongGauge registerLongGauge(String name, String description, String unit, Supplier observer) { + public LongGauge registerLongsGauge(String name, String description, String unit, Supplier> observer) { try (ReleasableLock lock = registerLock.acquire()) { return register(longGauges, new LongGaugeAdapter(meter, name, description, unit, observer)); } diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleAsyncCounterAdapter.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleAsyncCounterAdapter.java index 6b17a83619ef7..ab735c41ca890 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleAsyncCounterAdapter.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleAsyncCounterAdapter.java @@ -15,12 +15,19 @@ import org.elasticsearch.telemetry.metric.DoubleAsyncCounter; import org.elasticsearch.telemetry.metric.DoubleWithAttributes; +import java.util.Collection; import java.util.Objects; import java.util.function.Supplier; public class DoubleAsyncCounterAdapter extends AbstractInstrument implements DoubleAsyncCounter { - public DoubleAsyncCounterAdapter(Meter meter, String name, String description, String unit, Supplier observer) { + public DoubleAsyncCounterAdapter( + Meter meter, + String name, + String description, + String unit, + Supplier> observer + ) { super(meter, new Builder(name, description, unit, observer)); } @@ -30,9 +37,9 @@ public void close() throws Exception { } private static class Builder extends AbstractInstrument.Builder { - private final Supplier observer; + private final Supplier> observer; - private Builder(String name, String description, String unit, Supplier observer) { + private Builder(String name, String description, String unit, Supplier> observer) { super(name, description, unit); this.observer = Objects.requireNonNull(observer); } diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleGaugeAdapter.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleGaugeAdapter.java index ed6ecee66d696..2a9c2d45981ed 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleGaugeAdapter.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleGaugeAdapter.java @@ -14,6 +14,7 @@ import org.elasticsearch.telemetry.apm.AbstractInstrument; import 
org.elasticsearch.telemetry.metric.DoubleWithAttributes; +import java.util.Collection; import java.util.Objects; import java.util.function.Supplier; @@ -24,7 +25,13 @@ public class DoubleGaugeAdapter extends AbstractInstrument observer) { + public DoubleGaugeAdapter( + Meter meter, + String name, + String description, + String unit, + Supplier> observer + ) { super(meter, new Builder(name, description, unit, observer)); } @@ -34,9 +41,9 @@ public void close() throws Exception { } private static class Builder extends AbstractInstrument.Builder { - private final Supplier observer; + private final Supplier> observer; - private Builder(String name, String description, String unit, Supplier observer) { + private Builder(String name, String description, String unit, Supplier> observer) { super(name, description, unit); this.observer = Objects.requireNonNull(observer); } diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongAsyncCounterAdapter.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongAsyncCounterAdapter.java index 14c58139d03e1..1bc21ef2c831c 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongAsyncCounterAdapter.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongAsyncCounterAdapter.java @@ -15,12 +15,19 @@ import org.elasticsearch.telemetry.metric.LongAsyncCounter; import org.elasticsearch.telemetry.metric.LongWithAttributes; +import java.util.Collection; import java.util.Objects; import java.util.function.Supplier; public class LongAsyncCounterAdapter extends AbstractInstrument implements LongAsyncCounter { - public LongAsyncCounterAdapter(Meter meter, String name, String description, String unit, Supplier observer) { + public LongAsyncCounterAdapter( + Meter meter, + String name, + String description, + String unit, + Supplier> observer + ) { super(meter, new Builder(name, description, unit, observer)); } @@ -30,9 +37,9 @@ public void close() throws Exception { } private static class Builder extends AbstractInstrument.Builder { - private final Supplier observer; + private final Supplier> observer; - private Builder(String name, String description, String unit, Supplier observer) { + private Builder(String name, String description, String unit, Supplier> observer) { super(name, description, unit); this.observer = Objects.requireNonNull(observer); } diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongGaugeAdapter.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongGaugeAdapter.java index 52c19c80c284f..eab9ed2eb5278 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongGaugeAdapter.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongGaugeAdapter.java @@ -14,6 +14,7 @@ import org.elasticsearch.telemetry.apm.AbstractInstrument; import org.elasticsearch.telemetry.metric.LongWithAttributes; +import java.util.Collection; import java.util.Objects; import java.util.function.Supplier; @@ -21,7 +22,7 @@ * LongGaugeAdapter wraps an otel ObservableLongGauge */ public class LongGaugeAdapter extends AbstractInstrument implements org.elasticsearch.telemetry.metric.LongGauge { - public LongGaugeAdapter(Meter meter, String name, String description, String unit, Supplier observer) { + public LongGaugeAdapter(Meter meter, String name, String description, String unit, Supplier> observer) { super(meter, new 
Builder(name, description, unit, observer)); } @@ -31,11 +32,11 @@ public void close() throws Exception { } private static class Builder extends AbstractInstrument.Builder { - private final Supplier observer; + private final Supplier> observer; - private Builder(String name, String description, String unit, Supplier observer) { + private Builder(String name, String description, String unit, Supplier> observer) { super(name, description, unit); - this.observer = Objects.requireNonNull(observer); + this.observer = observer; } @Override diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/OtelHelper.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/OtelHelper.java index 3e8ab415bd25e..1d760c8c12791 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/OtelHelper.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/OtelHelper.java @@ -17,6 +17,7 @@ import org.elasticsearch.telemetry.metric.DoubleWithAttributes; import org.elasticsearch.telemetry.metric.LongWithAttributes; +import java.util.Collection; import java.util.Map; import java.util.function.Consumer; import java.util.function.Supplier; @@ -53,37 +54,45 @@ static Attributes fromMap(Map attributes) { return builder.build(); } - static Consumer doubleMeasurementCallback(Supplier observer) { + static Consumer doubleMeasurementCallback(Supplier> observer) { return measurement -> { - DoubleWithAttributes observation; + Collection observations; try { - observation = observer.get(); + observations = observer.get(); } catch (RuntimeException err) { assert false : "observer must not throw [" + err.getMessage() + "]"; logger.error("doubleMeasurementCallback observer unexpected error", err); return; } - if (observation == null) { + if (observations == null) { return; } - measurement.record(observation.value(), OtelHelper.fromMap(observation.attributes())); + for (DoubleWithAttributes observation : observations) { + if (observation != null) { + measurement.record(observation.value(), OtelHelper.fromMap(observation.attributes())); + } + } }; } - static Consumer longMeasurementCallback(Supplier observer) { + static Consumer longMeasurementCallback(Supplier> observer) { return measurement -> { - LongWithAttributes observation; + Collection observations; try { - observation = observer.get(); + observations = observer.get(); } catch (RuntimeException err) { assert false : "observer must not throw [" + err.getMessage() + "]"; logger.error("longMeasurementCallback observer unexpected error", err); return; } - if (observation == null) { + if (observations == null) { return; } - measurement.record(observation.value(), OtelHelper.fromMap(observation.attributes())); + for (LongWithAttributes observation : observations) { + if (observation != null) { + measurement.record(observation.value(), OtelHelper.fromMap(observation.attributes())); + } + } }; } } diff --git a/server/src/main/java/org/elasticsearch/telemetry/metric/MeterRegistry.java b/server/src/main/java/org/elasticsearch/telemetry/metric/MeterRegistry.java index 0f690558361e4..12c62859fd372 100644 --- a/server/src/main/java/org/elasticsearch/telemetry/metric/MeterRegistry.java +++ b/server/src/main/java/org/elasticsearch/telemetry/metric/MeterRegistry.java @@ -8,6 +8,8 @@ package org.elasticsearch.telemetry.metric; +import java.util.Collection; +import java.util.Collections; import java.util.function.Supplier; /** @@ -15,6 +17,7 @@ * only be registered once. 
* TODO(stu): describe name, unit and description */ + public interface MeterRegistry { /** * Register a {@link DoubleCounter}. The returned object may be reused. @@ -57,7 +60,20 @@ public interface MeterRegistry { * Must not throw an exception and must be safe to call from different threads. * @return the registered meter. */ - DoubleGauge registerDoubleGauge(String name, String description, String unit, Supplier observer); + default DoubleGauge registerDoubleGauge(String name, String description, String unit, Supplier observer) { + return registerDoublesGauge(name, description, unit, () -> Collections.singleton(observer.get())); + } + + /** + * Register a {@link DoubleGauge}. The returned object may be reused. + * @param name name of the gauge + * @param description description of purpose + * @param unit the unit (bytes, sec, hour) + * @param observer callback to use. This is called once during reporting period. + * Must not throw an exception and must be safe to call from different threads. + * @return the registered meter. + */ + DoubleGauge registerDoublesGauge(String name, String description, String unit, Supplier> observer); /** * Retrieved a previously registered {@link DoubleGauge}. @@ -98,7 +114,23 @@ public interface MeterRegistry { * @param unit the unit (bytes, sec, hour) * @param observer a callback to provide a metric value upon observation (metric interval) */ - LongAsyncCounter registerLongAsyncCounter(String name, String description, String unit, Supplier observer); + default LongAsyncCounter registerLongAsyncCounter(String name, String description, String unit, Supplier observer) { + return registerLongsAsyncCounter(name, description, unit, () -> Collections.singleton(observer.get())); + } + + /** + * Register a {@link LongAsyncCounter} with an asynchronous callback. The returned object may be reused. + * @param name name of the counter + * @param description description of purpose + * @param unit the unit (bytes, sec, hour) + * @param observer a callback to provide a metric values upon observation (metric interval) + */ + LongAsyncCounter registerLongsAsyncCounter( + String name, + String description, + String unit, + Supplier> observer + ); /** * Retrieved a previously registered {@link LongAsyncCounter}. @@ -114,7 +146,28 @@ public interface MeterRegistry { * @param unit the unit (bytes, sec, hour) * @param observer a callback to provide a metric value upon observation (metric interval) */ - DoubleAsyncCounter registerDoubleAsyncCounter(String name, String description, String unit, Supplier observer); + default DoubleAsyncCounter registerDoubleAsyncCounter( + String name, + String description, + String unit, + Supplier observer + ) { + return registerDoublesAsyncCounter(name, description, unit, () -> Collections.singleton(observer.get())); + } + + /** + * Register a {@link DoubleAsyncCounter} with an asynchronous callback. The returned object may be reused. + * @param name name of the counter + * @param description description of purpose + * @param unit the unit (bytes, sec, hour) + * @param observer a callback to provide a metric values upon observation (metric interval) + */ + DoubleAsyncCounter registerDoublesAsyncCounter( + String name, + String description, + String unit, + Supplier> observer + ); /** * Retrieved a previously registered {@link DoubleAsyncCounter}. @@ -155,7 +208,20 @@ public interface MeterRegistry { * Must not throw an exception and must be safe to call from different threads. * @return the registered meter. 
*/ - LongGauge registerLongGauge(String name, String description, String unit, Supplier observer); + default LongGauge registerLongGauge(String name, String description, String unit, Supplier observer) { + return registerLongsGauge(name, description, unit, () -> Collections.singleton(observer.get())); + } + + /** + * Register a {@link LongGauge}. The returned object may be reused. + * @param name name of the gauge + * @param description description of purpose + * @param unit the unit (bytes, sec, hour) + * @param observer callback to use. This is called once during reporting period. + * Must not throw an exception and must be safe to call from different threads. + * @return the registered meter. + */ + LongGauge registerLongsGauge(String name, String description, String unit, Supplier> observer); /** * Retrieved a previously registered {@link LongGauge}. @@ -204,7 +270,12 @@ public DoubleUpDownCounter getDoubleUpDownCounter(String name) { } @Override - public DoubleGauge registerDoubleGauge(String name, String description, String unit, Supplier observer) { + public DoubleGauge registerDoublesGauge( + String name, + String description, + String unit, + Supplier> observer + ) { return DoubleGauge.NOOP; } @@ -229,11 +300,11 @@ public LongCounter registerLongCounter(String name, String description, String u } @Override - public LongAsyncCounter registerLongAsyncCounter( + public LongAsyncCounter registerLongsAsyncCounter( String name, String description, String unit, - Supplier observer + Supplier> observer ) { return LongAsyncCounter.NOOP; } @@ -244,11 +315,11 @@ public LongAsyncCounter getLongAsyncCounter(String name) { } @Override - public DoubleAsyncCounter registerDoubleAsyncCounter( + public DoubleAsyncCounter registerDoublesAsyncCounter( String name, String description, String unit, - Supplier observer + Supplier> observer ) { return DoubleAsyncCounter.NOOP; } @@ -274,7 +345,12 @@ public LongUpDownCounter getLongUpDownCounter(String name) { } @Override - public LongGauge registerLongGauge(String name, String description, String unit, Supplier observer) { + public LongGauge registerLongsGauge( + String name, + String description, + String unit, + Supplier> observer + ) { return LongGauge.NOOP; } diff --git a/test/framework/src/main/java/org/elasticsearch/telemetry/RecordingMeterRegistry.java b/test/framework/src/main/java/org/elasticsearch/telemetry/RecordingMeterRegistry.java index 33693c297f166..97fe0ad1370ef 100644 --- a/test/framework/src/main/java/org/elasticsearch/telemetry/RecordingMeterRegistry.java +++ b/test/framework/src/main/java/org/elasticsearch/telemetry/RecordingMeterRegistry.java @@ -23,6 +23,7 @@ import org.elasticsearch.telemetry.metric.LongWithAttributes; import org.elasticsearch.telemetry.metric.MeterRegistry; +import java.util.Collection; import java.util.function.Supplier; /** @@ -76,6 +77,16 @@ public DoubleGauge registerDoubleGauge(String name, String description, String u return instrument; } + @Override + public DoubleGauge registerDoublesGauge( + String name, + String description, + String unit, + Supplier> observer + ) { + throw new UnsupportedOperationException("not implemented"); + } + @Override public DoubleGauge getDoubleGauge(String name) { return (DoubleGauge) recorder.getInstrument(InstrumentType.DOUBLE_GAUGE, name); @@ -115,6 +126,16 @@ public LongAsyncCounter registerLongAsyncCounter(String name, String description return instrument; } + @Override + public LongAsyncCounter registerLongsAsyncCounter( + String name, + String description, + String unit, 
+ Supplier> observer + ) { + throw new UnsupportedOperationException("not implemented"); + } + @Override public LongAsyncCounter getLongAsyncCounter(String name) { return (LongAsyncCounter) recorder.getInstrument(InstrumentType.LONG_ASYNC_COUNTER, name); @@ -132,6 +153,16 @@ public DoubleAsyncCounter registerDoubleAsyncCounter( return instrument; } + @Override + public DoubleAsyncCounter registerDoublesAsyncCounter( + String name, + String description, + String unit, + Supplier> observer + ) { + throw new UnsupportedOperationException("not implemented"); + } + @Override public DoubleAsyncCounter getDoubleAsyncCounter(String name) { return (DoubleAsyncCounter) recorder.getInstrument(InstrumentType.DOUBLE_ASYNC_COUNTER, name); @@ -170,6 +201,11 @@ public LongGauge registerLongGauge(String name, String description, String unit, return instrument; } + @Override + public LongGauge registerLongsGauge(String name, String description, String unit, Supplier> observer) { + throw new UnsupportedOperationException("not implemented"); + } + @Override public LongGauge getLongGauge(String name) { return (LongGauge) recorder.getInstrument(InstrumentType.LONG_GAUGE, name); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index 22a9c2dbcc281..c4bf92401be9d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -1283,6 +1283,7 @@ public Collection createComponents(PluginServices services) { clusterService, client, inferenceAuditor, + telemetryProvider.getMeterRegistry(), mlAssignmentNotifier, machineLearningExtension.get().isAnomalyDetectionEnabled(), machineLearningExtension.get().isDataFrameAnalyticsEnabled(), diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java index a1664b7023fc0..2b3ed3f7a656c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.component.LifecycleListener; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.gateway.GatewayService; +import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ml.annotations.AnnotationIndex; import org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsFeatureFlag; @@ -68,6 +69,7 @@ public final class MlInitializationService implements ClusterStateListener { ClusterService clusterService, Client client, InferenceAuditor inferenceAuditor, + MeterRegistry meterRegistry, MlAssignmentNotifier mlAssignmentNotifier, boolean isAnomalyDetectionEnabled, boolean isDataFrameAnalyticsEnabled, @@ -87,7 +89,7 @@ public final class MlInitializationService implements ClusterStateListener { isDataFrameAnalyticsEnabled, isNlpEnabled ), - new AdaptiveAllocationsScalerService(threadPool, clusterService, client, inferenceAuditor, isNlpEnabled), + new AdaptiveAllocationsScalerService(threadPool, clusterService, client, inferenceAuditor, meterRegistry, isNlpEnabled), clusterService ); } diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScaler.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScaler.java index 15f647bc76697..044556d1b30ac 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScaler.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScaler.java @@ -35,10 +35,15 @@ public class AdaptiveAllocationsScaler { private final KalmanFilter1d inferenceTimeEstimator; private int numberOfAllocations; + private int neededNumberOfAllocations; private Integer minNumberOfAllocations; private Integer maxNumberOfAllocations; private boolean dynamicsChanged; + private Double lastMeasuredRequestRate; + private Double lastMeasuredInferenceTime; + private Long lastMeasuredQueueSize; + AdaptiveAllocationsScaler(String deploymentId, int numberOfAllocations) { this.deploymentId = deploymentId; // A smoothing factor of 100 roughly means the last 100 measurements have an effect @@ -51,9 +56,14 @@ public class AdaptiveAllocationsScaler { requestRateEstimator = new KalmanFilter1d(deploymentId + ":rate", 100, true); inferenceTimeEstimator = new KalmanFilter1d(deploymentId + ":time", 100, false); this.numberOfAllocations = numberOfAllocations; - this.minNumberOfAllocations = null; - this.maxNumberOfAllocations = null; - this.dynamicsChanged = false; + neededNumberOfAllocations = numberOfAllocations; + minNumberOfAllocations = null; + maxNumberOfAllocations = null; + dynamicsChanged = false; + + lastMeasuredRequestRate = null; + lastMeasuredInferenceTime = null; + lastMeasuredQueueSize = null; } void setMinMaxNumberOfAllocations(Integer minNumberOfAllocations, Integer maxNumberOfAllocations) { @@ -62,6 +72,8 @@ void setMinMaxNumberOfAllocations(Integer minNumberOfAllocations, Integer maxNum } void process(AdaptiveAllocationsScalerService.Stats stats, double timeIntervalSeconds, int numberOfAllocations) { + lastMeasuredQueueSize = stats.pendingCount(); + // The request rate (per second) is the request count divided by the time. // Assuming a Poisson process for the requests, the variance in the request // count equals the mean request count, and the variance in the request rate @@ -74,6 +86,7 @@ void process(AdaptiveAllocationsScalerService.Stats stats, double timeIntervalSe double requestRateEstimate = requestRateEstimator.hasValue() ? requestRateEstimator.estimate() : requestRate; double requestRateVariance = Math.max(1.0, requestRateEstimate * timeIntervalSeconds) / Math.pow(timeIntervalSeconds, 2); requestRateEstimator.add(requestRate, requestRateVariance, false); + lastMeasuredRequestRate = requestRate; if (stats.requestCount() > 0 && Double.isNaN(stats.inferenceTime()) == false) { // The inference time distribution is unknown. For simplicity, we assume @@ -86,6 +99,9 @@ void process(AdaptiveAllocationsScalerService.Stats stats, double timeIntervalSe double inferenceTimeEstimate = inferenceTimeEstimator.hasValue() ? 
inferenceTimeEstimator.estimate() : inferenceTime; double inferenceTimeVariance = Math.pow(inferenceTimeEstimate, 2) / stats.requestCount(); inferenceTimeEstimator.add(inferenceTime, inferenceTimeVariance, dynamicsChanged); + lastMeasuredInferenceTime = inferenceTime; + } else { + lastMeasuredInferenceTime = null; } this.numberOfAllocations = numberOfAllocations; @@ -104,6 +120,14 @@ void process(AdaptiveAllocationsScalerService.Stats stats, double timeIntervalSe return requestRateUpper * inferenceTimeUpper; } + Double getRequestRateEstimate() { + return requestRateEstimator.hasValue() ? requestRateEstimator.estimate() : null; + } + + Double getInferenceTimeEstimate() { + return inferenceTimeEstimator.hasValue() ? inferenceTimeEstimator.estimate() : null; + } + Integer scale() { if (requestRateEstimator.hasValue() == false) { return null; @@ -121,6 +145,8 @@ Integer scale() { numberOfAllocations--; } + this.neededNumberOfAllocations = numberOfAllocations; + if (maxNumberOfAllocations == null) { numberOfAllocations = Math.min(numberOfAllocations, MAX_NUMBER_OF_ALLOCATIONS_SAFEGUARD); } @@ -161,4 +187,28 @@ Integer scale() { return null; } } + + public String getDeploymentId() { + return deploymentId; + } + + public long getNumberOfAllocations() { + return numberOfAllocations; + } + + public long getNeededNumberOfAllocations() { + return neededNumberOfAllocations; + } + + public Double getLastMeasuredRequestRate() { + return lastMeasuredRequestRate; + } + + public Double getLastMeasuredInferenceTime() { + return lastMeasuredInferenceTime; + } + + public Long getLastMeasuredQueueSize() { + return lastMeasuredQueueSize; + } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java index 30e3871ad5ad0..063ecae3726b1 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java @@ -19,6 +19,9 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.telemetry.metric.DoubleWithAttributes; +import org.elasticsearch.telemetry.metric.LongWithAttributes; +import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.threadpool.Scheduler; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ClientHelper; @@ -30,11 +33,15 @@ import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.notifications.InferenceAuditor; +import java.util.ArrayList; +import java.util.Collection; import java.util.HashMap; import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Function; /** * Periodically schedules adaptive allocations scaling. 
This process consists @@ -75,6 +82,108 @@ Stats sub(Stats value) { } } + private class Metrics { + + private final List<AutoCloseable> metrics = new ArrayList<>(); + + Metrics() {} + + void init() { + if (metrics.isEmpty() == false) { + return; + } + metrics.add( + meterRegistry.registerLongsGauge( + "es.ml.trained_models.adaptive_allocations.actual_number_of_allocations.current", + "the actual number of allocations", + "", + () -> observeLong(AdaptiveAllocationsScaler::getNumberOfAllocations) + ) + ); + metrics.add( + meterRegistry.registerLongsGauge( + "es.ml.trained_models.adaptive_allocations.needed_number_of_allocations.current", + "the number of allocations needed according to the adaptive allocations scaler", + "", + () -> observeLong(AdaptiveAllocationsScaler::getNeededNumberOfAllocations) + ) + ); + metrics.add( + meterRegistry.registerDoublesGauge( + "es.ml.trained_models.adaptive_allocations.measured_request_rate.current", + "the request rate reported by the stats API", + "1/s", + () -> observeDouble(AdaptiveAllocationsScaler::getLastMeasuredRequestRate) + ) + ); + metrics.add( + meterRegistry.registerDoublesGauge( + "es.ml.trained_models.adaptive_allocations.estimated_request_rate.current", + "the request rate estimated by the adaptive allocations scaler", + "1/s", + () -> observeDouble(AdaptiveAllocationsScaler::getRequestRateEstimate) + ) + ); + metrics.add( + meterRegistry.registerDoublesGauge( + "es.ml.trained_models.adaptive_allocations.measured_inference_time.current", + "the inference time reported by the stats API", + "s", + () -> observeDouble(AdaptiveAllocationsScaler::getLastMeasuredInferenceTime) + ) + ); + metrics.add( + meterRegistry.registerDoublesGauge( + "es.ml.trained_models.adaptive_allocations.estimated_inference_time.current", + "the inference time estimated by the adaptive allocations scaler", + "s", + () -> observeDouble(AdaptiveAllocationsScaler::getInferenceTimeEstimate) + ) + ); + metrics.add( + meterRegistry.registerLongsGauge( + "es.ml.trained_models.adaptive_allocations.queue_size.current", + "the queue size reported by the stats API", + "s", + () -> observeLong(AdaptiveAllocationsScaler::getLastMeasuredQueueSize) + ) + ); + } + + Collection<LongWithAttributes> observeLong(Function<AdaptiveAllocationsScaler, Long> getValue) { + List<LongWithAttributes> observations = new ArrayList<>(); + for (AdaptiveAllocationsScaler scaler : scalers.values()) { + Long value = getValue.apply(scaler); + if (value != null) { + observations.add(new LongWithAttributes(value, Map.of("deployment_id", scaler.getDeploymentId()))); + } + } + return observations; + } + + Collection<DoubleWithAttributes> observeDouble(Function<AdaptiveAllocationsScaler, Double> getValue) { + List<DoubleWithAttributes> observations = new ArrayList<>(); + for (AdaptiveAllocationsScaler scaler : scalers.values()) { + Double value = getValue.apply(scaler); + if (value != null) { + observations.add(new DoubleWithAttributes(value, Map.of("deployment_id", scaler.getDeploymentId()))); + } + } + return observations; + } + + void close() { + for (AutoCloseable metric : metrics) { + try { + metric.close(); + } catch (Exception e) { + // do nothing + } + } + metrics.clear(); + } + } + /** * The time interval between the adaptive allocations triggers.
*/ @@ -92,6 +201,8 @@ Stats sub(Stats value) { private final ClusterService clusterService; private final Client client; private final InferenceAuditor inferenceAuditor; + private final MeterRegistry meterRegistry; + private final Metrics metrics; private final boolean isNlpEnabled; private final Map> lastInferenceStatsByDeploymentAndNode; private Long lastInferenceStatsTimestampMillis; @@ -106,9 +217,10 @@ public AdaptiveAllocationsScalerService( ClusterService clusterService, Client client, InferenceAuditor inferenceAuditor, + MeterRegistry meterRegistry, boolean isNlpEnabled ) { - this(threadPool, clusterService, client, inferenceAuditor, isNlpEnabled, DEFAULT_TIME_INTERVAL_SECONDS); + this(threadPool, clusterService, client, inferenceAuditor, meterRegistry, isNlpEnabled, DEFAULT_TIME_INTERVAL_SECONDS); } // visible for testing @@ -117,6 +229,7 @@ public AdaptiveAllocationsScalerService( ClusterService clusterService, Client client, InferenceAuditor inferenceAuditor, + MeterRegistry meterRegistry, boolean isNlpEnabled, int timeIntervalSeconds ) { @@ -124,6 +237,7 @@ public AdaptiveAllocationsScalerService( this.clusterService = clusterService; this.client = client; this.inferenceAuditor = inferenceAuditor; + this.meterRegistry = meterRegistry; this.isNlpEnabled = isNlpEnabled; this.timeIntervalSeconds = timeIntervalSeconds; @@ -131,11 +245,13 @@ public AdaptiveAllocationsScalerService( lastInferenceStatsTimestampMillis = null; lastScaleUpTimesMillis = new HashMap<>(); scalers = new HashMap<>(); + metrics = new Metrics(); busy = new AtomicBoolean(false); } public synchronized void start() { updateAutoscalers(clusterService.state()); + metrics.init(); clusterService.addListener(this); if (scalers.isEmpty() == false) { startScheduling(); @@ -144,6 +260,7 @@ public synchronized void start() { public synchronized void stop() { stopScheduling(); + metrics.close(); } @Override diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlInitializationServiceTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlInitializationServiceTests.java index 2f251e3b0aee6..a5b9597886e15 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlInitializationServiceTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlInitializationServiceTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.DeterministicTaskQueue; +import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.ml.inference.adaptiveallocations.AdaptiveAllocationsScalerService; @@ -40,6 +41,7 @@ public class MlInitializationServiceTests extends ESTestCase { private ClusterService clusterService; private Client client; private InferenceAuditor inferenceAuditor; + private MeterRegistry meterRegistry; private MlAssignmentNotifier mlAssignmentNotifier; @Before @@ -49,6 +51,7 @@ public void setUpMocks() { clusterService = mock(ClusterService.class); client = mock(Client.class); inferenceAuditor = mock(InferenceAuditor.class); + meterRegistry = mock(MeterRegistry.class); mlAssignmentNotifier = mock(MlAssignmentNotifier.class); when(clusterService.getClusterName()).thenReturn(CLUSTER_NAME); @@ -75,6 +78,7 @@ public void testInitialize() { clusterService, client, inferenceAuditor, + meterRegistry, mlAssignmentNotifier, true, true, @@ 
-91,6 +95,7 @@ public void testInitialize_noMasterNode() { clusterService, client, inferenceAuditor, + meterRegistry, mlAssignmentNotifier, true, true, diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerServiceTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerServiceTests.java index 3ad44f256dc66..4aaddc91231f3 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerServiceTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerServiceTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ScalingExecutorBuilder; import org.elasticsearch.threadpool.TestThreadPool; @@ -55,6 +56,7 @@ public class AdaptiveAllocationsScalerServiceTests extends ESTestCase { private ClusterService clusterService; private Client client; private InferenceAuditor inferenceAuditor; + private MeterRegistry meterRegistry; @Override @Before @@ -66,6 +68,7 @@ public void setUp() throws Exception { clusterService = mock(ClusterService.class); client = mock(Client.class); inferenceAuditor = mock(InferenceAuditor.class); + meterRegistry = mock(MeterRegistry.class); } @Override @@ -156,6 +159,7 @@ public void test() throws IOException { clusterService, client, inferenceAuditor, + meterRegistry, true, 1 ); From e5c7cbc3c08cfe76c1487bea7d8004854c11b015 Mon Sep 17 00:00:00 2001 From: Nick Tindall Date: Mon, 29 Jul 2024 19:40:08 +1000 Subject: [PATCH 082/105] Add ability to bypass circuit breakers on replica in TransportReplicationAction (#111040) Relates: #109414 --- ...ActionBypassCircuitBreakerOnReplicaIT.java | 192 ++++++++++++++++++ ...tReplicationActionRetryOnClosedNodeIT.java | 3 +- .../discovery/DiscoveryDisruptionIT.java | 67 +++--- ...TransportVerifyShardBeforeCloseAction.java | 3 +- .../flush/TransportShardFlushAction.java | 3 +- .../TransportVerifyShardIndexBlockAction.java | 3 +- .../refresh/TransportShardRefreshAction.java | 3 +- .../action/bulk/TransportShardBulkAction.java | 3 +- .../TransportResyncReplicationAction.java | 3 +- .../TransportReplicationAction.java | 27 ++- .../replication/TransportWriteAction.java | 6 +- .../seqno/GlobalCheckpointSyncAction.java | 3 +- .../RetentionLeaseBackgroundSyncAction.java | 3 +- .../index/seqno/RetentionLeaseSyncAction.java | 3 +- .../TransportReplicationActionTests.java | 3 +- ...ReplicationAllPermitsAcquisitionTests.java | 3 +- .../TransportWriteActionTests.java | 6 +- .../elasticsearch/test/ESIntegTestCase.java | 41 ++++ .../TransportBulkShardOperationsAction.java | 3 +- 19 files changed, 315 insertions(+), 63 deletions(-) create mode 100644 server/src/internalClusterTest/java/org/elasticsearch/action/support/replication/TransportReplicationActionBypassCircuitBreakerOnReplicaIT.java diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/support/replication/TransportReplicationActionBypassCircuitBreakerOnReplicaIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/support/replication/TransportReplicationActionBypassCircuitBreakerOnReplicaIT.java new file mode 100644 index 
0000000000000..70add580f8d8c --- /dev/null +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/support/replication/TransportReplicationActionBypassCircuitBreakerOnReplicaIT.java @@ -0,0 +1,192 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.action.support.replication; + +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.cluster.action.shard.ShardStateAction; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.plugins.ActionPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; + +import java.io.IOException; +import java.util.Collection; +import java.util.List; +import java.util.function.BiFunction; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; + +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) +public class TransportReplicationActionBypassCircuitBreakerOnReplicaIT extends ESIntegTestCase { + + @Override + protected Collection<Class<? extends Plugin>> nodePlugins() { + return List.of(TestPlugin.class); + } + + public static class Request extends ReplicationRequest<Request> { + public Request(ShardId shardId) { + super(shardId); + } + + public Request(StreamInput in) throws IOException { + super(in); + } + + @Override + public String toString() { + return "test-request"; + } + } + + public static class Response extends ReplicationResponse { + public Response() {} + + public Response(StreamInput in) throws IOException { + super(in); + } + } + + public static class TestAction extends TransportReplicationAction<Request, Request, Response> { + private static final String ACTION_NAME = "internal:test-replication-action"; + private static final ActionType<Response> TYPE = new ActionType<>(ACTION_NAME); + + @Inject + public TestAction( + Settings settings, + TransportService transportService, + ClusterService clusterService, + IndicesService indicesService, + ThreadPool threadPool, + ShardStateAction shardStateAction, + ActionFilters actionFilters + ) { + super( + settings, + ACTION_NAME, + transportService, + clusterService, + indicesService, + threadPool, + shardStateAction, + actionFilters, + Request::new, + Request::new, + threadPool.executor(ThreadPool.Names.GENERIC), + SyncGlobalCheckpointAfterOperation.DoNotSync, + PrimaryActionExecution.RejectOnOverload, + 
ReplicaActionExecution.BypassCircuitBreaker + ); + } + + @Override + protected Response newResponseInstance(StreamInput in) throws IOException { + return new Response(in); + } + + @Override + protected void shardOperationOnPrimary( + Request shardRequest, + IndexShard primary, + ActionListener<PrimaryResult<Request, Response>> listener + ) { + listener.onResponse(new PrimaryResult<>(shardRequest, new Response())); + } + + @Override + protected void shardOperationOnReplica(Request shardRequest, IndexShard replica, ActionListener<ReplicaResult> listener) { + listener.onResponse(new ReplicaResult()); + } + } + + public static class TestPlugin extends Plugin implements ActionPlugin { + + public TestPlugin() {} + + @Override + public List<ActionHandler<? extends ActionRequest, ? extends ActionResponse>> getActions() { + return List.of(new ActionHandler<>(TestAction.TYPE, TestAction.class)); + } + } + + private enum PrimaryOrReplica implements BiFunction<String, String, String> { + PRIMARY { + @Override + public String apply(String primaryName, String replicaName) { + return primaryName; + } + }, + REPLICA { + @Override + public String apply(String primaryName, String replicaName) { + return replicaName; + } + } + } + + public void testActionCompletesWhenReplicaCircuitBreakersAreAtCapacity() { + maxOutCircuitBreakersAndExecuteAction(PrimaryOrReplica.REPLICA); + } + + public void testActionFailsWhenPrimaryCircuitBreakersAreAtCapacity() { + AssertionError assertionError = assertThrows( + AssertionError.class, + () -> maxOutCircuitBreakersAndExecuteAction(PrimaryOrReplica.PRIMARY) + ); + assertNotNull( + "Not caused by CircuitBreakingException " + ExceptionsHelper.stackTrace(assertionError), + ExceptionsHelper.unwrap(assertionError, CircuitBreakingException.class) + ); + } + + private void maxOutCircuitBreakersAndExecuteAction(PrimaryOrReplica nodeToMaxOutCircuitBreakers) { + internalCluster().startMasterOnlyNodes(2); + String primary = internalCluster().startDataOnlyNode(); + assertAcked( + prepareCreate("test").setSettings( + Settings.builder() + .put(indexSettings()) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1) + ) + ); + + String replica = internalCluster().startDataOnlyNode(); + String coordinator = internalCluster().startCoordinatingOnlyNode(Settings.EMPTY); + ensureGreen("test"); + + try ( + var ignored = fullyAllocateCircuitBreakerOnNode( + nodeToMaxOutCircuitBreakers.apply(primary, replica), + CircuitBreaker.IN_FLIGHT_REQUESTS + ) + ) { + PlainActionFuture<Response> testActionResult = new PlainActionFuture<>(); + client(coordinator).execute(TestAction.TYPE, new Request(new ShardId(resolveIndex("test"), 0)), testActionResult); + safeGet(testActionResult); + } + } +} diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/support/replication/TransportReplicationActionRetryOnClosedNodeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/support/replication/TransportReplicationActionRetryOnClosedNodeIT.java index c4737468a766c..459ca39e86b0e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/support/replication/TransportReplicationActionRetryOnClosedNodeIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/support/replication/TransportReplicationActionRetryOnClosedNodeIT.java @@ -107,7 +107,8 @@ public TestAction( Request::new, threadPool.executor(ThreadPool.Names.GENERIC), SyncGlobalCheckpointAfterOperation.DoNotSync, - PrimaryActionExecution.RejectOnOverload + PrimaryActionExecution.RejectOnOverload, + ReplicaActionExecution.SubjectToCircuitBreaker ); } diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/discovery/DiscoveryDisruptionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/discovery/DiscoveryDisruptionIT.java index 71c6ef956c4d4..66fc6e0236b53 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/discovery/DiscoveryDisruptionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/discovery/DiscoveryDisruptionIT.java @@ -17,9 +17,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.breaker.CircuitBreaker; -import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.disruption.NetworkDisruption; import org.elasticsearch.test.disruption.ServiceDisruptionScheme; @@ -259,46 +257,31 @@ public void testJoinWaitsForCircuitBreaker() throws InterruptedException { logger.info("--> master [{}], victim [{}]", masterName, victimName); // fill up the circuit breaker to breaking point - final var circuitBreaker = internalCluster().getInstance(CircuitBreakerService.class, victimName) - .getBreaker(CircuitBreaker.IN_FLIGHT_REQUESTS); - long allocationSize = 1; - while (true) { - try { - circuitBreaker.addEstimateBytesAndMaybeBreak(allocationSize, "test"); - } catch (CircuitBreakingException e) { - circuitBreaker.addWithoutBreaking(allocationSize); - break; - } - allocationSize <<= 1; - assert 0 <= allocationSize; - } - - // drop the victim from the cluster with a network disruption - final var masterTransportService = MockTransportService.getInstance(masterName); - masterTransportService.addFailToSendNoConnectRule(internalCluster().getInstance(TransportService.class, victimName)); - logger.info("--> waiting for victim's departure"); - ensureStableCluster(2, masterName); - - // verify that the victim sends no joins while the circuit breaker is breaking - final var victimTransportService = MockTransportService.getInstance(victimName); - victimTransportService.addSendBehavior((connection, requestId, action, request, options) -> { - assertNotEquals(action, JoinHelper.JOIN_ACTION_NAME); - connection.sendRequest(requestId, action, request, options); - }); - - // fix the network disruption - logger.info("--> removing network disruption"); - masterTransportService.clearAllRules(); - ensureStableCluster(2, masterName); - - // permit joins again - victimTransportService.addSendBehavior(null); - - // release the breaker - logger.info("--> releasing allocations from circuit breaker"); - while (0 < allocationSize) { - circuitBreaker.addWithoutBreaking(-allocationSize); - allocationSize >>= 1; + try (var ignored = fullyAllocateCircuitBreakerOnNode(victimName, CircuitBreaker.IN_FLIGHT_REQUESTS)) { + + // drop the victim from the cluster with a network disruption + final var masterTransportService = MockTransportService.getInstance(masterName); + masterTransportService.addFailToSendNoConnectRule(internalCluster().getInstance(TransportService.class, victimName)); + logger.info("--> waiting for victim's departure"); + ensureStableCluster(2, masterName); + + // verify that the victim sends no joins while the circuit breaker is breaking + final var victimTransportService = MockTransportService.getInstance(victimName); + victimTransportService.addSendBehavior((connection, requestId, action, request, options) -> { + 
assertNotEquals(action, JoinHelper.JOIN_ACTION_NAME); + connection.sendRequest(requestId, action, request, options); + }); + + // fix the network disruption + logger.info("--> removing network disruption"); + masterTransportService.clearAllRules(); + ensureStableCluster(2, masterName); + + // permit joins again + victimTransportService.addSendBehavior(null); + + // release the breaker + logger.info("--> releasing allocations from circuit breaker"); } logger.info("--> waiting for cluster to heal"); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/close/TransportVerifyShardBeforeCloseAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/close/TransportVerifyShardBeforeCloseAction.java index 643f92ec3378f..3ea246ca0e611 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/close/TransportVerifyShardBeforeCloseAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/close/TransportVerifyShardBeforeCloseAction.java @@ -68,7 +68,8 @@ public TransportVerifyShardBeforeCloseAction( ShardRequest::new, threadPool.executor(ThreadPool.Names.MANAGEMENT), SyncGlobalCheckpointAfterOperation.DoNotSync, - PrimaryActionExecution.RejectOnOverload + PrimaryActionExecution.RejectOnOverload, + ReplicaActionExecution.SubjectToCircuitBreaker ); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportShardFlushAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportShardFlushAction.java index 69e1309b89aef..dedd4a27678ea 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportShardFlushAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportShardFlushAction.java @@ -60,7 +60,8 @@ public TransportShardFlushAction( ShardFlushRequest::new, threadPool.executor(ThreadPool.Names.FLUSH), SyncGlobalCheckpointAfterOperation.DoNotSync, - PrimaryActionExecution.RejectOnOverload + PrimaryActionExecution.RejectOnOverload, + ReplicaActionExecution.SubjectToCircuitBreaker ); transportService.registerRequestHandler( PRE_SYNCED_FLUSH_ACTION_NAME, diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/TransportVerifyShardIndexBlockAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/TransportVerifyShardIndexBlockAction.java index e93b3983ee85b..1de5988da26c9 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/TransportVerifyShardIndexBlockAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/TransportVerifyShardIndexBlockAction.java @@ -69,7 +69,8 @@ public TransportVerifyShardIndexBlockAction( ShardRequest::new, threadPool.executor(ThreadPool.Names.MANAGEMENT), SyncGlobalCheckpointAfterOperation.DoNotSync, - PrimaryActionExecution.RejectOnOverload + PrimaryActionExecution.RejectOnOverload, + ReplicaActionExecution.SubjectToCircuitBreaker ); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java index cc4edcf0efb81..15ff792a888e6 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java @@ -70,7 +70,8 @@ public TransportShardRefreshAction( ShardRefreshReplicaRequest::new, 
threadPool.executor(ThreadPool.Names.REFRESH), SyncGlobalCheckpointAfterOperation.DoNotSync, - PrimaryActionExecution.RejectOnOverload + PrimaryActionExecution.RejectOnOverload, + ReplicaActionExecution.SubjectToCircuitBreaker ); // registers the unpromotable version of shard refresh action new TransportUnpromotableShardRefreshAction(clusterService, transportService, shardStateAction, actionFilters, indicesService); diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java index fc9df7bbf73b9..56dd651f1611e 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java @@ -117,7 +117,8 @@ public TransportShardBulkAction( ExecutorSelector.getWriteExecutorForShard(threadPool), PrimaryActionExecution.RejectOnOverload, indexingPressure, - systemIndices + systemIndices, + ReplicaActionExecution.SubjectToCircuitBreaker ); this.updateHelper = updateHelper; this.mappingUpdatedAction = mappingUpdatedAction; diff --git a/server/src/main/java/org/elasticsearch/action/resync/TransportResyncReplicationAction.java b/server/src/main/java/org/elasticsearch/action/resync/TransportResyncReplicationAction.java index 5a891f33480fa..9d40764951f26 100644 --- a/server/src/main/java/org/elasticsearch/action/resync/TransportResyncReplicationAction.java +++ b/server/src/main/java/org/elasticsearch/action/resync/TransportResyncReplicationAction.java @@ -73,7 +73,8 @@ public TransportResyncReplicationAction( ExecutorSelector.getWriteExecutorForShard(threadPool), PrimaryActionExecution.Force, /* we should never reject resync because of thread pool capacity on primary */ indexingPressure, - systemIndices + systemIndices, + ReplicaActionExecution.SubjectToCircuitBreaker ); } diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java index 3c97bda2ef8d0..7e3e5bdee206d 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java @@ -117,6 +117,20 @@ protected enum SyncGlobalCheckpointAfterOperation { AttemptAfterSuccess } + /** + * Execution of the replica action + */ + protected enum ReplicaActionExecution { + /** + * Will only execute when permitted by the configured circuit breakers + */ + SubjectToCircuitBreaker, + /** + * Will bypass the configured circuit breaker checks + */ + BypassCircuitBreaker + } + /** * The timeout for retrying replication requests. */ @@ -170,12 +184,14 @@ protected TransportReplicationAction( Writeable.Reader replicaRequestReader, Executor executor, SyncGlobalCheckpointAfterOperation syncGlobalCheckpointAfterOperation, - PrimaryActionExecution primaryActionExecution + PrimaryActionExecution primaryActionExecution, + ReplicaActionExecution replicaActionExecution ) { // TODO: consider passing the executor, investigate doExecute and let InboundHandler/TransportAction handle concurrency. 
super(actionName, actionFilters, transportService.getTaskManager(), EsExecutors.DIRECT_EXECUTOR_SERVICE); assert syncGlobalCheckpointAfterOperation != null : "Must specify global checkpoint sync behaviour"; assert primaryActionExecution != null : "Must specify primary action execution behaviour"; + assert replicaActionExecution != null : "Must specify replica action execution behaviour"; this.threadPool = threadPool; this.transportService = transportService; this.clusterService = clusterService; @@ -209,12 +225,15 @@ protected TransportReplicationAction( this::handlePrimaryRequest ); - // we must never reject on because of thread pool capacity on replicas + boolean canTripCircuitBreakerOnReplica = switch (replicaActionExecution) { + case BypassCircuitBreaker -> false; + case SubjectToCircuitBreaker -> true; + }; transportService.registerRequestHandler( transportReplicaAction, executor, - true, - true, + true, // we must never reject because of thread pool capacity on replicas + canTripCircuitBreakerOnReplica, in -> new ConcreteReplicaRequest<>(replicaRequestReader, in), this::handleReplicaRequest ); diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/TransportWriteAction.java b/server/src/main/java/org/elasticsearch/action/support/replication/TransportWriteAction.java index f380710cc0794..90b636ed69e24 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/TransportWriteAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/TransportWriteAction.java @@ -78,7 +78,8 @@ protected TransportWriteAction( BiFunction executorFunction, PrimaryActionExecution primaryActionExecution, IndexingPressure indexingPressure, - SystemIndices systemIndices + SystemIndices systemIndices, + ReplicaActionExecution replicaActionExecution ) { // We pass ThreadPool.Names.SAME to the super class as we control the dispatching to the // ThreadPool.Names.WRITE/ThreadPool.Names.SYSTEM_WRITE thread pools in this class. 
@@ -95,7 +96,8 @@ protected TransportWriteAction( replicaRequest, EsExecutors.DIRECT_EXECUTOR_SERVICE, SyncGlobalCheckpointAfterOperation.AttemptAfterSuccess, - primaryActionExecution + primaryActionExecution, + replicaActionExecution ); this.executorFunction = executorFunction; this.indexingPressure = indexingPressure; diff --git a/server/src/main/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncAction.java b/server/src/main/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncAction.java index a051d9c2df430..e7f97173de288 100644 --- a/server/src/main/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncAction.java +++ b/server/src/main/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncAction.java @@ -64,7 +64,8 @@ public GlobalCheckpointSyncAction( Request::new, threadPool.executor(ThreadPool.Names.WRITE), SyncGlobalCheckpointAfterOperation.DoNotSync, - PrimaryActionExecution.Force + PrimaryActionExecution.Force, + ReplicaActionExecution.SubjectToCircuitBreaker ); } diff --git a/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseBackgroundSyncAction.java b/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseBackgroundSyncAction.java index 0aa0f0b8d1556..af23386254b66 100644 --- a/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseBackgroundSyncAction.java +++ b/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseBackgroundSyncAction.java @@ -83,7 +83,8 @@ public RetentionLeaseBackgroundSyncAction( Request::new, threadPool.executor(ThreadPool.Names.MANAGEMENT), SyncGlobalCheckpointAfterOperation.DoNotSync, - PrimaryActionExecution.RejectOnOverload + PrimaryActionExecution.RejectOnOverload, + ReplicaActionExecution.SubjectToCircuitBreaker ); } diff --git a/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseSyncAction.java b/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseSyncAction.java index 0efcf8ac9298b..1678e0021df59 100644 --- a/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseSyncAction.java +++ b/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseSyncAction.java @@ -93,7 +93,8 @@ public RetentionLeaseSyncAction( new ManagementOnlyExecutorFunction(threadPool), PrimaryActionExecution.RejectOnOverload, indexingPressure, - systemIndices + systemIndices, + ReplicaActionExecution.SubjectToCircuitBreaker ); } diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java index 04ad7d410e9b0..1e891aa80b696 100644 --- a/server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java @@ -1519,7 +1519,8 @@ private class TestAction extends TransportReplicationAction EsExecutors.DIRECT_EXECUTOR_SERVICE, PrimaryActionExecution.RejectOnOverload, new IndexingPressure(Settings.EMPTY), - EmptySystemIndices.INSTANCE + EmptySystemIndices.INSTANCE, + ReplicaActionExecution.SubjectToCircuitBreaker ); this.withDocumentFailureOnPrimary = withDocumentFailureOnPrimary; this.withDocumentFailureOnReplica = withDocumentFailureOnReplica; @@ -456,7 +457,8 @@ protected TestAction( (service, ignore) -> EsExecutors.DIRECT_EXECUTOR_SERVICE, PrimaryActionExecution.RejectOnOverload, new IndexingPressure(settings), - EmptySystemIndices.INSTANCE + EmptySystemIndices.INSTANCE, + 
ReplicaActionExecution.SubjectToCircuitBreaker ); this.withDocumentFailureOnPrimary = false; this.withDocumentFailureOnReplica = false; diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index db6fc9ea696d5..33d36ed5e2cdb 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -82,6 +82,8 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkAddress; @@ -100,6 +102,7 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Releasable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.env.Environment; @@ -119,6 +122,7 @@ import org.elasticsearch.index.translog.Translog; import org.elasticsearch.indices.IndicesQueryCache; import org.elasticsearch.indices.IndicesRequestCache; +import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.store.IndicesStore; import org.elasticsearch.monitor.jvm.HotThreads; import org.elasticsearch.node.NodeMocksPlugin; @@ -2543,4 +2547,41 @@ public Settings onNodeStopped(String nodeName) throws Exception { } }); } + + /** + * Allocate the entire capacity of a circuit breaker on a specific node + * + * @param targetNode The node on which to allocate + * @param breakerName The circuit breaker to allocate + * @return A {@link Releasable} which will de-allocate the amount allocated + */ + protected static Releasable fullyAllocateCircuitBreakerOnNode(String targetNode, String breakerName) { + final var circuitBreaker = internalCluster().getInstance(CircuitBreakerService.class, targetNode).getBreaker(breakerName); + final long totalAllocated = fullyAllocate(circuitBreaker); + return () -> circuitBreaker.addWithoutBreaking(-totalAllocated); + } + + /** + * Fully allocate a circuit breaker + * + * @param circuitBreaker The circuit breaker to allocate + * @return the amount of bytes allocated + */ + private static long fullyAllocate(CircuitBreaker circuitBreaker) { + long allocationSize = 1; + long totalAllocated = 0; + while (true) { + try { + circuitBreaker.addEstimateBytesAndMaybeBreak(allocationSize, "test"); + totalAllocated += allocationSize; + } catch (CircuitBreakingException e) { + circuitBreaker.addWithoutBreaking(allocationSize); + totalAllocated += allocationSize; + break; + } + allocationSize <<= 1; + assert 0 <= allocationSize; + } + return totalAllocated; + } } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/bulk/TransportBulkShardOperationsAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/bulk/TransportBulkShardOperationsAction.java index 2d0c43315f746..28e52cbef5df0 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/bulk/TransportBulkShardOperationsAction.java +++ 
b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/bulk/TransportBulkShardOperationsAction.java @@ -69,7 +69,8 @@ public TransportBulkShardOperationsAction( ExecutorSelector.getWriteExecutorForShard(threadPool), PrimaryActionExecution.RejectOnOverload, indexingPressure, - systemIndices + systemIndices, + ReplicaActionExecution.SubjectToCircuitBreaker ); } From 2e0d0e2c90304dffea68415ff294700d9d95e1cf Mon Sep 17 00:00:00 2001 From: weizijun Date: Mon, 29 Jul 2024 18:17:25 +0800 Subject: [PATCH 083/105] fix text_similarity_reranker doc (#111256) --- docs/reference/search/retriever.asciidoc | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/docs/reference/search/retriever.asciidoc b/docs/reference/search/retriever.asciidoc index b86339b905631..0afe9f77286a8 100644 --- a/docs/reference/search/retriever.asciidoc +++ b/docs/reference/search/retriever.asciidoc @@ -262,13 +262,13 @@ GET /index/_search "text_similarity_reranker": { "retriever": { "standard": { ... } - } - }, - "field": "text", - "inference_id": "my-cohere-rerank-model", - "inference_text": "Most famous landmark in Paris", - "rank_window_size": 100, - "min_score": 0.5 + }, + "field": "text", + "inference_id": "my-cohere-rerank-model", + "inference_text": "Most famous landmark in Paris", + "rank_window_size": 100, + "min_score": 0.5 + } } } ---- From 76c4e37dd195cd9555d9c7297daa8e83c5b2072f Mon Sep 17 00:00:00 2001 From: Panagiotis Bailis Date: Mon, 29 Jul 2024 14:11:39 +0300 Subject: [PATCH 084/105] Updating error message for missing shards in open_point_in_time action (#111320) --- .../org/elasticsearch/action/search/SearchPhase.java | 12 ++++++++---- .../search/TransportOpenPointInTimeAction.java | 5 +++++ .../sql/qa/jdbc/single_node/JdbcShardFailureIT.java | 2 +- .../xpack/sql/qa/single_node/JdbcShardFailureIT.java | 2 +- 4 files changed, 15 insertions(+), 6 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java index 7ad81154691c0..da8479873a4b6 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java @@ -42,7 +42,13 @@ public void start() { } } - static void doCheckNoMissingShards(String phaseName, SearchRequest request, GroupShardsIterator shardsIts) { + protected String missingShardsErrorMessage(StringBuilder missingShards) { + return "Search rejected due to missing shards [" + + missingShards + + "]. Consider using `allow_partial_search_results` setting to bypass this error."; + } + + protected void doCheckNoMissingShards(String phaseName, SearchRequest request, GroupShardsIterator shardsIts) { assert request.allowPartialSearchResults() != null : "SearchRequest missing setting for allowPartialSearchResults"; if (request.allowPartialSearchResults() == false) { final StringBuilder missingShards = new StringBuilder(); @@ -58,9 +64,7 @@ static void doCheckNoMissingShards(String phaseName, SearchRequest request, Grou } if (missingShards.isEmpty() == false) { // Status red - shard is missing all copies and would produce partial results for an index search - final String msg = "Search rejected due to missing shards [" - + missingShards - + "]. 
Consider using `allow_partial_search_results` setting to bypass this error."; + final String msg = missingShardsErrorMessage(missingShards); throw new SearchPhaseExecutionException(phaseName, msg, null, ShardSearchFailure.EMPTY_ARRAY); } } diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java index 91784ba331857..92d90fa8e55ad 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java @@ -213,6 +213,11 @@ SearchPhase openPointInTimePhase( searchRequest.getMaxConcurrentShardRequests(), clusters ) { + @Override + protected String missingShardsErrorMessage(StringBuilder missingShards) { + return "[open_point_in_time] action requires all shards to be available. Missing shards: [" + missingShards + "]"; + } + @Override protected void executePhaseOnShard( SearchShardIterator shardIt, diff --git a/x-pack/plugin/sql/qa/jdbc/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/jdbc/single_node/JdbcShardFailureIT.java b/x-pack/plugin/sql/qa/jdbc/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/jdbc/single_node/JdbcShardFailureIT.java index f83047411f0b0..0e0f7dc9722d9 100644 --- a/x-pack/plugin/sql/qa/jdbc/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/jdbc/single_node/JdbcShardFailureIT.java +++ b/x-pack/plugin/sql/qa/jdbc/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/jdbc/single_node/JdbcShardFailureIT.java @@ -76,7 +76,7 @@ public void createTestIndex() throws IOException { public void testPartialResponseHandling() throws SQLException { try (Connection c = esJdbc(); Statement s = c.createStatement()) { SQLException exception = expectThrows(SQLException.class, () -> s.executeQuery("SELECT * FROM test ORDER BY test_field ASC")); - assertThat(exception.getMessage(), containsString("Search rejected due to missing shards")); + assertThat(exception.getMessage(), containsString("[open_point_in_time] action requires all shards to be available")); } } } diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcShardFailureIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcShardFailureIT.java index dc9989b26c3b2..91f3ab029f55c 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcShardFailureIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcShardFailureIT.java @@ -89,7 +89,7 @@ public void testPartialResponseHandling() throws Exception { createTestIndex(); try (Connection c = esJdbc(); Statement s = c.createStatement()) { SQLException exception = expectThrows(SQLException.class, () -> s.executeQuery("SELECT * FROM test ORDER BY test_field ASC")); - assertThat(exception.getMessage(), containsString("Search rejected due to missing shards")); + assertThat(exception.getMessage(), containsString("[open_point_in_time] action requires all shards to be available")); } } From 2ed5ce75fc4002efec32f77d496129fed33f8256 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Mon, 29 Jul 2024 22:33:42 +1000 Subject: [PATCH 085/105] Mute 
org.elasticsearch.xpack.searchablesnapshots.AzureSearchableSnapshotsIT org.elasticsearch.xpack.searchablesnapshots.AzureSearchableSnapshotsIT #111279 --- muted-tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 9f175d094f016..89375d5558454 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -134,6 +134,8 @@ tests: - class: org.elasticsearch.xpack.security.authc.oidc.OpenIdConnectAuthIT method: testAuthenticateWithCodeFlowAndClientPost issue: https://github.com/elastic/elasticsearch/issues/111396 +- class: org.elasticsearch.xpack.searchablesnapshots.AzureSearchableSnapshotsIT + issue: https://github.com/elastic/elasticsearch/issues/111279 # Examples: # From 86dff99dcc5ad46d454425328ffa5fb167a49a19 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Mon, 29 Jul 2024 22:36:45 +1000 Subject: [PATCH 086/105] Mute org.elasticsearch.repositories.azure.RepositoryAzureClientYamlTestSuiteIT org.elasticsearch.repositories.azure.RepositoryAzureClientYamlTestSuiteIT #111345 --- muted-tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 89375d5558454..6c8b5bc39553d 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -136,6 +136,8 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/111396 - class: org.elasticsearch.xpack.searchablesnapshots.AzureSearchableSnapshotsIT issue: https://github.com/elastic/elasticsearch/issues/111279 +- class: org.elasticsearch.repositories.azure.RepositoryAzureClientYamlTestSuiteIT + issue: https://github.com/elastic/elasticsearch/issues/111345 # Examples: # From 38f301a4ded72230e8963f2ea6c4f244290a7c18 Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Mon, 29 Jul 2024 13:37:45 +0100 Subject: [PATCH 087/105] [DOCS] Additional reranking docs updates (#111350) * Simplify overview, specify available rerank options * Update links * Clarify eland-uploaded models is for hugging face --- docs/reference/search/retriever.asciidoc | 5 ++++ .../semantic-reranking.asciidoc | 29 +++++++------------ 2 files changed, 15 insertions(+), 19 deletions(-) diff --git a/docs/reference/search/retriever.asciidoc b/docs/reference/search/retriever.asciidoc index 0afe9f77286a8..1b7376c21daab 100644 --- a/docs/reference/search/retriever.asciidoc +++ b/docs/reference/search/retriever.asciidoc @@ -209,6 +209,11 @@ GET /index/_search The `text_similarity_reranker` is a type of retriever that enhances search results by re-ranking documents based on semantic similarity to a specified inference text, using a machine learning model. +[TIP] +==== +Refer to <> for a high level overview of semantic reranking. +==== + ===== Prerequisites To use `text_similarity_reranker` you must first set up a `rerank` task using the <>. diff --git a/docs/reference/search/search-your-data/retrievers-reranking/semantic-reranking.asciidoc b/docs/reference/search/search-your-data/retrievers-reranking/semantic-reranking.asciidoc index 75c06aa953302..f25741fca0b8f 100644 --- a/docs/reference/search/search-your-data/retrievers-reranking/semantic-reranking.asciidoc +++ b/docs/reference/search/search-your-data/retrievers-reranking/semantic-reranking.asciidoc @@ -5,7 +5,7 @@ preview::[] [TIP] ==== -This overview focuses more on the high-level concepts and use cases for semantic reranking. For full implementation details on how to set up and use semantic reranking in {es}, see the <> in the Search API docs. 
+This overview focuses more on the high-level concepts and use cases for semantic reranking. For full implementation details on how to set up and use semantic reranking in {es}, see the <> in the Search API docs. ==== Rerankers improve the relevance of results from earlier-stage retrieval mechanisms. @@ -89,11 +89,16 @@ In {es}, semantic rerankers are implemented using the {es} <>. +. *Choose a reranking model*. +Currently you can: + +** Integrate directly with the <> using the `rerank` task type +** Integrate directly with the <> using the `rerank` task type +** Upload a model to {es} from Hugging Face with {eland-docs}/machine-learning.html#ml-nlp-pytorch[Eland] +*** Then set up an <> with the `rerank` task type +. *Create a `rerank` task using the <>*. The Inference API creates an inference endpoint and configures your chosen machine learning model to perform the reranking task. -. Define a `text_similarity_reranker` retriever in your search request. +. *Define a `text_similarity_reranker` retriever in your search request*. The retriever syntax makes it simple to configure both the retrieval and reranking of search results in a single API call. .*Example search request* with semantic reranker @@ -127,20 +132,6 @@ POST _search // TEST[skip:TBD] ============== -[discrete] -[[semantic-reranking-types]] -==== Supported reranking types - -The following `text_similarity_reranker` model configuration options are available. - -*Text similarity with cross-encoder* - -This solution uses a hosted or 3rd party inference service which relies on a cross-encoder model. -The model receives the text fields from the _top-K_ documents, as well as the search query, and calculates scores directly, which are then used to rerank the documents. -Used with the Cohere inference service rolled out in 8.13, turn on semantic reranking that works out of the box. -Check out our https://github.com/elastic/elasticsearch-labs/blob/main/notebooks/integrations/cohere/cohere-elasticsearch.ipynb[Python notebook] for using Cohere with {es}. [discrete] [[semantic-reranking-learn-more]] ==== Learn more From 1a939e922e08cd02c5c76030082ea5f5f4f68073 Mon Sep 17 00:00:00 2001 From: Pat Whelan Date: Mon, 29 Jul 2024 09:46:02 -0400 Subject: [PATCH 088/105] [ML] Create and inject APM Inference Metrics (#111293) We are migrating from an in-memory cumulative counter to a Time Series Data Stream delta counter. The goal is to avoid metrics suddenly dropping to zero when a node restarts, hopefully increasing accuracy of the metric.
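For reference, a minimal sketch of how the new counter is created and used (the type and method names below are the ones introduced in this diff; the surrounding wiring is illustrative only):

    // Created once at plugin startup from the node's MeterRegistry; this
    // registers the "es.inference.requests.count.total" long counter.
    InferenceStats stats = ApmInferenceStats.create(meterRegistry);

    // On each inference request, record one operation, attaching the model's
    // service, task type and (when non-null) model ID as attributes, so a
    // single delta counter replaces the old per-model in-memory counters.
    stats.incrementRequestCount(model);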
Co-authored-by: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com> --- .../inference/ServiceSettings.java | 2 + .../xpack/inference/InferencePlugin.java | 11 +- .../action/TransportInferenceAction.java | 7 +- .../embeddings/CohereEmbeddingsModel.java | 4 +- .../OpenAiEmbeddingsServiceSettings.java | 2 +- .../telemetry/ApmInferenceStats.java | 49 ++++++++ .../telemetry/InferenceAPMStats.java | 47 ------- .../inference/telemetry/InferenceStats.java | 52 ++------ .../xpack/inference/telemetry/Stats.java | 30 ----- .../xpack/inference/telemetry/StatsMap.java | 57 --------- .../telemetry/ApmInferenceStatsTests.java | 69 ++++++++++ .../inference/telemetry/StatsMapTests.java | 119 ------------------ 12 files changed, 142 insertions(+), 307 deletions(-) create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/telemetry/ApmInferenceStats.java delete mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/telemetry/InferenceAPMStats.java delete mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/telemetry/Stats.java delete mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/telemetry/StatsMap.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/telemetry/ApmInferenceStatsTests.java delete mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/telemetry/StatsMapTests.java diff --git a/server/src/main/java/org/elasticsearch/inference/ServiceSettings.java b/server/src/main/java/org/elasticsearch/inference/ServiceSettings.java index 34a58f83963ce..58e87105f70a3 100644 --- a/server/src/main/java/org/elasticsearch/inference/ServiceSettings.java +++ b/server/src/main/java/org/elasticsearch/inference/ServiceSettings.java @@ -9,6 +9,7 @@ package org.elasticsearch.inference; import org.elasticsearch.common.io.stream.VersionedNamedWriteable; +import org.elasticsearch.core.Nullable; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.xcontent.ToXContentObject; @@ -48,5 +49,6 @@ default DenseVectorFieldMapper.ElementType elementType() { * be chosen when initializing a deployment within their service. In this situation, return null. 
* @return the model used to perform inference or null if the model is not defined */ + @Nullable String modelId(); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index fce2c54c535c9..ec9398358d180 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -26,6 +26,7 @@ import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.inference.InferenceServiceExtension; import org.elasticsearch.inference.InferenceServiceRegistry; +import org.elasticsearch.node.PluginComponentBinding; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.ExtensiblePlugin; import org.elasticsearch.plugins.MapperPlugin; @@ -84,8 +85,8 @@ import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserService; import org.elasticsearch.xpack.inference.services.mistral.MistralService; import org.elasticsearch.xpack.inference.services.openai.OpenAiService; -import org.elasticsearch.xpack.inference.telemetry.InferenceAPMStats; -import org.elasticsearch.xpack.inference.telemetry.StatsMap; +import org.elasticsearch.xpack.inference.telemetry.ApmInferenceStats; +import org.elasticsearch.xpack.inference.telemetry.InferenceStats; import java.util.ArrayList; import java.util.Collection; @@ -196,10 +197,10 @@ public Collection createComponents(PluginServices services) { var actionFilter = new ShardBulkInferenceActionFilter(registry, modelRegistry); shardBulkInferenceActionFilter.set(actionFilter); - var statsFactory = new InferenceAPMStats.Factory(services.telemetryProvider().getMeterRegistry()); - var statsMap = new StatsMap<>(InferenceAPMStats::key, statsFactory::newInferenceRequestAPMCounter); + var meterRegistry = services.telemetryProvider().getMeterRegistry(); + var stats = new PluginComponentBinding<>(InferenceStats.class, ApmInferenceStats.create(meterRegistry)); - return List.of(modelRegistry, registry, httpClientManager, statsMap); + return List.of(modelRegistry, registry, httpClientManager, stats); } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java index 575697b5d0d39..b7fff3b704695 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java @@ -21,22 +21,26 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.inference.registry.ModelRegistry; +import org.elasticsearch.xpack.inference.telemetry.InferenceStats; public class TransportInferenceAction extends HandledTransportAction { private final ModelRegistry modelRegistry; private final InferenceServiceRegistry serviceRegistry; + private final InferenceStats inferenceStats; @Inject public TransportInferenceAction( TransportService transportService, ActionFilters actionFilters, ModelRegistry modelRegistry, - InferenceServiceRegistry serviceRegistry + InferenceServiceRegistry serviceRegistry, + InferenceStats inferenceStats ) { 
super(InferenceAction.NAME, transportService, actionFilters, InferenceAction.Request::new, EsExecutors.DIRECT_EXECUTOR_SERVICE); this.modelRegistry = modelRegistry; this.serviceRegistry = serviceRegistry; + this.inferenceStats = inferenceStats; } @Override @@ -76,6 +80,7 @@ protected void doExecute(Task task, InferenceAction.Request request, ActionListe unparsedModel.settings(), unparsedModel.secrets() ); + inferenceStats.incrementRequestCount(model); inferOnService(model, request, service.get(), delegate); }); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModel.java index 538d88a59ca76..fea5226bf9c6f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModel.java @@ -28,7 +28,7 @@ public static CohereEmbeddingsModel of(CohereEmbeddingsModel model, Map serviceSettings, @@ -37,7 +37,7 @@ public CohereEmbeddingsModel( ConfigurationParseContext context ) { this( - modelId, + inferenceId, taskType, service, CohereEmbeddingsServiceSettings.fromMap(serviceSettings, context), diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java index d474e935fbda7..6ef1f6f0feefe 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java @@ -150,7 +150,7 @@ public OpenAiEmbeddingsServiceSettings( @Nullable RateLimitSettings rateLimitSettings ) { this.uri = uri; - this.modelId = modelId; + this.modelId = Objects.requireNonNull(modelId); this.organizationId = organizationId; this.similarity = similarity; this.dimensions = dimensions; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/telemetry/ApmInferenceStats.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/telemetry/ApmInferenceStats.java new file mode 100644 index 0000000000000..ae14a0792dead --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/telemetry/ApmInferenceStats.java @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.telemetry; + +import org.elasticsearch.inference.Model; +import org.elasticsearch.telemetry.metric.LongCounter; +import org.elasticsearch.telemetry.metric.MeterRegistry; + +import java.util.HashMap; +import java.util.Objects; + +public class ApmInferenceStats implements InferenceStats { + private final LongCounter inferenceAPMRequestCounter; + + public ApmInferenceStats(LongCounter inferenceAPMRequestCounter) { + this.inferenceAPMRequestCounter = Objects.requireNonNull(inferenceAPMRequestCounter); + } + + @Override + public void incrementRequestCount(Model model) { + var service = model.getConfigurations().getService(); + var taskType = model.getTaskType(); + var modelId = model.getServiceSettings().modelId(); + + var attributes = new HashMap<String, Object>(5); + attributes.put("service", service); + attributes.put("task_type", taskType.toString()); + if (modelId != null) { + attributes.put("model_id", modelId); + } + + inferenceAPMRequestCounter.incrementBy(1, attributes); + } + + public static ApmInferenceStats create(MeterRegistry meterRegistry) { + return new ApmInferenceStats( + meterRegistry.registerLongCounter( + "es.inference.requests.count.total", + "Inference API request counts for a particular service, task type, model ID", + "operations" + ) + ); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/telemetry/InferenceAPMStats.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/telemetry/InferenceAPMStats.java deleted file mode 100644 index 76977fef76045..0000000000000 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/telemetry/InferenceAPMStats.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0.
- */ - -package org.elasticsearch.xpack.inference.telemetry; - -import org.elasticsearch.inference.Model; -import org.elasticsearch.telemetry.metric.LongCounter; -import org.elasticsearch.telemetry.metric.MeterRegistry; - -import java.util.Map; -import java.util.Objects; - -public class InferenceAPMStats extends InferenceStats { - - private final LongCounter inferenceAPMRequestCounter; - - public InferenceAPMStats(Model model, MeterRegistry meterRegistry) { - super(model); - this.inferenceAPMRequestCounter = meterRegistry.registerLongCounter( - "es.inference.requests.count", - "Inference API request counts for a particular service, task type, model ID", - "operations" - ); - } - - @Override - public void increment() { - super.increment(); - inferenceAPMRequestCounter.incrementBy(1, Map.of("service", service, "task_type", taskType.toString(), "model_id", modelId)); - } - - public static final class Factory { - private final MeterRegistry meterRegistry; - - public Factory(MeterRegistry meterRegistry) { - this.meterRegistry = Objects.requireNonNull(meterRegistry); - } - - public InferenceAPMStats newInferenceRequestAPMCounter(Model model) { - return new InferenceAPMStats(model, meterRegistry); - } - } -} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/telemetry/InferenceStats.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/telemetry/InferenceStats.java index d639f9da71f56..d080e818e45fc 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/telemetry/InferenceStats.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/telemetry/InferenceStats.java @@ -8,52 +8,14 @@ package org.elasticsearch.xpack.inference.telemetry; import org.elasticsearch.inference.Model; -import org.elasticsearch.inference.TaskType; -import org.elasticsearch.xpack.core.inference.InferenceRequestStats; -import java.util.Objects; -import java.util.concurrent.atomic.LongAdder; +public interface InferenceStats { -public class InferenceStats implements Stats { - protected final String service; - protected final TaskType taskType; - protected final String modelId; - protected final LongAdder counter = new LongAdder(); + /** + * Increment the counter for a particular value in a thread safe manner. 
+ * @param model the model to increment request count for + */ + void incrementRequestCount(Model model); - public static String key(Model model) { - StringBuilder builder = new StringBuilder(); - builder.append(model.getConfigurations().getService()); - builder.append(":"); - builder.append(model.getTaskType()); - - if (model.getServiceSettings().modelId() != null) { - builder.append(":"); - builder.append(model.getServiceSettings().modelId()); - } - - return builder.toString(); - } - - public InferenceStats(Model model) { - Objects.requireNonNull(model); - - service = model.getConfigurations().getService(); - taskType = model.getTaskType(); - modelId = model.getServiceSettings().modelId(); - } - - @Override - public void increment() { - counter.increment(); - } - - @Override - public long getCount() { - return counter.sum(); - } - - @Override - public InferenceRequestStats toSerializableForm() { - return new InferenceRequestStats(service, taskType, modelId, getCount()); - } + InferenceStats NOOP = model -> {}; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/telemetry/Stats.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/telemetry/Stats.java deleted file mode 100644 index bb1e9c98fc2cb..0000000000000 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/telemetry/Stats.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.inference.telemetry; - -import org.elasticsearch.xpack.core.inference.SerializableStats; - -public interface Stats { - - /** - * Increase the counter by one. - */ - void increment(); - - /** - * Return the current value of the counter. - * @return the current value of the counter - */ - long getCount(); - - /** - * Convert the object into a serializable form that can be written across nodes and returned in xcontent format. - * @return the serializable format of the object - */ - SerializableStats toSerializableForm(); -} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/telemetry/StatsMap.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/telemetry/StatsMap.java deleted file mode 100644 index 1cfecfb4507d6..0000000000000 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/telemetry/StatsMap.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.inference.telemetry; - -import org.elasticsearch.xpack.core.inference.SerializableStats; - -import java.util.Map; -import java.util.Objects; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; -import java.util.function.Function; -import java.util.stream.Collectors; - -/** - * A map to provide tracking incrementing statistics. 
- * - * @param <Input> The input to derive the keys and values for the map - * @param <Values> The type of the values stored in the map - */ -public class StatsMap<Input, Values extends Stats> { - - private final ConcurrentMap<String, Values> stats = new ConcurrentHashMap<>(); - private final Function<Input, String> keyCreator; - private final Function<Input, Values> valueCreator; - - /** - * @param keyCreator a function for creating a key in the map based on the input provided - * @param valueCreator a function for creating a value in the map based on the input provided - */ - public StatsMap(Function<Input, String> keyCreator, Function<Input, Values> valueCreator) { - this.keyCreator = Objects.requireNonNull(keyCreator); - this.valueCreator = Objects.requireNonNull(valueCreator); - } - - /** - * Increment the counter for a particular value in a thread safe manner. - * @param input the input to derive the appropriate key in the map - */ - public void increment(Input input) { - var value = stats.computeIfAbsent(keyCreator.apply(input), key -> valueCreator.apply(input)); - value.increment(); - } - - /** - * Build a map that can be serialized. This takes a snapshot of the current state. Any concurrent calls to increment may or may not - * be represented in the resulting serializable map. - * @return a map that is more easily serializable - */ - public Map<String, SerializableStats> toSerializableMap() { - return stats.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, entry -> entry.getValue().toSerializableForm())); - } -} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/telemetry/ApmInferenceStatsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/telemetry/ApmInferenceStatsTests.java new file mode 100644 index 0000000000000..1a5aba5f89ad2 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/telemetry/ApmInferenceStatsTests.java @@ -0,0 +1,69 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.xpack.inference.telemetry; + +import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ServiceSettings; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.telemetry.metric.LongCounter; +import org.elasticsearch.telemetry.metric.MeterRegistry; +import org.elasticsearch.test.ESTestCase; + +import java.util.Map; + +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +public class ApmInferenceStatsTests extends ESTestCase { + + public void testRecordWithModel() { + var longCounter = mock(LongCounter.class); + + var stats = new ApmInferenceStats(longCounter); + + stats.incrementRequestCount(model("service", TaskType.ANY, "modelId")); + + verify(longCounter).incrementBy( + eq(1L), + eq(Map.of("service", "service", "task_type", TaskType.ANY.toString(), "model_id", "modelId")) + ); + } + + public void testRecordWithoutModel() { + var longCounter = mock(LongCounter.class); + + var stats = new ApmInferenceStats(longCounter); + + stats.incrementRequestCount(model("service", TaskType.ANY, null)); + + verify(longCounter).incrementBy(eq(1L), eq(Map.of("service", "service", "task_type", TaskType.ANY.toString()))); + } + + public void testCreation() { + assertNotNull(ApmInferenceStats.create(MeterRegistry.NOOP)); + } + + private Model model(String service, TaskType taskType, String modelId) { + var configuration = mock(ModelConfigurations.class); + when(configuration.getService()).thenReturn(service); + var settings = mock(ServiceSettings.class); + if (modelId != null) { + when(settings.modelId()).thenReturn(modelId); + } + + var model = mock(Model.class); + when(model.getTaskType()).thenReturn(taskType); + when(model.getConfigurations()).thenReturn(configuration); + when(model.getServiceSettings()).thenReturn(settings); + + return model; + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/telemetry/StatsMapTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/telemetry/StatsMapTests.java deleted file mode 100644 index fcd8d3d7cefbc..0000000000000 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/telemetry/StatsMapTests.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.inference.telemetry; - -import org.elasticsearch.inference.TaskType; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; -import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsModel; -import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsServiceSettingsTests; -import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsTaskSettingsTests; -import org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsModel; -import org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsServiceSettingsTests; -import org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsTaskSettingsTests; - -import java.util.Map; - -import static org.hamcrest.Matchers.is; - -public class StatsMapTests extends ESTestCase { - public void testAddingEntry_InitializesTheCountToOne() { - var stats = new StatsMap<>(InferenceStats::key, InferenceStats::new); - - stats.increment( - new OpenAiEmbeddingsModel( - "inference_id", - TaskType.TEXT_EMBEDDING, - "openai", - OpenAiEmbeddingsServiceSettingsTests.getServiceSettingsMap("modelId", null, null), - OpenAiEmbeddingsTaskSettingsTests.getTaskSettingsMap(null), - null, - ConfigurationParseContext.REQUEST - ) - ); - - var converted = stats.toSerializableMap(); - - assertThat( - converted, - is( - Map.of( - "openai:text_embedding:modelId", - new org.elasticsearch.xpack.core.inference.InferenceRequestStats("openai", TaskType.TEXT_EMBEDDING, "modelId", 1) - ) - ) - ); - } - - public void testIncrementingWithSeparateModels_IncrementsTheCounterToTwo() { - var stats = new StatsMap<>(InferenceStats::key, InferenceStats::new); - - var model1 = new OpenAiEmbeddingsModel( - "inference_id", - TaskType.TEXT_EMBEDDING, - "openai", - OpenAiEmbeddingsServiceSettingsTests.getServiceSettingsMap("modelId", null, null), - OpenAiEmbeddingsTaskSettingsTests.getTaskSettingsMap(null), - null, - ConfigurationParseContext.REQUEST - ); - - var model2 = new OpenAiEmbeddingsModel( - "inference_id", - TaskType.TEXT_EMBEDDING, - "openai", - OpenAiEmbeddingsServiceSettingsTests.getServiceSettingsMap("modelId", null, null), - OpenAiEmbeddingsTaskSettingsTests.getTaskSettingsMap(null), - null, - ConfigurationParseContext.REQUEST - ); - - stats.increment(model1); - stats.increment(model2); - - var converted = stats.toSerializableMap(); - - assertThat( - converted, - is( - Map.of( - "openai:text_embedding:modelId", - new org.elasticsearch.xpack.core.inference.InferenceRequestStats("openai", TaskType.TEXT_EMBEDDING, "modelId", 2) - ) - ) - ); - } - - public void testNullModelId_ResultsInKeyWithout() { - var stats = new StatsMap<>(InferenceStats::key, InferenceStats::new); - - stats.increment( - new CohereEmbeddingsModel( - "inference_id", - TaskType.TEXT_EMBEDDING, - "cohere", - CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap(null, null, null), - CohereEmbeddingsTaskSettingsTests.getTaskSettingsMap(null, null), - null, - ConfigurationParseContext.REQUEST - ) - ); - - var converted = stats.toSerializableMap(); - - assertThat( - converted, - is( - Map.of( - "cohere:text_embedding", - new org.elasticsearch.xpack.core.inference.InferenceRequestStats("cohere", TaskType.TEXT_EMBEDDING, null, 1) - ) - ) - ); - } -} From 80d539d986c06968aa7441d835626e73907093d1 Mon Sep 17 00:00:00 2001 From: Valeriy Khakhutskyy <1292899+valeriy42@users.noreply.github.com> Date: Mon, 
29 Jul 2024 17:11:36 +0200 Subject: [PATCH 089/105] [ML] Fix failing test DetectionRulesTests.testEqualsAndHashcode (#111351) Fixes #111308 --- muted-tests.yml | 3 --- .../xpack/core/ml/job/config/DetectionRuleTests.java | 2 ++ 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 6c8b5bc39553d..d106ca3c9d701 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -108,9 +108,6 @@ tests: - class: org.elasticsearch.upgrades.LogsIndexModeFullClusterRestartIT method: testLogsIndexing {cluster=UPGRADED} issue: https://github.com/elastic/elasticsearch/issues/111306 -- class: org.elasticsearch.xpack.core.ml.job.config.DetectionRuleTests - method: testEqualsAndHashcode - issue: https://github.com/elastic/elasticsearch/issues/111308 - class: org.elasticsearch.xpack.ml.integration.DatafeedJobsRestIT issue: https://github.com/elastic/elasticsearch/issues/111319 - class: org.elasticsearch.xpack.esql.analysis.VerifierTests diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/DetectionRuleTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/DetectionRuleTests.java index d716f34f86e6c..127088d82bade 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/DetectionRuleTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/DetectionRuleTests.java @@ -139,6 +139,8 @@ protected DetectionRule mutateInstance(DetectionRule instance) { if (actions.contains(RuleAction.FORCE_TIME_SHIFT) && params.getForceTimeShift() == null) { params = new RuleParams(new RuleParamsForForceTimeShift(randomLong())); + } else if (actions.contains(RuleAction.FORCE_TIME_SHIFT) == false && params.getForceTimeShift() != null) { + params = new RuleParams(); } return new DetectionRule.Builder(conditions).setActions(actions).setScope(scope).setParams(params).build(); From e24a7c12707cab4f306de8ec7af8d9196d26e0bf Mon Sep 17 00:00:00 2001 From: Chris Earle Date: Mon, 29 Jul 2024 10:28:13 -0600 Subject: [PATCH 090/105] [Service Account] Add AutoOps account (#111316) This adds a `ServiceAccount` for AutoOps usage to collect monitoring stats from the cluster. 
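In practice this account is consumed like the other service accounts: a service token is created for it and then presented as a bearer credential on subsequent requests. A minimal sketch in the style of ServiceAccountIT, where the token name `token1` is illustrative rather than part of this change:

final Request createTokenRequest = new Request("POST", "_security/service/elastic/auto-ops/credential/token/token1");
final Response createTokenResponse = client().performRequest(createTokenRequest);
assertOK(createTokenResponse);
// The response body carries token.value; clients then authenticate by sending
// an "Authorization: Bearer <token.value>" header on each request.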
--- docs/changelog/111316.yaml | 5 ++ .../authc/service/ServiceAccountIT.java | 31 +++++++ .../authc/service/ElasticServiceAccounts.java | 20 +++++ ...TransportGetServiceAccountActionTests.java | 15 ++-- .../service/ElasticServiceAccountsTests.java | 90 +++++++++++++++++++ .../service/ServiceAccountServiceTests.java | 12 ++- .../test/service_accounts/10_basic.yml | 6 +- 7 files changed, 167 insertions(+), 12 deletions(-) create mode 100644 docs/changelog/111316.yaml diff --git a/docs/changelog/111316.yaml b/docs/changelog/111316.yaml new file mode 100644 index 0000000000000..0d915cd1ec3ea --- /dev/null +++ b/docs/changelog/111316.yaml @@ -0,0 +1,5 @@ +pr: 111316 +summary: "[Service Account] Add `AutoOps` account" +area: Security +type: enhancement +issues: [] diff --git a/x-pack/plugin/security/qa/service-account/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/service/ServiceAccountIT.java b/x-pack/plugin/security/qa/service-account/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/service/ServiceAccountIT.java index e790866cf3d77..c1686a500fb2c 100644 --- a/x-pack/plugin/security/qa/service-account/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/service/ServiceAccountIT.java +++ b/x-pack/plugin/security/qa/service-account/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/service/ServiceAccountIT.java @@ -80,6 +80,33 @@ public class ServiceAccountIT extends ESRestTestCase { } """; + private static final String ELASTIC_AUTO_OPS_ROLE_DESCRIPTOR = """ + { + "cluster": [ + "monitor", + "read_ilm", + "read_slm" + ], + "indices": [ + { + "names": [ + "*" + ], + "privileges": [ + "monitor", + "view_index_metadata" + ], + "allow_restricted_indices": true + } + ], + "applications": [], + "run_as": [], + "metadata": {}, + "transient_metadata": { + "enabled": true + } + }"""; + private static final String ELASTIC_FLEET_SERVER_ROLE_DESCRIPTOR = """ { "cluster": [ @@ -400,6 +427,10 @@ public void testGetServiceAccount() throws IOException { assertOK(getServiceAccountResponse3); assertServiceAccountRoleDescriptor(getServiceAccountResponse3, "elastic/fleet-server", ELASTIC_FLEET_SERVER_ROLE_DESCRIPTOR); + final Request getServiceAccountRequestAutoOps = new Request("GET", "_security/service/elastic/auto-ops"); + final Response getServiceAccountResponseAutoOps = client().performRequest(getServiceAccountRequestAutoOps); + assertServiceAccountRoleDescriptor(getServiceAccountResponseAutoOps, "elastic/auto-ops", ELASTIC_AUTO_OPS_ROLE_DESCRIPTOR); + final Request getServiceAccountRequestKibana = new Request("GET", "_security/service/elastic/kibana"); final Response getServiceAccountResponseKibana = client().performRequest(getServiceAccountRequestKibana); assertOK(getServiceAccountResponseKibana); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccounts.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccounts.java index abd586920f2d8..b62ce28422a9c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccounts.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccounts.java @@ -22,6 +22,25 @@ final class ElasticServiceAccounts { static final String NAMESPACE = "elastic"; + private static final ServiceAccount AUTO_OPS_ACCOUNT = new ElasticServiceAccount( + "auto-ops", + new RoleDescriptor( + NAMESPACE + "/auto-ops", + new String[] { 
"monitor", "read_ilm", "read_slm" }, + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder() + .allowRestrictedIndices(true) + .indices("*") + .privileges("monitor", "view_index_metadata") + .build(), }, + null, + null, + null, + null, + null + ) + ); + private static final ServiceAccount ENTERPRISE_SEARCH_ACCOUNT = new ElasticServiceAccount( "enterprise-search-server", new RoleDescriptor( @@ -173,6 +192,7 @@ final class ElasticServiceAccounts { ); static final Map ACCOUNTS = Stream.of( + AUTO_OPS_ACCOUNT, ENTERPRISE_SEARCH_ACCOUNT, FLEET_ACCOUNT, FLEET_REMOTE_ACCOUNT, diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/service/TransportGetServiceAccountActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/service/TransportGetServiceAccountActionTests.java index b313d94a46ce5..7e35297fcb655 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/service/TransportGetServiceAccountActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/service/TransportGetServiceAccountActionTests.java @@ -20,7 +20,6 @@ import java.util.Arrays; import java.util.Collections; -import java.util.stream.Collectors; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; @@ -47,12 +46,16 @@ public void testDoExecute() { final PlainActionFuture future1 = new PlainActionFuture<>(); transportGetServiceAccountAction.doExecute(mock(Task.class), request1, future1); final GetServiceAccountResponse getServiceAccountResponse1 = future1.actionGet(); - assertThat(getServiceAccountResponse1.getServiceAccountInfos().length, equalTo(4)); + assertThat(getServiceAccountResponse1.getServiceAccountInfos().length, equalTo(5)); assertThat( - Arrays.stream(getServiceAccountResponse1.getServiceAccountInfos()) - .map(ServiceAccountInfo::getPrincipal) - .collect(Collectors.toList()), - containsInAnyOrder("elastic/enterprise-search-server", "elastic/fleet-server", "elastic/fleet-server-remote", "elastic/kibana") + Arrays.stream(getServiceAccountResponse1.getServiceAccountInfos()).map(ServiceAccountInfo::getPrincipal).toList(), + containsInAnyOrder( + "elastic/auto-ops", + "elastic/enterprise-search-server", + "elastic/fleet-server", + "elastic/fleet-server-remote", + "elastic/kibana" + ) ); final GetServiceAccountRequest request2 = new GetServiceAccountRequest("elastic", "fleet-server"); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccountsTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccountsTests.java index 756d53285a8f6..21e29469bb02b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccountsTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccountsTests.java @@ -8,18 +8,30 @@ package org.elasticsearch.xpack.security.authc.service; import org.elasticsearch.action.admin.cluster.health.TransportClusterHealthAction; +import org.elasticsearch.action.admin.cluster.node.stats.TransportNodesStatsAction; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsAction; +import org.elasticsearch.action.admin.cluster.snapshots.create.TransportCreateSnapshotAction; +import 
org.elasticsearch.action.admin.cluster.snapshots.delete.TransportDeleteSnapshotAction; +import org.elasticsearch.action.admin.cluster.snapshots.get.TransportGetSnapshotsAction; +import org.elasticsearch.action.admin.cluster.snapshots.restore.TransportRestoreSnapshotAction; +import org.elasticsearch.action.admin.indices.alias.TransportIndicesAliasesAction; +import org.elasticsearch.action.admin.indices.alias.get.GetAliasesAction; import org.elasticsearch.action.admin.indices.create.AutoCreateAction; import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.mapping.put.TransportAutoPutMappingAction; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; +import org.elasticsearch.action.admin.indices.settings.get.GetSettingsAction; import org.elasticsearch.action.admin.indices.settings.put.TransportUpdateSettingsAction; import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; import org.elasticsearch.action.admin.indices.template.delete.TransportDeleteIndexTemplateAction; +import org.elasticsearch.action.admin.indices.template.get.GetComponentTemplateAction; +import org.elasticsearch.action.admin.indices.template.get.GetComposableIndexTemplateAction; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesAction; import org.elasticsearch.action.admin.indices.template.put.TransportPutIndexTemplateAction; import org.elasticsearch.action.bulk.TransportBulkAction; +import org.elasticsearch.action.datastreams.DataStreamsStatsAction; +import org.elasticsearch.action.datastreams.lifecycle.GetDataStreamLifecycleAction; import org.elasticsearch.action.delete.TransportDeleteAction; import org.elasticsearch.action.get.TransportGetAction; import org.elasticsearch.action.get.TransportMultiGetAction; @@ -52,6 +64,11 @@ import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore; import org.elasticsearch.xpack.core.security.user.KibanaSystemUser; import org.elasticsearch.xpack.core.security.user.User; +import org.elasticsearch.xpack.core.slm.action.DeleteSnapshotLifecycleAction; +import org.elasticsearch.xpack.core.slm.action.ExecuteSnapshotLifecycleAction; +import org.elasticsearch.xpack.core.slm.action.GetSLMStatusAction; +import org.elasticsearch.xpack.core.slm.action.GetSnapshotLifecycleAction; +import org.elasticsearch.xpack.core.slm.action.PutSnapshotLifecycleAction; import org.elasticsearch.xpack.security.authc.service.ElasticServiceAccounts.ElasticServiceAccount; import java.util.List; @@ -67,6 +84,79 @@ public class ElasticServiceAccountsTests extends ESTestCase { + public void testAutoOpsPrivileges() { + final Role role = Role.buildFromRoleDescriptor( + ElasticServiceAccounts.ACCOUNTS.get("elastic/auto-ops").roleDescriptor(), + new FieldPermissionsCache(Settings.EMPTY), + RESTRICTED_INDICES + ); + + final Authentication authentication = AuthenticationTestHelper.builder().serviceAccount().build(); + final TransportRequest request = mock(TransportRequest.class); + + // monitor + assertThat(role.cluster().check(GetComponentTemplateAction.NAME, request, authentication), is(true)); + assertThat(role.cluster().check(GetComposableIndexTemplateAction.NAME, request, authentication), is(true)); + assertThat(role.cluster().check(GetIndexTemplatesAction.NAME, request, authentication), is(true)); + assertThat(role.cluster().check(TransportClusterHealthAction.NAME, request, authentication), 
is(true)); + assertThat(role.cluster().check(TransportNodesStatsAction.TYPE.name(), request, authentication), is(true)); + + assertThat(role.cluster().check(ClusterUpdateSettingsAction.NAME, request, authentication), is(false)); + assertThat(role.cluster().check(TransportPutIndexTemplateAction.TYPE.name(), request, authentication), is(false)); + assertThat(role.cluster().check(TransportDeleteIndexTemplateAction.TYPE.name(), request, authentication), is(false)); + + // read_ilm + assertThat(role.cluster().check(GetLifecycleAction.NAME, request, authentication), is(true)); + + assertThat(role.cluster().check(ILMActions.STOP.name(), request, authentication), is(false)); + assertThat(role.cluster().check(ILMActions.PUT.name(), request, authentication), is(false)); + + // read_slm + assertThat(role.cluster().check(GetSLMStatusAction.NAME, request, authentication), is(true)); + assertThat(role.cluster().check(GetSnapshotLifecycleAction.NAME, request, authentication), is(true)); + + assertThat(role.cluster().check(DeleteSnapshotLifecycleAction.NAME, request, authentication), is(false)); + assertThat(role.cluster().check(ExecuteSnapshotLifecycleAction.NAME, request, authentication), is(false)); + assertThat(role.cluster().check(PutSnapshotLifecycleAction.NAME, request, authentication), is(false)); + assertThat(role.cluster().check(TransportGetSnapshotsAction.TYPE.name(), request, authentication), is(false)); + assertThat(role.cluster().check(TransportCreateSnapshotAction.TYPE.name(), request, authentication), is(false)); + assertThat(role.cluster().check(TransportDeleteSnapshotAction.TYPE.name(), request, authentication), is(false)); + assertThat(role.cluster().check(TransportRestoreSnapshotAction.TYPE.name(), request, authentication), is(false)); + + // index monitor + List.of( + "search-" + randomAlphaOfLengthBetween(1, 20), + ".kibana-" + randomAlphaOfLengthBetween(1, 20), + ".elastic-analytics-collections", + "logs-" + randomAlphaOfLengthBetween(1, 20), + "my-index-" + randomAlphaOfLengthBetween(1, 20), + ".internal.alerts-default.alerts-default-" + randomAlphaOfLengthBetween(1, 20) + ).forEach(index -> { + final IndexAbstraction anyIndex = mockIndexAbstraction(index); + + assertThat(role.indices().allowedIndicesMatcher(IndicesStatsAction.NAME).test(anyIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(DataStreamsStatsAction.NAME).test(anyIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(GetAliasesAction.NAME).test(anyIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(GetSettingsAction.NAME).test(anyIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(GetDataStreamLifecycleAction.INSTANCE.name()).test(anyIndex), is(true)); + + assertThat(role.indices().allowedIndicesMatcher(AutoCreateAction.NAME).test(anyIndex), is(false)); + assertThat(role.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(anyIndex), is(false)); + assertThat(role.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(anyIndex), is(false)); + assertThat(role.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(anyIndex), is(false)); + assertThat(role.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(anyIndex), is(false)); + assertThat(role.indices().allowedIndicesMatcher(TransportIndicesAliasesAction.NAME).test(anyIndex), is(false)); + assertThat(role.indices().allowedIndicesMatcher(TransportBulkAction.NAME).test(anyIndex), is(false)); + 
assertThat(role.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(anyIndex), is(false)); + assertThat(role.indices().allowedIndicesMatcher(TransportMultiGetAction.NAME).test(anyIndex), is(false)); + assertThat(role.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(anyIndex), is(false)); + assertThat(role.indices().allowedIndicesMatcher(TransportMultiSearchAction.TYPE.name()).test(anyIndex), is(false)); + assertThat(role.indices().allowedIndicesMatcher(TransportUpdateSettingsAction.TYPE.name()).test(anyIndex), is(false)); + assertThat(role.indices().allowedIndicesMatcher(RefreshAction.NAME).test(anyIndex), is(false)); + assertThat(role.indices().allowedIndicesMatcher("indices:foo").test(anyIndex), is(false)); + }); + } + public void testKibanaSystemPrivileges() { final RoleDescriptor serviceAccountRoleDescriptor = ElasticServiceAccounts.ACCOUNTS.get("elastic/kibana").roleDescriptor(); final RoleDescriptor reservedRolesStoreRoleDescriptor = ReservedRolesStore.kibanaSystemRoleDescriptor(KibanaSystemUser.ROLE_NAME); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ServiceAccountServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ServiceAccountServiceTests.java index c66f3168c7b7d..43fe57dd8b313 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ServiceAccountServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ServiceAccountServiceTests.java @@ -17,7 +17,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.SecureString; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockLog; @@ -82,7 +81,6 @@ public void init() throws UnknownHostException { indexServiceAccountTokenStore = mock(IndexServiceAccountTokenStore.class); when(fileServiceAccountTokenStore.getTokenSource()).thenReturn(TokenInfo.TokenSource.FILE); when(indexServiceAccountTokenStore.getTokenSource()).thenReturn(TokenInfo.TokenSource.INDEX); - final Settings.Builder builder = Settings.builder().put("xpack.security.enabled", true); client = mock(Client.class); when(client.threadPool()).thenReturn(threadPool); serviceAccountService = new ServiceAccountService(client, fileServiceAccountTokenStore, indexServiceAccountTokenStore); @@ -96,11 +94,17 @@ public void stopThreadPool() { public void testGetServiceAccountPrincipals() { assertThat( ServiceAccountService.getServiceAccountPrincipals(), - containsInAnyOrder("elastic/enterprise-search-server", "elastic/fleet-server", "elastic/fleet-server-remote", "elastic/kibana") + containsInAnyOrder( + "elastic/auto-ops", + "elastic/enterprise-search-server", + "elastic/fleet-server", + "elastic/fleet-server-remote", + "elastic/kibana" + ) ); } - public void testTryParseToken() throws IOException, IllegalAccessException { + public void testTryParseToken() throws IOException { // Null for null assertNull(ServiceAccountService.tryParseToken(null)); diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/service_accounts/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/service_accounts/10_basic.yml index 47d6cdec2858b..a72e2d15c8e85 100644 --- 
a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/service_accounts/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/service_accounts/10_basic.yml @@ -31,7 +31,8 @@ teardown: "Test get service accounts": - do: security.get_service_accounts: {} - - length: { '': 4 } + - length: { '': 5 } + - is_true: "elastic/auto-ops" - is_true: "elastic/enterprise-search-server" - is_true: "elastic/fleet-server" - is_true: "elastic/fleet-server-remote" @@ -40,7 +41,8 @@ teardown: - do: security.get_service_accounts: namespace: elastic - - length: { '': 4 } + - length: { '': 5 } + - is_true: "elastic/auto-ops" - is_true: "elastic/enterprise-search-server" - is_true: "elastic/fleet-server" - is_true: "elastic/fleet-server-remote" From 735d80dffd17bedd75fdd964e85760ef03f63c4c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Iv=C3=A1n=20Cea=20Fontenla?= Date: Mon, 29 Jul 2024 19:07:15 +0200 Subject: [PATCH 091/105] ESQL: Add COUNT and COUNT_DISTINCT aggregation tests (#111409) --- docs/changelog/111367.yaml | 5 + .../functions/aggregation-functions.asciidoc | 12 +- .../appendix/count_distinct.asciidoc | 25 + .../esql/functions/appendix/values.asciidoc | 10 + .../esql/functions/count-distinct.asciidoc | 85 --- .../esql/functions/description/count.asciidoc | 5 + .../description/count_distinct.asciidoc | 5 + .../functions/description/values.asciidoc | 5 + .../functions/{ => examples}/count.asciidoc | 46 +- .../examples/count_distinct.asciidoc | 31 + .../esql/functions/examples/values.asciidoc | 13 + .../functions/kibana/definition/count.json | 159 +++++ .../kibana/definition/count_distinct.json | 607 ++++++++++++++++++ .../functions/kibana/definition/values.json | 119 ++++ .../esql/functions/kibana/docs/count.md | 11 + .../functions/kibana/docs/count_distinct.md | 11 + .../esql/functions/kibana/docs/values.md | 13 + .../esql/functions/layout/count.asciidoc | 15 + .../functions/layout/count_distinct.asciidoc | 16 + .../esql/functions/layout/values.asciidoc | 18 + .../esql/functions/parameters/count.asciidoc | 6 + .../parameters/count_distinct.asciidoc | 9 + .../esql/functions/parameters/values.asciidoc | 6 + .../esql/functions/signature/count.svg | 1 + .../functions/signature/count_distinct.svg | 1 + .../esql/functions/signature/values.svg | 1 + .../esql/functions/types/count.asciidoc | 20 + .../functions/types/count_distinct.asciidoc | 44 ++ .../esql/functions/types/values.asciidoc | 17 + .../aggregation/ValuesBytesRefAggregator.java | 2 +- .../aggregation/X-ValuesAggregator.java.st | 2 +- .../src/main/resources/meta.csv-spec | 8 +- .../expression/function/FunctionInfo.java | 5 + .../expression/function/aggregate/Count.java | 30 +- .../function/aggregate/CountDistinct.java | 77 ++- .../expression/function/aggregate/Values.java | 21 +- .../function/AbstractFunctionTestCase.java | 23 +- .../function/MultiRowTestCaseSupplier.java | 122 +++- .../expression/function/TestCaseSupplier.java | 15 + .../aggregate/CountDistinctTests.java | 176 +++++ .../function/aggregate/CountTests.java | 106 +++ .../function/aggregate/ValuesTests.java | 110 ++++ 42 files changed, 1849 insertions(+), 164 deletions(-) create mode 100644 docs/changelog/111367.yaml create mode 100644 docs/reference/esql/functions/appendix/count_distinct.asciidoc create mode 100644 docs/reference/esql/functions/appendix/values.asciidoc delete mode 100644 docs/reference/esql/functions/count-distinct.asciidoc create mode 100644 docs/reference/esql/functions/description/count.asciidoc create mode 100644 
docs/reference/esql/functions/description/count_distinct.asciidoc create mode 100644 docs/reference/esql/functions/description/values.asciidoc rename docs/reference/esql/functions/{ => examples}/count.asciidoc (63%) create mode 100644 docs/reference/esql/functions/examples/count_distinct.asciidoc create mode 100644 docs/reference/esql/functions/examples/values.asciidoc create mode 100644 docs/reference/esql/functions/kibana/definition/count.json create mode 100644 docs/reference/esql/functions/kibana/definition/count_distinct.json create mode 100644 docs/reference/esql/functions/kibana/definition/values.json create mode 100644 docs/reference/esql/functions/kibana/docs/count.md create mode 100644 docs/reference/esql/functions/kibana/docs/count_distinct.md create mode 100644 docs/reference/esql/functions/kibana/docs/values.md create mode 100644 docs/reference/esql/functions/layout/count.asciidoc create mode 100644 docs/reference/esql/functions/layout/count_distinct.asciidoc create mode 100644 docs/reference/esql/functions/layout/values.asciidoc create mode 100644 docs/reference/esql/functions/parameters/count.asciidoc create mode 100644 docs/reference/esql/functions/parameters/count_distinct.asciidoc create mode 100644 docs/reference/esql/functions/parameters/values.asciidoc create mode 100644 docs/reference/esql/functions/signature/count.svg create mode 100644 docs/reference/esql/functions/signature/count_distinct.svg create mode 100644 docs/reference/esql/functions/signature/values.svg create mode 100644 docs/reference/esql/functions/types/count.asciidoc create mode 100644 docs/reference/esql/functions/types/count_distinct.asciidoc create mode 100644 docs/reference/esql/functions/types/values.asciidoc create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinctTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesTests.java diff --git a/docs/changelog/111367.yaml b/docs/changelog/111367.yaml new file mode 100644 index 0000000000000..89e6c1d3b4da4 --- /dev/null +++ b/docs/changelog/111367.yaml @@ -0,0 +1,5 @@ +pr: 111367 +summary: "ESQL: Add Values aggregation tests, fix `ConstantBytesRefBlock` memory handling" +area: ES|QL +type: bug +issues: [] diff --git a/docs/reference/esql/functions/aggregation-functions.asciidoc b/docs/reference/esql/functions/aggregation-functions.asciidoc index 821b109741a0a..518aee563e952 100644 --- a/docs/reference/esql/functions/aggregation-functions.asciidoc +++ b/docs/reference/esql/functions/aggregation-functions.asciidoc @@ -9,8 +9,8 @@ The <> command supports these aggregate functions: // tag::agg_list[] * <> -* <> -* <> +* <> +* <> * <> * <> * <> @@ -19,13 +19,13 @@ The <> command supports these aggregate functions: * experimental:[] <> * <> * <> -* <> +* <> * experimental:[] <> // end::agg_list[] -include::count.asciidoc[] -include::count-distinct.asciidoc[] include::layout/avg.asciidoc[] +include::layout/count.asciidoc[] +include::layout/count_distinct.asciidoc[] include::layout/max.asciidoc[] include::layout/median.asciidoc[] include::layout/median_absolute_deviation.asciidoc[] @@ -34,5 +34,5 @@ include::layout/percentile.asciidoc[] include::layout/st_centroid_agg.asciidoc[] include::layout/sum.asciidoc[] include::layout/top.asciidoc[] -include::values.asciidoc[] 
+include::layout/values.asciidoc[] include::weighted-avg.asciidoc[] diff --git a/docs/reference/esql/functions/appendix/count_distinct.asciidoc b/docs/reference/esql/functions/appendix/count_distinct.asciidoc new file mode 100644 index 0000000000000..065065cf34e06 --- /dev/null +++ b/docs/reference/esql/functions/appendix/count_distinct.asciidoc @@ -0,0 +1,25 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-agg-count-distinct-approximate]] +==== Counts are approximate + +Computing exact counts requires loading values into a set and returning its +size. This doesn't scale when working on high-cardinality sets and/or large +values as the required memory usage and the need to communicate those +per-shard sets between nodes would utilize too many resources of the cluster. + +This `COUNT_DISTINCT` function is based on the +https://static.googleusercontent.com/media/research.google.com/fr//pubs/archive/40671.pdf[HyperLogLog++] +algorithm, which counts based on the hashes of the values with some interesting +properties: + +include::../../../aggregations/metrics/cardinality-aggregation.asciidoc[tag=explanation] + +The `COUNT_DISTINCT` function takes an optional second parameter to configure +the precision threshold. The precision_threshold options allows to trade memory +for accuracy, and defines a unique count below which counts are expected to be +close to accurate. Above this value, counts might become a bit more fuzzy. The +maximum supported value is 40000, thresholds above this number will have the +same effect as a threshold of 40000. The default value is `3000`. + diff --git a/docs/reference/esql/functions/appendix/values.asciidoc b/docs/reference/esql/functions/appendix/values.asciidoc new file mode 100644 index 0000000000000..ec3cfff2db6a6 --- /dev/null +++ b/docs/reference/esql/functions/appendix/values.asciidoc @@ -0,0 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[WARNING] +==== +This can use a significant amount of memory and ES|QL doesn't yet +grow aggregations beyond memory. So this aggregation will work until +it is used to collect more values than can fit into memory. Once it +collects too many values it will fail the query with +a <<circuit-breaker-errors,circuit breaker error>>. +==== diff --git a/docs/reference/esql/functions/count-distinct.asciidoc b/docs/reference/esql/functions/count-distinct.asciidoc deleted file mode 100644 index a9f30d24e0e83..0000000000000 --- a/docs/reference/esql/functions/count-distinct.asciidoc +++ /dev/null @@ -1,85 +0,0 @@ -[discrete] -[[esql-agg-count-distinct]] -=== `COUNT_DISTINCT` - -*Syntax* - -[source,esql] ----- -COUNT_DISTINCT(expression[, precision_threshold]) ----- - -*Parameters* - -`expression`:: -Expression that outputs the values on which to perform a distinct count. - -`precision_threshold`:: -Precision threshold. Refer to <<esql-agg-count-distinct-approximate>>. The -maximum supported value is 40000. Thresholds above this number will have the -same effect as a threshold of 40000. The default value is 3000. - -*Description* - -Returns the approximate number of distinct values. - -*Supported types* - -Can take any field type as input.
- -*Examples* - -[source.merge.styled,esql] ----- -include::{esql-specs}/stats_count_distinct.csv-spec[tag=count-distinct] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/stats_count_distinct.csv-spec[tag=count-distinct-result] -|=== - -With the optional second parameter to configure the precision threshold: - -[source.merge.styled,esql] ----- -include::{esql-specs}/stats_count_distinct.csv-spec[tag=count-distinct-precision] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/stats_count_distinct.csv-spec[tag=count-distinct-precision-result] -|=== - -The expression can use inline functions. This example splits a string into -multiple values using the `SPLIT` function and counts the unique values: - -[source.merge.styled,esql] ----- -include::{esql-specs}/stats_count_distinct.csv-spec[tag=docsCountDistinctWithExpression] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/stats_count_distinct.csv-spec[tag=docsCountDistinctWithExpression-result] -|=== - -[discrete] -[[esql-agg-count-distinct-approximate]] -==== Counts are approximate - -Computing exact counts requires loading values into a set and returning its -size. This doesn't scale when working on high-cardinality sets and/or large -values as the required memory usage and the need to communicate those -per-shard sets between nodes would utilize too many resources of the cluster. - -This `COUNT_DISTINCT` function is based on the -https://static.googleusercontent.com/media/research.google.com/fr//pubs/archive/40671.pdf[HyperLogLog++] -algorithm, which counts based on the hashes of the values with some interesting -properties: - -include::../../aggregations/metrics/cardinality-aggregation.asciidoc[tag=explanation] - -The `COUNT_DISTINCT` function takes an optional second parameter to configure -the precision threshold. The precision_threshold options allows to trade memory -for accuracy, and defines a unique count below which counts are expected to be -close to accurate. Above this value, counts might become a bit more fuzzy. The -maximum supported value is 40000, thresholds above this number will have the -same effect as a threshold of 40000. The default value is `3000`. \ No newline at end of file diff --git a/docs/reference/esql/functions/description/count.asciidoc b/docs/reference/esql/functions/description/count.asciidoc new file mode 100644 index 0000000000000..ee806d65a8ea3 --- /dev/null +++ b/docs/reference/esql/functions/description/count.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns the total number (count) of input values. diff --git a/docs/reference/esql/functions/description/count_distinct.asciidoc b/docs/reference/esql/functions/description/count_distinct.asciidoc new file mode 100644 index 0000000000000..d10825bb991f5 --- /dev/null +++ b/docs/reference/esql/functions/description/count_distinct.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns the approximate number of distinct values. 
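The precision parameter documented here can be exercised end to end through the ES|QL endpoint. A minimal sketch using the low-level REST client as in the integration tests above, where the index name `hosts` and field `ip0` are placeholders rather than anything introduced by this patch:

final Request esqlRequest = new Request("POST", "/_query");
// A threshold of 10000 trades extra memory for a more accurate distinct count;
// values above 40000 behave the same as 40000.
esqlRequest.setJsonEntity("{\"query\": \"FROM hosts | STATS distinct_ips = COUNT_DISTINCT(ip0, 10000)\"}");
final Response esqlResponse = client().performRequest(esqlRequest);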
diff --git a/docs/reference/esql/functions/description/values.asciidoc b/docs/reference/esql/functions/description/values.asciidoc new file mode 100644 index 0000000000000..b3cebcce955f0 --- /dev/null +++ b/docs/reference/esql/functions/description/values.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns all values in a group as a multivalued field. The order of the returned values isn't guaranteed. If you need the values returned in order use <<esql-mv_sort,`MV_SORT`>>. diff --git a/docs/reference/esql/functions/count.asciidoc b/docs/reference/esql/functions/examples/count.asciidoc similarity index 63% rename from docs/reference/esql/functions/count.asciidoc rename to docs/reference/esql/functions/examples/count.asciidoc index 66cfe76350cdd..fb696b51e054c 100644 --- a/docs/reference/esql/functions/count.asciidoc +++ b/docs/reference/esql/functions/examples/count.asciidoc @@ -1,27 +1,4 @@ -[discrete] -[[esql-agg-count]] -=== `COUNT` - -*Syntax* - -[source,esql] ----- -COUNT([expression]) ----- - -*Parameters* - -`expression`:: -Expression that outputs values to be counted. -If omitted, equivalent to `COUNT(*)` (the number of rows). - -*Description* - -Returns the total number (count) of input values. - -*Supported types* - -Can take any field type as input. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Examples* [source.merge.styled,esql] ---- include::{esql-specs}/stats.csv-spec[tag=count] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== include::{esql-specs}/stats.csv-spec[tag=count-result] |=== - -To count the number of rows, use `COUNT()` or `COUNT(*)`: - +To count the number of rows, use `COUNT()` or `COUNT(*)` [source.merge.styled,esql] ---- include::{esql-specs}/docs.csv-spec[tag=countAll] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== include::{esql-specs}/docs.csv-spec[tag=countAll-result] |=== - -The expression can use inline functions. This example splits a string into -multiple values using the `SPLIT` function and counts the values: - +The expression can use inline functions.
This example splits a string into multiple values using the `SPLIT` function and counts the values [source.merge.styled,esql] ---- include::{esql-specs}/stats.csv-spec[tag=docsCountWithExpression] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== include::{esql-specs}/stats.csv-spec[tag=docsCountWithExpression-result] |=== - -[[esql-agg-count-or-null]] -To count the number of times an expression returns `TRUE` use -a <<esql-where>> command to remove rows that shouldn't be included: - +To count the number of times an expression returns `TRUE` use a <<esql-where>> command to remove rows that shouldn't be included [source.merge.styled,esql] ---- include::{esql-specs}/stats.csv-spec[tag=count-where] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== include::{esql-specs}/stats.csv-spec[tag=count-where-result] |=== - -To count the same stream of data based on two different expressions -use the pattern `COUNT(<expression> OR NULL)`: - +To count the same stream of data based on two different expressions use the pattern `COUNT(<expression> OR NULL)` [source.merge.styled,esql] ---- include::{esql-specs}/stats.csv-spec[tag=count-or-null] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== include::{esql-specs}/stats.csv-spec[tag=count-or-null-result] |=== + diff --git a/docs/reference/esql/functions/examples/count_distinct.asciidoc b/docs/reference/esql/functions/examples/count_distinct.asciidoc new file mode 100644 index 0000000000000..44968c0652ec0 --- /dev/null +++ b/docs/reference/esql/functions/examples/count_distinct.asciidoc @@ -0,0 +1,31 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Examples* + +[source.merge.styled,esql] +---- +include::{esql-specs}/stats_count_distinct.csv-spec[tag=count-distinct] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/stats_count_distinct.csv-spec[tag=count-distinct-result] +|=== +With the optional second parameter to configure the precision threshold +[source.merge.styled,esql] +---- +include::{esql-specs}/stats_count_distinct.csv-spec[tag=count-distinct-precision] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/stats_count_distinct.csv-spec[tag=count-distinct-precision-result] +|=== +The expression can use inline functions. This example splits a string into multiple values using the `SPLIT` function and counts the unique values +[source.merge.styled,esql] +---- +include::{esql-specs}/stats_count_distinct.csv-spec[tag=docsCountDistinctWithExpression] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/stats_count_distinct.csv-spec[tag=docsCountDistinctWithExpression-result] +|=== + diff --git a/docs/reference/esql/functions/examples/values.asciidoc b/docs/reference/esql/functions/examples/values.asciidoc new file mode 100644 index 0000000000000..c013fc39d92ca --- /dev/null +++ b/docs/reference/esql/functions/examples/values.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.
+ +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/string.csv-spec[tag=values-grouped] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/string.csv-spec[tag=values-grouped-result] +|=== + diff --git a/docs/reference/esql/functions/kibana/definition/count.json b/docs/reference/esql/functions/kibana/definition/count.json new file mode 100644 index 0000000000000..e05ebc6789816 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/count.json @@ -0,0 +1,159 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "agg", + "name" : "count", + "description" : "Returns the total number (count) of input values.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : true, + "description" : "Expression that outputs values to be counted. If omitted, equivalent to `COUNT(*)` (the number of rows)." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_point", + "optional" : true, + "description" : "Expression that outputs values to be counted. If omitted, equivalent to `COUNT(*)` (the number of rows)." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : true, + "description" : "Expression that outputs values to be counted. If omitted, equivalent to `COUNT(*)` (the number of rows)." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : true, + "description" : "Expression that outputs values to be counted. If omitted, equivalent to `COUNT(*)` (the number of rows)." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_point", + "optional" : true, + "description" : "Expression that outputs values to be counted. If omitted, equivalent to `COUNT(*)` (the number of rows)." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : true, + "description" : "Expression that outputs values to be counted. If omitted, equivalent to `COUNT(*)` (the number of rows)." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : true, + "description" : "Expression that outputs values to be counted. If omitted, equivalent to `COUNT(*)` (the number of rows)." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : true, + "description" : "Expression that outputs values to be counted. If omitted, equivalent to `COUNT(*)` (the number of rows)." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : true, + "description" : "Expression that outputs values to be counted. If omitted, equivalent to `COUNT(*)` (the number of rows)." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : true, + "description" : "Expression that outputs values to be counted. If omitted, equivalent to `COUNT(*)` (the number of rows)." 
+ } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : true, + "description" : "Expression that outputs values to be counted. If omitted, equivalent to `COUNT(*)` (the number of rows)." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : true, + "description" : "Expression that outputs values to be counted. If omitted, equivalent to `COUNT(*)` (the number of rows)." + } + ], + "variadic" : false, + "returnType" : "long" + } + ], + "examples" : [ + "FROM employees\n| STATS COUNT(height)", + "FROM employees \n| STATS count = COUNT(*) BY languages \n| SORT languages DESC", + "ROW words=\"foo;bar;baz;qux;quux;foo\"\n| STATS word_count = COUNT(SPLIT(words, \";\"))", + "ROW n=1\n| WHERE n < 0\n| STATS COUNT(n)", + "ROW n=1\n| STATS COUNT(n > 0 OR NULL), COUNT(n < 0 OR NULL)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/count_distinct.json b/docs/reference/esql/functions/kibana/definition/count_distinct.json new file mode 100644 index 0000000000000..801bd26f7d022 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/count_distinct.json @@ -0,0 +1,607 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "agg", + "name" : "count_distinct", + "description" : "Returns the approximate number of distinct values.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + }, + { + "name" : "precision", + "type" : "integer", + "optional" : true, + "description" : "Precision threshold. Refer to <>. The maximum supported value is 40000. Thresholds above this number will have the same effect as a threshold of 40000. The default value is 3000." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + }, + { + "name" : "precision", + "type" : "long", + "optional" : true, + "description" : "Precision threshold. Refer to <>. The maximum supported value is 40000. Thresholds above this number will have the same effect as a threshold of 40000. The default value is 3000." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + }, + { + "name" : "precision", + "type" : "unsigned_long", + "optional" : true, + "description" : "Precision threshold. Refer to <>. The maximum supported value is 40000. Thresholds above this number will have the same effect as a threshold of 40000. The default value is 3000." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." 
+ } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + }, + { + "name" : "precision", + "type" : "integer", + "optional" : true, + "description" : "Precision threshold. Refer to <>. The maximum supported value is 40000. Thresholds above this number will have the same effect as a threshold of 40000. The default value is 3000." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + }, + { + "name" : "precision", + "type" : "long", + "optional" : true, + "description" : "Precision threshold. Refer to <>. The maximum supported value is 40000. Thresholds above this number will have the same effect as a threshold of 40000. The default value is 3000." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + }, + { + "name" : "precision", + "type" : "unsigned_long", + "optional" : true, + "description" : "Precision threshold. Refer to <>. The maximum supported value is 40000. Thresholds above this number will have the same effect as a threshold of 40000. The default value is 3000." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + }, + { + "name" : "precision", + "type" : "integer", + "optional" : true, + "description" : "Precision threshold. Refer to <>. The maximum supported value is 40000. Thresholds above this number will have the same effect as a threshold of 40000. The default value is 3000." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + }, + { + "name" : "precision", + "type" : "long", + "optional" : true, + "description" : "Precision threshold. Refer to <>. The maximum supported value is 40000. Thresholds above this number will have the same effect as a threshold of 40000. The default value is 3000." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + }, + { + "name" : "precision", + "type" : "unsigned_long", + "optional" : true, + "description" : "Precision threshold. Refer to <>. The maximum supported value is 40000. Thresholds above this number will have the same effect as a threshold of 40000. The default value is 3000." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." 
+ } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + }, + { + "name" : "precision", + "type" : "integer", + "optional" : true, + "description" : "Precision threshold. Refer to <>. The maximum supported value is 40000. Thresholds above this number will have the same effect as a threshold of 40000. The default value is 3000." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + }, + { + "name" : "precision", + "type" : "long", + "optional" : true, + "description" : "Precision threshold. Refer to <>. The maximum supported value is 40000. Thresholds above this number will have the same effect as a threshold of 40000. The default value is 3000." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + }, + { + "name" : "precision", + "type" : "unsigned_long", + "optional" : true, + "description" : "Precision threshold. Refer to <>. The maximum supported value is 40000. Thresholds above this number will have the same effect as a threshold of 40000. The default value is 3000." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + }, + { + "name" : "precision", + "type" : "integer", + "optional" : true, + "description" : "Precision threshold. Refer to <>. The maximum supported value is 40000. Thresholds above this number will have the same effect as a threshold of 40000. The default value is 3000." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + }, + { + "name" : "precision", + "type" : "long", + "optional" : true, + "description" : "Precision threshold. Refer to <>. The maximum supported value is 40000. Thresholds above this number will have the same effect as a threshold of 40000. The default value is 3000." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + }, + { + "name" : "precision", + "type" : "unsigned_long", + "optional" : true, + "description" : "Precision threshold. Refer to <>. The maximum supported value is 40000. Thresholds above this number will have the same effect as a threshold of 40000. The default value is 3000." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." 
+ } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + }, + { + "name" : "precision", + "type" : "integer", + "optional" : true, + "description" : "Precision threshold. Refer to <>. The maximum supported value is 40000. Thresholds above this number will have the same effect as a threshold of 40000. The default value is 3000." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + }, + { + "name" : "precision", + "type" : "long", + "optional" : true, + "description" : "Precision threshold. Refer to <>. The maximum supported value is 40000. Thresholds above this number will have the same effect as a threshold of 40000. The default value is 3000." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + }, + { + "name" : "precision", + "type" : "unsigned_long", + "optional" : true, + "description" : "Precision threshold. Refer to <>. The maximum supported value is 40000. Thresholds above this number will have the same effect as a threshold of 40000. The default value is 3000." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + }, + { + "name" : "precision", + "type" : "integer", + "optional" : true, + "description" : "Precision threshold. Refer to <>. The maximum supported value is 40000. Thresholds above this number will have the same effect as a threshold of 40000. The default value is 3000." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + }, + { + "name" : "precision", + "type" : "long", + "optional" : true, + "description" : "Precision threshold. Refer to <>. The maximum supported value is 40000. Thresholds above this number will have the same effect as a threshold of 40000. The default value is 3000." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + }, + { + "name" : "precision", + "type" : "unsigned_long", + "optional" : true, + "description" : "Precision threshold. Refer to <>. The maximum supported value is 40000. Thresholds above this number will have the same effect as a threshold of 40000. The default value is 3000." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." 
+ } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + }, + { + "name" : "precision", + "type" : "integer", + "optional" : true, + "description" : "Precision threshold. Refer to <>. The maximum supported value is 40000. Thresholds above this number will have the same effect as a threshold of 40000. The default value is 3000." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + }, + { + "name" : "precision", + "type" : "long", + "optional" : true, + "description" : "Precision threshold. Refer to <>. The maximum supported value is 40000. Thresholds above this number will have the same effect as a threshold of 40000. The default value is 3000." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + }, + { + "name" : "precision", + "type" : "unsigned_long", + "optional" : true, + "description" : "Precision threshold. Refer to <>. The maximum supported value is 40000. Thresholds above this number will have the same effect as a threshold of 40000. The default value is 3000." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + }, + { + "name" : "precision", + "type" : "integer", + "optional" : true, + "description" : "Precision threshold. Refer to <>. The maximum supported value is 40000. Thresholds above this number will have the same effect as a threshold of 40000. The default value is 3000." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + }, + { + "name" : "precision", + "type" : "long", + "optional" : true, + "description" : "Precision threshold. Refer to <>. The maximum supported value is 40000. Thresholds above this number will have the same effect as a threshold of 40000. The default value is 3000." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + }, + { + "name" : "precision", + "type" : "unsigned_long", + "optional" : true, + "description" : "Precision threshold. Refer to <>. The maximum supported value is 40000. Thresholds above this number will have the same effect as a threshold of 40000. The default value is 3000." 
+ } + ], + "variadic" : false, + "returnType" : "long" + } + ], + "examples" : [ + "FROM hosts\n| STATS COUNT_DISTINCT(ip0), COUNT_DISTINCT(ip1)", + "FROM hosts\n| STATS COUNT_DISTINCT(ip0, 80000), COUNT_DISTINCT(ip1, 5)", + "ROW words=\"foo;bar;baz;qux;quux;foo\"\n| STATS distinct_word_count = COUNT_DISTINCT(SPLIT(words, \";\"))" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/values.json b/docs/reference/esql/functions/kibana/definition/values.json new file mode 100644 index 0000000000000..3e0036c4d25b6 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/values.json @@ -0,0 +1,119 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "agg", + "name" : "values", + "description" : "Returns all values in a group as a multivalued field. The order of the returned values isn't guaranteed. If you need the values returned in order use <>.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "version" + } + ], + "examples" : [ + " FROM employees\n| EVAL first_letter = SUBSTRING(first_name, 0, 1)\n| STATS first_name=MV_SORT(VALUES(first_name)) BY first_letter\n| SORT first_letter" + ] +} diff --git a/docs/reference/esql/functions/kibana/docs/count.md b/docs/reference/esql/functions/kibana/docs/count.md new file mode 100644 index 0000000000000..dc9c356a847ed --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/count.md @@ -0,0 +1,11 @@ + + +### COUNT +Returns the total number (count) of input values. + +``` +FROM employees +| STATS COUNT(height) +``` diff --git a/docs/reference/esql/functions/kibana/docs/count_distinct.md b/docs/reference/esql/functions/kibana/docs/count_distinct.md new file mode 100644 index 0000000000000..a6b451bf9d38d --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/count_distinct.md @@ -0,0 +1,11 @@ + + +### COUNT_DISTINCT +Returns the approximate number of distinct values. 
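The estimate comes from the HyperLogLog++ sketch described in `COUNT_DISTINCT`'s appendix. As a rough illustration, the snippet below drives the same `HyperLogLogPlusPlus` class that this patch's `CountDistinctTests` later uses to compute expected results; it is only a sketch, and it assumes the Elasticsearch server and test-framework classes (`HyperLogLogPlusPlus`, `MockBigArrays`) are on the classpath:

```java
import org.apache.lucene.internal.hppc.BitMixer;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.util.MockBigArrays;
import org.elasticsearch.common.util.PageCacheRecycler;
import org.elasticsearch.search.aggregations.metrics.HyperLogLogPlusPlus;

public class CountDistinctSketch {
    public static void main(String[] args) {
        var bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofMb(256))
            .withCircuitBreaking();
        // 3000 is the documented default precision threshold; 40000 is the documented maximum.
        try (var hll = new HyperLogLogPlusPlus(HyperLogLogPlusPlus.precisionFromThreshold(3000), bigArrays, 1)) {
            for (long v = 0; v < 100_000; v++) {
                hll.collect(0, BitMixer.mix64(v)); // bucket 0, 64-bit hash of the value
            }
            // Prints an estimate near 100000; exactness is only expected below the threshold.
            System.out.println(hll.cardinality(0));
        }
    }
}
```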
+ +``` +FROM hosts +| STATS COUNT_DISTINCT(ip0), COUNT_DISTINCT(ip1) +``` diff --git a/docs/reference/esql/functions/kibana/docs/values.md b/docs/reference/esql/functions/kibana/docs/values.md new file mode 100644 index 0000000000000..cba62fc27255e --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/values.md @@ -0,0 +1,13 @@ + + +### VALUES +Returns all values in a group as a multivalued field. The order of the returned values isn't guaranteed. If you need the values returned in order use <>. + +``` + FROM employees +| EVAL first_letter = SUBSTRING(first_name, 0, 1) +| STATS first_name=MV_SORT(VALUES(first_name)) BY first_letter +| SORT first_letter +``` diff --git a/docs/reference/esql/functions/layout/count.asciidoc b/docs/reference/esql/functions/layout/count.asciidoc new file mode 100644 index 0000000000000..8c16d74cde9a7 --- /dev/null +++ b/docs/reference/esql/functions/layout/count.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-count]] +=== `COUNT` + +*Syntax* + +[.text-center] +image::esql/functions/signature/count.svg[Embedded,opts=inline] + +include::../parameters/count.asciidoc[] +include::../description/count.asciidoc[] +include::../types/count.asciidoc[] +include::../examples/count.asciidoc[] diff --git a/docs/reference/esql/functions/layout/count_distinct.asciidoc b/docs/reference/esql/functions/layout/count_distinct.asciidoc new file mode 100644 index 0000000000000..2c9848186e806 --- /dev/null +++ b/docs/reference/esql/functions/layout/count_distinct.asciidoc @@ -0,0 +1,16 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-count_distinct]] +=== `COUNT_DISTINCT` + +*Syntax* + +[.text-center] +image::esql/functions/signature/count_distinct.svg[Embedded,opts=inline] + +include::../parameters/count_distinct.asciidoc[] +include::../description/count_distinct.asciidoc[] +include::../types/count_distinct.asciidoc[] +include::../examples/count_distinct.asciidoc[] +include::../appendix/count_distinct.asciidoc[] diff --git a/docs/reference/esql/functions/layout/values.asciidoc b/docs/reference/esql/functions/layout/values.asciidoc new file mode 100644 index 0000000000000..7d90d4314699a --- /dev/null +++ b/docs/reference/esql/functions/layout/values.asciidoc @@ -0,0 +1,18 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-values]] +=== `VALUES` + +preview::["Do not use `VALUES` on production environments. This functionality is in technical preview and may be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features."] + +*Syntax* + +[.text-center] +image::esql/functions/signature/values.svg[Embedded,opts=inline] + +include::../parameters/values.asciidoc[] +include::../description/values.asciidoc[] +include::../types/values.asciidoc[] +include::../examples/values.asciidoc[] +include::../appendix/values.asciidoc[] diff --git a/docs/reference/esql/functions/parameters/count.asciidoc b/docs/reference/esql/functions/parameters/count.asciidoc new file mode 100644 index 0000000000000..d470061a83e2e --- /dev/null +++ b/docs/reference/esql/functions/parameters/count.asciidoc @@ -0,0 +1,6 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it. + +*Parameters* + +`field`:: +Expression that outputs values to be counted. If omitted, equivalent to `COUNT(*)` (the number of rows). diff --git a/docs/reference/esql/functions/parameters/count_distinct.asciidoc b/docs/reference/esql/functions/parameters/count_distinct.asciidoc new file mode 100644 index 0000000000000..f84cf27c3e075 --- /dev/null +++ b/docs/reference/esql/functions/parameters/count_distinct.asciidoc @@ -0,0 +1,9 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Parameters* + +`field`:: +Column or literal for which to count the number of distinct values. + +`precision`:: +Precision threshold. Refer to <>. The maximum supported value is 40000. Thresholds above this number will have the same effect as a threshold of 40000. The default value is 3000. diff --git a/docs/reference/esql/functions/parameters/values.asciidoc b/docs/reference/esql/functions/parameters/values.asciidoc new file mode 100644 index 0000000000000..8903aa1a472a3 --- /dev/null +++ b/docs/reference/esql/functions/parameters/values.asciidoc @@ -0,0 +1,6 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Parameters* + +`field`:: + diff --git a/docs/reference/esql/functions/signature/count.svg b/docs/reference/esql/functions/signature/count.svg new file mode 100644 index 0000000000000..9b19652b98788 --- /dev/null +++ b/docs/reference/esql/functions/signature/count.svg @@ -0,0 +1 @@ +COUNT(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/count_distinct.svg b/docs/reference/esql/functions/signature/count_distinct.svg new file mode 100644 index 0000000000000..a5b77da7c555a --- /dev/null +++ b/docs/reference/esql/functions/signature/count_distinct.svg @@ -0,0 +1 @@ +COUNT_DISTINCT(field,precision) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/values.svg b/docs/reference/esql/functions/signature/values.svg new file mode 100644 index 0000000000000..0fa116ce1eb14 --- /dev/null +++ b/docs/reference/esql/functions/signature/values.svg @@ -0,0 +1 @@ +VALUES(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/types/count.asciidoc b/docs/reference/esql/functions/types/count.asciidoc new file mode 100644 index 0000000000000..70e79d4899605 --- /dev/null +++ b/docs/reference/esql/functions/types/count.asciidoc @@ -0,0 +1,20 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +field | result +boolean | long +cartesian_point | long +datetime | long +double | long +geo_point | long +integer | long +ip | long +keyword | long +long | long +text | long +unsigned_long | long +version | long +|=== diff --git a/docs/reference/esql/functions/types/count_distinct.asciidoc b/docs/reference/esql/functions/types/count_distinct.asciidoc new file mode 100644 index 0000000000000..4b201d45732f1 --- /dev/null +++ b/docs/reference/esql/functions/types/count_distinct.asciidoc @@ -0,0 +1,44 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +field | precision | result +boolean | integer | long +boolean | long | long +boolean | unsigned_long | long +boolean | | long +datetime | integer | long +datetime | long | long +datetime | unsigned_long | long +datetime | | long +double | integer | long +double | long | long +double | unsigned_long | long +double | | long +integer | integer | long +integer | long | long +integer | unsigned_long | long +integer | | long +ip | integer | long +ip | long | long +ip | unsigned_long | long +ip | | long +keyword | integer | long +keyword | long | long +keyword | unsigned_long | long +keyword | | long +long | integer | long +long | long | long +long | unsigned_long | long +long | | long +text | integer | long +text | long | long +text | unsigned_long | long +text | | long +version | integer | long +version | long | long +version | unsigned_long | long +version | | long +|=== diff --git a/docs/reference/esql/functions/types/values.asciidoc b/docs/reference/esql/functions/types/values.asciidoc new file mode 100644 index 0000000000000..705745d76dbab --- /dev/null +++ b/docs/reference/esql/functions/types/values.asciidoc @@ -0,0 +1,17 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +field | result +boolean | boolean +datetime | datetime +double | double +integer | integer +ip | ip +keyword | keyword +long | long +text | text +version | version +|=== diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregator.java index 736b320a9dde8..602fd29433193 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregator.java @@ -100,7 +100,7 @@ Block toBlock(BlockFactory blockFactory) { } BytesRef scratch = new BytesRef(); if (values.size() == 1) { - return blockFactory.newConstantBytesRefBlockWith(values.get(0, scratch), 1); + return blockFactory.newConstantBytesRefBlockWith(BytesRef.deepCopyOf(values.get(0, scratch)), 1); } try (BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder((int) values.size())) { builder.beginPositionEntry(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ValuesAggregator.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ValuesAggregator.java.st index ea62dcf295825..a8884c58116f3 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ValuesAggregator.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ValuesAggregator.java.st @@ -192,7 +192,7 @@ $elseif(double)$ $elseif(int)$ return blockFactory.newConstantIntBlockWith((int) values.get(0), 1); $elseif(BytesRef)$ - return blockFactory.newConstantBytesRefBlockWith(values.get(0, scratch), 1); + return blockFactory.newConstantBytesRefBlockWith(BytesRef.deepCopyOf(values.get(0, scratch)), 1); $endif$ } try ($Type$Block.Builder builder = blockFactory.new$Type$BlockBuilder((int) values.size())) { diff --git 
a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index c036e04bc8ba3..7b5941b88988d 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -19,7 +19,7 @@ synopsis:keyword "double cos(angle:double|integer|long|unsigned_long)" "double cosh(angle:double|integer|long|unsigned_long)" "long count(?field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" -"long count_distinct(field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|version, ?precision:integer)" +"long count_distinct(field:boolean|date|double|integer|ip|keyword|long|text|version, ?precision:integer|long|unsigned_long)" "integer date_diff(unit:keyword|text, startTimestamp:date, endTimestamp:date)" "long date_extract(datePart:keyword|text, date:date)" "keyword date_format(?dateFormat:keyword|text, date:date)" @@ -139,8 +139,8 @@ coalesce |first |"boolean|cartesian_point|car concat |[string1, string2] |["keyword|text", "keyword|text"] |[Strings to concatenate., Strings to concatenate.] cos |angle |"double|integer|long|unsigned_long" |An angle, in radians. If `null`, the function returns `null`. cosh |angle |"double|integer|long|unsigned_long" |An angle, in radians. If `null`, the function returns `null`. -count |field |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" |Column or literal for which to count the number of values. -count_distinct|[field, precision] |["boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|version", integer] |[Column or literal for which to count the number of distinct values., ] +count |field |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" |Expression that outputs values to be counted. If omitted, equivalent to `COUNT(*)` (the number of rows). +count_distinct|[field, precision] |["boolean|date|double|integer|ip|keyword|long|text|version", "integer|long|unsigned_long"] |[Column or literal for which to count the number of distinct values., Precision threshold. Refer to <>. The maximum supported value is 40000. Thresholds above this number will have the same effect as a threshold of 40000. The default value is 3000.] date_diff |[unit, startTimestamp, endTimestamp]|["keyword|text", date, date] |[Time difference unit, A string representing a start timestamp, A string representing an end timestamp] date_extract |[datePart, date] |["keyword|text", date] |[Part of the date to extract. Can be: `aligned_day_of_week_in_month`\, `aligned_day_of_week_in_year`\, `aligned_week_of_month`\, `aligned_week_of_year`\, `ampm_of_day`\, `clock_hour_of_ampm`\, `clock_hour_of_day`\, `day_of_month`\, `day_of_week`\, `day_of_year`\, `epoch_day`\, `era`\, `hour_of_ampm`\, `hour_of_day`\, `instant_seconds`\, `micro_of_day`\, `micro_of_second`\, `milli_of_day`\, `milli_of_second`\, `minute_of_day`\, `minute_of_hour`\, `month_of_year`\, `nano_of_day`\, `nano_of_second`\, `offset_seconds`\, `proleptic_month`\, `second_of_day`\, `second_of_minute`\, `year`\, or `year_of_era`. Refer to https://docs.oracle.com/javase/8/docs/api/java/time/temporal/ChronoField.html[java.time.temporal.ChronoField] for a description of these values. If `null`\, the function returns `null`., Date expression. If `null`\, the function returns `null`.] 
date_format |[dateFormat, date] |["keyword|text", date] |[Date format (optional). If no format is specified\, the `yyyy-MM-dd'T'HH:mm:ss.SSSZ` format is used. If `null`\, the function returns `null`., Date expression. If `null`\, the function returns `null`.] @@ -356,7 +356,7 @@ to_ver |Converts an input string to a version value. to_version |Converts an input string to a version value. top |Collects the top values for a field. Includes repeated values. trim |Removes leading and trailing whitespaces from a string. -values |Collect values for a field. +values |Returns all values in a group as a multivalued field. The order of the returned values isn't guaranteed. If you need the values returned in order use <>. weighted_avg |The weighted average of a numeric field. ; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionInfo.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionInfo.java index 94e3aa4e1dd68..f275496c6787a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionInfo.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionInfo.java @@ -23,6 +23,11 @@ */ String[] returnType(); + /** + * Whether this function is a preview (Not ready for production environments) or not. + */ + boolean preview() default false; + /** * The description of the function rendered in {@code META FUNCTIONS} * and the docs. These should be complete sentences. diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java index 52e053f843e14..9b6190408dbd4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.StringUtils; import org.elasticsearch.xpack.esql.expression.SurrogateExpression; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvCount; @@ -35,7 +36,32 @@ public class Count extends AggregateFunction implements EnclosedAgg, ToAggregator, SurrogateExpression { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Count", Count::new); - @FunctionInfo(returnType = "long", description = "Returns the total number (count) of input values.", isAggregation = true) + @FunctionInfo( + returnType = "long", + description = "Returns the total number (count) of input values.", + isAggregation = true, + examples = { + @Example(file = "stats", tag = "count"), + @Example(description = "To count the number of rows, use `COUNT()` or `COUNT(*)`", file = "docs", tag = "countAll"), + @Example( + description = "The expression can use inline functions. 
This example splits a string into " + + "multiple values using the `SPLIT` function and counts the values", + file = "stats", + tag = "docsCountWithExpression" + ), + @Example( + description = "To count the number of times an expression returns `TRUE` use " + + "a <> command to remove rows that shouldn't be included", + file = "stats", + tag = "count-where" + ), + @Example( + description = "To count the same stream of data based on two different expressions " + + "use the pattern `COUNT( OR NULL)`", + file = "stats", + tag = "count-or-null" + ) } + ) public Count( Source source, @Param( @@ -54,7 +80,7 @@ public Count( "text", "unsigned_long", "version" }, - description = "Column or literal for which to count the number of values." + description = "Expression that outputs values to be counted. If omitted, equivalent to `COUNT(*)` (the number of rows)." ) Expression field ) { super(source, field); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java index 7686d10a03d9e..858c6e659449c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java @@ -24,6 +24,7 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions; import org.elasticsearch.xpack.esql.expression.SurrogateExpression; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.OptionalArgument; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -53,15 +54,63 @@ public class CountDistinct extends AggregateFunction implements OptionalArgument private static final int DEFAULT_PRECISION = 3000; private final Expression precision; - @FunctionInfo(returnType = "long", description = "Returns the approximate number of distinct values.", isAggregation = true) + @FunctionInfo( + returnType = "long", + description = "Returns the approximate number of distinct values.", + appendix = """ + [discrete] + [[esql-agg-count-distinct-approximate]] + ==== Counts are approximate + + Computing exact counts requires loading values into a set and returning its + size. This doesn't scale when working on high-cardinality sets and/or large + values as the required memory usage and the need to communicate those + per-shard sets between nodes would utilize too many resources of the cluster. + + This `COUNT_DISTINCT` function is based on the + https://static.googleusercontent.com/media/research.google.com/fr//pubs/archive/40671.pdf[HyperLogLog++] + algorithm, which counts based on the hashes of the values with some interesting + properties: + + include::../../../aggregations/metrics/cardinality-aggregation.asciidoc[tag=explanation] + + The `COUNT_DISTINCT` function takes an optional second parameter to configure + the precision threshold. The precision_threshold options allows to trade memory + for accuracy, and defines a unique count below which counts are expected to be + close to accurate. Above this value, counts might become a bit more fuzzy. The + maximum supported value is 40000, thresholds above this number will have the + same effect as a threshold of 40000. 
The default value is `3000`. + """, + isAggregation = true, + examples = { + @Example(file = "stats_count_distinct", tag = "count-distinct"), + @Example( + description = "With the optional second parameter to configure the precision threshold", + file = "stats_count_distinct", + tag = "count-distinct-precision" + ), + @Example( + description = "The expression can use inline functions. This example splits a string into " + + "multiple values using the `SPLIT` function and counts the unique values", + file = "stats_count_distinct", + tag = "docsCountDistinctWithExpression" + ) } + ) public CountDistinct( Source source, @Param( name = "field", - type = { "boolean", "cartesian_point", "date", "double", "geo_point", "integer", "ip", "keyword", "long", "text", "version" }, + type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }, description = "Column or literal for which to count the number of distinct values." ) Expression field, - @Param(optional = true, name = "precision", type = { "integer" }) Expression precision + @Param( + optional = true, + name = "precision", + type = { "integer", "long", "unsigned_long" }, + description = "Precision threshold. Refer to <>. " + + "The maximum supported value is 40000. Thresholds above this number will have the " + + "same effect as a threshold of 40000. The default value is 3000." + ) Expression precision ) { super(source, field, precision != null ? List.of(precision) : List.of()); this.precision = precision; @@ -108,19 +157,17 @@ protected TypeResolution resolveType() { return new TypeResolution("Unresolved children"); } - TypeResolution resolution = EsqlTypeResolutions.isExact(field(), sourceText(), DEFAULT); - if (resolution.unresolved()) { - return resolution; - } + TypeResolution resolution = EsqlTypeResolutions.isExact(field(), sourceText(), DEFAULT) + .and( + isType( + field(), + dt -> dt != DataType.UNSIGNED_LONG && dt != DataType.SOURCE, + sourceText(), + DEFAULT, + "any exact type except unsigned_long, _source, or counter types" + ) + ); - boolean resolved = resolution.resolved(); - resolution = isType( - field(), - dt -> resolved && dt != DataType.UNSIGNED_LONG && dt != DataType.SOURCE, - sourceText(), - DEFAULT, - "any exact type except unsigned_long, _source, or counter types" - ); if (resolution.unresolved() || precision == null) { return resolution; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java index 7d2fbcddb113b..79276b26be6d5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java @@ -21,6 +21,7 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.planner.ToAggregator; @@ -34,13 +35,25 @@ public class Values extends AggregateFunction implements ToAggregator { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Values", Values::new); 
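Stepping back to the `resolveType` rewrite in `CountDistinct` above: the two early-returning checks are now composed with `and`, so the first failure wins and later checks only run once everything before them has resolved. Below is a toy model of that short-circuiting composition — a hypothetical `Resolution` record standing in for the real `TypeResolution`, not the actual API:

```java
// Hypothetical stand-in for TypeResolution: and(...) keeps the first failure
// and otherwise defers to the next check.
record Resolution(String error) {
    static final Resolution RESOLVED = new Resolution(null);

    boolean unresolved() {
        return error != null;
    }

    Resolution and(Resolution next) {
        return unresolved() ? this : next;
    }
}

public class ResolutionDemo {
    public static void main(String[] args) {
        Resolution exact = Resolution.RESOLVED; // as if isExact(...) passed
        Resolution restricted = new Resolution("must be any exact type except unsigned_long, _source, or counter types");
        // Because the first check resolved, the restriction check decides the outcome.
        System.out.println(exact.and(restricted).error());
    }
}
```

Under that model the behaviour matches the old code: an `isExact` failure is reported as before, and the unsigned_long/_source restriction is only consulted once the field is known to be exact.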
@FunctionInfo( - returnType = { "boolean|date|double|integer|ip|keyword|long|text|version" }, - description = "Collect values for a field.", - isAggregation = true + returnType = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }, + preview = true, + description = "Returns all values in a group as a multivalued field. The order of the returned values isn't guaranteed. " + + "If you need the values returned in order use <>.", + appendix = """ + [WARNING] + ==== + This can use a significant amount of memory and ES|QL doesn't yet + grow aggregations beyond memory. So this aggregation will work until + it is used to collect more values than can fit into memory. Once it + collects too many values it will fail the query with + a <>. + ====""", + isAggregation = true, + examples = @Example(file = "string", tag = "values-grouped") ) public Values( Source source, - @Param(name = "field", type = { "boolean|date|double|integer|ip|keyword|long|text|version" }) Expression v + @Param(name = "field", type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }) Expression v ) { super(source, v); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 0ec0a29dc530b..64c72b46c303b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -77,6 +77,7 @@ import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; +import java.math.BigInteger; import java.nio.file.Files; import java.nio.file.Path; import java.time.Duration; @@ -298,7 +299,12 @@ protected final List rows(List multirowFields) ) { var multiRowData = field.multiRowData(); for (int row = initialRow; row < initialRow + pageSize; row++) { - wrapper.accept(multiRowData.get(row)); + var data = multiRowData.get(row); + if (data instanceof BigInteger bigIntegerData) { + wrapper.accept(NumericUtils.asLongUnsigned(bigIntegerData)); + } else { + wrapper.accept(data); + } } blocks[i] = wrapper.builder().build(); @@ -545,7 +551,7 @@ public static void renderDocs() throws IOException { renderDescription(description.description(), info.detailedDescription(), info.note()); boolean hasExamples = renderExamples(info); boolean hasAppendix = renderAppendix(info.appendix()); - renderFullLayout(name, hasExamples, hasAppendix); + renderFullLayout(name, info.preview(), hasExamples, hasAppendix); renderKibanaInlineDocs(name, info); List args = description.args(); if (name.equals("case")) { @@ -571,6 +577,11 @@ public static void renderDocs() throws IOException { private static final String DOCS_WARNING = "// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.\n\n"; + private static final String PREVIEW_CALLOUT = + "\npreview::[\"Do not use `VALUES` on production environments. This functionality is in technical preview and " + + "may be changed or removed in a future release. 
Elastic will work to fix any issues, but features in technical preview " + + "are not subject to the support SLA of official GA features.\"]\n"; + private static void renderTypes(List argNames) throws IOException { StringBuilder header = new StringBuilder(); for (String arg : argNames) { @@ -686,12 +697,12 @@ private static boolean renderAppendix(String appendix) throws IOException { return true; } - private static void renderFullLayout(String name, boolean hasExamples, boolean hasAppendix) throws IOException { + private static void renderFullLayout(String name, boolean preview, boolean hasExamples, boolean hasAppendix) throws IOException { String rendered = DOCS_WARNING + """ [discrete] [[esql-$NAME$]] === `$UPPER_NAME$` - + $PREVIEW_CALLOUT$ *Syntax* [.text-center] @@ -700,7 +711,9 @@ private static void renderFullLayout(String name, boolean hasExamples, boolean h include::../parameters/$NAME$.asciidoc[] include::../description/$NAME$.asciidoc[] include::../types/$NAME$.asciidoc[] - """.replace("$NAME$", name).replace("$UPPER_NAME$", name.toUpperCase(Locale.ROOT)); + """.replace("$NAME$", name) + .replace("$UPPER_NAME$", name.toUpperCase(Locale.ROOT)) + .replace("$PREVIEW_CALLOUT$", preview ? PREVIEW_CALLOUT : ""); if (hasExamples) { rendered += "include::../examples/" + name + ".asciidoc[]\n"; } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MultiRowTestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MultiRowTestCaseSupplier.java index 2896dec814a71..973249e4a743c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MultiRowTestCaseSupplier.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MultiRowTestCaseSupplier.java @@ -14,7 +14,9 @@ import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.versionfield.Version; +import java.math.BigInteger; import java.util.ArrayList; import java.util.List; @@ -149,6 +151,55 @@ public static List longCases(int minRows, int maxRows, long m return cases; } + public static List ulongCases(int minRows, int maxRows, BigInteger min, BigInteger max, boolean includeZero) { + List cases = new ArrayList<>(); + + // Zero + if (BigInteger.ZERO.compareTo(max) <= 0 && BigInteger.ZERO.compareTo(min) >= 0 && includeZero) { + cases.add( + new TypedDataSupplier( + "<0 unsigned longs>", + () -> randomList(minRows, maxRows, () -> BigInteger.ZERO), + DataType.UNSIGNED_LONG, + false, + true + ) + ); + } + + // Small values, less than Long.MAX_VALUE + BigInteger lower1 = min.max(BigInteger.ONE); + BigInteger upper1 = max.min(BigInteger.valueOf(Long.MAX_VALUE)); + if (lower1.compareTo(upper1) < 0) { + cases.add( + new TypedDataSupplier( + "", + () -> randomList(minRows, maxRows, () -> ESTestCase.randomUnsignedLongBetween(lower1, upper1)), + DataType.UNSIGNED_LONG, + false, + true + ) + ); + } + + // Big values, greater than Long.MAX_VALUE + BigInteger lower2 = min.max(BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.ONE)); + BigInteger upper2 = max.min(ESTestCase.UNSIGNED_LONG_MAX); + if (lower2.compareTo(upper2) < 0) { + cases.add( + new TypedDataSupplier( + "", + () -> randomList(minRows, maxRows, () -> ESTestCase.randomUnsignedLongBetween(lower2, upper2)), + DataType.UNSIGNED_LONG, + false, + true + ) + ); + } + + return cases; + } + public static List doubleCases(int 
minRows, int maxRows, double min, double max, boolean includeZero) { List cases = new ArrayList<>(); @@ -326,6 +377,41 @@ public static List ipCases(int minRows, int maxRows) { ); } + public static List versionCases(int minRows, int maxRows) { + return List.of( + new TypedDataSupplier( + "", + () -> randomList(minRows, maxRows, () -> new Version(Integer.toString(ESTestCase.between(0, 100))).toBytesRef()), + DataType.VERSION, + false, + true + ), + new TypedDataSupplier( + "", + () -> randomList( + minRows, + maxRows, + () -> new Version(ESTestCase.between(0, 100) + "." + ESTestCase.between(0, 100)).toBytesRef() + ), + DataType.VERSION, + false, + true + ), + new TypedDataSupplier( + "", + () -> randomList( + minRows, + maxRows, + () -> new Version(ESTestCase.between(0, 100) + "." + ESTestCase.between(0, 100) + "." + ESTestCase.between(0, 100)) + .toBytesRef() + ), + DataType.VERSION, + false, + true + ) + ); + } + public static List geoPointCases(int minRows, int maxRows, boolean withAltitude) { List cases = new ArrayList<>(); @@ -343,7 +429,7 @@ public static List geoPointCases(int minRows, int maxRows, bo cases.add( new TypedDataSupplier( "", - () -> randomList(minRows, maxRows, () -> GEO.asWkb(GeometryTestUtils.randomPoint(true))), + () -> randomList(minRows, maxRows, () -> GEO.asWkb(GeometryTestUtils.randomPoint(false))), DataType.GEO_POINT, false, true @@ -381,4 +467,38 @@ public static List cartesianPointCases(int minRows, int maxRo return cases; } + + public static List stringCases(int minRows, int maxRows, DataType type) { + return List.of( + new TypedDataSupplier("", () -> randomList(minRows, maxRows, () -> new BytesRef("")), type, false, true), + new TypedDataSupplier( + "", + () -> randomList(minRows, maxRows, () -> new BytesRef(ESTestCase.randomAlphaOfLengthBetween(1, 30))), + type, + false, + true + ), + new TypedDataSupplier( + "", + () -> randomList(minRows, maxRows, () -> new BytesRef(ESTestCase.randomAlphaOfLengthBetween(300, 3000))), + type, + false, + true + ), + new TypedDataSupplier( + "", + () -> randomList(minRows, maxRows, () -> new BytesRef(ESTestCase.randomRealisticUnicodeOfLengthBetween(1, 30))), + type, + false, + true + ), + new TypedDataSupplier( + "", + () -> randomList(minRows, maxRows, () -> new BytesRef(ESTestCase.randomRealisticUnicodeOfLengthBetween(300, 3000))), + type, + false, + true + ) + ); + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java index 3c9c1795ff210..cd375b8c53595 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java @@ -880,6 +880,12 @@ public static List longCases(long min, long max, boolean incl return cases; } + /** + * Generate cases for {@link DataType#UNSIGNED_LONG}. + *
+ * For multi-row parameters, see {@link MultiRowTestCaseSupplier#ulongCases}. + *
+ */ public static List ulongCases(BigInteger min, BigInteger max, boolean includeZero) { List cases = new ArrayList<>(); @@ -1142,6 +1148,12 @@ public static List ipCases() { ); } + /** + * Generate cases for String DataTypes. + *
+ * For multi-row parameters, see {@link MultiRowTestCaseSupplier#stringCases}. + *
+ */ public static List stringCases(DataType type) { List result = new ArrayList<>(); result.add(new TypedDataSupplier("", () -> new BytesRef(""), type)); @@ -1170,6 +1182,9 @@ public static List stringCases(DataType type) { /** * Supplier test case data for {@link Version} fields. + *
+ * For multi-row parameters, see {@link MultiRowTestCaseSupplier#versionCases}. + *
*/ public static List versionCases(String prefix) { return List.of( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinctTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinctTests.java new file mode 100644 index 0000000000000..c2638e8da9196 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinctTests.java @@ -0,0 +1,176 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.aggregate; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.internal.hppc.BitMixer; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.hash.MurmurHash3; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.search.aggregations.metrics.HyperLogLogPlusPlus; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.AbstractAggregationTestCase; +import org.elasticsearch.xpack.esql.expression.function.MultiRowTestCaseSupplier; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; + +import java.math.BigInteger; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Supplier; +import java.util.stream.Stream; + +import static org.hamcrest.Matchers.equalTo; + +public class CountDistinctTests extends AbstractAggregationTestCase { + public CountDistinctTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + var suppliers = new ArrayList(); + + var precisionSuppliers = Stream.of( + TestCaseSupplier.intCases(0, 100_000, true), + TestCaseSupplier.longCases(0L, 100_000L, true), + TestCaseSupplier.ulongCases(BigInteger.ZERO, BigInteger.valueOf(100_000L), true) + ).flatMap(List::stream).toList(); + + Stream.of( + MultiRowTestCaseSupplier.intCases(1, 1000, Integer.MIN_VALUE, Integer.MAX_VALUE, true), + MultiRowTestCaseSupplier.longCases(1, 1000, Long.MIN_VALUE, Long.MAX_VALUE, true), + MultiRowTestCaseSupplier.doubleCases(1, 1000, -Double.MAX_VALUE, Double.MAX_VALUE, true), + MultiRowTestCaseSupplier.dateCases(1, 1000), + MultiRowTestCaseSupplier.booleanCases(1, 1000), + MultiRowTestCaseSupplier.ipCases(1, 1000), + MultiRowTestCaseSupplier.versionCases(1, 1000), + // Lower values for strings, as they take more space and may trigger the circuit breaker + MultiRowTestCaseSupplier.stringCases(1, 100, DataType.KEYWORD), + MultiRowTestCaseSupplier.stringCases(1, 100, DataType.TEXT) + ).flatMap(List::stream).forEach(fieldCaseSupplier -> { + // With precision + for (var precisionCaseSupplier : precisionSuppliers) { + suppliers.add(makeSupplier(fieldCaseSupplier, precisionCaseSupplier)); + } + + // Without precision + suppliers.add(makeSupplier(fieldCaseSupplier)); + }); + + // No rows + for 
(var dataType : List.of( + DataType.INTEGER, + DataType.LONG, + DataType.DOUBLE, + DataType.DATETIME, + DataType.BOOLEAN, + DataType.IP, + DataType.VERSION, + DataType.KEYWORD, + DataType.TEXT + )) { + var emptyFieldSupplier = new TestCaseSupplier.TypedDataSupplier("No rows (" + dataType + ")", List::of, dataType, false, true); + + // With precision + for (var precisionCaseSupplier : precisionSuppliers) { + suppliers.add(makeSupplier(emptyFieldSupplier, precisionCaseSupplier)); + } + + // Without precision + suppliers.add(makeSupplier(emptyFieldSupplier)); + } + + // "No rows" expects 0 here instead of null + // return parameterSuppliersFromTypedDataWithDefaultChecks(suppliers); + return parameterSuppliersFromTypedData(randomizeBytesRefsOffset(suppliers)); + } + + @Override + protected Expression build(Source source, List args) { + return new CountDistinct(source, args.get(0), args.size() > 1 ? args.get(1) : null); + } + + private static TestCaseSupplier makeSupplier( + TestCaseSupplier.TypedDataSupplier fieldSupplier, + TestCaseSupplier.TypedDataSupplier precisionSupplier + ) { + return new TestCaseSupplier(fieldSupplier.name(), List.of(fieldSupplier.type(), precisionSupplier.type()), () -> { + var fieldTypedData = fieldSupplier.get(); + var precisionTypedData = precisionSupplier.get().forceLiteral(); + var values = fieldTypedData.multiRowData(); + var precision = ((Number) precisionTypedData.data()).intValue(); + + long result; + + if (fieldTypedData.type() == DataType.BOOLEAN) { + result = values.stream().distinct().count(); + } else { + result = calculateExpectedResult(values, precision); + } + + return new TestCaseSupplier.TestCase( + List.of(fieldTypedData, precisionTypedData), + "CountDistinct[field=Attribute[channel=0],precision=Attribute[channel=1]]", + DataType.LONG, + equalTo(result) + ); + }); + } + + private static TestCaseSupplier makeSupplier(TestCaseSupplier.TypedDataSupplier fieldSupplier) { + return new TestCaseSupplier(fieldSupplier.name() + ", no precision", List.of(fieldSupplier.type()), () -> { + var fieldTypedData = fieldSupplier.get(); + var values = fieldTypedData.multiRowData(); + + long result; + + if (fieldTypedData.type() == DataType.BOOLEAN) { + result = values.stream().distinct().count(); + } else { + result = calculateExpectedResult(values, 3000); + } + + return new TestCaseSupplier.TestCase( + List.of(fieldTypedData), + "CountDistinct[field=Attribute[channel=0]]", + DataType.LONG, + equalTo(result) + ); + }); + } + + private static long calculateExpectedResult(List values, int precision) { + // Can't use driverContext().bigArrays() from a static context + var bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofMb(256)).withCircuitBreaking(); + try (var hll = new HyperLogLogPlusPlus(HyperLogLogPlusPlus.precisionFromThreshold(precision), bigArrays, 1)) { + var hash = new MurmurHash3.Hash128(); + for (var value : values) { + if (value instanceof Integer casted) { + hll.collect(0, BitMixer.mix64(casted)); + } else if (value instanceof Long casted) { + hll.collect(0, BitMixer.mix64(casted)); + } else if (value instanceof Double casted) { + hll.collect(0, BitMixer.mix64(Double.doubleToLongBits(casted))); + } else if (value instanceof BytesRef casted) { + MurmurHash3.hash128(casted.bytes, casted.offset, casted.length, 0, hash); + hll.collect(0, BitMixer.mix64(hash.h1)); + } else { + throw new IllegalArgumentException("Unsupported data type: " + value.getClass()); + } + } + + return hll.cardinality(0); + } + } +} diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountTests.java new file mode 100644 index 0000000000000..09076f2d70fd9 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountTests.java @@ -0,0 +1,106 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.aggregate; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.AbstractAggregationTestCase; +import org.elasticsearch.xpack.esql.expression.function.MultiRowTestCaseSupplier; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; + +import java.math.BigInteger; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Supplier; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static org.hamcrest.Matchers.equalTo; + +public class CountTests extends AbstractAggregationTestCase { + public CountTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + var suppliers = new ArrayList(); + + Stream.of( + MultiRowTestCaseSupplier.intCases(1, 1000, Integer.MIN_VALUE, Integer.MAX_VALUE, true), + MultiRowTestCaseSupplier.longCases(1, 1000, Long.MIN_VALUE, Long.MAX_VALUE, true), + MultiRowTestCaseSupplier.ulongCases(1, 1000, BigInteger.ZERO, UNSIGNED_LONG_MAX, true), + MultiRowTestCaseSupplier.doubleCases(1, 1000, -Double.MAX_VALUE, Double.MAX_VALUE, true), + MultiRowTestCaseSupplier.dateCases(1, 1000), + MultiRowTestCaseSupplier.booleanCases(1, 1000), + MultiRowTestCaseSupplier.ipCases(1, 1000), + MultiRowTestCaseSupplier.versionCases(1, 1000), + MultiRowTestCaseSupplier.geoPointCases(1, 1000, true), + MultiRowTestCaseSupplier.cartesianPointCases(1, 1000, true), + // Lower values for strings, as they take more space and may trigger the circuit breaker + MultiRowTestCaseSupplier.stringCases(1, 100, DataType.KEYWORD), + MultiRowTestCaseSupplier.stringCases(1, 100, DataType.TEXT) + ).flatMap(List::stream).map(CountTests::makeSupplier).collect(Collectors.toCollection(() -> suppliers)); + + // No rows + for (var dataType : List.of( + DataType.INTEGER, + DataType.LONG, + DataType.DOUBLE, + DataType.DATETIME, + DataType.BOOLEAN, + DataType.IP, + DataType.VERSION, + DataType.KEYWORD, + DataType.TEXT, + DataType.GEO_POINT, + DataType.CARTESIAN_POINT, + DataType.UNSIGNED_LONG + )) { + suppliers.add( + new TestCaseSupplier( + "No rows (" + dataType + ")", + List.of(dataType), + () -> new TestCaseSupplier.TestCase( + List.of(TestCaseSupplier.TypedData.multiRow(List.of(), dataType, "field")), + "Count[field=Attribute[channel=0]]", + DataType.LONG, + equalTo(0L) + ) + ) + ); + } + + // "No rows" expects 0 here instead of null + // return parameterSuppliersFromTypedDataWithDefaultChecks(suppliers); 
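+ // The default checks would assert a null result for empty input, but COUNT returns 0 for no rows, so only the plain parameter expansion is applied below.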
+ return parameterSuppliersFromTypedData(randomizeBytesRefsOffset(suppliers)); + } + + @Override + protected Expression build(Source source, List args) { + return new Count(source, args.get(0)); + } + + private static TestCaseSupplier makeSupplier(TestCaseSupplier.TypedDataSupplier fieldSupplier) { + return new TestCaseSupplier(fieldSupplier.name(), List.of(fieldSupplier.type()), () -> { + var fieldTypedData = fieldSupplier.get(); + var rowCount = fieldTypedData.multiRowData().size(); + + return new TestCaseSupplier.TestCase( + List.of(fieldTypedData), + "Count[field=Attribute[channel=0]]", + DataType.LONG, + equalTo((long) rowCount) + ); + }); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesTests.java new file mode 100644 index 0000000000000..704bd3ab204a3 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesTests.java @@ -0,0 +1,110 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.aggregate; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.AbstractAggregationTestCase; +import org.elasticsearch.xpack.esql.expression.function.MultiRowTestCaseSupplier; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.BaseMatcher; +import org.hamcrest.Description; +import org.hamcrest.Matcher; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.function.Supplier; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; + +public class ValuesTests extends AbstractAggregationTestCase { + public ValuesTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + var suppliers = new ArrayList(); + + Stream.of( + MultiRowTestCaseSupplier.intCases(1, 1000, Integer.MIN_VALUE, Integer.MAX_VALUE, true), + MultiRowTestCaseSupplier.longCases(1, 1000, Long.MIN_VALUE, Long.MAX_VALUE, true), + MultiRowTestCaseSupplier.doubleCases(1, 1000, -Double.MAX_VALUE, Double.MAX_VALUE, true), + MultiRowTestCaseSupplier.dateCases(1, 1000), + MultiRowTestCaseSupplier.booleanCases(1, 1000), + MultiRowTestCaseSupplier.ipCases(1, 1000), + MultiRowTestCaseSupplier.versionCases(1, 1000), + // Lower values for strings, as they take more space and may trigger the circuit breaker + MultiRowTestCaseSupplier.stringCases(1, 100, DataType.KEYWORD), + MultiRowTestCaseSupplier.stringCases(1, 100, DataType.TEXT) + ).flatMap(List::stream).map(ValuesTests::makeSupplier).collect(Collectors.toCollection(() -> suppliers)); + + return 
parameterSuppliersFromTypedDataWithDefaultChecks(suppliers); + } + + @Override + protected Expression build(Source source, List args) { + return new Values(source, args.get(0)); + } + + @SuppressWarnings("unchecked") + private static TestCaseSupplier makeSupplier(TestCaseSupplier.TypedDataSupplier fieldSupplier) { + return new TestCaseSupplier(fieldSupplier.name(), List.of(fieldSupplier.type()), () -> { + var fieldTypedData = fieldSupplier.get(); + + var expected = fieldTypedData.multiRowData() + .stream() + .map(v -> (Comparable<? super Comparable<?>>) v) + .collect(Collectors.toSet()); + + return new TestCaseSupplier.TestCase( + List.of(fieldTypedData), + "Values[field=Attribute[channel=0]]", + fieldSupplier.type(), + expected.isEmpty() ? nullValue() : valuesInAnyOrder(expected) + ); + }); + } + + private static Matcher valuesInAnyOrder(Collection data) { + if (data == null) { + return nullValue(); + } + if (data.size() == 1) { + return equalTo(data.iterator().next()); + } + var matcher = containsInAnyOrder(data.toArray()); + // New Matcher, as `containsInAnyOrder` returns Matcher<Iterable<? extends T>> instead of Matcher<T> + return new BaseMatcher<>() { + @Override + public void describeTo(Description description) { + matcher.describeTo(description); + } + + @Override + public boolean matches(Object item) { + if (item instanceof Iterable == false) { + return false; + } + + var castedItem = (Iterable<?>) item; + + return matcher.matches(castedItem); + } + }; + } +} From a4e6cf9cd2f74322df87ee0e737ceed806542ce3 Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Mon, 29 Jul 2024 11:34:59 -0700 Subject: [PATCH 092/105] Integrate data generator in LogsDB mode challenge test (#111303) --- .../logsdb/qa/AbstractChallengeRestTest.java | 2 - ...ardVersusLogsIndexModeChallengeRestIT.java | 140 +++++++++--------- .../logsdb/qa/matchers/ArrayEqualMatcher.java | 2 +- .../logsdb/qa/matchers/ListEqualMatcher.java | 2 +- .../logsdb/datageneration/DataGenerator.java | 47 +++++- .../DataGeneratorSpecification.java | 26 +++- .../GenericSubObjectFieldDataGenerator.java | 108 +++++++------- .../fields/NestedFieldDataGenerator.java | 28 +++- .../fields/ObjectFieldDataGenerator.java | 29 ++-- .../fields/PredefinedField.java | 13 ++ .../TopLevelObjectFieldDataGenerator.java | 61 ++++++++ .../{ => leaf}/KeywordFieldDataGenerator.java | 2 +- .../{ => leaf}/LongFieldDataGenerator.java | 2 +- .../DataGeneratorSnapshotTests.java | 2 +- 14 files changed, 312 insertions(+), 152 deletions(-) create mode 100644 test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/PredefinedField.java create mode 100644 test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/TopLevelObjectFieldDataGenerator.java rename test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/{ => leaf}/KeywordFieldDataGenerator.java (96%) rename test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/{ => leaf}/LongFieldDataGenerator.java (95%) diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/AbstractChallengeRestTest.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/AbstractChallengeRestTest.java index 8ee0e4d715c4c..6724a40fddd22 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/AbstractChallengeRestTest.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/AbstractChallengeRestTest.java @@ -202,9 +202,7 @@ private Settings.Builder
createContenderSettings() throws IOException { private XContentBuilder createMappings(final CheckedConsumer builderConsumer) throws IOException { final XContentBuilder builder = XContentFactory.jsonBuilder(); - builder.startObject(); builderConsumer.accept(builder); - builder.endObject(); return builder; } diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java index 63db21e45ae9f..5f08cb9ee3e13 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java @@ -20,6 +20,11 @@ import org.elasticsearch.datastreams.logsdb.qa.matchers.MatchResult; import org.elasticsearch.datastreams.logsdb.qa.matchers.Matcher; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.logsdb.datageneration.DataGenerator; +import org.elasticsearch.logsdb.datageneration.DataGeneratorSpecification; +import org.elasticsearch.logsdb.datageneration.FieldType; +import org.elasticsearch.logsdb.datageneration.arbitrary.RandomBasedArbitrary; +import org.elasticsearch.logsdb.datageneration.fields.PredefinedField; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; @@ -39,85 +44,71 @@ import java.util.List; import java.util.Map; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; + public class StandardVersusLogsIndexModeChallengeRestIT extends AbstractChallengeRestTest { + private final DataGenerator dataGenerator; public StandardVersusLogsIndexModeChallengeRestIT() { super("standard-apache-baseline", "logs-apache-contender", "baseline-template", "contender-template", 101, 101); + this.dataGenerator = new DataGenerator( + DataGeneratorSpecification.builder() + // Nested fields don't work with subobjects: false. + .withNestedFieldsLimit(0) + // TODO increase depth of objects + // Currently matching fails because in synthetic source all fields are flat (given that we have subobjects: false) + // but stored source is identical to original document which has nested structure. + .withMaxObjectDepth(0) + .withArbitrary(new RandomBasedArbitrary() { + // TODO enable null values + // Matcher does not handle nulls currently + @Override + public boolean generateNullValue() { + return false; + } + + // TODO enable arrays + // List matcher currently does not apply matching logic recursively + // and equality check fails because arrays are sorted in synthetic source. 
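+ // For example, a stored source of ["b", "a"] comes back from synthetic source as ["a", "b"], so a plain equality check would fail.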
+ @Override + public boolean generateArrayOfValues() { + return false; + } + }) + .withPredefinedFields(List.of(new PredefinedField("host.name", FieldType.KEYWORD))) + .build() + ); } @Override public void baselineMappings(XContentBuilder builder) throws IOException { if (randomBoolean()) { - builder.startObject("properties") - - .startObject("@timestamp") - .field("type", "date") - .endObject() - - .startObject("host.name") - .field("type", "keyword") - .field("ignore_above", randomIntBetween(1000, 1200)) - .endObject() - - .startObject("message") - .field("type", "keyword") - .field("ignore_above", randomIntBetween(1000, 1200)) - .endObject() - - .startObject("method") - .field("type", "keyword") - .field("ignore_above", randomIntBetween(1000, 1200)) - .endObject() - - .startObject("memory_usage_bytes") - .field("type", "long") - .field("ignore_malformed", randomBoolean()) - .endObject() - - .endObject(); + dataGenerator.writeMapping(builder); } else { - builder.startObject("properties") + // We want dynamic mapping, but we need host.name to be a keyword instead of text to support aggregations. + builder.startObject() + .startObject("properties") .startObject("host.name") .field("type", "keyword") .field("ignore_above", randomIntBetween(1000, 1200)) .endObject() + .endObject() .endObject(); } } @Override public void contenderMappings(XContentBuilder builder) throws IOException { - builder.field("subobjects", false); if (randomBoolean()) { - builder.startObject("properties") - - .startObject("@timestamp") - .field("type", "date") - .endObject() - - .startObject("host.name") - .field("type", "keyword") - .field("ignore_above", randomIntBetween(1000, 1200)) - .endObject() - - .startObject("message") - .field("type", "keyword") - .field("ignore_above", randomIntBetween(1000, 1200)) - .endObject() - - .startObject("method") - .field("type", "keyword") - .field("ignore_above", randomIntBetween(1000, 1200)) - .endObject() - - .startObject("memory_usage_bytes") - .field("type", "long") - .field("ignore_malformed", randomBoolean()) - .endObject() - - .endObject(); + dataGenerator.writeMapping(builder, b -> builder.field("subobjects", false)); + } else { + // Sometimes we go with full dynamic mapping. 
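+ // With subobjects: false, dotted field names stay flat leaf fields, e.g. "host.name" is mapped as "properties": { "host.name": { ... } } rather than "host": { "properties": { "name": { ... } } }.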
+ builder.startObject(); + builder.field("subobjects", false); + builder.endObject(); + } + } @@ -133,11 +124,13 @@ private static void settings(final Settings.Builder settings) { @Override public void contenderSettings(Settings.Builder builder) { builder.put("index.mode", "logsdb"); + builder.put("index.mapping.total_fields.limit", 5000); settings(builder); } @Override public void baselineSettings(Settings.Builder builder) { + builder.put("index.mapping.total_fields.limit", 5000); settings(builder); } @@ -261,22 +254,27 @@ public void testDateHistogramAggregation() throws IOException { assertTrue(matchResult.getMessage(), matchResult.isMatch()); } - private static XContentBuilder generateDocument(final Instant timestamp) throws IOException { - return XContentFactory.jsonBuilder() - .startObject() - .field("@timestamp", DateFormatter.forPattern(FormatNames.STRICT_DATE_OPTIONAL_TIME.getName()).format(timestamp)) - .field("host.name", randomFrom("foo", "bar", "baz")) - .field("message", randomFrom("a message", "another message", "still another message", "one more message")) - .field("method", randomFrom("put", "post", "get")) - .field("memory_usage_bytes", randomLongBetween(1000, 2000)) - .endObject(); + private XContentBuilder generateDocument(final Instant timestamp) throws IOException { + var document = XContentFactory.jsonBuilder(); + dataGenerator.generateDocument(document, doc -> { + doc.field("@timestamp", DateFormatter.forPattern(FormatNames.STRICT_DATE_OPTIONAL_TIME.getName()).format(timestamp)); + // Needed for terms query + doc.field("method", randomFrom("put", "post", "get")); + // We can generate this but we would get "too many buckets" + doc.field("memory_usage_bytes", randomLongBetween(1000, 2000)); + }); + + return document; } @SuppressWarnings("unchecked") private static List<Map<String, Object>> getQueryHits(final Response response) throws IOException { final Map<String, Object> map = XContentHelper.convertToMap(XContentType.JSON.xContent(), response.getEntity().getContent(), true); final Map<String, Object> hitsMap = (Map<String, Object>) map.get("hits"); + final List<Map<String, Object>> hitsList = (List<Map<String, Object>>) hitsMap.get("hits"); + assertThat(hitsList.size(), greaterThan(0)); + return hitsList.stream().map(hit -> (Map<String, Object>) hit.get("_source")).toList(); } @@ -285,13 +283,23 @@ private static List<Map<String, Object>> getAggregationBuckets(final Response re final Map<String, Object> map = XContentHelper.convertToMap(XContentType.JSON.xContent(), response.getEntity().getContent(), true); final Map<String, Object> aggs = (Map<String, Object>) map.get("aggregations"); final Map<String, Object> agg = (Map<String, Object>) aggs.get(aggName); - return (List<Map<String, Object>>) agg.get("buckets"); + + var buckets = (List<Map<String, Object>>) agg.get("buckets"); + assertThat(buckets.size(), greaterThan(0)); + + return buckets; } private void assertDocumentIndexing(List<XContentBuilder> documents) throws IOException { final Tuple<Response, Response> tuple = indexDocuments(() -> documents, () -> documents); + assertThat(tuple.v1().getStatusLine().getStatusCode(), Matchers.equalTo(RestStatus.OK.getStatus())); + var baselineResponseBody = entityAsMap(tuple.v1()); + assertThat("errors in baseline bulk response:\n " + baselineResponseBody, baselineResponseBody.get("errors"), equalTo(false)); + assertThat(tuple.v2().getStatusLine().getStatusCode(), Matchers.equalTo(RestStatus.OK.getStatus())); + var contenderResponseBody = entityAsMap(tuple.v2()); + assertThat("errors in contender bulk response:\n " + contenderResponseBody, contenderResponseBody.get("errors"), equalTo(false)); + } } diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/ArrayEqualMatcher.java
b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/ArrayEqualMatcher.java index 25e6dc8ef31c9..ecfe5840689fb 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/ArrayEqualMatcher.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/ArrayEqualMatcher.java @@ -47,7 +47,7 @@ private MatchResult matchArraysEqual(final Object[] actualArray, final Object[] actualSettings, expectedMappings, expectedSettings, - "Arrays do not match when ignoreing sort order" + "Arrays do not match when ignoring sort order" ) ); } else { diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/ListEqualMatcher.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/ListEqualMatcher.java index 56c24712f635c..e5429ddad99ff 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/ListEqualMatcher.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/matchers/ListEqualMatcher.java @@ -47,7 +47,7 @@ private MatchResult matchListEquals(final List actualList, final List {}).accept(mapping); mapping.endObject(); } + /** + * Writes a fully built mapping document (enclosed in a top-level object) to a provided builder. + * Allows customizing parameters of top level object mapper. + * @param mapping destination + * @param customMappingParameters writer of custom mapping parameters of top level object mapping + * @throws IOException + */ + public void writeMapping(XContentBuilder mapping, CheckedConsumer customMappingParameters) + throws IOException { + mapping.startObject().field("_doc"); + topLevelGenerator.mappingWriter(customMappingParameters).accept(mapping); + mapping.endObject(); + } + + /** + * Generates a document and writes it to a provided builder. New document is generated every time. + * @param document + * @throws IOException + */ public void generateDocument(XContentBuilder document) throws IOException { - topLevelGenerator.fieldValueGenerator().accept(document); + topLevelGenerator.fieldValueGenerator(b -> {}).accept(document); + } + + /** + * Generates a document and writes it to a provided builder. New document is generated every time. + * Supports appending custom content to generated document (e.g. a custom generated field). 
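+ * For example, the LogsDB challenge test uses this to add a fixed {@code @timestamp} field next to the generated fields.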
+ * @param document + * @param customDocumentModifications + * @throws IOException + */ + public void generateDocument(XContentBuilder document, CheckedConsumer customDocumentModifications) + throws IOException { + topLevelGenerator.fieldValueGenerator(customDocumentModifications).accept(document); } } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/DataGeneratorSpecification.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/DataGeneratorSpecification.java index 4a0ed074b1411..ea47ad3be1fa6 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/DataGeneratorSpecification.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/DataGeneratorSpecification.java @@ -10,6 +10,10 @@ import org.elasticsearch.logsdb.datageneration.arbitrary.Arbitrary; import org.elasticsearch.logsdb.datageneration.arbitrary.RandomBasedArbitrary; +import org.elasticsearch.logsdb.datageneration.fields.PredefinedField; + +import java.util.ArrayList; +import java.util.List; /** * Allows configuring behavior of {@link DataGenerator}. @@ -18,8 +22,15 @@ * Applies to subobjects. * @param maxObjectDepth maximum depth of nested objects * @param nestedFieldsLimit how many total nested fields can be present in a produced mapping + * @param predefinedFields predefined fields that must be present in mapping and documents. Only top level fields are supported. */ -public record DataGeneratorSpecification(Arbitrary arbitrary, int maxFieldCountPerLevel, int maxObjectDepth, int nestedFieldsLimit) { +public record DataGeneratorSpecification( + Arbitrary arbitrary, + int maxFieldCountPerLevel, + int maxObjectDepth, + int nestedFieldsLimit, + List predefinedFields +) { public static Builder builder() { return new Builder(); @@ -34,14 +45,16 @@ public static class Builder { private int maxFieldCountPerLevel; private int maxObjectDepth; private int nestedFieldsLimit; + private List predefinedFields; public Builder() { + arbitrary = new RandomBasedArbitrary(); // Simply sufficiently big numbers to get some permutations maxFieldCountPerLevel = 50; - maxObjectDepth = 3; + maxObjectDepth = 2; // Default value of index.mapping.nested_fields.limit nestedFieldsLimit = 50; - arbitrary = new RandomBasedArbitrary(); + predefinedFields = new ArrayList<>(); } public Builder withArbitrary(Arbitrary arbitrary) { @@ -64,8 +77,13 @@ public Builder withNestedFieldsLimit(int nestedFieldsLimit) { return this; } + public Builder withPredefinedFields(List predefinedFields) { + this.predefinedFields = predefinedFields; + return this; + } + public DataGeneratorSpecification build() { - return new DataGeneratorSpecification(arbitrary, maxFieldCountPerLevel, maxObjectDepth, nestedFieldsLimit); + return new DataGeneratorSpecification(arbitrary, maxFieldCountPerLevel, maxObjectDepth, nestedFieldsLimit, predefinedFields); } } } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/GenericSubObjectFieldDataGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/GenericSubObjectFieldDataGenerator.java index 5d05fc1f35a77..24f59867f85b8 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/GenericSubObjectFieldDataGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/GenericSubObjectFieldDataGenerator.java @@ -11,6 +11,8 @@ import org.elasticsearch.core.CheckedConsumer; import 
org.elasticsearch.logsdb.datageneration.FieldDataGenerator; import org.elasticsearch.logsdb.datageneration.FieldType; +import org.elasticsearch.logsdb.datageneration.fields.leaf.KeywordFieldDataGenerator; +import org.elasticsearch.logsdb.datageneration.fields.leaf.LongFieldDataGenerator; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -25,84 +27,78 @@ public class GenericSubObjectFieldDataGenerator { private final Context context; - private final List childFields; - - public GenericSubObjectFieldDataGenerator(Context context) { + GenericSubObjectFieldDataGenerator(Context context) { this.context = context; - - childFields = new ArrayList<>(); - generateChildFields(); } - public CheckedConsumer mappingWriter( - CheckedConsumer customMappingParameters - ) { - return b -> { - b.startObject(); - customMappingParameters.accept(b); - - b.startObject("properties"); - for (var childField : childFields) { - b.field(childField.fieldName); - childField.generator.mappingWriter().accept(b); - } - b.endObject(); - - b.endObject(); - }; - } + List generateChildFields() { + var existingFieldNames = new HashSet(); + // no child fields is legal + var childFieldsCount = context.specification().arbitrary().childFieldCount(0, context.specification().maxFieldCountPerLevel()); + var result = new ArrayList(childFieldsCount); - public CheckedConsumer fieldValueGenerator() { - return b -> { - if (context.shouldGenerateObjectArray()) { - int size = context.specification().arbitrary().objectArraySize(); + for (int i = 0; i < childFieldsCount; i++) { + var fieldName = generateFieldName(existingFieldNames); - b.startArray(); - for (int i = 0; i < size; i++) { - writeObject(b, childFields); - } - b.endArray(); + if (context.shouldAddObjectField()) { + result.add(new ChildField(fieldName, new ObjectFieldDataGenerator(context.subObject()))); + } else if (context.shouldAddNestedField()) { + result.add(new ChildField(fieldName, new NestedFieldDataGenerator(context.nestedObject()))); } else { - writeObject(b, childFields); + var fieldType = context.specification().arbitrary().fieldType(); + result.add(leafField(fieldType, fieldName)); } - }; + } + + return result; } - private static void writeObject(XContentBuilder document, Iterable childFields) throws IOException { - document.startObject(); + List generateChildFields(List predefinedFields) { + return predefinedFields.stream().map(pf -> leafField(pf.fieldType(), pf.fieldName())).toList(); + } + + static void writeChildFieldsMapping(XContentBuilder mapping, List childFields) throws IOException { for (var childField : childFields) { - document.field(childField.fieldName); - childField.generator.fieldValueGenerator().accept(document); + mapping.field(childField.fieldName); + childField.generator.mappingWriter().accept(mapping); } - document.endObject(); } - private void generateChildFields() { - var existingFields = new HashSet(); - // no child fields is legal - var childFieldsCount = context.specification().arbitrary().childFieldCount(0, context.specification().maxFieldCountPerLevel()); - - for (int i = 0; i < childFieldsCount; i++) { - var fieldName = generateFieldName(existingFields); + static void writeObjectsData(XContentBuilder document, Context context, CheckedConsumer objectWriter) + throws IOException { + if (context.shouldGenerateObjectArray()) { + int size = context.specification().arbitrary().objectArraySize(); - if (context.shouldAddObjectField()) { - childFields.add(new ChildField(fieldName, new 
ObjectFieldDataGenerator(context.subObject()))); - } else if (context.shouldAddNestedField()) { - childFields.add(new ChildField(fieldName, new NestedFieldDataGenerator(context.nestedObject()))); - } else { - var fieldType = context.specification().arbitrary().fieldType(); - addLeafField(fieldType, fieldName); + document.startArray(); + for (int i = 0; i < size; i++) { + objectWriter.accept(document); } + document.endArray(); + } else { + objectWriter.accept(document); + } + } + + static void writeSingleObject(XContentBuilder document, Iterable childFields) throws IOException { + document.startObject(); + writeChildFieldsData(document, childFields); + document.endObject(); + } + + static void writeChildFieldsData(XContentBuilder document, Iterable childFields) throws IOException { + for (var childField : childFields) { + document.field(childField.fieldName); + childField.generator.fieldValueGenerator().accept(document); } } - private void addLeafField(FieldType type, String fieldName) { + private ChildField leafField(FieldType type, String fieldName) { var generator = switch (type) { case LONG -> new LongFieldDataGenerator(context.specification().arbitrary()); case KEYWORD -> new KeywordFieldDataGenerator(context.specification().arbitrary()); }; - childFields.add(new ChildField(fieldName, generator)); + return new ChildField(fieldName, generator); } private String generateFieldName(Set existingFields) { @@ -115,5 +111,5 @@ private String generateFieldName(Set existingFields) { return fieldName; } - private record ChildField(String fieldName, FieldDataGenerator generator) {} + record ChildField(String fieldName, FieldDataGenerator generator) {} } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/NestedFieldDataGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/NestedFieldDataGenerator.java index acceb3aebe421..f52b739418034 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/NestedFieldDataGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/NestedFieldDataGenerator.java @@ -13,21 +13,39 @@ import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; +import java.util.List; public class NestedFieldDataGenerator implements FieldDataGenerator { - private final GenericSubObjectFieldDataGenerator delegate; + private final Context context; + private final List childFields; - public NestedFieldDataGenerator(Context context) { - this.delegate = new GenericSubObjectFieldDataGenerator(context); + NestedFieldDataGenerator(Context context) { + this.context = context; + var genericGenerator = new GenericSubObjectFieldDataGenerator(context); + this.childFields = genericGenerator.generateChildFields(); } @Override public CheckedConsumer mappingWriter() { - return delegate.mappingWriter(b -> b.field("type", "nested")); + return b -> { + b.startObject(); + + b.field("type", "nested"); + + b.startObject("properties"); + GenericSubObjectFieldDataGenerator.writeChildFieldsMapping(b, childFields); + b.endObject(); + + b.endObject(); + }; } @Override public CheckedConsumer fieldValueGenerator() { - return delegate.fieldValueGenerator(); + CheckedConsumer objectWriter = object -> GenericSubObjectFieldDataGenerator.writeSingleObject( + object, + childFields + ); + return b -> GenericSubObjectFieldDataGenerator.writeObjectsData(b, context, objectWriter); } } diff --git 
a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/ObjectFieldDataGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/ObjectFieldDataGenerator.java index 8cbedefe14ae5..522bb2b1772b0 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/ObjectFieldDataGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/ObjectFieldDataGenerator.java @@ -9,30 +9,41 @@ package org.elasticsearch.logsdb.datageneration.fields; import org.elasticsearch.core.CheckedConsumer; -import org.elasticsearch.logsdb.datageneration.DataGeneratorSpecification; import org.elasticsearch.logsdb.datageneration.FieldDataGenerator; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; +import java.util.List; public class ObjectFieldDataGenerator implements FieldDataGenerator { - private final GenericSubObjectFieldDataGenerator delegate; - - public ObjectFieldDataGenerator(DataGeneratorSpecification specification) { - this(new Context(specification)); - } + private final Context context; + private final List childFields; ObjectFieldDataGenerator(Context context) { - this.delegate = new GenericSubObjectFieldDataGenerator(context); + this.context = context; + var genericGenerator = new GenericSubObjectFieldDataGenerator(context); + this.childFields = genericGenerator.generateChildFields(); } @Override public CheckedConsumer mappingWriter() { - return delegate.mappingWriter(b -> {}); + return b -> { + b.startObject(); + + b.startObject("properties"); + GenericSubObjectFieldDataGenerator.writeChildFieldsMapping(b, childFields); + b.endObject(); + + b.endObject(); + }; } @Override public CheckedConsumer fieldValueGenerator() { - return delegate.fieldValueGenerator(); + CheckedConsumer objectWriter = object -> GenericSubObjectFieldDataGenerator.writeSingleObject( + object, + childFields + ); + return b -> GenericSubObjectFieldDataGenerator.writeObjectsData(b, context, objectWriter); } } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/PredefinedField.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/PredefinedField.java new file mode 100644 index 0000000000000..6adae35dc909c --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/PredefinedField.java @@ -0,0 +1,13 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.logsdb.datageneration.fields; + +import org.elasticsearch.logsdb.datageneration.FieldType; + +public record PredefinedField(String fieldName, FieldType fieldType) {} diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/TopLevelObjectFieldDataGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/TopLevelObjectFieldDataGenerator.java new file mode 100644 index 0000000000000..1debc6b1fa7a1 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/TopLevelObjectFieldDataGenerator.java @@ -0,0 +1,61 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.logsdb.datageneration.fields; + +import org.elasticsearch.core.CheckedConsumer; +import org.elasticsearch.logsdb.datageneration.DataGeneratorSpecification; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.List; + +public class TopLevelObjectFieldDataGenerator { + private final Context context; + private final List predefinedFields; + private final List generatedChildFields; + + public TopLevelObjectFieldDataGenerator(DataGeneratorSpecification specification) { + this.context = new Context(specification); + var genericGenerator = new GenericSubObjectFieldDataGenerator(context); + this.predefinedFields = genericGenerator.generateChildFields(specification.predefinedFields()); + this.generatedChildFields = genericGenerator.generateChildFields(); + } + + public CheckedConsumer mappingWriter( + CheckedConsumer customMappingParameters + ) { + return b -> { + b.startObject(); + + customMappingParameters.accept(b); + + b.startObject("properties"); + GenericSubObjectFieldDataGenerator.writeChildFieldsMapping(b, predefinedFields); + GenericSubObjectFieldDataGenerator.writeChildFieldsMapping(b, generatedChildFields); + b.endObject(); + + b.endObject(); + }; + } + + public CheckedConsumer fieldValueGenerator( + CheckedConsumer customDocumentModification + ) { + CheckedConsumer objectWriter = b -> { + b.startObject(); + + customDocumentModification.accept(b); + GenericSubObjectFieldDataGenerator.writeChildFieldsData(b, predefinedFields); + GenericSubObjectFieldDataGenerator.writeChildFieldsData(b, generatedChildFields); + + b.endObject(); + }; + return b -> GenericSubObjectFieldDataGenerator.writeObjectsData(b, context, objectWriter); + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/KeywordFieldDataGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/KeywordFieldDataGenerator.java similarity index 96% rename from test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/KeywordFieldDataGenerator.java rename to test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/KeywordFieldDataGenerator.java index 11413d33a97c7..89ae1d6034c15 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/KeywordFieldDataGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/KeywordFieldDataGenerator.java @@ -6,7 +6,7 @@ * Side Public License, v 1. 
*/ -package org.elasticsearch.logsdb.datageneration.fields; +package org.elasticsearch.logsdb.datageneration.fields.leaf; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.logsdb.datageneration.FieldDataGenerator; diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/LongFieldDataGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/LongFieldDataGenerator.java similarity index 95% rename from test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/LongFieldDataGenerator.java rename to test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/LongFieldDataGenerator.java index f1bb35f1f0401..097c5fe024d2b 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/LongFieldDataGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/LongFieldDataGenerator.java @@ -6,7 +6,7 @@ * Side Public License, v 1. */ -package org.elasticsearch.logsdb.datageneration.fields; +package org.elasticsearch.logsdb.datageneration.fields.leaf; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.logsdb.datageneration.FieldDataGenerator; diff --git a/test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/DataGeneratorSnapshotTests.java b/test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/DataGeneratorSnapshotTests.java index 41066e9ba3cac..e476e02d03778 100644 --- a/test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/DataGeneratorSnapshotTests.java +++ b/test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/DataGeneratorSnapshotTests.java @@ -207,5 +207,5 @@ public boolean generateArrayOfObjects() { public int objectArraySize() { return 2; } - }; + } } From c722cebb13c45c9400457767fdf5a2ba16261d1a Mon Sep 17 00:00:00 2001 From: Adam Demjen Date: Mon, 29 Jul 2024 15:03:39 -0400 Subject: [PATCH 093/105] Fix score count validation in reranker response (#111212) * Fix rerank score validation * Update docs/changelog/111212.yaml * Add test case for invalid document indices in reranker result * Preemptive top_n config check * Reorg code + refine tests * Add support for Google Vertex AI task settings * Spotless * Make top N eval async * Update test * Fix broken unit test * Clean up tests * Spotless * Add size check + compare against rankWindowSize * Fix import --- docs/changelog/111212.yaml | 6 ++ ...ankFeaturePhaseRankCoordinatorContext.java | 96 +++++++++++++------ ...aturePhaseRankCoordinatorContextTests.java | 7 +- .../TextSimilarityRankTests.java | 70 +++++++++++--- .../TextSimilarityTestPlugin.java | 63 ++++++++++-- 5 files changed, 188 insertions(+), 54 deletions(-) create mode 100644 docs/changelog/111212.yaml diff --git a/docs/changelog/111212.yaml b/docs/changelog/111212.yaml new file mode 100644 index 0000000000000..67d1513b3ff6f --- /dev/null +++ b/docs/changelog/111212.yaml @@ -0,0 +1,6 @@ +pr: 111212 +summary: Fix score count validation in reranker response +area: Ranking +type: bug +issues: + - 111202 diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankFeaturePhaseRankCoordinatorContext.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankFeaturePhaseRankCoordinatorContext.java index a22126439e9e2..42413c35fcbff 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankFeaturePhaseRankCoordinatorContext.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankFeaturePhaseRankCoordinatorContext.java @@ -14,8 +14,11 @@ import org.elasticsearch.inference.TaskType; import org.elasticsearch.search.rank.context.RankFeaturePhaseRankCoordinatorContext; import org.elasticsearch.search.rank.feature.RankFeatureDoc; +import org.elasticsearch.xpack.core.inference.action.GetInferenceModelAction; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.inference.results.RankedDocsResults; +import org.elasticsearch.xpack.inference.services.cohere.rerank.CohereRerankTaskSettings; +import org.elasticsearch.xpack.inference.services.googlevertexai.rerank.GoogleVertexAiRerankTaskSettings; import java.util.Arrays; import java.util.Comparator; @@ -53,24 +56,77 @@ public TextSimilarityRankFeaturePhaseRankCoordinatorContext( protected void computeScores(RankFeatureDoc[] featureDocs, ActionListener scoreListener) { // Wrap the provided rankListener to an ActionListener that would handle the response from the inference service // and then pass the results - final ActionListener actionListener = scoreListener.delegateFailureAndWrap((l, r) -> { - float[] scores = extractScoresFromResponse(r); - if (scores.length != featureDocs.length) { + final ActionListener inferenceListener = scoreListener.delegateFailureAndWrap((l, r) -> { + InferenceServiceResults results = r.getResults(); + assert results instanceof RankedDocsResults; + + // Ensure we get exactly as many scores as the number of docs we passed, otherwise we may return incorrect results + List rankedDocs = ((RankedDocsResults) results).getRankedDocs(); + if (rankedDocs.size() != featureDocs.length) { l.onFailure( - new IllegalStateException("Document and score count mismatch: [" + featureDocs.length + "] vs [" + scores.length + "]") + new IllegalStateException( + "Reranker input document count and returned score count mismatch: [" + + featureDocs.length + + "] vs [" + + rankedDocs.size() + + "]" + ) ); } else { + float[] scores = extractScoresFromRankedDocs(rankedDocs); l.onResponse(scores); } }); - List featureData = Arrays.stream(featureDocs).map(x -> x.featureData).toList(); - InferenceAction.Request request = generateRequest(featureData); - try { - client.execute(InferenceAction.INSTANCE, request, actionListener); - } finally { - request.decRef(); - } + // top N listener + ActionListener topNListener = scoreListener.delegateFailureAndWrap((l, r) -> { + // The rerank inference endpoint may have an override to return top N documents only, in that case let's fail fast to avoid + // assigning scores to the wrong input + Integer configuredTopN = null; + if (r.getEndpoints().isEmpty() == false + && r.getEndpoints().get(0).getTaskSettings() instanceof CohereRerankTaskSettings cohereTaskSettings) { + configuredTopN = cohereTaskSettings.getTopNDocumentsOnly(); + } else if (r.getEndpoints().isEmpty() == false + && r.getEndpoints().get(0).getTaskSettings() instanceof GoogleVertexAiRerankTaskSettings googleVertexAiTaskSettings) { + configuredTopN = googleVertexAiTaskSettings.topN(); + } + if (configuredTopN != null && configuredTopN < rankWindowSize) { + l.onFailure( + new IllegalArgumentException( + "Inference endpoint [" + + inferenceId + + "] is configured to return the top [" + + configuredTopN + + "] results, 
but rank_window_size is [" + + rankWindowSize + + "]. Reduce rank_window_size to be less than or equal to the configured top N value." + ) + ); + return; + } + List featureData = Arrays.stream(featureDocs).map(x -> x.featureData).toList(); + InferenceAction.Request inferenceRequest = generateRequest(featureData); + try { + client.execute(InferenceAction.INSTANCE, inferenceRequest, inferenceListener); + } finally { + inferenceRequest.decRef(); + } + }); + + GetInferenceModelAction.Request getModelRequest = new GetInferenceModelAction.Request(inferenceId, TaskType.RERANK); + client.execute(GetInferenceModelAction.INSTANCE, getModelRequest, topNListener); + } + + /** + * Sorts documents by score descending and discards those with a score less than minScore. + * @param originalDocs documents to process + */ + @Override + protected RankFeatureDoc[] preprocess(RankFeatureDoc[] originalDocs) { + return Arrays.stream(originalDocs) + .filter(doc -> minScore == null || doc.score >= minScore) + .sorted(Comparator.comparing((RankFeatureDoc doc) -> doc.score).reversed()) + .toArray(RankFeatureDoc[]::new); } protected InferenceAction.Request generateRequest(List docFeatures) { @@ -85,11 +141,7 @@ protected InferenceAction.Request generateRequest(List docFeatures) { ); } - private float[] extractScoresFromResponse(InferenceAction.Response response) { - InferenceServiceResults results = response.getResults(); - assert results instanceof RankedDocsResults; - - List rankedDocs = ((RankedDocsResults) results).getRankedDocs(); + private float[] extractScoresFromRankedDocs(List rankedDocs) { float[] scores = new float[rankedDocs.size()]; for (RankedDocsResults.RankedDoc rankedDoc : rankedDocs) { scores[rankedDoc.index()] = rankedDoc.relevanceScore(); @@ -97,16 +149,4 @@ private float[] extractScoresFromResponse(InferenceAction.Response response) { return scores; } - - /** - * Sorts documents by score descending and discards those with a score less than minScore. 
- * @param originalDocs documents to process - */ - @Override - protected RankFeatureDoc[] preprocess(RankFeatureDoc[] originalDocs) { - return Arrays.stream(originalDocs) - .filter(doc -> minScore == null || doc.score >= minScore) - .sorted(Comparator.comparing((RankFeatureDoc doc) -> doc.score).reversed()) - .toArray(RankFeatureDoc[]::new); - } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankFeaturePhaseRankCoordinatorContextTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankFeaturePhaseRankCoordinatorContextTests.java index 50d91a2271de6..2e9be42b5c5d4 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankFeaturePhaseRankCoordinatorContextTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankFeaturePhaseRankCoordinatorContextTests.java @@ -12,7 +12,7 @@ import org.elasticsearch.inference.TaskType; import org.elasticsearch.search.rank.feature.RankFeatureDoc; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.core.inference.action.GetInferenceModelAction; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.argThat; @@ -54,10 +54,9 @@ public void onFailure(Exception e) { fail(); } }); - verify(mockClient).execute( - eq(InferenceAction.INSTANCE), - argThat(actionRequest -> ((InferenceAction.Request) actionRequest).getTaskType().equals(TaskType.RERANK)), + eq(GetInferenceModelAction.INSTANCE), + argThat(actionRequest -> ((GetInferenceModelAction.Request) actionRequest).getTaskType().equals(TaskType.RERANK)), any() ); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankTests.java index 7fbfe70dbcfe7..a26dc50097cf5 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.inference.rank.textsimilarity; +import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.client.internal.Client; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.inference.InputType; @@ -29,22 +30,46 @@ import java.util.Objects; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; public class TextSimilarityRankTests extends ESSingleNodeTestCase { /** - * {@code TextSimilarityRankBuilder} that simulates an inference call that returns a different number of results as the input. + * {@code TextSimilarityRankBuilder} that sets top_n in the inference endpoint's task settings. + * See {@code TextSimilarityTestPlugin -> TestFilter -> handleGetInferenceModelActionRequest} for the logic that extracts the top_n + * value. 
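+ * For example, an inference id of {@code my-rerank-model-task-settings-top-3} simulates an endpoint whose task settings specify top_n=3.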
*/ - public static class InvalidInferenceResultCountProvidingTextSimilarityRankBuilder extends TextSimilarityRankBuilder { + public static class TopNConfigurationAcceptingTextSimilarityRankBuilder extends TextSimilarityRankBuilder { - public InvalidInferenceResultCountProvidingTextSimilarityRankBuilder( + public TopNConfigurationAcceptingTextSimilarityRankBuilder( String field, String inferenceId, String inferenceText, int rankWindowSize, - Float minScore + Float minScore, + int topN + ) { + super(field, inferenceId + "-task-settings-top-" + topN, inferenceText, rankWindowSize, minScore); + } + } + + /** + * {@code TextSimilarityRankBuilder} that simulates an inference call returning N results. + */ + public static class InferenceResultCountAcceptingTextSimilarityRankBuilder extends TextSimilarityRankBuilder { + + private final int inferenceResultCount; + + public InferenceResultCountAcceptingTextSimilarityRankBuilder( + String field, + String inferenceId, + String inferenceText, + int rankWindowSize, + Float minScore, + int inferenceResultCount ) { super(field, inferenceId, inferenceText, rankWindowSize, minScore); + this.inferenceResultCount = inferenceResultCount; } @Override @@ -62,10 +87,10 @@ public RankFeaturePhaseRankCoordinatorContext buildRankFeaturePhaseCoordinatorCo protected InferenceAction.Request generateRequest(List docFeatures) { return new InferenceAction.Request( TaskType.RERANK, - inferenceId, + this.inferenceId, inferenceText, docFeatures, - Map.of("invalidInferenceResultCount", true), + Map.of("inferenceResultCount", inferenceResultCount), InputType.SEARCH, InferenceAction.Request.DEFAULT_TIMEOUT ); @@ -151,17 +176,38 @@ public void testRerankInferenceFailure() { ); } - public void testRerankInferenceResultMismatch() { - ElasticsearchAssertions.assertFailures( + public void testRerankTopNConfigurationAndRankWindowSizeMismatch() { + SearchPhaseExecutionException ex = expectThrows( + SearchPhaseExecutionException.class, // Execute search with text similarity reranking client.prepareSearch() .setRankBuilder( - new InvalidInferenceResultCountProvidingTextSimilarityRankBuilder("text", "my-rerank-model", "my query", 100, 1.5f) + // Simulate reranker configuration with top_n=3 in task_settings, which is different from rank_window_size=10 + // (Note: top_n comes from inferenceId, there's no other easy way of passing this to the mocked get model request) + new TopNConfigurationAcceptingTextSimilarityRankBuilder("text", "my-rerank-model", "my query", 100, 1.5f, 3) ) - .setQuery(QueryBuilders.matchAllQuery()), - RestStatus.INTERNAL_SERVER_ERROR, - containsString("Failed to execute phase [rank-feature], Computing updated ranks for results failed") + .setQuery(QueryBuilders.matchAllQuery()) + ); + assertThat(ex.status(), equalTo(RestStatus.BAD_REQUEST)); + assertThat( + ex.getDetailedMessage(), + containsString("Reduce rank_window_size to be less than or equal to the configured top N value") + ); + } + + public void testRerankInputSizeAndInferenceResultsMismatch() { + SearchPhaseExecutionException ex = expectThrows( + SearchPhaseExecutionException.class, + // Execute search with text similarity reranking + client.prepareSearch() + .setRankBuilder( + // Simulate reranker returning different number of results from input + new InferenceResultCountAcceptingTextSimilarityRankBuilder("text", "my-rerank-model", "my query", 100, 1.5f, 4) + ) + .setQuery(QueryBuilders.matchAllQuery()) ); + assertThat(ex.status(), equalTo(RestStatus.INTERNAL_SERVER_ERROR)); + 
assertThat(ex.getDetailedMessage(), containsString("Reranker input document count and returned score count mismatch")); } private static void assertHitHasRankScoreAndText(SearchHit hit, int expectedRank, float expectedScore, String expectedText) { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityTestPlugin.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityTestPlugin.java index 1e457a1a27c92..6d0c15d5c0bfe 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityTestPlugin.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityTestPlugin.java @@ -21,7 +21,9 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.inference.EmptyTaskSettings; import org.elasticsearch.inference.InputType; +import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.TaskType; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; @@ -39,8 +41,12 @@ import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.core.inference.action.GetInferenceModelAction; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.inference.results.RankedDocsResults; +import org.elasticsearch.xpack.inference.services.cohere.CohereService; +import org.elasticsearch.xpack.inference.services.cohere.rerank.CohereRerankServiceSettings; +import org.elasticsearch.xpack.inference.services.cohere.rerank.CohereRerankTaskSettings; import java.io.IOException; import java.util.ArrayList; @@ -48,6 +54,8 @@ import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; import static java.util.Collections.singletonList; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; @@ -100,7 +108,6 @@ public int order() { } @Override - @SuppressWarnings("unchecked") public void apply( Task task, String action, @@ -108,23 +115,59 @@ public void app ActionListener listener, ActionFilterChain chain ) { - // For any other action than inference, execute normally - if (action.equals(InferenceAction.INSTANCE.name()) == false) { + if (action.equals(GetInferenceModelAction.INSTANCE.name())) { + assert request instanceof GetInferenceModelAction.Request; + handleGetInferenceModelActionRequest((GetInferenceModelAction.Request) request, listener); + } else if (action.equals(InferenceAction.INSTANCE.name())) { + assert request instanceof InferenceAction.Request; + handleInferenceActionRequest((InferenceAction.Request) request, listener); + } else { + // For any other action than get model and inference, execute normally chain.proceed(task, action, request, listener); - return; } + } - assert request instanceof InferenceAction.Request; - boolean shouldThrow = (boolean) ((InferenceAction.Request) request).getTaskSettings().getOrDefault("throwing", false); - boolean hasInvalidInferenceResultCount = (boolean) ((InferenceAction.Request) request).getTaskSettings() - .getOrDefault("invalidInferenceResultCount", false); + @SuppressWarnings("unchecked") 
+ private void handleGetInferenceModelActionRequest( + GetInferenceModelAction.Request request, + ActionListener listener + ) { + String inferenceEntityId = request.getInferenceEntityId(); + Integer topN = null; + Matcher extractTopN = Pattern.compile(".*(task-settings-top-\\d+).*").matcher(inferenceEntityId); + if (extractTopN.find()) { + topN = Integer.parseInt(extractTopN.group(1).replaceAll("\\D", "")); + } + + ActionResponse response = new GetInferenceModelAction.Response( + List.of( + new ModelConfigurations( + request.getInferenceEntityId(), + request.getTaskType(), + CohereService.NAME, + new CohereRerankServiceSettings("uri", "model", null), + topN == null ? new EmptyTaskSettings() : new CohereRerankTaskSettings(topN, null, null) + ) + ) + ); + listener.onResponse((Response) response); + } + + @SuppressWarnings("unchecked") + private void handleInferenceActionRequest( + InferenceAction.Request request, + ActionListener listener + ) { + Map taskSettings = request.getTaskSettings(); + boolean shouldThrow = (boolean) taskSettings.getOrDefault("throwing", false); + Integer inferenceResultCount = (Integer) taskSettings.get("inferenceResultCount"); if (shouldThrow) { listener.onFailure(new UnsupportedOperationException("simulated failure")); } else { List rankedDocsResults = new ArrayList<>(); - List inputs = ((InferenceAction.Request) request).getInput(); - int resultCount = hasInvalidInferenceResultCount ? inputs.size() - 1 : inputs.size(); + List inputs = request.getInput(); + int resultCount = inferenceResultCount == null ? inputs.size() : inferenceResultCount; for (int i = 0; i < resultCount; i++) { rankedDocsResults.add(new RankedDocsResults.RankedDoc(i, Float.parseFloat(inputs.get(i)), inputs.get(i))); } From 7de305c4ec1df3b5fef5704930b1b1d7711af6da Mon Sep 17 00:00:00 2001 From: Mayya Sharipova Date: Mon, 29 Jul 2024 15:20:39 -0400 Subject: [PATCH 094/105] Remove 4096 bool query max limit from docs (#111421) indices.query.bool.max_clause_count is set automatically and no longer defaults to 4096. This removes mentions of 4096 from the query documentation. Relates to PR#91811 --- docs/reference/query-dsl/query-string-query.asciidoc | 4 ++-- docs/reference/query-dsl/span-multi-term-query.asciidoc | 3 ++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/docs/reference/query-dsl/query-string-query.asciidoc b/docs/reference/query-dsl/query-string-query.asciidoc index 319ede7c4ac05..b45247ace3735 100644 --- a/docs/reference/query-dsl/query-string-query.asciidoc +++ b/docs/reference/query-dsl/query-string-query.asciidoc @@ -30,7 +30,7 @@ If you don't need to support a query syntax, consider using the syntax, use the <> query, which is less strict. ==== - + [[query-string-query-ex-request]] ==== Example request @@ -83,7 +83,7 @@ could be expensive. There is a limit on the number of fields times terms that can be queried at once. It is defined by the `indices.query.bool.max_clause_count` -<>, which defaults to 4096. +<>. 
==== -- diff --git a/docs/reference/query-dsl/span-multi-term-query.asciidoc b/docs/reference/query-dsl/span-multi-term-query.asciidoc index aefb3e4b75eb5..5a5f0e1f5ff99 100644 --- a/docs/reference/query-dsl/span-multi-term-query.asciidoc +++ b/docs/reference/query-dsl/span-multi-term-query.asciidoc @@ -39,7 +39,8 @@ GET /_search -------------------------------------------------- WARNING: `span_multi` queries will hit too many clauses failure if the number of terms that match the query exceeds the -boolean query limit (defaults to 4096).To avoid an unbounded expansion you can set the <>. +To avoid an unbounded expansion you can set the <> of the multi term query to `top_terms_*` rewrite. Or, if you use `span_multi` on `prefix` query only, you can activate the <> field option of the `text` field instead. This will rewrite any prefix query on the field to a single term query that matches the indexed prefix. From 5e6c2e533c09a923099a8920e98ac28d13a8d42b Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Mon, 29 Jul 2024 12:34:32 -0700 Subject: [PATCH 095/105] Fix LogsIndexModeFullClusterRestartIT (#111362) --- muted-tests.yml | 3 --- .../upgrades/LogsIndexModeFullClusterRestartIT.java | 5 ++++- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index d106ca3c9d701..51f1b56786e86 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -105,9 +105,6 @@ tests: - class: org.elasticsearch.action.admin.indices.create.SplitIndexIT method: testSplitIndexPrimaryTerm issue: https://github.com/elastic/elasticsearch/issues/111282 -- class: org.elasticsearch.upgrades.LogsIndexModeFullClusterRestartIT - method: testLogsIndexing {cluster=UPGRADED} - issue: https://github.com/elastic/elasticsearch/issues/111306 - class: org.elasticsearch.xpack.ml.integration.DatafeedJobsRestIT issue: https://github.com/elastic/elasticsearch/issues/111319 - class: org.elasticsearch.xpack.esql.analysis.VerifierTests diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java index da168f2999086..739b4e302bb54 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java @@ -19,6 +19,7 @@ import org.elasticsearch.test.MapMatcher; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.hamcrest.Matcher; import org.hamcrest.Matchers; import org.junit.ClassRule; @@ -37,6 +38,7 @@ public class LogsIndexModeFullClusterRestartIT extends ParameterizedFullClusterR @ClassRule public static final ElasticsearchCluster cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) + .version(getOldClusterTestVersion()) .module("constant-keyword") .module("data-streams") .module("mapper-extras") @@ -44,7 +46,6 @@ public class LogsIndexModeFullClusterRestartIT extends ParameterizedFullClusterR .module("x-pack-stack") .setting("xpack.security.enabled", "false") .setting("xpack.license.self_generated.type", "trial") - .setting("cluster.logsdb.enabled", "true") .build(); public LogsIndexModeFullClusterRestartIT(@Name("cluster") FullClusterRestartUpgradeStatus upgradeStatus) { @@ 
-123,6 +124,8 @@ protected ElasticsearchCluster getUpgradeCluster() { }"""; public void testLogsIndexing() throws IOException { + assumeTrue("Test uses data streams", oldClusterHasFeature(RestTestLegacyFeatures.DATA_STREAMS_SUPPORTED)); + if (isRunningAgainstOldCluster()) { assertOK(client().performRequest(putTemplate(client(), "logs-template", STANDARD_TEMPLATE))); assertOK(client().performRequest(createDataStream("logs-apache-production"))); From 69c96974de548ee3bfbfed482f0c205e18d42c8d Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Mon, 29 Jul 2024 16:01:56 -0400 Subject: [PATCH 096/105] Ensure vector similarity correctly limits inner_hits returned for nested kNN (#111363) For nested kNN we support not only similarity thresholds, but also multi-passage search while retrieving more than one nearest passage. However, the inner_hits retrieved for the kNN search would ignore the similarity restriction, meaning the inner hits would return all passages, not just the ones within the similarity limit, which is confusing. closes: https://github.com/elastic/elasticsearch/issues/111093 --- docs/changelog/111363.yaml | 6 +++ .../search.vectors/100_knn_nested_search.yml | 50 +++++++++++++++++++ .../org/elasticsearch/TransportVersions.java | 2 + .../action/search/DfsQueryPhase.java | 3 +- .../vectors/DenseVectorFieldMapper.java | 8 ++- .../search/vectors/ExactKnnQueryBuilder.java | 37 ++++++++------ .../vectors/KnnScoreDocQueryBuilder.java | 29 +++++++++-- .../search/vectors/KnnSearchBuilder.java | 4 ++ .../search/vectors/KnnVectorQueryBuilder.java | 2 +- .../action/search/DfsQueryPhaseTests.java | 6 ++- .../vectors/DenseVectorFieldTypeTests.java | 4 +- ...AbstractKnnVectorQueryBuilderTestCase.java | 17 +++++++ .../vectors/ExactKnnQueryBuilderTests.java | 23 ++++++++- .../vectors/KnnScoreDocQueryBuilderTests.java | 19 ++++--- 14 files changed, 176 insertions(+), 34 deletions(-) create mode 100644 docs/changelog/111363.yaml diff --git a/docs/changelog/111363.yaml b/docs/changelog/111363.yaml new file mode 100644 index 0000000000000..2cb3c5342ea5c --- /dev/null +++ b/docs/changelog/111363.yaml @@ -0,0 +1,6 @@ +pr: 111363 +summary: Ensure vector similarity correctly limits `inner_hits` returned for nested + kNN +area: Vector Search +type: bug +issues: [111093] diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/100_knn_nested_search.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/100_knn_nested_search.yml index 72c6abab22600..d627be2fb15c3 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/100_knn_nested_search.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/100_knn_nested_search.yml @@ -411,3 +411,53 @@ setup: - match: {hits.total.value: 1} - match: {hits.hits.0._id: "2"} +--- +"nested Knn search with required similarity appropriately filters inner_hits": + - requires: + cluster_features: "gte_v8.16.0" + reason: 'bugfix for 8.16' + + - do: + search: + index: test + body: + query: + nested: + path: nested + inner_hits: + size: 3 + _source: false + fields: + - nested.paragraph_id + query: + knn: + field: nested.vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + num_candidates: 3 + similarity: 10.5 + + - match: {hits.total.value: 1} + - match: {hits.hits.0._id: "2"} + - length: {hits.hits.0.inner_hits.nested.hits.hits: 1} + - match: {hits.hits.0.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0"} + + - do: + search: + index: test 
body: + knn: + field: nested.vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + num_candidates: 3 + k: 3 + similarity: 10.5 + inner_hits: + size: 3 + _source: false + fields: + - nested.paragraph_id + + - match: {hits.total.value: 1} + - match: {hits.hits.0._id: "2"} + - length: {hits.hits.0.inner_hits.nested.hits.hits: 1} + - match: {hits.hits.0.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0"} diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 34324ec2a1c16..7d1204d1a51c0 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -169,6 +169,7 @@ static TransportVersion def(int id) { public static final TransportVersion VERSIONED_MASTER_NODE_REQUESTS = def(8_701_00_0); public static final TransportVersion ML_INFERENCE_AMAZON_BEDROCK_ADDED = def(8_702_00_0); public static final TransportVersion ENTERPRISE_GEOIP_DOWNLOADER_BACKPORT_8_15 = def(8_702_00_1); + public static final TransportVersion FIX_VECTOR_SIMILARITY_INNER_HITS_BACKPORT_8_15 = def(8_702_00_2); public static final TransportVersion ML_INFERENCE_DONT_DELETE_WHEN_SEMANTIC_TEXT_EXISTS = def(8_703_00_0); public static final TransportVersion INFERENCE_ADAPTIVE_ALLOCATIONS = def(8_704_00_0); public static final TransportVersion INDEX_REQUEST_UPDATE_BY_SCRIPT_ORIGIN = def(8_705_00_0); @@ -179,6 +180,7 @@ static TransportVersion def(int id) { public static final TransportVersion MASTER_NODE_METRICS = def(8_710_00_0); public static final TransportVersion SEGMENT_LEVEL_FIELDS_STATS = def(8_711_00_0); public static final TransportVersion ML_ADD_DETECTION_RULE_PARAMS = def(8_712_00_0); + public static final TransportVersion FIX_VECTOR_SIMILARITY_INNER_HITS = def(8_713_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java b/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java index 9ddac7f13eb51..7a33eaa59eb03 100644 --- a/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java @@ -155,7 +155,8 @@ ShardSearchRequest rewriteShardSearchRequest(ShardSearchRequest request) { QueryBuilder query = new KnnScoreDocQueryBuilder( scoreDocs.toArray(Lucene.EMPTY_SCORE_DOCS), source.knnSearch().get(i).getField(), - source.knnSearch().get(i).getQueryVector() + source.knnSearch().get(i).getQueryVector(), + source.knnSearch().get(i).getSimilarity() ).boost(source.knnSearch().get(i).boost()).queryName(source.knnSearch().get(i).queryName()); if (nestedPath != null) { query = new NestedQueryBuilder(nestedPath, query, ScoreMode.Max).innerHit(source.knnSearch().get(i).innerHit()); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index 8ffe4b4cc4a66..81fb7990f09eb 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -1789,17 +1789,21 @@ public Query termQuery(Object value, SearchExecutionContext context) { throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support term queries"); } - public Query createExactKnnQuery(VectorData queryVector) { + public Query createExactKnnQuery(VectorData queryVector, Float vectorSimilarity) { if (isIndexed() == false) { throw new IllegalArgumentException( "to perform knn search on field [" + name() + "], its mapping must have [index] set to [true]" ); } - return switch (elementType) { + Query knnQuery = switch (elementType) { case BYTE -> createExactKnnByteQuery(queryVector.asByteVector()); case FLOAT -> createExactKnnFloatQuery(queryVector.asFloatVector()); case BIT -> createExactKnnBitQuery(queryVector.asByteVector()); }; + if (vectorSimilarity != null) { + knnQuery = new VectorSimilarityQuery(knnQuery, vectorSimilarity, similarity.score(vectorSimilarity, elementType, dims)); + } + return knnQuery; } private Query createExactKnnBitQuery(byte[] queryVector) { diff --git a/server/src/main/java/org/elasticsearch/search/vectors/ExactKnnQueryBuilder.java b/server/src/main/java/org/elasticsearch/search/vectors/ExactKnnQueryBuilder.java index 4ac8d14c0b79d..4f36ddbedf23b 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/ExactKnnQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/ExactKnnQueryBuilder.java @@ -32,6 +32,7 @@ public class ExactKnnQueryBuilder extends AbstractQueryBuilder rewrittenQueries = new ArrayList<>(filterQueries.size()); diff --git a/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java index 47dbe8f126556..3c698f1b790e5 100644 --- a/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java @@ -353,12 +353,14 @@ public void testRewriteShardSearchRequestWithRank() { KnnScoreDocQueryBuilder ksdqb0 = new KnnScoreDocQueryBuilder( new ScoreDoc[] { new ScoreDoc(1, 3.0f, 1), new ScoreDoc(4, 1.5f, 1) }, "vector", - 
VectorData.fromFloats(new float[] { 0.0f }) + VectorData.fromFloats(new float[] { 0.0f }), + null ); KnnScoreDocQueryBuilder ksdqb1 = new KnnScoreDocQueryBuilder( new ScoreDoc[] { new ScoreDoc(1, 2.0f, 1) }, "vector2", - VectorData.fromFloats(new float[] { 0.0f }) + VectorData.fromFloats(new float[] { 0.0f }), + null ); assertEquals( List.of(bm25, ksdqb0, ksdqb1), diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldTypeTests.java index 9ee895f6de003..9ef2d0df90cce 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldTypeTests.java @@ -215,7 +215,7 @@ public void testExactKnnQuery() { for (int i = 0; i < dims; i++) { queryVector[i] = randomFloat(); } - Query query = field.createExactKnnQuery(VectorData.fromFloats(queryVector)); + Query query = field.createExactKnnQuery(VectorData.fromFloats(queryVector), null); assertTrue(query instanceof DenseVectorQuery.Floats); } { @@ -233,7 +233,7 @@ public void testExactKnnQuery() { for (int i = 0; i < dims; i++) { queryVector[i] = randomByte(); } - Query query = field.createExactKnnQuery(VectorData.fromBytes(queryVector)); + Query query = field.createExactKnnQuery(VectorData.fromBytes(queryVector), null); assertTrue(query instanceof DenseVectorQuery.Bytes); } } diff --git a/server/src/test/java/org/elasticsearch/search/vectors/AbstractKnnVectorQueryBuilderTestCase.java b/server/src/test/java/org/elasticsearch/search/vectors/AbstractKnnVectorQueryBuilderTestCase.java index f0899384dbc5e..565a09dbff0d3 100644 --- a/server/src/test/java/org/elasticsearch/search/vectors/AbstractKnnVectorQueryBuilderTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/vectors/AbstractKnnVectorQueryBuilderTestCase.java @@ -22,6 +22,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; +import org.elasticsearch.index.query.InnerHitsRewriteContext; import org.elasticsearch.index.query.MatchNoneQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; @@ -306,6 +307,22 @@ private void assertBWCSerialization(QueryBuilder newQuery, QueryBuilder bwcQuery } } + public void testRewriteForInnerHits() throws IOException { + SearchExecutionContext context = createSearchExecutionContext(); + InnerHitsRewriteContext innerHitsRewriteContext = new InnerHitsRewriteContext(context.getParserConfig(), System::currentTimeMillis); + KnnVectorQueryBuilder queryBuilder = createTestQueryBuilder(); + queryBuilder.boost(randomFloat()); + queryBuilder.queryName(randomAlphaOfLength(10)); + QueryBuilder rewritten = queryBuilder.rewrite(innerHitsRewriteContext); + assertTrue(rewritten instanceof ExactKnnQueryBuilder); + ExactKnnQueryBuilder exactKnnQueryBuilder = (ExactKnnQueryBuilder) rewritten; + assertEquals(queryBuilder.queryVector(), exactKnnQueryBuilder.getQuery()); + assertEquals(queryBuilder.getFieldName(), exactKnnQueryBuilder.getField()); + assertEquals(queryBuilder.boost(), exactKnnQueryBuilder.boost(), 0.0001f); + assertEquals(queryBuilder.queryName(), exactKnnQueryBuilder.queryName()); + assertEquals(queryBuilder.getVectorSimilarity(), exactKnnQueryBuilder.vectorSimilarity()); + } + public void testRewriteWithQueryVectorBuilder() 
throws Exception { int dims = randomInt(1024); float[] expectedArray = new float[dims]; diff --git a/server/src/test/java/org/elasticsearch/search/vectors/ExactKnnQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/search/vectors/ExactKnnQueryBuilderTests.java index 5f4fb61718a7e..c302dc7bd63c9 100644 --- a/server/src/test/java/org/elasticsearch/search/vectors/ExactKnnQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/vectors/ExactKnnQueryBuilderTests.java @@ -53,12 +53,12 @@ protected ExactKnnQueryBuilder doCreateTestQueryBuilder() { for (int i = 0; i < VECTOR_DIMENSION; i++) { query[i] = randomFloat(); } - return new ExactKnnQueryBuilder(query, VECTOR_FIELD); + return new ExactKnnQueryBuilder(VectorData.fromFloats(query), VECTOR_FIELD, randomBoolean() ? randomFloat() : null); } @Override public void testValidOutput() { - ExactKnnQueryBuilder query = new ExactKnnQueryBuilder(new float[] { 1.0f, 2.0f, 3.0f }, "field"); + ExactKnnQueryBuilder query = new ExactKnnQueryBuilder(VectorData.fromFloats(new float[] { 1.0f, 2.0f, 3.0f }), "field", null); String expected = """ { "exact_knn" : { @@ -71,10 +71,29 @@ public void testValidOutput() { } }"""; assertEquals(expected, query.toString()); + query = new ExactKnnQueryBuilder(VectorData.fromFloats(new float[] { 1.0f, 2.0f, 3.0f }), "field", 1f); + expected = """ + { + "exact_knn" : { + "query" : [ + 1.0, + 2.0, + 3.0 + ], + "field" : "field", + "similarity" : 1.0 + } + }"""; + assertEquals(expected, query.toString()); } @Override protected void doAssertLuceneQuery(ExactKnnQueryBuilder queryBuilder, Query query, SearchExecutionContext context) throws IOException { + if (queryBuilder.vectorSimilarity() != null) { + assertTrue(query instanceof VectorSimilarityQuery); + VectorSimilarityQuery vectorSimilarityQuery = (VectorSimilarityQuery) query; + query = vectorSimilarityQuery.getInnerKnnQuery(); + } assertTrue(query instanceof DenseVectorQuery.Floats); DenseVectorQuery.Floats denseVectorQuery = (DenseVectorQuery.Floats) query; assertEquals(VECTOR_FIELD, denseVectorQuery.field); diff --git a/server/src/test/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilderTests.java index a558081c2d16f..c09ed24668963 100644 --- a/server/src/test/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilderTests.java @@ -56,7 +56,8 @@ protected KnnScoreDocQueryBuilder doCreateTestQueryBuilder() { return new KnnScoreDocQueryBuilder( scoreDocs.toArray(new ScoreDoc[0]), randomBoolean() ? "field" : null, - randomBoolean() ? VectorData.fromFloats(randomVector(10)) : null + randomBoolean() ? VectorData.fromFloats(randomVector(10)) : null, + randomBoolean() ? randomFloat() : null ); } @@ -65,7 +66,8 @@ public void testValidOutput() { KnnScoreDocQueryBuilder query = new KnnScoreDocQueryBuilder( new ScoreDoc[] { new ScoreDoc(0, 4.25f), new ScoreDoc(5, 1.6f) }, "field", - VectorData.fromFloats(new float[] { 1.0f, 2.0f }) + VectorData.fromFloats(new float[] { 1.0f, 2.0f }), + null ); String expected = """ { @@ -155,7 +157,8 @@ public void testRewriteToMatchNone() throws IOException { KnnScoreDocQueryBuilder queryBuilder = new KnnScoreDocQueryBuilder( new ScoreDoc[0], randomBoolean() ? "field" : null, - randomBoolean() ? VectorData.fromFloats(randomVector(10)) : null + randomBoolean() ? 
VectorData.fromFloats(randomVector(10)) : null, + randomBoolean() ? randomFloat() : null ); QueryRewriteContext context = randomBoolean() ? new InnerHitsRewriteContext(createSearchExecutionContext().getParserConfig(), System::currentTimeMillis) @@ -169,7 +172,8 @@ public void testRewriteForInnerHits() throws IOException { KnnScoreDocQueryBuilder queryBuilder = new KnnScoreDocQueryBuilder( new ScoreDoc[] { new ScoreDoc(0, 4.25f), new ScoreDoc(5, 1.6f) }, randomAlphaOfLength(10), - VectorData.fromFloats(randomVector(10)) + VectorData.fromFloats(randomVector(10)), + randomBoolean() ? randomFloat() : null ); queryBuilder.boost(randomFloat()); queryBuilder.queryName(randomAlphaOfLength(10)); @@ -180,6 +184,7 @@ public void testRewriteForInnerHits() throws IOException { assertEquals(queryBuilder.fieldName(), exactKnnQueryBuilder.getField()); assertEquals(queryBuilder.boost(), exactKnnQueryBuilder.boost(), 0.0001f); assertEquals(queryBuilder.queryName(), exactKnnQueryBuilder.queryName()); + assertEquals(queryBuilder.vectorSimilarity(), exactKnnQueryBuilder.vectorSimilarity()); } @Override @@ -221,7 +226,8 @@ public void testScoreDocQueryWeightCount() throws IOException { KnnScoreDocQueryBuilder queryBuilder = new KnnScoreDocQueryBuilder( scoreDocs, "field", - VectorData.fromFloats(randomVector(10)) + VectorData.fromFloats(randomVector(10)), + null ); Query query = queryBuilder.doToQuery(context); final Weight w = query.createWeight(searcher, ScoreMode.TOP_SCORES, 1.0f); @@ -268,7 +274,8 @@ public void testScoreDocQuery() throws IOException { KnnScoreDocQueryBuilder queryBuilder = new KnnScoreDocQueryBuilder( scoreDocs, "field", - VectorData.fromFloats(randomVector(10)) + VectorData.fromFloats(randomVector(10)), + null ); final Query query = queryBuilder.doToQuery(context); final Weight w = query.createWeight(searcher, ScoreMode.TOP_SCORES, 1.0f); From d568949d660c2395bb9a7108daba038d2e6d0ad9 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 30 Jul 2024 06:13:33 +1000 Subject: [PATCH 097/105] Mute org.elasticsearch.repositories.blobstore.testkit.AzureSnapshotRepoTestKitIT testRepositoryAnalysis #111280 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 51f1b56786e86..9a29329cae470 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -132,6 +132,9 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/111279 - class: org.elasticsearch.repositories.azure.RepositoryAzureClientYamlTestSuiteIT issue: https://github.com/elastic/elasticsearch/issues/111345 +- class: org.elasticsearch.repositories.blobstore.testkit.AzureSnapshotRepoTestKitIT + method: testRepositoryAnalysis + issue: https://github.com/elastic/elasticsearch/issues/111280 # Examples: # From b601e3bcd2936a9b93a733fc3d7be9a27b276d32 Mon Sep 17 00:00:00 2001 From: Samiul Monir <150824886+Samiul-TheSoccerFan@users.noreply.github.com> Date: Mon, 29 Jul 2024 16:20:49 -0400 Subject: [PATCH 098/105] Update semantic_text field to support indexing numeric and boolean data types (#111284) * adding support for additional data types * Adding unit tests for additional data types * updating integration tests to feed random data types * Fix code styles by running spotlessApply * Adding yml tests for additional data type support * fix failed yml tests and added tests for dense and boolean type * Removed util class and moved the random function into own specific test files * rewrite the terms to match most up to date 
terminology * Update docs/changelog/111284.yaml * update changelog yml text to fit into one line * limit changelog to only 1 area * Updating text_expansion with sparse_embedding to keep the terminology up to date * refactoring randomSemanticTextInput function * Update docs/changelog/111284.yaml * Adding comments and addressing nitpicks --- docs/changelog/111284.yaml | 6 + .../ShardBulkInferenceActionFilterIT.java | 5 +- .../ShardBulkInferenceActionFilter.java | 8 +- .../ShardBulkInferenceActionFilterTests.java | 37 ++++++- .../mapper/SemanticTextFieldTests.java | 30 ++++- .../inference/30_semantic_text_inference.yml | 89 ++++++++++++++- .../test/inference/40_semantic_text_query.yml | 104 ++++++++++++++++++ 7 files changed, 267 insertions(+), 12 deletions(-) create mode 100644 docs/changelog/111284.yaml diff --git a/docs/changelog/111284.yaml b/docs/changelog/111284.yaml new file mode 100644 index 0000000000000..f87649a134af6 --- /dev/null +++ b/docs/changelog/111284.yaml @@ -0,0 +1,6 @@ +pr: 111284 +summary: Update `semantic_text` field to support indexing numeric and boolean data + types +area: Mapping +type: enhancement +issues: [] diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java index 8da1aaabd517a..73c0f6d4c7685 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java @@ -33,6 +33,7 @@ import java.util.Locale; import java.util.Map; +import static org.elasticsearch.xpack.inference.mapper.SemanticTextFieldTests.randomSemanticTextInput; import static org.hamcrest.Matchers.equalTo; public class ShardBulkInferenceActionFilterIT extends ESIntegTestCase { @@ -93,8 +94,8 @@ public void testBulkOperations() throws Exception { String id = Long.toString(totalDocs); boolean isIndexRequest = randomBoolean(); Map source = new HashMap<>(); - source.put("sparse_field", isIndexRequest && rarely() ? null : randomAlphaOfLengthBetween(0, 1000)); - source.put("dense_field", isIndexRequest && rarely() ? null : randomAlphaOfLengthBetween(0, 1000)); + source.put("sparse_field", isIndexRequest && rarely() ? null : randomSemanticTextInput()); + source.put("dense_field", isIndexRequest && rarely() ? null : randomSemanticTextInput()); if (isIndexRequest) { bulkReqBuilder.add(new IndexRequestBuilder(client()).setIndex(INDEX_NAME).setId(id).setSource(source)); totalDocs++; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java index f1a590e647dbc..ade0748ef10bf 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java @@ -492,12 +492,16 @@ private Map> createFieldInferenceRequests(Bu * If {@code valueObj} is not a string or a collection of strings, it throws an ElasticsearchStatusException. 
*/ private static List nodeStringValues(String field, Object valueObj) { - if (valueObj instanceof String value) { + if (valueObj instanceof Number || valueObj instanceof Boolean) { + return List.of(valueObj.toString()); + } else if (valueObj instanceof String value) { return List.of(value); } else if (valueObj instanceof Collection values) { List valuesString = new ArrayList<>(); for (var v : values) { - if (v instanceof String value) { + if (v instanceof Number || v instanceof Boolean) { + valuesString.add(v.toString()); + } else if (v instanceof String value) { valuesString.add(value); } else { throw new ElasticsearchStatusException( diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java index f63a6369b21a6..d78ea7933e836 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java @@ -35,6 +35,7 @@ import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; import org.elasticsearch.xpack.core.inference.results.InferenceChunkedSparseEmbeddingResults; +import org.elasticsearch.xpack.inference.mapper.SemanticTextField; import org.elasticsearch.xpack.inference.model.TestModel; import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.junit.After; @@ -56,7 +57,9 @@ import static org.elasticsearch.xpack.inference.action.filter.ShardBulkInferenceActionFilter.DEFAULT_BATCH_SIZE; import static org.elasticsearch.xpack.inference.action.filter.ShardBulkInferenceActionFilter.getIndexRequestOrNull; import static org.elasticsearch.xpack.inference.mapper.SemanticTextFieldTests.randomSemanticText; +import static org.elasticsearch.xpack.inference.mapper.SemanticTextFieldTests.randomSemanticTextInput; import static org.elasticsearch.xpack.inference.mapper.SemanticTextFieldTests.randomSparseEmbeddings; +import static org.elasticsearch.xpack.inference.mapper.SemanticTextFieldTests.semanticTextFieldFromChunkedInferenceResults; import static org.elasticsearch.xpack.inference.mapper.SemanticTextFieldTests.toChunkedResult; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -331,16 +334,34 @@ private static BulkItemRequest[] randomBulkItemRequest( for (var entry : fieldInferenceMap.values()) { String field = entry.getName(); var model = modelMap.get(entry.getInferenceId()); - String text = randomAlphaOfLengthBetween(10, 20); - docMap.put(field, text); - expectedDocMap.put(field, text); + Object inputObject = randomSemanticTextInput(); + String inputText = inputObject.toString(); + docMap.put(field, inputObject); + expectedDocMap.put(field, inputText); if (model == null) { // ignore results, the doc should fail with a resource not found exception continue; } - var result = randomSemanticText(field, model, List.of(text), requestContentType); - model.putResult(text, toChunkedResult(result)); - expectedDocMap.put(field, result); + + SemanticTextField semanticTextField; + // The model is not field aware and that is why we are skipping the embedding generation process for existing values. 
+ // This prevents a situation where embeddings in the expected docMap do not match those in the model, which could happen if + // embeddings were overwritten. + if (model.hasResult(inputText)) { + ChunkedInferenceServiceResults results = model.getResults(inputText); + semanticTextField = semanticTextFieldFromChunkedInferenceResults( + field, + model, + List.of(inputText), + results, + requestContentType + ); + } else { + semanticTextField = randomSemanticText(field, model, List.of(inputText), requestContentType); + model.putResult(inputText, toChunkedResult(semanticTextField)); + } + + expectedDocMap.put(field, semanticTextField); } int requestId = randomIntBetween(0, Integer.MAX_VALUE); @@ -383,5 +404,9 @@ ChunkedInferenceServiceResults getResults(String text) { void putResult(String text, ChunkedInferenceServiceResults result) { resultMap.put(text, result); } + + boolean hasResult(String text) { + return resultMap.containsKey(text); + } } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldTests.java index 2a64f77e28756..563093930c358 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldTests.java @@ -86,7 +86,7 @@ protected void assertEqualInstances(SemanticTextField expectedInstance, Semantic @Override protected SemanticTextField createTestInstance() { - List rawValues = randomList(1, 5, () -> randomAlphaOfLengthBetween(10, 20)); + List rawValues = randomList(1, 5, () -> randomSemanticTextInput().toString()); try { // try catch required for override return randomSemanticText(NAME, TestModel.createRandomInstance(), rawValues, randomFrom(XContentType.values())); } catch (IOException e) { @@ -192,6 +192,16 @@ public static SemanticTextField randomSemanticText(String fieldName, Model model case SPARSE_EMBEDDING -> randomSparseEmbeddings(inputs); default -> throw new AssertionError("invalid task type: " + model.getTaskType().name()); }; + return semanticTextFieldFromChunkedInferenceResults(fieldName, model, inputs, results, contentType); + } + + public static SemanticTextField semanticTextFieldFromChunkedInferenceResults( + String fieldName, + Model model, + List inputs, + ChunkedInferenceServiceResults results, + XContentType contentType + ) { return new SemanticTextField( fieldName, inputs, @@ -204,6 +214,24 @@ public static SemanticTextField randomSemanticText(String fieldName, Model model ); } + /** + * Returns a randomly generated object for Semantic Text tests purpose. 
+ */ + public static Object randomSemanticTextInput() { + if (rarely()) { + return switch (randomIntBetween(0, 4)) { + case 0 -> randomInt(); + case 1 -> randomLong(); + case 2 -> randomFloat(); + case 3 -> randomBoolean(); + case 4 -> randomDouble(); + default -> throw new IllegalStateException("Illegal state while generating random semantic text input"); + }; + } else { + return randomAlphaOfLengthBetween(10, 20); + } + } + public static ChunkedInferenceServiceResults toChunkedResult(SemanticTextField field) throws IOException { switch (field.inference().modelSettings().taskType()) { case SPARSE_EMBEDDING -> { diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml index f467691600766..f58a5c33fd85d 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml @@ -51,7 +51,7 @@ setup: type: text --- -"Calculates text expansion and embedding results for new documents": +"Calculates sparse embedding and text embedding results for new documents": - do: index: index: test-index @@ -74,6 +74,93 @@ setup: - match: { _source.dense_field.inference.chunks.0.text: "another inference test" } - match: { _source.non_inference_field: "non inference test" } +--- +"Calculates sparse embedding and text embedding results for new documents with integer value": + - do: + index: + index: test-index + id: doc_1 + body: + sparse_field: 75 + dense_field: 100 + + - do: + get: + index: test-index + id: doc_1 + + - match: { _source.sparse_field.text: "75" } + - exists: _source.sparse_field.inference.chunks.0.embeddings + - match: { _source.sparse_field.inference.chunks.0.text: "75" } + - match: { _source.dense_field.text: "100" } + - exists: _source.dense_field.inference.chunks.0.embeddings + - match: { _source.dense_field.inference.chunks.0.text: "100" } + +--- +"Calculates sparse embedding and text embedding results for new documents with boolean value": + - do: + index: + index: test-index + id: doc_1 + body: + sparse_field: true + dense_field: false + + - do: + get: + index: test-index + id: doc_1 + + - match: { _source.sparse_field.text: "true" } + - exists: _source.sparse_field.inference.chunks.0.embeddings + - match: { _source.sparse_field.inference.chunks.0.text: "true" } + - match: { _source.dense_field.text: "false" } + - exists: _source.dense_field.inference.chunks.0.embeddings + - match: { _source.dense_field.inference.chunks.0.text: "false" } + +--- +"Calculates sparse embedding and text embedding results for new documents with a collection of mixed data types": + - do: + index: + index: test-index + id: doc_1 + body: + sparse_field: [false, 75, "inference test", 13.49] + dense_field: [true, 49.99, "another inference test", 5654] + + - do: + get: + index: test-index + id: doc_1 + + - length: { _source.sparse_field.text: 4 } + - match: { _source.sparse_field.text.0: "false" } + - match: { _source.sparse_field.text.1: "75" } + - match: { _source.sparse_field.text.2: "inference test" } + - match: { _source.sparse_field.text.3: "13.49" } + - exists: _source.sparse_field.inference.chunks.0.embeddings + - exists: _source.sparse_field.inference.chunks.1.embeddings + - exists: _source.sparse_field.inference.chunks.2.embeddings + - exists: 
_source.sparse_field.inference.chunks.3.embeddings + - match: { _source.sparse_field.inference.chunks.0.text: "false" } + - match: { _source.sparse_field.inference.chunks.1.text: "75" } + - match: { _source.sparse_field.inference.chunks.2.text: "inference test" } + - match: { _source.sparse_field.inference.chunks.3.text: "13.49" } + + - length: { _source.dense_field.text: 4 } + - match: { _source.dense_field.text.0: "true" } + - match: { _source.dense_field.text.1: "49.99" } + - match: { _source.dense_field.text.2: "another inference test" } + - match: { _source.dense_field.text.3: "5654" } + - exists: _source.dense_field.inference.chunks.0.embeddings + - exists: _source.dense_field.inference.chunks.1.embeddings + - exists: _source.dense_field.inference.chunks.2.embeddings + - exists: _source.dense_field.inference.chunks.3.embeddings + - match: { _source.dense_field.inference.chunks.0.text: "true" } + - match: { _source.dense_field.inference.chunks.1.text: "49.99" } + - match: { _source.dense_field.inference.chunks.2.text: "another inference test" } + - match: { _source.dense_field.inference.chunks.3.text: "5654" } + --- "Inference fields do not create new mappings": - do: diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml index 5ee7a943c4d35..932ee4854f445 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml @@ -90,6 +90,58 @@ setup: - close_to: { hits.hits.0._score: { value: 3.7837332e17, error: 1e10 } } - length: { hits.hits.0._source.inference_field.inference.chunks: 2 } +--- +"Numeric query using a sparse embedding model": + - skip: + features: [ "headers", "close_to" ] + + - do: + index: + index: test-sparse-index + id: doc_1 + body: + inference_field: [40, 49.678] + refresh: true + + - do: + search: + index: test-sparse-index + body: + query: + semantic: + field: "inference_field" + query: "40" + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + - length: { hits.hits.0._source.inference_field.inference.chunks: 2 } + +--- +"Boolean query using a sparse embedding model": + - skip: + features: [ "headers", "close_to" ] + + - do: + index: + index: test-sparse-index + id: doc_1 + body: + inference_field: true + refresh: true + + - do: + search: + index: test-sparse-index + body: + query: + semantic: + field: "inference_field" + query: "true" + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + - length: { hits.hits.0._source.inference_field.inference.chunks: 1 } + --- "Query using a dense embedding model": - skip: @@ -121,6 +173,58 @@ setup: - close_to: { hits.hits.0._score: { value: 1.0, error: 0.0001 } } - length: { hits.hits.0._source.inference_field.inference.chunks: 2 } +--- +"Numeric query using a dense embedding model": + - skip: + features: [ "headers", "close_to" ] + + - do: + index: + index: test-dense-index + id: doc_1 + body: + inference_field: [45.1, 100] + refresh: true + + - do: + search: + index: test-dense-index + body: + query: + semantic: + field: "inference_field" + query: "45.1" + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + - length: { hits.hits.0._source.inference_field.inference.chunks: 2 } + +--- +"Boolean query using a dense 
embedding model": + - skip: + features: [ "headers", "close_to" ] + + - do: + index: + index: test-dense-index + id: doc_1 + body: + inference_field: false + refresh: true + + - do: + search: + index: test-dense-index + body: + query: + semantic: + field: "inference_field" + query: "false" + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + - length: { hits.hits.0._source.inference_field.inference.chunks: 1 } + --- "Query using a dense embedding model that uses byte embeddings": - skip: From 73a1dd6932c5b4d052dae2f9b605f853eb236194 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 30 Jul 2024 06:28:37 +1000 Subject: [PATCH 099/105] Mute org.elasticsearch.xpack.repositories.metering.azure.AzureRepositoriesMeteringIT org.elasticsearch.xpack.repositories.metering.azure.AzureRepositoriesMeteringIT #111307 --- muted-tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 9a29329cae470..c2a8c32694d61 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -135,6 +135,8 @@ tests: - class: org.elasticsearch.repositories.blobstore.testkit.AzureSnapshotRepoTestKitIT method: testRepositoryAnalysis issue: https://github.com/elastic/elasticsearch/issues/111280 +- class: org.elasticsearch.xpack.repositories.metering.azure.AzureRepositoriesMeteringIT + issue: https://github.com/elastic/elasticsearch/issues/111307 # Examples: # From 90f6e8c96ba3948a1a081e9eb5ac711d88594b96 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 30 Jul 2024 07:03:15 +1000 Subject: [PATCH 100/105] Mute org.elasticsearch.xpack.esql.expression.function.aggregate.ValuesTests testGroupingAggregate {TestCase=} #111429 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index c2a8c32694d61..a4418c34e2589 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -137,6 +137,9 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/111280 - class: org.elasticsearch.xpack.repositories.metering.azure.AzureRepositoriesMeteringIT issue: https://github.com/elastic/elasticsearch/issues/111307 +- class: org.elasticsearch.xpack.esql.expression.function.aggregate.ValuesTests + method: testGroupingAggregate {TestCase=} + issue: https://github.com/elastic/elasticsearch/issues/111429 # Examples: # From 65c2fac99efc655b56b0788adf5c672f5388860b Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 30 Jul 2024 07:03:22 +1000 Subject: [PATCH 101/105] Mute org.elasticsearch.xpack.esql.expression.function.aggregate.ValuesTests testGroupingAggregate {TestCase=} #111428 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index a4418c34e2589..eb504dfb10219 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -140,6 +140,9 @@ tests: - class: org.elasticsearch.xpack.esql.expression.function.aggregate.ValuesTests method: testGroupingAggregate {TestCase=} issue: https://github.com/elastic/elasticsearch/issues/111429 +- class: org.elasticsearch.xpack.esql.expression.function.aggregate.ValuesTests + method: testGroupingAggregate {TestCase=} + issue: https://github.com/elastic/elasticsearch/issues/111428 # Examples: # From f9007c59ee0aa1b47b09a394d67a252a253e5359 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: 
Tue, 30 Jul 2024 07:03:47 +1000 Subject: [PATCH 102/105] Mute org.elasticsearch.xpack.restart.FullClusterRestartIT testSingleDoc {cluster=OLD} #111430 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index eb504dfb10219..1df885cdc72c0 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -143,6 +143,9 @@ tests: - class: org.elasticsearch.xpack.esql.expression.function.aggregate.ValuesTests method: testGroupingAggregate {TestCase=} issue: https://github.com/elastic/elasticsearch/issues/111428 +- class: org.elasticsearch.xpack.restart.FullClusterRestartIT + method: testSingleDoc {cluster=OLD} + issue: https://github.com/elastic/elasticsearch/issues/111430 # Examples: # From fb19b4b098e1e02c44cb9ae55f924d65ab6ffa05 Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 29 Jul 2024 22:14:08 +0100 Subject: [PATCH 103/105] Make `EnrichPolicyRunner` more properly async (#111321) Today `EnrichPolicyRunner` carries its listener in a field, with various morally-async methods masquerading as synchronous ones because they don't accept the listener from the caller as one might expect. This commit removes the `listener` field in favour of passing a listener explicitly between the methods that require it, making it easier to spot listener leaks. --- .../xpack/enrich/EnrichPolicyExecutor.java | 12 +- .../xpack/enrich/EnrichPolicyRunner.java | 68 ++- .../xpack/enrich/EnrichPolicyRunnerTests.java | 541 +++++------------- 3 files changed, 201 insertions(+), 420 deletions(-) diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyExecutor.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyExecutor.java index 2ebe268cc788d..746ae2f4eee2b 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyExecutor.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyExecutor.java @@ -13,6 +13,7 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskRequest; import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskResponse; import org.elasticsearch.client.internal.Client; @@ -126,8 +127,9 @@ public void runPolicyLocally( } task.setStatus(new ExecuteEnrichPolicyStatus(ExecuteEnrichPolicyStatus.PolicyPhases.SCHEDULED)); - Runnable runnable = createPolicyRunner(policyName, policy, enrichIndexName, task, listener); - threadPool.executor(ThreadPool.Names.GENERIC).execute(runnable); + var policyRunner = createPolicyRunner(policyName, policy, enrichIndexName, task); + threadPool.executor(ThreadPool.Names.GENERIC) + .execute(ActionRunnable.wrap(ActionListener.assertOnce(listener), policyRunner::run)); } catch (Exception e) { task.setStatus(new ExecuteEnrichPolicyStatus(ExecuteEnrichPolicyStatus.PolicyPhases.FAILED)); throw e; @@ -206,18 +208,16 @@ public void onFailure(Exception exception) { }); } - private Runnable createPolicyRunner( + private EnrichPolicyRunner createPolicyRunner( String policyName, EnrichPolicy policy, String enrichIndexName, - ExecuteEnrichPolicyTask task, - ActionListener listener + ExecuteEnrichPolicyTask task ) { return new EnrichPolicyRunner( policyName, policy, task, - listener, clusterService, indicesService, client, diff --git 
a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunner.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunner.java index 810fd03f061ea..0891f24feda68 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunner.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunner.java @@ -76,7 +76,7 @@ import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.core.ClientHelper.ENRICH_ORIGIN; -public class EnrichPolicyRunner implements Runnable { +public class EnrichPolicyRunner { private static final Logger logger = LogManager.getLogger(EnrichPolicyRunner.class); @@ -92,7 +92,6 @@ public class EnrichPolicyRunner implements Runnable { private final String policyName; private final EnrichPolicy policy; private final ExecuteEnrichPolicyTask task; - private final ActionListener listener; private final ClusterService clusterService; private final IndicesService indicesService; private final Client client; @@ -105,7 +104,6 @@ public class EnrichPolicyRunner implements Runnable { String policyName, EnrichPolicy policy, ExecuteEnrichPolicyTask task, - ActionListener listener, ClusterService clusterService, IndicesService indicesService, Client client, @@ -117,7 +115,6 @@ public class EnrichPolicyRunner implements Runnable { this.policyName = Objects.requireNonNull(policyName); this.policy = Objects.requireNonNull(policy); this.task = Objects.requireNonNull(task); - this.listener = Objects.requireNonNull(listener); this.clusterService = Objects.requireNonNull(clusterService); this.indicesService = indicesService; this.client = wrapClient(client, policyName, task, clusterService); @@ -127,8 +124,7 @@ public class EnrichPolicyRunner implements Runnable { this.maxForceMergeAttempts = maxForceMergeAttempts; } - @Override - public void run() { + public void run(ActionListener listener) { try { logger.info("Policy [{}]: Running enrich policy", policyName); task.setStatus(new ExecuteEnrichPolicyStatus(ExecuteEnrichPolicyStatus.PolicyPhases.RUNNING)); @@ -139,7 +135,7 @@ public void run() { // This call does not set the origin to ensure that the user executing the policy has permission to access the source index client.admin().indices().getIndex(getIndexRequest, listener.delegateFailureAndWrap((l, getIndexResponse) -> { validateMappings(getIndexResponse); - prepareAndCreateEnrichIndex(toMappings(getIndexResponse), clusterService.getSettings()); + prepareAndCreateEnrichIndex(toMappings(getIndexResponse), clusterService.getSettings(), l); })); } catch (Exception e) { listener.onFailure(e); @@ -204,9 +200,7 @@ static void validateMappings( } } - private record MappingTypeAndFormat(String type, String format) { - - } + private record MappingTypeAndFormat(String type, String format) {} private static MappingTypeAndFormat validateAndGetMappingTypeAndFormat( String fieldName, @@ -436,7 +430,11 @@ static boolean isIndexableField(MapperService mapperService, String field, Strin } } - private void prepareAndCreateEnrichIndex(List> mappings, Settings settings) { + private void prepareAndCreateEnrichIndex( + List> mappings, + Settings settings, + ActionListener listener + ) { int numberOfReplicas = settings.getAsInt(ENRICH_MIN_NUMBER_OF_REPLICAS_NAME, 0); Settings enrichIndexSettings = Settings.builder() .put("index.number_of_shards", 1) @@ -453,23 +451,23 @@ private void prepareAndCreateEnrichIndex(List> mappings, Set .indices() .create( 
createEnrichIndexRequest, - listener.delegateFailure((l, createIndexResponse) -> prepareReindexOperation(enrichIndexName)) + listener.delegateFailure((l, createIndexResponse) -> prepareReindexOperation(enrichIndexName, l)) ); } - private void prepareReindexOperation(final String destinationIndexName) { + private void prepareReindexOperation(final String destinationIndexName, ActionListener listener) { // Check to make sure that the enrich pipeline exists, and create it if it is missing. if (EnrichPolicyReindexPipeline.exists(clusterService.state()) == false) { EnrichPolicyReindexPipeline.create( enrichOriginClient(), - listener.delegateFailure((l, r) -> transferDataToEnrichIndex(destinationIndexName)) + listener.delegateFailure((l, r) -> transferDataToEnrichIndex(destinationIndexName, l)) ); } else { - transferDataToEnrichIndex(destinationIndexName); + transferDataToEnrichIndex(destinationIndexName, listener); } } - private void transferDataToEnrichIndex(final String destinationIndexName) { + private void transferDataToEnrichIndex(final String destinationIndexName, ActionListener listener) { logger.debug("Policy [{}]: Transferring source data to new enrich index [{}]", policyName, destinationIndexName); // Filter down the source fields to just the ones required by the policy final Set retainFields = new HashSet<>(); @@ -540,13 +538,17 @@ public void onResponse(BulkByScrollResponse bulkByScrollResponse) { bulkByScrollResponse.getCreated(), destinationIndexName ); - forceMergeEnrichIndex(destinationIndexName, 1); + forceMergeEnrichIndex(destinationIndexName, 1, delegate); } } }); } - private void forceMergeEnrichIndex(final String destinationIndexName, final int attempt) { + private void forceMergeEnrichIndex( + final String destinationIndexName, + final int attempt, + ActionListener listener + ) { logger.debug( "Policy [{}]: Force merging newly created enrich index [{}] (Attempt {}/{})", policyName, @@ -558,21 +560,29 @@ private void forceMergeEnrichIndex(final String destinationIndexName, final int .indices() .forceMerge( new ForceMergeRequest(destinationIndexName).maxNumSegments(1), - listener.delegateFailure((l, r) -> refreshEnrichIndex(destinationIndexName, attempt)) + listener.delegateFailure((l, r) -> refreshEnrichIndex(destinationIndexName, attempt, l)) ); } - private void refreshEnrichIndex(final String destinationIndexName, final int attempt) { + private void refreshEnrichIndex( + final String destinationIndexName, + final int attempt, + ActionListener listener + ) { logger.debug("Policy [{}]: Refreshing enrich index [{}]", policyName, destinationIndexName); enrichOriginClient().admin() .indices() .refresh( new RefreshRequest(destinationIndexName), - listener.delegateFailure((l, r) -> ensureSingleSegment(destinationIndexName, attempt)) + listener.delegateFailure((l, r) -> ensureSingleSegment(destinationIndexName, attempt, l)) ); } - protected void ensureSingleSegment(final String destinationIndexName, final int attempt) { + protected void ensureSingleSegment( + final String destinationIndexName, + final int attempt, + ActionListener listener + ) { enrichOriginClient().admin() .indices() .segments(new IndicesSegmentsRequest(destinationIndexName), listener.delegateFailureAndWrap((l, indicesSegmentResponse) -> { @@ -644,29 +654,29 @@ protected void ensureSingleSegment(final String destinationIndexName, final int nextAttempt, maxForceMergeAttempts ); - forceMergeEnrichIndex(destinationIndexName, nextAttempt); + forceMergeEnrichIndex(destinationIndexName, nextAttempt, listener); } } 
else { // Force merge down to one segment successful - setIndexReadOnly(destinationIndexName); + setIndexReadOnly(destinationIndexName, listener); } })); } - private void setIndexReadOnly(final String destinationIndexName) { + private void setIndexReadOnly(final String destinationIndexName, ActionListener listener) { logger.debug("Policy [{}]: Setting new enrich index [{}] to be read only", policyName, destinationIndexName); UpdateSettingsRequest request = new UpdateSettingsRequest(destinationIndexName).setPreserveExisting(true) .settings(Settings.builder().put("index.auto_expand_replicas", "0-all").put("index.blocks.write", "true")); enrichOriginClient().admin() .indices() - .updateSettings(request, listener.delegateFailure((l, r) -> waitForIndexGreen(destinationIndexName))); + .updateSettings(request, listener.delegateFailure((l, r) -> waitForIndexGreen(destinationIndexName, l))); } - private void waitForIndexGreen(final String destinationIndexName) { + private void waitForIndexGreen(final String destinationIndexName, ActionListener listener) { ClusterHealthRequest request = new ClusterHealthRequest(destinationIndexName).waitForGreenStatus(); enrichOriginClient().admin() .cluster() - .health(request, listener.delegateFailureAndWrap((l, r) -> updateEnrichPolicyAlias(destinationIndexName))); + .health(request, listener.delegateFailureAndWrap((l, r) -> updateEnrichPolicyAlias(destinationIndexName, l))); } /** @@ -720,7 +730,7 @@ private void validateIndexBeforePromotion(String destinationIndexName, ClusterSt } } - private void updateEnrichPolicyAlias(final String destinationIndexName) { + private void updateEnrichPolicyAlias(final String destinationIndexName, ActionListener listener) { String enrichIndexBase = EnrichPolicy.getBaseName(policyName); logger.debug("Policy [{}]: Promoting new enrich index [{}] to alias [{}]", policyName, destinationIndexName, enrichIndexBase); GetAliasesRequest aliasRequest = new GetAliasesRequest(enrichIndexBase); diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java index 7ba3b356d6015..75e10e7069563 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.action.LatchedActionListener; import org.elasticsearch.action.admin.cluster.health.TransportClusterHealthAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; @@ -34,7 +33,6 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.DefaultShardOperationFailedException; -import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.FilterClient; @@ -79,18 +77,15 @@ import java.util.List; import java.util.Locale; import java.util.Map; -import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import 
java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicReference; -import java.util.function.Consumer; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; +import static org.hamcrest.CoreMatchers.allOf; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.notNullValue; @@ -147,17 +142,10 @@ public void testRunner() throws Exception { final long createTime = randomNonNegativeLong(); String createdEnrichIndex = ".enrich-test1-" + createTime; - final AtomicReference exception = new AtomicReference<>(); - final CountDownLatch latch = new CountDownLatch(1); - ActionListener listener = createTestListener(latch, exception::set); - EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, listener, createdEnrichIndex); + EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, createdEnrichIndex); logger.info("Starting policy run"); - enrichPolicyRunner.run(); - latch.await(); - if (exception.get() != null) { - throw exception.get(); - } + safeExecute(enrichPolicyRunner); // Validate Index definition GetIndexResponse enrichIndex = getGetIndexResponseAndCheck(createdEnrichIndex); @@ -227,17 +215,10 @@ public void testRunnerGeoMatchType() throws Exception { final long createTime = randomNonNegativeLong(); String createdEnrichIndex = ".enrich-test1-" + createTime; - final AtomicReference exception = new AtomicReference<>(); - final CountDownLatch latch = new CountDownLatch(1); - ActionListener listener = createTestListener(latch, exception::set); - EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, listener, createdEnrichIndex); + EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, createdEnrichIndex); logger.info("Starting policy run"); - enrichPolicyRunner.run(); - latch.await(); - if (exception.get() != null) { - throw exception.get(); - } + safeExecute(enrichPolicyRunner); // Validate Index definition GetIndexResponse enrichIndex = getGetIndexResponseAndCheck(createdEnrichIndex); @@ -318,17 +299,10 @@ private void testNumberRangeMatchType(String rangeType) throws Exception { final long createTime = randomNonNegativeLong(); String createdEnrichIndex = ".enrich-test1-" + createTime; - final AtomicReference exception = new AtomicReference<>(); - final CountDownLatch latch = new CountDownLatch(1); - ActionListener listener = createTestListener(latch, exception::set); - EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, listener, createdEnrichIndex); + EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, createdEnrichIndex); logger.info("Starting policy run"); - enrichPolicyRunner.run(); - latch.await(); - if (exception.get() != null) { - throw exception.get(); - } + safeExecute(enrichPolicyRunner); // Validate Index definition GetIndexResponse enrichIndex = getGetIndexResponseAndCheck(createdEnrichIndex); @@ -415,17 +389,10 @@ public void testRunnerRangeTypeWithIpRange() throws Exception { final long createTime = randomNonNegativeLong(); String createdEnrichIndex = ".enrich-test1-" + createTime; - final AtomicReference exception = new AtomicReference<>(); - final CountDownLatch latch = new CountDownLatch(1); - 
ActionListener listener = createTestListener(latch, exception::set); - EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, listener, createdEnrichIndex); + EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, createdEnrichIndex); logger.info("Starting policy run"); - enrichPolicyRunner.run(); - latch.await(); - if (exception.get() != null) { - throw exception.get(); - } + safeExecute(enrichPolicyRunner); // Validate Index definition GetIndexResponse enrichIndex = getGetIndexResponseAndCheck(createdEnrichIndex); @@ -514,17 +481,10 @@ public void testRunnerMultiSource() throws Exception { final long createTime = randomNonNegativeLong(); String createdEnrichIndex = ".enrich-test1-" + createTime; - final AtomicReference exception = new AtomicReference<>(); - final CountDownLatch latch = new CountDownLatch(1); - ActionListener listener = createTestListener(latch, exception::set); - EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, listener, createdEnrichIndex); + EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, createdEnrichIndex); logger.info("Starting policy run"); - enrichPolicyRunner.run(); - latch.await(); - if (exception.get() != null) { - throw exception.get(); - } + safeExecute(enrichPolicyRunner); // Validate Index definition GetIndexResponse enrichIndex = getGetIndexResponseAndCheck(createdEnrichIndex); @@ -633,17 +593,10 @@ public void testRunnerMultiSourceDocIdCollisions() throws Exception { final long createTime = randomNonNegativeLong(); String createdEnrichIndex = ".enrich-test1-" + createTime; - final AtomicReference exception = new AtomicReference<>(); - final CountDownLatch latch = new CountDownLatch(1); - ActionListener listener = createTestListener(latch, exception::set); - EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, listener, createdEnrichIndex); + EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, createdEnrichIndex); logger.info("Starting policy run"); - enrichPolicyRunner.run(); - latch.await(); - if (exception.get() != null) { - throw exception.get(); - } + safeExecute(enrichPolicyRunner); // Validate Index definition GetIndexResponse enrichIndex = getGetIndexResponseAndCheck(createdEnrichIndex); @@ -756,17 +709,10 @@ public void testRunnerMultiSourceEnrichKeyCollisions() throws Exception { final long createTime = randomNonNegativeLong(); String createdEnrichIndex = ".enrich-test1-" + createTime; - final AtomicReference exception = new AtomicReference<>(); - final CountDownLatch latch = new CountDownLatch(1); - ActionListener listener = createTestListener(latch, exception::set); - EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, listener, createdEnrichIndex); + EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, createdEnrichIndex); logger.info("Starting policy run"); - enrichPolicyRunner.run(); - latch.await(); - if (exception.get() != null) { - throw exception.get(); - } + safeExecute(enrichPolicyRunner); // Validate Index definition GetIndexResponse enrichIndex = getGetIndexResponseAndCheck(createdEnrichIndex); @@ -820,7 +766,7 @@ public void testRunnerMultiSourceEnrichKeyCollisions() throws Exception { ensureEnrichIndexIsReadOnly(createdEnrichIndex); } - public void testRunnerNoSourceIndex() throws Exception { + public void testRunnerNoSourceIndex() { final String sourceIndex = "source-index"; List enrichFields = List.of("field2", 
"field5"); @@ -829,24 +775,16 @@ public void testRunnerNoSourceIndex() throws Exception { final long createTime = randomNonNegativeLong(); String createdEnrichIndex = ".enrich-test1-" + createTime; - final AtomicReference exception = new AtomicReference<>(); - final CountDownLatch latch = new CountDownLatch(1); - ActionListener listener = createTestListener(latch, exception::set); - EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, listener, createdEnrichIndex); + EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, createdEnrichIndex); logger.info("Starting policy run"); - enrichPolicyRunner.run(); - latch.await(); - if (exception.get() != null) { - Exception thrown = exception.get(); - assertThat(thrown, instanceOf(IndexNotFoundException.class)); - assertThat(thrown.getMessage(), containsString("no such index [" + sourceIndex + "]")); - } else { - fail("Expected exception but nothing was thrown"); - } + assertThat( + asInstanceOf(IndexNotFoundException.class, safeExecuteExpectFailure(enrichPolicyRunner)).getMessage(), + containsString("no such index [" + sourceIndex + "]") + ); } - public void testRunnerNoSourceMapping() throws Exception { + public void testRunnerNoSourceMapping() { final String sourceIndex = "source-index"; CreateIndexResponse createResponse = indicesAdmin().create(new CreateIndexRequest(sourceIndex)).actionGet(); assertTrue(createResponse.isAcknowledged()); @@ -857,32 +795,21 @@ public void testRunnerNoSourceMapping() throws Exception { final long createTime = randomNonNegativeLong(); String createdEnrichIndex = ".enrich-test1-" + createTime; - final AtomicReference exception = new AtomicReference<>(); - final CountDownLatch latch = new CountDownLatch(1); - ActionListener listener = createTestListener(latch, exception::set); - EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, listener, createdEnrichIndex); + EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, createdEnrichIndex); logger.info("Starting policy run"); - enrichPolicyRunner.run(); - latch.await(); - if (exception.get() != null) { - Exception thrown = exception.get(); - assertThat(thrown, instanceOf(ElasticsearchException.class)); - assertThat( - thrown.getMessage(), - containsString( - "Enrich policy execution for [" - + policyName - + "] failed. No mapping available on source [" - + sourceIndex - + "] included in [[" - + sourceIndex - + "]]" - ) - ); - } else { - fail("Expected exception but nothing was thrown"); - } + assertThat( + asInstanceOf(ElasticsearchException.class, safeExecuteExpectFailure(enrichPolicyRunner)).getMessage(), + containsString( + "Enrich policy execution for [" + + policyName + + "] failed. 
No mapping available on source [" + + sourceIndex + + "] included in [[" + + sourceIndex + + "]]" + ) + ); } public void testRunnerKeyNestedSourceMapping() throws Exception { @@ -914,36 +841,22 @@ public void testRunnerKeyNestedSourceMapping() throws Exception { final long createTime = randomNonNegativeLong(); String createdEnrichIndex = ".enrich-test1-" + createTime; - final AtomicReference exception = new AtomicReference<>(); - final CountDownLatch latch = new CountDownLatch(1); - ActionListener listener = createTestListener(latch, exception::set); - EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, listener, createdEnrichIndex); + EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, createdEnrichIndex); logger.info("Starting policy run"); - enrichPolicyRunner.run(); - latch.await(); - if (exception.get() != null) { - Exception thrown = exception.get(); - assertThat(thrown, instanceOf(ElasticsearchException.class)); - assertThat( - thrown.getMessage(), - containsString( - "Enrich policy execution for [" - + policyName - + "] failed while validating field mappings for index [" - + sourceIndex - + "]" - ) - ); - assertThat( - thrown.getCause().getMessage(), - containsString( - "Could not traverse mapping to field [nesting.key]. The [nesting" + "] field must be regular object but was [nested]." - ) - ); - } else { - fail("Expected exception but nothing was thrown"); - } + final var thrown = asInstanceOf(ElasticsearchException.class, safeExecuteExpectFailure(enrichPolicyRunner)); + assertThat( + thrown.getMessage(), + containsString( + "Enrich policy execution for [" + policyName + "] failed while validating field mappings for index [" + sourceIndex + "]" + ) + ); + assertThat( + thrown.getCause().getMessage(), + containsString( + "Could not traverse mapping to field [nesting.key]. The [nesting" + "] field must be regular object but was [nested]." + ) + ); } public void testRunnerValueNestedSourceMapping() throws Exception { @@ -975,37 +888,22 @@ public void testRunnerValueNestedSourceMapping() throws Exception { final long createTime = randomNonNegativeLong(); String createdEnrichIndex = ".enrich-test1-" + createTime; - final AtomicReference exception = new AtomicReference<>(); - final CountDownLatch latch = new CountDownLatch(1); - ActionListener listener = createTestListener(latch, exception::set); - EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, listener, createdEnrichIndex); + EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, createdEnrichIndex); logger.info("Starting policy run"); - enrichPolicyRunner.run(); - latch.await(); - if (exception.get() != null) { - Exception thrown = exception.get(); - assertThat(thrown, instanceOf(ElasticsearchException.class)); - assertThat( - thrown.getMessage(), - containsString( - "Enrich policy execution for [" - + policyName - + "] failed while validating field mappings for index [" - + sourceIndex - + "]" - ) - ); - assertThat( - thrown.getCause().getMessage(), - containsString( - "Could not traverse mapping to field [nesting.field2]. " - + "The [nesting] field must be regular object but was [nested]." 
- ) - ); - } else { - fail("Expected exception but nothing was thrown"); - } + final var thrown = asInstanceOf(ElasticsearchException.class, safeExecuteExpectFailure(enrichPolicyRunner)); + assertThat( + thrown.getMessage(), + containsString( + "Enrich policy execution for [" + policyName + "] failed while validating field mappings for index [" + sourceIndex + "]" + ) + ); + assertThat( + thrown.getCause().getMessage(), + containsString( + "Could not traverse mapping to field [nesting.field2]. " + "The [nesting] field must be regular object but was [nested]." + ) + ); } public void testRunnerObjectSourceMapping() throws Exception { @@ -1062,17 +960,10 @@ public void testRunnerObjectSourceMapping() throws Exception { final long createTime = randomNonNegativeLong(); String createdEnrichIndex = ".enrich-test1-" + createTime; - final AtomicReference exception = new AtomicReference<>(); - final CountDownLatch latch = new CountDownLatch(1); - ActionListener listener = createTestListener(latch, exception::set); - EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, listener, createdEnrichIndex); + EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, createdEnrichIndex); logger.info("Starting policy run"); - enrichPolicyRunner.run(); - latch.await(); - if (exception.get() != null) { - throw exception.get(); - } + safeExecute(enrichPolicyRunner); // Validate Index definition GetIndexResponse enrichIndex = getGetIndexResponseAndCheck(createdEnrichIndex); @@ -1176,17 +1067,10 @@ public void testRunnerExplicitObjectSourceMapping() throws Exception { final long createTime = randomNonNegativeLong(); String createdEnrichIndex = ".enrich-test1-" + createTime; - final AtomicReference exception = new AtomicReference<>(); - final CountDownLatch latch = new CountDownLatch(1); - ActionListener listener = createTestListener(latch, exception::set); - EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, listener, createdEnrichIndex); + EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, createdEnrichIndex); logger.info("Starting policy run"); - enrichPolicyRunner.run(); - latch.await(); - if (exception.get() != null) { - throw exception.get(); - } + safeExecute(enrichPolicyRunner); // Validate Index definition GetIndexResponse enrichIndex = getGetIndexResponseAndCheck(createdEnrichIndex); @@ -1290,17 +1174,10 @@ public void testRunnerExplicitObjectSourceMappingRangePolicy() throws Exception final long createTime = randomNonNegativeLong(); String createdEnrichIndex = ".enrich-test1-" + createTime; - final AtomicReference exception = new AtomicReference<>(); - final CountDownLatch latch = new CountDownLatch(1); - ActionListener listener = createTestListener(latch, exception::set); - EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, listener, createdEnrichIndex); + EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, createdEnrichIndex); logger.info("Starting policy run"); - enrichPolicyRunner.run(); - latch.await(); - if (exception.get() != null) { - throw exception.get(); - } + safeExecute(enrichPolicyRunner); // Validate Index definition GetIndexResponse enrichIndex = getGetIndexResponseAndCheck(createdEnrichIndex); @@ -1414,17 +1291,10 @@ public void testRunnerTwoObjectLevelsSourceMapping() throws Exception { final long createTime = randomNonNegativeLong(); String createdEnrichIndex = ".enrich-test1-" + createTime; - final AtomicReference exception = new 
AtomicReference<>(); - final CountDownLatch latch = new CountDownLatch(1); - ActionListener listener = createTestListener(latch, exception::set); - EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, listener, createdEnrichIndex); + EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, createdEnrichIndex); logger.info("Starting policy run"); - enrichPolicyRunner.run(); - latch.await(); - if (exception.get() != null) { - throw exception.get(); - } + safeExecute(enrichPolicyRunner); // Validate Index definition GetIndexResponse enrichIndex = getGetIndexResponseAndCheck(createdEnrichIndex); @@ -1544,17 +1414,10 @@ public void testRunnerTwoObjectLevelsSourceMappingRangePolicy() throws Exception final long createTime = randomNonNegativeLong(); String createdEnrichIndex = ".enrich-test1-" + createTime; - final AtomicReference exception = new AtomicReference<>(); - final CountDownLatch latch = new CountDownLatch(1); - ActionListener listener = createTestListener(latch, exception::set); - EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, listener, createdEnrichIndex); + EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, createdEnrichIndex); logger.info("Starting policy run"); - enrichPolicyRunner.run(); - latch.await(); - if (exception.get() != null) { - throw exception.get(); - } + safeExecute(enrichPolicyRunner); // Validate Index definition GetIndexResponse enrichIndex = getGetIndexResponseAndCheck(createdEnrichIndex); @@ -1678,17 +1541,10 @@ public void testRunnerTwoObjectLevelsSourceMappingDateRangeWithFormat() throws E final long createTime = randomNonNegativeLong(); String createdEnrichIndex = ".enrich-test1-" + createTime; - final AtomicReference exception = new AtomicReference<>(); - final CountDownLatch latch = new CountDownLatch(1); - ActionListener listener = createTestListener(latch, exception::set); - EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, listener, createdEnrichIndex); + EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, createdEnrichIndex); logger.info("Starting policy run"); - enrichPolicyRunner.run(); - latch.await(); - if (exception.get() != null) { - throw exception.get(); - } + safeExecute(enrichPolicyRunner); // Validate Index definition GetIndexResponse enrichIndex = getGetIndexResponseAndCheck(createdEnrichIndex); @@ -1815,17 +1671,10 @@ public void testRunnerDottedKeyNameSourceMapping() throws Exception { final long createTime = randomNonNegativeLong(); String createdEnrichIndex = ".enrich-test1-" + createTime; - final AtomicReference exception = new AtomicReference<>(); - final CountDownLatch latch = new CountDownLatch(1); - ActionListener listener = createTestListener(latch, exception::set); - EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, listener, createdEnrichIndex); + EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, createdEnrichIndex); logger.info("Starting policy run"); - enrichPolicyRunner.run(); - latch.await(); - if (exception.get() != null) { - throw exception.get(); - } + safeExecute(enrichPolicyRunner); // Validate Index definition GetIndexResponse enrichIndex = getGetIndexResponseAndCheck(createdEnrichIndex); @@ -1903,9 +1752,6 @@ public void testRunnerWithForceMergeRetry() throws Exception { final long createTime = randomNonNegativeLong(); String createdEnrichIndex = ".enrich-test1-" + createTime; - final AtomicReference 
exception = new AtomicReference<>(); - final CountDownLatch latch = new CountDownLatch(1); - ActionListener listener = createTestListener(latch, exception::set); ClusterService clusterService = getInstanceFromNode(ClusterService.class); IndexNameExpressionResolver resolver = getInstanceFromNode(IndexNameExpressionResolver.class); Task asyncTask = testTaskManager.register("enrich", "policy_execution", new TaskAwareRequest() { @@ -1931,22 +1777,6 @@ public String getDescription() { } }); ExecuteEnrichPolicyTask task = ((ExecuteEnrichPolicyTask) asyncTask); - // The executor would wrap the listener in order to clean up the task in the - // task manager, but we're just testing the runner, so we make sure to clean - // up after ourselves. - ActionListener wrappedListener = new ActionListener<>() { - @Override - public void onResponse(ExecuteEnrichPolicyStatus policyExecutionResult) { - testTaskManager.unregister(task); - listener.onResponse(policyExecutionResult); - } - - @Override - public void onFailure(Exception e) { - testTaskManager.unregister(task); - listener.onFailure(e); - } - }; AtomicInteger forceMergeAttempts = new AtomicInteger(0); final XContentBuilder unmergedDocument = SmileXContent.contentBuilder() .startObject() @@ -1958,7 +1788,6 @@ public void onFailure(Exception e) { policyName, policy, task, - wrappedListener, clusterService, getInstanceFromNode(IndicesService.class), client(), @@ -1968,7 +1797,19 @@ public void onFailure(Exception e) { randomIntBetween(3, 10) ) { @Override - protected void ensureSingleSegment(String destinationIndexName, int attempt) { + public void run(ActionListener listener) { + // The executor would wrap the listener in order to clean up the task in the + // task manager, but we're just testing the runner, so we make sure to clean + // up after ourselves. 
+ super.run(ActionListener.runBefore(listener, () -> testTaskManager.unregister(task))); + } + + @Override + protected void ensureSingleSegment( + String destinationIndexName, + int attempt, + ActionListener listener + ) { forceMergeAttempts.incrementAndGet(); if (attempt == 1) { // Put and flush a document to increase the number of segments, simulating not @@ -1980,16 +1821,12 @@ protected void ensureSingleSegment(String destinationIndexName, int attempt) { ).actionGet(); assertEquals(RestStatus.CREATED, indexRequest.status()); } - super.ensureSingleSegment(destinationIndexName, attempt); + super.ensureSingleSegment(destinationIndexName, attempt, listener); } }; logger.info("Starting policy run"); - enrichPolicyRunner.run(); - latch.await(); - if (exception.get() != null) { - throw exception.get(); - } + safeExecute(enrichPolicyRunner); // Validate number of force merges assertThat(forceMergeAttempts.get(), equalTo(2)); @@ -2080,9 +1917,6 @@ public void testRunnerWithEmptySegmentsResponse() throws Exception { final long createTime = randomNonNegativeLong(); String createdEnrichIndex = ".enrich-test1-" + createTime; - final AtomicReference exception = new AtomicReference<>(); - final CountDownLatch latch = new CountDownLatch(1); - ActionListener listener = createTestListener(latch, exception::set); ClusterService clusterService = getInstanceFromNode(ClusterService.class); IndexNameExpressionResolver resolver = getInstanceFromNode(IndexNameExpressionResolver.class); Task asyncTask = testTaskManager.register("enrich", "policy_execution", new TaskAwareRequest() { @@ -2108,13 +1942,6 @@ public String getDescription() { } }); ExecuteEnrichPolicyTask task = ((ExecuteEnrichPolicyTask) asyncTask); - // The executor would wrap the listener in order to clean up the task in the - // task manager, but we're just testing the runner, so we make sure to clean - // up after ourselves. - ActionListener wrappedListener = ActionListener.runBefore( - listener, - () -> testTaskManager.unregister(task) - ); // Wrap the client so that when we receive the indices segments action, we intercept the request and complete it on another thread // with an empty segments response. @@ -2141,7 +1968,6 @@ protected void policyName, policy, task, - wrappedListener, clusterService, getInstanceFromNode(IndicesService.class), client, @@ -2149,21 +1975,21 @@ protected void createdEnrichIndex, randomIntBetween(1, 10000), randomIntBetween(3, 10) - ); + ) { + @Override + public void run(ActionListener listener) { + // The executor would wrap the listener in order to clean up the task in the + // task manager, but we're just testing the runner, so we make sure to clean + // up after ourselves. 
+ super.run(ActionListener.runBefore(listener, () -> testTaskManager.unregister(task))); + } + }; logger.info("Starting policy run"); - enrichPolicyRunner.run(); - if (latch.await(1, TimeUnit.MINUTES) == false) { - fail("Timeout while waiting for runner to complete"); - } - Exception exceptionThrown = exception.get(); - if (exceptionThrown == null) { - fail("Expected exception to be thrown from segment api"); - } - - // Validate exception information - assertThat(exceptionThrown, instanceOf(ElasticsearchException.class)); - assertThat(exceptionThrown.getMessage(), containsString("Could not locate segment information for newly created index")); + assertThat( + asInstanceOf(ElasticsearchException.class, safeExecuteExpectFailure(enrichPolicyRunner)).getMessage(), + containsString("Could not locate segment information for newly created index") + ); } public void testRunnerWithShardFailuresInSegmentResponse() throws Exception { @@ -2197,9 +2023,6 @@ public void testRunnerWithShardFailuresInSegmentResponse() throws Exception { final long createTime = randomNonNegativeLong(); String createdEnrichIndex = ".enrich-test1-" + createTime; - final AtomicReference exception = new AtomicReference<>(); - final CountDownLatch latch = new CountDownLatch(1); - ActionListener listener = createTestListener(latch, exception::set); ClusterService clusterService = getInstanceFromNode(ClusterService.class); IndexNameExpressionResolver resolver = getInstanceFromNode(IndexNameExpressionResolver.class); Task asyncTask = testTaskManager.register("enrich", "policy_execution", new TaskAwareRequest() { @@ -2228,10 +2051,6 @@ public String getDescription() { // The executor would wrap the listener in order to clean up the task in the // task manager, but we're just testing the runner, so we make sure to clean // up after ourselves. - ActionListener wrappedListener = ActionListener.runBefore( - listener, - () -> testTaskManager.unregister(task) - ); // Wrap the client so that when we receive the indices segments action, we intercept the request and complete it on another thread // with an failed segments response. @@ -2270,7 +2089,6 @@ protected void policyName, policy, task, - wrappedListener, clusterService, getInstanceFromNode(IndicesService.class), client, @@ -2278,26 +2096,23 @@ protected void createdEnrichIndex, randomIntBetween(1, 10000), randomIntBetween(3, 10) - ); + ) { + @Override + public void run(ActionListener listener) { + // The executor would wrap the listener in order to clean up the task in the + // task manager, but we're just testing the runner, so we make sure to clean + // up after ourselves. 
+ super.run(ActionListener.runBefore(listener, () -> testTaskManager.unregister(task))); + } + }; logger.info("Starting policy run"); - enrichPolicyRunner.run(); - if (latch.await(1, TimeUnit.MINUTES) == false) { - fail("Timeout while waiting for runner to complete"); - } - Exception exceptionThrown = exception.get(); - if (exceptionThrown == null) { - fail("Expected exception to be thrown from segment api"); - } - - // Validate exception information - assertThat(exceptionThrown, instanceOf(ElasticsearchException.class)); + final var exceptionThrown = asInstanceOf(ElasticsearchException.class, safeExecuteExpectFailure(enrichPolicyRunner)); assertThat(exceptionThrown.getMessage(), containsString("Could not obtain segment information for newly created index")); - assertThat(exceptionThrown.getCause(), instanceOf(ElasticsearchException.class)); - assertThat(exceptionThrown.getCause().getMessage(), containsString("failure1")); + assertThat(asInstanceOf(ElasticsearchException.class, exceptionThrown.getCause()).getMessage(), containsString("failure1")); } - public void testRunnerCancel() throws Exception { + public void testRunnerCancel() { final String sourceIndex = "source-index"; DocWriteResponse indexRequest = client().index(new IndexRequest().index(sourceIndex).id("id").source(""" { @@ -2315,9 +2130,6 @@ public void testRunnerCancel() throws Exception { final long createTime = randomNonNegativeLong(); String createdEnrichIndex = ".enrich-test1-" + createTime; - final AtomicReference exception = new AtomicReference<>(); - final CountDownLatch latch = new CountDownLatch(1); - ActionListener listener = createTestListener(latch, exception::set); ActionType randomActionType = randomFrom( EnrichReindexAction.INSTANCE, @@ -2349,12 +2161,12 @@ protected void } }; - EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(client, policyName, policy, listener, createdEnrichIndex); + EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(client, policyName, policy, createdEnrichIndex); logger.info("Starting policy run"); - enrichPolicyRunner.run(); - latch.await(); - assertThat(exception.get(), notNullValue()); - assertThat(exception.get().getMessage(), containsString("cancelled policy execution [test1], status [")); + assertThat( + safeExecuteExpectFailure(enrichPolicyRunner).getMessage(), + containsString("cancelled policy execution [test1], status [") + ); } public void testRunRangePolicyWithObjectFieldAsMatchField() throws Exception { @@ -2386,17 +2198,13 @@ public void testRunRangePolicyWithObjectFieldAsMatchField() throws Exception { final long createTime = randomNonNegativeLong(); String createdEnrichIndex = ".enrich-test1-" + createTime; - final AtomicReference exception = new AtomicReference<>(); - final CountDownLatch latch = new CountDownLatch(1); - ActionListener listener = createTestListener(latch, exception::set); - EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, listener, createdEnrichIndex); + EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, createdEnrichIndex); logger.info("Starting policy run"); - enrichPolicyRunner.run(); - latch.await(); - Exception e = exception.get(); - assertThat(e, notNullValue()); - assertThat(e.getMessage(), equalTo("Field 'field1' has type [object] which doesn't appear to be a range type")); + assertThat( + safeExecuteExpectFailure(enrichPolicyRunner).getMessage(), + equalTo("Field 'field1' has type [object] which doesn't appear to be a range type") + ); } public void 
testEnrichFieldsConflictMappingTypes() throws Exception { @@ -2427,10 +2235,7 @@ public void testEnrichFieldsConflictMappingTypes() throws Exception { String policyName = "test1"; final long createTime = randomNonNegativeLong(); String createdEnrichIndex = ".enrich-test1-" + createTime; - PlainActionFuture future = new PlainActionFuture<>(); - EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, future, createdEnrichIndex); - enrichPolicyRunner.run(); - future.actionGet(); + safeExecute(createPolicyRunner(policyName, policy, createdEnrichIndex)); // Validate Index definition GetIndexResponse enrichIndex = getGetIndexResponseAndCheck(createdEnrichIndex); @@ -2473,10 +2278,7 @@ public void testEnrichMappingConflictFormats() throws ExecutionException, Interr String policyName = "test1"; final long createTime = randomNonNegativeLong(); String createdEnrichIndex = ".enrich-test1-" + createTime; - PlainActionFuture future = new PlainActionFuture<>(); - EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, future, createdEnrichIndex); - enrichPolicyRunner.run(); - future.actionGet(); + safeExecute(createPolicyRunner(policyName, policy, createdEnrichIndex)); // Validate Index definition GetIndexResponse enrichIndex = getGetIndexResponseAndCheck(createdEnrichIndex); @@ -2508,10 +2310,7 @@ public void testEnrichObjectField() throws ExecutionException, InterruptedExcept String policyName = "test1"; final long createTime = randomNonNegativeLong(); String createdEnrichIndex = ".enrich-test1-" + createTime; - PlainActionFuture future = new PlainActionFuture<>(); - EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, future, createdEnrichIndex); - enrichPolicyRunner.run(); - future.actionGet(); + safeExecute(createPolicyRunner(policyName, policy, createdEnrichIndex)); // Validate Index definition GetIndexResponse enrichIndex = getGetIndexResponseAndCheck(createdEnrichIndex); @@ -2566,12 +2365,10 @@ public void testEnrichNestedField() throws Exception { final long createTime = randomNonNegativeLong(); String createdEnrichIndex = ".enrich-test1-" + createTime; - PlainActionFuture future = new PlainActionFuture<>(); - EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, future, createdEnrichIndex); + EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, createdEnrichIndex); logger.info("Starting policy run"); - enrichPolicyRunner.run(); - future.actionGet(); + safeExecute(enrichPolicyRunner); // Validate Index definition GetIndexResponse enrichIndex = getGetIndexResponseAndCheck(createdEnrichIndex); @@ -2625,9 +2422,6 @@ public void testRunnerValidatesIndexIntegrity() throws Exception { final long createTime = randomNonNegativeLong(); String createdEnrichIndex = ".enrich-test1-" + createTime; - final AtomicReference exception = new AtomicReference<>(); - final CountDownLatch latch = new CountDownLatch(1); - ActionListener listener = createTestListener(latch, exception::set); // Wrap the client so that when we receive the reindex action, we delete the index then resume operation. This mimics an invalid // state for the resulting index. 
@@ -2654,36 +2448,20 @@ protected void } } }; - EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(client, policyName, policy, listener, createdEnrichIndex); + EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(client, policyName, policy, createdEnrichIndex); logger.info("Starting policy run"); - enrichPolicyRunner.run(); - latch.await(); - Exception runnerException = exception.get(); - if (runnerException == null) { - fail("Expected the runner to fail when the underlying index was deleted during policy execution!"); - } - assertThat(runnerException, is(instanceOf(ElasticsearchException.class))); - assertThat(runnerException.getMessage(), containsString("Could not verify enrich index")); - assertThat(runnerException.getMessage(), containsString("mapping meta field missing")); + assertThat( + asInstanceOf(ElasticsearchException.class, safeExecuteExpectFailure(enrichPolicyRunner)).getMessage(), + allOf(containsString("Could not verify enrich index"), containsString("mapping meta field missing")) + ); } - private EnrichPolicyRunner createPolicyRunner( - String policyName, - EnrichPolicy policy, - ActionListener listener, - String targetIndex - ) { - return createPolicyRunner(client(), policyName, policy, listener, targetIndex); + private EnrichPolicyRunner createPolicyRunner(String policyName, EnrichPolicy policy, String targetIndex) { + return createPolicyRunner(client(), policyName, policy, targetIndex); } - private EnrichPolicyRunner createPolicyRunner( - Client client, - String policyName, - EnrichPolicy policy, - ActionListener listener, - String targetIndex - ) { + private EnrichPolicyRunner createPolicyRunner(Client client, String policyName, EnrichPolicy policy, String targetIndex) { ClusterService clusterService = getInstanceFromNode(ClusterService.class); IndexNameExpressionResolver resolver = getInstanceFromNode(IndexNameExpressionResolver.class); Task asyncTask = testTaskManager.register("enrich", "policy_execution", new TaskAwareRequest() { @@ -2709,27 +2487,10 @@ public String getDescription() { } }); ExecuteEnrichPolicyTask task = ((ExecuteEnrichPolicyTask) asyncTask); - // The executor would wrap the listener in order to clean up the task in the - // task manager, but we're just testing the runner, so we make sure to clean - // up after ourselves. - ActionListener wrappedListener = new ActionListener<>() { - @Override - public void onResponse(ExecuteEnrichPolicyStatus policyExecutionResult) { - testTaskManager.unregister(task); - listener.onResponse(policyExecutionResult); - } - - @Override - public void onFailure(Exception e) { - testTaskManager.unregister(task); - listener.onFailure(e); - } - }; return new EnrichPolicyRunner( policyName, policy, task, - wrappedListener, clusterService, getInstanceFromNode(IndicesService.class), client, @@ -2737,14 +2498,24 @@ public void onFailure(Exception e) { targetIndex, randomIntBetween(1, 10000), randomIntBetween(1, 10) - ); + ) { + @Override + public void run(ActionListener listener) { + // The executor would wrap the listener in order to clean up the task in the + // task manager, but we're just testing the runner, so we make sure to clean + // up after ourselves. 
+                super.run(ActionListener.runBefore(listener, () -> testTaskManager.unregister(task)));
+            }
+        };
+    }
+
+    private void safeExecute(EnrichPolicyRunner enrichPolicyRunner) {
+        safeAwait(enrichPolicyRunner::run);
+        logger.debug("Run complete");
     }

-    private ActionListener<ExecuteEnrichPolicyStatus> createTestListener(
-        final CountDownLatch latch,
-        final Consumer<Exception> exceptionConsumer
-    ) {
-        return new LatchedActionListener<>(ActionListener.wrap((r) -> logger.debug("Run complete"), exceptionConsumer), latch);
+    private Exception safeExecuteExpectFailure(EnrichPolicyRunner enrichPolicyRunner) {
+        return safeAwaitFailure(enrichPolicyRunner::run);
     }

     private void validateMappingMetadata(Map<?, ?> mapping, String policyName, EnrichPolicy policy) {

From d778b9d85170b404903e32d9f9c461d885c5995c Mon Sep 17 00:00:00 2001
From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com>
Date: Tue, 30 Jul 2024 07:51:22 +1000
Subject: [PATCH 104/105] Mute org.elasticsearch.xpack.restart.FullClusterRestartIT testSingleDoc {cluster=UPGRADED} #111434

---
 muted-tests.yml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/muted-tests.yml b/muted-tests.yml
index 1df885cdc72c0..dfec80328588d 100644
--- a/muted-tests.yml
+++ b/muted-tests.yml
@@ -146,6 +146,9 @@ tests:
 - class: org.elasticsearch.xpack.restart.FullClusterRestartIT
   method: testSingleDoc {cluster=OLD}
   issue: https://github.com/elastic/elasticsearch/issues/111430
+- class: org.elasticsearch.xpack.restart.FullClusterRestartIT
+  method: testSingleDoc {cluster=UPGRADED}
+  issue: https://github.com/elastic/elasticsearch/issues/111434

 # Examples:
 #

From 1f4788fced685ac20eb5afca1d1ab3692c671d03 Mon Sep 17 00:00:00 2001
From: Keith Massey
Date: Mon, 29 Jul 2024 18:09:48 -0500
Subject: [PATCH 105/105] Removing the use of watcher stats from WatchAckTests (#111435)

---
 .../xpack/watcher/test/integration/WatchAckTests.java | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/WatchAckTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/WatchAckTests.java
index 1308597b7bcf9..6402b71d3b810 100644
--- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/WatchAckTests.java
+++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/WatchAckTests.java
@@ -24,7 +24,6 @@
 import org.elasticsearch.xpack.core.watcher.transport.actions.get.GetWatchRequestBuilder;
 import org.elasticsearch.xpack.core.watcher.transport.actions.get.GetWatchResponse;
 import org.elasticsearch.xpack.core.watcher.transport.actions.put.PutWatchRequestBuilder;
-import org.elasticsearch.xpack.core.watcher.transport.actions.stats.WatcherStatsRequestBuilder;
 import org.elasticsearch.xpack.core.watcher.watch.Watch;
 import org.elasticsearch.xpack.watcher.condition.CompareCondition;
 import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase;
@@ -75,7 +74,6 @@ public void testAckSingleAction() throws Exception {
         .get();
     assertThat(putWatchResponse.isCreated(), is(true));
-    assertThat(new WatcherStatsRequestBuilder(client()).get().getWatchesCount(), is(1L));

     timeWarp().trigger("_id", 4, TimeValue.timeValueSeconds(5));
     AckWatchResponse ackResponse = new AckWatchRequestBuilder(client(), "_id").setActionIds("_a1").get();
@@ -148,7 +146,6 @@ public void testAckAllActions() throws Exception {
         .get();
     assertThat(putWatchResponse.isCreated(), is(true));
-    assertThat(new WatcherStatsRequestBuilder(client()).get().getWatchesCount(), is(1L));

     timeWarp().trigger("_id", 4, TimeValue.timeValueSeconds(5));
@@ -226,7 +223,6 @@ public void testAckWithRestart() throws Exception {
         )
         .get();
     assertThat(putWatchResponse.isCreated(), is(true));
-    assertThat(new WatcherStatsRequestBuilder(client()).get().getWatchesCount(), is(1L));

     timeWarp().trigger("_name", 4, TimeValue.timeValueSeconds(5));
     restartWatcherRandomly();
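The listener-threading refactor described in the PATCH 103 commit message above ("Make `EnrichPolicyRunner` more properly async") is easiest to see side by side. What follows is a minimal, hypothetical Java sketch of the two shapes, not the actual `EnrichPolicyRunner` code: the `ActionListener` interface here is a cut-down stand-in for `org.elasticsearch.action.ActionListener`, and the runner and step names are invented for illustration.

// Cut-down stand-in for org.elasticsearch.action.ActionListener (illustration only).
interface ActionListener<T> {
    void onResponse(T result);
    void onFailure(Exception e);
}

// Before: the pending listener hides in a field. run() looks synchronous,
// and a step that forgets to complete the field leaks the listener silently.
class BeforeRunner {
    private final ActionListener<String> listener;

    BeforeRunner(ActionListener<String> listener) {
        this.listener = listener;
    }

    void run() {             // signature gives no hint that completion happens later
        stepOne();
    }

    private void stepOne() { // on failure, must remember to call listener.onFailure(e)
        stepTwo();
    }

    private void stepTwo() {
        listener.onResponse("done");
    }
}

// After: every asynchronous step receives the listener it is obliged to
// complete, so a dropped listener is visible at each call site.
class AfterRunner {
    void run(ActionListener<String> listener) {
        stepOne(listener);
    }

    private void stepOne(ActionListener<String> listener) {
        stepTwo(listener);   // the completion obligation is handed onward explicitly
    }

    private void stepTwo(ActionListener<String> listener) {
        listener.onResponse("done");
    }
}

This also explains the executor-side change in the PATCH 103 diff: once the listener is an explicit argument, the caller can hand the runner `ActionListener.assertOnce(listener)` via `ActionRunnable.wrap(...)`, so completing the listener more than once trips an assertion in tests instead of passing unnoticed.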