Backport: initial data stream commit (#53959)
This commit adds a data stream feature flag, an initial definition of a data stream, and
stubs for the data stream create, delete and get APIs. Simple serialization tests are
also added, along with a REST test that exercises the data stream API stubs.

This is a large amount of code, but it is mostly mechanical, so the commit should be
straightforward to review: there isn't any real logic yet.

The data stream transport and REST actions are behind the data stream feature flag and
are only initialized if the feature flag is enabled. The feature flag is enabled if
Elasticsearch is built as a snapshot, or, in a release build, if the
'es.datastreams_feature_flag_registered' system property is set to true.

The integ-test-zip sets the feature flag when building a release build; otherwise the
REST tests would fail.

Relates to #53100
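
For orientation, a minimal sketch of how the new stub action could be invoked through a
node or transport client once the feature flag is enabled; the helper class, client
instance and data stream name below are illustrative assumptions, not part of this commit.

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.datastream.CreateDataStreamAction;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.Client;

// Hypothetical helper class, not part of this commit.
public class DataStreamStubExample {

    // Submits a create-data-stream request and prints whether it was acknowledged;
    // the stub transport action added in this commit always acknowledges.
    public static void createDataStream(Client client) {
        CreateDataStreamAction.Request request = new CreateDataStreamAction.Request("my-data-stream");
        request.setTimestampFieldName("@timestamp");
        client.execute(CreateDataStreamAction.INSTANCE, request, ActionListener.wrap(
            (AcknowledgedResponse response) ->
                System.out.println("acknowledged: " + response.isAcknowledged()),
            e -> { throw new RuntimeException("create data stream failed", e); }
        ));
    }
}
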
martijnvg authored Mar 23, 2020
1 parent 1d1e177 commit aef7b89
Showing 22 changed files with 1,141 additions and 3 deletions.
1 change: 1 addition & 0 deletions build.gradle
@@ -554,6 +554,7 @@ subprojects {
testClusters.all {
if (org.elasticsearch.gradle.info.BuildParams.isSnapshotBuild() == false) {
systemProperty 'es.itv2_feature_flag_registered', 'true'
systemProperty 'es.datastreams_feature_flag_registered', 'true'
}
}
}
@@ -798,7 +798,10 @@ public void testApiNamingConventions() throws Exception {
"scripts_painless_execute",
"cluster.put_component_template",
"cluster.get_component_template",
"cluster.delete_component_template"
"cluster.delete_component_template",
"indices.create_data_stream",
"indices.get_data_streams",
"indices.delete_data_stream"
};
//These API are not required for high-level client feature completeness
String[] notRequiredApi = new String[] {
@@ -0,0 +1,31 @@
{
"indices.create_data_stream":{
"documentation":{
"url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-streams.html",
"description":"Creates or updates a data stream"
},
"stability":"experimental",
"url":{
"paths":[
{
"path":"/_data_stream/{name}",
"methods":[
"PUT"
],
"parts":{
"name":{
"type":"string",
"description":"The name of the data stream"
}
}
}
]
},
"params":{
},
"body":{
"description":"The data stream definition",
"required":true
}
}
}
@@ -0,0 +1,26 @@
{
"indices.delete_data_stream":{
"documentation":{
"url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-streams.html",
"description":"Deletes a data stream."
},
"stability":"experimental",
"url":{
"paths":[
{
"path":"/_data_stream/{name}",
"methods":[
"DELETE"
],
"parts":{
"name":{
"type":"string",
"description":"The name of the data stream"
}
}
}
]
},
"params":{}
}
}
@@ -0,0 +1,33 @@
{
"indices.get_data_streams":{
"documentation":{
"url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-streams.html",
"description":"Returns data streams."
},
"stability":"experimental",
"url":{
"paths":[
{
"path":"/_data_streams",
"methods":[
"GET"
]
},
{
"path":"/_data_streams/{name}",
"methods":[
"GET"
],
"parts":{
"name":{
"type":"list",
"description":"The comma separated names of data streams"
}
}
}
]
},
"params":{
}
}
}
@@ -0,0 +1,26 @@
---
"Test stubs":
- skip:
version: " - 7.6.99"
reason: only available in 7.7+

- do:
indices.create_data_stream:
name: data-stream2
body:
timestamp_field: "@timestamp"
- is_true: acknowledged

- do:
indices.get_data_streams: {}
- match: { 0.name: my_data_stream1 }
- match: { 0.timestamp_field: '@timestamp' }
- match: { 0.indices: ['my_data_stream1-000000'] }
- match: { 1.name: my_data_stream2 }
- match: { 1.timestamp_field: '@timestamp' }
- match: { 1.indices: [] }

- do:
indices.delete_data_stream:
name: data-stream2
- is_true: acknowledged
38 changes: 38 additions & 0 deletions server/src/main/java/org/elasticsearch/action/ActionModule.java
@@ -28,6 +28,9 @@
import org.elasticsearch.action.admin.cluster.configuration.ClearVotingConfigExclusionsAction;
import org.elasticsearch.action.admin.cluster.configuration.TransportAddVotingConfigExclusionsAction;
import org.elasticsearch.action.admin.cluster.configuration.TransportClearVotingConfigExclusionsAction;
import org.elasticsearch.action.admin.indices.datastream.DeleteDataStreamAction;
import org.elasticsearch.action.admin.indices.datastream.GetDataStreamsAction;
import org.elasticsearch.action.admin.indices.datastream.CreateDataStreamAction;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction;
import org.elasticsearch.action.admin.cluster.health.TransportClusterHealthAction;
import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsAction;
@@ -251,9 +254,11 @@
import org.elasticsearch.rest.action.admin.cluster.RestClusterStatsAction;
import org.elasticsearch.rest.action.admin.cluster.RestClusterUpdateSettingsAction;
import org.elasticsearch.rest.action.admin.cluster.RestCreateSnapshotAction;
import org.elasticsearch.rest.action.admin.indices.RestDeleteDataStreamAction;
import org.elasticsearch.rest.action.admin.cluster.RestDeleteRepositoryAction;
import org.elasticsearch.rest.action.admin.cluster.RestDeleteSnapshotAction;
import org.elasticsearch.rest.action.admin.cluster.RestDeleteStoredScriptAction;
import org.elasticsearch.rest.action.admin.indices.RestGetDataStreamsAction;
import org.elasticsearch.rest.action.admin.cluster.RestGetRepositoriesAction;
import org.elasticsearch.rest.action.admin.cluster.RestGetScriptContextAction;
import org.elasticsearch.rest.action.admin.cluster.RestGetScriptLanguageAction;
@@ -266,6 +271,7 @@
import org.elasticsearch.rest.action.admin.cluster.RestNodesStatsAction;
import org.elasticsearch.rest.action.admin.cluster.RestNodesUsageAction;
import org.elasticsearch.rest.action.admin.cluster.RestPendingClusterTasksAction;
import org.elasticsearch.rest.action.admin.indices.RestCreateDataStreamAction;
import org.elasticsearch.rest.action.admin.cluster.RestPutRepositoryAction;
import org.elasticsearch.rest.action.admin.cluster.RestPutStoredScriptAction;
import org.elasticsearch.rest.action.admin.cluster.RestReloadSecureSettingsAction;
@@ -388,6 +394,24 @@ public class ActionModule extends AbstractModule {
}
}

private static final boolean DATASTREAMS_FEATURE_FLAG_REGISTERED;

static {
final String property = System.getProperty("es.datastreams_feature_flag_registered");
if (Build.CURRENT.isSnapshot() && property != null) {
throw new IllegalArgumentException("es.datastreams_feature_flag_registered is only supported in non-snapshot builds");
}
if (Build.CURRENT.isSnapshot() || "true".equals(property)) {
DATASTREAMS_FEATURE_FLAG_REGISTERED = true;
} else if ("false".equals(property) || property == null) {
DATASTREAMS_FEATURE_FLAG_REGISTERED = false;
} else {
throw new IllegalArgumentException(
"expected es.datastreams_feature_flag_registered to be unset or [true|false] but was [" + property + "]"
);
}
}

private final Settings settings;
private final IndexNameExpressionResolver indexNameExpressionResolver;
private final IndexScopedSettings indexScopedSettings;
@@ -576,6 +600,13 @@ public <Request extends ActionRequest, Response extends ActionResponse> void reg

actionPlugins.stream().flatMap(p -> p.getActions().stream()).forEach(actions::register);

// Data streams:
if (DATASTREAMS_FEATURE_FLAG_REGISTERED) {
actions.register(CreateDataStreamAction.INSTANCE, CreateDataStreamAction.TransportAction.class);
actions.register(DeleteDataStreamAction.INSTANCE, DeleteDataStreamAction.TransportAction.class);
actions.register(GetDataStreamsAction.INSTANCE, GetDataStreamsAction.TransportAction.class);
}

// Persistent tasks:
actions.register(StartPersistentTaskAction.INSTANCE, StartPersistentTaskAction.TransportAction.class);
actions.register(UpdatePersistentTaskStatusAction.INSTANCE, UpdatePersistentTaskStatusAction.TransportAction.class);
@@ -718,6 +749,13 @@ public void initRestHandlers(Supplier<DiscoveryNodes> nodesInCluster) {
registerHandler.accept(new RestDeletePipelineAction());
registerHandler.accept(new RestSimulatePipelineAction());

// Data Stream API
if (DATASTREAMS_FEATURE_FLAG_REGISTERED) {
registerHandler.accept(new RestCreateDataStreamAction());
registerHandler.accept(new RestDeleteDataStreamAction());
registerHandler.accept(new RestGetDataStreamsAction());
}

// CAT API
registerHandler.accept(new RestAllocationAction());
registerHandler.accept(new RestShardsAction());
@@ -0,0 +1,127 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.indices.datastream;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ActionType;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.action.support.master.MasterNodeRequest;
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;

import java.io.IOException;
import java.util.Objects;

public class CreateDataStreamAction extends ActionType<AcknowledgedResponse> {

public static final CreateDataStreamAction INSTANCE = new CreateDataStreamAction();
public static final String NAME = "indices:admin/data_stream/create";

private CreateDataStreamAction() {
super(NAME, AcknowledgedResponse::new);
}

public static class Request extends MasterNodeRequest<Request> {

private final String name;
private String timestampFieldName;

public Request(String name) {
this.name = name;
}

public void setTimestampFieldName(String timestampFieldName) {
this.timestampFieldName = timestampFieldName;
}

@Override
public ActionRequestValidationException validate() {
return null;
}

public Request(StreamInput in) throws IOException {
super(in);
this.name = in.readString();
this.timestampFieldName = in.readString();
}

@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(name);
out.writeString(timestampFieldName);
}

@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Request request = (Request) o;
return name.equals(request.name) &&
timestampFieldName.equals(request.timestampFieldName);
}

@Override
public int hashCode() {
return Objects.hash(name, timestampFieldName);
}
}

public static class TransportAction extends TransportMasterNodeAction<Request, AcknowledgedResponse> {

@Inject
public TransportAction(TransportService transportService, ClusterService clusterService, ThreadPool threadPool,
ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) {
super(NAME, transportService, clusterService, threadPool, actionFilters, Request::new, indexNameExpressionResolver);
}

@Override
protected String executor() {
return ThreadPool.Names.SAME;
}

@Override
protected AcknowledgedResponse read(StreamInput in) throws IOException {
return new AcknowledgedResponse(in);
}

@Override
protected void masterOperation(Request request, ClusterState state,
ActionListener<AcknowledgedResponse> listener) throws Exception {
listener.onResponse(new AcknowledgedResponse(true));
}

@Override
protected ClusterBlockException checkBlock(Request request, ClusterState state) {
return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
}
}

}
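
The commit message notes that simple serialization tests are added for these requests. As
a minimal illustrative sketch (the class name and random values are assumptions, not
necessarily the exact test in this commit), a wire round-trip test for the Request class
above could be built on the existing AbstractWireSerializingTestCase infrastructure:

import org.elasticsearch.action.admin.indices.datastream.CreateDataStreamAction;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.test.AbstractWireSerializingTestCase;

// Hypothetical example test, not necessarily the one shipped in this commit.
public class CreateDataStreamRequestTests extends AbstractWireSerializingTestCase<CreateDataStreamAction.Request> {

    @Override
    protected Writeable.Reader<CreateDataStreamAction.Request> instanceReader() {
        // The Request class above provides a StreamInput constructor.
        return CreateDataStreamAction.Request::new;
    }

    @Override
    protected CreateDataStreamAction.Request createTestInstance() {
        // Both fields are written by writeTo, so both must be populated here.
        CreateDataStreamAction.Request request = new CreateDataStreamAction.Request(randomAlphaOfLength(8));
        request.setTimestampFieldName("@timestamp");
        return request;
    }
}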