diff --git a/azureblob-storage/build.gradle b/azureblob-storage/build.gradle index 4ec290214c..fb95e168fb 100644 --- a/azureblob-storage/build.gradle +++ b/azureblob-storage/build.gradle @@ -4,5 +4,5 @@ dependencies { implementation 'org.springframework.boot:spring-boot-starter' implementation "com.azure:azure-storage-blob:${revAzureStorageBlobSdk}" - implementation "org.apache.commons:commons-lang3:${revCommonsLang3}" + implementation "org.apache.commons:commons-lang3" } diff --git a/cassandra-persistence/build.gradle b/cassandra-persistence/build.gradle index aaee7066ad..bedf729283 100644 --- a/cassandra-persistence/build.gradle +++ b/cassandra-persistence/build.gradle @@ -4,7 +4,7 @@ dependencies { implementation 'org.springframework.boot:spring-boot-starter' implementation "com.datastax.cassandra:cassandra-driver-core:${revCassandra}" - implementation "org.apache.commons:commons-lang3:${revCommonsLang3}" + implementation "org.apache.commons:commons-lang3" implementation "javax.inject:javax.inject:1" testImplementation("org.cassandraunit:cassandra-unit:${revCassandraUnit}") { diff --git a/client/build.gradle b/client/build.gradle index dd61f490ed..8265ac0637 100644 --- a/client/build.gradle +++ b/client/build.gradle @@ -8,7 +8,6 @@ dependencies { // SBMTODO: remove guava dep implementation "com.google.guava:guava:${revGuava}" - // SBMTODO: remove jersey dep implementation "com.sun.jersey:jersey-client:1.19.4" implementation "com.netflix.spectator:spectator-api:${revSpectator}" @@ -17,8 +16,8 @@ dependencies { implementation "com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider" - implementation "org.apache.commons:commons-lang3:${revCommonsLang3}" - implementation "commons-io:commons-io:${revCommonsIo}" + implementation "org.apache.commons:commons-lang3" + implementation "commons-io:commons-io" implementation "org.slf4j:slf4j-api" } diff --git a/common/build.gradle b/common/build.gradle index 34c7a53009..14c3b66937 100644 --- a/common/build.gradle +++ b/common/build.gradle @@ -11,17 +11,17 @@ dependencies { implementation 'org.springframework.boot:spring-boot-starter' implementation 'org.springframework.boot:spring-boot-starter-validation' - implementation "org.apache.commons:commons-lang3:${revCommonsLang3}" + implementation "org.apache.commons:commons-lang3" implementation "com.github.rholder:guava-retrying:${revGuavaRetrying}" - implementation "org.apache.bval:bval-jsr:${revBval}" + implementation "org.apache.bval:bval-jsr" implementation "com.google.protobuf:protobuf-java:${revProtoBuf}" implementation "com.github.vmg.protogen:protogen-annotations:${revProtogenAnnotations}" - implementation "com.fasterxml.jackson.core:jackson-databind:${revJackson}" - implementation "com.fasterxml.jackson.core:jackson-core:${revJackson}" + implementation "com.fasterxml.jackson.core:jackson-databind" + implementation "com.fasterxml.jackson.core:jackson-core" } import com.github.vmg.protogen.ProtoGenTask diff --git a/contribs/build.gradle b/contribs/build.gradle index 708e3eda5e..508d4414fa 100644 --- a/contribs/build.gradle +++ b/contribs/build.gradle @@ -7,7 +7,7 @@ dependencies { implementation "com.amazonaws:aws-java-sdk-s3:${revAwsSdk}" implementation "com.amazonaws:aws-java-sdk-sqs:${revAwsSdk}" - implementation "org.apache.commons:commons-lang3:${revCommonsLang3}" + implementation "org.apache.commons:commons-lang3" implementation "net.thisptr:jackson-jq:${revJq}" // SBMTODO: remove guava dep diff --git a/contribs/src/test/java/com/netflix/conductor/contribs/queue/sqs/QueueManagerTest.java
b/contribs/src/test/java/com/netflix/conductor/contribs/queue/sqs/QueueManagerTest.java index 4bb88603f6..0fc11e2dea 100644 --- a/contribs/src/test/java/com/netflix/conductor/contribs/queue/sqs/QueueManagerTest.java +++ b/contribs/src/test/java/com/netflix/conductor/contribs/queue/sqs/QueueManagerTest.java @@ -105,7 +105,6 @@ public static void setup() { doAnswer((Answer) invocation -> { List msgs = invocation.getArgument(0, List.class); - System.out.println("got messages to publish: " + msgs); messages.addAll(msgs); return null; }).when(queue).publish(any()); @@ -118,7 +117,6 @@ public static void setup() { doReturn(workflow2).when(executionService).getExecutionStatus(eq("v_2"), anyBoolean()); doAnswer((Answer) invocation -> { - System.out.println("Updating task: " + invocation.getArgument(0, Task.class)); updatedTasks.add(invocation.getArgument(0, Task.class)); return null; }).when(executionService).updateTask(any(Task.class)); diff --git a/contribs/src/test/java/com/netflix/conductor/contribs/tasks/http/HttpTaskTest.java b/contribs/src/test/java/com/netflix/conductor/contribs/tasks/http/HttpTaskTest.java index cab8638d9c..5b8e9b4d70 100644 --- a/contribs/src/test/java/com/netflix/conductor/contribs/tasks/http/HttpTaskTest.java +++ b/contribs/src/test/java/com/netflix/conductor/contribs/tasks/http/HttpTaskTest.java @@ -375,15 +375,12 @@ public void handle(String target, Request baseRequest, HttpServletRequest reques writer.flush(); writer.close(); } else if (request.getMethod().equals("POST") && request.getRequestURI().equals("/post2")) { - System.out.println("==================================== received request from client"); response.addHeader("Content-Type", "application/json"); response.setStatus(204); BufferedReader reader = request.getReader(); Map input = objectMapper.readValue(reader, mapOfObj); Set keys = input.keySet(); - System.out.println(keys); response.getWriter().close(); - } else if (request.getMethod().equals("GET") && request.getRequestURI().equals("/numeric")) { PrintWriter writer = response.getWriter(); writer.print(NUM_RESPONSE); diff --git a/core/build.gradle b/core/build.gradle index 903af7d16d..a427279542 100644 --- a/core/build.gradle +++ b/core/build.gradle @@ -6,13 +6,13 @@ dependencies { implementation "com.fasterxml.jackson.core:jackson-annotations" implementation "com.fasterxml.jackson.core:jackson-databind" - implementation "commons-io:commons-io:${revCommonsIo}" + implementation "commons-io:commons-io" implementation "com.google.protobuf:protobuf-java:${revProtoBuf}" - implementation "org.apache.commons:commons-lang3:${revCommonsLang3}" + implementation "org.apache.commons:commons-lang3" - implementation "com.fasterxml.jackson.core:jackson-core:${revJackson}" + implementation "com.fasterxml.jackson.core:jackson-core" implementation "com.spotify:completable-futures:${revSpotifyCompletableFutures}" @@ -23,7 +23,7 @@ dependencies { implementation "com.netflix.servo:servo-core:${revServo}" implementation "com.netflix.spectator:spectator-api:${revSpectator}" - implementation "org.apache.bval:bval-jsr:${revBval}" + implementation "org.apache.bval:bval-jsr" implementation "javax.inject:javax.inject:1" } diff --git a/core/src/main/java/com/netflix/conductor/core/index/EmbeddedElasticSearch.java b/core/src/main/java/com/netflix/conductor/core/index/EmbeddedElasticSearch.java deleted file mode 100644 index 48d178eb22..0000000000 --- a/core/src/main/java/com/netflix/conductor/core/index/EmbeddedElasticSearch.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright 2020 
Netflix, Inc. - *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.core.index; - -import com.netflix.conductor.core.Lifecycle; -import org.apache.commons.io.FileUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.io.IOException; -import java.nio.file.FileSystems; -import java.nio.file.Files; -import java.nio.file.Path; - -public interface EmbeddedElasticSearch extends Lifecycle { - - Logger LOGGER = LoggerFactory.getLogger(EmbeddedElasticSearch.class); - - default void cleanDataDir(String path) { - File dataDir = new File(path); - - try { - LOGGER.info("Deleting contents of data dir {}", path); - if (dataDir.exists()) { - FileUtils.cleanDirectory(dataDir); - } - } catch (IOException e) { - LOGGER.error(String.format("Failed to delete ES data dir: %s", dataDir.getAbsolutePath()), e); - } - } - - default File createDataDir(String dataDirLoc) throws IOException { - Path dataDirPath = FileSystems.getDefault().getPath(dataDirLoc); - Files.createDirectories(dataDirPath); - return dataDirPath.toFile(); - } - - default File setupDataDir(String path) throws IOException { - cleanDataDir(path); - return createDataDir(path); - } -} diff --git a/core/src/main/java/com/netflix/conductor/service/AdminServiceImpl.java b/core/src/main/java/com/netflix/conductor/service/AdminServiceImpl.java index 71507887c6..8ad9ca82d0 100644 --- a/core/src/main/java/com/netflix/conductor/service/AdminServiceImpl.java +++ b/core/src/main/java/com/netflix/conductor/service/AdminServiceImpl.java @@ -53,17 +53,18 @@ public AdminServiceImpl(ConductorProperties properties, ExecutionService executi this.version = "UNKNOWN"; this.buildDate = "UNKNOWN"; - try ( - InputStream propertiesIs = this.getClass().getClassLoader() - .getResourceAsStream("META-INF/conductor-core.properties") - ) { - Properties prop = new Properties(); - prop.load(propertiesIs); - this.version = prop.getProperty("Implementation-Version"); - this.buildDate = prop.getProperty("Build-Date"); - } catch (Exception e) { - LOGGER.error("Error loading properties", e); - } + // SBMTODO: remove +// try ( +// InputStream propertiesIs = this.getClass().getClassLoader() +// .getResourceAsStream("META-INF/conductor-core.properties") +// ) { +// Properties prop = new Properties(); +// prop.load(propertiesIs); +// this.version = prop.getProperty("Implementation-Version"); +// this.buildDate = prop.getProperty("Build-Date"); +// } catch (Exception e) { +// LOGGER.error("Error loading properties", e); +// } } /** diff --git a/core/src/test/java/com/netflix/conductor/core/execution/TestWorkflowExecutor.java b/core/src/test/java/com/netflix/conductor/core/execution/TestWorkflowExecutor.java index 51ff16f333..aaf0d07d9d 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/TestWorkflowExecutor.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/TestWorkflowExecutor.java @@ -253,7 +253,6 @@ public void start(Workflow workflow, Task task, WorkflowExecutor executor) { AtomicInteger queuedTaskCount = new AtomicInteger(0); final Answer answer = invocation -> { String queueName = invocation.getArgument(0, String.class); - System.out.println(queueName); queuedTaskCount.incrementAndGet(); return null; }; diff --git 
a/core/src/test/java/com/netflix/conductor/core/utils/ParametersUtilsTest.java b/core/src/test/java/com/netflix/conductor/core/utils/ParametersUtilsTest.java index 102090b6e1..ee8985387e 100644 --- a/core/src/test/java/com/netflix/conductor/core/utils/ParametersUtilsTest.java +++ b/core/src/test/java/com/netflix/conductor/core/utils/ParametersUtilsTest.java @@ -69,7 +69,6 @@ public void testReplace() throws Exception { Map replaced = parametersUtils.replace(input, jsonObj); assertNotNull(replaced); - System.out.println("testNoExpand(): " + replaced); assertEquals("{\"taskRefName\":\"t001\",\"workflowId\":\"w002\"}", replaced.get("k1")); assertEquals("conductor", replaced.get("k4")); diff --git a/dependencies.gradle b/dependencies.gradle index f12e58370f..a883c1b8a0 100644 --- a/dependencies.gradle +++ b/dependencies.gradle @@ -6,11 +6,8 @@ ext { revAwaitility = '3.1.6' revAwsSdk = '1.11.86' revAzureStorageBlobSdk = '12.2.0' - revBval = '2.0.4' revCassandra = '3.10.2' revCassandraUnit = '3.11.2.0' - revCommonsIo = '2.7' - revCommonsLang3 = '3.11' revCuratorRecipes = '2.4.0' revCuratorTest = '2.4.0' revDockerCompose = '0.13.4' @@ -18,14 +15,11 @@ ext { revElasticSearch6 = '6.8.12' revEmbeddedRedis = '0.6' revEurekaClient = '1.10.10' - revFlywayCore = '4.0.3' revGroovy = '2.5.13' revGrpc = '1.33.+' revGuava = '30.0-jre' revGuavaRetrying = '2.0.0' - revHikariCP = '3.2.0' revHealth = '1.1.+' - revJackson = '2.11.0' revJedis = '3.3.0' revJettyServer = '9.4.34.v20201102' revJettyServlet = '9.4.34.v20201102' @@ -33,10 +27,8 @@ ext { revJq = '0.0.12' revJsr311Api = '1.1.1' revKafka = '2.6.0' - revMySqlConnector = '8.0.22' revNatsStreaming = '0.5.0' revOpenapi = '1.4.+' - revPostgres = '42.2.18' revProtoBuf = '3.13.0' revProtogenAnnotations = '1.0.0' revProtogenCodegen = '1.4.0' @@ -47,4 +39,5 @@ ext { revSpectator = '0.114.0' revSpock = '1.3-groovy-2.5' revSpotifyCompletableFutures = '0.3.3' + revTestESContainer = '1.15.0' } diff --git a/es5-persistence/README.md b/es5-persistence/README.md deleted file mode 100644 index 419a683636..0000000000 --- a/es5-persistence/README.md +++ /dev/null @@ -1,73 +0,0 @@ -# ES5 Persistence - -This module provides ES5 persistence when indexing workflows and tasks. - -## Usage - -This module uses the following configuration options: - -* `workflow.elasticsearch.instanceType` - This determines the type of ES instance we are using with conductor. -The two values are either `MEMORY` or `EXTERNAL`. -If `MEMORY`, then an embedded server will be run. -Default is `MEMORY`. -* `workflow.elasticsearch.url` - A comma separated list of schema/host/port of the ES nodes to communicate with. -Schema can be ignored when using `tcp` transport; otherwise, you must specify `http` or `https`. -If using the `http` or `https`, then conductor will use the REST transport protocol. -* `workflow.elasticsearch.index.name` - The name of the workflow and task index. -Defaults to `conductor` -* `workflow.elasticsearch.tasklog.index.name` - The name of the task log index. 
-Defaults to `task_log` -* `workflow.elasticsearch.async.dao.worker.queue.size` - Worker Queue size used in executor service for async methods in IndexDao -Defaults to `100` -* `workflow.elasticsearch.async.dao.max.pool.size` - Maximum thread pool size in executor service for async methods in IndexDao -Defaults to `12` -* `workflow.elasticsearch.async.buffer.flush.timeout.seconds` - Timeout (in seconds) for the in-memory to be flushed if not explicitly indexed -Defaults to `10` - -### Embedded Configuration - -If `workflow.elasticsearch.instanceType=MEMORY`, then you can configure the embedded server using the following configurations: - -* `workflow.elasticsearch.embedded.port` - The starting port of the embedded server. -This is the port used for the TCP transport. -It will also use this + 100 in order to setup the http transport. -Default is `9200` -* `workflow.elasticsearch.embedded.cluster.name` - The name of the embedded cluster name. -Default is `elasticsearch_test` -* `workflow.elasticsearch.embedded.host` - The host of the embedded server. -Default is `127.0.0.1` - -### REST Transport - -If you are using AWS ElasticSearch, you should use the `rest` transport as that's the only version transport that they support. -However, this module currently only works with open IAM, VPC version of ElasticSearch. -Eventually, we should create ES modules that can be loaded in to support authentication and request signing, but this currently does not support that. - -### Example Configurations - -**In-memory ES with TCP transport** - -``` -workflow.elasticsearch.instanceType=MEMORY -``` - -**In-memory ES with REST transport** - -``` -workflow.elasticsearch.instanceType=MEMORY -workflow.elasticsearch.url=http://localhost:9300 -``` - -**ES with TCP transport** - -``` -workflow.elasticsearch.instanceType=EXTERNAL -workflow.elasticsearch.url=127.0.0.1:9300 -``` - -**ES with REST transport** - -``` -workflow.elasticsearch.instanceType=EXTERNAL -workflow.elasticsearch.url=http://127.0.0.1:9200 -``` diff --git a/es5-persistence/build.gradle b/es5-persistence/build.gradle deleted file mode 100644 index aad4bbf823..0000000000 --- a/es5-persistence/build.gradle +++ /dev/null @@ -1,17 +0,0 @@ -dependencies { - compile project(':conductor-core') - - compile "commons-io:commons-io:${revCommonsIo}" - - compile "org.elasticsearch:elasticsearch:${revElasticSearch5}" - compile "org.elasticsearch.client:transport:${revElasticSearch5}" - compile "org.elasticsearch.client:elasticsearch-rest-client:${revElasticSearch5}" - compile "org.elasticsearch.client:elasticsearch-rest-high-level-client:${revElasticSearch5}" - - compile "org.apache.logging.log4j:log4j-api:${revLog4jApi}" - compile "org.apache.logging.log4j:log4j-core:${revLog4jCore}" - - // Test dependencies - testCompile "org.slf4j:slf4j-log4j12:${revSlf4jlog4j}" - testCompile "org.awaitility:awaitility:${revAwaitility}" -} \ No newline at end of file diff --git a/es5-persistence/dependencies.lock b/es5-persistence/dependencies.lock deleted file mode 100644 index 2bf49d8359..0000000000 --- a/es5-persistence/dependencies.lock +++ /dev/null @@ -1,1507 +0,0 @@ -{ - "compile": { - "com.amazonaws:aws-java-sdk-s3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.11.86" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.10.0" - }, - "com.fasterxml.jackson.core:jackson-databind": { - 
"firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.10.0" - }, - "com.github.rholder:guava-retrying": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.0.0" - }, - "com.github.vmg.protogen:protogen-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1.0.0" - }, - "com.google.inject.extensions:guice-multibindings": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "4.1.0" - }, - "com.google.inject:guice": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "4.1.0" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "3.5.1" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.2.0" - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.servo:servo-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.12.17" - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.68.0" - }, - "com.spotify:completable-futures": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.3.1" - }, - "commons-io:commons-io": { - "locked": "2.4", - "requested": "2.4" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "javax.inject:javax.inject": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1" - }, - "javax.validation:validation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.1.Final" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.3" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.9.1", - "requested": "2.9.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.9.1", - "requested": "2.9.1" - }, - "org.elasticsearch.client:elasticsearch-rest-client": { - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.elasticsearch.client:elasticsearch-rest-high-level-client": { - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.elasticsearch.client:transport": { - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.elasticsearch:elasticsearch": { - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.glassfish:javax.el": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.0.0" - }, - "org.slf4j:slf4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1.7.25" - } - }, - "compileClasspath": { - "com.amazonaws:aws-java-sdk-s3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.11.86" - 
}, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.10.0" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.10.0" - }, - "com.github.rholder:guava-retrying": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.0.0" - }, - "com.github.vmg.protogen:protogen-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1.0.0" - }, - "com.google.inject.extensions:guice-multibindings": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "4.1.0" - }, - "com.google.inject:guice": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "4.1.0" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "3.5.1" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.2.0" - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.servo:servo-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.12.17" - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.68.0" - }, - "com.spotify:completable-futures": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.3.1" - }, - "commons-io:commons-io": { - "locked": "2.4", - "requested": "2.4" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "javax.inject:javax.inject": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1" - }, - "javax.validation:validation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.1.Final" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.3" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.9.1", - "requested": "2.9.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.9.1", - "requested": "2.9.1" - }, - "org.elasticsearch.client:elasticsearch-rest-client": { - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.elasticsearch.client:elasticsearch-rest-high-level-client": { - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.elasticsearch.client:transport": { - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.elasticsearch:elasticsearch": { - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.glassfish:javax.el": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.0.0" - }, - "org.slf4j:slf4j-api": { - 
"firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1.7.25" - } - }, - "default": { - "com.amazonaws:aws-java-sdk-s3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.11.86" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.10.0" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.10.0" - }, - "com.github.rholder:guava-retrying": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.0.0" - }, - "com.github.vmg.protogen:protogen-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1.0.0" - }, - "com.google.inject.extensions:guice-multibindings": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "4.1.0" - }, - "com.google.inject:guice": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "4.1.0" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "3.5.1" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.2.0" - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.servo:servo-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.12.17" - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.68.0" - }, - "com.spotify:completable-futures": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.3.1" - }, - "commons-io:commons-io": { - "locked": "2.4", - "requested": "2.4" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "javax.inject:javax.inject": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1" - }, - "javax.validation:validation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.1.Final" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.3" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.9.1", - "requested": "2.9.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.9.1", - "requested": "2.9.1" - }, - "org.elasticsearch.client:elasticsearch-rest-client": { - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.elasticsearch.client:elasticsearch-rest-high-level-client": { - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.elasticsearch.client:transport": { - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.elasticsearch:elasticsearch": { - "locked": 
"5.6.8", - "requested": "5.6.8" - }, - "org.glassfish:javax.el": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.0.0" - }, - "org.slf4j:slf4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1.7.25" - } - }, - "jacocoAgent": { - "org.jacoco:org.jacoco.agent": { - "locked": "0.8.1" - } - }, - "jacocoAnt": { - "org.jacoco:org.jacoco.ant": { - "locked": "0.8.1" - } - }, - "runtime": { - "com.amazonaws:aws-java-sdk-s3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.11.86" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.10.0" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.10.0" - }, - "com.github.rholder:guava-retrying": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.0.0" - }, - "com.github.vmg.protogen:protogen-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1.0.0" - }, - "com.google.inject.extensions:guice-multibindings": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "4.1.0" - }, - "com.google.inject:guice": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "4.1.0" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "3.5.1" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.2.0" - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.servo:servo-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.12.17" - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.68.0" - }, - "com.spotify:completable-futures": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.3.1" - }, - "commons-io:commons-io": { - "locked": "2.4", - "requested": "2.4" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "javax.inject:javax.inject": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1" - }, - "javax.validation:validation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.1.Final" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.3" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.9.1", - "requested": "2.9.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": 
"2.9.1", - "requested": "2.9.1" - }, - "org.elasticsearch.client:elasticsearch-rest-client": { - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.elasticsearch.client:elasticsearch-rest-high-level-client": { - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.elasticsearch.client:transport": { - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.elasticsearch:elasticsearch": { - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.glassfish:javax.el": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.0.0" - }, - "org.slf4j:slf4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1.7.25" - } - }, - "runtimeClasspath": { - "com.amazonaws:aws-java-sdk-s3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.11.86" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.10.0" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.10.0" - }, - "com.github.rholder:guava-retrying": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.0.0" - }, - "com.github.vmg.protogen:protogen-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1.0.0" - }, - "com.google.inject.extensions:guice-multibindings": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "4.1.0" - }, - "com.google.inject:guice": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "4.1.0" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "3.5.1" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.2.0" - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.servo:servo-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.12.17" - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.68.0" - }, - "com.spotify:completable-futures": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.3.1" - }, - "commons-io:commons-io": { - "locked": "2.4", - "requested": "2.4" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "javax.inject:javax.inject": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1" - }, - "javax.validation:validation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.1.Final" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.3" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - 
"com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.9.1", - "requested": "2.9.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.9.1", - "requested": "2.9.1" - }, - "org.elasticsearch.client:elasticsearch-rest-client": { - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.elasticsearch.client:elasticsearch-rest-high-level-client": { - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.elasticsearch.client:transport": { - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.elasticsearch:elasticsearch": { - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.glassfish:javax.el": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.0.0" - }, - "org.slf4j:slf4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1.7.25" - } - }, - "testCompile": { - "com.amazonaws:aws-java-sdk-s3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.11.86" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.10.0" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.10.0" - }, - "com.github.rholder:guava-retrying": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.0.0" - }, - "com.github.vmg.protogen:protogen-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1.0.0" - }, - "com.google.inject.extensions:guice-multibindings": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "4.1.0" - }, - "com.google.inject:guice": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "4.1.0" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "3.5.1" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.2.0" - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.servo:servo-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.12.17" - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.68.0" - }, - "com.spotify:completable-futures": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.3.1" - }, - "commons-io:commons-io": { - "locked": "2.4", - "requested": "2.4" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "javax.inject:javax.inject": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1" - }, - "javax.validation:validation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.1.Final" - }, - 
"junit:junit": { - "locked": "4.12", - "requested": "4.12" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.3" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.9.1", - "requested": "2.9.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.9.1", - "requested": "2.9.1" - }, - "org.awaitility:awaitility": { - "locked": "3.1.2", - "requested": "3.1.2" - }, - "org.elasticsearch.client:elasticsearch-rest-client": { - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.elasticsearch.client:elasticsearch-rest-high-level-client": { - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.elasticsearch.client:transport": { - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.elasticsearch:elasticsearch": { - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.glassfish:javax.el": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.0.0" - }, - "org.mockito:mockito-core": { - "locked": "3.1.0", - "requested": "3.1.0" - }, - "org.slf4j:slf4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1.8.0-alpha1" - }, - "org.slf4j:slf4j-log4j12": { - "locked": "1.8.0-alpha1", - "requested": "1.8.0-alpha1" - } - }, - "testCompileClasspath": { - "com.amazonaws:aws-java-sdk-s3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.11.86" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.10.0" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.10.0" - }, - "com.github.rholder:guava-retrying": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.0.0" - }, - "com.github.vmg.protogen:protogen-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1.0.0" - }, - "com.google.inject.extensions:guice-multibindings": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "4.1.0" - }, - "com.google.inject:guice": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "4.1.0" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "3.5.1" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.2.0" - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.servo:servo-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.12.17" - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.68.0" - }, - "com.spotify:completable-futures": { - "firstLevelTransitive": [ - 
"com.netflix.conductor:conductor-core" - ], - "locked": "0.3.1" - }, - "commons-io:commons-io": { - "locked": "2.4", - "requested": "2.4" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "javax.inject:javax.inject": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1" - }, - "javax.validation:validation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.1.Final" - }, - "junit:junit": { - "locked": "4.12", - "requested": "4.12" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.3" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.9.1", - "requested": "2.9.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.9.1", - "requested": "2.9.1" - }, - "org.awaitility:awaitility": { - "locked": "3.1.2", - "requested": "3.1.2" - }, - "org.elasticsearch.client:elasticsearch-rest-client": { - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.elasticsearch.client:elasticsearch-rest-high-level-client": { - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.elasticsearch.client:transport": { - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.elasticsearch:elasticsearch": { - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.glassfish:javax.el": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.0.0" - }, - "org.mockito:mockito-core": { - "locked": "3.1.0", - "requested": "3.1.0" - }, - "org.slf4j:slf4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1.8.0-alpha1" - }, - "org.slf4j:slf4j-log4j12": { - "locked": "1.8.0-alpha1", - "requested": "1.8.0-alpha1" - } - }, - "testRuntime": { - "com.amazonaws:aws-java-sdk-s3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.11.86" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.10.0" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.10.0" - }, - "com.github.rholder:guava-retrying": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.0.0" - }, - "com.github.vmg.protogen:protogen-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1.0.0" - }, - "com.google.inject.extensions:guice-multibindings": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "4.1.0" - }, - "com.google.inject:guice": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "4.1.0" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "3.5.1" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": 
"2.2.0" - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.servo:servo-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.12.17" - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.68.0" - }, - "com.spotify:completable-futures": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.3.1" - }, - "commons-io:commons-io": { - "locked": "2.4", - "requested": "2.4" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "javax.inject:javax.inject": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1" - }, - "javax.validation:validation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.1.Final" - }, - "junit:junit": { - "locked": "4.12", - "requested": "4.12" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.3" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.9.1", - "requested": "2.9.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.9.1", - "requested": "2.9.1" - }, - "org.awaitility:awaitility": { - "locked": "3.1.2", - "requested": "3.1.2" - }, - "org.elasticsearch.client:elasticsearch-rest-client": { - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.elasticsearch.client:elasticsearch-rest-high-level-client": { - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.elasticsearch.client:transport": { - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.elasticsearch:elasticsearch": { - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.glassfish:javax.el": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.0.0" - }, - "org.mockito:mockito-core": { - "locked": "3.1.0", - "requested": "3.1.0" - }, - "org.slf4j:slf4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1.8.0-alpha1" - }, - "org.slf4j:slf4j-log4j12": { - "locked": "1.8.0-alpha1", - "requested": "1.8.0-alpha1" - } - }, - "testRuntimeClasspath": { - "com.amazonaws:aws-java-sdk-s3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.11.86" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.10.0" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.10.0" - }, - "com.github.rholder:guava-retrying": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.0.0" - }, - "com.github.vmg.protogen:protogen-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" 
- ], - "locked": "1.0.0" - }, - "com.google.inject.extensions:guice-multibindings": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "4.1.0" - }, - "com.google.inject:guice": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "4.1.0" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "3.5.1" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.2.0" - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.servo:servo-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.12.17" - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.68.0" - }, - "com.spotify:completable-futures": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.3.1" - }, - "commons-io:commons-io": { - "locked": "2.4", - "requested": "2.4" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "javax.inject:javax.inject": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1" - }, - "javax.validation:validation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.1.Final" - }, - "junit:junit": { - "locked": "4.12", - "requested": "4.12" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.0.3" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.9.1", - "requested": "2.9.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.9.1", - "requested": "2.9.1" - }, - "org.awaitility:awaitility": { - "locked": "3.1.2", - "requested": "3.1.2" - }, - "org.elasticsearch.client:elasticsearch-rest-client": { - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.elasticsearch.client:elasticsearch-rest-high-level-client": { - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.elasticsearch.client:transport": { - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.elasticsearch:elasticsearch": { - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.glassfish:javax.el": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.0.0" - }, - "org.mockito:mockito-core": { - "locked": "3.1.0", - "requested": "3.1.0" - }, - "org.slf4j:slf4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1.8.0-alpha1" - }, - "org.slf4j:slf4j-log4j12": { - "locked": "1.8.0-alpha1", - "requested": "1.8.0-alpha1" - } - } -} \ No newline at end of file diff --git a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/BulkRequestBuilderWrapper.java 
b/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/BulkRequestBuilderWrapper.java deleted file mode 100644 index 24bc49a942..0000000000 --- a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/BulkRequestBuilderWrapper.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright 2020 Medallia, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.netflix.conductor.dao.es5.index; - -import org.elasticsearch.action.ActionFuture; -import org.elasticsearch.action.bulk.BulkRequestBuilder; -import org.elasticsearch.action.bulk.BulkResponse; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.update.UpdateRequest; - -import javax.annotation.Nonnull; -import java.util.Objects; - -/** - * Thread-safe wrapper for {@link BulkRequestBuilder}. - */ -public class BulkRequestBuilderWrapper { - private final BulkRequestBuilder bulkRequestBuilder; - - public BulkRequestBuilderWrapper(@Nonnull BulkRequestBuilder bulkRequestBuilder) { - this.bulkRequestBuilder = Objects.requireNonNull(bulkRequestBuilder); - } - - public void add(@Nonnull UpdateRequest req) { - synchronized (bulkRequestBuilder) { - bulkRequestBuilder.add(Objects.requireNonNull(req)); - } - } - - public void add(@Nonnull IndexRequest req) { - synchronized (bulkRequestBuilder) { - bulkRequestBuilder.add(Objects.requireNonNull(req)); - } - } - - public int numberOfActions() { - synchronized (bulkRequestBuilder) { - return bulkRequestBuilder.numberOfActions(); - } - } - - public ActionFuture execute() { - synchronized (bulkRequestBuilder) { - return bulkRequestBuilder.execute(); - } - } -} diff --git a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/BulkRequestWrapper.java b/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/BulkRequestWrapper.java deleted file mode 100644 index 104a70a362..0000000000 --- a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/BulkRequestWrapper.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.netflix.conductor.dao.es5.index; - -import org.elasticsearch.action.bulk.BulkRequest; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.update.UpdateRequest; - -import javax.annotation.Nonnull; -import java.util.Objects; - -/** - * Thread-safe wrapper for {@link BulkRequest}. 
- */ -class BulkRequestWrapper -{ - private final BulkRequest bulkRequest; - - BulkRequestWrapper(@Nonnull BulkRequest bulkRequest) { - this.bulkRequest = Objects.requireNonNull(bulkRequest); - } - - public void add(@Nonnull UpdateRequest req) { - synchronized (bulkRequest) { - bulkRequest.add(Objects.requireNonNull(req)); - } - } - - public void add(@Nonnull IndexRequest req) { - synchronized (bulkRequest) { - bulkRequest.add(Objects.requireNonNull(req)); - } - } - - BulkRequest get() - { - return bulkRequest; - } - - int numberOfActions() { - synchronized (bulkRequest) { - return bulkRequest.numberOfActions(); - } - } -} diff --git a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/ElasticSearchDAOV5.java b/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/ElasticSearchDAOV5.java deleted file mode 100644 index 6ca1406a53..0000000000 --- a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/ElasticSearchDAOV5.java +++ /dev/null @@ -1,876 +0,0 @@ -/* - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - */ -package com.netflix.conductor.dao.es5.index; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.type.MapType; -import com.fasterxml.jackson.databind.type.TypeFactory; -import com.netflix.conductor.annotations.Trace; -import com.netflix.conductor.common.metadata.events.EventExecution; -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskExecLog; -import com.netflix.conductor.common.run.SearchResult; -import com.netflix.conductor.common.run.TaskSummary; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.common.run.WorkflowSummary; -import com.netflix.conductor.common.utils.RetryUtil; -import com.netflix.conductor.core.events.queue.Message; -import com.netflix.conductor.core.execution.ApplicationException; -import com.netflix.conductor.core.execution.ApplicationException.Code; -import com.netflix.conductor.dao.IndexDAO; -import com.netflix.conductor.dao.es5.index.query.parser.Expression; -import com.netflix.conductor.elasticsearch.ElasticSearchConfiguration; -import com.netflix.conductor.elasticsearch.query.parser.ParserException; -import com.netflix.conductor.metrics.Monitors; -import java.io.IOException; -import java.io.InputStream; -import java.text.SimpleDateFormat; -import java.time.Instant; -import java.time.LocalDate; -import java.time.ZoneOffset; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Date; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.TimeZone; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.LinkedBlockingQueue; -import java.util.concurrent.ThreadPoolExecutor; -import 
java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; -import java.util.stream.IntStream; -import java.util.stream.StreamSupport; -import javax.annotation.PreDestroy; -import javax.inject.Inject; -import javax.inject.Singleton; -import org.apache.commons.io.IOUtils; -import org.apache.commons.lang3.StringUtils; -import org.elasticsearch.ResourceAlreadyExistsException; -import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; -import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; -import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse; -import org.elasticsearch.action.bulk.BulkRequestBuilder; -import org.elasticsearch.action.bulk.BulkResponse; -import org.elasticsearch.action.delete.DeleteRequest; -import org.elasticsearch.action.delete.DeleteResponse; -import org.elasticsearch.action.get.GetRequest; -import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.search.SearchRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.update.UpdateRequest; -import org.elasticsearch.action.update.UpdateResponse; -import org.elasticsearch.client.Client; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.IndexNotFoundException; -import org.elasticsearch.index.query.BoolQueryBuilder; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.index.query.QueryStringQueryBuilder; -import org.elasticsearch.search.SearchHit; -import org.elasticsearch.search.SearchHits; -import org.elasticsearch.search.fetch.subphase.FetchSourceContext; -import org.elasticsearch.search.sort.FieldSortBuilder; -import org.elasticsearch.search.sort.SortBuilders; -import org.elasticsearch.search.sort.SortOrder; -import org.joda.time.DateTime; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@Trace -@Singleton -public class ElasticSearchDAOV5 implements IndexDAO { - - private static final Logger logger = LoggerFactory.getLogger(ElasticSearchDAOV5.class); - - private static final String WORKFLOW_DOC_TYPE = "workflow"; - private static final String TASK_DOC_TYPE = "task"; - private static final String LOG_DOC_TYPE = "task_log"; - private static final String EVENT_DOC_TYPE = "event"; - private static final String MSG_DOC_TYPE = "message"; - - private static final String className = ElasticSearchDAOV5.class.getSimpleName(); - - private static final SimpleDateFormat SIMPLE_DATE_FORMAT = new SimpleDateFormat("yyyyMMWW"); - private static final TimeZone GMT = TimeZone.getTimeZone("GMT"); - private static final int RETRY_COUNT = 3; - - private final String indexName; - private String logIndexName; - private final String logIndexPrefix; - private final ObjectMapper objectMapper; - private final Client elasticSearchClient; - private final ExecutorService executorService; - private final ExecutorService logExecutorService; - private final int archiveSearchBatchSize; - private ConcurrentHashMap bulkRequests; - private final int indexBatchSize; - private final int asyncBufferFlushTimeout; - private final ElasticSearchConfiguration config; - - static { - SIMPLE_DATE_FORMAT.setTimeZone(GMT); - } - - @Inject - public 
ElasticSearchDAOV5(Client elasticSearchClient, ElasticSearchConfiguration config, - ObjectMapper objectMapper) { - this.objectMapper = objectMapper; - this.elasticSearchClient = elasticSearchClient; - this.indexName = config.getIndexName(); - this.logIndexPrefix = config.getTasklogIndexName(); - this.archiveSearchBatchSize = config.getArchiveSearchBatchSize(); - this.bulkRequests = new ConcurrentHashMap<>(); - this.indexBatchSize = config.getIndexBatchSize(); - this.asyncBufferFlushTimeout = config.getAsyncBufferFlushTimeout(); - this.config = config; - - int corePoolSize = 4; - int maximumPoolSize = config.getAsyncMaxPoolSize(); - long keepAliveTime = 1L; - int workerQueueSize = config.getAsyncWorkerQueueSize(); - this.executorService = new ThreadPoolExecutor(corePoolSize, - maximumPoolSize, - keepAliveTime, - TimeUnit.MINUTES, - new LinkedBlockingQueue<>(workerQueueSize), - (runnable, executor) -> { - logger.warn("Request {} to async dao discarded in executor {}", runnable, executor); - Monitors.recordDiscardedIndexingCount("indexQueue"); - }); - - corePoolSize = 1; - maximumPoolSize = 2; - keepAliveTime = 30L; - this.logExecutorService = new ThreadPoolExecutor(corePoolSize, - maximumPoolSize, - keepAliveTime, - TimeUnit.SECONDS, - new LinkedBlockingQueue<>(workerQueueSize), - (runnable, executor) -> { - logger.warn("Request {} to async log dao discarded in executor {}", runnable, executor); - Monitors.recordDiscardedIndexingCount("logQueue"); - }); - - Executors.newSingleThreadScheduledExecutor().scheduleAtFixedRate(this::flushBulkRequests, 60, 30, TimeUnit.SECONDS); - } - - @PreDestroy - private void shutdown() { - logger.info("Gracefully shutdown executor service"); - shutdownExecutorService(logExecutorService); - shutdownExecutorService(executorService); - } - - private void shutdownExecutorService(ExecutorService execService) { - try { - execService.shutdown(); - if (execService.awaitTermination(30, TimeUnit.SECONDS)) { - logger.debug("tasks completed, shutting down"); - } else { - logger.warn("Forcing shutdown after waiting for 30 seconds"); - execService.shutdownNow(); - } - } catch (InterruptedException ie) { - logger.warn("Shutdown interrupted, invoking shutdownNow on scheduledThreadPoolExecutor for delay queue"); - execService.shutdownNow(); - Thread.currentThread().interrupt(); - } - } - - @Override - public void setup() throws Exception { - elasticSearchClient.admin() - .cluster() - .prepareHealth() - .setWaitForGreenStatus() - .execute() - .get(); - - try { - initIndex(); - updateLogIndexName(); - Executors.newScheduledThreadPool(1) - .scheduleAtFixedRate(this::updateLogIndexName, 0, 1, TimeUnit.HOURS); - } catch (Exception e) { - logger.error("Error creating index templates", e); - } - - //1. Create the required index - try { - addIndex(indexName); - } catch (Exception e) { - logger.error("Failed to initialize index '{}'", indexName, e); - } - - //2. Add Mappings for the workflow document type - try { - addMappingToIndex(indexName, WORKFLOW_DOC_TYPE, "/mappings_docType_workflow.json"); - } catch (Exception e) { - logger.error("Failed to add {} mapping", WORKFLOW_DOC_TYPE); - } - - //3. 
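The constructor above wires two bounded thread pools whose rejection handlers drop overflow work (and record it in metrics) rather than blocking callers, plus a single-threaded scheduler that periodically flushes buffered bulk requests. A standalone sketch of that wiring, assuming illustrative pool sizes and a placeholder flush method:

import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class AsyncIndexExecutorSketch {
    private final ThreadPoolExecutor executorService;
    private final ScheduledExecutorService flusher;

    public AsyncIndexExecutorSketch(int maxPoolSize, int workerQueueSize) {
        this.executorService = new ThreadPoolExecutor(
                4,                       // core pool size, as in the removed constructor
                maxPoolSize,
                1L, TimeUnit.MINUTES,    // keep-alive for idle non-core threads
                new LinkedBlockingQueue<>(workerQueueSize),
                (runnable, executor) -> {
                    // Overflow policy: discard and account for the dropped work item
                    // instead of blocking the caller thread.
                    System.err.println("Discarded async indexing request: " + runnable);
                });

        this.flusher = Executors.newSingleThreadScheduledExecutor();
        // Initial delay 60s, then flush every 30s, mirroring the removed scheduleAtFixedRate call.
        this.flusher.scheduleAtFixedRate(this::flushBulkRequests, 60, 30, TimeUnit.SECONDS);
    }

    private void flushBulkRequests() {
        // In the real DAO this drained any bulk buffers that had not been flushed recently.
    }
}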
Add Mappings for task document type - try { - addMappingToIndex(indexName, TASK_DOC_TYPE, "/mappings_docType_task.json"); - } catch (IOException e) { - logger.error("Failed to add {} mapping", TASK_DOC_TYPE); - } - - } - - private void addIndex(String indexName) { - try { - elasticSearchClient.admin() - .indices() - .prepareGetIndex() - .addIndices(indexName) - .execute() - .actionGet(); - } catch (IndexNotFoundException infe) { - try { - - CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName); - createIndexRequest.settings(Settings.builder() - .put("index.number_of_shards", config.getElasticSearchIndexShardCount()) - .put("index.number_of_replicas", config.getElasticSearchIndexReplicationCount()) - ); - - elasticSearchClient.admin() - .indices() - .create(createIndexRequest) - .actionGet(); - } catch (ResourceAlreadyExistsException done) { - // no-op - } - } - } - - private void addMappingToIndex(String indexName, String mappingType, String mappingFilename) - throws IOException { - GetMappingsResponse getMappingsResponse = elasticSearchClient.admin() - .indices() - .prepareGetMappings(indexName) - .addTypes(mappingType) - .execute() - .actionGet(); - - if (getMappingsResponse.mappings().isEmpty()) { - logger.info("Adding the mappings for type: {}", mappingType); - InputStream stream = ElasticSearchDAOV5.class.getResourceAsStream(mappingFilename); - byte[] bytes = IOUtils.toByteArray(stream); - String source = new String(bytes); - try { - elasticSearchClient.admin() - .indices() - .preparePutMapping(indexName) - .setType(mappingType) - .setSource(source, XContentFactory.xContentType(source)) - .execute() - .actionGet(); - } catch (Exception e) { - logger.error("Failed to init index mappings for type: {}", mappingType, e); - } - } - } - - private void updateLogIndexName() { - this.logIndexName = this.logIndexPrefix + "_" + SIMPLE_DATE_FORMAT.format(new Date()); - - try { - elasticSearchClient.admin() - .indices() - .prepareGetIndex() - .addIndices(logIndexName) - .execute() - .actionGet(); - } catch (IndexNotFoundException infe) { - try { - elasticSearchClient.admin() - .indices() - .prepareCreate(logIndexName) - .execute() - .actionGet(); - } catch (ResourceAlreadyExistsException ilee) { - // no-op - } catch (Exception e) { - logger.error("Failed to update log index name: {}", logIndexName, e); - } - } - } - - /** - * Initializes the index with required templates and mappings. - */ - private void initIndex() throws Exception { - - // 0. 
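The removed updateLogIndexName() rolls the task-log index by appending a GMT-based "yyyyMMWW" stamp (year, month, week-of-month) to the configured prefix, so log documents land in a fresh index roughly once a week. A standalone sketch of that naming scheme (the prefix value is illustrative):

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;

public class TaskLogIndexName {
    // Same pattern and timezone as the removed DAO; note SimpleDateFormat is not
    // thread-safe, so the original only touched it from scheduled single-threaded calls.
    private static final SimpleDateFormat SIMPLE_DATE_FORMAT = new SimpleDateFormat("yyyyMMWW");

    static {
        SIMPLE_DATE_FORMAT.setTimeZone(TimeZone.getTimeZone("GMT"));
    }

    public static String current(String logIndexPrefix) {
        // e.g. "conductor_task_log_20200301" during the first week of March 2020
        return logIndexPrefix + "_" + SIMPLE_DATE_FORMAT.format(new Date());
    }

    public static void main(String[] args) {
        System.out.println(current("conductor_task_log"));
    }
}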
Add the tasklog template - GetIndexTemplatesResponse result = elasticSearchClient.admin() - .indices() - .prepareGetTemplates("tasklog_template") - .execute() - .actionGet(); - - if (result.getIndexTemplates().isEmpty()) { - logger.info("Creating the index template 'tasklog_template'"); - InputStream stream = ElasticSearchDAOV5.class - .getResourceAsStream("/template_tasklog.json"); - byte[] templateSource = IOUtils.toByteArray(stream); - - try { - elasticSearchClient.admin() - .indices() - .preparePutTemplate("tasklog_template") - .setSource(templateSource, XContentType.JSON) - .execute() - .actionGet(); - } catch (Exception e) { - logger.error("Failed to init tasklog_template", e); - } - } - } - - @Override - public void indexWorkflow(Workflow workflow) { - try { - long startTime = Instant.now().toEpochMilli(); - String id = workflow.getWorkflowId(); - WorkflowSummary summary = new WorkflowSummary(workflow); - byte[] doc = objectMapper.writeValueAsBytes(summary); - - UpdateRequest request = new UpdateRequest(indexName, WORKFLOW_DOC_TYPE, id); - request.doc(doc, XContentType.JSON); - request.upsert(doc, XContentType.JSON); - request.retryOnConflict(5); - - new RetryUtil().retryOnException( - () -> elasticSearchClient.update(request).actionGet(), - null, - null, - RETRY_COUNT, - "Indexing workflow document: " + workflow.getWorkflowId(), - "indexWorkflow" - ); - - long endTime = Instant.now().toEpochMilli(); - logger.debug("Time taken {} for indexing workflow: {}", endTime - startTime, workflow.getWorkflowId()); - Monitors.recordESIndexTime("index_workflow", WORKFLOW_DOC_TYPE, endTime - startTime); - Monitors.recordWorkerQueueSize("indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size()); - } catch (Exception e) { - Monitors.error(className, "indexWorkflow"); - logger.error("Failed to index workflow: {}", workflow.getWorkflowId(), e); - } - } - - @Override - public CompletableFuture asyncIndexWorkflow(Workflow workflow) { - return CompletableFuture.runAsync(() -> indexWorkflow(workflow), executorService); - } - - @Override - public void indexTask(Task task) { - try { - long startTime = Instant.now().toEpochMilli(); - String id = task.getTaskId(); - TaskSummary summary = new TaskSummary(task); - byte[] doc = objectMapper.writeValueAsBytes(summary); - - UpdateRequest req = new UpdateRequest(indexName, TASK_DOC_TYPE, id); - req.doc(doc, XContentType.JSON); - req.upsert(doc, XContentType.JSON); - logger.debug("Indexing task document: {} for workflow: {}" + id, task.getWorkflowInstanceId()); - indexObject(req, TASK_DOC_TYPE); - long endTime = Instant.now().toEpochMilli(); - logger.debug("Time taken {} for indexing task:{} in workflow: {}", endTime - startTime, task.getTaskId(), task.getWorkflowInstanceId()); - Monitors.recordESIndexTime("index_task", TASK_DOC_TYPE, endTime - startTime); - Monitors.recordWorkerQueueSize("indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size()); - } catch (Exception e) { - logger.error("Failed to index task: {}", task.getTaskId(), e); - } - } - - @Override - public CompletableFuture asyncIndexTask(Task task) { - return CompletableFuture.runAsync(() -> indexTask(task), executorService); - } - - @Override - public void addTaskExecutionLogs(List taskExecLogs) { - if (taskExecLogs.isEmpty()) { - return; - } - - try { - long startTime = Instant.now().toEpochMilli(); - BulkRequestBuilderWrapper bulkRequestBuilder = new BulkRequestBuilderWrapper(elasticSearchClient.prepareBulk()); - for (TaskExecLog log : taskExecLogs) { - IndexRequest request 
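Each write path above funnels the Elasticsearch call through RetryUtil so transient failures are retried a fixed number of times before surfacing. A generic, dependency-free sketch of that retry shape (this is not Conductor's RetryUtil, only an illustration of the idea):

import java.util.concurrent.Callable;
import java.util.function.Predicate;

public final class RetrySketch {

    // Retries the call up to maxAttempts times, either when it throws or when the
    // returned value is judged a failure by the supplied predicate (e.g. BulkResponse::hasFailures).
    public static <T> T retryOnException(Callable<T> call,
                                         Predicate<T> isFailure,
                                         int maxAttempts,
                                         String operationName) throws Exception {
        Exception lastError = null;
        for (int attempt = 1; attempt <= maxAttempts; attempt++) {
            try {
                T result = call.call();
                if (isFailure == null || !isFailure.test(result)) {
                    return result;
                }
            } catch (Exception e) {
                lastError = e;
            }
        }
        throw new Exception("Operation failed after " + maxAttempts + " attempts: " + operationName, lastError);
    }
}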
= new IndexRequest(logIndexName, LOG_DOC_TYPE); - request.source(objectMapper.writeValueAsBytes(log), XContentType.JSON); - bulkRequestBuilder.add(request); - } - new RetryUtil().retryOnException( - () -> bulkRequestBuilder.execute().actionGet(5, TimeUnit.SECONDS), - null, - BulkResponse::hasFailures, - RETRY_COUNT, - "Indexing task execution logs", - "addTaskExecutionLogs" - ); - long endTime = Instant.now().toEpochMilli(); - logger.debug("Time taken {} for indexing taskExecutionLogs", endTime - startTime); - Monitors.recordESIndexTime("index_task_execution_logs", LOG_DOC_TYPE, endTime - startTime); - Monitors.recordWorkerQueueSize("logQueue", ((ThreadPoolExecutor) logExecutorService).getQueue().size()); - } catch (Exception e) { - List taskIds = taskExecLogs.stream() - .map(TaskExecLog::getTaskId) - .collect(Collectors.toList()); - logger.error("Failed to index task execution logs for tasks: {}", taskIds, e); - } - } - - @Override - public CompletableFuture asyncAddTaskExecutionLogs(List logs) { - return CompletableFuture.runAsync(() -> addTaskExecutionLogs(logs), logExecutorService); - } - - @Override - public List getTaskExecutionLogs(String taskId) { - try { - Expression expression = Expression.fromString("taskId='" + taskId + "'"); - QueryBuilder queryBuilder = expression.getFilterBuilder(); - - BoolQueryBuilder filterQuery = QueryBuilders.boolQuery().must(queryBuilder); - QueryStringQueryBuilder stringQuery = QueryBuilders.queryStringQuery("*"); - BoolQueryBuilder fq = QueryBuilders.boolQuery().must(stringQuery).must(filterQuery); - - FieldSortBuilder sortBuilder = SortBuilders.fieldSort("createdTime") - .order(SortOrder.ASC); - final SearchRequestBuilder srb = elasticSearchClient.prepareSearch(logIndexPrefix + "*") - .setQuery(fq) - .setTypes(LOG_DOC_TYPE) - .addSort(sortBuilder) - .setSize(config.getElasticSearchTasklogLimit()); - - SearchResponse response = srb.execute().actionGet(); - - return Arrays.stream(response.getHits().getHits()) - .map(hit -> { - String source = hit.getSourceAsString(); - try { - return objectMapper.readValue(source, TaskExecLog.class); - } catch (IOException e) { - logger.error("exception deserializing taskExecLog: {}", source); - } - return null; - }) - .filter(Objects::nonNull) - .collect(Collectors.toList()); - } catch (Exception e) { - logger.error("Failed to get task execution logs for task: {}", taskId, e); - } - - return null; - } - - @Override - public void addMessage(String queue, Message message) { - try { - long startTime = Instant.now().toEpochMilli(); - Map doc = new HashMap<>(); - doc.put("messageId", message.getId()); - doc.put("payload", message.getPayload()); - doc.put("queue", queue); - doc.put("created", System.currentTimeMillis()); - - UpdateRequest req = new UpdateRequest(logIndexName, MSG_DOC_TYPE, message.getId()); - req.doc(doc, XContentType.JSON); - req.upsert(doc, XContentType.JSON); - indexObject(req, MSG_DOC_TYPE); - long endTime = Instant.now().toEpochMilli(); - logger.debug("Time taken {} for indexing message: {}", endTime - startTime, message.getId()); - Monitors.recordESIndexTime("add_message", MSG_DOC_TYPE, endTime - startTime); - } catch (Exception e) { - logger.error("Failed to index message: {}", message.getId(), e); - } - } - - @Override - public CompletableFuture asyncAddMessage(String queue, Message message) { - return CompletableFuture.runAsync(() -> addMessage(queue, message), executorService); - } - - @Override - public void addEventExecution(EventExecution eventExecution) { - try { - long startTime = 
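The removed getTaskExecutionLogs() builds its search as a bool query that ANDs a structured filter with a free-text query_string, sorted by createdTime ascending and capped by a configured limit. A sketch of that query shape, assuming the Elasticsearch 5.x query builders already used by this DAO; a plain term query on taskId stands in for Conductor's Expression parser:

import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;

public class TaskLogQuerySketch {

    public static BoolQueryBuilder taskLogQuery(String taskId) {
        // Structured filter (stand-in for Expression.fromString("taskId='...'").getFilterBuilder())
        BoolQueryBuilder filterQuery = QueryBuilders.boolQuery()
                .must(QueryBuilders.termQuery("taskId", taskId));
        // Combined with a match-everything free-text clause, as in the removed code.
        return QueryBuilders.boolQuery()
                .must(QueryBuilders.queryStringQuery("*"))
                .must(filterQuery);
    }

    public static FieldSortBuilder createdTimeAscending() {
        return SortBuilders.fieldSort("createdTime").order(SortOrder.ASC);
    }
}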
Instant.now().toEpochMilli(); - byte[] doc = objectMapper.writeValueAsBytes(eventExecution); - String id = - eventExecution.getName() + "." + eventExecution.getEvent() + "." + eventExecution - .getMessageId() + "." + eventExecution.getId(); - - UpdateRequest req = new UpdateRequest(logIndexName, EVENT_DOC_TYPE, id); - req.doc(doc, XContentType.JSON); - req.upsert(doc, XContentType.JSON); - indexObject(req, EVENT_DOC_TYPE); - long endTime = Instant.now().toEpochMilli(); - logger.debug("Time taken {} for indexing event execution: {}", endTime - startTime, eventExecution.getId()); - Monitors.recordESIndexTime("add_event_execution", EVENT_DOC_TYPE, endTime - startTime); - Monitors.recordWorkerQueueSize("logQueue", ((ThreadPoolExecutor) logExecutorService).getQueue().size()); - } catch (Exception e) { - logger.error("Failed to index event execution: {}", eventExecution.getId(), e); - } - } - - @Override - public CompletableFuture asyncAddEventExecution(EventExecution eventExecution) { - return CompletableFuture.runAsync(() -> addEventExecution(eventExecution), logExecutorService); - } - - private void indexObject(UpdateRequest req, String docType) { - if (bulkRequests.get(docType) == null) { - bulkRequests.put(docType, new BulkRequests(System.currentTimeMillis(), elasticSearchClient.prepareBulk())); - } - bulkRequests.get(docType).getBulkRequestBuilder().add(req); - if (bulkRequests.get(docType).getBulkRequestBuilder().numberOfActions() >= this.indexBatchSize) { - indexBulkRequest(docType); - } - } - - private synchronized void indexBulkRequest(String docType) { - if (bulkRequests.get(docType).getBulkRequestBuilder() != null && bulkRequests.get(docType).getBulkRequestBuilder().numberOfActions() > 0) { - updateWithRetry(bulkRequests.get(docType).getBulkRequestBuilder(), docType); - bulkRequests.put(docType, new BulkRequests(System.currentTimeMillis(), elasticSearchClient.prepareBulk())); - } - } - - private void updateWithRetry(BulkRequestBuilderWrapper request, String docType) { - try { - long startTime = Instant.now().toEpochMilli(); - new RetryUtil().retryOnException( - () -> request.execute().actionGet(5, TimeUnit.SECONDS), - null, - BulkResponse::hasFailures, - RETRY_COUNT, - "Bulk Indexing "+ docType, - "indexObject" - ); - long endTime = Instant.now().toEpochMilli(); - logger.debug("Time taken {} for indexing object of type: {}", endTime - startTime, docType); - Monitors.recordESIndexTime("index_object", docType, endTime - startTime); - } catch (Exception e) { - Monitors.error(className, "index"); - logger.error("Failed to index object of type: {}", docType, e); - } - } - - @Override - public SearchResult searchWorkflows(String query, String freeText, int start, int count, List sort) { - return search(indexName, query, start, count, sort, freeText, WORKFLOW_DOC_TYPE); - } - - @Override - public SearchResult searchTasks(String query, String freeText, int start, int count, List sort) { - return search(indexName, query, start, count, sort, freeText, TASK_DOC_TYPE); - } - - @Override - public void removeWorkflow(String workflowId) { - try { - long startTime = Instant.now().toEpochMilli(); - DeleteRequest request = new DeleteRequest(indexName, WORKFLOW_DOC_TYPE, workflowId); - DeleteResponse response = elasticSearchClient.delete(request).actionGet(); - if (response.getResult() == DocWriteResponse.Result.DELETED) { - logger.error("Index removal failed - document not found by id: {}", workflowId); - } - long endTime = Instant.now().toEpochMilli(); - logger.debug("Time taken {} for removing 
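The indexObject()/indexBulkRequest() pair above buffers update requests per document type and flushes a buffer as soon as it reaches the configured batch size, while a scheduled sweep (flushBulkRequests, further below) picks up anything older than the async flush timeout. A dependency-free sketch of that buffering logic, with a Consumer standing in for the actual bulk call:

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Consumer;

public class BulkBufferSketch<T> {

    private static class Buffer<T> {
        long lastFlushTime = System.currentTimeMillis();
        final List<T> items = new ArrayList<>();
    }

    private final ConcurrentHashMap<String, Buffer<T>> buffers = new ConcurrentHashMap<>();
    private final int indexBatchSize;
    private final long flushTimeoutMillis;
    private final Consumer<List<T>> bulkWriter;

    public BulkBufferSketch(int indexBatchSize, long flushTimeoutMillis, Consumer<List<T>> bulkWriter) {
        this.indexBatchSize = indexBatchSize;
        this.flushTimeoutMillis = flushTimeoutMillis;
        this.bulkWriter = bulkWriter;
    }

    // Mirrors indexObject(): append, then flush eagerly once the batch size is reached.
    public synchronized void add(String docType, T request) {
        Buffer<T> buffer = buffers.computeIfAbsent(docType, k -> new Buffer<>());
        buffer.items.add(request);
        if (buffer.items.size() >= indexBatchSize) {
            flush(docType);
        }
    }

    // Mirrors flushBulkRequests(): periodic sweep of buffers that have gone stale.
    public synchronized void flushStale() {
        long now = System.currentTimeMillis();
        buffers.forEach((docType, buffer) -> {
            if (!buffer.items.isEmpty() && now - buffer.lastFlushTime >= flushTimeoutMillis) {
                flush(docType);
            }
        });
    }

    private void flush(String docType) {
        Buffer<T> buffer = buffers.get(docType);
        if (buffer != null && !buffer.items.isEmpty()) {
            bulkWriter.accept(new ArrayList<>(buffer.items));
            buffers.put(docType, new Buffer<>());
        }
    }
}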
workflow: {}", endTime - startTime, workflowId); - Monitors.recordESIndexTime("remove_workflow", WORKFLOW_DOC_TYPE, endTime - startTime); - Monitors.recordWorkerQueueSize("indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size()); - } catch (Exception e) { - logger.error("Failed to remove workflow {} from index", workflowId, e); - Monitors.error(className, "remove"); - } - } - - @Override - public CompletableFuture asyncRemoveWorkflow(String workflowId) { - return CompletableFuture.runAsync(() -> removeWorkflow(workflowId), executorService); - } - - @Override - public void updateWorkflow(String workflowInstanceId, String[] keys, Object[] values) { - if (keys.length != values.length) { - throw new ApplicationException(Code.INVALID_INPUT, - "Number of keys and values do not match"); - } - - long startTime = Instant.now().toEpochMilli(); - UpdateRequest request = new UpdateRequest(indexName, WORKFLOW_DOC_TYPE, workflowInstanceId); - Map source = IntStream.range(0, keys.length) - .boxed() - .collect(Collectors.toMap(i -> keys[i], i -> values[i])); - request.doc(source); - logger.debug("Updating workflow {} in elasticsearch index: {}", workflowInstanceId, indexName); - new RetryUtil<>().retryOnException( - () -> elasticSearchClient.update(request).actionGet(), - null, - null, - RETRY_COUNT, - "Updating index for doc_type workflow", - "updateWorkflow" - ); - - long endTime = Instant.now().toEpochMilli(); - logger.debug("Time taken {} for updating workflow: {}", endTime - startTime, workflowInstanceId); - Monitors.recordESIndexTime("update_workflow", WORKFLOW_DOC_TYPE, endTime - startTime); - Monitors.recordWorkerQueueSize("indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size()); - } - - @Override - public CompletableFuture asyncUpdateWorkflow(String workflowInstanceId, String[] keys, - Object[] values) { - return CompletableFuture.runAsync(() -> updateWorkflow(workflowInstanceId, keys, values), executorService); - } - - @Override - public String get(String workflowInstanceId, String fieldToGet) { - GetRequest request = new GetRequest(indexName, WORKFLOW_DOC_TYPE, workflowInstanceId) - .fetchSourceContext( - new FetchSourceContext(true, new String[]{fieldToGet}, Strings.EMPTY_ARRAY)); - GetResponse response = elasticSearchClient.get(request).actionGet(); - - if (response.isExists()) { - Map sourceAsMap = response.getSourceAsMap(); - if (sourceAsMap.containsKey(fieldToGet)) { - return sourceAsMap.get(fieldToGet).toString(); - } - } - - logger.info("Unable to find Workflow: {} in ElasticSearch index: {}.", workflowInstanceId, indexName); - return null; - } - - private SearchResult search(String indexName, String structuredQuery, int start, int size, - List sortOptions, String freeTextQuery, String docType) { - try { - QueryBuilder queryBuilder = QueryBuilders.matchAllQuery(); - if (StringUtils.isNotEmpty(structuredQuery)) { - Expression expression = Expression.fromString(structuredQuery); - queryBuilder = expression.getFilterBuilder(); - } - - BoolQueryBuilder filterQuery = QueryBuilders.boolQuery().must(queryBuilder); - QueryStringQueryBuilder stringQuery = QueryBuilders.queryStringQuery(freeTextQuery); - BoolQueryBuilder fq = QueryBuilders.boolQuery().must(stringQuery).must(filterQuery); - final SearchRequestBuilder srb = elasticSearchClient.prepareSearch(indexName) - .setQuery(fq) - .setTypes(docType) - .storedFields("_id") - .setFrom(start) - .setSize(size); - - if (sortOptions != null) { - sortOptions.forEach(sortOption -> addSortOptionToSearchRequest(srb, 
sortOption)); - } - - SearchResponse response = srb.get(); - - LinkedList result = StreamSupport.stream(response.getHits().spliterator(), false) - .map(SearchHit::getId) - .collect(Collectors.toCollection(LinkedList::new)); - long count = response.getHits().getTotalHits(); - - return new SearchResult<>(count, result); - } catch (ParserException e) { - String errorMsg = String.format("Error performing search on index:%s with docType:%s", indexName, docType); - logger.error(errorMsg); - throw new ApplicationException(Code.BACKEND_ERROR, errorMsg, e); - } - } - - private void addSortOptionToSearchRequest(SearchRequestBuilder searchRequestBuilder, - String sortOption) { - SortOrder order = SortOrder.ASC; - String field = sortOption; - int indx = sortOption.indexOf(':'); - if (indx > 0) { // Can't be 0, need the field name at-least - field = sortOption.substring(0, indx); - order = SortOrder.valueOf(sortOption.substring(indx + 1)); - } - searchRequestBuilder.addSort(field, order); - } - - @Override - public List searchArchivableWorkflows(String indexName, long archiveTtlDays) { - QueryBuilder q = QueryBuilders.boolQuery() - .must(QueryBuilders.rangeQuery("endTime").lt(LocalDate.now(ZoneOffset.UTC).minusDays(archiveTtlDays).toString()).gte(LocalDate.now(ZoneOffset.UTC).minusDays(archiveTtlDays).minusDays(1).toString())) - .should(QueryBuilders.termQuery("status", "COMPLETED")) - .should(QueryBuilders.termQuery("status", "FAILED")) - .should(QueryBuilders.termQuery("status", "TIMED_OUT")) - .should(QueryBuilders.termQuery("status", "TERMINATED")) - .mustNot(QueryBuilders.existsQuery("archived")) - .minimumShouldMatch(1); - SearchRequestBuilder s = elasticSearchClient.prepareSearch(indexName) - .setTypes("workflow") - .setQuery(q) - .addSort("endTime", SortOrder.ASC) - .setSize(archiveSearchBatchSize); - - SearchResponse response = s.execute().actionGet(); - - SearchHits hits = response.getHits(); - logger.info("Archive search totalHits - {}", hits.getTotalHits()); - - return Arrays.stream(hits.getHits()) - .map(SearchHit::getId) - .collect(Collectors.toCollection(LinkedList::new)); - } - - @Override - public List searchRecentRunningWorkflows(int lastModifiedHoursAgoFrom, - int lastModifiedHoursAgoTo) { - DateTime dateTime = new DateTime(); - QueryBuilder q = QueryBuilders.boolQuery() - .must(QueryBuilders.rangeQuery("updateTime") - .gt(dateTime.minusHours(lastModifiedHoursAgoFrom))) - .must(QueryBuilders.rangeQuery("updateTime") - .lt(dateTime.minusHours(lastModifiedHoursAgoTo))) - .must(QueryBuilders.termQuery("status", "RUNNING")); - - SearchRequestBuilder s = elasticSearchClient.prepareSearch(indexName) - .setTypes("workflow") - .setQuery(q) - .setSize(5000) - .addSort("updateTime", SortOrder.ASC); - - SearchResponse response = s.execute().actionGet(); - return StreamSupport.stream(response.getHits().spliterator(), false) - .map(SearchHit::getId) - .collect(Collectors.toCollection(LinkedList::new)); - } - - @Override - public List getMessages(String queue) { - try { - Expression expression = Expression.fromString("queue='" + queue + "'"); - QueryBuilder queryBuilder = expression.getFilterBuilder(); - - BoolQueryBuilder filterQuery = QueryBuilders.boolQuery().must(queryBuilder); - QueryStringQueryBuilder stringQuery = QueryBuilders.queryStringQuery("*"); - BoolQueryBuilder fq = QueryBuilders.boolQuery().must(stringQuery).must(filterQuery); - - final SearchRequestBuilder srb = elasticSearchClient.prepareSearch(logIndexPrefix + "*") - .setQuery(fq) - .setTypes(MSG_DOC_TYPE) - 
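searchArchivableWorkflows() above selects workflows whose endTime falls inside a one-day window ending archiveTtlDays ago, restricted to terminal statuses and not yet marked archived. The date window itself is plain LocalDate arithmetic in UTC, sketched standalone here:

import java.time.LocalDate;
import java.time.ZoneOffset;

public class ArchiveWindow {

    // Returns {lower, upper} for the endTime range query: gte(lower) and lt(upper),
    // matching the rangeQuery("endTime") built in the removed method.
    public static String[] endTimeWindow(long archiveTtlDays) {
        LocalDate upper = LocalDate.now(ZoneOffset.UTC).minusDays(archiveTtlDays);
        LocalDate lower = upper.minusDays(1);
        return new String[]{lower.toString(), upper.toString()};
    }

    public static void main(String[] args) {
        String[] window = endTimeWindow(30);
        System.out.println("endTime >= " + window[0] + " and endTime < " + window[1]);
    }
}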
.addSort(SortBuilders.fieldSort("created").order(SortOrder.ASC)); - - return mapGetMessagesResponse(srb.execute().actionGet()); - } catch (Exception e) { - String errorMsg = String.format("Failed to get messages for queue: %s", queue); - logger.error(errorMsg, e); - throw new ApplicationException(Code.BACKEND_ERROR, errorMsg, e); - } - } - - private List mapGetMessagesResponse(SearchResponse response) throws IOException { - SearchHit[] hits = response.getHits().getHits(); - TypeFactory factory = TypeFactory.defaultInstance(); - MapType type = factory.constructMapType(HashMap.class, String.class, String.class); - List messages = new ArrayList<>(hits.length); - for (SearchHit hit : hits) { - String source = hit.getSourceAsString(); - Map mapSource = objectMapper.readValue(source, type); - Message msg = new Message(mapSource.get("messageId"), mapSource.get("payload"), null); - messages.add(msg); - } - return messages; - } - - @Override - public List getEventExecutions(String event) { - try { - Expression expression = Expression.fromString("event='" + event + "'"); - QueryBuilder queryBuilder = expression.getFilterBuilder(); - - BoolQueryBuilder filterQuery = QueryBuilders.boolQuery().must(queryBuilder); - QueryStringQueryBuilder stringQuery = QueryBuilders.queryStringQuery("*"); - BoolQueryBuilder fq = QueryBuilders.boolQuery().must(stringQuery).must(filterQuery); - - final SearchRequestBuilder srb = elasticSearchClient.prepareSearch(logIndexPrefix + "*") - .setQuery(fq).setTypes(EVENT_DOC_TYPE) - .addSort(SortBuilders.fieldSort("created") - .order(SortOrder.ASC)); - - return mapEventExecutionsResponse(srb.execute().actionGet()); - } catch (Exception e) { - String errorMsg = String.format("Failed to get executions for event: %s", event); - logger.error(errorMsg, e); - throw new ApplicationException(Code.BACKEND_ERROR, errorMsg, e); - } - } - - private List mapEventExecutionsResponse(SearchResponse response) throws IOException { - SearchHit[] hits = response.getHits().getHits(); - List executions = new ArrayList<>(hits.length); - for (SearchHit hit : hits) { - String source = hit.getSourceAsString(); - EventExecution tel = objectMapper.readValue(source, EventExecution.class); - executions.add(tel); - } - return executions; - } - - /** - * Flush the buffers if bulk requests have not been indexed for the past {@link ElasticSearchConfiguration#ELASTIC_SEARCH_ASYNC_BUFFER_FLUSH_TIMEOUT_PROPERTY_NAME} seconds - * This is to prevent data loss in case the instance is terminated, while the buffer still holds documents to be indexed. 
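mapGetMessagesResponse() below turns each raw hit back into a Message by reading its JSON source as a Map<String, String> via Jackson's TypeFactory. A standalone sketch of that deserialization step (the JSON literal is illustrative):

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.type.MapType;
import com.fasterxml.jackson.databind.type.TypeFactory;
import java.util.HashMap;
import java.util.Map;

public class MessageSourceSketch {

    public static void main(String[] args) throws Exception {
        ObjectMapper objectMapper = new ObjectMapper();
        // Same MapType construction as the removed helper.
        MapType type = TypeFactory.defaultInstance()
                .constructMapType(HashMap.class, String.class, String.class);

        String source = "{\"messageId\":\"msg-1\",\"payload\":\"{}\",\"queue\":\"sqs\"}";
        Map<String, String> mapSource = objectMapper.readValue(source, type);

        System.out.println(mapSource.get("messageId") + " -> " + mapSource.get("payload"));
    }
}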
- */ - private void flushBulkRequests() { - bulkRequests.entrySet().stream() - .filter(entry -> (System.currentTimeMillis() - entry.getValue().getLastFlushTime()) >= asyncBufferFlushTimeout * 1000) - .filter(entry -> entry.getValue().getBulkRequestBuilder() != null && entry.getValue().getBulkRequestBuilder().numberOfActions() > 0) - .forEach(entry -> { - logger.debug("Flushing bulk request buffer for type {}, size: {}", entry.getKey(), entry.getValue().getBulkRequestBuilder().numberOfActions()); - indexBulkRequest(entry.getKey()); - }); - } - - private static class BulkRequests { - private final long lastFlushTime; - private final BulkRequestBuilderWrapper bulkRequestBuilder; - - public long getLastFlushTime() { - return lastFlushTime; - } - - public BulkRequestBuilderWrapper getBulkRequestBuilder() { - return bulkRequestBuilder; - } - - BulkRequests(long lastFlushTime, BulkRequestBuilder bulkRequestBuilder) { - this.lastFlushTime = lastFlushTime; - this.bulkRequestBuilder = new BulkRequestBuilderWrapper(bulkRequestBuilder); - } - } -} diff --git a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/ElasticSearchRestDAOV5.java b/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/ElasticSearchRestDAOV5.java deleted file mode 100644 index fe0bf48263..0000000000 --- a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/ElasticSearchRestDAOV5.java +++ /dev/null @@ -1,956 +0,0 @@ -/* - * Copyright 2019 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. 
- */ -package com.netflix.conductor.dao.es5.index; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.fasterxml.jackson.databind.type.MapType; -import com.fasterxml.jackson.databind.type.TypeFactory; -import com.netflix.conductor.annotations.Trace; -import com.netflix.conductor.common.metadata.events.EventExecution; -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskExecLog; -import com.netflix.conductor.common.run.SearchResult; -import com.netflix.conductor.common.run.TaskSummary; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.common.run.WorkflowSummary; -import com.netflix.conductor.common.utils.RetryUtil; -import com.netflix.conductor.core.events.queue.Message; -import com.netflix.conductor.core.execution.ApplicationException; -import com.netflix.conductor.dao.IndexDAO; -import com.netflix.conductor.dao.es5.index.query.parser.Expression; -import com.netflix.conductor.elasticsearch.ElasticSearchConfiguration; -import com.netflix.conductor.metrics.Monitors; -import java.io.IOException; -import java.io.InputStream; -import java.text.SimpleDateFormat; -import java.time.Instant; -import java.time.LocalDate; -import java.time.ZoneOffset; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Date; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.TimeZone; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.LinkedBlockingQueue; -import java.util.concurrent.ThreadPoolExecutor; -import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; -import java.util.stream.IntStream; -import javax.annotation.PreDestroy; -import javax.inject.Inject; -import javax.inject.Singleton; -import org.apache.commons.io.IOUtils; -import org.apache.commons.lang3.StringUtils; -import org.apache.http.HttpEntity; -import org.apache.http.HttpStatus; -import org.apache.http.entity.ContentType; -import org.apache.http.nio.entity.NByteArrayEntity; -import org.apache.http.nio.entity.NStringEntity; -import org.apache.http.util.EntityUtils; -import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.action.bulk.BulkRequest; -import org.elasticsearch.action.bulk.BulkResponse; -import org.elasticsearch.action.delete.DeleteRequest; -import org.elasticsearch.action.delete.DeleteResponse; -import org.elasticsearch.action.get.GetRequest; -import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; -import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.update.UpdateRequest; -import org.elasticsearch.action.update.UpdateResponse; -import org.elasticsearch.client.Response; -import org.elasticsearch.client.ResponseException; -import org.elasticsearch.client.RestClient; -import org.elasticsearch.client.RestHighLevelClient; -import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.query.BoolQueryBuilder; -import org.elasticsearch.index.query.QueryBuilder; -import 
org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.index.query.QueryStringQueryBuilder; -import org.elasticsearch.search.SearchHit; -import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.sort.FieldSortBuilder; -import org.elasticsearch.search.sort.SortOrder; -import org.joda.time.DateTime; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - - -@Trace -@Singleton -public class ElasticSearchRestDAOV5 implements IndexDAO { - - private static final Logger logger = LoggerFactory.getLogger(ElasticSearchRestDAOV5.class); - - private static final int RETRY_COUNT = 3; - - private static final String WORKFLOW_DOC_TYPE = "workflow"; - private static final String TASK_DOC_TYPE = "task"; - private static final String LOG_DOC_TYPE = "task_log"; - private static final String EVENT_DOC_TYPE = "event"; - private static final String MSG_DOC_TYPE = "message"; - - private static final TimeZone GMT = TimeZone.getTimeZone("GMT"); - private static final SimpleDateFormat SIMPLE_DATE_FORMAT = new SimpleDateFormat("yyyyMMWW"); - - private @interface HttpMethod { - String GET = "GET"; - String POST = "POST"; - String PUT = "PUT"; - String HEAD = "HEAD"; - } - - private static final String className = ElasticSearchRestDAOV5.class.getSimpleName(); - - private final String indexName; - private final String logIndexPrefix; - private final String clusterHealthColor; - private String logIndexName; - private final ObjectMapper objectMapper; - private final RestHighLevelClient elasticSearchClient; - private final RestClient elasticSearchAdminClient; - private final ExecutorService executorService; - private final ExecutorService logExecutorService; - private final ConcurrentHashMap bulkRequests; - private final int indexBatchSize; - private final int asyncBufferFlushTimeout; - private final ElasticSearchConfiguration config; - - static { - SIMPLE_DATE_FORMAT.setTimeZone(GMT); - } - - @Inject - public ElasticSearchRestDAOV5(RestClient lowLevelRestClient, ElasticSearchConfiguration config, ObjectMapper objectMapper) { - - this.objectMapper = objectMapper; - this.elasticSearchAdminClient = lowLevelRestClient; - this.elasticSearchClient = new RestHighLevelClient(lowLevelRestClient); - this.indexName = config.getIndexName(); - this.logIndexPrefix = config.getTasklogIndexName(); - this.clusterHealthColor = config.getClusterHealthColor(); - this.bulkRequests = new ConcurrentHashMap<>(); - this.indexBatchSize = config.getIndexBatchSize(); - this.asyncBufferFlushTimeout = config.getAsyncBufferFlushTimeout(); - this.config = config; - - // Set up a workerpool for performing async operations for workflow and task - int corePoolSize = 6; - int maximumPoolSize = config.getAsyncMaxPoolSize(); - long keepAliveTime = 1L; - int workerQueueSize = config.getAsyncWorkerQueueSize(); - this.executorService = new ThreadPoolExecutor(corePoolSize, - maximumPoolSize, - keepAliveTime, - TimeUnit.MINUTES, - new LinkedBlockingQueue<>(workerQueueSize), - (runnable, executor) -> { - logger.warn("Request {} to async dao discarded in executor {}", runnable, executor); - Monitors.recordDiscardedIndexingCount("indexQueue"); - }); - - // Set up a workerpool for performing async operations for task_logs, event_executions, message - corePoolSize = 1; - maximumPoolSize = 2; - keepAliveTime = 30L; - this.logExecutorService = new ThreadPoolExecutor(corePoolSize, - maximumPoolSize, - keepAliveTime, - TimeUnit.SECONDS, - new LinkedBlockingQueue<>(workerQueueSize), - (runnable, executor) -> { - 
logger.warn("Request {} to async log dao discarded in executor {}", runnable, executor); - Monitors.recordDiscardedIndexingCount("logQueue"); - }); - - Executors.newSingleThreadScheduledExecutor().scheduleAtFixedRate(this::flushBulkRequests, 60, 30, TimeUnit.SECONDS); - } - - @PreDestroy - private void shutdown() { - logger.info("Gracefully shutdown executor service"); - shutdownExecutorService(logExecutorService); - shutdownExecutorService(executorService); - } - - private void shutdownExecutorService(ExecutorService execService) { - try { - execService.shutdown(); - if (execService.awaitTermination(30, TimeUnit.SECONDS)) { - logger.debug("tasks completed, shutting down"); - } else { - logger.warn("Forcing shutdown after waiting for 30 seconds"); - execService.shutdownNow(); - } - } catch (InterruptedException ie) { - logger.warn("Shutdown interrupted, invoking shutdownNow on scheduledThreadPoolExecutor for delay queue"); - execService.shutdownNow(); - Thread.currentThread().interrupt(); - } - } - - @Override - public void setup() throws Exception { - waitForHealthyCluster(); - - try { - initIndex(); - updateIndexName(); - Executors.newScheduledThreadPool(1).scheduleAtFixedRate(this::updateIndexName, 0, 1, TimeUnit.HOURS); - } catch (Exception e) { - logger.error("Error creating index templates", e); - } - - //1. Create the required index - try { - addIndex(indexName); - } catch (IOException e) { - logger.error("Failed to initialize index '{}'", indexName, e); - } - - //2. Add mappings for the workflow document type - try { - addMappingToIndex(indexName, WORKFLOW_DOC_TYPE, "/mappings_docType_workflow.json"); - } catch (IOException e) { - logger.error("Failed to add {} mapping", WORKFLOW_DOC_TYPE); - } - - //3. Add mappings for task document type - try { - addMappingToIndex(indexName, TASK_DOC_TYPE, "/mappings_docType_task.json"); - } catch (IOException e) { - logger.error("Failed to add {} mapping", TASK_DOC_TYPE); - } - } - - /** - * Waits for the ES cluster to become green. - * @throws Exception If there is an issue connecting with the ES cluster. - */ - private void waitForHealthyCluster() throws Exception { - Map params = new HashMap<>(); - params.put("wait_for_status", this.clusterHealthColor); - params.put("timeout", "30s"); - - elasticSearchAdminClient.performRequest("GET", "/_cluster/health", params); - } - - /** - * Roll the tasklog index daily. - */ - private void updateIndexName() { - this.logIndexName = this.logIndexPrefix + "_" + SIMPLE_DATE_FORMAT.format(new Date()); - - try { - addIndex(logIndexName); - } catch (IOException e) { - logger.error("Failed to update log index name: {}", logIndexName, e); - } - } - - /** - * Initializes the index with the required templates and mappings. - */ - private void initIndex() throws Exception { - - //0. Add the tasklog template - if (doesResourceNotExist("/_template/tasklog_template")) { - logger.info("Creating the index template 'tasklog_template'"); - InputStream stream = ElasticSearchDAOV5.class.getResourceAsStream("/template_tasklog.json"); - byte[] templateSource = IOUtils.toByteArray(stream); - - HttpEntity entity = new NByteArrayEntity(templateSource, ContentType.APPLICATION_JSON); - try { - elasticSearchAdminClient.performRequest(HttpMethod.PUT, "/_template/tasklog_template", Collections.emptyMap(), entity); - } catch (IOException e) { - logger.error("Failed to initialize tasklog_template", e); - } - } - } - - /** - * Adds an index to elasticsearch if it does not exist. - * - * @param index The name of the index to create. 
- * @throws IOException If an error occurred during requests to ES. - */ - private void addIndex(final String index) throws IOException { - - logger.info("Adding index '{}'...", index); - - String resourcePath = "/" + index; - - if (doesResourceNotExist(resourcePath)) { - - try { - ObjectNode setting = objectMapper.createObjectNode(); - ObjectNode indexSetting = objectMapper.createObjectNode(); - - indexSetting.put("number_of_shards", config.getElasticSearchIndexShardCount()); - indexSetting.put("number_of_replicas", config.getElasticSearchIndexReplicationCount()); - - setting.set("index", indexSetting); - - elasticSearchAdminClient.performRequest(HttpMethod.PUT, resourcePath, Collections.emptyMap(), - new NStringEntity(setting.toString(), ContentType.APPLICATION_JSON)); - - logger.info("Added '{}' index", index); - } catch (ResponseException e) { - - boolean errorCreatingIndex = true; - - Response errorResponse = e.getResponse(); - if (errorResponse.getStatusLine().getStatusCode() == HttpStatus.SC_BAD_REQUEST) { - JsonNode root = objectMapper.readTree(EntityUtils.toString(errorResponse.getEntity())); - String errorCode = root.get("error").get("type").asText(); - if ("index_already_exists_exception".equals(errorCode)) { - errorCreatingIndex = false; - } - } - - if (errorCreatingIndex) { - throw e; - } - } - } else { - logger.info("Index '{}' already exists", index); - } - } - - /** - * Adds a mapping type to an index if it does not exist. - * - * @param index The name of the index. - * @param mappingType The name of the mapping type. - * @param mappingFilename The name of the mapping file to use to add the mapping if it does not exist. - * @throws IOException If an error occurred during requests to ES. - */ - private void addMappingToIndex(final String index, final String mappingType, final String mappingFilename) throws IOException { - - logger.info("Adding '{}' mapping to index '{}'...", mappingType, index); - - String resourcePath = "/" + index + "/_mapping/" + mappingType; - - if (doesResourceNotExist(resourcePath)) { - InputStream stream = ElasticSearchDAOV5.class.getResourceAsStream(mappingFilename); - byte[] mappingSource = IOUtils.toByteArray(stream); - - HttpEntity entity = new NByteArrayEntity(mappingSource, ContentType.APPLICATION_JSON); - elasticSearchAdminClient.performRequest(HttpMethod.PUT, resourcePath, Collections.emptyMap(), entity); - logger.info("Added '{}' mapping", mappingType); - } else { - logger.info("Mapping '{}' already exists", mappingType); - } - } - - /** - * Determines whether a resource exists in ES. This will call a GET method to a particular path and - * return true if status 200; false otherwise. - * - * @param resourcePath The path of the resource to get. - * @return True if it exists; false otherwise. - * @throws IOException If an error occurred during requests to ES. - */ - public boolean doesResourceExist(final String resourcePath) throws IOException { - Response response = elasticSearchAdminClient.performRequest(HttpMethod.HEAD, resourcePath); - return response.getStatusLine().getStatusCode() == HttpStatus.SC_OK; - } - - /** - * The inverse of doesResourceExist. - * - * @param resourcePath The path of the resource to check. - * @return True if it does not exist; false otherwise. - * @throws IOException If an error occurred during requests to ES. 
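The REST-based addIndex() above builds the index-settings payload as a small Jackson tree ({"index": {"number_of_shards": ..., "number_of_replicas": ...}}) and PUTs it to "/<index>" only when a preceding existence check says the resource is missing. A sketch of the payload construction (shard and replica counts are illustrative):

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class IndexSettingsPayload {

    public static String build(int shards, int replicas) {
        ObjectMapper objectMapper = new ObjectMapper();
        ObjectNode setting = objectMapper.createObjectNode();
        ObjectNode indexSetting = objectMapper.createObjectNode();

        indexSetting.put("number_of_shards", shards);
        indexSetting.put("number_of_replicas", replicas);
        setting.set("index", indexSetting);

        return setting.toString();
    }

    public static void main(String[] args) {
        // {"index":{"number_of_shards":5,"number_of_replicas":1}}
        System.out.println(build(5, 1));
    }
}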
- */ - public boolean doesResourceNotExist(final String resourcePath) throws IOException { - return !doesResourceExist(resourcePath); - } - - @Override - public void indexWorkflow(Workflow workflow) { - try { - long startTime = Instant.now().toEpochMilli(); - String workflowId = workflow.getWorkflowId(); - WorkflowSummary summary = new WorkflowSummary(workflow); - byte[] docBytes = objectMapper.writeValueAsBytes(summary); - - IndexRequest request = new IndexRequest(indexName, WORKFLOW_DOC_TYPE, workflowId); - request.source(docBytes, XContentType.JSON); - new RetryUtil().retryOnException(() -> { - try { - return elasticSearchClient.index(request); - } catch (IOException e) { - throw new RuntimeException(e); - } - }, null, null, RETRY_COUNT, "Indexing workflow document: " + workflow.getWorkflowId(), "indexWorkflow"); - - long endTime = Instant.now().toEpochMilli(); - logger.debug("Time taken {} for indexing workflow: {}", endTime - startTime, workflowId); - Monitors.recordESIndexTime("index_workflow", WORKFLOW_DOC_TYPE, endTime - startTime); - Monitors.recordWorkerQueueSize("indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size()); - } catch (Exception e) { - Monitors.error(className, "indexWorkflow"); - logger.error("Failed to index workflow: {}", workflow.getWorkflowId(), e); - } - } - - @Override - public CompletableFuture asyncIndexWorkflow(Workflow workflow) { - return CompletableFuture.runAsync(() -> indexWorkflow(workflow), executorService); - } - - @Override - public void indexTask(Task task) { - try { - long startTime = Instant.now().toEpochMilli(); - String taskId = task.getTaskId(); - TaskSummary summary = new TaskSummary(task); - - indexObject(indexName, TASK_DOC_TYPE, taskId, summary); - long endTime = Instant.now().toEpochMilli(); - logger.debug("Time taken {} for indexing task:{} in workflow: {}", endTime - startTime, taskId, task.getWorkflowInstanceId()); - Monitors.recordESIndexTime("index_task", TASK_DOC_TYPE, endTime - startTime); - Monitors.recordWorkerQueueSize("indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size()); - } catch (Exception e) { - logger.error("Failed to index task: {}", task.getTaskId(), e); - } - } - - @Override - public CompletableFuture asyncIndexTask(Task task) { - return CompletableFuture.runAsync(() -> indexTask(task), executorService); - } - - @Override - public void addTaskExecutionLogs(List taskExecLogs) { - if (taskExecLogs.isEmpty()) { - return; - } - - long startTime = Instant.now().toEpochMilli(); - BulkRequest bulkRequest = new BulkRequest(); - for (TaskExecLog log : taskExecLogs) { - - byte[] docBytes; - try { - docBytes = objectMapper.writeValueAsBytes(log); - } catch (JsonProcessingException e) { - logger.error("Failed to convert task log to JSON for task {}", log.getTaskId()); - continue; - } - - IndexRequest request = new IndexRequest(logIndexName, LOG_DOC_TYPE); - request.source(docBytes, XContentType.JSON); - bulkRequest.add(request); - } - - try { - new RetryUtil().retryOnException(() -> { - try { - return elasticSearchClient.bulk(bulkRequest); - } catch (IOException e) { - throw new RuntimeException(e); - } - }, null, BulkResponse::hasFailures, RETRY_COUNT, "Indexing task execution logs", "addTaskExecutionLogs"); - long endTime = Instant.now().toEpochMilli(); - logger.debug("Time taken {} for indexing taskExecutionLogs", endTime - startTime); - Monitors.recordESIndexTime("index_task_execution_logs", LOG_DOC_TYPE, endTime - startTime); - Monitors.recordWorkerQueueSize("logQueue", ((ThreadPoolExecutor) 
logExecutorService).getQueue().size()); - } catch (Exception e) { - List taskIds = taskExecLogs.stream() - .map(TaskExecLog::getTaskId) - .collect(Collectors.toList()); - logger.error("Failed to index task execution logs for tasks: {}", taskIds, e); - } - } - - @Override - public CompletableFuture asyncAddTaskExecutionLogs(List logs) { - return CompletableFuture.runAsync(() -> addTaskExecutionLogs(logs), logExecutorService); - } - - @Override - public List getTaskExecutionLogs(String taskId) { - - try { - - // Build Query - Expression expression = Expression.fromString("taskId='" + taskId + "'"); - QueryBuilder queryBuilder = expression.getFilterBuilder(); - - BoolQueryBuilder filterQuery = QueryBuilders.boolQuery().must(queryBuilder); - QueryStringQueryBuilder stringQuery = QueryBuilders.queryStringQuery("*"); - BoolQueryBuilder fq = QueryBuilders.boolQuery().must(stringQuery).must(filterQuery); - - // Create the searchObjectIdsViaExpression source - SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); - searchSourceBuilder.query(fq); - searchSourceBuilder.sort(new FieldSortBuilder("createdTime").order(SortOrder.ASC)); - searchSourceBuilder.size(config.getElasticSearchTasklogLimit()); - - // Generate the actual request to send to ES. - SearchRequest searchRequest = new SearchRequest(logIndexPrefix + "*"); - searchRequest.types(LOG_DOC_TYPE); - searchRequest.source(searchSourceBuilder); - - SearchResponse response = elasticSearchClient.search(searchRequest); - - SearchHit[] hits = response.getHits().getHits(); - List logs = new ArrayList<>(hits.length); - for(SearchHit hit : hits) { - String source = hit.getSourceAsString(); - TaskExecLog tel = objectMapper.readValue(source, TaskExecLog.class); - logs.add(tel); - } - - return logs; - - }catch(Exception e) { - logger.error("Failed to get task execution logs for task: {}", taskId, e); - } - - return null; - } - - @Override - public void addMessage(String queue, Message message) { - try { - long startTime = Instant.now().toEpochMilli(); - Map doc = new HashMap<>(); - doc.put("messageId", message.getId()); - doc.put("payload", message.getPayload()); - doc.put("queue", queue); - doc.put("created", System.currentTimeMillis()); - indexObject(logIndexName, MSG_DOC_TYPE, null, doc); - long endTime = Instant.now().toEpochMilli(); - logger.debug("Time taken {} for indexing message: {}", endTime - startTime, message.getId()); - Monitors.recordESIndexTime("add_message", MSG_DOC_TYPE, endTime - startTime); - } catch (Exception e) { - logger.error("Failed to index message: {}", message.getId(), e); - } - } - - @Override - public CompletableFuture asyncAddMessage(String queue, Message message) { - return CompletableFuture.runAsync(() -> addMessage(queue, message), executorService); - } - - @Override - public void addEventExecution(EventExecution eventExecution) { - try { - long startTime = Instant.now().toEpochMilli(); - String id = - eventExecution.getName() + "." + eventExecution.getEvent() + "." + eventExecution.getMessageId() + "." 
- + eventExecution.getId(); - - indexObject(logIndexName, EVENT_DOC_TYPE, id, eventExecution); - long endTime = Instant.now().toEpochMilli(); - logger.debug("Time taken {} for indexing event execution: {}", endTime - startTime, eventExecution.getId()); - Monitors.recordESIndexTime("add_event_execution", EVENT_DOC_TYPE, endTime - startTime); - Monitors.recordWorkerQueueSize("logQueue", ((ThreadPoolExecutor) logExecutorService).getQueue().size()); - } catch (Exception e) { - logger.error("Failed to index event execution: {}", eventExecution.getId(), e); - } - } - - @Override - public CompletableFuture asyncAddEventExecution(EventExecution eventExecution) { - return CompletableFuture.runAsync(() -> addEventExecution(eventExecution), logExecutorService); - } - - @Override - public SearchResult searchWorkflows(String query, String freeText, int start, int count, List sort) { - return searchObjectIdsViaExpression(query, start, count, sort, freeText, WORKFLOW_DOC_TYPE); - } - - @Override - public SearchResult searchTasks(String query, String freeText, int start, int count, List sort) { - return searchObjectIdsViaExpression(query, start, count, sort, freeText, TASK_DOC_TYPE); - } - - @Override - public void removeWorkflow(String workflowId) { - long startTime = Instant.now().toEpochMilli(); - DeleteRequest request = new DeleteRequest(indexName, WORKFLOW_DOC_TYPE, workflowId); - - try { - DeleteResponse response = elasticSearchClient.delete(request); - - if (response.getResult() == DocWriteResponse.Result.NOT_FOUND) { - logger.error("Index removal failed - document not found by id: {}", workflowId); - } - long endTime = Instant.now().toEpochMilli(); - logger.debug("Time taken {} for removing workflow: {}", endTime - startTime, workflowId); - Monitors.recordESIndexTime("remove_workflow", WORKFLOW_DOC_TYPE, endTime - startTime); - Monitors.recordWorkerQueueSize("indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size()); - } catch (Exception e) { - logger.error("Failed to remove workflow {} from index", workflowId, e); - Monitors.error(className, "remove"); - } - } - - @Override - public CompletableFuture asyncRemoveWorkflow(String workflowId) { - return CompletableFuture.runAsync(() -> removeWorkflow(workflowId), executorService); - } - - @Override - public void updateWorkflow(String workflowInstanceId, String[] keys, Object[] values) { - if (keys.length != values.length) { - throw new ApplicationException(ApplicationException.Code.INVALID_INPUT, "Number of keys and values do not match"); - } - - long startTime = Instant.now().toEpochMilli(); - UpdateRequest request = new UpdateRequest(indexName, WORKFLOW_DOC_TYPE, workflowInstanceId); - Map source = IntStream.range(0, keys.length).boxed() - .collect(Collectors.toMap(i -> keys[i], i -> values[i])); - request.doc(source); - - logger.debug("Updating workflow {} with {}", workflowInstanceId, source); - - new RetryUtil().retryOnException(() -> { - try { - return elasticSearchClient.update(request); - } catch (IOException e) { - throw new RuntimeException(e); - } - }, null, null, RETRY_COUNT, "Updating workflow document: " + workflowInstanceId, "updateWorkflow"); - long endTime = Instant.now().toEpochMilli(); - logger.debug("Time taken {} for updating workflow: {}", endTime - startTime, workflowInstanceId); - Monitors.recordESIndexTime("update_workflow", WORKFLOW_DOC_TYPE, endTime - startTime); - Monitors.recordWorkerQueueSize("indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size()); - } - - @Override - public CompletableFuture 
asyncUpdateWorkflow(String workflowInstanceId, String[] keys, Object[] values) { - return CompletableFuture.runAsync(() -> updateWorkflow(workflowInstanceId, keys, values), executorService); - } - - @Override - public String get(String workflowInstanceId, String fieldToGet) { - - GetRequest request = new GetRequest(indexName, WORKFLOW_DOC_TYPE, workflowInstanceId); - - GetResponse response; - try { - response = elasticSearchClient.get(request); - } catch (IOException e) { - logger.error("Unable to get Workflow: {} from ElasticSearch index: {}", workflowInstanceId, indexName, e); - return null; - } - - if (response.isExists()){ - Map sourceAsMap = response.getSourceAsMap(); - if (sourceAsMap.containsKey(fieldToGet)){ - return sourceAsMap.get(fieldToGet).toString(); - } - } - - logger.debug("Unable to find Workflow: {} in ElasticSearch index: {}.", workflowInstanceId, indexName); - return null; - } - - private SearchResult searchObjectIdsViaExpression(String structuredQuery, int start, int size, List sortOptions, String freeTextQuery, String docType) { - try { - // Build query - QueryBuilder queryBuilder = QueryBuilders.matchAllQuery(); - if(StringUtils.isNotEmpty(structuredQuery)) { - Expression expression = Expression.fromString(structuredQuery); - queryBuilder = expression.getFilterBuilder(); - } - - BoolQueryBuilder filterQuery = QueryBuilders.boolQuery().must(queryBuilder); - QueryStringQueryBuilder stringQuery = QueryBuilders.queryStringQuery(freeTextQuery); - BoolQueryBuilder fq = QueryBuilders.boolQuery().must(stringQuery).must(filterQuery); - - return searchObjectIds(indexName, fq, start, size, sortOptions, docType); - } catch (Exception e) { - throw new ApplicationException(ApplicationException.Code.BACKEND_ERROR, e.getMessage(), e); - } - } - - /** - * Tries to find object ids for a given query in an index. - * - * @param indexName The name of the index. - * @param queryBuilder The query to use for searching. - * @param start The start to use. - * @param size The total return size. - * @param sortOptions A list of string options to sort in the form VALUE:ORDER; where ORDER is optional and can be either ASC OR DESC. - * @param docType The document type to searchObjectIdsViaExpression for. - * - * @return The SearchResults which includes the count and IDs that were found. - * @throws IOException If we cannot communicate with ES. - */ - private SearchResult searchObjectIds(String indexName, QueryBuilder queryBuilder, int start, int size, List sortOptions, String docType) throws IOException { - - SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); - searchSourceBuilder.query(queryBuilder); - searchSourceBuilder.from(start); - searchSourceBuilder.size(size); - - if (sortOptions != null && !sortOptions.isEmpty()) { - - for (String sortOption : sortOptions) { - SortOrder order = SortOrder.ASC; - String field = sortOption; - int index = sortOption.indexOf(":"); - if (index > 0) { - field = sortOption.substring(0, index); - order = SortOrder.valueOf(sortOption.substring(index + 1)); - } - searchSourceBuilder.sort(new FieldSortBuilder(field).order(order)); - } - } - - // Generate the actual request to send to ES. 
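searchObjectIds() above accepts sort options of the form "FIELD" or "FIELD:ORDER" and defaults to ascending when no order is given. A standalone sketch of that parsing, returned as a simple field/ascending pair instead of an Elasticsearch FieldSortBuilder:

import java.util.AbstractMap.SimpleEntry;
import java.util.Map.Entry;

public class SortOptionParser {

    public static Entry<String, Boolean> parse(String sortOption) {
        String field = sortOption;
        boolean ascending = true;
        int index = sortOption.indexOf(':');
        if (index > 0) { // can't be 0: the field name has to come first
            field = sortOption.substring(0, index);
            ascending = "ASC".equalsIgnoreCase(sortOption.substring(index + 1));
        }
        return new SimpleEntry<>(field, ascending);
    }

    public static void main(String[] args) {
        System.out.println(parse("updateTime:DESC")); // updateTime=false
        System.out.println(parse("endTime"));         // endTime=true
    }
}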
- SearchRequest searchRequest = new SearchRequest(indexName); - searchRequest.types(docType); - searchRequest.source(searchSourceBuilder); - - SearchResponse response = elasticSearchClient.search(searchRequest); - - List result = new LinkedList<>(); - response.getHits().forEach(hit -> result.add(hit.getId())); - long count = response.getHits().getTotalHits(); - return new SearchResult<>(count, result); - } - - @Override - public List searchArchivableWorkflows(String indexName, long archiveTtlDays) { - QueryBuilder q = QueryBuilders.boolQuery() - .must(QueryBuilders.rangeQuery("endTime").lt(LocalDate.now(ZoneOffset.UTC).minusDays(archiveTtlDays).toString()).gte(LocalDate.now(ZoneOffset.UTC).minusDays(archiveTtlDays).minusDays(1).toString())) - .should(QueryBuilders.termQuery("status", "COMPLETED")) - .should(QueryBuilders.termQuery("status", "FAILED")) - .should(QueryBuilders.termQuery("status", "TIMED_OUT")) - .should(QueryBuilders.termQuery("status", "TERMINATED")) - .mustNot(QueryBuilders.existsQuery("archived")) - .minimumShouldMatch(1); - - SearchResult workflowIds; - try { - workflowIds = searchObjectIds(indexName, q, 0, 1000, null, WORKFLOW_DOC_TYPE); - } catch (IOException e) { - logger.error("Unable to communicate with ES to find archivable workflows", e); - return Collections.emptyList(); - } - - return workflowIds.getResults(); - } - - public List searchRecentRunningWorkflows(int lastModifiedHoursAgoFrom, int lastModifiedHoursAgoTo) { - DateTime dateTime = new DateTime(); - QueryBuilder q = QueryBuilders.boolQuery() - .must(QueryBuilders.rangeQuery("updateTime") - .gt(dateTime.minusHours(lastModifiedHoursAgoFrom))) - .must(QueryBuilders.rangeQuery("updateTime") - .lt(dateTime.minusHours(lastModifiedHoursAgoTo))) - .must(QueryBuilders.termQuery("status", "RUNNING")); - - SearchResult workflowIds; - try { - workflowIds = searchObjectIds(indexName, q, 0, 5000, Collections.singletonList("updateTime:ASC"), WORKFLOW_DOC_TYPE); - } catch (IOException e) { - logger.error("Unable to communicate with ES to find recent running workflows", e); - return Collections.emptyList(); - } - - return workflowIds.getResults(); - } - - private void indexObject(final String index, final String docType, final String docId, final Object doc) { - - byte[] docBytes; - try { - docBytes = objectMapper.writeValueAsBytes(doc); - } catch (JsonProcessingException e) { - logger.error("Failed to convert {} '{}' to byte string", docType, docId); - return; - } - - IndexRequest request = new IndexRequest(index, docType, docId); - request.source(docBytes, XContentType.JSON); - - if(bulkRequests.get(docType) == null) { - bulkRequests.put(docType, new BulkRequests(System.currentTimeMillis(), new BulkRequest())); - } - - bulkRequests.get(docType).getBulkRequest().add(request); - if (bulkRequests.get(docType).getBulkRequest().numberOfActions() >= this.indexBatchSize) { - indexBulkRequest(docType); - } - } - - private synchronized void indexBulkRequest(String docType) { - if (bulkRequests.get(docType).getBulkRequest() != null && bulkRequests.get(docType).getBulkRequest().numberOfActions() > 0) { - synchronized (bulkRequests.get(docType).getBulkRequest()) { - indexWithRetry(bulkRequests.get(docType).getBulkRequest().get(), "Bulk Indexing " + docType, docType); - bulkRequests.put(docType, new BulkRequests(System.currentTimeMillis(), new BulkRequest())); - } - } - } - - /** - * Performs an index operation with a retry. - * @param request The index request that we want to perform. 
- * @param operationDescription The type of operation that we are performing. - */ - private void indexWithRetry(final BulkRequest request, final String operationDescription, String docType) { - try { - long startTime = Instant.now().toEpochMilli(); - new RetryUtil().retryOnException(() -> { - try { - return elasticSearchClient.bulk(request); - } catch (IOException e) { - throw new RuntimeException(e); - } - }, null, null, RETRY_COUNT, operationDescription, "indexWithRetry"); - long endTime = Instant.now().toEpochMilli(); - logger.debug("Time taken {} for indexing object of type: {}", endTime - startTime, docType); - Monitors.recordESIndexTime("index_object", docType,endTime - startTime); - Monitors.recordWorkerQueueSize("indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size()); - Monitors.recordWorkerQueueSize("logQueue", ((ThreadPoolExecutor) logExecutorService).getQueue().size()); - } catch (Exception e) { - Monitors.error(className, "index"); - logger.error("Failed to index {} for request type: {}", request.toString(), docType, e); - } - } - - @Override - public List getMessages(String queue) { - try { - Expression expression = Expression.fromString("queue='" + queue + "'"); - QueryBuilder queryBuilder = expression.getFilterBuilder(); - - BoolQueryBuilder filterQuery = QueryBuilders.boolQuery().must(queryBuilder); - QueryStringQueryBuilder stringQuery = QueryBuilders.queryStringQuery("*"); - BoolQueryBuilder query = QueryBuilders.boolQuery().must(stringQuery).must(filterQuery); - - // Create the searchObjectIdsViaExpression source - SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); - searchSourceBuilder.query(query); - searchSourceBuilder.sort(new FieldSortBuilder("created").order(SortOrder.ASC)); - - // Generate the actual request to send to ES. 
- SearchRequest searchRequest = new SearchRequest(logIndexPrefix + "*"); - searchRequest.types(MSG_DOC_TYPE); - searchRequest.source(searchSourceBuilder); - - SearchResponse response = elasticSearchClient.search(searchRequest); - return mapGetMessagesResponse(response); - } catch (Exception e) { - logger.error("Failed to get messages for queue: {}", queue, e); - throw new ApplicationException(ApplicationException.Code.BACKEND_ERROR, e.getMessage(), e); - } - } - - private List mapGetMessagesResponse(SearchResponse response) throws IOException { - SearchHit[] hits = response.getHits().getHits(); - TypeFactory factory = TypeFactory.defaultInstance(); - MapType type = factory.constructMapType(HashMap.class, String.class, String.class); - List messages = new ArrayList<>(hits.length); - for (SearchHit hit : hits) { - String source = hit.getSourceAsString(); - Map mapSource = objectMapper.readValue(source, type); - Message msg = new Message(mapSource.get("messageId"), mapSource.get("payload"), null); - messages.add(msg); - } - return messages; - } - - @Override - public List getEventExecutions(String event) { - try { - Expression expression = Expression.fromString("event='" + event + "'"); - QueryBuilder queryBuilder = expression.getFilterBuilder(); - - BoolQueryBuilder filterQuery = QueryBuilders.boolQuery().must(queryBuilder); - QueryStringQueryBuilder stringQuery = QueryBuilders.queryStringQuery("*"); - BoolQueryBuilder query = QueryBuilders.boolQuery().must(stringQuery).must(filterQuery); - - // Create the searchObjectIdsViaExpression source - SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); - searchSourceBuilder.query(query); - searchSourceBuilder.sort(new FieldSortBuilder("created").order(SortOrder.ASC)); - - // Generate the actual request to send to ES. - SearchRequest searchRequest = new SearchRequest(logIndexPrefix + "*"); - searchRequest.types(EVENT_DOC_TYPE); - searchRequest.source(searchSourceBuilder); - - SearchResponse response = elasticSearchClient.search(searchRequest); - - return mapEventExecutionsResponse(response); - } catch (Exception e) { - logger.error("Failed to get executions for event: {}", event, e); - throw new ApplicationException(ApplicationException.Code.BACKEND_ERROR, e.getMessage(), e); - } - } - - private List mapEventExecutionsResponse(SearchResponse response) throws IOException { - SearchHit[] hits = response.getHits().getHits(); - List executions = new ArrayList<>(hits.length); - for (SearchHit hit : hits) { - String source = hit.getSourceAsString(); - EventExecution tel = objectMapper.readValue(source, EventExecution.class); - executions.add(tel); - } - return executions; - } - - /** - * Flush the buffers if bulk requests have not been indexed for the past {@link ElasticSearchConfiguration#getAsyncBufferFlushTimeout()} seconds - * This is to prevent data loss in case the instance is terminated, while the buffer still holds documents to be indexed. 
- */ - private void flushBulkRequests() { - bulkRequests.entrySet().stream() - .filter(entry -> (System.currentTimeMillis() - entry.getValue().getLastFlushTime()) >= asyncBufferFlushTimeout * 1000) - .filter(entry -> entry.getValue().getBulkRequest() != null && entry.getValue().getBulkRequest().numberOfActions() > 0) - .forEach(entry -> { - logger.debug("Flushing bulk request buffer for type {}, size: {}", entry.getKey(), entry.getValue().getBulkRequest().numberOfActions()); - indexBulkRequest(entry.getKey()); - }); - } - - private static class BulkRequests { - private final long lastFlushTime; - private final BulkRequestWrapper bulkRequestWrapper; - - long getLastFlushTime() { - return lastFlushTime; - } - - BulkRequestWrapper getBulkRequest() { - return bulkRequestWrapper; - } - - BulkRequests(long lastFlushTime, BulkRequest bulkRequestWrapper) { - this.lastFlushTime = lastFlushTime; - this.bulkRequestWrapper = new BulkRequestWrapper(bulkRequestWrapper); - } - } -} diff --git a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/Expression.java b/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/Expression.java deleted file mode 100644 index a2d170491f..0000000000 --- a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/Expression.java +++ /dev/null @@ -1,126 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -/** - * - */ -package com.netflix.conductor.dao.es5.index.query.parser; - -import com.netflix.conductor.elasticsearch.query.parser.AbstractNode; -import com.netflix.conductor.elasticsearch.query.parser.BooleanOp; -import com.netflix.conductor.elasticsearch.query.parser.ParserException; - -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.QueryBuilders; - -import java.io.BufferedInputStream; -import java.io.ByteArrayInputStream; -import java.io.InputStream; - -/** - * @author Viren - * - */ -public class Expression extends AbstractNode implements FilterProvider { - - private NameValue nameVal; - - private GroupedExpression ge; - - private BooleanOp op; - - private Expression rhs; - - public Expression(InputStream is) throws ParserException { - super(is); - } - - @Override - protected void _parse() throws Exception { - byte[] peeked = peek(1); - - if(peeked[0] == '('){ - this.ge = new GroupedExpression(is); - }else{ - this.nameVal = new NameValue(is); - } - - peeked = peek(3); - if( isBoolOpr(peeked) ){ - //we have an expression next - this.op = new BooleanOp(is); - this.rhs = new Expression(is); - } - } - - public boolean isBinaryExpr(){ - return this.op != null; - } - - public BooleanOp getOperator(){ - return this.op; - } - - public Expression getRightHandSide(){ - return this.rhs; - } - - public boolean isNameValue(){ - return this.nameVal != null; - } - - public NameValue getNameValue(){ - return this.nameVal; - } - - public GroupedExpression getGroupedExpression(){ - return this.ge; - } - - @Override - public QueryBuilder getFilterBuilder(){ - QueryBuilder lhs = null; - if(nameVal != null){ - lhs = nameVal.getFilterBuilder(); - }else{ - lhs = ge.getFilterBuilder(); - } - - if(this.isBinaryExpr()){ - QueryBuilder rhsFilter = rhs.getFilterBuilder(); - if(this.op.isAnd()){ - return QueryBuilders.boolQuery().must(lhs).must(rhsFilter); - }else{ - return QueryBuilders.boolQuery().should(lhs).should(rhsFilter); - } - }else{ - return lhs; - } - - } - - @Override - public String toString(){ - if(isBinaryExpr()){ - return "" + (nameVal==null?ge:nameVal) + op + rhs; - }else{ - return "" + (nameVal==null?ge:nameVal); - } - } - - public static Expression fromString(String value) throws ParserException{ - return new Expression(new BufferedInputStream(new ByteArrayInputStream(value.getBytes()))); - } -} diff --git a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/FilterProvider.java b/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/FilterProvider.java deleted file mode 100644 index 8927e0712d..0000000000 --- a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/FilterProvider.java +++ /dev/null @@ -1,35 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -/** - * - */ -package com.netflix.conductor.dao.es5.index.query.parser; - -import org.elasticsearch.index.query.QueryBuilder; - -/** - * @author Viren - * - */ -public interface FilterProvider { - - /** - * - * @return FilterBuilder for elasticsearch - */ - public QueryBuilder getFilterBuilder(); - -} diff --git a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/GroupedExpression.java b/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/GroupedExpression.java deleted file mode 100644 index 3b59eaa4fe..0000000000 --- a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/GroupedExpression.java +++ /dev/null @@ -1,67 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -/** - * - */ -package com.netflix.conductor.dao.es5.index.query.parser; - -import com.netflix.conductor.elasticsearch.query.parser.AbstractNode; -import com.netflix.conductor.elasticsearch.query.parser.ParserException; - -import org.elasticsearch.index.query.QueryBuilder; - -import java.io.InputStream; - -/** - * @author Viren - * - */ -public class GroupedExpression extends AbstractNode implements FilterProvider { - - private Expression expression; - - public GroupedExpression(InputStream is) throws ParserException { - super(is); - } - - @Override - protected void _parse() throws Exception { - byte[] peeked = read(1); - assertExpected(peeked, "("); - - this.expression = new Expression(is); - - peeked = read(1); - assertExpected(peeked, ")"); - - } - - @Override - public String toString() { - return "(" + expression + ")"; - } - - /** - * @return the expression - */ - public Expression getExpression() { - return expression; - } - - @Override - public QueryBuilder getFilterBuilder() { - return expression.getFilterBuilder(); - } - - -} diff --git a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/NameValue.java b/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/NameValue.java deleted file mode 100644 index 2803442016..0000000000 --- a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/NameValue.java +++ /dev/null @@ -1,126 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. 
- */ -/** - * - */ -package com.netflix.conductor.dao.es5.index.query.parser; - -import com.netflix.conductor.elasticsearch.query.parser.AbstractNode; -import com.netflix.conductor.elasticsearch.query.parser.ComparisonOp; -import com.netflix.conductor.elasticsearch.query.parser.ComparisonOp.Operators; -import com.netflix.conductor.elasticsearch.query.parser.ConstValue; -import com.netflix.conductor.elasticsearch.query.parser.ListConst; -import com.netflix.conductor.elasticsearch.query.parser.Name; -import com.netflix.conductor.elasticsearch.query.parser.ParserException; -import com.netflix.conductor.elasticsearch.query.parser.Range; - -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.QueryBuilders; - -import java.io.InputStream; - -/** - * @author Viren - *
- * Represents an expression of the form below:
- * key OPR value
- * where OPR is the comparison operator, which can be one of the following:
- * 	>, <, =, !=, IN, BETWEEN
- *
- */ -public class NameValue extends AbstractNode implements FilterProvider { - - private Name name; - - private ComparisonOp op; - - private ConstValue value; - - private Range range; - - private ListConst valueList; - - public NameValue(InputStream is) throws ParserException { - super(is); - } - - @Override - protected void _parse() throws Exception { - this.name = new Name(is); - this.op = new ComparisonOp(is); - - if (this.op.getOperator().equals(Operators.BETWEEN.value())) { - this.range = new Range(is); - } - if (this.op.getOperator().equals(Operators.IN.value())) { - this.valueList = new ListConst(is); - } else { - this.value = new ConstValue(is); - } - } - - @Override - public String toString() { - return "" + name + op + value; - } - - /** - * @return the name - */ - public Name getName() { - return name; - } - - /** - * @return the op - */ - public ComparisonOp getOp() { - return op; - } - - /** - * @return the value - */ - public ConstValue getValue() { - return value; - } - - @Override - public QueryBuilder getFilterBuilder() { - if (op.getOperator().equals(Operators.EQUALS.value())) { - return QueryBuilders.queryStringQuery(name.getName() + ":" + value.getValue().toString()); - } else if (op.getOperator().equals(Operators.BETWEEN.value())) { - return QueryBuilders.rangeQuery(name.getName()).from(range.getLow()).to(range.getHigh()); - } else if (op.getOperator().equals(Operators.IN.value())) { - return QueryBuilders.termsQuery(name.getName(), valueList.getList()); - } else if (op.getOperator().equals(Operators.NOT_EQUALS.value())) { - return QueryBuilders.queryStringQuery("NOT " + name.getName() + ":" + value.getValue().toString()); - } else if (op.getOperator().equals(Operators.GREATER_THAN.value())) { - return QueryBuilders.rangeQuery(name.getName()).from(value.getValue()).includeLower(false).includeUpper(false); - } else if (op.getOperator().equals(Operators.IS.value())) { - if (value.getSysConstant().equals(ConstValue.SystemConsts.NULL)) { - return QueryBuilders.boolQuery().mustNot(QueryBuilders.boolQuery().must(QueryBuilders.matchAllQuery()).mustNot(QueryBuilders.existsQuery(name.getName()))); - } else if (value.getSysConstant().equals(ConstValue.SystemConsts.NOT_NULL)) { - return QueryBuilders.boolQuery().mustNot(QueryBuilders.boolQuery().must(QueryBuilders.matchAllQuery()).must(QueryBuilders.existsQuery(name.getName()))); - } - } else if (op.getOperator().equals(Operators.LESS_THAN.value())) { - return QueryBuilders.rangeQuery(name.getName()).to(value.getValue()).includeLower(false).includeUpper(false); - } else if (op.getOperator().equals(Operators.STARTS_WITH.value())) { - return QueryBuilders.prefixQuery(name.getName(), value.getUnquotedValue()); - } - - throw new IllegalStateException("Incorrect/unsupported operators"); - } - - -} diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java deleted file mode 100644 index 5d8c95a9e2..0000000000 --- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java +++ /dev/null @@ -1,194 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.elasticsearch; - -import com.google.common.base.Strings; -import com.netflix.conductor.core.config.Configuration; -import java.net.URI; -import java.util.Arrays; -import java.util.List; -import java.util.stream.Collectors; - -public interface ElasticSearchConfiguration extends Configuration { - - String ELASTICSEARCH_PROPERTY_NAME = "workflow.elasticsearch.instanceType"; - ElasticSearchInstanceType ELASTICSEARCH_INSTANCE_TYPE_DEFAULT_VALUE = ElasticSearchInstanceType.MEMORY; - - String ELASTIC_SEARCH_URL_PROPERTY_NAME = "workflow.elasticsearch.url"; - String ELASTIC_SEARCH_URL_DEFAULT_VALUE = "localhost:9300"; - - String ELASTIC_SEARCH_HEALTH_COLOR_PROPERTY_NAME = "workflow.elasticsearch.cluster.health.color"; - String ELASTIC_SEARCH_HEALTH_COLOR_DEFAULT_VALUE = "green"; - - String ELASTIC_SEARCH_INDEX_NAME_PROPERTY_NAME = "workflow.elasticsearch.index.name"; - String ELASTIC_SEARCH_INDEX_NAME_DEFAULT_VALUE = "conductor"; - - String TASK_LOG_INDEX_NAME_PROPERTY_NAME = "workflow.elasticsearch.tasklog.index.name"; - String TASK_LOG_INDEX_NAME_DEFAULT_VALUE = "task_log"; - - String EMBEDDED_DATA_PATH_PROPERTY_NAME = "workflow.elasticsearch.embedded.data.path"; - String EMBEDDED_DATA_PATH_DEFAULT_VALUE = "path.data"; - - String EMBEDDED_HOME_PATH_PROPERTY_NAME = "workflow.elasticsearch.embedded.data.home"; - String EMBEDDED_HOME_PATH_DEFAULT_VALUE = "path.home"; - - String EMBEDDED_PORT_PROPERTY_NAME = "workflow.elasticsearch.embedded.port"; - int EMBEDDED_PORT_DEFAULT_VALUE = 9200; - - String EMBEDDED_CLUSTER_NAME_PROPERTY_NAME = "workflow.elasticsearch.embedded.cluster.name"; - String EMBEDDED_CLUSTER_NAME_DEFAULT_VALUE = "elasticsearch_test"; - - String EMBEDDED_HOST_PROPERTY_NAME = "workflow.elasticsearch.embedded.host"; - String EMBEDDED_HOST_DEFAULT_VALUE = "127.0.0.1"; - - String EMBEDDED_SETTINGS_FILE_PROPERTY_NAME = "workflow.elasticsearch.embedded.settings.file"; - String EMBEDDED_SETTINGS_FILE_DEFAULT_VALUE = "embedded-es.yml"; - - String ELASTIC_SEARCH_ARCHIVE_SEARCH_BATCH_SIZE_PROPERTY_NAME = "workflow.elasticsearch.archive.search.batchSize"; - int ELASTIC_SEARCH_ARCHIVE_SEARCH_BATCH_SIZE_DEFAULT_VALUE = 5000; - - String ELASTIC_SEARCH_INDEX_BATCH_SIZE_PROPERTY_NAME = "workflow.elasticsearch.index.batchSize"; - int ELASTIC_SEARCH_INDEX_BATCH_SIZE_DEFAULT_VALUE = 1; - - String ELASTIC_SEARCH_ASYNC_DAO_WORKER_QUEUE_SIZE = "workflow.elasticsearch.async.dao.worker.queue.size"; - int DEFAULT_ASYNC_WORKER_QUEUE_SIZE = 100; - - String ELASTIC_SEARCH_ASYNC_DAO_MAX_POOL_SIZE = "workflow.elasticsearch.async.dao.max.pool.size"; - int DEFAULT_ASYNC_MAX_POOL_SIZE = 12; - - String ELASTIC_SEARCH_ASYNC_BUFFER_FLUSH_TIMEOUT_PROPERTY_NAME = "workflow.elasticsearch.async.buffer.flush.timeout.seconds"; - int ELASTIC_SEARCH_ASYNC_BUFFER_FLUSH_TIMEOUT_DEFAULT_VALUE = 10; - - String ELASTIC_SEARCH_INDEX_SHARD_COUNT_PROPERTY_NAME = "workflow.elasticsearch.index.shard.count"; - int ELASTIC_SEARCH_INDEX_SHARD_COUNT_DEFAULT_VALUE = 5; - - String ELASTIC_SEARCH_INDEX_REPLICAS_COUNT_PROPERTY_NAME = "workflow.elasticsearch.index.replicas.count"; - int 
ELASTIC_SEARCH_INDEX_REPLICAS_COUNT_DEFAULT_VALUE = 1; - - String ELASTIC_SEARCH_TASK_LOG_RESULT_LIMIT = "tasklog.elasticsearch.query.size"; - int ELASTIC_SEARCH_TASK_LOG_RESULT_LIMIT_DEFAULT_VALUE = 10; - - String ELASTIC_SEARCH_REST_CLIENT_CONNECTION_REQUEST_TIMEOUT_PROPERTY_NAME = "workflow.elasticsearch.rest.client.connectionRequestTimeout.milliseconds"; - int ELASTIC_SEARCH_REST_CLIENT_CONNECTION_REQUEST_TIMEOUT_DEFAULT_VALUE = -1; - - default String getURL() { - return getProperty(ELASTIC_SEARCH_URL_PROPERTY_NAME, ELASTIC_SEARCH_URL_DEFAULT_VALUE); - } - - default List getURIs(){ - - String clusterAddress = getURL(); - - String[] hosts = clusterAddress.split(","); - - return Arrays.stream(hosts).map( host -> - (host.startsWith("http://") || host.startsWith("https://") || host.startsWith("tcp://")) ? URI.create(host) : URI.create("tcp://" + host) - ).collect(Collectors.toList()); - } - - default String getIndexName() { - return getProperty(ELASTIC_SEARCH_INDEX_NAME_PROPERTY_NAME, ELASTIC_SEARCH_INDEX_NAME_DEFAULT_VALUE); - } - - default String getTasklogIndexName() { - return getProperty(TASK_LOG_INDEX_NAME_PROPERTY_NAME, TASK_LOG_INDEX_NAME_DEFAULT_VALUE); - } - - default String getClusterHealthColor() { - return getProperty(ELASTIC_SEARCH_HEALTH_COLOR_PROPERTY_NAME, ELASTIC_SEARCH_HEALTH_COLOR_DEFAULT_VALUE); - } - - default String getEmbeddedDataPath() { - return getProperty(EMBEDDED_DATA_PATH_PROPERTY_NAME, EMBEDDED_DATA_PATH_DEFAULT_VALUE); - } - - default String getEmbeddedHomePath() { - return getProperty(EMBEDDED_HOME_PATH_PROPERTY_NAME, EMBEDDED_HOME_PATH_DEFAULT_VALUE); - } - - default int getEmbeddedPort() { - return getIntProperty(EMBEDDED_PORT_PROPERTY_NAME, EMBEDDED_PORT_DEFAULT_VALUE); - - } - - default String getEmbeddedClusterName() { - return getProperty(EMBEDDED_CLUSTER_NAME_PROPERTY_NAME, EMBEDDED_CLUSTER_NAME_DEFAULT_VALUE); - } - - default String getEmbeddedHost() { - return getProperty(EMBEDDED_HOST_PROPERTY_NAME, EMBEDDED_HOST_DEFAULT_VALUE); - } - - default String getEmbeddedSettingsFile() { - return getProperty(EMBEDDED_SETTINGS_FILE_PROPERTY_NAME, EMBEDDED_SETTINGS_FILE_DEFAULT_VALUE); - } - - default int getElasticsearchRestClientConnectionRequestTimeout() { - return getIntProperty(ELASTIC_SEARCH_REST_CLIENT_CONNECTION_REQUEST_TIMEOUT_PROPERTY_NAME, - ELASTIC_SEARCH_REST_CLIENT_CONNECTION_REQUEST_TIMEOUT_DEFAULT_VALUE); - } - - default ElasticSearchInstanceType getElasticSearchInstanceType() { - ElasticSearchInstanceType elasticSearchInstanceType = ELASTICSEARCH_INSTANCE_TYPE_DEFAULT_VALUE; - String instanceTypeConfig = getProperty(ELASTICSEARCH_PROPERTY_NAME, ""); - if (!Strings.isNullOrEmpty(instanceTypeConfig)) { - elasticSearchInstanceType = ElasticSearchInstanceType.valueOf(instanceTypeConfig.toUpperCase()); - } - return elasticSearchInstanceType; - } - - enum ElasticSearchInstanceType { - MEMORY, EXTERNAL - } - - default int getArchiveSearchBatchSize() { - return getIntProperty(ELASTIC_SEARCH_ARCHIVE_SEARCH_BATCH_SIZE_PROPERTY_NAME, - ELASTIC_SEARCH_ARCHIVE_SEARCH_BATCH_SIZE_DEFAULT_VALUE); - } - - default int getIndexBatchSize() { - return getIntProperty(ELASTIC_SEARCH_INDEX_BATCH_SIZE_PROPERTY_NAME, - ELASTIC_SEARCH_INDEX_BATCH_SIZE_DEFAULT_VALUE); - } - - default int getAsyncWorkerQueueSize() { - return getIntProperty(ELASTIC_SEARCH_ASYNC_DAO_WORKER_QUEUE_SIZE, DEFAULT_ASYNC_WORKER_QUEUE_SIZE); - } - - default int getAsyncMaxPoolSize() { - return getIntProperty(ELASTIC_SEARCH_ASYNC_DAO_MAX_POOL_SIZE, DEFAULT_ASYNC_MAX_POOL_SIZE); - } - - default 
int getAsyncBufferFlushTimeout() { - return getIntProperty(ELASTIC_SEARCH_ASYNC_BUFFER_FLUSH_TIMEOUT_PROPERTY_NAME, - ELASTIC_SEARCH_ASYNC_BUFFER_FLUSH_TIMEOUT_DEFAULT_VALUE); - } - - default int getElasticSearchIndexShardCount() - { - return getIntProperty(ELASTIC_SEARCH_INDEX_SHARD_COUNT_PROPERTY_NAME, - ELASTIC_SEARCH_INDEX_SHARD_COUNT_DEFAULT_VALUE); - } - - default int getElasticSearchIndexReplicationCount() - { - return getIntProperty(ELASTIC_SEARCH_INDEX_REPLICAS_COUNT_PROPERTY_NAME, - ELASTIC_SEARCH_INDEX_REPLICAS_COUNT_DEFAULT_VALUE); - } - - default int getElasticSearchTasklogLimit() - { - return getIntProperty(ELASTIC_SEARCH_TASK_LOG_RESULT_LIMIT, - ELASTIC_SEARCH_TASK_LOG_RESULT_LIMIT_DEFAULT_VALUE); - } -} diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchModule.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchModule.java deleted file mode 100644 index 92bc8c069a..0000000000 --- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchModule.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.elasticsearch; - -import com.google.inject.AbstractModule; -import com.google.inject.Singleton; -import com.netflix.conductor.elasticsearch.es5.ElasticSearchV5Module; -import org.elasticsearch.client.Client; -import org.elasticsearch.client.RestClient; - -public class ElasticSearchModule extends AbstractModule { - @Override - protected void configure() { - - ElasticSearchConfiguration esConfiguration = new SystemPropertiesElasticSearchConfiguration(); - - bind(ElasticSearchConfiguration.class).to(SystemPropertiesElasticSearchConfiguration.class); - bind(Client.class).toProvider(ElasticSearchTransportClientProvider.class).in(Singleton.class); - bind(RestClient.class).toProvider(ElasticSearchRestClientProvider.class).in(Singleton.class); - - install(new ElasticSearchV5Module(esConfiguration)); - } -} diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchRestClientProvider.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchRestClientProvider.java deleted file mode 100644 index 5cb00d0e7c..0000000000 --- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchRestClientProvider.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. 
- */ -package com.netflix.conductor.elasticsearch; - -import java.net.URI; -import java.util.List; -import java.util.stream.Collectors; -import javax.inject.Inject; -import javax.inject.Provider; -import org.apache.http.HttpHost; -import org.elasticsearch.client.RestClient; -import org.elasticsearch.client.RestClientBuilder; - -public class ElasticSearchRestClientProvider implements Provider { - private final ElasticSearchConfiguration configuration; - - @Inject - public ElasticSearchRestClientProvider(ElasticSearchConfiguration configuration) { - this.configuration = configuration; - } - - @Override - public RestClient get() { - RestClientBuilder restClientBuilder = RestClient.builder(convertToHttpHosts(configuration.getURIs())); - - if (configuration.getElasticsearchRestClientConnectionRequestTimeout() > 0) { - restClientBuilder.setRequestConfigCallback(requestConfigBuilder -> requestConfigBuilder.setConnectionRequestTimeout(configuration.getElasticsearchRestClientConnectionRequestTimeout())); - } - - return restClientBuilder.build(); - } - - private HttpHost[] convertToHttpHosts(List hosts) { - List list = hosts.stream() - .map(host -> new HttpHost(host.getHost(), host.getPort(), host.getScheme())) - .collect(Collectors.toList()); - - return list.toArray(new HttpHost[list.size()]); - } -} diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchTransportClientProvider.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchTransportClientProvider.java deleted file mode 100644 index 6a00d50168..0000000000 --- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchTransportClientProvider.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. 
- */ -package com.netflix.conductor.elasticsearch; - -import com.google.inject.ProvisionException; -import java.net.InetAddress; -import java.net.URI; -import java.net.UnknownHostException; -import java.util.List; -import java.util.Optional; -import javax.inject.Inject; -import javax.inject.Provider; -import org.elasticsearch.client.Client; -import org.elasticsearch.client.transport.TransportClient; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.InetSocketTransportAddress; -import org.elasticsearch.transport.client.PreBuiltTransportClient; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class ElasticSearchTransportClientProvider implements Provider { - private static final Logger logger = LoggerFactory.getLogger(ElasticSearchTransportClientProvider.class); - - private final ElasticSearchConfiguration configuration; - - @Inject - public ElasticSearchTransportClientProvider(ElasticSearchConfiguration configuration) { - this.configuration = configuration; - } - - @Override - public Client get() { - - Settings settings = Settings.builder() - .put("client.transport.ignore_cluster_name", true) - .put("client.transport.sniff", true) - .build(); - - TransportClient tc = new PreBuiltTransportClient(settings); - - List clusterAddresses = configuration.getURIs(); - - if (clusterAddresses.isEmpty()) { - logger.warn(ElasticSearchConfiguration.ELASTIC_SEARCH_URL_PROPERTY_NAME + - " is not set. Indexing will remain DISABLED."); - } - for (URI hostAddress : clusterAddresses) { - int port = Optional.ofNullable(hostAddress.getPort()).orElse(9200); - try { - tc.addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName(hostAddress.getHost()), port)); - } catch (UnknownHostException uhe){ - throw new ProvisionException("Invalid host" + hostAddress.getHost(), uhe); - } - } - return tc; - } -} diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/EmbeddedElasticSearch.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/EmbeddedElasticSearch.java deleted file mode 100644 index 6e579dfdd9..0000000000 --- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/EmbeddedElasticSearch.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. 
- */ -package com.netflix.conductor.elasticsearch; - -import com.netflix.conductor.service.Lifecycle; -import java.io.File; -import java.io.IOException; -import java.nio.file.FileSystems; -import java.nio.file.Files; -import java.nio.file.Path; -import org.apache.commons.io.FileUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public interface EmbeddedElasticSearch extends Lifecycle { - Logger logger = LoggerFactory.getLogger(EmbeddedElasticSearch.class); - - default void cleanDataDir(String path) { - File dataDir = new File(path); - - try { - logger.info("Deleting contents of data dir {}", path); - if (dataDir.exists()) { - FileUtils.cleanDirectory(dataDir); - } - } catch (IOException e) { - logger.error(String.format("Failed to delete ES data dir: %s", dataDir.getAbsolutePath()), e); - } - } - - default File createDataDir(String dataDirLoc) throws IOException { - Path dataDirPath = FileSystems.getDefault().getPath(dataDirLoc); - Files.createDirectories(dataDirPath); - return dataDirPath.toFile(); - } - - default File setupDataDir(String path) throws IOException { - cleanDataDir(path); - return createDataDir(path); - } -} diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/EmbeddedElasticSearchProvider.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/EmbeddedElasticSearchProvider.java deleted file mode 100644 index f27b1f9c3c..0000000000 --- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/EmbeddedElasticSearchProvider.java +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.elasticsearch; - -import java.util.Optional; -import javax.inject.Provider; - -public interface EmbeddedElasticSearchProvider extends Provider> { -} diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/SystemPropertiesElasticSearchConfiguration.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/SystemPropertiesElasticSearchConfiguration.java deleted file mode 100644 index c6a4110c91..0000000000 --- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/SystemPropertiesElasticSearchConfiguration.java +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. 
- */ -package com.netflix.conductor.elasticsearch; - -import com.netflix.conductor.core.config.SystemPropertiesConfiguration; - -public class SystemPropertiesElasticSearchConfiguration - extends SystemPropertiesConfiguration implements ElasticSearchConfiguration { -} diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/ElasticSearchV5Module.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/ElasticSearchV5Module.java deleted file mode 100644 index e73106c492..0000000000 --- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/ElasticSearchV5Module.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.elasticsearch.es5; - -import com.google.inject.AbstractModule; -import com.netflix.conductor.dao.IndexDAO; -import com.netflix.conductor.dao.es5.index.ElasticSearchDAOV5; -import com.netflix.conductor.dao.es5.index.ElasticSearchRestDAOV5; -import com.netflix.conductor.elasticsearch.ElasticSearchConfiguration; -import com.netflix.conductor.elasticsearch.EmbeddedElasticSearchProvider; -import java.util.HashSet; -import java.util.Set; - - -/** - * @author Viren - * Provider for the elasticsearch index DAO. - */ -public class ElasticSearchV5Module extends AbstractModule { - - private boolean restTransport; - - public ElasticSearchV5Module(ElasticSearchConfiguration elasticSearchConfiguration) { - - Set REST_SCHEMAS = new HashSet<>(); - REST_SCHEMAS.add("http"); - REST_SCHEMAS.add("https"); - - String esTransport = elasticSearchConfiguration.getURIs().get(0).getScheme(); - - this.restTransport = REST_SCHEMAS.contains(esTransport); - } - - @Override - protected void configure() { - - if (restTransport) { - bind(IndexDAO.class).to(ElasticSearchRestDAOV5.class); - } else { - bind(IndexDAO.class).to(ElasticSearchDAOV5.class); - } - - bind(EmbeddedElasticSearchProvider.class).to(EmbeddedElasticSearchV5Provider.class); - } -} diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5.java deleted file mode 100644 index 3710a55e5e..0000000000 --- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5.java +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. 
- */ -package com.netflix.conductor.elasticsearch.es5; - -import static java.util.Collections.singletonList; - -import com.netflix.conductor.elasticsearch.ElasticSearchConfiguration; -import com.netflix.conductor.elasticsearch.EmbeddedElasticSearch; -import java.io.File; -import java.io.IOException; -import java.nio.file.Files; -import java.util.Collection; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.node.InternalSettingsPreparer; -import org.elasticsearch.node.Node; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.transport.Netty4Plugin; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - - -public class EmbeddedElasticSearchV5 implements EmbeddedElasticSearch { - - private static final Logger logger = LoggerFactory.getLogger(EmbeddedElasticSearchV5.class); - - private final String clusterName; - private final String host; - private final int port; - - private Node instance; - private File dataDir; - - public EmbeddedElasticSearchV5(String clusterName, String host, int port){ - this.clusterName = clusterName; - this.host = host; - this.port = port; - } - - private class PluginConfigurableNode extends Node { - public PluginConfigurableNode(Settings preparedSettings, Collection> classpathPlugins) { - super(InternalSettingsPreparer.prepareEnvironment(preparedSettings, null), classpathPlugins); - } - } - - @Override - public void start() throws Exception { - start(clusterName, host, port); - } - - public synchronized void start(String clusterName, String host, int port) throws Exception { - - if (instance != null) { - String msg = String.format( - "An instance of this Embedded Elastic Search server is already running on port: %d. " + - "It must be stopped before you can call start again.", - getPort() - ); - logger.error(msg); - throw new IllegalStateException(msg); - } - - final Settings settings = getSettings(clusterName, host, port); - dataDir = setupDataDir(settings.get(ElasticSearchConfiguration.EMBEDDED_DATA_PATH_DEFAULT_VALUE)); - - logger.info("Starting ElasticSearch for cluster {} ", settings.get("cluster.name")); - instance = new PluginConfigurableNode(settings, singletonList(Netty4Plugin.class)); - instance.start(); - Runtime.getRuntime().addShutdownHook(new Thread(() -> { - try { - if (instance != null) { - instance.close(); - } - } catch (IOException e) { - logger.error("Error closing ElasticSearch"); - } - })); - logger.info("ElasticSearch cluster {} started in local mode on port {}", instance.settings().get("cluster.name"), getPort()); - } - - private Settings getSettings(String clusterName, String host, int port) throws IOException { - dataDir = Files.createTempDirectory(clusterName + "_" + System.currentTimeMillis() + "data").toFile(); - File homeDir = Files.createTempDirectory(clusterName + "_" + System.currentTimeMillis() + "-home").toFile(); - Settings.Builder settingsBuilder = Settings.builder() - .put("cluster.name", clusterName) - .put("http.host", host) - .put("http.port", port) - .put("transport.tcp.port", port + 100) - .put(ElasticSearchConfiguration.EMBEDDED_DATA_PATH_DEFAULT_VALUE, dataDir.getAbsolutePath()) - .put(ElasticSearchConfiguration.EMBEDDED_HOME_PATH_DEFAULT_VALUE, homeDir.getAbsolutePath()) - .put("http.enabled", true) - .put("script.inline", true) - .put("script.stored", true) - .put("node.data", true) - .put("http.enabled", true) - .put("http.type", "netty4") - .put("transport.type", "netty4"); - - return settingsBuilder.build(); - } - - private String getPort() { - return 
instance.settings().get("http.port"); - } - - @Override - public synchronized void stop() throws Exception { - - if (instance != null && !instance.isClosed()) { - String port = getPort(); - logger.info("Stopping Elastic Search"); - instance.close(); - instance = null; - logger.info("Elastic Search on port {} stopped", port); - } - - } -} diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5Provider.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5Provider.java deleted file mode 100644 index fff43bc019..0000000000 --- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5Provider.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.elasticsearch.es5; - -import com.netflix.conductor.elasticsearch.ElasticSearchConfiguration; -import com.netflix.conductor.elasticsearch.EmbeddedElasticSearch; -import com.netflix.conductor.elasticsearch.EmbeddedElasticSearchProvider; -import java.util.Optional; -import javax.inject.Inject; - -public class EmbeddedElasticSearchV5Provider implements EmbeddedElasticSearchProvider { - private final ElasticSearchConfiguration configuration; - - @Inject - public EmbeddedElasticSearchV5Provider(ElasticSearchConfiguration configuration) { - this.configuration = configuration; - } - - @Override - public Optional get() { - return isEmbedded() ? Optional.of( - new EmbeddedElasticSearchV5( - configuration.getEmbeddedClusterName(), - configuration.getEmbeddedHost(), - configuration.getEmbeddedPort() - ) - ) : Optional.empty(); - } - - private boolean isEmbedded() { - return configuration.getElasticSearchInstanceType().equals(ElasticSearchConfiguration.ElasticSearchInstanceType.MEMORY); - } -} diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/AbstractNode.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/AbstractNode.java deleted file mode 100644 index 1ca29e9587..0000000000 --- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/AbstractNode.java +++ /dev/null @@ -1,187 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -/** - * - */ -package com.netflix.conductor.elasticsearch.query.parser; - -import java.io.InputStream; -import java.math.BigDecimal; -import java.util.HashSet; -import java.util.Set; -import java.util.regex.Pattern; - - -/** - * @author Viren - * - */ -public abstract class AbstractNode { - - public static final Pattern WHITESPACE = Pattern.compile("\\s"); - - protected static Set comparisonOprs = new HashSet(); - - static { - comparisonOprs.add('>'); - comparisonOprs.add('<'); - comparisonOprs.add('='); - } - - protected InputStream is; - - - - protected AbstractNode(InputStream is) throws ParserException { - this.is = is; - this.parse(); - } - - protected boolean isNumber(String test){ - try{ - //If you can convert to a big decimal value, then it is a number. - new BigDecimal(test); - return true; - - }catch(NumberFormatException e){ - //Ignore - } - return false; - } - - protected boolean isBoolOpr(byte[] buffer){ - if(buffer.length > 1 && buffer[0] == 'O' && buffer[1] == 'R'){ - return true; - }else if(buffer.length > 2 && buffer[0] == 'A' && buffer[1] == 'N' && buffer[2] == 'D'){ - return true; - } - return false; - } - - protected boolean isComparisonOpr(byte[] buffer){ - if(buffer[0] == 'I' && buffer[1] == 'N'){ - return true; - }else if(buffer[0] == '!' && buffer[1] == '='){ - return true; - }else{ - return comparisonOprs.contains((char)buffer[0]); - } - - } - - protected byte[] peek(int length) throws Exception { - return read(length, true); - } - - protected byte[] read(int length) throws Exception { - return read(length, false); - } - - protected String readToken() throws Exception { - skipWhitespace(); - StringBuilder sb = new StringBuilder(); - while(is.available() > 0){ - char c = (char) peek(1)[0]; - if(c == ' ' || c == '\t' || c == '\n' || c == '\r'){ - is.skip(1); - break; - }else if(c == '=' || c == '>' || c == '<' || c == '!'){ - //do not skip - break; - } - sb.append(c); - is.skip(1); - } - return sb.toString().trim(); - } - - protected boolean isNumeric(char c) { - if (c == '-' || c == 'e' || (c >= '0' && c <= '9') || c == '.'){ - return true; - } - return false; - } - - protected void assertExpected(byte[] found, String expected) throws ParserException { - assertExpected(new String(found), expected); - } - - protected void assertExpected(String found, String expected) throws ParserException { - if(!found.equals(expected)){ - throw new ParserException("Expected " + expected + ", found " + found); - } - } - protected void assertExpected(char found, char expected) throws ParserException { - if(found != expected){ - throw new ParserException("Expected " + expected + ", found " + found); - } - } - - protected static void efor(int length, FunctionThrowingException consumer) throws Exception { - for(int i = 0; i < length; i++){ - consumer.accept(i); - } - } - - protected abstract void _parse() throws Exception; - - //Public stuff here - private void parse() throws ParserException { - //skip white spaces - skipWhitespace(); - try{ - _parse(); - }catch(Exception e){ - System.out.println("\t" + this.getClass().getSimpleName() + "->" + this.toString()); - if(!(e instanceof ParserException)){ - throw new ParserException("Error parsing", e); - }else{ - throw (ParserException)e; - } - } - skipWhitespace(); - } - - //Private methods - - private byte[] read(int length, boolean peekOnly) throws Exception { - byte[] buf = new byte[length]; - if(peekOnly){ - is.mark(length); - } - efor(length, (Integer c)-> buf[c] = (byte) is.read()); - if(peekOnly){ - is.reset(); - } - return 
buf; - } - - protected void skipWhitespace() throws ParserException { - try{ - while(is.available() > 0){ - byte c = peek(1)[0]; - if(c == ' ' || c == '\t' || c == '\n' || c == '\r'){ - //skip - read(1); - }else{ - break; - } - } - }catch(Exception e){ - throw new ParserException(e.getMessage(), e); - } - } -} diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/BooleanOp.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/BooleanOp.java deleted file mode 100644 index f8f2f0862f..0000000000 --- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/BooleanOp.java +++ /dev/null @@ -1,64 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** - * - */ -package com.netflix.conductor.elasticsearch.query.parser; - -import java.io.InputStream; - -/** - * @author Viren - * - */ -public class BooleanOp extends AbstractNode { - - private String value; - - public BooleanOp(InputStream is) throws ParserException { - super(is); - } - - @Override - protected void _parse() throws Exception { - byte[] buffer = peek(3); - if(buffer.length > 1 && buffer[0] == 'O' && buffer[1] == 'R'){ - this.value = "OR"; - }else if(buffer.length > 2 && buffer[0] == 'A' && buffer[1] == 'N' && buffer[2] == 'D'){ - this.value = "AND"; - }else { - throw new ParserException("No valid boolean operator found..."); - } - read(this.value.length()); - } - - @Override - public String toString(){ - return " " + value + " "; - } - - public String getOperator(){ - return value; - } - - public boolean isAnd(){ - return "AND".equals(value); - } - - public boolean isOr(){ - return "OR".equals(value); - } -} diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ComparisonOp.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ComparisonOp.java deleted file mode 100644 index b938e22319..0000000000 --- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ComparisonOp.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. 
- */ -package com.netflix.conductor.elasticsearch.query.parser; - -import java.io.InputStream; - -/** - * @author Viren - */ -public class ComparisonOp extends AbstractNode { - - public enum Operators { - BETWEEN("BETWEEN"), EQUALS("="), LESS_THAN("<"), GREATER_THAN(">"), IN("IN"), NOT_EQUALS("!="), IS("IS"), - STARTS_WITH("STARTS_WITH"); - - private final String value; - Operators(String value){ - this.value = value; - } - - public String value(){ - return value; - } - } - - static { - int max = 0; - for (Operators op: Operators.values()) { - max = Math.max(max, op.value().length()); - } - maxOperatorLength = max; - } - - private static final int maxOperatorLength; - - private static final int betweenLen = Operators.BETWEEN.value().length(); - private static final int startsWithLen = Operators.STARTS_WITH.value().length(); - - private String value; - - public ComparisonOp(InputStream is) throws ParserException { - super(is); - } - - @Override - protected void _parse() throws Exception { - byte[] peeked = peek(maxOperatorLength); - if(peeked[0] == '=' || peeked[0] == '>' || peeked[0] == '<'){ - this.value = new String(peeked, 0, 1); - }else if(peeked[0] == 'I' && peeked[1] == 'N'){ - this.value = "IN"; - }else if(peeked[0] == 'I' && peeked[1] == 'S'){ - this.value = "IS"; - }else if(peeked[0] == '!' && peeked[1] == '='){ - this.value = "!="; - }else if(peeked.length >= betweenLen && peeked[0] == 'B' && peeked[1] == 'E' && peeked[2] == 'T' && peeked[3] == 'W' && peeked[4] == 'E' && peeked[5] == 'E' && peeked[6] == 'N'){ - this.value = Operators.BETWEEN.value(); - }else if(peeked.length == startsWithLen && new String(peeked).equals(Operators.STARTS_WITH.value())) { - this.value = Operators.STARTS_WITH.value(); - }else{ - throw new ParserException("Expecting an operator (=, >, <, !=, BETWEEN, IN, STARTS_WITH), but found none. Peeked=>" + new String(peeked)); - } - - read(this.value.length()); - } - - @Override - public String toString(){ - return " " + value + " "; - } - - public String getOperator(){ - return value; - } - -} diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ConstValue.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ConstValue.java deleted file mode 100644 index 086939060c..0000000000 --- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ConstValue.java +++ /dev/null @@ -1,147 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** - * - */ -package com.netflix.conductor.elasticsearch.query.parser; - -import java.io.InputStream; - - - - -/** - * @author Viren - * Constant value can be: - *
- *   1. List of values (a,b,c)
- *   2. Range of values (m AND n)
- *   3. A value (x)
- *   4. A value is either a string or a number
- * - */ -public class ConstValue extends AbstractNode { - - public static enum SystemConsts { - NULL("null"), NOT_NULL("not null"); - private String value; - SystemConsts(String value){ - this.value = value; - } - - public String value(){ - return value; - } - } - - private static String QUOTE = "\""; - - private Object value; - - private SystemConsts sysConsts; - - public ConstValue(InputStream is) throws ParserException { - super(is); - } - - @Override - protected void _parse() throws Exception { - byte[] peeked = peek(4); - String sp = new String(peeked).trim(); - //Read a constant value (number or a string) - if(peeked[0] == '"' || peeked[0] == '\''){ - this.value = readString(is); - } else if(sp.toLowerCase().startsWith("not")){ - this.value = SystemConsts.NOT_NULL.value(); - sysConsts = SystemConsts.NOT_NULL; - read(SystemConsts.NOT_NULL.value().length()); - } else if(sp.equalsIgnoreCase(SystemConsts.NULL.value())){ - this.value = SystemConsts.NULL.value(); - sysConsts = SystemConsts.NULL; - read(SystemConsts.NULL.value().length()); - } else{ - this.value = readNumber(is); - } - } - - private String readNumber(InputStream is) throws Exception { - StringBuilder sb = new StringBuilder(); - while(is.available() > 0){ - is.mark(1); - char c = (char) is.read(); - if(!isNumeric(c)){ - is.reset(); - break; - }else{ - sb.append(c); - } - } - String numValue = sb.toString().trim(); - return numValue; - } - /** - * Reads an escaped string - * @throws Exception - */ - private String readString(InputStream is) throws Exception { - char delim = (char)read(1)[0]; - StringBuilder sb = new StringBuilder(); - boolean valid = false; - while(is.available() > 0){ - char c = (char) is.read(); - if(c == delim){ - valid = true; - break; - } else if(c == '\\'){ - // read the next character as part of the value - c = (char) is.read(); - sb.append(c); - } else{ - sb.append(c); - } - } - if(!valid){ - throw new ParserException("String constant is not quoted with <" + delim + "> : " + sb.toString()); - } - return QUOTE + sb.toString() + QUOTE; - } - - public Object getValue(){ - return value; - } - - @Override - public String toString(){ - return ""+value; - } - - public String getUnquotedValue() { - String result = toString(); - if (result.length() >= 2 && result.startsWith(QUOTE) && result.endsWith(QUOTE)) { - result = result.substring(1, result.length() - 1); - } - return result; - } - - public boolean isSysConstant(){ - return this.sysConsts != null; - } - - public SystemConsts getSysConstant(){ - return this.sysConsts; - } -} diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/FunctionThrowingException.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/FunctionThrowingException.java deleted file mode 100644 index 82ec52472d..0000000000 --- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/FunctionThrowingException.java +++ /dev/null @@ -1,30 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
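For reference, a minimal sketch (not part of this change) of how the ConstValue node deleted above tokenizes the constant forms its javadoc lists: quoted strings, bare numbers, and the null / not null system constants. Only the ConstValue class and its package are taken from this diff; the wrapper class, its main method, and the sample inputs are invented for illustration.

    import java.io.BufferedInputStream;
    import java.io.ByteArrayInputStream;
    import java.io.InputStream;

    import com.netflix.conductor.elasticsearch.query.parser.ConstValue;

    public class ConstValueSketch {

        // BufferedInputStream is used because AbstractNode.peek() relies on mark()/reset().
        private static InputStream stream(String s) {
            return new BufferedInputStream(new ByteArrayInputStream(s.getBytes()));
        }

        public static void main(String[] args) throws Exception {
            // A quoted string constant is re-quoted with double quotes by readString().
            ConstValue str = new ConstValue(stream("'IMAGE'"));
            System.out.println(str.getValue());          // "IMAGE"
            System.out.println(str.getUnquotedValue());  // IMAGE

            // A bare numeric constant is read by readNumber().
            ConstValue num = new ConstValue(stream("50 "));
            System.out.println(num.getValue());          // 50

            // The null / not null system constants are flagged via isSysConstant().
            ConstValue nil = new ConstValue(stream("null"));
            System.out.println(nil.isSysConstant());     // true
        }
    }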
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** - * - */ -package com.netflix.conductor.elasticsearch.query.parser; - -/** - * @author Viren - * - */ -@FunctionalInterface -public interface FunctionThrowingException { - - void accept(T t) throws Exception; - -} diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ListConst.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ListConst.java deleted file mode 100644 index 29f0443fde..0000000000 --- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ListConst.java +++ /dev/null @@ -1,81 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** - * - */ -package com.netflix.conductor.elasticsearch.query.parser; - -import java.io.InputStream; -import java.util.LinkedList; -import java.util.List; - - - - -/** - * @author Viren - * List of constants - * - */ -public class ListConst extends AbstractNode { - - private List values; - - public ListConst(InputStream is) throws ParserException { - super(is); - } - - @Override - protected void _parse() throws Exception { - byte[] peeked = read(1); - assertExpected(peeked, "("); - this.values = readList(); - } - - private List readList() throws Exception { - List list = new LinkedList(); - boolean valid = false; - char c; - - StringBuilder sb = new StringBuilder(); - while(is.available() > 0){ - c = (char) is.read(); - if(c == ')'){ - valid = true; - break; - }else if(c == ','){ - list.add(sb.toString().trim()); - sb = new StringBuilder(); - }else{ - sb.append(c); - } - } - list.add(sb.toString().trim()); - if(!valid){ - throw new ParserException("Expected ')' but never encountered in the stream"); - } - return list; - } - - public List getList(){ - return (List) values; - } - - @Override - public String toString(){ - return values.toString(); - } -} diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/Name.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/Name.java deleted file mode 100644 index 7831a57a80..0000000000 --- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/Name.java +++ /dev/null @@ -1,49 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -/** - * - */ -package com.netflix.conductor.elasticsearch.query.parser; - -import java.io.InputStream; - -/** - * @author Viren - * Represents the name of the field to be searched against. - */ -public class Name extends AbstractNode { - - private String value; - - public Name(InputStream is) throws ParserException { - super(is); - } - - @Override - protected void _parse() throws Exception { - this.value = readToken(); - } - - @Override - public String toString(){ - return value; - } - - public String getName(){ - return value; - } - -} diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ParserException.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ParserException.java deleted file mode 100644 index 02f226a907..0000000000 --- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ParserException.java +++ /dev/null @@ -1,36 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** - * - */ -package com.netflix.conductor.elasticsearch.query.parser; - -/** - * @author Viren - * - */ -@SuppressWarnings("serial") -public class ParserException extends Exception { - - public ParserException(String message) { - super(message); - } - - public ParserException(String message, Throwable cause) { - super(message, cause); - } - -} diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/Range.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/Range.java deleted file mode 100644 index 896db71296..0000000000 --- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/Range.java +++ /dev/null @@ -1,92 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -/** - * - */ -package com.netflix.conductor.elasticsearch.query.parser; - -import java.io.InputStream; - - - - -/** - * @author Viren - * - */ -public class Range extends AbstractNode { - - private String low; - - private String high; - - public Range(InputStream is) throws ParserException { - super(is); - } - - @Override - protected void _parse() throws Exception { - this.low = readNumber(is); - - skipWhitespace(); - byte[] peeked = read(3); - assertExpected(peeked, "AND"); - skipWhitespace(); - - String num = readNumber(is); - if(num == null || "".equals(num)){ - throw new ParserException("Missing the upper range value..."); - } - this.high = num; - - } - - private String readNumber(InputStream is) throws Exception { - StringBuilder sb = new StringBuilder(); - while(is.available() > 0){ - is.mark(1); - char c = (char) is.read(); - if(!isNumeric(c)){ - is.reset(); - break; - }else{ - sb.append(c); - } - } - String numValue = sb.toString().trim(); - return numValue; - } - - - /** - * @return the low - */ - public String getLow() { - return low; - } - - /** - * @return the high - */ - public String getHigh() { - return high; - } - - @Override - public String toString(){ - return low + " AND " + high; - } -} diff --git a/es5-persistence/src/main/resources/mappings_docType_task.json b/es5-persistence/src/main/resources/mappings_docType_task.json deleted file mode 100644 index 2a90be39cd..0000000000 --- a/es5-persistence/src/main/resources/mappings_docType_task.json +++ /dev/null @@ -1,68 +0,0 @@ -{ - "task": { - "properties": { - "correlationId": { - "type": "keyword", - "index": true - }, - "endTime": { - "type": "date", - "format": "strict_date_optional_time||epoch_millis" - }, - "executionTime": { - "type": "long" - }, - "input": { - "type": "text", - "index": true - }, - "output": { - "type": "text", - "index": true - }, - "queueWaitTime": { - "type": "long" - }, - "reasonForIncompletion": { - "type": "keyword", - "index": true - }, - "scheduledTime": { - "type": "date", - "format": "strict_date_optional_time||epoch_millis" - }, - "startTime": { - "type": "date", - "format": "strict_date_optional_time||epoch_millis" - }, - "status": { - "type": "keyword", - "index": true - }, - "taskDefName": { - "type": "keyword", - "index": true - }, - "taskId": { - "type": "keyword", - "index": true - }, - "taskType": { - "type": "keyword", - "index": true - }, - "updateTime": { - "type": "date", - "format": "strict_date_optional_time||epoch_millis" - }, - "workflowId": { - "type": "keyword", - "index": true - }, - "workflowType": { - "type": "keyword", - "index": true - } - } - } -} \ No newline at end of file diff --git a/es5-persistence/src/main/resources/mappings_docType_workflow.json b/es5-persistence/src/main/resources/mappings_docType_workflow.json deleted file mode 100644 index abec7535c9..0000000000 --- a/es5-persistence/src/main/resources/mappings_docType_workflow.json +++ /dev/null @@ -1,74 +0,0 @@ -{ - "workflow": { - "properties": { - "correlationId": { - "type": "keyword", - "index": true, - "doc_values": true - }, - "endTime": { - "type": "date", - "format": "strict_date_optional_time||epoch_millis", - "doc_values": true - }, - "executionTime": { - "type": "long", - "doc_values": true - }, - "failedReferenceTaskNames": { - "type": "text", - "index": false - }, - "input": { - "type": "text", - "index": true - }, - "output": { - "type": "text", - "index": true - }, - "reasonForIncompletion": { - "type": "keyword", - "index": true, - "doc_values": true - }, - "startTime": { - "type": 
"date", - "format": "strict_date_optional_time||epoch_millis", - "doc_values": true - }, - "status": { - "type": "keyword", - "index": true, - "doc_values": true - }, - "updateTime": { - "type": "date", - "format": "strict_date_optional_time||epoch_millis", - "doc_values": true - }, - "version": { - "type": "long", - "doc_values": true - }, - "workflowId": { - "type": "keyword", - "index": true, - "doc_values": true - }, - "workflowType": { - "type": "keyword", - "index": true, - "doc_values": true - }, - "rawJSON": { - "type": "text", - "index": false - }, - "event": { - "type": "keyword", - "index": true - } - } - } -} \ No newline at end of file diff --git a/es5-persistence/src/main/resources/template_tasklog.json b/es5-persistence/src/main/resources/template_tasklog.json deleted file mode 100644 index 8330771fc9..0000000000 --- a/es5-persistence/src/main/resources/template_tasklog.json +++ /dev/null @@ -1,85 +0,0 @@ -{ - "order": 0, - "template": "*task*log*", - "settings": { - "index": { - "refresh_interval": "1s" - } - }, - "mappings": { - "event": { - "properties": { - "action": { - "type": "keyword", - "index": true - }, - "created": { - "type": "long" - }, - "event": { - "type": "keyword", - "index": true - }, - "id": { - "type": "keyword", - "index": true - }, - "messageId": { - "type": "keyword", - "index": true - }, - "name": { - "type": "keyword", - "index": true - }, - "output": { - "properties": { - "workflowId": { - "type": "keyword", - "index": true - } - } - }, - "status": { - "type": "keyword", - "index": true - } - } - }, - "task_log": { - "properties": { - "createdTime": { - "type": "long" - }, - "log": { - "type": "keyword", - "index": true - }, - "taskId": { - "type": "keyword", - "index": true - } - } - }, - "message": { - "properties": { - "created": { - "type": "long" - }, - "messageId": { - "type": "keyword", - "index": true - }, - "payload": { - "type": "keyword", - "index": true - }, - "queue": { - "type": "keyword", - "index": true - } - } - } - }, - "aliases": {} -} \ No newline at end of file diff --git a/es5-persistence/src/test/java/com/netflix/conductor/dao/es5/index/TestElasticSearchDAOV5.java b/es5-persistence/src/test/java/com/netflix/conductor/dao/es5/index/TestElasticSearchDAOV5.java deleted file mode 100644 index d61cdbc9f0..0000000000 --- a/es5-persistence/src/test/java/com/netflix/conductor/dao/es5/index/TestElasticSearchDAOV5.java +++ /dev/null @@ -1,544 +0,0 @@ -/* - * Copyright 2019 Netflix, Inc. - *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.dao.es5.index; - -import static org.awaitility.Awaitility.await; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.netflix.conductor.common.metadata.events.EventExecution; -import com.netflix.conductor.common.metadata.events.EventHandler.Action.Type; -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.Task.Status; -import com.netflix.conductor.common.metadata.tasks.TaskExecLog; -import com.netflix.conductor.common.run.SearchResult; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.common.utils.JsonMapperProvider; -import com.netflix.conductor.core.events.queue.Message; -import com.netflix.conductor.dao.es5.index.query.parser.Expression; -import com.netflix.conductor.elasticsearch.ElasticSearchConfiguration; -import com.netflix.conductor.elasticsearch.ElasticSearchTransportClientProvider; -import com.netflix.conductor.elasticsearch.EmbeddedElasticSearch; -import com.netflix.conductor.elasticsearch.SystemPropertiesElasticSearchConfiguration; -import com.netflix.conductor.elasticsearch.es5.EmbeddedElasticSearchV5; -import com.netflix.conductor.elasticsearch.query.parser.ParserException; -import java.text.SimpleDateFormat; -import java.time.LocalDate; -import java.time.LocalDateTime; -import java.time.ZoneOffset; -import java.util.Arrays; -import java.util.Collections; -import java.util.Date; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.TimeZone; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeUnit; -import org.apache.commons.lang3.StringUtils; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; -import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest; -import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; -import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; -import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.action.search.SearchRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.collect.ImmutableOpenMap; -import org.elasticsearch.index.query.BoolQueryBuilder; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.index.query.QueryStringQueryBuilder; -import org.elasticsearch.search.SearchHit; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; - -public class TestElasticSearchDAOV5 { - private static final String MSG_DOC_TYPE = "message"; - private static final String EVENT_DOC_TYPE = "event"; - private static final String LOG_INDEX_PREFIX = "task_log"; - - private static ElasticSearchConfiguration configuration; - private static Client elasticSearchClient; - private static 
ElasticSearchDAOV5 indexDAO; - private static EmbeddedElasticSearch embeddedElasticSearch; - - private Workflow workflow; - - @BeforeClass - public static void startServer() throws Exception { - System.setProperty(ElasticSearchConfiguration.EMBEDDED_PORT_PROPERTY_NAME, "9203"); - System.setProperty(ElasticSearchConfiguration.ELASTIC_SEARCH_URL_PROPERTY_NAME, "localhost:9303"); - System.setProperty(ElasticSearchConfiguration.ELASTIC_SEARCH_INDEX_BATCH_SIZE_PROPERTY_NAME, "1"); - - configuration = new SystemPropertiesElasticSearchConfiguration(); - String host = configuration.getEmbeddedHost(); - int port = configuration.getEmbeddedPort(); - String clusterName = configuration.getEmbeddedClusterName(); - - embeddedElasticSearch = new EmbeddedElasticSearchV5(clusterName, host, port); - embeddedElasticSearch.start(); - - ElasticSearchTransportClientProvider transportClientProvider = - new ElasticSearchTransportClientProvider(configuration); - elasticSearchClient = transportClientProvider.get(); - - elasticSearchClient.admin() - .cluster() - .prepareHealth() - .setWaitForGreenStatus() - .execute() - .get(); - - ObjectMapper objectMapper = new JsonMapperProvider().get(); - indexDAO = new ElasticSearchDAOV5(elasticSearchClient, configuration, objectMapper); - } - - @AfterClass - public static void closeClient() throws Exception { - if (elasticSearchClient != null) { - elasticSearchClient.close(); - } - - embeddedElasticSearch.stop(); - } - - @Before - public void createTestWorkflow() throws Exception { - // define indices - indexDAO.setup(); - - // initialize workflow - workflow = new Workflow(); - workflow.getInput().put("requestId", "request id 001"); - workflow.getInput().put("hasAwards", true); - workflow.getInput().put("channelMapping", 5); - Map name = new HashMap<>(); - name.put("name", "The Who"); - name.put("year", 1970); - Map name2 = new HashMap<>(); - name2.put("name", "The Doors"); - name2.put("year", 1975); - - List names = new LinkedList<>(); - names.add(name); - names.add(name2); - - workflow.getOutput().put("name", name); - workflow.getOutput().put("names", names); - workflow.getOutput().put("awards", 200); - - Task task = new Task(); - task.setReferenceTaskName("task2"); - task.getOutputData().put("location", "http://location"); - task.setStatus(Task.Status.COMPLETED); - - Task task2 = new Task(); - task2.setReferenceTaskName("task3"); - task2.getOutputData().put("refId", "abcddef_1234_7890_aaffcc"); - task2.setStatus(Task.Status.SCHEDULED); - - workflow.getTasks().add(task); - workflow.getTasks().add(task2); - } - - @After - public void tearDown() { - deleteAllIndices(); - } - - private void deleteAllIndices() { - - ImmutableOpenMap indices = elasticSearchClient.admin().cluster() - .prepareState().get().getState() - .getMetaData().getIndices(); - - indices.forEach(cursor -> { - try { - elasticSearchClient.admin() - .indices() - .delete(new DeleteIndexRequest(cursor.value.getIndex().getName())) - .get(); - } catch (InterruptedException | ExecutionException e) { - throw new RuntimeException(e); - } - }); - } - - private boolean indexExists(final String index) { - IndicesExistsRequest request = new IndicesExistsRequest(index); - try { - return elasticSearchClient.admin().indices().exists(request).get().isExists(); - } catch (InterruptedException | ExecutionException e) { - throw new RuntimeException(e); - } - } - - private boolean doesMappingExist(final String index, final String mappingName) { - GetMappingsRequest request = new GetMappingsRequest() - .indices(index); - try { - 
GetMappingsResponse response = elasticSearchClient.admin() - .indices() - .getMappings(request) - .get(); - - return response.getMappings() - .get(index) - .containsKey(mappingName); - } catch (InterruptedException | ExecutionException e) { - throw new RuntimeException(e); - } - } - - @Test - public void assertInitialSetup() throws Exception { - SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMWW"); - dateFormat.setTimeZone(TimeZone.getTimeZone("GMT")); - - String taskLogIndex = "task_log_" + dateFormat.format(new Date()); - - assertTrue("Index 'conductor' should exist", indexExists("conductor")); - assertTrue("Index '" + taskLogIndex + "' should exist", indexExists(taskLogIndex)); - - assertTrue("Mapping 'workflow' for index 'conductor' should exist", doesMappingExist("conductor", "workflow")); - assertTrue("Mapping 'task' for index 'conductor' should exist", doesMappingExist("conductor", "task")); - } - - @Test - public void testWorkflowCRUD() throws Exception { - String testWorkflowType = "testworkflow"; - String testId = "1"; - - workflow.setWorkflowId(testId); - workflow.setWorkflowType(testWorkflowType); - - // Create - String workflowType = indexDAO.get(testId, "workflowType"); - assertNull("Workflow should not exist", workflowType); - - // Get - indexDAO.indexWorkflow(workflow); - - workflowType = indexDAO.get(testId, "workflowType"); - assertEquals("Should have found our workflow type", testWorkflowType, workflowType); - - // Update - String newWorkflowType = "newworkflowtype"; - String[] keyChanges = {"workflowType"}; - String[] valueChanges = {newWorkflowType}; - - indexDAO.updateWorkflow(testId, keyChanges, valueChanges); - - await() - .atMost(3, TimeUnit.SECONDS) - .untilAsserted( - () -> { - String actualWorkflowType = indexDAO.get(testId, "workflowType"); - assertEquals("Should have updated our new workflow type", newWorkflowType, actualWorkflowType); - } - ); - - // Delete - indexDAO.removeWorkflow(testId); - - workflowType = indexDAO.get(testId, "workflowType"); - assertNull("We should no longer have our workflow in the system", workflowType); - } - - @Test - public void testWorkflowSearch() { - String workflowId = "search-workflow-id"; - workflow.setWorkflowId(workflowId); - indexDAO.indexWorkflow(workflow); - await() - .atMost(3, TimeUnit.SECONDS) - .untilAsserted( - () -> { - List searchIds = indexDAO.searchWorkflows("", "workflowId:\"" + workflowId + "\"", 0, 100, Collections.singletonList("workflowId:ASC")).getResults(); - assertEquals(1, searchIds.size()); - assertEquals(workflowId, searchIds.get(0)); - } - ); - } - - @Test - public void testSearchRecentRunningWorkflows() { - workflow.setWorkflowId("completed-workflow"); - workflow.setStatus(Workflow.WorkflowStatus.COMPLETED); - indexDAO.indexWorkflow(workflow); - - String workflowId = "recent-running-workflow-id"; - workflow.setWorkflowId(workflowId); - workflow.setStatus(Workflow.WorkflowStatus.RUNNING); - workflow.setCreateTime(new Date().getTime()); - workflow.setUpdateTime(new Date().getTime()); - workflow.setEndTime(new Date().getTime()); - indexDAO.indexWorkflow(workflow); - - await() - .atMost(3, TimeUnit.SECONDS) - .untilAsserted( - () -> { - List searchIds = indexDAO.searchRecentRunningWorkflows(1,0); - assertEquals(1, searchIds.size()); - assertEquals(workflowId, searchIds.get(0)); - } - ); - } - - @Test - public void testSearchArchivableWorkflows() { - String workflowId = "search-workflow-id"; - - workflow.setWorkflowId(workflowId); - workflow.setStatus(Workflow.WorkflowStatus.COMPLETED); - 
workflow.setCreateTime((new Date(System.currentTimeMillis() - TimeUnit.DAYS.toMillis(4))).getTime()); - workflow.setUpdateTime((new Date(System.currentTimeMillis() - TimeUnit.DAYS.toMillis(4))).getTime()); - workflow.setEndTime((new Date(System.currentTimeMillis() - TimeUnit.DAYS.toMillis(4))).getTime()); - - indexDAO.indexWorkflow(workflow); - - await() - .atMost(3, TimeUnit.SECONDS) - .untilAsserted( - () -> { - List searchIds = indexDAO.searchArchivableWorkflows("conductor",3); - assertEquals(1, searchIds.size()); - assertEquals(workflowId, searchIds.get(0)); - } - ); - } - - @Test - public void taskExecutionLogs() throws Exception { - TaskExecLog taskExecLog1 = new TaskExecLog(); - taskExecLog1.setTaskId("some-task-id"); - long createdTime1 = LocalDateTime.of(2018, 11, 01, 06, 33, 22) - .toEpochSecond(ZoneOffset.UTC); - taskExecLog1.setCreatedTime(createdTime1); - taskExecLog1.setLog("some-log"); - TaskExecLog taskExecLog2 = new TaskExecLog(); - taskExecLog2.setTaskId("some-task-id"); - long createdTime2 = LocalDateTime.of(2018, 11, 01, 06, 33, 22) - .toEpochSecond(ZoneOffset.UTC); - taskExecLog2.setCreatedTime(createdTime2); - taskExecLog2.setLog("some-log"); - List logsToAdd = Arrays.asList(taskExecLog1, taskExecLog2); - indexDAO.addTaskExecutionLogs(logsToAdd); - - await() - .atMost(5, TimeUnit.SECONDS) - .untilAsserted(() -> { - List taskExecutionLogs = indexDAO.getTaskExecutionLogs("some-task-id"); - assertEquals(2, taskExecutionLogs.size()); - }); - } - - @Test - public void indexTask() throws Exception { - String correlationId = "some-correlation-id"; - - Task task = new Task(); - task.setTaskId("some-task-id"); - task.setWorkflowInstanceId("some-workflow-instance-id"); - task.setTaskType("some-task-type"); - task.setStatus(Status.FAILED); - task.setInputData(new HashMap() {{ put("input_key", "input_value"); }}); - task.setCorrelationId(correlationId); - task.setTaskDefName("some-task-def-name"); - task.setReasonForIncompletion("some-failure-reason"); - - indexDAO.indexTask(task); - - await() - .atMost(5, TimeUnit.SECONDS) - .untilAsserted(() -> { - SearchResult result = indexDAO - .searchTasks("correlationId='" + correlationId + "'", "*", 0, 10000, null); - - assertTrue("should return 1 or more search results", result.getResults().size() > 0); - assertEquals("taskId should match the indexed task", "some-task-id", result.getResults().get(0)); - }); - } - - @Test - public void indexTaskWithBatchSizeTwo() throws Exception { - embeddedElasticSearch.stop(); - startElasticSearchWithBatchSize(2); - String correlationId = "some-correlation-id"; - - Task task = new Task(); - task.setTaskId("some-task-id"); - task.setWorkflowInstanceId("some-workflow-instance-id"); - task.setTaskType("some-task-type"); - task.setStatus(Status.FAILED); - task.setInputData(new HashMap() {{ put("input_key", "input_value"); }}); - task.setCorrelationId(correlationId); - task.setTaskDefName("some-task-def-name"); - task.setReasonForIncompletion("some-failure-reason"); - - indexDAO.indexTask(task); - indexDAO.indexTask(task); - - await() - .atMost(5, TimeUnit.SECONDS) - .untilAsserted(() -> { - SearchResult result = indexDAO - .searchTasks("correlationId='" + correlationId + "'", "*", 0, 10000, null); - - assertTrue("should return 1 or more search results", result.getResults().size() > 0); - assertEquals("taskId should match the indexed task", "some-task-id", result.getResults().get(0)); - }); - - embeddedElasticSearch.stop(); - startElasticSearchWithBatchSize(1); - } - - private void 
startElasticSearchWithBatchSize(int i) throws Exception { - System.setProperty(ElasticSearchConfiguration.ELASTIC_SEARCH_INDEX_BATCH_SIZE_PROPERTY_NAME, String.valueOf(i)); - - configuration = new SystemPropertiesElasticSearchConfiguration(); - String host = configuration.getEmbeddedHost(); - int port = configuration.getEmbeddedPort(); - String clusterName = configuration.getEmbeddedClusterName(); - - embeddedElasticSearch = new EmbeddedElasticSearchV5(clusterName, host, port); - embeddedElasticSearch.start(); - - ElasticSearchTransportClientProvider transportClientProvider = - new ElasticSearchTransportClientProvider(configuration); - elasticSearchClient = transportClientProvider.get(); - - elasticSearchClient.admin() - .cluster() - .prepareHealth() - .setWaitForGreenStatus() - .execute() - .get(); - - ObjectMapper objectMapper = new JsonMapperProvider().get(); - indexDAO = new ElasticSearchDAOV5(elasticSearchClient, configuration, objectMapper); - } - - @Test - public void addMessage() { - String messageId = "some-message-id"; - - Message message = new Message(); - message.setId(messageId); - message.setPayload("some-payload"); - message.setReceipt("some-receipt"); - - indexDAO.addMessage("some-queue", message); - - await() - .atMost(5, TimeUnit.SECONDS) - .untilAsserted(() -> { - SearchResponse searchResponse = search( - LOG_INDEX_PREFIX + "*", - "messageId='" + messageId + "'", - 0, - 10000, - "*", - MSG_DOC_TYPE - ); - assertEquals("search results should be length 1", searchResponse.getHits().getTotalHits(), 1); - - SearchHit searchHit = searchResponse.getHits().getAt(0); - GetResponse response = elasticSearchClient - .prepareGet(searchHit.getIndex(), MSG_DOC_TYPE, searchHit.getId()) - .get(); - assertEquals("indexed message id should match", messageId, response.getSource().get("messageId")); - assertEquals("indexed payload should match", "some-payload", response.getSource().get("payload")); - }); - - List messages = indexDAO.getMessages("some-queue"); - assertEquals(1, messages.size()); - assertEquals(message.getId(), messages.get(0).getId()); - assertEquals(message.getPayload(), messages.get(0).getPayload()); - } - - @Test - public void addEventExecution() { - String messageId = "some-message-id"; - - EventExecution eventExecution = new EventExecution(); - eventExecution.setId("some-id"); - eventExecution.setMessageId(messageId); - eventExecution.setAction(Type.complete_task); - eventExecution.setEvent("some-event"); - eventExecution.setStatus(EventExecution.Status.COMPLETED); - - indexDAO.addEventExecution(eventExecution); - - await() - .atMost(5, TimeUnit.SECONDS) - .untilAsserted(() -> { - SearchResponse searchResponse = search( - LOG_INDEX_PREFIX + "*", - "messageId='" + messageId + "'", - 0, - 10000, - "*", - EVENT_DOC_TYPE - ); - - assertEquals("search results should be length 1", searchResponse.getHits().getTotalHits(), 1); - - SearchHit searchHit = searchResponse.getHits().getAt(0); - GetResponse response = elasticSearchClient - .prepareGet(searchHit.getIndex(), EVENT_DOC_TYPE, searchHit.getId()) - .get(); - - assertEquals("indexed message id should match", messageId, response.getSource().get("messageId")); - assertEquals("indexed id should match", "some-id", response.getSource().get("id")); - assertEquals("indexed status should match", EventExecution.Status.COMPLETED.name(), response.getSource().get("status")); - }); - - List events = indexDAO.getEventExecutions("some-event"); - assertEquals(1, events.size()); - assertEquals(eventExecution, events.get(0)); - - } - - - 
private SearchResponse search(String indexName, String structuredQuery, int start, - int size, String freeTextQuery, String docType) throws ParserException { - QueryBuilder queryBuilder = QueryBuilders.matchAllQuery(); - if (StringUtils.isNotEmpty(structuredQuery)) { - Expression expression = Expression.fromString(structuredQuery); - queryBuilder = expression.getFilterBuilder(); - } - - BoolQueryBuilder filterQuery = QueryBuilders.boolQuery().must(queryBuilder); - QueryStringQueryBuilder stringQuery = QueryBuilders.queryStringQuery(freeTextQuery); - BoolQueryBuilder fq = QueryBuilders.boolQuery().must(stringQuery).must(filterQuery); - final SearchRequestBuilder srb = elasticSearchClient.prepareSearch(indexName) - .setQuery(fq) - .setTypes(docType) - .storedFields("_id") - .setFrom(start) - .setSize(size); - - return srb.get(); - } - -} diff --git a/es5-persistence/src/test/java/com/netflix/conductor/dao/es5/index/TestElasticSearchRestDAOV5.java b/es5-persistence/src/test/java/com/netflix/conductor/dao/es5/index/TestElasticSearchRestDAOV5.java deleted file mode 100644 index 6e48ecc497..0000000000 --- a/es5-persistence/src/test/java/com/netflix/conductor/dao/es5/index/TestElasticSearchRestDAOV5.java +++ /dev/null @@ -1,600 +0,0 @@ -/* - * Copyright 2019 Netflix, Inc. - *
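Both deleted test classes drive their structured queries through the Expression parser that is also being removed. Below is a minimal, hedged sketch of that step in isolation; it uses only the calls that appear in the search helpers of these tests (Expression.fromString and getFilterBuilder), while the query string and the wrapper class are invented for illustration.

    import org.elasticsearch.index.query.BoolQueryBuilder;
    import org.elasticsearch.index.query.QueryBuilder;
    import org.elasticsearch.index.query.QueryBuilders;

    import com.netflix.conductor.dao.es5.index.query.parser.Expression;

    public class StructuredQuerySketch {

        public static void main(String[] args) throws Exception {
            // Grammar implemented by the parser classes in this diff: field names,
            // comparison operators (=, >, <, !=, IN, BETWEEN, STARTS_WITH, IS)
            // and the boolean operators AND / OR.
            String structuredQuery = "status IN (RUNNING,FAILED) AND startTime > 1000";

            // Parse the structured query into an Elasticsearch filter, exactly as the
            // deleted test helpers do before executing a search.
            QueryBuilder filter = Expression.fromString(structuredQuery).getFilterBuilder();

            // Combine with a free-text query, mirroring the boolQuery().must(...) pattern
            // used by the tests.
            BoolQueryBuilder query = QueryBuilders.boolQuery()
                    .must(QueryBuilders.queryStringQuery("*"))
                    .must(filter);

            System.out.println(query);  // prints the resulting query as JSON
        }
    }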
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.dao.es5.index; - -import static org.awaitility.Awaitility.await; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; - -import com.amazonaws.util.IOUtils; -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.netflix.conductor.common.metadata.events.EventExecution; -import com.netflix.conductor.common.metadata.events.EventHandler.Action.Type; -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.Task.Status; -import com.netflix.conductor.common.metadata.tasks.TaskExecLog; -import com.netflix.conductor.common.run.SearchResult; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.common.utils.JsonMapperProvider; -import com.netflix.conductor.core.events.queue.Message; -import com.netflix.conductor.dao.es5.index.query.parser.Expression; -import com.netflix.conductor.elasticsearch.ElasticSearchConfiguration; -import com.netflix.conductor.elasticsearch.ElasticSearchRestClientProvider; -import com.netflix.conductor.elasticsearch.EmbeddedElasticSearch; -import com.netflix.conductor.elasticsearch.SystemPropertiesElasticSearchConfiguration; -import com.netflix.conductor.elasticsearch.es5.EmbeddedElasticSearchV5; -import com.netflix.conductor.elasticsearch.query.parser.ParserException; -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStreamReader; -import java.io.Reader; -import java.text.SimpleDateFormat; -import java.time.LocalDateTime; -import java.time.ZoneOffset; -import java.util.Arrays; -import java.util.Collections; -import java.util.Date; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.TimeZone; -import java.util.concurrent.TimeUnit; -import org.apache.commons.lang3.StringUtils; -import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.client.Response; -import org.elasticsearch.client.RestClient; -import org.elasticsearch.client.RestHighLevelClient; -import org.elasticsearch.index.query.BoolQueryBuilder; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.index.query.QueryStringQueryBuilder; -import org.elasticsearch.search.SearchHit; -import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.sort.FieldSortBuilder; -import org.elasticsearch.search.sort.SortOrder; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class TestElasticSearchRestDAOV5 { - - private static final Logger logger = LoggerFactory.getLogger(TestElasticSearchRestDAOV5.class); - - private static final String INDEX_NAME = "conductor"; - private static final String LOG_INDEX_PREFIX = "task_log"; - - private static final String 
MSG_DOC_TYPE = "message"; - private static final String EVENT_DOC_TYPE = "event"; - - private static ElasticSearchConfiguration configuration; - private static RestClient restClient; - private static RestHighLevelClient elasticSearchClient; - private static ElasticSearchRestDAOV5 indexDAO; - private static EmbeddedElasticSearch embeddedElasticSearch; - private static ObjectMapper objectMapper; - - private Workflow workflow; - - private @interface HttpMethod { - String GET = "GET"; - String POST = "POST"; - String PUT = "PUT"; - String HEAD = "HEAD"; - String DELETE = "DELETE"; - } - - @BeforeClass - public static void startServer() throws Exception { - System.setProperty(ElasticSearchConfiguration.EMBEDDED_PORT_PROPERTY_NAME, "9204"); - System.setProperty(ElasticSearchConfiguration.ELASTIC_SEARCH_URL_PROPERTY_NAME, "http://localhost:9204"); - System.setProperty(ElasticSearchConfiguration.ELASTIC_SEARCH_INDEX_BATCH_SIZE_PROPERTY_NAME, "1"); - - configuration = new SystemPropertiesElasticSearchConfiguration(); - - String host = configuration.getEmbeddedHost(); - int port = configuration.getEmbeddedPort(); - String clusterName = configuration.getEmbeddedClusterName(); - - embeddedElasticSearch = new EmbeddedElasticSearchV5(clusterName, host, port); - embeddedElasticSearch.start(); - - ElasticSearchRestClientProvider restClientProvider = - new ElasticSearchRestClientProvider(configuration); - restClient = restClientProvider.get(); - elasticSearchClient = new RestHighLevelClient(restClient); - - Map params = new HashMap<>(); - params.put("wait_for_status", "yellow"); - params.put("timeout", "30s"); - - restClient.performRequest("GET", "/_cluster/health", params); - - objectMapper = new JsonMapperProvider().get(); - indexDAO = new ElasticSearchRestDAOV5(restClient, configuration, objectMapper); - } - - @AfterClass - public static void closeClient() throws Exception { - if (restClient != null) { - restClient.close(); - } - - embeddedElasticSearch.stop(); - } - - @Before - public void createTestWorkflow() throws Exception { - // define indices - indexDAO.setup(); - - // initialize workflow - workflow = new Workflow(); - workflow.getInput().put("requestId", "request id 001"); - workflow.getInput().put("hasAwards", true); - workflow.getInput().put("channelMapping", 5); - Map name = new HashMap<>(); - name.put("name", "The Who"); - name.put("year", 1970); - Map name2 = new HashMap<>(); - name2.put("name", "The Doors"); - name2.put("year", 1975); - - List names = new LinkedList<>(); - names.add(name); - names.add(name2); - - workflow.getOutput().put("name", name); - workflow.getOutput().put("names", names); - workflow.getOutput().put("awards", 200); - - Task task = new Task(); - task.setReferenceTaskName("task2"); - task.getOutputData().put("location", "http://location"); - task.setStatus(Task.Status.COMPLETED); - - Task task2 = new Task(); - task2.setReferenceTaskName("task3"); - task2.getOutputData().put("refId", "abcddef_1234_7890_aaffcc"); - task2.setStatus(Task.Status.SCHEDULED); - - workflow.getTasks().add(task); - workflow.getTasks().add(task2); - } - - @After - public void tearDown() throws IOException { - deleteAllIndices(); - } - - private void deleteAllIndices() throws IOException { - Response beforeResponse = restClient.performRequest(HttpMethod.GET, "/_cat/indices"); - - Reader streamReader = new InputStreamReader(beforeResponse.getEntity().getContent()); - BufferedReader bufferedReader = new BufferedReader(streamReader); - - String line; - while ((line = bufferedReader.readLine()) != 
null) { - String[] fields = line.split("\\s"); - String endpoint = String.format("/%s", fields[2]); - - restClient.performRequest(HttpMethod.DELETE, endpoint); - } - } - - private boolean indexExists(final String index) throws IOException { - return indexDAO.doesResourceExist("/" + index); - } - - private boolean doesMappingExist(final String index, final String mappingName) throws IOException { - return indexDAO.doesResourceExist("/" + index + "/_mapping/" + mappingName); - } - - @Test - public void assertInitialSetup() throws Exception { - - SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMWW"); - dateFormat.setTimeZone(TimeZone.getTimeZone("GMT")); - - String taskLogIndex = "task_log_" + dateFormat.format(new Date()); - - assertTrue("Index 'conductor' should exist", indexExists("conductor")); - assertTrue("Index '" + taskLogIndex + "' should exist", indexExists(taskLogIndex)); - - assertTrue("Mapping 'workflow' for index 'conductor' should exist", doesMappingExist("conductor", "workflow")); - assertTrue("Mapping 'task' for inndex 'conductor' should exist", doesMappingExist("conductor", "task")); - } - - @Test - public void testWorkflowCRUD() { - - String testWorkflowType = "testworkflow"; - String testId = "1"; - - workflow.setWorkflowId(testId); - workflow.setWorkflowType(testWorkflowType); - - // Create - String workflowType = indexDAO.get(testId, "workflowType"); - assertNull("Workflow should not exist", workflowType); - - // Get - indexDAO.indexWorkflow(workflow); - - workflowType = indexDAO.get(testId, "workflowType"); - assertEquals("Should have found our workflow type", testWorkflowType, workflowType); - - // Update - String newWorkflowType = "newworkflowtype"; - String[] keyChanges = {"workflowType"}; - String[] valueChanges = {newWorkflowType}; - - indexDAO.updateWorkflow(testId, keyChanges, valueChanges); - - workflowType = indexDAO.get(testId, "workflowType"); - assertEquals("Should have updated our new workflow type", newWorkflowType, workflowType); - - // Delete - indexDAO.removeWorkflow(testId); - - workflowType = indexDAO.get(testId, "workflowType"); - assertNull("We should no longer have our workflow in the system", workflowType); - } - - @Test - public void testWorkflowSearch() { - String workflowId = "search-workflow-id"; - workflow.setWorkflowId(workflowId); - indexDAO.indexWorkflow(workflow); - await() - .atMost(3, TimeUnit.SECONDS) - .untilAsserted( - () -> { - List searchIds = indexDAO.searchWorkflows("", "workflowId:\"" + workflowId + "\"", 0, 100, Collections.singletonList("workflowId:ASC")).getResults(); - assertEquals(1, searchIds.size()); - assertEquals(workflowId, searchIds.get(0)); - } - ); - } - - @Test - public void testSearchRecentRunningWorkflows() { - workflow.setWorkflowId("completed-workflow"); - workflow.setStatus(Workflow.WorkflowStatus.COMPLETED); - indexDAO.indexWorkflow(workflow); - - String workflowId = "recent-running-workflow-id"; - workflow.setWorkflowId(workflowId); - workflow.setStatus(Workflow.WorkflowStatus.RUNNING); - workflow.setCreateTime(new Date().getTime()); - workflow.setUpdateTime(new Date().getTime()); - workflow.setEndTime(new Date().getTime()); - indexDAO.indexWorkflow(workflow); - - await() - .atMost(3, TimeUnit.SECONDS) - .untilAsserted( - () -> { - List searchIds = indexDAO.searchRecentRunningWorkflows(1,0); - assertEquals(1, searchIds.size()); - assertEquals(workflowId, searchIds.get(0)); - } - ); - } - - @Test - public void testSearchArchivableWorkflows() throws IOException { - String workflowId = 
"search-workflow-id"; - Long time = DateTime.now(DateTimeZone.UTC).minusDays(7).toDate().getTime(); - - workflow.setWorkflowId(workflowId); - workflow.setStatus(Workflow.WorkflowStatus.COMPLETED); - workflow.setCreateTime(time); - workflow.setUpdateTime(time); - workflow.setEndTime(time); - - indexDAO.indexWorkflow(workflow); - - assertTrue(indexExists("conductor")); - - await() - .atMost(5, TimeUnit.SECONDS) - .untilAsserted( - () -> { - List searchIds = indexDAO.searchArchivableWorkflows("conductor",6); - assertEquals(1, searchIds.size()); - assertEquals(workflowId, searchIds.get(0)); - } - ); - } - - @Test - public void taskExecutionLogs() throws Exception { - TaskExecLog taskExecLog1 = new TaskExecLog(); - taskExecLog1.setTaskId("some-task-id"); - long createdTime1 = LocalDateTime.of(2018, 11, 01, 06, 33, 22) - .toEpochSecond(ZoneOffset.UTC); - taskExecLog1.setCreatedTime(createdTime1); - taskExecLog1.setLog("some-log"); - TaskExecLog taskExecLog2 = new TaskExecLog(); - taskExecLog2.setTaskId("some-task-id"); - long createdTime2 = LocalDateTime.of(2018, 11, 01, 06, 33, 22) - .toEpochSecond(ZoneOffset.UTC); - taskExecLog2.setCreatedTime(createdTime2); - taskExecLog2.setLog("some-log"); - List logsToAdd = Arrays.asList(taskExecLog1, taskExecLog2); - indexDAO.addTaskExecutionLogs(logsToAdd); - - await() - .atMost(5, TimeUnit.SECONDS) - .untilAsserted(() -> { - List taskExecutionLogs = indexDAO.getTaskExecutionLogs("some-task-id"); - assertEquals(2, taskExecutionLogs.size()); - }); - } - - @Test - public void indexTask() throws Exception { - String correlationId = "some-correlation-id"; - - Task task = new Task(); - task.setTaskId("some-task-id"); - task.setWorkflowInstanceId("some-workflow-instance-id"); - task.setTaskType("some-task-type"); - task.setStatus(Status.FAILED); - task.setInputData(new HashMap() {{ put("input_key", "input_value"); }}); - task.setCorrelationId(correlationId); - task.setTaskDefName("some-task-def-name"); - task.setReasonForIncompletion("some-failure-reason"); - - indexDAO.indexTask(task); - - await() - .atMost(5, TimeUnit.SECONDS) - .untilAsserted(() -> { - SearchResult result = indexDAO - .searchTasks("correlationId='" + correlationId + "'", "*", 0, 10000, null); - - assertTrue("should return 1 or more search results", result.getResults().size() > 0); - assertEquals("taskId should match the indexed task", "some-task-id", result.getResults().get(0)); - }); - } - - @Test - public void indexTaskWithBatchSizeTwo() throws Exception { - embeddedElasticSearch.stop(); - startElasticSearchWithBatchSize(2); - String correlationId = "some-correlation-id"; - - Task task = new Task(); - task.setTaskId("some-task-id"); - task.setWorkflowInstanceId("some-workflow-instance-id"); - task.setTaskType("some-task-type"); - task.setStatus(Status.FAILED); - task.setInputData(new HashMap() {{ put("input_key", "input_value"); }}); - task.setCorrelationId(correlationId); - task.setTaskDefName("some-task-def-name"); - task.setReasonForIncompletion("some-failure-reason"); - - indexDAO.indexTask(task); - indexDAO.indexTask(task); - - await() - .atMost(5, TimeUnit.SECONDS) - .untilAsserted(() -> { - SearchResult result = indexDAO - .searchTasks("correlationId='" + correlationId + "'", "*", 0, 10000, null); - - assertTrue("should return 1 or more search results", result.getResults().size() > 0); - assertEquals("taskId should match the indexed task", "some-task-id", result.getResults().get(0)); - }); - - embeddedElasticSearch.stop(); - startElasticSearchWithBatchSize(1); - - } - - private 
void startElasticSearchWithBatchSize(int i) throws Exception { - System.setProperty(ElasticSearchConfiguration.ELASTIC_SEARCH_INDEX_BATCH_SIZE_PROPERTY_NAME, String.valueOf(i)); - - configuration = new SystemPropertiesElasticSearchConfiguration(); - - String host = configuration.getEmbeddedHost(); - int port = configuration.getEmbeddedPort(); - String clusterName = configuration.getEmbeddedClusterName(); - - embeddedElasticSearch = new EmbeddedElasticSearchV5(clusterName, host, port); - embeddedElasticSearch.start(); - - ElasticSearchRestClientProvider restClientProvider = - new ElasticSearchRestClientProvider(configuration); - restClient = restClientProvider.get(); - elasticSearchClient = new RestHighLevelClient(restClient); - - Map params = new HashMap<>(); - params.put("wait_for_status", "yellow"); - params.put("timeout", "30s"); - - restClient.performRequest("GET", "/_cluster/health", params); - - objectMapper = new JsonMapperProvider().get(); - indexDAO = new ElasticSearchRestDAOV5(restClient, configuration, objectMapper); - } - - @Test - public void addMessage() { - String messageId = "some-message-id"; - - Message message = new Message(); - message.setId(messageId); - message.setPayload("some-payload"); - message.setReceipt("some-receipt"); - - indexDAO.addMessage("some-queue", message); - - await() - .atMost(5, TimeUnit.SECONDS) - .untilAsserted(() -> { - SearchResponse searchResponse = searchObjectIdsViaExpression( - LOG_INDEX_PREFIX + "*", - "messageId='" + messageId + "'", - 0, - 10000, - null, - "*", - MSG_DOC_TYPE - ); - assertTrue("should return 1 or more search results", searchResponse.getHits().getTotalHits() > 0); - - SearchHit searchHit = searchResponse.getHits().getAt(0); - String resourcePath = - String.format("/%s/%s/%s", searchHit.getIndex(), MSG_DOC_TYPE, searchHit.getId()); - Response response = restClient.performRequest(HttpMethod.GET, resourcePath); - - String responseBody = IOUtils.toString(response.getEntity().getContent()); - logger.info("responseBody: {}", responseBody); - - TypeReference> typeRef = - new TypeReference>() {}; - Map responseMap = objectMapper.readValue(responseBody, typeRef); - Map source = (Map) responseMap.get("_source"); - assertEquals("indexed message id should match", messageId, source.get("messageId")); - assertEquals("indexed payload should match", "some-payload", source.get("payload")); - }); - - List messages = indexDAO.getMessages("some-queue"); - assertEquals(1, messages.size()); - assertEquals(message.getId(), messages.get(0).getId()); - assertEquals(message.getPayload(), messages.get(0).getPayload()); - } - - @Test - public void addEventExecution() { - String messageId = "some-message-id"; - - EventExecution eventExecution = new EventExecution(); - eventExecution.setId("some-id"); - eventExecution.setMessageId(messageId); - eventExecution.setAction(Type.complete_task); - eventExecution.setEvent("some-event"); - eventExecution.setStatus(EventExecution.Status.COMPLETED); - - indexDAO.addEventExecution(eventExecution); - - await() - .atMost(5, TimeUnit.SECONDS) - .untilAsserted(() -> { - SearchResponse searchResponse = searchObjectIdsViaExpression( - LOG_INDEX_PREFIX + "*", - "messageId='" + messageId + "'", - 0, - 10000, - null, - "*", - EVENT_DOC_TYPE - ); - assertTrue("should return 1 or more search results", searchResponse.getHits().getTotalHits() > 0); - - SearchHit searchHit = searchResponse.getHits().getAt(0); - String resourcePath = - String.format("/%s/%s/%s", searchHit.getIndex(), EVENT_DOC_TYPE, searchHit.getId()); - 
Response response = restClient.performRequest(HttpMethod.GET, resourcePath); - - String responseBody = IOUtils.toString(response.getEntity().getContent()); - TypeReference> typeRef = - new TypeReference>() { - }; - Map responseMap = objectMapper.readValue(responseBody, typeRef); - - Map sourceMap = (Map) responseMap.get("_source"); - assertEquals("indexed id should match", "some-id", sourceMap.get("id")); - assertEquals("indexed message id should match", messageId, sourceMap.get("messageId")); - assertEquals("indexed action should match", Type.complete_task.name(), sourceMap.get("action")); - assertEquals("indexed event should match", "some-event", sourceMap.get("event")); - assertEquals("indexed status should match", EventExecution.Status.COMPLETED.name(), sourceMap.get("status")); - }); - - List events = indexDAO.getEventExecutions("some-event"); - assertEquals(1, events.size()); - assertEquals(eventExecution, events.get(0)); - } - - private SearchResponse searchObjectIdsViaExpression(String indexName, String structuredQuery, int start, int size, - List sortOptions, String freeTextQuery, String docType) throws ParserException, IOException { - - // Build query - QueryBuilder queryBuilder = QueryBuilders.matchAllQuery(); - if(StringUtils.isNotEmpty(structuredQuery)) { - Expression expression = Expression.fromString(structuredQuery); - queryBuilder = expression.getFilterBuilder(); - } - - BoolQueryBuilder filterQuery = QueryBuilders.boolQuery().must(queryBuilder); - QueryStringQueryBuilder stringQuery = QueryBuilders.queryStringQuery(freeTextQuery); - BoolQueryBuilder fq = QueryBuilders.boolQuery().must(stringQuery).must(filterQuery); - - return searchObjectIds(indexName, fq, start, size, sortOptions, docType); - } - - /** - * Tries to find object ids for a given query in an index. - * - * @param indexName The name of the index. - * @param queryBuilder The query to use for searching. - * @param start The start to use. - * @param size The total return size. - * @param sortOptions A list of string options to sort in the form VALUE:ORDER; where ORDER is optional and can be either ASC OR DESC. - * @param docType The document type to searchObjectIdsViaExpression for. - * - * @return The SearchResults which includes the count and IDs that were found. - * @throws IOException If we cannot communicate with ES. - */ - private SearchResponse searchObjectIds(String indexName, QueryBuilder queryBuilder, int start, int size, List sortOptions, String docType) throws IOException { - - SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); - searchSourceBuilder.query(queryBuilder); - searchSourceBuilder.from(start); - searchSourceBuilder.size(size); - - if (sortOptions != null && !sortOptions.isEmpty()) { - - for (String sortOption : sortOptions) { - SortOrder order = SortOrder.ASC; - String field = sortOption; - int index = sortOption.indexOf(":"); - if (index > 0) { - field = sortOption.substring(0, index); - order = SortOrder.valueOf(sortOption.substring(index + 1)); - } - searchSourceBuilder.sort(new FieldSortBuilder(field).order(order)); - } - } - - // Generate the actual request to send to ES. 
- SearchRequest searchRequest = new SearchRequest(indexName); - searchRequest.types(docType); - searchRequest.source(searchSourceBuilder); - - return elasticSearchClient.search(searchRequest); - } - -} diff --git a/es5-persistence/src/test/java/com/netflix/conductor/dao/es5/index/query/parser/TestExpression.java b/es5-persistence/src/test/java/com/netflix/conductor/dao/es5/index/query/parser/TestExpression.java deleted file mode 100644 index 3a282864b4..0000000000 --- a/es5-persistence/src/test/java/com/netflix/conductor/dao/es5/index/query/parser/TestExpression.java +++ /dev/null @@ -1,160 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** - * - */ -package com.netflix.conductor.dao.es5.index.query.parser; - -import com.netflix.conductor.elasticsearch.query.parser.AbstractParserTest; -import com.netflix.conductor.elasticsearch.query.parser.ConstValue; - -import org.junit.Test; - -import java.io.BufferedInputStream; -import java.io.ByteArrayInputStream; -import java.io.InputStream; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; - -/** - * @author Viren - * - */ -public class TestExpression extends AbstractParserTest { - - @Test - public void test() throws Exception{ - String test = "type='IMAGE' AND subType ='sdp' AND (metadata.width > 50 OR metadata.height > 50)"; - //test = "type='IMAGE' AND subType ='sdp'"; - //test = "(metadata.type = 'IMAGE')"; - InputStream is = new BufferedInputStream(new ByteArrayInputStream(test.getBytes())); - Expression expr = new Expression(is); - - System.out.println(expr); - - assertTrue(expr.isBinaryExpr()); - assertNull(expr.getGroupedExpression()); - assertNotNull(expr.getNameValue()); - - NameValue nv = expr.getNameValue(); - assertEquals("type", nv.getName().getName()); - assertEquals("=", nv.getOp().getOperator()); - assertEquals("\"IMAGE\"", nv.getValue().getValue()); - - Expression rhs = expr.getRightHandSide(); - assertNotNull(rhs); - assertTrue(rhs.isBinaryExpr()); - - nv = rhs.getNameValue(); - assertNotNull(nv); //subType = sdp - assertNull(rhs.getGroupedExpression()); - assertEquals("subType", nv.getName().getName()); - assertEquals("=", nv.getOp().getOperator()); - assertEquals("\"sdp\"", nv.getValue().getValue()); - - assertEquals("AND", rhs.getOperator().getOperator()); - rhs = rhs.getRightHandSide(); - assertNotNull(rhs); - assertFalse(rhs.isBinaryExpr()); - GroupedExpression ge = rhs.getGroupedExpression(); - assertNotNull(ge); - expr = ge.getExpression(); - assertNotNull(expr); - - assertTrue(expr.isBinaryExpr()); - nv = expr.getNameValue(); - assertNotNull(nv); - assertEquals("metadata.width", nv.getName().getName()); - assertEquals(">", nv.getOp().getOperator()); - assertEquals("50", nv.getValue().getValue()); - - - - assertEquals("OR", expr.getOperator().getOperator()); - rhs = 
expr.getRightHandSide(); - assertNotNull(rhs); - assertFalse(rhs.isBinaryExpr()); - nv = rhs.getNameValue(); - assertNotNull(nv); - - assertEquals("metadata.height", nv.getName().getName()); - assertEquals(">", nv.getOp().getOperator()); - assertEquals("50", nv.getValue().getValue()); - - } - - @Test - public void testWithSysConstants() throws Exception{ - String test = "type='IMAGE' AND subType ='sdp' AND description IS null"; - InputStream is = new BufferedInputStream(new ByteArrayInputStream(test.getBytes())); - Expression expr = new Expression(is); - - System.out.println(expr); - - assertTrue(expr.isBinaryExpr()); - assertNull(expr.getGroupedExpression()); - assertNotNull(expr.getNameValue()); - - NameValue nv = expr.getNameValue(); - assertEquals("type", nv.getName().getName()); - assertEquals("=", nv.getOp().getOperator()); - assertEquals("\"IMAGE\"", nv.getValue().getValue()); - - Expression rhs = expr.getRightHandSide(); - assertNotNull(rhs); - assertTrue(rhs.isBinaryExpr()); - - nv = rhs.getNameValue(); - assertNotNull(nv); //subType = sdp - assertNull(rhs.getGroupedExpression()); - assertEquals("subType", nv.getName().getName()); - assertEquals("=", nv.getOp().getOperator()); - assertEquals("\"sdp\"", nv.getValue().getValue()); - - assertEquals("AND", rhs.getOperator().getOperator()); - rhs = rhs.getRightHandSide(); - assertNotNull(rhs); - assertFalse(rhs.isBinaryExpr()); - GroupedExpression ge = rhs.getGroupedExpression(); - assertNull(ge); - nv = rhs.getNameValue(); - assertNotNull(nv); - assertEquals("description", nv.getName().getName()); - assertEquals("IS", nv.getOp().getOperator()); - ConstValue cv = nv.getValue(); - assertNotNull(cv); - assertEquals(cv.getSysConstant(), ConstValue.SystemConsts.NULL); - - test = "description IS not null"; - is = new BufferedInputStream(new ByteArrayInputStream(test.getBytes())); - expr = new Expression(is); - - System.out.println(expr); - nv = expr.getNameValue(); - assertNotNull(nv); - assertEquals("description", nv.getName().getName()); - assertEquals("IS", nv.getOp().getOperator()); - cv = nv.getValue(); - assertNotNull(cv); - assertEquals(cv.getSysConstant(), ConstValue.SystemConsts.NOT_NULL); - - } - -} diff --git a/es5-persistence/src/test/java/com/netflix/conductor/dao/es5/index/query/parser/TestGroupedExpression.java b/es5-persistence/src/test/java/com/netflix/conductor/dao/es5/index/query/parser/TestGroupedExpression.java deleted file mode 100644 index 2f3726bddf..0000000000 --- a/es5-persistence/src/test/java/com/netflix/conductor/dao/es5/index/query/parser/TestGroupedExpression.java +++ /dev/null @@ -1,33 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -/** - * - */ -package com.netflix.conductor.dao.es5.index.query.parser; - -import org.junit.Test; - -/** - * @author Viren - * - */ -public class TestGroupedExpression { - - @Test - public void test(){ - - } -} diff --git a/es5-persistence/src/test/java/com/netflix/conductor/elasticsearch/TestElasticSearchConfiguration.java b/es5-persistence/src/test/java/com/netflix/conductor/elasticsearch/TestElasticSearchConfiguration.java deleted file mode 100644 index 46f3700421..0000000000 --- a/es5-persistence/src/test/java/com/netflix/conductor/elasticsearch/TestElasticSearchConfiguration.java +++ /dev/null @@ -1,21 +0,0 @@ -package com.netflix.conductor.elasticsearch; - -import org.junit.Assert; -import org.junit.Test; - -public class TestElasticSearchConfiguration { - - @Test - public void testAsyncWorkerQueueSize() { - ElasticSearchConfiguration es = new SystemPropertiesElasticSearchConfiguration(); - int workerQueueSize = es.getAsyncWorkerQueueSize(); - Assert.assertEquals(workerQueueSize, 100); - } - - @Test - public void testAsyncMaxPoolSize() { - ElasticSearchConfiguration es = new SystemPropertiesElasticSearchConfiguration(); - int poolSize = es.getAsyncMaxPoolSize(); - Assert.assertEquals(poolSize, 12); - } -} diff --git a/es5-persistence/src/test/java/com/netflix/conductor/elasticsearch/query/parser/AbstractParserTest.java b/es5-persistence/src/test/java/com/netflix/conductor/elasticsearch/query/parser/AbstractParserTest.java deleted file mode 100644 index cd4c318a80..0000000000 --- a/es5-persistence/src/test/java/com/netflix/conductor/elasticsearch/query/parser/AbstractParserTest.java +++ /dev/null @@ -1,35 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** - * - */ -package com.netflix.conductor.elasticsearch.query.parser; - -import java.io.BufferedInputStream; -import java.io.ByteArrayInputStream; -import java.io.InputStream; - -/** - * @author Viren - * - */ -public abstract class AbstractParserTest { - - protected InputStream getInputStream(String expression) { - return new BufferedInputStream(new ByteArrayInputStream(expression.getBytes())); - } - -} diff --git a/es5-persistence/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestBooleanOp.java b/es5-persistence/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestBooleanOp.java deleted file mode 100644 index 9c0ef2acb0..0000000000 --- a/es5-persistence/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestBooleanOp.java +++ /dev/null @@ -1,52 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** - * - */ -package com.netflix.conductor.elasticsearch.query.parser; - -import org.junit.Test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; - -/** - * @author Viren - * - */ -public class TestBooleanOp extends AbstractParserTest { - - @Test - public void test() throws Exception { - String[] tests = new String[]{"AND", "OR"}; - for(String test : tests){ - BooleanOp name = new BooleanOp(getInputStream(test)); - String nameVal = name.getOperator(); - assertNotNull(nameVal); - assertEquals(test, nameVal); - } - } - - @Test(expected=ParserException.class) - public void testInvalid() throws Exception { - String test = "<"; - BooleanOp name = new BooleanOp(getInputStream(test)); - String nameVal = name.getOperator(); - assertNotNull(nameVal); - assertEquals(test, nameVal); - - } -} diff --git a/es5-persistence/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestComparisonOp.java b/es5-persistence/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestComparisonOp.java deleted file mode 100644 index cc69d62b64..0000000000 --- a/es5-persistence/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestComparisonOp.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. 
- */ -package com.netflix.conductor.elasticsearch.query.parser; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; - -import org.junit.Test; - -/** - * @author Viren - * - */ -public class TestComparisonOp extends AbstractParserTest { - - @Test - public void test() throws Exception { - String[] tests = new String[]{"<",">","=","!=","IN","BETWEEN","STARTS_WITH"}; - for(String test : tests){ - ComparisonOp name = new ComparisonOp(getInputStream(test)); - String nameVal = name.getOperator(); - assertNotNull(nameVal); - assertEquals(test, nameVal); - } - } - - @Test(expected=ParserException.class) - public void testInvalidOp() throws Exception { - String test = "AND"; - ComparisonOp name = new ComparisonOp(getInputStream(test)); - String nameVal = name.getOperator(); - assertNotNull(nameVal); - assertEquals(test, nameVal); - } -} diff --git a/es5-persistence/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestConstValue.java b/es5-persistence/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestConstValue.java deleted file mode 100644 index 8cc81641a3..0000000000 --- a/es5-persistence/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestConstValue.java +++ /dev/null @@ -1,103 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** - * - */ -package com.netflix.conductor.elasticsearch.query.parser; - -import org.junit.Test; - -import java.util.List; - -import static org.junit.Assert.*; - -/** - * @author Viren - * - */ -public class TestConstValue extends AbstractParserTest { - - @Test - public void testStringConst() throws Exception { - String test = "'string value'"; - String expected = test.replaceAll("'", "\""); //Quotes are removed but then the result is double quoted. 
- ConstValue cv = new ConstValue(getInputStream(test)); - assertNotNull(cv.getValue()); - assertEquals(expected, cv.getValue()); - assertTrue(cv.getValue() instanceof String); - - test = "\"string value\""; - cv = new ConstValue(getInputStream(test)); - assertNotNull(cv.getValue()); - assertEquals(expected, cv.getValue()); - assertTrue(cv.getValue() instanceof String); - } - - @Test - public void testSystemConst() throws Exception { - String test = "null"; - ConstValue cv = new ConstValue(getInputStream(test)); - assertNotNull(cv.getValue()); - assertTrue(cv.getValue() instanceof String); - assertEquals(cv.getSysConstant(), ConstValue.SystemConsts.NULL); - test = "null"; - - test = "not null"; - cv = new ConstValue(getInputStream(test)); - assertNotNull(cv.getValue()); - assertEquals(cv.getSysConstant(), ConstValue.SystemConsts.NOT_NULL); - } - - @Test(expected=ParserException.class) - public void testInvalid() throws Exception { - String test = "'string value"; - new ConstValue(getInputStream(test)); - } - - - @Test - public void testNumConst() throws Exception { - String test = "12345.89"; - ConstValue cv = new ConstValue(getInputStream(test)); - assertNotNull(cv.getValue()); - assertTrue(cv.getValue() instanceof String); //Numeric values are stored as string as we are just passing thru them to ES - assertEquals(test, cv.getValue()); - } - - @Test - public void testRange() throws Exception { - String test = "50 AND 100"; - Range range = new Range(getInputStream(test)); - assertEquals("50", range.getLow()); - assertEquals("100", range.getHigh()); - } - - @Test(expected=ParserException.class) - public void testBadRange() throws Exception { - String test = "50 AND"; - new Range(getInputStream(test)); - } - - @Test - public void testArray() throws Exception { - String test = "(1, 3, 'name', 'value2')"; - ListConst lc = new ListConst(getInputStream(test)); - List list = lc.getList(); - assertEquals(4, list.size()); - assertTrue(list.contains("1")); - assertEquals("'value2'", list.get(3)); //Values are preserved as it is... - } -} diff --git a/es5-persistence/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestName.java b/es5-persistence/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestName.java deleted file mode 100644 index d3ea73c145..0000000000 --- a/es5-persistence/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestName.java +++ /dev/null @@ -1,40 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -/** - * - */ -package com.netflix.conductor.elasticsearch.query.parser; - -import org.junit.Test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; - -/** - * @author Viren - * - */ -public class TestName extends AbstractParserTest { - - @Test - public void test() throws Exception{ - String test = "metadata.en_US.lang "; - Name name = new Name(getInputStream(test)); - String nameVal = name.getName(); - assertNotNull(nameVal); - assertEquals(test.trim(), nameVal); - } -} diff --git a/es5-persistence/src/test/resources/log4j.properties b/es5-persistence/src/test/resources/log4j.properties deleted file mode 100644 index 22ccb6e709..0000000000 --- a/es5-persistence/src/test/resources/log4j.properties +++ /dev/null @@ -1,27 +0,0 @@ -# -# Copyright 2019 Netflix, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# Set root logger level to WARN and its only appender to A1. -log4j.rootLogger=WARN, A1 - -# A1 is set to be a ConsoleAppender. -log4j.appender.A1=org.apache.log4j.ConsoleAppender - -# A1 uses PatternLayout. -log4j.appender.A1.layout=org.apache.log4j.PatternLayout -log4j.appender.A1.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n - -log4j.appender.org.apache.http=info diff --git a/es5-persistence/src/test/resources/log4j2.properties b/es5-persistence/src/test/resources/log4j2.properties deleted file mode 100644 index 4800463488..0000000000 --- a/es5-persistence/src/test/resources/log4j2.properties +++ /dev/null @@ -1,27 +0,0 @@ -# -# Copyright 2019 Netflix, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -status = error -dest = err -name = Console - -appender.console.type = Console -appender.console.name = STDOUT -appender.console.layout.type = PatternLayout -appender.console.layout.pattern = %-4r [%t] %-5p %c %x - %m%n - -rootLogger.level = warn -rootLogger.appenderRef.stdout.ref = STDOUT \ No newline at end of file diff --git a/es6-persistence/build.gradle b/es6-persistence/build.gradle index 35e3f2d86d..51846ff53a 100644 --- a/es6-persistence/build.gradle +++ b/es6-persistence/build.gradle @@ -4,8 +4,8 @@ dependencies { implementation 'org.springframework.boot:spring-boot-starter' - implementation "commons-io:commons-io:${revCommonsIo}" - implementation "org.apache.commons:commons-lang3:${revCommonsLang3}" + implementation "commons-io:commons-io" + implementation "org.apache.commons:commons-lang3" // SBMTODO: remove guava dep implementation "com.google.guava:guava:${revGuava}" @@ -14,5 +14,5 @@ dependencies { implementation "org.elasticsearch.client:elasticsearch-rest-high-level-client" testImplementation "org.awaitility:awaitility:${revAwaitility}" - testImplementation "org.testcontainers:elasticsearch:1.15.0" + testImplementation "org.testcontainers:elasticsearch:${revTestESContainer}" } diff --git a/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/index/ElasticSearchDaoBaseTest.java b/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/index/ElasticSearchDaoBaseTest.java index e102bda085..873b29552c 100644 --- a/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/index/ElasticSearchDaoBaseTest.java +++ b/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/index/ElasticSearchDaoBaseTest.java @@ -31,19 +31,12 @@ public void setup() throws Exception { elasticSearchClient = new PreBuiltTransportClient(settings) .addTransportAddress(new TransportAddress(InetAddress.getByName("localhost"), mappedPort)); - elasticSearchClient.admin() - .cluster() - .prepareHealth() - .setWaitForGreenStatus() - .execute() - .get(); - indexDAO = new ElasticSearchDAOV6(elasticSearchClient, properties, objectMapper); indexDAO.setup(); } @AfterClass - public static void closeClient() throws Exception { + public static void closeClient() { container.stop(); } diff --git a/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/index/ElasticSearchRestDaoBaseTest.java b/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/index/ElasticSearchRestDaoBaseTest.java index 2faca33328..74134e6a2c 100644 --- a/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/index/ElasticSearchRestDaoBaseTest.java +++ b/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/index/ElasticSearchRestDaoBaseTest.java @@ -30,12 +30,6 @@ public void setup() throws Exception { RestClientBuilder restClientBuilder = RestClient.builder(new HttpHost(host, port, "http")); restClient = restClientBuilder.build(); - Map params = new HashMap<>(); - params.put("wait_for_status", "yellow"); - params.put("timeout", "30s"); - - restClient.performRequest("GET", "/_cluster/health", params); - indexDAO = new ElasticSearchRestDAOV6(restClientBuilder, properties, objectMapper); indexDAO.setup(); } diff --git a/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/index/TestElasticSearchDAOV6Batch.java b/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/index/TestElasticSearchDAOV6Batch.java index 9083d091f7..0af460c2c9 100644 --- a/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/index/TestElasticSearchDAOV6Batch.java +++ 
b/es6-persistence/src/test/java/com/netflix/conductor/es6/dao/index/TestElasticSearchDAOV6Batch.java @@ -16,7 +16,7 @@ public class TestElasticSearchDAOV6Batch extends ElasticSearchDaoBaseTest { @Test - public void indexTaskWithBatchSizeTwo() throws Exception { + public void indexTaskWithBatchSizeTwo() { String correlationId = "some-correlation-id"; Task task = new Task(); diff --git a/grpc-server/build.gradle b/grpc-server/build.gradle index 1e2808a4f9..cd248e287d 100644 --- a/grpc-server/build.gradle +++ b/grpc-server/build.gradle @@ -1,8 +1,3 @@ -//plugins { -// // FIXME This is temporary until the server module refactoring is completed. -// id 'com.github.johnrengelman.shadow' version '1.2.3' -//} - dependencies { implementation project(':conductor-common') implementation project(':conductor-core') diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServer.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServer.java index a5ad2489af..df68e2179c 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServer.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServer.java @@ -25,7 +25,7 @@ public class GRPCServer { - private static final Logger logger = LoggerFactory.getLogger(GRPCServer.class); + private static final Logger LOGGER = LoggerFactory.getLogger(GRPCServer.class); private final Server server; @@ -38,13 +38,13 @@ public GRPCServer(int port, List services) { @PostConstruct public void start() throws IOException { server.start(); - logger.info("grpc: Server started, listening on " + server.getPort()); + LOGGER.info("grpc: Server started, listening on " + server.getPort()); } @PreDestroy public void stop() { if (server != null) { - logger.info("grpc: server shutting down"); + LOGGER.info("grpc: server shutting down"); server.shutdown(); } } diff --git a/mysql-persistence/build.gradle b/mysql-persistence/build.gradle index 4648551dd4..b4ee433e12 100644 --- a/mysql-persistence/build.gradle +++ b/mysql-persistence/build.gradle @@ -20,14 +20,14 @@ dependencies { // SBMTODO: remove guava dep implementation "com.google.guava:guava:${revGuava}" - implementation "com.fasterxml.jackson.core:jackson-databind:${revJackson}" - implementation "com.fasterxml.jackson.core:jackson-core:${revJackson}" + implementation "com.fasterxml.jackson.core:jackson-databind" + implementation "com.fasterxml.jackson.core:jackson-core" - implementation "org.apache.commons:commons-lang3:${revCommonsLang3}" + implementation "org.apache.commons:commons-lang3" - implementation "mysql:mysql-connector-java:${revMySqlConnector}" - implementation "com.zaxxer:HikariCP:${revHikariCP}" - implementation "org.flywaydb:flyway-core:${revFlywayCore}" + implementation "mysql:mysql-connector-java" + implementation "com.zaxxer:HikariCP" + implementation "org.flywaydb:flyway-core" testImplementation project(':conductor-core').sourceSets.test.output } diff --git a/mysql-persistence/src/main/java/com/netflix/conductor/mysql/config/MySQLConfiguration.java b/mysql-persistence/src/main/java/com/netflix/conductor/mysql/config/MySQLConfiguration.java index 52e582ea0c..5de1ab768d 100644 --- a/mysql-persistence/src/main/java/com/netflix/conductor/mysql/config/MySQLConfiguration.java +++ b/mysql-persistence/src/main/java/com/netflix/conductor/mysql/config/MySQLConfiguration.java @@ -13,21 +13,17 @@ package com.netflix.conductor.mysql.config; import com.fasterxml.jackson.databind.ObjectMapper; -import com.netflix.conductor.dao.EventHandlerDAO; import 
com.netflix.conductor.dao.ExecutionDAO; import com.netflix.conductor.dao.MetadataDAO; -import com.netflix.conductor.dao.PollDataDAO; import com.netflix.conductor.dao.QueueDAO; -import com.netflix.conductor.dao.RateLimitingDAO; import com.netflix.conductor.mysql.dao.MySQLExecutionDAO; import com.netflix.conductor.mysql.dao.MySQLMetadataDAO; import com.netflix.conductor.mysql.dao.MySQLQueueDAO; +import javax.sql.DataSource; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; -import javax.sql.DataSource; - @SuppressWarnings("SpringJavaInjectionPointsAutowiringInspection") @Configuration(proxyBeanMethods = false) @ConditionalOnProperty(name = "db", havingValue = "mysql") @@ -43,27 +39,11 @@ public MetadataDAO mySqlMetadataDAO(ObjectMapper objectMapper, DataSource dataSo return new MySQLMetadataDAO(objectMapper, dataSource, properties); } - @Bean - public EventHandlerDAO mySqlEventHandlerDAO(ObjectMapper objectMapper, DataSource dataSource, - MySQLProperties properties) { - return new MySQLMetadataDAO(objectMapper, dataSource, properties); - } - @Bean public ExecutionDAO mySqlExecutionDAO(ObjectMapper objectMapper, DataSource dataSource) { return new MySQLExecutionDAO(objectMapper, dataSource); } - @Bean - public RateLimitingDAO mySqlRateLimitingDAO(ObjectMapper objectMapper, DataSource dataSource) { - return new MySQLExecutionDAO(objectMapper, dataSource); - } - - @Bean - public PollDataDAO mySqlPollDataDAO(ObjectMapper objectMapper, DataSource dataSource) { - return new MySQLExecutionDAO(objectMapper, dataSource); - } - @Bean public QueueDAO mySqlQueueDAO(ObjectMapper objectMapper, DataSource dataSource) { return new MySQLQueueDAO(objectMapper, dataSource); diff --git a/mysql-persistence/src/main/java/com/netflix/conductor/mysql/config/MySQLDataSourceProvider.java b/mysql-persistence/src/main/java/com/netflix/conductor/mysql/config/MySQLDataSourceProvider.java index 88978d9268..491e9555b4 100644 --- a/mysql-persistence/src/main/java/com/netflix/conductor/mysql/config/MySQLDataSourceProvider.java +++ b/mysql-persistence/src/main/java/com/netflix/conductor/mysql/config/MySQLDataSourceProvider.java @@ -16,6 +16,7 @@ import com.zaxxer.hikari.HikariConfig; import com.zaxxer.hikari.HikariDataSource; import org.flywaydb.core.Flyway; +import org.flywaydb.core.api.configuration.FluentConfiguration; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -80,14 +81,16 @@ private void flywayMigrate(DataSource dataSource) { return; } - Flyway flyway = new Flyway(); + FluentConfiguration fluentConfiguration = Flyway.configure() + .dataSource(dataSource) + .placeholderReplacement(false); + properties.getFlywayTable().ifPresent(tableName -> { LOGGER.debug("Using Flyway migration table '{}'", tableName); - flyway.setTable(tableName); + fluentConfiguration.table(tableName); }); - flyway.setDataSource(dataSource); - flyway.setPlaceholderReplacement(false); + Flyway flyway = new Flyway(fluentConfiguration); flyway.migrate(); } } diff --git a/mysql-persistence/src/test/java/com/netflix/conductor/mysql/util/MySQLDAOTestUtil.java b/mysql-persistence/src/test/java/com/netflix/conductor/mysql/util/MySQLDAOTestUtil.java index 1ac0fc25c5..fcc9f0b368 100644 --- a/mysql-persistence/src/test/java/com/netflix/conductor/mysql/util/MySQLDAOTestUtil.java +++ b/mysql-persistence/src/test/java/com/netflix/conductor/mysql/util/MySQLDAOTestUtil.java @@ -12,22 +12,22 @@ */ package 
com.netflix.conductor.mysql.util; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + import com.fasterxml.jackson.databind.ObjectMapper; import com.netflix.conductor.mysql.config.MySQLProperties; import com.zaxxer.hikari.HikariDataSource; -import org.flywaydb.core.Flyway; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import javax.sql.DataSource; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; - -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; +import javax.sql.DataSource; +import org.flywaydb.core.Flyway; +import org.flywaydb.core.api.configuration.FluentConfiguration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class MySQLDAOTestUtil { @@ -84,9 +84,11 @@ private HikariDataSource getDataSource(MySQLProperties properties) { } private void flywayMigrate(DataSource dataSource) { - Flyway flyway = new Flyway(); - flyway.setDataSource(dataSource); - flyway.setPlaceholderReplacement(false); + FluentConfiguration fluentConfiguration = Flyway.configure() + .dataSource(dataSource) + .placeholderReplacement(false); + + Flyway flyway = new Flyway(fluentConfiguration); flyway.migrate(); } diff --git a/postgres-persistence/build.gradle b/postgres-persistence/build.gradle index 4f7ad65aba..74976bb711 100644 --- a/postgres-persistence/build.gradle +++ b/postgres-persistence/build.gradle @@ -21,13 +21,13 @@ dependencies { // SBMTODO: remove guava dep implementation "com.google.guava:guava:${revGuava}" - implementation "com.fasterxml.jackson.core:jackson-databind:${revJackson}" - implementation "com.fasterxml.jackson.core:jackson-core:${revJackson}" + implementation "com.fasterxml.jackson.core:jackson-databind" + implementation "com.fasterxml.jackson.core:jackson-core" - implementation "org.apache.commons:commons-lang3:${revCommonsLang3}" - implementation "org.postgresql:postgresql:${revPostgres}" - implementation "com.zaxxer:HikariCP:${revHikariCP}" - implementation "org.flywaydb:flyway-core:${revFlywayCore}" + implementation "org.apache.commons:commons-lang3" + implementation "org.postgresql:postgresql" + implementation "com.zaxxer:HikariCP" + implementation "org.flywaydb:flyway-core" testImplementation project(':conductor-core').sourceSets.test.output } diff --git a/postgres-persistence/src/main/java/com/netflix/conductor/postgres/config/PostgresConfiguration.java b/postgres-persistence/src/main/java/com/netflix/conductor/postgres/config/PostgresConfiguration.java index 02d5515afc..d2fc52f950 100644 --- a/postgres-persistence/src/main/java/com/netflix/conductor/postgres/config/PostgresConfiguration.java +++ b/postgres-persistence/src/main/java/com/netflix/conductor/postgres/config/PostgresConfiguration.java @@ -13,21 +13,17 @@ package com.netflix.conductor.postgres.config; import com.fasterxml.jackson.databind.ObjectMapper; -import com.netflix.conductor.dao.EventHandlerDAO; import com.netflix.conductor.dao.ExecutionDAO; import com.netflix.conductor.dao.MetadataDAO; -import com.netflix.conductor.dao.PollDataDAO; import com.netflix.conductor.dao.QueueDAO; -import com.netflix.conductor.dao.RateLimitingDAO; import com.netflix.conductor.postgres.dao.PostgresExecutionDAO; import com.netflix.conductor.postgres.dao.PostgresMetadataDAO; import com.netflix.conductor.postgres.dao.PostgresQueueDAO; +import javax.sql.DataSource; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import 
org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; -import javax.sql.DataSource; - @SuppressWarnings("SpringJavaInjectionPointsAutowiringInspection") @Configuration(proxyBeanMethods = false) @ConditionalOnProperty(name = "db", havingValue = "postgres") @@ -39,34 +35,18 @@ public DataSource dataSource(PostgresProperties config) { } @Bean - public MetadataDAO mySqlMetadataDAO(ObjectMapper objectMapper, DataSource dataSource, - PostgresProperties properties) { - return new PostgresMetadataDAO(objectMapper, dataSource, properties); - } - - @Bean - public EventHandlerDAO mySqlEventHandlerDAO(ObjectMapper objectMapper, DataSource dataSource, + public MetadataDAO postgresMetadataDAO(ObjectMapper objectMapper, DataSource dataSource, PostgresProperties properties) { return new PostgresMetadataDAO(objectMapper, dataSource, properties); } @Bean - public ExecutionDAO mySqlExecutionDAO(ObjectMapper objectMapper, DataSource dataSource) { - return new PostgresExecutionDAO(objectMapper, dataSource); - } - - @Bean - public RateLimitingDAO mySqlRateLimitingDAO(ObjectMapper objectMapper, DataSource dataSource) { - return new PostgresExecutionDAO(objectMapper, dataSource); - } - - @Bean - public PollDataDAO mySqlPollDataDAO(ObjectMapper objectMapper, DataSource dataSource) { + public ExecutionDAO postgresExecutionDAO(ObjectMapper objectMapper, DataSource dataSource) { return new PostgresExecutionDAO(objectMapper, dataSource); } @Bean - public QueueDAO mySqlQueueDAO(ObjectMapper objectMapper, DataSource dataSource) { + public QueueDAO postgresQueueDAO(ObjectMapper objectMapper, DataSource dataSource) { return new PostgresQueueDAO(objectMapper, dataSource); } } diff --git a/postgres-persistence/src/main/java/com/netflix/conductor/postgres/config/PostgresDataSourceProvider.java b/postgres-persistence/src/main/java/com/netflix/conductor/postgres/config/PostgresDataSourceProvider.java index b619b05d98..0372116579 100644 --- a/postgres-persistence/src/main/java/com/netflix/conductor/postgres/config/PostgresDataSourceProvider.java +++ b/postgres-persistence/src/main/java/com/netflix/conductor/postgres/config/PostgresDataSourceProvider.java @@ -15,15 +15,15 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder; import com.zaxxer.hikari.HikariConfig; import com.zaxxer.hikari.HikariDataSource; +import java.nio.file.Paths; +import java.util.concurrent.ThreadFactory; +import javax.inject.Provider; +import javax.sql.DataSource; import org.flywaydb.core.Flyway; +import org.flywaydb.core.api.configuration.FluentConfiguration; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.inject.Provider; -import javax.sql.DataSource; -import java.nio.file.Paths; -import java.util.concurrent.ThreadFactory; - public class PostgresDataSourceProvider implements Provider { private static final Logger LOGGER = LoggerFactory.getLogger(PostgresDataSourceProvider.class); @@ -81,15 +81,17 @@ private void flywayMigrate(DataSource dataSource) { return; } - Flyway flyway = new Flyway(); + FluentConfiguration fluentConfiguration = Flyway.configure() + .locations(Paths.get("db", "migration_postgres").toString()) + .dataSource(dataSource) + .placeholderReplacement(false); + properties.getFlywayTable().ifPresent(tableName -> { LOGGER.debug("Using Flyway migration table '{}'", tableName); - flyway.setTable(tableName); + fluentConfiguration.table(tableName); }); - flyway.setLocations(Paths.get("db", "migration_postgres").toString()); - flyway.setDataSource(dataSource); 
- flyway.setPlaceholderReplacement(false); + Flyway flyway = new Flyway(fluentConfiguration); flyway.migrate(); } } diff --git a/postgres-persistence/src/test/java/com/netflix/conductor/postgres/util/PostgresDAOTestUtil.java b/postgres-persistence/src/test/java/com/netflix/conductor/postgres/util/PostgresDAOTestUtil.java index 3d4855d0c9..23d026b1e4 100644 --- a/postgres-persistence/src/test/java/com/netflix/conductor/postgres/util/PostgresDAOTestUtil.java +++ b/postgres-persistence/src/test/java/com/netflix/conductor/postgres/util/PostgresDAOTestUtil.java @@ -12,22 +12,22 @@ */ package com.netflix.conductor.postgres.util; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + import com.fasterxml.jackson.databind.ObjectMapper; import com.netflix.conductor.postgres.config.PostgresProperties; import com.zaxxer.hikari.HikariDataSource; -import org.flywaydb.core.Flyway; -import org.postgresql.ds.PGSimpleDataSource; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import javax.sql.DataSource; import java.nio.file.Paths; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.SQLException; - -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; +import javax.sql.DataSource; +import org.flywaydb.core.Flyway; +import org.flywaydb.core.api.configuration.FluentConfiguration; +import org.postgresql.ds.PGSimpleDataSource; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class PostgresDAOTestUtil { @@ -77,11 +77,12 @@ private HikariDataSource getDataSource(PostgresProperties properties) { } private void flywayMigrate(DataSource dataSource) { + FluentConfiguration fluentConfiguration = Flyway.configure() + .locations(Paths.get("db", "migration_postgres").toString()) + .dataSource(dataSource) + .placeholderReplacement(false); - Flyway flyway = new Flyway(); - flyway.setLocations(Paths.get("db", "migration_postgres").toString()); - flyway.setDataSource(dataSource); - flyway.setPlaceholderReplacement(false); + Flyway flyway = new Flyway(fluentConfiguration); flyway.migrate(); } diff --git a/redis-lock/build.gradle b/redis-lock/build.gradle index 1dc4f8f287..40d3e58642 100644 --- a/redis-lock/build.gradle +++ b/redis-lock/build.gradle @@ -3,7 +3,7 @@ dependencies { implementation project(':conductor-core') implementation 'org.springframework.boot:spring-boot-starter' - implementation "org.apache.commons:commons-lang3:${revCommonsLang3}" + implementation "org.apache.commons:commons-lang3" implementation "org.redisson:redisson:${revRedisson}" testImplementation "com.github.kstyrc:embedded-redis:${revEmbeddedRedis}" diff --git a/redis-persistence/src/main/java/com/netflix/conductor/redis/config/dynomite/DynomiteClusterConfiguration.java b/redis-persistence/src/main/java/com/netflix/conductor/redis/config/dynomite/DynomiteClusterConfiguration.java index 28465ce71c..b3cefb972e 100644 --- a/redis-persistence/src/main/java/com/netflix/conductor/redis/config/dynomite/DynomiteClusterConfiguration.java +++ b/redis-persistence/src/main/java/com/netflix/conductor/redis/config/dynomite/DynomiteClusterConfiguration.java @@ -12,6 +12,9 @@ */ package com.netflix.conductor.redis.config.dynomite; +import static com.netflix.conductor.redis.config.utils.RedisQueuesProvider.DEFAULT_CLIENT_INJECTION_NAME; +import static com.netflix.conductor.redis.config.utils.RedisQueuesProvider.READ_CLIENT_INJECTION_NAME; + import com.fasterxml.jackson.databind.ObjectMapper; import com.netflix.conductor.dao.EventHandlerDAO; import 
com.netflix.conductor.dao.ExecutionDAO; @@ -44,9 +47,6 @@ import org.springframework.context.annotation.Configuration; import redis.clients.jedis.commands.JedisCommands; -import static com.netflix.conductor.redis.config.utils.RedisQueuesProvider.DEFAULT_CLIENT_INJECTION_NAME; -import static com.netflix.conductor.redis.config.utils.RedisQueuesProvider.READ_CLIENT_INJECTION_NAME; - @SuppressWarnings("SpringJavaInjectionPointsAutowiringInspection") @Configuration(proxyBeanMethods = false) @ConditionalOnProperty(name = "db", havingValue = "dynomite") @@ -79,6 +79,11 @@ public JedisCommands readJedisCommands(RedisProperties properties, HostSupplier return new DynomiteJedisProvider(properties, hostSupplier, tokenMapSupplier).get(); } + @Bean + public JedisProxy jedisProxy(@Qualifier(DEFAULT_CLIENT_INJECTION_NAME) JedisCommands jedisCommands) { + return new JedisProxy(jedisCommands); + } + @Bean public ShardingStrategy shardingStrategy(ShardSupplier shardSupplier, RedisProperties properties) { return new RedisQueuesShardingStrategyProvider(shardSupplier, properties).get(); @@ -98,19 +103,19 @@ public MetadataDAO redisMetadataDAO(JedisProxy jedisProxy, ObjectMapper objectMa @Bean public ExecutionDAO redisExecutionDAO(JedisProxy jedisProxy, ObjectMapper objectMapper, - RedisProperties properties) { + RedisProperties properties) { return new RedisExecutionDAO(jedisProxy, objectMapper, properties); } @Bean public EventHandlerDAO eventHandlerDAO(JedisProxy jedisProxy, ObjectMapper objectMapper, - RedisProperties properties) { + RedisProperties properties) { return new RedisEventHandlerDAO(jedisProxy, objectMapper, properties); } @Bean public RateLimitingDAO rateLimitingDAO(JedisProxy jedisProxy, ObjectMapper objectMapper, - RedisProperties properties) { + RedisProperties properties) { return new RedisRateLimitingDAO(jedisProxy, objectMapper, properties); } diff --git a/redis-persistence/src/main/java/com/netflix/conductor/redis/config/inmemory/InMemoryRedisConfiguration.java b/redis-persistence/src/main/java/com/netflix/conductor/redis/config/inmemory/InMemoryRedisConfiguration.java index 610d9ff216..95f167c892 100644 --- a/redis-persistence/src/main/java/com/netflix/conductor/redis/config/inmemory/InMemoryRedisConfiguration.java +++ b/redis-persistence/src/main/java/com/netflix/conductor/redis/config/inmemory/InMemoryRedisConfiguration.java @@ -12,6 +12,9 @@ */ package com.netflix.conductor.redis.config.inmemory; +import static com.netflix.conductor.redis.config.utils.RedisQueuesProvider.DEFAULT_CLIENT_INJECTION_NAME; +import static com.netflix.conductor.redis.config.utils.RedisQueuesProvider.READ_CLIENT_INJECTION_NAME; + import com.fasterxml.jackson.databind.ObjectMapper; import com.netflix.conductor.dao.EventHandlerDAO; import com.netflix.conductor.dao.ExecutionDAO; @@ -42,9 +45,6 @@ import org.springframework.context.annotation.Configuration; import redis.clients.jedis.commands.JedisCommands; -import static com.netflix.conductor.redis.config.utils.RedisQueuesProvider.DEFAULT_CLIENT_INJECTION_NAME; -import static com.netflix.conductor.redis.config.utils.RedisQueuesProvider.READ_CLIENT_INJECTION_NAME; - @SuppressWarnings("SpringJavaInjectionPointsAutowiringInspection") @Configuration(proxyBeanMethods = false) @ConditionalOnProperty(name = "db", havingValue = "memory", matchIfMissing = true) @@ -75,6 +75,11 @@ public JedisCommands readJedisCommands(JedisMock jedisMock) { return jedisMock; } + @Bean + public JedisProxy jedisProxy(@Qualifier(DEFAULT_CLIENT_INJECTION_NAME) JedisCommands jedisCommands) { 
+ return new JedisProxy(jedisCommands); + } + @Bean public ShardingStrategy shardingStrategy(ShardSupplier shardSupplier, RedisProperties properties) { return new RedisQueuesShardingStrategyProvider(shardSupplier, properties).get(); @@ -94,19 +99,19 @@ public MetadataDAO redisMetadataDAO(JedisProxy jedisProxy, ObjectMapper objectMa @Bean public ExecutionDAO redisExecutionDAO(JedisProxy jedisProxy, ObjectMapper objectMapper, - RedisProperties properties) { + RedisProperties properties) { return new RedisExecutionDAO(jedisProxy, objectMapper, properties); } @Bean public EventHandlerDAO eventHandlerDAO(JedisProxy jedisProxy, ObjectMapper objectMapper, - RedisProperties properties) { + RedisProperties properties) { return new RedisEventHandlerDAO(jedisProxy, objectMapper, properties); } @Bean public RateLimitingDAO rateLimitingDAO(JedisProxy jedisProxy, ObjectMapper objectMapper, - RedisProperties properties) { + RedisProperties properties) { return new RedisRateLimitingDAO(jedisProxy, objectMapper, properties); } diff --git a/redis-persistence/src/main/java/com/netflix/conductor/redis/config/rediscluster/RedisClusterConfiguration.java b/redis-persistence/src/main/java/com/netflix/conductor/redis/config/rediscluster/RedisClusterConfiguration.java index 9156f5ee8a..80f5ca9d97 100644 --- a/redis-persistence/src/main/java/com/netflix/conductor/redis/config/rediscluster/RedisClusterConfiguration.java +++ b/redis-persistence/src/main/java/com/netflix/conductor/redis/config/rediscluster/RedisClusterConfiguration.java @@ -12,6 +12,9 @@ */ package com.netflix.conductor.redis.config.rediscluster; +import static com.netflix.conductor.redis.config.utils.RedisQueuesProvider.DEFAULT_CLIENT_INJECTION_NAME; +import static com.netflix.conductor.redis.config.utils.RedisQueuesProvider.READ_CLIENT_INJECTION_NAME; + import com.fasterxml.jackson.databind.ObjectMapper; import com.netflix.conductor.dao.EventHandlerDAO; import com.netflix.conductor.dao.ExecutionDAO; @@ -42,9 +45,6 @@ import org.springframework.context.annotation.Configuration; import redis.clients.jedis.commands.JedisCommands; -import static com.netflix.conductor.redis.config.utils.RedisQueuesProvider.DEFAULT_CLIENT_INJECTION_NAME; -import static com.netflix.conductor.redis.config.utils.RedisQueuesProvider.READ_CLIENT_INJECTION_NAME; - @SuppressWarnings("SpringJavaInjectionPointsAutowiringInspection") @Configuration(proxyBeanMethods = false) @ConditionalOnProperty(name = "db", havingValue = "redis_cluster") @@ -65,6 +65,11 @@ public JedisCommands readJedisCommands(HostSupplier hostSupplier, RedisPropertie return new RedisClusterJedisProvider(hostSupplier, properties).get(); } + @Bean + public JedisProxy jedisProxy(@Qualifier(DEFAULT_CLIENT_INJECTION_NAME) JedisCommands jedisCommands) { + return new JedisProxy(jedisCommands); + } + @Bean public ShardSupplier shardSupplier(HostSupplier hostSupplier, RedisProperties properties) { return new DynoShardSupplierProvider(hostSupplier, properties).get(); @@ -89,19 +94,19 @@ public MetadataDAO redisMetadataDAO(JedisProxy jedisProxy, ObjectMapper objectMa @Bean public ExecutionDAO redisExecutionDAO(JedisProxy jedisProxy, ObjectMapper objectMapper, - RedisProperties properties) { + RedisProperties properties) { return new RedisExecutionDAO(jedisProxy, objectMapper, properties); } @Bean public EventHandlerDAO eventHandlerDAO(JedisProxy jedisProxy, ObjectMapper objectMapper, - RedisProperties properties) { + RedisProperties properties) { return new RedisEventHandlerDAO(jedisProxy, objectMapper, properties); } 
@Bean public RateLimitingDAO rateLimitingDAO(JedisProxy jedisProxy, ObjectMapper objectMapper, - RedisProperties properties) { + RedisProperties properties) { return new RedisRateLimitingDAO(jedisProxy, objectMapper, properties); } diff --git a/redis-persistence/src/main/java/com/netflix/conductor/redis/config/redissentinel/RedisSentinelConfiguration.java b/redis-persistence/src/main/java/com/netflix/conductor/redis/config/redissentinel/RedisSentinelConfiguration.java index 878c6ab482..8fde997f03 100644 --- a/redis-persistence/src/main/java/com/netflix/conductor/redis/config/redissentinel/RedisSentinelConfiguration.java +++ b/redis-persistence/src/main/java/com/netflix/conductor/redis/config/redissentinel/RedisSentinelConfiguration.java @@ -12,6 +12,9 @@ */ package com.netflix.conductor.redis.config.redissentinel; +import static com.netflix.conductor.redis.config.utils.RedisQueuesProvider.DEFAULT_CLIENT_INJECTION_NAME; +import static com.netflix.conductor.redis.config.utils.RedisQueuesProvider.READ_CLIENT_INJECTION_NAME; + import com.fasterxml.jackson.databind.ObjectMapper; import com.netflix.conductor.dao.EventHandlerDAO; import com.netflix.conductor.dao.ExecutionDAO; @@ -44,9 +47,6 @@ import org.springframework.context.annotation.Configuration; import redis.clients.jedis.commands.JedisCommands; -import static com.netflix.conductor.redis.config.utils.RedisQueuesProvider.DEFAULT_CLIENT_INJECTION_NAME; -import static com.netflix.conductor.redis.config.utils.RedisQueuesProvider.READ_CLIENT_INJECTION_NAME; - @SuppressWarnings("SpringJavaInjectionPointsAutowiringInspection") @Configuration(proxyBeanMethods = false) @ConditionalOnProperty(name = "db", havingValue = "redis_sentinel") @@ -67,6 +67,11 @@ public JedisCommands readJedisCommands(HostSupplier hostSupplier, RedisPropertie return new RedisSentinelJedisProvider(hostSupplier, properties).get(); } + @Bean + public JedisProxy jedisProxy(@Qualifier(DEFAULT_CLIENT_INJECTION_NAME) JedisCommands jedisCommands) { + return new JedisProxy(jedisCommands); + } + @Bean public TokenMapSupplier tokenMapSupplier() { return new TokenMapSupplierProvider().get(); @@ -96,19 +101,19 @@ public MetadataDAO redisMetadataDAO(JedisProxy jedisProxy, ObjectMapper objectMa @Bean public ExecutionDAO redisExecutionDAO(JedisProxy jedisProxy, ObjectMapper objectMapper, - RedisProperties properties) { + RedisProperties properties) { return new RedisExecutionDAO(jedisProxy, objectMapper, properties); } @Bean public EventHandlerDAO eventHandlerDAO(JedisProxy jedisProxy, ObjectMapper objectMapper, - RedisProperties properties) { + RedisProperties properties) { return new RedisEventHandlerDAO(jedisProxy, objectMapper, properties); } @Bean public RateLimitingDAO rateLimitingDAO(JedisProxy jedisProxy, ObjectMapper objectMapper, - RedisProperties properties) { + RedisProperties properties) { return new RedisRateLimitingDAO(jedisProxy, objectMapper, properties); } diff --git a/redis-persistence/src/main/java/com/netflix/conductor/redis/config/utils/JedisProxy.java b/redis-persistence/src/main/java/com/netflix/conductor/redis/config/utils/JedisProxy.java index 084e8bd4f4..26623603bc 100644 --- a/redis-persistence/src/main/java/com/netflix/conductor/redis/config/utils/JedisProxy.java +++ b/redis-persistence/src/main/java/com/netflix/conductor/redis/config/utils/JedisProxy.java @@ -12,15 +12,7 @@ */ package com.netflix.conductor.redis.config.utils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Qualifier; -import 
org.springframework.stereotype.Component; -import redis.clients.jedis.ScanParams; -import redis.clients.jedis.ScanResult; -import redis.clients.jedis.Tuple; -import redis.clients.jedis.commands.JedisCommands; -import redis.clients.jedis.params.ZAddParams; +import static com.netflix.conductor.redis.config.utils.RedisQueuesProvider.DEFAULT_CLIENT_INJECTION_NAME; import java.util.HashMap; import java.util.HashSet; @@ -29,13 +21,18 @@ import java.util.Map.Entry; import java.util.Optional; import java.util.Set; - -import static com.netflix.conductor.redis.config.utils.RedisQueuesProvider.DEFAULT_CLIENT_INJECTION_NAME; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Qualifier; +import redis.clients.jedis.ScanParams; +import redis.clients.jedis.ScanResult; +import redis.clients.jedis.Tuple; +import redis.clients.jedis.commands.JedisCommands; +import redis.clients.jedis.params.ZAddParams; /** * Proxy for the Dynomite client */ -@Component public class JedisProxy { private static final Logger LOGGER = LoggerFactory.getLogger(JedisProxy.class); diff --git a/redis-persistence/src/main/java/com/netflix/conductor/redis/config/utils/RedisProperties.java b/redis-persistence/src/main/java/com/netflix/conductor/redis/config/utils/RedisProperties.java index f4b8a9355f..02fc780e8f 100644 --- a/redis-persistence/src/main/java/com/netflix/conductor/redis/config/utils/RedisProperties.java +++ b/redis-persistence/src/main/java/com/netflix/conductor/redis/config/utils/RedisProperties.java @@ -41,7 +41,6 @@ public class RedisProperties { @Value("${EC2_AVAILABILITY_ZONE:us-east-1c}") private String availabilityZone; - // SBMTODO: default null value (?) @Value("${workflow.dynomite.cluster:#{null}}") private String cluster; diff --git a/rest/src/main/java/com/netflix/conductor/rest/controllers/AdminResource.java b/rest/src/main/java/com/netflix/conductor/rest/controllers/AdminResource.java index 6f8d0467e1..54ad71e869 100644 --- a/rest/src/main/java/com/netflix/conductor/rest/controllers/AdminResource.java +++ b/rest/src/main/java/com/netflix/conductor/rest/controllers/AdminResource.java @@ -12,9 +12,15 @@ */ package com.netflix.conductor.rest.controllers; +import static com.netflix.conductor.rest.config.RequestMappingConstants.ADMIN; +import static org.springframework.http.MediaType.APPLICATION_JSON_VALUE; +import static org.springframework.http.MediaType.TEXT_PLAIN_VALUE; + import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.service.AdminService; import io.swagger.v3.oas.annotations.Operation; +import java.util.List; +import java.util.Map; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.PostMapping; @@ -22,13 +28,6 @@ import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; -import java.util.List; -import java.util.Map; - -import static com.netflix.conductor.rest.config.RequestMappingConstants.ADMIN; -import static org.springframework.http.MediaType.APPLICATION_JSON_VALUE; -import static org.springframework.http.MediaType.TEXT_PLAIN_VALUE; - @RestController @RequestMapping(ADMIN) public class AdminResource { @@ -48,8 +47,8 @@ public Map getAllConfig() { @GetMapping("/task/{tasktype}") @Operation(summary = "Get the list of pending tasks for a given task type") public List view(@PathVariable("tasktype") String taskType, - 
@RequestParam(value = "start", defaultValue = "0") int start, - @RequestParam(value = "count", defaultValue = "100") int count) { + @RequestParam(value = "start", defaultValue = "0", required = false) int start, + @RequestParam(value = "count", defaultValue = "100", required = false) int count) { return adminService.getListOfPendingTask(taskType, start, count); } diff --git a/rest/src/main/java/com/netflix/conductor/rest/controllers/EventResource.java b/rest/src/main/java/com/netflix/conductor/rest/controllers/EventResource.java index df20f6df6b..e69b6f85ab 100644 --- a/rest/src/main/java/com/netflix/conductor/rest/controllers/EventResource.java +++ b/rest/src/main/java/com/netflix/conductor/rest/controllers/EventResource.java @@ -12,9 +12,13 @@ */ package com.netflix.conductor.rest.controllers; +import static com.netflix.conductor.rest.config.RequestMappingConstants.EVENT; + import com.netflix.conductor.common.metadata.events.EventHandler; import com.netflix.conductor.service.EventService; import io.swagger.v3.oas.annotations.Operation; +import java.util.List; +import java.util.Map; import org.springframework.web.bind.annotation.DeleteMapping; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PathVariable; @@ -25,11 +29,6 @@ import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; -import java.util.List; -import java.util.Map; - -import static com.netflix.conductor.rest.config.RequestMappingConstants.EVENT; - @RestController @RequestMapping(EVENT) public class EventResource { @@ -67,13 +66,14 @@ public List getEventHandlers() { @GetMapping("/{event}") @Operation(summary = "Get event handlers for a given event") public List getEventHandlersForEvent(@PathVariable("event") String event, - @RequestParam(value = "activeOnly", defaultValue = "true") boolean activeOnly) { + @RequestParam(value = "activeOnly", defaultValue = "true", required = false) boolean activeOnly) { return eventService.getEventHandlersForEvent(event, activeOnly); } @GetMapping("/queues") @Operation(summary = "Get registered queues") - public Map getEventQueues(@RequestParam(value = "verbose", defaultValue = "false") boolean verbose) { + public Map getEventQueues( + @RequestParam(value = "verbose", defaultValue = "false", required = false) boolean verbose) { return eventService.getEventQueues(verbose); } diff --git a/rest/src/main/java/com/netflix/conductor/rest/controllers/TaskResource.java b/rest/src/main/java/com/netflix/conductor/rest/controllers/TaskResource.java index 951cd6b3fd..44fb753188 100644 --- a/rest/src/main/java/com/netflix/conductor/rest/controllers/TaskResource.java +++ b/rest/src/main/java/com/netflix/conductor/rest/controllers/TaskResource.java @@ -12,6 +12,10 @@ */ package com.netflix.conductor.rest.controllers; +import static com.netflix.conductor.rest.config.RequestMappingConstants.TASKS; +import static org.springframework.http.MediaType.APPLICATION_JSON_VALUE; +import static org.springframework.http.MediaType.TEXT_PLAIN_VALUE; + import com.netflix.conductor.common.metadata.tasks.PollData; import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.tasks.TaskExecLog; @@ -21,6 +25,8 @@ import com.netflix.conductor.common.run.TaskSummary; import com.netflix.conductor.service.TaskService; import io.swagger.v3.oas.annotations.Operation; +import java.util.List; +import java.util.Map; import org.springframework.web.bind.annotation.DeleteMapping; 
import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PathVariable; @@ -30,13 +36,6 @@ import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; -import java.util.List; -import java.util.Map; - -import static com.netflix.conductor.rest.config.RequestMappingConstants.TASKS; -import static org.springframework.http.MediaType.APPLICATION_JSON_VALUE; -import static org.springframework.http.MediaType.TEXT_PLAIN_VALUE; - @RestController @RequestMapping(TASKS) public class TaskResource { @@ -50,16 +49,16 @@ public TaskResource(TaskService taskService) { @GetMapping("/poll/{tasktype}") @Operation(summary = "Poll for a task of a certain type") public Task poll(@PathVariable("tasktype") String taskType, - @RequestParam("workerid") String workerId, - @RequestParam("domain") String domain) { + @RequestParam(value = "workerid", required = false) String workerId, + @RequestParam(value = "domain", required = false) String domain) { return taskService.poll(taskType, workerId, domain); } @GetMapping("/poll/batch/{tasktype}") @Operation(summary = "Batch poll for a task of a certain type") public List<Task> batchPoll(@PathVariable("tasktype") String taskType, - @RequestParam("workerid") String workerId, - @RequestParam("domain") String domain, + @RequestParam(value = "workerid", required = false) String workerId, + @RequestParam(value = "domain", required = false) String domain, @RequestParam(value = "count", defaultValue = "1") int count, @RequestParam(value = "timeout", defaultValue = "100") int timeout) { return taskService.batchPoll(taskType, workerId, domain, count, timeout); @@ -68,8 +67,8 @@ public List<Task> batchPoll(@PathVariable("tasktype") String taskType, @GetMapping("/in_progress/{tasktype}") @Operation(summary = "Get in progress tasks. The results are paginated.") public List<Task> getTasks(@PathVariable("tasktype") String taskType, - @RequestParam("startKey") String startKey, - @RequestParam(value = "count", defaultValue = "100") int count) { + @RequestParam(value = "startKey", required = false) String startKey, + @RequestParam(value = "count", defaultValue = "100", required = false) int count) { return taskService.getTasks(taskType, startKey, count); } @@ -89,7 +88,7 @@ public String updateTask(@RequestBody TaskResult taskResult) { @PostMapping("/{taskId}/ack") @Operation(summary = "Ack Task is received") public String ack(@PathVariable("taskId") String taskId, - @RequestParam("workerid") String workerId) { + @RequestParam(value = "workerid", required = false) String workerId) { return taskService.ackTaskReceived(taskId, workerId); } @@ -120,7 +119,7 @@ public void removeTaskFromQueue(@PathVariable("taskType") String taskType, @GetMapping("/queue/sizes") @Operation(summary = "Get Task type queue sizes") - public Map<String, Integer> size(@RequestParam("taskType") List<String> taskTypes) { + public Map<String, Integer> size(@RequestParam(value = "taskType", required = false) List<String> taskTypes) { return taskService.getTaskQueueSizes(taskTypes); } @@ -158,11 +157,12 @@ public String requeuePendingTask(@PathVariable("taskType") String taskType) { description = "use sort options as sort=<field>:ASC|DESC e.g. sort=name&sort=workflowId:DESC."
+ " If order is not specified, defaults to ASC") @GetMapping(value = "/search", produces = APPLICATION_JSON_VALUE) - public SearchResult search(@RequestParam(value = "start", defaultValue = "0") int start, - @RequestParam(value = "size", defaultValue = "100") int size, - @RequestParam("sort") String sort, - @RequestParam(value = "freeText", defaultValue = "*") String freeText, - @RequestParam("query") String query) { + public SearchResult search( + @RequestParam(value = "start", defaultValue = "0", required = false) int start, + @RequestParam(value = "size", defaultValue = "100", required = false) int size, + @RequestParam(value = "sort", required = false) String sort, + @RequestParam(value = "freeText", defaultValue = "*", required = false) String freeText, + @RequestParam(value = "query", required = false) String query) { return taskService.search(start, size, sort, freeText, query); } diff --git a/rest/src/main/java/com/netflix/conductor/rest/controllers/WorkflowBulkResource.java b/rest/src/main/java/com/netflix/conductor/rest/controllers/WorkflowBulkResource.java index 535dfe1708..10a5d16105 100644 --- a/rest/src/main/java/com/netflix/conductor/rest/controllers/WorkflowBulkResource.java +++ b/rest/src/main/java/com/netflix/conductor/rest/controllers/WorkflowBulkResource.java @@ -12,9 +12,12 @@ */ package com.netflix.conductor.rest.controllers; +import static com.netflix.conductor.rest.config.RequestMappingConstants.WORKFLOW_BULK; + import com.netflix.conductor.core.utils.BulkResponse; import com.netflix.conductor.service.WorkflowBulkService; import io.swagger.v3.oas.annotations.Operation; +import java.util.List; import org.springframework.web.bind.annotation.DeleteMapping; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.PutMapping; @@ -23,10 +26,6 @@ import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; -import java.util.List; - -import static com.netflix.conductor.rest.config.RequestMappingConstants.WORKFLOW_BULK; - /** * Synchronous Bulk APIs to process the workflows in batches */ @@ -74,7 +73,7 @@ public BulkResponse resumeWorkflow(@RequestBody List workflowIds) { @PostMapping("/restart") @Operation(summary = "Restart the list of completed workflow") public BulkResponse restart(@RequestBody List workflowIds, - @RequestParam(value = "useLatestDefinitions", defaultValue = "false") boolean useLatestDefinitions) { + @RequestParam(value = "useLatestDefinitions", defaultValue = "false", required = false) boolean useLatestDefinitions) { return workflowBulkService.restart(workflowIds, useLatestDefinitions); } diff --git a/rest/src/main/java/com/netflix/conductor/rest/controllers/WorkflowResource.java b/rest/src/main/java/com/netflix/conductor/rest/controllers/WorkflowResource.java index fac776aac3..4dc9cdacf6 100644 --- a/rest/src/main/java/com/netflix/conductor/rest/controllers/WorkflowResource.java +++ b/rest/src/main/java/com/netflix/conductor/rest/controllers/WorkflowResource.java @@ -12,6 +12,10 @@ */ package com.netflix.conductor.rest.controllers; +import static com.netflix.conductor.rest.config.RequestMappingConstants.WORKFLOW; +import static org.springframework.http.MediaType.APPLICATION_JSON_VALUE; +import static org.springframework.http.MediaType.TEXT_PLAIN_VALUE; + import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest; import com.netflix.conductor.common.metadata.workflow.SkipTaskRequest; import 
com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; @@ -21,6 +25,8 @@ import com.netflix.conductor.common.run.WorkflowSummary; import com.netflix.conductor.service.WorkflowService; import io.swagger.v3.oas.annotations.Operation; +import java.util.List; +import java.util.Map; import org.springframework.web.bind.annotation.DeleteMapping; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PathVariable; @@ -31,13 +37,6 @@ import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; -import java.util.List; -import java.util.Map; - -import static com.netflix.conductor.rest.config.RequestMappingConstants.WORKFLOW; -import static org.springframework.http.MediaType.APPLICATION_JSON_VALUE; -import static org.springframework.http.MediaType.TEXT_PLAIN_VALUE; - @RestController @RequestMapping(WORKFLOW) public class WorkflowResource { @@ -68,16 +67,16 @@ public String startWorkflow(@PathVariable("name") String name, @Operation(summary = "Lists workflows for the given correlation id") public List<Workflow> getWorkflows(@PathVariable("name") String name, @PathVariable("correlationId") String correlationId, - @RequestParam(value = "includeClosed", defaultValue = "false") boolean includeClosed, - @RequestParam(value = "includeTasks", defaultValue = "false") boolean includeTasks) { + @RequestParam(value = "includeClosed", defaultValue = "false", required = false) boolean includeClosed, + @RequestParam(value = "includeTasks", defaultValue = "false", required = false) boolean includeTasks) { return workflowService.getWorkflows(name, correlationId, includeClosed, includeTasks); } @PostMapping(value = "/{name}/correlated", consumes = APPLICATION_JSON_VALUE) @Operation(summary = "Lists workflows for the given correlation id list") public Map<String, List<Workflow>> getWorkflows(@PathVariable("name") String name, - @RequestParam(value = "includeClosed", defaultValue = "false") boolean includeClosed, - @RequestParam(value = "includeTasks", defaultValue = "false") boolean includeTasks, + @RequestParam(value = "includeClosed", defaultValue = "false", required = false) boolean includeClosed, + @RequestParam(value = "includeTasks", defaultValue = "false", required = false) boolean includeTasks, List<String> correlationIds) { return workflowService.getWorkflows(name, includeClosed, includeTasks, correlationIds); } @@ -85,23 +84,23 @@ public Map<String, List<Workflow>> getWorkflows(@PathVariable("name") String nam @GetMapping("/{workflowId}") @Operation(summary = "Gets the workflow by workflow id") public Workflow getExecutionStatus(@PathVariable("workflowId") String workflowId, - @RequestParam(value = "includeTasks", defaultValue = "true") boolean includeTasks) { + @RequestParam(value = "includeTasks", defaultValue = "true", required = false) boolean includeTasks) { return workflowService.getExecutionStatus(workflowId, includeTasks); } @DeleteMapping("/{workflowId}/remove") @Operation(summary = "Removes the workflow from the system") public void delete(@PathVariable("workflowId") String workflowId, - @RequestParam(value = "archiveWorkflow", defaultValue = "true") boolean archiveWorkflow) { + @RequestParam(value = "archiveWorkflow", defaultValue = "true", required = false) boolean archiveWorkflow) { workflowService.deleteWorkflow(workflowId, archiveWorkflow); } @GetMapping("/running/{name}") @Operation(summary = "Retrieve all the running workflows") public List<String> getRunningWorkflow(@PathVariable("name") String workflowName, - @RequestParam(value = 
"version", defaultValue = "1") int version, - @RequestParam("startTime") Long startTime, - @RequestParam("endTime") Long endTime) { + @RequestParam(value = "version", defaultValue = "1", required = false) int version, + @RequestParam(value = "startTime", required = false) Long startTime, + @RequestParam(value = "endTime", required = false) Long endTime) { return workflowService.getRunningWorkflows(workflowName, version, startTime, endTime); } @@ -142,7 +141,7 @@ public String rerun(@PathVariable("workflowId") String workflowId, @PostMapping("/{workflowId}/restart") @Operation(summary = "Restarts a completed workflow") public void restart(@PathVariable("workflowId") String workflowId, - @RequestParam(value = "useLatestDefinitions", defaultValue = "false") boolean useLatestDefinitions) { + @RequestParam(value = "useLatestDefinitions", defaultValue = "false", required = false) boolean useLatestDefinitions) { workflowService.restartWorkflow(workflowId, useLatestDefinitions); } @@ -161,7 +160,7 @@ public void resetWorkflow(@PathVariable("workflowId") String workflowId) { @DeleteMapping("/{workflowId}") @Operation(summary = "Terminate workflow execution") public void terminate(@PathVariable("workflowId") String workflowId, - @RequestParam("reason") String reason) { + @RequestParam(value = "reason", required = false) String reason) { workflowService.terminateWorkflow(workflowId, reason); } @@ -169,11 +168,12 @@ public void terminate(@PathVariable("workflowId") String workflowId, description = "use sort options as sort=:ASC|DESC e.g. sort=name&sort=workflowId:DESC." + " If order is not specified, defaults to ASC.") @GetMapping(value = "/search", produces = APPLICATION_JSON_VALUE) - public SearchResult search(@RequestParam(value = "start", defaultValue = "0") int start, - @RequestParam(value = "size", defaultValue = "100") int size, - @RequestParam("sort") String sort, - @RequestParam(value = "freeText", defaultValue = "*") String freeText, - @RequestParam("query") String query) { + public SearchResult search( + @RequestParam(value = "start", defaultValue = "0", required = false) int start, + @RequestParam(value = "size", defaultValue = "100", required = false) int size, + @RequestParam(value = "sort", required = false) String sort, + @RequestParam(value = "freeText", defaultValue = "*", required = false) String freeText, + @RequestParam(value = "query", required = false) String query) { return workflowService.searchWorkflows(start, size, sort, freeText, query); } @@ -182,11 +182,11 @@ public SearchResult search(@RequestParam(value = "start", defau " If order is not specified, defaults to ASC") @GetMapping(value = "/search-by-tasks", produces = APPLICATION_JSON_VALUE) public SearchResult searchWorkflowsByTasks( - @RequestParam(value = "start", defaultValue = "0") int start, - @RequestParam(value = "size", defaultValue = "100") int size, - @RequestParam("sort") String sort, - @RequestParam(value = "freeText", defaultValue = "*") String freeText, - @RequestParam("query") String query) { + @RequestParam(value = "start", defaultValue = "0", required = false) int start, + @RequestParam(value = "size", defaultValue = "100", required = false) int size, + @RequestParam(value = "sort", required = false) String sort, + @RequestParam(value = "freeText", defaultValue = "*", required = false) String freeText, + @RequestParam(value = "query", required = false) String query) { return workflowService.searchWorkflowsByTasks(start, size, sort, freeText, query); } diff --git a/server/build.gradle 
b/server/build.gradle index 71b53b9390..fb8c2983c8 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -14,6 +14,7 @@ dependencies { implementation project(':conductor-grpc-server') implementation 'org.springframework.boot:spring-boot-starter' + implementation 'org.springframework.boot:spring-boot-starter-log4j2' implementation 'org.apache.logging.log4j:log4j-web' diff --git a/settings.gradle b/settings.gradle index 253fa28eed..2ac7a207e0 100644 --- a/settings.gradle +++ b/settings.gradle @@ -23,6 +23,6 @@ include 'grpc' include 'grpc-server' include 'grpc-client' -//include 'test-harness' +include 'test-harness' rootProject.children.each {it.name="conductor-${it.name}"} diff --git a/test-harness/build.gradle b/test-harness/build.gradle index 63854b315e..58cc48c5d0 100644 --- a/test-harness/build.gradle +++ b/test-harness/build.gradle @@ -12,46 +12,30 @@ buildscript { apply plugin: 'docker-compose' apply plugin: 'groovy' -configurations.all { - resolutionStrategy { - force "com.fasterxml.jackson.core:jackson-core:${revJacksonCore}" - force "com.fasterxml.jackson.core:jackson-databind:${revJacksonDatabind}" - force "com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider:${revJaxrsJackson}" - } -} dependencies { - testCompile project(':conductor-core') - testCompile project(':conductor-jersey') - testCompile project(':conductor-redis-persistence').sourceSets.test.output - testCompile project(':conductor-mysql-persistence').sourceSets.test.output - testCompile project(':conductor-postgres-persistence').sourceSets.test.output - testCompile project(':conductor-client') - testCompile project(':conductor-server') - testCompile project(':conductor-grpc-client') - testCompile project(':conductor-grpc-server') - testCompile "org.elasticsearch:elasticsearch:${revElasticSearch5}" - testCompile "org.eclipse.jetty:jetty-server:${revJetteyServer}" - testCompile "org.eclipse.jetty:jetty-servlet:${revJettyServlet}" - testCompile "org.rarefiedredis.redis:redis-java:${revRarefiedRedis}" - testCompile "com.sun.jersey.contribs:jersey-guice:${revJerseyGuice}" + testImplementation project(':conductor-core') + testImplementation project(':conductor-common') + testImplementation project(':conductor-contribs') + testImplementation project(':conductor-server') + testImplementation project(':conductor-rest') + testImplementation project(':conductor-redis-persistence') + testImplementation project(':conductor-mysql-persistence') + testImplementation project(':conductor-postgres-persistence') + testImplementation project(':conductor-grpc') + testImplementation project(':conductor-grpc-server') + testImplementation project(':conductor-grpc-client') + testImplementation project(':conductor-client') - testCompile "com.google.inject.extensions:guice-servlet:${revGuiceServlet}" - testCompile "io.swagger:swagger-jersey-jaxrs:${revSwaggerJersey}" - testCompile "org.awaitility:awaitility:${revAwaitility}" + testImplementation "org.codehaus.groovy:groovy-all:${revGroovy}" + testImplementation "org.spockframework:spock-core:${revSpock}" + testImplementation "org.spockframework:spock-spring:${revSpock}" - testCompile "org.codehaus.groovy:groovy-all:${revGroovy}" - testCompile "org.spockframework:spock-core:${revSpock}" - testCompile "org.spockframework:spock-guice:${revSpock}" - testCompile 'com.netflix.governator:governator-test-spock:latest.release' - testCompile "org.elasticsearch:elasticsearch:${revElasticSearch5}" - testCompile "org.elasticsearch.client:transport:${revElasticSearch5}" - testCompile 
"org.elasticsearch.client:elasticsearch-rest-client:${revElasticSearch5}" - testCompile "org.elasticsearch.client:elasticsearch-rest-high-level-client:${revElasticSearch5}" + testImplementation "org.testcontainers:elasticsearch:${revTestESContainer}" } -dockerCompose{ - integrationTestDocker{ +dockerCompose { + integrationTestDocker { useComposeFiles = ["./docker-compose.yml"] startedServices = ["mysqldb", "postgresdb"] waitForTcpPorts = true @@ -78,14 +62,8 @@ test.doFirst { test.finalizedBy("intDockerDown") - test { testLogging { exceptionFormat = 'full' } } - -task server(type: JavaExec) { - main = 'com.netflix.conductor.demo.Main' - classpath = sourceSets.test.runtimeClasspath -} diff --git a/test-harness/dependencies.lock b/test-harness/dependencies.lock deleted file mode 100644 index f5bb4791d0..0000000000 --- a/test-harness/dependencies.lock +++ /dev/null @@ -1,2336 +0,0 @@ -{ - "jacocoAgent": { - "org.jacoco:org.jacoco.agent": { - "locked": "0.8.1" - } - }, - "jacocoAnt": { - "org.jacoco:org.jacoco.ant": { - "locked": "0.8.1" - } - }, - "testCompile": { - "com.amazonaws:aws-java-sdk-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "1.11.893" - }, - "com.amazonaws:aws-java-sdk-s3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.11.86" - }, - "com.amazonaws:aws-java-sdk-sqs": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-contribs" - ], - "locked": "1.11.893" - }, - "com.datastax.cassandra:cassandra-driver-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-cassandra-persistence" - ], - "locked": "3.6.0" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.10.0" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.10.0" - }, - "com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "2.10.0" - }, - "com.github.rholder:guava-retrying": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.0.0" - }, - "com.github.vmg.protogen:protogen-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1.0.0" - }, - "com.google.api.grpc:proto-google-common-protos": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc" - ], - "locked": "1.0.0" - }, - "com.google.inject.extensions:guice-multibindings": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "4.1.0" - }, - "com.google.inject.extensions:guice-servlet": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "locked": "4.1.0", - "requested": "4.1.0" - }, - "com.google.inject:guice": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-contribs", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-mysql-persistence", - "com.netflix.conductor:conductor-postgres-persistence", - "com.netflix.conductor:conductor-redis-persistence", - "com.netflix.conductor:conductor-server" - ], - "locked": "4.1.0" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "3.5.1" - }, - 
"com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.2.0" - }, - "com.netflix.archaius:archaius-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "0.7.6" - }, - "com.netflix.conductor:conductor-cassandra-persistence": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-client": { - "project": true - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-contribs", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-grpc", - "com.netflix.conductor:conductor-grpc-client", - "com.netflix.conductor:conductor-grpc-server", - "com.netflix.conductor:conductor-jersey" - ], - "project": true - }, - "com.netflix.conductor:conductor-contribs": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-cassandra-persistence", - "com.netflix.conductor:conductor-contribs", - "com.netflix.conductor:conductor-es5-persistence", - "com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-grpc", - "com.netflix.conductor:conductor-grpc-client", - "com.netflix.conductor:conductor-grpc-server", - "com.netflix.conductor:conductor-jersey", - "com.netflix.conductor:conductor-mysql-persistence", - "com.netflix.conductor:conductor-postgres-persistence", - "com.netflix.conductor:conductor-redis-lock", - "com.netflix.conductor:conductor-redis-persistence", - "com.netflix.conductor:conductor-server", - "com.netflix.conductor:conductor-zookeeper-lock" - ], - "project": true - }, - "com.netflix.conductor:conductor-es5-persistence": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-es6-persistence": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-grpc": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc-client", - "com.netflix.conductor:conductor-grpc-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-grpc-client": { - "project": true - }, - "com.netflix.conductor:conductor-grpc-server": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-jersey": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-mysql-persistence": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-postgres-persistence": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-redis-lock": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-redis-persistence": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-server": { - "project": true - }, - "com.netflix.conductor:conductor-zookeeper-lock": { - "firstLevelTransitive": [ - 
"com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.dyno-queues:dyno-queues-redis": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-redis-persistence" - ], - "locked": "2.0.13" - }, - "com.netflix.eureka:eureka-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "1.8.7" - }, - "com.netflix.governator:governator-test-spock": { - "locked": "1.17.10", - "requested": "latest.release" - }, - "com.netflix.runtime:health-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc", - "com.netflix.conductor:conductor-jersey" - ], - "locked": "1.1.4" - }, - "com.netflix.runtime:health-guice": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "locked": "1.1.4" - }, - "com.netflix.servo:servo-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.12.17" - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-core" - ], - "locked": "0.68.0" - }, - "com.netflix.spectator:spectator-reg-metrics3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-contribs" - ], - "locked": "0.68.0" - }, - "com.rabbitmq:amqp-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-contribs" - ], - "locked": "5.8.0" - }, - "com.spotify:completable-futures": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.3.1" - }, - "com.sun.jersey.contribs.jersey-oauth:oauth-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-contribs" - ], - "locked": "1.19.4" - }, - "com.sun.jersey.contribs.jersey-oauth:oauth-signature": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-contribs" - ], - "locked": "1.19.4" - }, - "com.sun.jersey.contribs:jersey-guice": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "locked": "1.19.4", - "requested": "1.19.4" - }, - "com.sun.jersey:jersey-bundle": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-jersey" - ], - "locked": "1.19.1" - }, - "com.sun.jersey:jersey-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "1.19.4" - }, - "com.zaxxer:HikariCP": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-mysql-persistence", - "com.netflix.conductor:conductor-postgres-persistence" - ], - "locked": "3.2.0" - }, - "commons-io:commons-io": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-es5-persistence", - "com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-mysql-persistence", - "com.netflix.conductor:conductor-postgres-persistence" - ], - "locked": "2.6" - }, - "io.grpc:grpc-netty": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc-client", - "com.netflix.conductor:conductor-grpc-server" - ], - "locked": "1.14.0" - }, - "io.grpc:grpc-protobuf": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc" - ], - "locked": "1.14.0" - }, - "io.grpc:grpc-services": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc-server" - ], - "locked": "1.14.0" - }, - "io.grpc:grpc-stub": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc" - ], - "locked": "1.14.0" - }, - "io.nats:java-nats-streaming": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-contribs" 
- ], - "locked": "0.5.0" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "io.swagger:swagger-jaxrs": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-contribs", - "com.netflix.conductor:conductor-jersey" - ], - "locked": "1.5.9" - }, - "io.swagger:swagger-jersey-jaxrs": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "locked": "1.5.9", - "requested": "1.5.0" - }, - "javax.inject:javax.inject": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1" - }, - "javax.servlet:javax.servlet-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "locked": "3.1.0" - }, - "javax.validation:validation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-jersey" - ], - "locked": "2.0.1.Final" - }, - "javax.ws.rs:jsr311-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-jersey" - ], - "locked": "1.1.1" - }, - "junit:junit": { - "locked": "4.12", - "requested": "4.12" - }, - "log4j:log4j": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc-client", - "com.netflix.conductor:conductor-grpc-server" - ], - "locked": "1.2.17" - }, - "mysql:mysql-connector-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-mysql-persistence" - ], - "locked": "8.0.11" - }, - "net.thisptr:jackson-jq": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-contribs" - ], - "locked": "0.0.12" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-jersey" - ], - "locked": "2.0.3" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.6" - }, - "org.apache.curator:curator-recipes": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-zookeeper-lock" - ], - "locked": "2.4.0" - }, - "org.apache.kafka:kafka-clients": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-contribs" - ], - "locked": "2.2.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-es5-persistence", - "com.netflix.conductor:conductor-es6-persistence" - ], - "locked": "2.9.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-es5-persistence", - "com.netflix.conductor:conductor-es6-persistence" - ], - "locked": "2.9.1" - }, - "org.awaitility:awaitility": { - "locked": "3.1.2", - "requested": "3.1.2" - }, - "org.codehaus.groovy:groovy-all": { - "locked": "2.4.15", - "requested": "2.4.15" - }, - "org.eclipse.jetty:jetty-jmx": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "locked": "9.4.22.v20191022" - }, - "org.eclipse.jetty:jetty-server": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "locked": "9.4.22.v20191022", - "requested": "9.4.22.v20191022" - }, - "org.eclipse.jetty:jetty-servlet": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "locked": "9.4.22.v20191022", - "requested": "9.4.22.v20191022" - }, - "org.elasticsearch.client:elasticsearch-rest-client": { - "firstLevelTransitive": [ - 
"com.netflix.conductor:conductor-es5-persistence" - ], - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.elasticsearch.client:elasticsearch-rest-high-level-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-es5-persistence" - ], - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.elasticsearch.client:transport": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-es5-persistence" - ], - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.elasticsearch:elasticsearch": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-es5-persistence" - ], - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.flywaydb:flyway-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-mysql-persistence", - "com.netflix.conductor:conductor-postgres-persistence" - ], - "locked": "4.0.3" - }, - "org.glassfish:javax.el": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.0.0" - }, - "org.mockito:mockito-core": { - "locked": "3.1.0", - "requested": "3.1.0" - }, - "org.postgresql:postgresql": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-postgres-persistence" - ], - "locked": "42.2.6" - }, - "org.rarefiedredis.redis:redis-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-redis-persistence" - ], - "locked": "0.0.17", - "requested": "0.0.17" - }, - "org.redisson:redisson": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-redis-lock" - ], - "locked": "3.11.4" - }, - "org.slf4j:slf4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1.7.29" - }, - "org.spockframework:spock-core": { - "locked": "1.3-groovy-2.4", - "requested": "1.3-groovy-2.4" - }, - "org.spockframework:spock-guice": { - "locked": "1.3-groovy-2.4", - "requested": "1.3-groovy-2.4" - }, - "redis.clients:jedis": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-redis-persistence" - ], - "locked": "3.0.1" - } - }, - "testCompileClasspath": { - "com.amazonaws:aws-java-sdk-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "1.11.893" - }, - "com.amazonaws:aws-java-sdk-s3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.11.86" - }, - "com.amazonaws:aws-java-sdk-sqs": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-contribs" - ], - "locked": "1.11.893" - }, - "com.datastax.cassandra:cassandra-driver-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-cassandra-persistence" - ], - "locked": "3.6.0" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.10.0" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.10.0" - }, - "com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "2.10.0" - }, - "com.github.rholder:guava-retrying": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.0.0" - }, - "com.github.vmg.protogen:protogen-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1.0.0" - }, - 
"com.google.api.grpc:proto-google-common-protos": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc" - ], - "locked": "1.0.0" - }, - "com.google.inject.extensions:guice-multibindings": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "4.1.0" - }, - "com.google.inject.extensions:guice-servlet": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "locked": "4.1.0", - "requested": "4.1.0" - }, - "com.google.inject:guice": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-contribs", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-mysql-persistence", - "com.netflix.conductor:conductor-postgres-persistence", - "com.netflix.conductor:conductor-redis-persistence", - "com.netflix.conductor:conductor-server" - ], - "locked": "4.1.0" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "3.5.1" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.2.0" - }, - "com.netflix.archaius:archaius-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "0.7.6" - }, - "com.netflix.conductor:conductor-cassandra-persistence": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-client": { - "project": true - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-contribs", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-grpc", - "com.netflix.conductor:conductor-grpc-client", - "com.netflix.conductor:conductor-grpc-server", - "com.netflix.conductor:conductor-jersey" - ], - "project": true - }, - "com.netflix.conductor:conductor-contribs": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-cassandra-persistence", - "com.netflix.conductor:conductor-contribs", - "com.netflix.conductor:conductor-es5-persistence", - "com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-grpc", - "com.netflix.conductor:conductor-grpc-client", - "com.netflix.conductor:conductor-grpc-server", - "com.netflix.conductor:conductor-jersey", - "com.netflix.conductor:conductor-mysql-persistence", - "com.netflix.conductor:conductor-postgres-persistence", - "com.netflix.conductor:conductor-redis-lock", - "com.netflix.conductor:conductor-redis-persistence", - "com.netflix.conductor:conductor-server", - "com.netflix.conductor:conductor-zookeeper-lock" - ], - "project": true - }, - "com.netflix.conductor:conductor-es5-persistence": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-es6-persistence": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-grpc": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc-client", - "com.netflix.conductor:conductor-grpc-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-grpc-client": { - "project": true - }, - "com.netflix.conductor:conductor-grpc-server": { - "firstLevelTransitive": [ - 
"com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-jersey": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-mysql-persistence": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-postgres-persistence": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-redis-lock": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-redis-persistence": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-server": { - "project": true - }, - "com.netflix.conductor:conductor-zookeeper-lock": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.dyno-queues:dyno-queues-redis": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-redis-persistence" - ], - "locked": "2.0.13" - }, - "com.netflix.eureka:eureka-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "1.8.7" - }, - "com.netflix.governator:governator-test-spock": { - "locked": "1.17.10", - "requested": "latest.release" - }, - "com.netflix.runtime:health-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc", - "com.netflix.conductor:conductor-jersey" - ], - "locked": "1.1.4" - }, - "com.netflix.runtime:health-guice": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "locked": "1.1.4" - }, - "com.netflix.servo:servo-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.12.17" - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-core" - ], - "locked": "0.68.0" - }, - "com.netflix.spectator:spectator-reg-metrics3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-contribs" - ], - "locked": "0.68.0" - }, - "com.rabbitmq:amqp-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-contribs" - ], - "locked": "5.8.0" - }, - "com.spotify:completable-futures": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.3.1" - }, - "com.sun.jersey.contribs.jersey-oauth:oauth-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-contribs" - ], - "locked": "1.19.4" - }, - "com.sun.jersey.contribs.jersey-oauth:oauth-signature": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-contribs" - ], - "locked": "1.19.4" - }, - "com.sun.jersey.contribs:jersey-guice": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "locked": "1.19.4", - "requested": "1.19.4" - }, - "com.sun.jersey:jersey-bundle": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-jersey" - ], - "locked": "1.19.1" - }, - "com.sun.jersey:jersey-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "1.19.4" - }, - "com.zaxxer:HikariCP": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-mysql-persistence", - "com.netflix.conductor:conductor-postgres-persistence" - ], - "locked": "3.2.0" - }, - "commons-io:commons-io": { - 
"firstLevelTransitive": [ - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-es5-persistence", - "com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-mysql-persistence", - "com.netflix.conductor:conductor-postgres-persistence" - ], - "locked": "2.6" - }, - "io.grpc:grpc-netty": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc-client", - "com.netflix.conductor:conductor-grpc-server" - ], - "locked": "1.14.0" - }, - "io.grpc:grpc-protobuf": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc" - ], - "locked": "1.14.0" - }, - "io.grpc:grpc-services": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc-server" - ], - "locked": "1.14.0" - }, - "io.grpc:grpc-stub": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc" - ], - "locked": "1.14.0" - }, - "io.nats:java-nats-streaming": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-contribs" - ], - "locked": "0.5.0" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "io.swagger:swagger-jaxrs": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-contribs", - "com.netflix.conductor:conductor-jersey" - ], - "locked": "1.5.9" - }, - "io.swagger:swagger-jersey-jaxrs": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "locked": "1.5.9", - "requested": "1.5.0" - }, - "javax.inject:javax.inject": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1" - }, - "javax.servlet:javax.servlet-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "locked": "3.1.0" - }, - "javax.validation:validation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-jersey" - ], - "locked": "2.0.1.Final" - }, - "javax.ws.rs:jsr311-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-jersey" - ], - "locked": "1.1.1" - }, - "junit:junit": { - "locked": "4.12", - "requested": "4.12" - }, - "log4j:log4j": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc-client", - "com.netflix.conductor:conductor-grpc-server" - ], - "locked": "1.2.17" - }, - "mysql:mysql-connector-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-mysql-persistence" - ], - "locked": "8.0.11" - }, - "net.thisptr:jackson-jq": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-contribs" - ], - "locked": "0.0.12" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-jersey" - ], - "locked": "2.0.3" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.6" - }, - "org.apache.curator:curator-recipes": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-zookeeper-lock" - ], - "locked": "2.4.0" - }, - "org.apache.kafka:kafka-clients": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-contribs" - ], - "locked": "2.2.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-es5-persistence", - "com.netflix.conductor:conductor-es6-persistence" - ], - "locked": "2.9.1" - }, - 
"org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-es5-persistence", - "com.netflix.conductor:conductor-es6-persistence" - ], - "locked": "2.9.1" - }, - "org.awaitility:awaitility": { - "locked": "3.1.2", - "requested": "3.1.2" - }, - "org.codehaus.groovy:groovy-all": { - "locked": "2.4.15", - "requested": "2.4.15" - }, - "org.eclipse.jetty:jetty-jmx": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "locked": "9.4.22.v20191022" - }, - "org.eclipse.jetty:jetty-server": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "locked": "9.4.22.v20191022", - "requested": "9.4.22.v20191022" - }, - "org.eclipse.jetty:jetty-servlet": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "locked": "9.4.22.v20191022", - "requested": "9.4.22.v20191022" - }, - "org.elasticsearch.client:elasticsearch-rest-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-es5-persistence" - ], - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.elasticsearch.client:elasticsearch-rest-high-level-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-es5-persistence" - ], - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.elasticsearch.client:transport": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-es5-persistence" - ], - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.elasticsearch:elasticsearch": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-es5-persistence" - ], - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.flywaydb:flyway-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-mysql-persistence", - "com.netflix.conductor:conductor-postgres-persistence" - ], - "locked": "4.0.3" - }, - "org.glassfish:javax.el": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.0.0" - }, - "org.mockito:mockito-core": { - "locked": "3.1.0", - "requested": "3.1.0" - }, - "org.postgresql:postgresql": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-postgres-persistence" - ], - "locked": "42.2.6" - }, - "org.rarefiedredis.redis:redis-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-redis-persistence" - ], - "locked": "0.0.17", - "requested": "0.0.17" - }, - "org.redisson:redisson": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-redis-lock" - ], - "locked": "3.11.4" - }, - "org.slf4j:slf4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1.7.29" - }, - "org.spockframework:spock-core": { - "locked": "1.3-groovy-2.4", - "requested": "1.3-groovy-2.4" - }, - "org.spockframework:spock-guice": { - "locked": "1.3-groovy-2.4", - "requested": "1.3-groovy-2.4" - }, - "redis.clients:jedis": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-redis-persistence" - ], - "locked": "3.0.1" - } - }, - "testRuntime": { - "com.amazonaws:aws-java-sdk-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "1.11.893" - }, - "com.amazonaws:aws-java-sdk-s3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.11.86" - }, - "com.amazonaws:aws-java-sdk-sqs": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-contribs" - ], - "locked": "1.11.893" - }, - "com.datastax.cassandra:cassandra-driver-core": { - 
"firstLevelTransitive": [ - "com.netflix.conductor:conductor-cassandra-persistence" - ], - "locked": "3.6.0" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.10.0" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.10.0" - }, - "com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "2.10.0" - }, - "com.github.rholder:guava-retrying": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.0.0" - }, - "com.github.vmg.protogen:protogen-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1.0.0" - }, - "com.google.api.grpc:proto-google-common-protos": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc" - ], - "locked": "1.0.0" - }, - "com.google.inject.extensions:guice-multibindings": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "4.1.0" - }, - "com.google.inject.extensions:guice-servlet": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "locked": "4.1.0", - "requested": "4.1.0" - }, - "com.google.inject:guice": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-contribs", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-mysql-persistence", - "com.netflix.conductor:conductor-postgres-persistence", - "com.netflix.conductor:conductor-redis-persistence", - "com.netflix.conductor:conductor-server" - ], - "locked": "4.1.0" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "3.5.1" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.2.0" - }, - "com.netflix.archaius:archaius-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "0.7.6" - }, - "com.netflix.conductor:conductor-cassandra-persistence": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-client": { - "project": true - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-contribs", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-grpc", - "com.netflix.conductor:conductor-grpc-client", - "com.netflix.conductor:conductor-grpc-server", - "com.netflix.conductor:conductor-jersey" - ], - "project": true - }, - "com.netflix.conductor:conductor-contribs": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-cassandra-persistence", - "com.netflix.conductor:conductor-contribs", - "com.netflix.conductor:conductor-es5-persistence", - "com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-grpc", - "com.netflix.conductor:conductor-grpc-client", - "com.netflix.conductor:conductor-grpc-server", - "com.netflix.conductor:conductor-jersey", - 
"com.netflix.conductor:conductor-mysql-persistence", - "com.netflix.conductor:conductor-postgres-persistence", - "com.netflix.conductor:conductor-redis-lock", - "com.netflix.conductor:conductor-redis-persistence", - "com.netflix.conductor:conductor-server", - "com.netflix.conductor:conductor-zookeeper-lock" - ], - "project": true - }, - "com.netflix.conductor:conductor-es5-persistence": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-es6-persistence": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-grpc": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc-client", - "com.netflix.conductor:conductor-grpc-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-grpc-client": { - "project": true - }, - "com.netflix.conductor:conductor-grpc-server": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-jersey": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-mysql-persistence": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-postgres-persistence": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-redis-lock": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-redis-persistence": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-server": { - "project": true - }, - "com.netflix.conductor:conductor-zookeeper-lock": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.dyno-queues:dyno-queues-redis": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-redis-persistence" - ], - "locked": "2.0.13" - }, - "com.netflix.eureka:eureka-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "1.8.7" - }, - "com.netflix.governator:governator-test-spock": { - "locked": "1.17.10", - "requested": "latest.release" - }, - "com.netflix.runtime:health-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc", - "com.netflix.conductor:conductor-jersey" - ], - "locked": "1.1.4" - }, - "com.netflix.runtime:health-guice": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "locked": "1.1.4" - }, - "com.netflix.servo:servo-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.12.17" - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-core" - ], - "locked": "0.68.0" - }, - "com.netflix.spectator:spectator-reg-metrics3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-contribs" - ], - "locked": "0.68.0" - }, - "com.rabbitmq:amqp-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-contribs" - ], - "locked": "5.8.0" - }, - "com.spotify:completable-futures": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.3.1" - }, - 
"com.sun.jersey.contribs.jersey-oauth:oauth-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-contribs" - ], - "locked": "1.19.4" - }, - "com.sun.jersey.contribs.jersey-oauth:oauth-signature": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-contribs" - ], - "locked": "1.19.4" - }, - "com.sun.jersey.contribs:jersey-guice": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "locked": "1.19.4", - "requested": "1.19.4" - }, - "com.sun.jersey:jersey-bundle": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-jersey" - ], - "locked": "1.19.1" - }, - "com.sun.jersey:jersey-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "1.19.4" - }, - "com.zaxxer:HikariCP": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-mysql-persistence", - "com.netflix.conductor:conductor-postgres-persistence" - ], - "locked": "3.2.0" - }, - "commons-io:commons-io": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-es5-persistence", - "com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-mysql-persistence", - "com.netflix.conductor:conductor-postgres-persistence" - ], - "locked": "2.6" - }, - "io.grpc:grpc-netty": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc-client", - "com.netflix.conductor:conductor-grpc-server" - ], - "locked": "1.14.0" - }, - "io.grpc:grpc-protobuf": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc" - ], - "locked": "1.14.0" - }, - "io.grpc:grpc-services": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc-server" - ], - "locked": "1.14.0" - }, - "io.grpc:grpc-stub": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc" - ], - "locked": "1.14.0" - }, - "io.nats:java-nats-streaming": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-contribs" - ], - "locked": "0.5.0" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "io.swagger:swagger-jaxrs": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-contribs", - "com.netflix.conductor:conductor-jersey" - ], - "locked": "1.5.9" - }, - "io.swagger:swagger-jersey-jaxrs": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "locked": "1.5.9", - "requested": "1.5.0" - }, - "javax.inject:javax.inject": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1" - }, - "javax.servlet:javax.servlet-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "locked": "3.1.0" - }, - "javax.validation:validation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-jersey" - ], - "locked": "2.0.1.Final" - }, - "javax.ws.rs:jsr311-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-jersey" - ], - "locked": "1.1.1" - }, - "junit:junit": { - "locked": "4.12", - "requested": "4.12" - }, - "log4j:log4j": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc-client", - "com.netflix.conductor:conductor-grpc-server" - ], - "locked": "1.2.17" - }, - "mysql:mysql-connector-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-mysql-persistence" - ], - "locked": "8.0.11" - }, - "net.thisptr:jackson-jq": { - 
"firstLevelTransitive": [ - "com.netflix.conductor:conductor-contribs" - ], - "locked": "0.0.12" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-jersey" - ], - "locked": "2.0.3" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.6" - }, - "org.apache.curator:curator-recipes": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-zookeeper-lock" - ], - "locked": "2.4.0" - }, - "org.apache.kafka:kafka-clients": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-contribs" - ], - "locked": "2.2.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-es5-persistence", - "com.netflix.conductor:conductor-es6-persistence" - ], - "locked": "2.9.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-es5-persistence", - "com.netflix.conductor:conductor-es6-persistence" - ], - "locked": "2.9.1" - }, - "org.awaitility:awaitility": { - "locked": "3.1.2", - "requested": "3.1.2" - }, - "org.codehaus.groovy:groovy-all": { - "locked": "2.4.15", - "requested": "2.4.15" - }, - "org.eclipse.jetty:jetty-jmx": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "locked": "9.4.22.v20191022" - }, - "org.eclipse.jetty:jetty-server": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "locked": "9.4.22.v20191022", - "requested": "9.4.22.v20191022" - }, - "org.eclipse.jetty:jetty-servlet": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "locked": "9.4.22.v20191022", - "requested": "9.4.22.v20191022" - }, - "org.elasticsearch.client:elasticsearch-rest-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-es5-persistence" - ], - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.elasticsearch.client:elasticsearch-rest-high-level-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-es5-persistence" - ], - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.elasticsearch.client:transport": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-es5-persistence" - ], - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.elasticsearch:elasticsearch": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-es5-persistence" - ], - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.flywaydb:flyway-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-mysql-persistence", - "com.netflix.conductor:conductor-postgres-persistence" - ], - "locked": "4.0.3" - }, - "org.glassfish:javax.el": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.0.0" - }, - "org.mockito:mockito-core": { - "locked": "3.1.0", - "requested": "3.1.0" - }, - "org.postgresql:postgresql": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-postgres-persistence" - ], - "locked": "42.2.6" - }, - "org.rarefiedredis.redis:redis-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-redis-persistence" - ], - "locked": "0.0.17", - "requested": "0.0.17" - }, - "org.redisson:redisson": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-redis-lock" - ], - "locked": "3.11.4" - 
}, - "org.slf4j:slf4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1.7.29" - }, - "org.spockframework:spock-core": { - "locked": "1.3-groovy-2.4", - "requested": "1.3-groovy-2.4" - }, - "org.spockframework:spock-guice": { - "locked": "1.3-groovy-2.4", - "requested": "1.3-groovy-2.4" - }, - "redis.clients:jedis": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-redis-persistence" - ], - "locked": "3.0.1" - } - }, - "testRuntimeClasspath": { - "com.amazonaws:aws-java-sdk-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "1.11.893" - }, - "com.amazonaws:aws-java-sdk-s3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.11.86" - }, - "com.amazonaws:aws-java-sdk-sqs": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-contribs" - ], - "locked": "1.11.893" - }, - "com.datastax.cassandra:cassandra-driver-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-cassandra-persistence" - ], - "locked": "3.6.0" - }, - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.10.0" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "2.10.0" - }, - "com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "2.10.0" - }, - "com.github.rholder:guava-retrying": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.0.0" - }, - "com.github.vmg.protogen:protogen-annotations": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1.0.0" - }, - "com.google.api.grpc:proto-google-common-protos": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc" - ], - "locked": "1.0.0" - }, - "com.google.inject.extensions:guice-multibindings": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "4.1.0" - }, - "com.google.inject.extensions:guice-servlet": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "locked": "4.1.0", - "requested": "4.1.0" - }, - "com.google.inject:guice": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-contribs", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-mysql-persistence", - "com.netflix.conductor:conductor-postgres-persistence", - "com.netflix.conductor:conductor-redis-persistence", - "com.netflix.conductor:conductor-server" - ], - "locked": "4.1.0" - }, - "com.google.protobuf:protobuf-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "3.5.1" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.2.0" - }, - "com.netflix.archaius:archaius-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "0.7.6" - }, - "com.netflix.conductor:conductor-cassandra-persistence": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-client": { - "project": true - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - 
"com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-contribs", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-grpc", - "com.netflix.conductor:conductor-grpc-client", - "com.netflix.conductor:conductor-grpc-server", - "com.netflix.conductor:conductor-jersey" - ], - "project": true - }, - "com.netflix.conductor:conductor-contribs": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-cassandra-persistence", - "com.netflix.conductor:conductor-contribs", - "com.netflix.conductor:conductor-es5-persistence", - "com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-grpc", - "com.netflix.conductor:conductor-grpc-client", - "com.netflix.conductor:conductor-grpc-server", - "com.netflix.conductor:conductor-jersey", - "com.netflix.conductor:conductor-mysql-persistence", - "com.netflix.conductor:conductor-postgres-persistence", - "com.netflix.conductor:conductor-redis-lock", - "com.netflix.conductor:conductor-redis-persistence", - "com.netflix.conductor:conductor-server", - "com.netflix.conductor:conductor-zookeeper-lock" - ], - "project": true - }, - "com.netflix.conductor:conductor-es5-persistence": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-es6-persistence": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-grpc": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc-client", - "com.netflix.conductor:conductor-grpc-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-grpc-client": { - "project": true - }, - "com.netflix.conductor:conductor-grpc-server": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-jersey": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-mysql-persistence": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-postgres-persistence": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-redis-lock": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-redis-persistence": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.conductor:conductor-server": { - "project": true - }, - "com.netflix.conductor:conductor-zookeeper-lock": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "project": true - }, - "com.netflix.dyno-queues:dyno-queues-redis": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-redis-persistence" - ], - "locked": "2.0.13" - }, - "com.netflix.eureka:eureka-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "1.8.7" - }, - "com.netflix.governator:governator-test-spock": { - "locked": "1.17.10", - "requested": "latest.release" - }, - "com.netflix.runtime:health-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc", - 
"com.netflix.conductor:conductor-jersey" - ], - "locked": "1.1.4" - }, - "com.netflix.runtime:health-guice": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "locked": "1.1.4" - }, - "com.netflix.servo:servo-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.12.17" - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-core" - ], - "locked": "0.68.0" - }, - "com.netflix.spectator:spectator-reg-metrics3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-contribs" - ], - "locked": "0.68.0" - }, - "com.rabbitmq:amqp-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-contribs" - ], - "locked": "5.8.0" - }, - "com.spotify:completable-futures": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.3.1" - }, - "com.sun.jersey.contribs.jersey-oauth:oauth-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-contribs" - ], - "locked": "1.19.4" - }, - "com.sun.jersey.contribs.jersey-oauth:oauth-signature": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-contribs" - ], - "locked": "1.19.4" - }, - "com.sun.jersey.contribs:jersey-guice": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "locked": "1.19.4", - "requested": "1.19.4" - }, - "com.sun.jersey:jersey-bundle": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-jersey" - ], - "locked": "1.19.1" - }, - "com.sun.jersey:jersey-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client" - ], - "locked": "1.19.4" - }, - "com.zaxxer:HikariCP": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-mysql-persistence", - "com.netflix.conductor:conductor-postgres-persistence" - ], - "locked": "3.2.0" - }, - "commons-io:commons-io": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-client", - "com.netflix.conductor:conductor-es5-persistence", - "com.netflix.conductor:conductor-es6-persistence", - "com.netflix.conductor:conductor-mysql-persistence", - "com.netflix.conductor:conductor-postgres-persistence" - ], - "locked": "2.6" - }, - "io.grpc:grpc-netty": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc-client", - "com.netflix.conductor:conductor-grpc-server" - ], - "locked": "1.14.0" - }, - "io.grpc:grpc-protobuf": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc" - ], - "locked": "1.14.0" - }, - "io.grpc:grpc-services": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc-server" - ], - "locked": "1.14.0" - }, - "io.grpc:grpc-stub": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc" - ], - "locked": "1.14.0" - }, - "io.nats:java-nats-streaming": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-contribs" - ], - "locked": "0.5.0" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "io.swagger:swagger-jaxrs": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-contribs", - "com.netflix.conductor:conductor-jersey" - ], - "locked": "1.5.9" - }, - "io.swagger:swagger-jersey-jaxrs": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "locked": "1.5.9", - "requested": "1.5.0" - }, - "javax.inject:javax.inject": { - "firstLevelTransitive": [ - 
"com.netflix.conductor:conductor-common" - ], - "locked": "1" - }, - "javax.servlet:javax.servlet-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "locked": "3.1.0" - }, - "javax.validation:validation-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-jersey" - ], - "locked": "2.0.1.Final" - }, - "javax.ws.rs:jsr311-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-jersey" - ], - "locked": "1.1.1" - }, - "junit:junit": { - "locked": "4.12", - "requested": "4.12" - }, - "log4j:log4j": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-grpc-client", - "com.netflix.conductor:conductor-grpc-server" - ], - "locked": "1.2.17" - }, - "mysql:mysql-connector-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-mysql-persistence" - ], - "locked": "8.0.11" - }, - "net.thisptr:jackson-jq": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-contribs" - ], - "locked": "0.0.12" - }, - "org.apache.bval:bval-jsr": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core", - "com.netflix.conductor:conductor-jersey" - ], - "locked": "2.0.3" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.6" - }, - "org.apache.curator:curator-recipes": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-zookeeper-lock" - ], - "locked": "2.4.0" - }, - "org.apache.kafka:kafka-clients": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-contribs" - ], - "locked": "2.2.0" - }, - "org.apache.logging.log4j:log4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-es5-persistence", - "com.netflix.conductor:conductor-es6-persistence" - ], - "locked": "2.9.1" - }, - "org.apache.logging.log4j:log4j-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-es5-persistence", - "com.netflix.conductor:conductor-es6-persistence" - ], - "locked": "2.9.1" - }, - "org.awaitility:awaitility": { - "locked": "3.1.2", - "requested": "3.1.2" - }, - "org.codehaus.groovy:groovy-all": { - "locked": "2.4.15", - "requested": "2.4.15" - }, - "org.eclipse.jetty:jetty-jmx": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "locked": "9.4.22.v20191022" - }, - "org.eclipse.jetty:jetty-server": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "locked": "9.4.22.v20191022", - "requested": "9.4.22.v20191022" - }, - "org.eclipse.jetty:jetty-servlet": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-server" - ], - "locked": "9.4.22.v20191022", - "requested": "9.4.22.v20191022" - }, - "org.elasticsearch.client:elasticsearch-rest-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-es5-persistence" - ], - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.elasticsearch.client:elasticsearch-rest-high-level-client": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-es5-persistence" - ], - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.elasticsearch.client:transport": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-es5-persistence" - ], - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.elasticsearch:elasticsearch": { - "firstLevelTransitive": [ - 
"com.netflix.conductor:conductor-es5-persistence" - ], - "locked": "5.6.8", - "requested": "5.6.8" - }, - "org.flywaydb:flyway-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-mysql-persistence", - "com.netflix.conductor:conductor-postgres-persistence" - ], - "locked": "4.0.3" - }, - "org.glassfish:javax.el": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common", - "com.netflix.conductor:conductor-core" - ], - "locked": "3.0.0" - }, - "org.mockito:mockito-core": { - "locked": "3.1.0", - "requested": "3.1.0" - }, - "org.postgresql:postgresql": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-postgres-persistence" - ], - "locked": "42.2.6" - }, - "org.rarefiedredis.redis:redis-java": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-redis-persistence" - ], - "locked": "0.0.17", - "requested": "0.0.17" - }, - "org.redisson:redisson": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-redis-lock" - ], - "locked": "3.11.4" - }, - "org.slf4j:slf4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1.7.29" - }, - "org.spockframework:spock-core": { - "locked": "1.3-groovy-2.4", - "requested": "1.3-groovy-2.4" - }, - "org.spockframework:spock-guice": { - "locked": "1.3-groovy-2.4", - "requested": "1.3-groovy-2.4" - }, - "redis.clients:jedis": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-redis-persistence" - ], - "locked": "3.0.1" - } - } -} \ No newline at end of file diff --git a/test-harness/docker-compose.yml b/test-harness/docker-compose.yml index 1ea32aaec6..0c242d7518 100644 --- a/test-harness/docker-compose.yml +++ b/test-harness/docker-compose.yml @@ -20,4 +20,4 @@ services: volumes: - ".docker/postgresdb/docker-entrypoint-initdb.d:/docker-entrypoint-initdb.d" environment: - POSTGRES_PASSWORD: postgres \ No newline at end of file + POSTGRES_PASSWORD: postgres diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/base/AbstractResiliencySpecification.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/base/AbstractResiliencySpecification.groovy new file mode 100644 index 0000000000..cc3abb78ce --- /dev/null +++ b/test-harness/src/test/groovy/com/netflix/conductor/test/base/AbstractResiliencySpecification.groovy @@ -0,0 +1,70 @@ +/* + * Copyright 2020 Netflix, Inc. + *

+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package com.netflix.conductor.test.base + +import com.netflix.conductor.dao.QueueDAO +import com.netflix.conductor.redis.dao.DynoQueueDAO +import com.netflix.conductor.redis.jedis.JedisMock +import com.netflix.dyno.connectionpool.Host +import com.netflix.dyno.queues.ShardSupplier +import com.netflix.dyno.queues.redis.RedisQueues +import org.springframework.beans.factory.annotation.Autowired +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty +import org.springframework.context.annotation.Bean +import org.springframework.context.annotation.Configuration +import org.springframework.context.annotation.Primary +import org.springframework.test.context.TestPropertySource +import redis.clients.jedis.commands.JedisCommands +import spock.mock.DetachedMockFactory + +@TestPropertySource(properties = [ + "conductor.disable.async.workers=true", + "queue.spy.enabled=true" +]) +abstract class AbstractResiliencySpecification extends AbstractSpecification { + + @Configuration + static class TestQueueConfiguration { + + @Primary + @Bean + @ConditionalOnProperty(name = "queue.spy.enabled", havingValue = "true") + QueueDAO SpyQueueDAO() { + DetachedMockFactory detachedMockFactory = new DetachedMockFactory() + JedisCommands jedisMock = new JedisMock() + ShardSupplier shardSupplier = new ShardSupplier() { + @Override + Set getQueueShards() { + return new HashSet<>(Collections.singletonList("a")) + } + + @Override + String getCurrentShard() { + return "a" + } + + @Override + String getShardForHost(Host host) { + return "a" + } + } + RedisQueues redisQueues = new RedisQueues(jedisMock, jedisMock, "mockedQueues", shardSupplier, 60000, 120000) + DynoQueueDAO dynoQueueDAO = new DynoQueueDAO(redisQueues) + + return detachedMockFactory.Spy(dynoQueueDAO) + } + } + + @Autowired + QueueDAO queueDAO +} diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/base/AbstractSpecification.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/base/AbstractSpecification.groovy new file mode 100644 index 0000000000..49f998d20f --- /dev/null +++ b/test-harness/src/test/groovy/com/netflix/conductor/test/base/AbstractSpecification.groovy @@ -0,0 +1,43 @@ +/* + * Copyright 2020 Netflix, Inc. + *

+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package com.netflix.conductor.test.base + +import com.netflix.conductor.core.execution.WorkflowExecutor +import com.netflix.conductor.service.ExecutionService +import com.netflix.conductor.service.MetadataService +import com.netflix.conductor.test.util.WorkflowTestUtil +import org.springframework.beans.factory.annotation.Autowired +import org.springframework.boot.test.context.SpringBootTest +import org.springframework.test.context.TestPropertySource +import spock.lang.Specification + +@SpringBootTest +@TestPropertySource(locations = "classpath:application-integrationtest.properties") +abstract class AbstractSpecification extends Specification { + + @Autowired + ExecutionService workflowExecutionService + + @Autowired + MetadataService metadataService + + @Autowired + WorkflowExecutor workflowExecutor + + @Autowired + WorkflowTestUtil workflowTestUtil + + def cleanup() { + workflowTestUtil.clearWorkflows() + } +} diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/DecisionTaskSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/DecisionTaskSpec.groovy index 63b9257f60..1a4f54e9ec 100644 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/DecisionTaskSpec.groovy +++ b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/DecisionTaskSpec.groovy @@ -1,52 +1,26 @@ -/** +/* * Copyright 2020 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + *

+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. */ - package com.netflix.conductor.test.integration - import com.netflix.conductor.common.metadata.tasks.Task import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.core.execution.WorkflowExecutor -import com.netflix.conductor.service.ExecutionService -import com.netflix.conductor.service.MetadataService -import com.netflix.conductor.test.util.WorkflowTestUtil -import com.netflix.conductor.tests.utils.TestModule -import com.netflix.governator.guice.test.ModulesForTesting +import com.netflix.conductor.test.base.AbstractSpecification import spock.lang.Shared -import spock.lang.Specification import spock.lang.Unroll -import javax.inject.Inject - import static com.netflix.conductor.test.util.WorkflowTestUtil.verifyPolledAndAcknowledgedTask -@ModulesForTesting([TestModule.class]) -class DecisionTaskSpec extends Specification { - - @Inject - ExecutionService workflowExecutionService - - @Inject - MetadataService metadataService - - @Inject - WorkflowExecutor workflowExecutor - - @Inject - WorkflowTestUtil workflowTestUtil +class DecisionTaskSpec extends AbstractSpecification { @Shared def DECISION_WF = "DecisionWorkflow" @@ -57,7 +31,6 @@ class DecisionTaskSpec extends Specification { @Shared def COND_TASK_WF = "ConditionalTaskWF" - def setup() { //initialization code for each feature workflowTestUtil.registerWorkflows('simple_decision_task_integration_test.json', @@ -65,10 +38,6 @@ class DecisionTaskSpec extends Specification { 'conditional_task_workflow_integration_test.json') } - def cleanup() { - workflowTestUtil.clearWorkflows() - } - def "Test simple decision workflow"() { given: "Workflow an input of a workflow with decision task" Map input = new HashMap() @@ -376,5 +345,4 @@ class DecisionTaskSpec extends Specification { tasks[3].status == Task.Status.COMPLETED } } - -} \ No newline at end of file +} diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/DoWhileSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/DoWhileSpec.groovy index 955e97f5bb..c31b50196e 100644 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/DoWhileSpec.groovy +++ b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/DoWhileSpec.groovy @@ -1,17 +1,14 @@ /* * Copyright 2020 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + *

+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.test.integration @@ -19,36 +16,11 @@ import com.netflix.conductor.common.metadata.tasks.Task import com.netflix.conductor.common.metadata.tasks.TaskDef import com.netflix.conductor.common.run.Workflow import com.netflix.conductor.common.utils.TaskUtils -import com.netflix.conductor.core.execution.WorkflowExecutor -import com.netflix.conductor.dao.QueueDAO -import com.netflix.conductor.service.ExecutionService -import com.netflix.conductor.service.MetadataService -import com.netflix.conductor.test.util.WorkflowTestUtil -import com.netflix.conductor.tests.utils.TestModule -import com.netflix.governator.guice.test.ModulesForTesting -import spock.lang.Specification - -import javax.inject.Inject +import com.netflix.conductor.test.base.AbstractSpecification import static com.netflix.conductor.test.util.WorkflowTestUtil.verifyPolledAndAcknowledgedTask -@ModulesForTesting([TestModule.class]) -class DoWhileSpec extends Specification { - - @Inject - ExecutionService workflowExecutionService - - @Inject - MetadataService metadataService - - @Inject - WorkflowExecutor workflowExecutor - - @Inject - WorkflowTestUtil workflowTestUtil - - @Inject - QueueDAO queueDAO +class DoWhileSpec extends AbstractSpecification { def setup() { workflowTestUtil.registerWorkflows("do_while_integration_test.json", @@ -56,10 +28,6 @@ class DoWhileSpec extends Specification { "do_while_as_subtask_integration_test.json") } - def cleanup() { - workflowTestUtil.clearWorkflows() - } - def "Test workflow with a single iteration Do While task"() { given: "Number of iterations of the loop is set to 1" def workflowInput = new HashMap() @@ -384,7 +352,8 @@ class DoWhileSpec extends Specification { def taskName = 'integration_task_0' def persistedTaskDefinition = workflowTestUtil.getPersistedTaskDefinition(taskName).get() def modifiedTaskDefinition = new TaskDef(persistedTaskDefinition.name, persistedTaskDefinition.description, - 0, persistedTaskDefinition.timeoutSeconds) + persistedTaskDefinition.ownerEmail, 0, persistedTaskDefinition.timeoutSeconds, + persistedTaskDefinition.responseTimeoutSeconds) metadataService.updateTaskDef(modifiedTaskDefinition) when: "A do while workflow is started" @@ -516,7 +485,8 @@ class DoWhileSpec extends Specification { def taskName = 'integration_task_0' def persistedTaskDefinition = workflowTestUtil.getPersistedTaskDefinition(taskName).get() def modifiedTaskDefinition = new TaskDef(persistedTaskDefinition.name, persistedTaskDefinition.description, - 1, persistedTaskDefinition.timeoutSeconds) + persistedTaskDefinition.ownerEmail, 1, persistedTaskDefinition.timeoutSeconds, + persistedTaskDefinition.responseTimeoutSeconds) modifiedTaskDefinition.setRetryDelaySeconds(0) metadataService.updateTaskDef(modifiedTaskDefinition) diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/DynamicForkJoinSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/DynamicForkJoinSpec.groovy index 47d3a87b9c..ce5315d172 100644 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/DynamicForkJoinSpec.groovy +++ b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/DynamicForkJoinSpec.groovy 
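// A minimal Groovy sketch, not part of the diff itself: the updated TaskDef constructor call that
// the spec hunks above and below switch to. The argument order (name, description, ownerEmail,
// retryCount, timeoutSeconds, responseTimeoutSeconds) is taken from the updated calls visible in
// this diff, not from the TaskDef source. It assumes the autowired workflowTestUtil and
// metadataService fields of the surrounding specification; 'integration_task_0' is only an
// illustrative task name.
def persisted = workflowTestUtil.getPersistedTaskDefinition('integration_task_0').get()
def modified = new TaskDef(persisted.name, persisted.description, persisted.ownerEmail,
        0, persisted.timeoutSeconds, persisted.responseTimeoutSeconds)
modified.setRetryDelaySeconds(0)
metadataService.updateTaskDef(modified)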
@@ -1,68 +1,33 @@
-/**
+/*
 * Copyright 2020 Netflix, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. */ - package com.netflix.conductor.test.integration import com.netflix.conductor.common.metadata.tasks.Task import com.netflix.conductor.common.metadata.tasks.TaskDef import com.netflix.conductor.common.metadata.workflow.WorkflowTask import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.core.execution.WorkflowExecutor -import com.netflix.conductor.core.metadata.MetadataMapperService -import com.netflix.conductor.service.ExecutionService -import com.netflix.conductor.service.MetadataService -import com.netflix.conductor.test.util.WorkflowTestUtil -import com.netflix.conductor.tests.utils.TestModule -import com.netflix.governator.guice.test.ModulesForTesting +import com.netflix.conductor.test.base.AbstractSpecification import spock.lang.Shared -import spock.lang.Specification - -import javax.inject.Inject - -@ModulesForTesting([TestModule.class]) -class DynamicForkJoinSpec extends Specification { - - @Inject - ExecutionService workflowExecutionService - - @Inject - MetadataService metadataService - - @Inject - WorkflowExecutor workflowExecutor - @Inject - WorkflowTestUtil workflowTestUtil - - @Inject - MetadataMapperService metadataMapperService +class DynamicForkJoinSpec extends AbstractSpecification { @Shared def DYNAMIC_FORK_JOIN_WF = "DynamicFanInOutTest" - def setup() { workflowTestUtil.registerWorkflows('dynamic_fork_join_integration_test.json') } - def cleanup() { - workflowTestUtil.clearWorkflows() - } - - def "Test dynamic fork join success flow"() { when: " a dynamic fork join workflow is started" def workflowInstanceId = workflowExecutor.startWorkflow(DYNAMIC_FORK_JOIN_WF, 1, @@ -160,7 +125,8 @@ class DynamicForkJoinSpec extends Specification { setup: "Make sure that the integration_task_2 does not have any retry count" def persistedTask2Definition = workflowTestUtil.getPersistedTaskDefinition('integration_task_2').get() def modifiedTask2Definition = new TaskDef(persistedTask2Definition.name, - persistedTask2Definition.description, 0, persistedTask2Definition.timeoutSeconds) + persistedTask2Definition.description, persistedTask2Definition.ownerEmail, 0, + persistedTask2Definition.timeoutSeconds, persistedTask2Definition.responseTimeoutSeconds) metadataService.updateTaskDef(modifiedTask2Definition) when: " a dynamic fork join workflow is started" @@ -240,7 +206,8 @@ class DynamicForkJoinSpec extends Specification { setup: "Make sure that the integration_task_2 does not have any retry count" def persistedTask2Definition = workflowTestUtil.getPersistedTaskDefinition('integration_task_2').get() def modifiedTask2Definition = new TaskDef(persistedTask2Definition.name, - persistedTask2Definition.description, 0, persistedTask2Definition.timeoutSeconds) + persistedTask2Definition.description, persistedTask2Definition.ownerEmail, 0, + persistedTask2Definition.timeoutSeconds, persistedTask2Definition.responseTimeoutSeconds) metadataService.updateTaskDef(modifiedTask2Definition) when: " a dynamic fork join workflow is started" @@ -377,5 +344,4 @@ class DynamicForkJoinSpec extends Specification { cleanup: "roll back the change made to integration_task_2 definition" metadataService.updateTaskDef(persistedTask2Definition) } - } diff --git 
a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/EventTaskSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/EventTaskSpec.groovy
index 04c523e7d4..eba3a37831 100644
--- a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/EventTaskSpec.groovy
+++ b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/EventTaskSpec.groovy
@@ -1,57 +1,30 @@
 /*
 * Copyright 2020 Netflix, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.test.integration import com.netflix.conductor.common.metadata.tasks.Task -import com.netflix.conductor.common.metadata.workflow.TaskType +import com.netflix.conductor.common.metadata.tasks.TaskType import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.core.execution.WorkflowExecutor -import com.netflix.conductor.core.execution.WorkflowSweeper -import com.netflix.conductor.service.ExecutionService -import com.netflix.conductor.test.util.WorkflowTestUtil -import com.netflix.conductor.tests.utils.TestModule -import com.netflix.governator.guice.test.ModulesForTesting -import spock.lang.Specification - -import javax.inject.Inject +import com.netflix.conductor.test.base.AbstractSpecification import static com.netflix.conductor.test.util.WorkflowTestUtil.verifyPolledAndAcknowledgedTask -@ModulesForTesting([TestModule.class]) -class EventTaskSpec extends Specification { - - @Inject - ExecutionService workflowExecutionService - - @Inject - WorkflowExecutor workflowExecutor - - @Inject - WorkflowTestUtil workflowTestUtil +class EventTaskSpec extends AbstractSpecification { def EVENT_BASED_WORKFLOW = 'test_event_workflow' def setup() { - workflowTestUtil.registerWorkflows( - 'event_workflow_integration_test.json' - ) - } - - def cleanup() { - workflowTestUtil.clearWorkflows() + workflowTestUtil.registerWorkflows('event_workflow_integration_test.json') } def "Verify that a event based simple work flow is executed"() { @@ -59,7 +32,7 @@ class EventTaskSpec extends Specification { def workflowInstanceId = workflowExecutor.startWorkflow(EVENT_BASED_WORKFLOW, 1, '', [:], null, null, null) - and:"Sleep for 1 second to mimic the event trigger" + and: "Sleep for 1 second to mimic the event trigger" Thread.sleep(1000) then: "Retrieve the workflow " @@ -73,10 +46,10 @@ class EventTaskSpec extends Specification { tasks[1].status == Task.Status.SCHEDULED } - when:"The integration_task_1 is polled and completed" + when: "The integration_task_1 is polled and completed" def polledAndCompletedTry1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker') - then:"verify that the task was polled and completed and the workflow is in a complete state" + then: "verify that the task was polled and completed and the workflow is in a complete state" verifyPolledAndAcknowledgedTask(polledAndCompletedTry1) with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { status == Workflow.WorkflowStatus.COMPLETED @@ -84,7 +57,5 @@ class EventTaskSpec extends Specification { tasks[1].taskType == 'integration_task_1' tasks[1].status == Task.Status.COMPLETED } - } - } diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/ExclusiveJoinSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/ExclusiveJoinSpec.groovy new file mode 100644 index 0000000000..fd8630199f --- /dev/null +++ b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/ExclusiveJoinSpec.groovy @@ -0,0 +1,360 @@ +/* + * Copyright 2020 Netflix, Inc. + *

+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package com.netflix.conductor.test.integration + +import com.netflix.conductor.common.metadata.tasks.Task +import com.netflix.conductor.common.metadata.tasks.TaskResult +import com.netflix.conductor.common.run.Workflow +import com.netflix.conductor.test.base.AbstractSpecification +import spock.lang.Shared + +import static com.netflix.conductor.test.util.WorkflowTestUtil.verifyPolledAndAcknowledgedTask + +class ExclusiveJoinSpec extends AbstractSpecification { + + @Shared + def EXCLUSIVE_JOIN_WF = "ExclusiveJoinTestWorkflow" + + def setup() { + workflowTestUtil.registerWorkflows('exclusive_join_integration_test.json') + } + + def setTaskResult(String workflowInstanceId, String taskId, TaskResult.Status status, + Map output) { + TaskResult taskResult = new TaskResult(); + taskResult.setTaskId(taskId) + taskResult.setWorkflowInstanceId(workflowInstanceId) + taskResult.setStatus(status) + taskResult.setOutputData(output) + return taskResult + } + + def "Test that the default decision is run"() { + given: "The input parameter required to make decision_1 is null to ensure that the default decision is run" + def input = ["decision_1": "null"] + + when: "An exclusive join workflow is started with then workflow input" + def workflowInstanceId = workflowExecutor.startWorkflow(EXCLUSIVE_JOIN_WF, 1, 'exclusive_join_workflow', + input, null, null, null) + + then: "verify that the workflow is in running state" + with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { + status == Workflow.WorkflowStatus.RUNNING + tasks.size() == 1 + tasks[0].taskType == 'integration_task_1' + tasks[0].status == Task.Status.SCHEDULED + } + + when: "the task 'integration_task_1' is polled and completed" + def polledAndCompletedTask1Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1' + + '.integration.worker', ["taskReferenceName": "task1"]) + + then: "verify that the task is completed and acknowledged" + verifyPolledAndAcknowledgedTask(polledAndCompletedTask1Try1) + + and: "verify that the 'integration_task_1' is COMPLETED and the workflow has COMPLETED" + with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { + status == Workflow.WorkflowStatus.COMPLETED + tasks.size() == 3 + tasks[0].taskType == 'integration_task_1' + tasks[0].status == Task.Status.COMPLETED + tasks[1].taskType == 'DECISION' + tasks[1].status == Task.Status.COMPLETED + tasks[2].taskType == 'EXCLUSIVE_JOIN' + tasks[2].status == Task.Status.COMPLETED + tasks[2].outputData['taskReferenceName'] == 'task1' + } + } + + def "Test when the one decision is true and the other is decision null"() { + given: "The input parameter required to make decision_1 true and decision_2 null" + def input = ["decision_1": "true", "decision_2": "null"] + + when: "An exclusive join workflow is started with then workflow input" + def workflowInstanceId = workflowExecutor.startWorkflow(EXCLUSIVE_JOIN_WF, 1, 'exclusive_join_workflow', + input, null, null, null) + + then: "verify that the workflow is in running state" + with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { + status == Workflow.WorkflowStatus.RUNNING + tasks.size() == 1 + tasks[0].taskType == 
'integration_task_1' + tasks[0].status == Task.Status.SCHEDULED + } + + when: "the task 'integration_task_1' is polled and completed" + def polledAndCompletedTask1Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1' + + '.integration.worker', ["taskReferenceName": "task1"]) + + then: "verify that the task is completed and acknowledged" + verifyPolledAndAcknowledgedTask(polledAndCompletedTask1Try1) + + and: "verify that the 'integration_task_1' is COMPLETED and the workflow has progressed" + with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { + status == Workflow.WorkflowStatus.RUNNING + tasks.size() == 3 + tasks[1].taskType == 'DECISION' + tasks[1].status == Task.Status.COMPLETED + tasks[2].taskType == 'integration_task_2' + tasks[2].status == Task.Status.SCHEDULED + } + + when: "the task 'integration_task_2' is polled and completed" + def polledAndCompletedTask2Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2' + + '.integration.worker', ["taskReferenceName": "task2"]) + + then: "verify that the task is completed and acknowledged" + verifyPolledAndAcknowledgedTask(polledAndCompletedTask2Try1) + + and: "verify that the 'integration_task_2' is COMPLETED and the workflow has COMPLETED" + with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { + status == Workflow.WorkflowStatus.COMPLETED + tasks.size() == 5 + tasks[0].taskType == 'integration_task_1' + tasks[0].status == Task.Status.COMPLETED + tasks[1].taskType == 'DECISION' + tasks[1].status == Task.Status.COMPLETED + tasks[2].taskType == 'integration_task_2' + tasks[2].status == Task.Status.COMPLETED + tasks[3].taskType == 'DECISION' + tasks[3].status == Task.Status.COMPLETED + tasks[4].taskType == 'EXCLUSIVE_JOIN' + tasks[4].status == Task.Status.COMPLETED + tasks[4].outputData['taskReferenceName'] == 'task2' + } + } + + def "Test when both the decisions, decision_1 and decision_2 are true"() { + given: "The input parameters to ensure that both the decisions are true" + def input = ["decision_1": "true", "decision_2": "true"] + + when: "An exclusive join workflow is started with then workflow input" + def workflowInstanceId = workflowExecutor.startWorkflow(EXCLUSIVE_JOIN_WF, 1, 'exclusive_join_workflow', + input, null, null, null) + + then: "verify that the workflow is in running state" + with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { + status == Workflow.WorkflowStatus.RUNNING + tasks.size() == 1 + tasks[0].taskType == 'integration_task_1' + tasks[0].status == Task.Status.SCHEDULED + } + + when: "the task 'integration_task_1' is polled and completed" + def polledAndCompletedTask1Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1' + + '.integration.worker', ["taskReferenceName": "task1"]) + + then: "verify that the task is completed and acknowledged" + verifyPolledAndAcknowledgedTask(polledAndCompletedTask1Try1) + + and: "verify that the 'integration_task_1' is COMPLETED and the workflow has COMPLETED" + with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { + status == Workflow.WorkflowStatus.RUNNING + tasks.size() == 3 + tasks[1].taskType == 'DECISION' + tasks[1].status == Task.Status.COMPLETED + tasks[2].taskType == 'integration_task_2' + tasks[2].status == Task.Status.SCHEDULED + } + + when: "the task 'integration_task_2' is polled and completed" + def polledAndCompletedTask2Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2' + + 
'.integration.worker', ["taskReferenceName": "task2"]) + + then: "verify that the task is completed and acknowledged" + verifyPolledAndAcknowledgedTask(polledAndCompletedTask2Try1) + + and: "verify that the 'integration_task_2' is COMPLETED and the workflow has progressed" + with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { + status == Workflow.WorkflowStatus.RUNNING + tasks.size() == 5 + tasks[0].taskType == 'integration_task_1' + tasks[0].status == Task.Status.COMPLETED + tasks[1].taskType == 'DECISION' + tasks[1].status == Task.Status.COMPLETED + tasks[2].taskType == 'integration_task_2' + tasks[2].status == Task.Status.COMPLETED + tasks[3].taskType == 'DECISION' + tasks[3].status == Task.Status.COMPLETED + tasks[4].taskType == 'integration_task_3' + tasks[4].status == Task.Status.SCHEDULED + } + + when: "the task 'integration_task_3' is polled and completed" + def polledAndCompletedTask3Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_3', 'task3' + + '.integration.worker', ["taskReferenceName": "task3"]) + + then: "verify that the task is completed and acknowledged" + verifyPolledAndAcknowledgedTask(polledAndCompletedTask3Try1) + + and: "verify that the 'integration_task_3' is COMPLETED and the workflow has COMPLETED" + with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { + status == Workflow.WorkflowStatus.COMPLETED + tasks.size() == 6 + tasks[0].taskType == 'integration_task_1' + tasks[0].status == Task.Status.COMPLETED + tasks[1].taskType == 'DECISION' + tasks[1].status == Task.Status.COMPLETED + tasks[2].taskType == 'integration_task_2' + tasks[2].status == Task.Status.COMPLETED + tasks[3].taskType == 'DECISION' + tasks[3].status == Task.Status.COMPLETED + tasks[4].taskType == 'integration_task_3' + tasks[4].status == Task.Status.COMPLETED + tasks[5].taskType == 'EXCLUSIVE_JOIN' + tasks[5].status == Task.Status.COMPLETED + tasks[5].outputData['taskReferenceName'] == 'task3' + } + } + + def "Test when decision_1 is false and decision_3 is default"() { + given: "The input parameter required to make decision_1 false and decision_3 default" + def input = ["decision_1": "false", "decision_3": "null"] + + when: "An exclusive join workflow is started with then workflow input" + def workflowInstanceId = workflowExecutor.startWorkflow(EXCLUSIVE_JOIN_WF, 1, 'exclusive_join_workflow', + input, null, null, null) + + then: "verify that the workflow is in running state" + with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { + status == Workflow.WorkflowStatus.RUNNING + tasks.size() == 1 + tasks[0].taskType == 'integration_task_1' + tasks[0].status == Task.Status.SCHEDULED + } + + when: "the task 'integration_task_1' is polled and completed" + def polledAndCompletedTask1Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1' + + '.integration.worker', ["taskReferenceName": "task1"]) + + then: "verify that the task is completed and acknowledged" + verifyPolledAndAcknowledgedTask(polledAndCompletedTask1Try1) + + and: "verify that the 'integration_task_1' is COMPLETED and the workflow has progressed" + with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { + status == Workflow.WorkflowStatus.RUNNING + tasks.size() == 3 + tasks[1].taskType == 'DECISION' + tasks[1].status == Task.Status.COMPLETED + tasks[2].taskType == 'integration_task_4' + tasks[2].status == Task.Status.SCHEDULED + } + + when: "the task 'integration_task_4' is polled and completed" + def 
polledAndCompletedTask4Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_4', 'task4' + + '.integration.worker', ["taskReferenceName": "task4"]) + + then: "verify that the task is completed and acknowledged" + verifyPolledAndAcknowledgedTask(polledAndCompletedTask4Try1) + + and: "verify that the 'integration_task_4' is COMPLETED and the workflow has COMPLETED" + with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { + status == Workflow.WorkflowStatus.COMPLETED + tasks.size() == 5 + tasks[0].taskType == 'integration_task_1' + tasks[0].status == Task.Status.COMPLETED + tasks[1].taskType == 'DECISION' + tasks[1].status == Task.Status.COMPLETED + tasks[2].taskType == 'integration_task_4' + tasks[2].status == Task.Status.COMPLETED + tasks[3].taskType == 'DECISION' + tasks[3].status == Task.Status.COMPLETED + tasks[4].taskType == 'EXCLUSIVE_JOIN' + tasks[4].status == Task.Status.COMPLETED + tasks[4].outputData['taskReferenceName'] == 'task4' + } + } + + def "Test when decision_1 is false and decision_3 is true"() { + given: "The input parameter required to make decision_1 false and decision_3 true" + def input = ["decision_1": "false", "decision_3": "true"] + + when: "An exclusive join workflow is started with then workflow input" + def workflowInstanceId = workflowExecutor.startWorkflow(EXCLUSIVE_JOIN_WF, 1, 'exclusive_join_workflow', + input, null, null, null) + + then: "verify that the workflow is in running state" + with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { + status == Workflow.WorkflowStatus.RUNNING + tasks.size() == 1 + tasks[0].taskType == 'integration_task_1' + tasks[0].status == Task.Status.SCHEDULED + } + + when: "the task 'integration_task_1' is polled and completed" + def polledAndCompletedTask1Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1' + + '.integration.worker', ["taskReferenceName": "task1"]) + + then: "verify that the task is completed and acknowledged" + verifyPolledAndAcknowledgedTask(polledAndCompletedTask1Try1) + + and: "verify that the 'integration_task_1' is COMPLETED and the workflow has progressed" + with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { + status == Workflow.WorkflowStatus.RUNNING + tasks.size() == 3 + tasks[1].taskType == 'DECISION' + tasks[1].status == Task.Status.COMPLETED + tasks[2].taskType == 'integration_task_4' + tasks[2].status == Task.Status.SCHEDULED + } + + when: "the task 'integration_task_4' is polled and completed" + def polledAndCompletedTask4Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_4', 'task4' + + '.integration.worker', ["taskReferenceName": "task4"]) + + then: "verify that the task is completed and acknowledged" + verifyPolledAndAcknowledgedTask(polledAndCompletedTask4Try1) + + and: "verify that the 'integration_task_4' is COMPLETED and the workflow has progressed" + with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { + status == Workflow.WorkflowStatus.RUNNING + tasks.size() == 5 + tasks[0].taskType == 'integration_task_1' + tasks[0].status == Task.Status.COMPLETED + tasks[1].taskType == 'DECISION' + tasks[1].status == Task.Status.COMPLETED + tasks[2].taskType == 'integration_task_4' + tasks[2].status == Task.Status.COMPLETED + tasks[3].taskType == 'DECISION' + tasks[3].status == Task.Status.COMPLETED + tasks[4].taskType == 'integration_task_5' + tasks[4].status == Task.Status.SCHEDULED + } + + when: "the task 'integration_task_5' is polled and completed" + def 
polledAndCompletedTask5Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_5', 'task5' + + '.integration.worker', ["taskReferenceName": "task5"]) + + then: "verify that the task is completed and acknowledged" + verifyPolledAndAcknowledgedTask(polledAndCompletedTask5Try1) + + and: "verify that the 'integration_task_4' is COMPLETED and the workflow has progressed" + with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { + status == Workflow.WorkflowStatus.COMPLETED + tasks.size() == 6 + tasks[0].taskType == 'integration_task_1' + tasks[0].status == Task.Status.COMPLETED + tasks[1].taskType == 'DECISION' + tasks[1].status == Task.Status.COMPLETED + tasks[2].taskType == 'integration_task_4' + tasks[2].status == Task.Status.COMPLETED + tasks[3].taskType == 'DECISION' + tasks[3].status == Task.Status.COMPLETED + tasks[4].taskType == 'integration_task_5' + tasks[4].status == Task.Status.COMPLETED + tasks[5].taskType == 'EXCLUSIVE_JOIN' + tasks[5].status == Task.Status.COMPLETED + tasks[5].outputData['taskReferenceName'] == 'task5' + } + } +} diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/ExternalPayloadStorageSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/ExternalPayloadStorageSpec.groovy index ae6d936678..0d717be2d4 100644 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/ExternalPayloadStorageSpec.groovy +++ b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/ExternalPayloadStorageSpec.groovy @@ -14,43 +14,22 @@ package com.netflix.conductor.test.integration import com.netflix.conductor.common.metadata.tasks.Task import com.netflix.conductor.common.metadata.tasks.TaskDef -import com.netflix.conductor.common.metadata.workflow.TaskType +import com.netflix.conductor.common.metadata.tasks.TaskType import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.core.execution.WorkflowExecutor import com.netflix.conductor.core.execution.tasks.SubWorkflow -import com.netflix.conductor.dao.QueueDAO -import com.netflix.conductor.service.ExecutionService -import com.netflix.conductor.service.MetadataService -import com.netflix.conductor.test.util.WorkflowTestUtil -import com.netflix.conductor.tests.utils.TestModule -import com.netflix.conductor.tests.utils.UserTask -import com.netflix.governator.guice.test.ModulesForTesting +import com.netflix.conductor.core.execution.tasks.WorkflowSystemTask +import com.netflix.conductor.test.base.AbstractSpecification +import com.netflix.conductor.test.utils.UserTask import spock.lang.Shared -import spock.lang.Specification - -import javax.inject.Inject import static com.netflix.conductor.test.util.WorkflowTestUtil.verifyPolledAndAcknowledgedLargePayloadTask import static com.netflix.conductor.test.util.WorkflowTestUtil.verifyPolledAndAcknowledgedTask -import static com.netflix.conductor.tests.utils.MockExternalPayloadStorage.* - -@ModulesForTesting([TestModule.class]) -class ExternalPayloadStorageSpec extends Specification { - - @Inject - ExecutionService workflowExecutionService - - @Inject - MetadataService metadataService - - @Inject - WorkflowExecutor workflowExecutor +import static com.netflix.conductor.test.utils.MockExternalPayloadStorage.INITIAL_WORKFLOW_INPUT_PATH +import static com.netflix.conductor.test.utils.MockExternalPayloadStorage.INPUT_PAYLOAD_PATH +import static com.netflix.conductor.test.utils.MockExternalPayloadStorage.TASK_OUTPUT_PATH +import static 
com.netflix.conductor.test.utils.MockExternalPayloadStorage.WORKFLOW_OUTPUT_PATH - @Inject - WorkflowTestUtil workflowTestUtil - - @Inject - UserTask userTask +class ExternalPayloadStorageSpec extends AbstractSpecification { @Shared def LINEAR_WORKFLOW_T1_T2 = 'integration_test_wf' @@ -64,9 +43,6 @@ class ExternalPayloadStorageSpec extends Specification { @Shared def WORKFLOW_WITH_INLINE_SUB_WF = "WorkflowWithInlineSubWorkflow" - @Inject - QueueDAO queueDAO - def setup() { workflowTestUtil.registerWorkflows('simple_workflow_1_integration_test.json', 'conditional_system_task_workflow_integration_test.json', @@ -74,10 +50,6 @@ class ExternalPayloadStorageSpec extends Specification { 'simple_workflow_with_sub_workflow_inline_def_integration_test.json') } - def cleanup() { - workflowTestUtil.clearWorkflows() - } - def "Test simple workflow using external payload storage"() { given: "An existing simple workflow definition" @@ -196,7 +168,7 @@ class ExternalPayloadStorageSpec extends Specification { when: "the system task 'USER_TASK' is started by issuing a system task call" def workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true) def taskId = workflow.getTaskByRefName('user_task').getTaskId() - workflowExecutor.executeSystemTask(userTask, taskId, 1) + workflowExecutor.executeSystemTask(WorkflowSystemTask.get(UserTask.NAME), taskId, 1) then: "verify that the user task is in a COMPLETED state" with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { @@ -440,7 +412,7 @@ class ExternalPayloadStorageSpec extends Specification { when: "the subworkflow is started by issuing a system task call" def workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true) def subWorkflowTaskId = workflow.getTaskByRefName('swt').taskId - workflowExecutor.executeSystemTask(new SubWorkflow(), subWorkflowTaskId, 1) + workflowExecutor.executeSystemTask(WorkflowSystemTask.get(SubWorkflow.NAME), subWorkflowTaskId, 1) then: "verify that the sub workflow task is in a IN_PROGRESS state" with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { @@ -552,7 +524,8 @@ class ExternalPayloadStorageSpec extends Specification { setup: "Modify the task definition" def persistedTask2Definition = workflowTestUtil.getPersistedTaskDefinition('integration_task_2').get() def modifiedTask2Definition = new TaskDef(persistedTask2Definition.name, persistedTask2Definition.description, - 2, persistedTask2Definition.timeoutSeconds) + persistedTask2Definition.ownerEmail, 2, persistedTask2Definition.timeoutSeconds, + persistedTask2Definition.responseTimeoutSeconds) modifiedTask2Definition.setRetryDelaySeconds(0) metadataService.updateTaskDef(modifiedTask2Definition) diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/ForkJoinSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/ForkJoinSpec.groovy index 185fbba845..e3c3fd436c 100644 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/ForkJoinSpec.groovy +++ b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/ForkJoinSpec.groovy @@ -1,50 +1,14 @@ -/** - * Copyright 2020 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ package com.netflix.conductor.test.integration - import com.netflix.conductor.common.metadata.tasks.Task import com.netflix.conductor.common.metadata.tasks.TaskDef import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.core.execution.WorkflowExecutor import com.netflix.conductor.core.execution.tasks.SubWorkflow -import com.netflix.conductor.service.ExecutionService -import com.netflix.conductor.service.MetadataService -import com.netflix.conductor.test.util.WorkflowTestUtil -import com.netflix.conductor.tests.utils.TestModule -import com.netflix.governator.guice.test.ModulesForTesting +import com.netflix.conductor.core.execution.tasks.WorkflowSystemTask +import com.netflix.conductor.test.base.AbstractSpecification import spock.lang.Shared -import spock.lang.Specification - -import javax.inject.Inject - -@ModulesForTesting([TestModule.class]) -class ForkJoinSpec extends Specification { - - @Inject - ExecutionService workflowExecutionService - - @Inject - MetadataService metadataService - @Inject - WorkflowExecutor workflowExecutor - - @Inject - WorkflowTestUtil workflowTestUtil +class ForkJoinSpec extends AbstractSpecification { @Shared def FORK_JOIN_WF = 'FanInOutTest' @@ -61,10 +25,6 @@ class ForkJoinSpec extends Specification { @Shared def FORK_JOIN_SUB_WORKFLOW = 'integration_test_fork_join_sw' - def cleanup() { - workflowTestUtil.clearWorkflows() - } - def setup() { workflowTestUtil.registerWorkflows('fork_join_integration_test.json', 'fork_join_with_no_task_retry_integration_test.json', @@ -78,18 +38,18 @@ class ForkJoinSpec extends Specification { } /** - * start - * | - * fork - * / \ + * start + * | + * fork + * / \ * task1 task2 - * \ / - * task3 / + * | / + * task3 / * \ / * \ / * join * | - * task4 + * task4 * | * End */ @@ -185,12 +145,12 @@ class ForkJoinSpec extends Specification { } } - def "Test a simple workflow with fork join failure flow"() { setup: "Ensure that 'integration_task_2' has a retry count of 0" def persistedIntegrationTask2Definition = workflowTestUtil.getPersistedTaskDefinition('integration_task_2').get() def modifiedIntegrationTask2Definition = new TaskDef(persistedIntegrationTask2Definition.name, - persistedIntegrationTask2Definition.description, 0, 0) + persistedIntegrationTask2Definition.description, persistedIntegrationTask2Definition.ownerEmail, 0, + 0, persistedIntegrationTask2Definition.responseTimeoutSeconds) metadataService.updateTaskDef(modifiedIntegrationTask2Definition) when: "A fork join workflow is started" @@ -627,7 +587,7 @@ class ForkJoinSpec extends Specification { and: "Get the sub workflow id associated with the SubWorkflow Task sw1 and start the system task" def workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true) def subWorkflowTaskId = workflow.getTaskByRefName("sw1").getTaskId() - workflowExecutor.executeSystemTask(new SubWorkflow(), subWorkflowTaskId, 1) + workflowExecutor.executeSystemTask(WorkflowSystemTask.get(SubWorkflow.NAME), subWorkflowTaskId, 1) def updatedWorkflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true) def 
subWorkflowInstanceId = updatedWorkflow.getTaskByRefName('sw1').subWorkflowId @@ -783,9 +743,9 @@ class ForkJoinSpec extends Specification { when: "both the sub workflows are started by issuing a system task call" def workflowWithScheduledSubWorkflows = workflowExecutionService.getExecutionStatus(workflowInstanceId, true) def subWorkflowTaskId1 = workflowWithScheduledSubWorkflows.getTaskByRefName('st1').taskId - workflowExecutor.executeSystemTask(new SubWorkflow(), subWorkflowTaskId1, 1) + workflowExecutor.executeSystemTask(WorkflowSystemTask.get(SubWorkflow.NAME), subWorkflowTaskId1, 1) def subWorkflowTaskId2 = workflowWithScheduledSubWorkflows.getTaskByRefName('st2').taskId - workflowExecutor.executeSystemTask(new SubWorkflow(), subWorkflowTaskId2, 1) + workflowExecutor.executeSystemTask(WorkflowSystemTask.get(SubWorkflow.NAME), subWorkflowTaskId2, 1) then: "verify that the sub workflow tasks are in a IN PROGRESS state" with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { @@ -886,7 +846,7 @@ class ForkJoinSpec extends Specification { when: "the subworkflow is started by issuing a system task call" def parentWorkflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true) def subWorkflowTaskId = parentWorkflow.getTaskByRefName('st1').taskId - workflowExecutor.executeSystemTask(new SubWorkflow(), subWorkflowTaskId, 1) + workflowExecutor.executeSystemTask(WorkflowSystemTask.get(SubWorkflow.NAME), subWorkflowTaskId, 1) then: "verify that the sub workflow task is in a IN_PROGRESS state" with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { @@ -950,7 +910,7 @@ class ForkJoinSpec extends Specification { when: "the sub workflow is started by issuing a system task call" parentWorkflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true) subWorkflowTaskId = parentWorkflow.getTaskByRefName('st1').taskId - workflowExecutor.executeSystemTask(new SubWorkflow(), subWorkflowTaskId, 1) + workflowExecutor.executeSystemTask(WorkflowSystemTask.get(SubWorkflow.NAME), subWorkflowTaskId, 1) then: "verify that the sub workflow task is in a IN PROGRESS state" with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/JsonJQTransformSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/JsonJQTransformSpec.groovy new file mode 100644 index 0000000000..4774808478 --- /dev/null +++ b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/JsonJQTransformSpec.groovy @@ -0,0 +1,87 @@ +/* + * Copyright 2020 Netflix, Inc. + *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *
+ * http://www.apache.org/licenses/LICENSE-2.0 + *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package com.netflix.conductor.test.integration + +import com.netflix.conductor.common.metadata.tasks.Task +import com.netflix.conductor.common.run.Workflow +import com.netflix.conductor.test.base.AbstractSpecification +import spock.lang.Shared + +class JsonJQTransformSpec extends AbstractSpecification { + + @Shared + def JSON_JQ_TRANSFORM_WF = 'test_json_jq_transform_wf' + + def setup() { + workflowTestUtil.registerWorkflows( + 'simple_json_jq_transform_integration_test.json', + ) + } + + /** + * Given the following input JSON + *{* "in1": {* "array": [ "a", "b" ] + *}, + * "in2": {* "array": [ "c", "d" ] + *}*}* expect the workflow task to transform to following result: + *{* out: [ "a", "b", "c", "d" ] + *}*/ + def "Test workflow with json jq transform task succeeds"() { + given: "workflow input" + def workflowInput = new HashMap() + workflowInput['in1'] = new HashMap() + workflowInput['in1']['array'] = ["a", "b"] + workflowInput['in2'] = new HashMap() + workflowInput['in2']['array'] = ["c", "d"] + + when: "workflow which has the json jq transform task has started" + def workflowInstanceId = workflowExecutor.startWorkflow(JSON_JQ_TRANSFORM_WF, 1, + '', workflowInput, null, null, null) + + then: "verify that the workflow and task are completed with expected output" + with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { + status == Workflow.WorkflowStatus.COMPLETED + tasks.size() == 1 + tasks[0].status == Task.Status.COMPLETED + tasks[0].taskType == 'JSON_JQ_TRANSFORM' + tasks[0].outputData as String == "[result:[out:[a, b, c, d]], resultList:[[out:[a, b, c, d]]]]" + } + } + + /** + * Given the following input JSON + *{* "in1": "a", + * "in2": "b" + *}* using the same query from the success test, jq will try to get in1['array'] + * and fail since 'in1' is a string + */ + def "Test workflow with json jq transform task fails"() { + given: "workflow input" + def workflowInput = new HashMap() + workflowInput['in1'] = "a" + workflowInput['in2'] = "b" + + when: "workflow which has the json jq transform task has started" + def workflowInstanceId = workflowExecutor.startWorkflow(JSON_JQ_TRANSFORM_WF, 1, + '', workflowInput, null, null, null) + + then: "verify that the workflow and task failed with expected error" + with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { + status == Workflow.WorkflowStatus.FAILED + tasks.size() == 1 + tasks[0].status == Task.Status.FAILED + tasks[0].taskType == 'JSON_JQ_TRANSFORM' + tasks[0].reasonForIncompletion as String == "Cannot index string with string \"array\"" + } + } +} diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/KafkaTaskSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/KafkaTaskSpec.groovy index 6e277a2788..6db0826861 100644 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/KafkaTaskSpec.groovy +++ b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/KafkaTaskSpec.groovy @@ -1,57 +1,20 @@ -/* - * Copyright 2020 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ package com.netflix.conductor.test.integration import com.fasterxml.jackson.databind.ObjectMapper -import com.netflix.archaius.guice.ArchaiusModule import com.netflix.conductor.common.metadata.tasks.TaskDef import com.netflix.conductor.common.metadata.tasks.TaskResult -import com.netflix.conductor.common.metadata.workflow.TaskType +import com.netflix.conductor.common.metadata.tasks.TaskType import com.netflix.conductor.common.metadata.workflow.WorkflowDef import com.netflix.conductor.common.metadata.workflow.WorkflowTask import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.common.utils.JsonMapperProvider -import com.netflix.conductor.core.execution.WorkflowExecutor -import com.netflix.conductor.service.ExecutionService -import com.netflix.conductor.service.MetadataService -import com.netflix.conductor.test.util.WorkflowTestUtil -import com.netflix.conductor.tests.utils.TestModule -import com.netflix.governator.guice.test.ModulesForTesting +import com.netflix.conductor.test.base.AbstractSpecification +import org.springframework.beans.factory.annotation.Autowired import spock.lang.Shared -import spock.lang.Specification -import javax.inject.Inject +class KafkaTaskSpec extends AbstractSpecification { -@ModulesForTesting([TestModule.class, ArchaiusModule.class]) -class KafkaTaskSpec extends Specification { - - @Inject - ExecutionService workflowExecutionService - - @Inject - MetadataService metadataService - - @Inject - WorkflowExecutor workflowExecutor - - @Inject - WorkflowTestUtil registrationUtil - - @Shared - ObjectMapper objectMapper = new JsonMapperProvider().get() + @Autowired + ObjectMapper objectMapper @Shared def isWorkflowRegistered = false @@ -62,15 +25,10 @@ class KafkaTaskSpec extends Specification { 'outputPath' : 's3://bucket/outputPath' ] - def expectedTaskInput = "{\"kafka_request\":{\"topic\":\"test_kafka_topic\",\"bootStrapServers\":\"localhost:9092\",\"value\":{\"requestDetails\":{\"key1\":\"value1\",\"key2\":42},\"outputPath\":\"s3://bucket/outputPath\",\"inputPaths\":[\"file://path1\",\"file://path2\"]}}}" - def cleanup() { - registrationUtil.clearWorkflows() - } - def setup() { - if(!isWorkflowRegistered) { + if (!isWorkflowRegistered) { registerKafkaWorkflow() isWorkflowRegistered = true } @@ -78,17 +36,17 @@ class KafkaTaskSpec extends Specification { def "Test the kafka template usage failure case"() { - given:"Start a workflow based on the registered workflow" + given: "Start a workflow based on the registered workflow" def workflowInstanceId = workflowExecutor.startWorkflow("template_kafka_workflow", 1, "testTaskDefTemplate", kafkaInput, null, null, null) - and:"Get the workflow based on the Id that is being executed" + and: "Get the workflow based on the Id that is being executed" def workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true) def task = workflow.tasks.get(0) def taskInput = task.inputData - when:"Ensure that the task is pollable and fail the task" + when: "Ensure that the task is pollable and fail the task" def polledTask = workflowExecutionService.poll('KAFKA_PUBLISH', 'test') 
workflowExecutionService.ackTaskReceived(polledTask.taskId) def taskResult = new TaskResult(polledTask) @@ -98,89 +56,86 @@ class KafkaTaskSpec extends Specification { taskResult.addOutputData("ErrorMessage", "There was a terminal error") workflowExecutionService.updateTask(taskResult) - and:"Then run a decide to move the workflow forward" + and: "Then run a decide to move the workflow forward" workflowExecutor.decide(workflowInstanceId) - and:"Get the updated workflow after the task result has been updated" + and: "Get the updated workflow after the task result has been updated" def updatedWorkflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true) - then:"Check that the workflow is created and is not terminal" + then: "Check that the workflow is created and is not terminal" workflowInstanceId workflow !workflow.getStatus().isTerminal() !workflow.getReasonForIncompletion() - and:"Check if the input of the next task to be polled is as expected for a kafka task" + and: "Check if the input of the next task to be polled is as expected for a kafka task" taskInput taskInput.containsKey('kafka_request') taskInput['kafka_request'] instanceof Map objectMapper.writeValueAsString(taskInput) == expectedTaskInput - and:"Polled task is not null and the workflowInstanceId of the task is same as the workflow created initially" + and: "Polled task is not null and the workflowInstanceId of the task is same as the workflow created initially" polledTask polledTask.workflowInstanceId == workflowInstanceId - and:"The updated workflow is in a failed state" + and: "The updated workflow is in a failed state" updatedWorkflow updatedWorkflow.status == Workflow.WorkflowStatus.FAILED - - } - def "Test the kafka template usage success case"() { - given:"Start a workflow based on the registered kafka workflow" + given: "Start a workflow based on the registered kafka workflow" def workflowInstanceId = workflowExecutor.startWorkflow("template_kafka_workflow", 1, "testTaskDefTemplate", kafkaInput, null, null, null) - and:"Get the workflow based on the Id that is being executed" + and: "Get the workflow based on the Id that is being executed" def workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true) def task = workflow.tasks.get(0) def taskInput = task.inputData - when:"Ensure that the task is pollable and complete the task" + when: "Ensure that the task is pollable and complete the task" def polledTask = workflowExecutionService.poll('KAFKA_PUBLISH', 'test') workflowExecutionService.ackTaskReceived(polledTask.taskId) def taskResult = new TaskResult(polledTask) taskResult.setStatus(TaskResult.Status.COMPLETED) workflowExecutionService.updateTask(taskResult) - and:"Then run a decide to move the workflow forward" + and: "Then run a decide to move the workflow forward" workflowExecutor.decide(workflowInstanceId) - and:"Get the updated workflow after the task result has been updated" + and: "Get the updated workflow after the task result has been updated" def updatedWorkflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true) - then:"Check that the workflow is created and is not terminal" + then: "Check that the workflow is created and is not terminal" workflowInstanceId workflow !workflow.getStatus().isTerminal() !workflow.getReasonForIncompletion() - and:"Check if the input of the next task to be polled is as expected for a kafka task" + and: "Check if the input of the next task to be polled is as expected for a kafka task" taskInput 
taskInput.containsKey('kafka_request') taskInput['kafka_request'] instanceof Map objectMapper.writeValueAsString(taskInput) == expectedTaskInput - and:"Polled task is not null and the workflowInstanceId of the task is same as the workflow created initially" + and: "Polled task is not null and the workflowInstanceId of the task is same as the workflow created initially" polledTask polledTask.workflowInstanceId == workflowInstanceId - and:"The updated workflow is complete" + and: "The updated workflow is complete" updatedWorkflow updatedWorkflow.status == Workflow.WorkflowStatus.COMPLETED } - def registerKafkaWorkflow() { System.setProperty("STACK_KAFKA", "test_kafka_topic") TaskDef templatedTask = new TaskDef() templatedTask.name = "templated_kafka_task" templatedTask.retryCount = 0 + templatedTask.ownerEmail = "test@harness.com" def kafkaRequest = new HashMap<>() kafkaRequest["topic"] = '${STACK_KAFKA}' @@ -203,6 +158,7 @@ class KafkaTaskSpec extends Specification { wft.taskReferenceName = "t0" templateWf.tasks.add(wft) templateWf.schemaVersion = 2 + templateWf.ownerEmail = "test@harness.com" metadataService.registerWorkflowDef(templateWf) } } diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/LambdaAndTerminateTaskSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/LambdaAndTerminateTaskSpec.groovy new file mode 100644 index 0000000000..0f882817c9 --- /dev/null +++ b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/LambdaAndTerminateTaskSpec.groovy @@ -0,0 +1,172 @@ +/* + * Copyright 2020 Netflix, Inc. + *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *
+ * http://www.apache.org/licenses/LICENSE-2.0 + *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package com.netflix.conductor.test.integration + +import com.netflix.conductor.common.metadata.tasks.Task +import com.netflix.conductor.common.metadata.tasks.TaskResult +import com.netflix.conductor.common.run.Workflow +import com.netflix.conductor.test.base.AbstractSpecification +import spock.lang.Shared + +class LambdaAndTerminateTaskSpec extends AbstractSpecification { + + @Shared + def WORKFLOW_WITH_TERMINATE_TASK = 'test_terminate_task_wf' + + @Shared + def WORKFLOW_WITH_TERMINATE_TASK_FAILED = 'test_terminate_task_failed_wf' + + @Shared + def WORKFLOW_WITH_LAMBDA_TASK = 'test_lambda_wf' + + @Shared + def PARENT_WORKFLOW_WITH_TERMINATE_TASK = 'test_terminate_task_parent_wf' + + @Shared + def SUBWORKFLOW_FOR_TERMINATE_TEST = 'test_terminate_task_sub_wf' + + def setup() { + workflowTestUtil.registerWorkflows( + 'failure_workflow_for_terminate_task_workflow.json', + 'terminate_task_completed_workflow_integration_test.json', + 'terminate_task_failed_workflow_integration.json', + 'simple_lambda_workflow_integration_test.json', + 'terminate_task_parent_workflow.json', + 'terminate_task_sub_workflow.json' + ) + } + + def "Test workflow with a terminate task when the status is completed"() { + given: "workflow input" + def workflowInput = new HashMap() + workflowInput['a'] = 1 + + when: "Start the workflow which has the terminate task" + def workflowInstanceId = workflowExecutor.startWorkflow(WORKFLOW_WITH_TERMINATE_TASK, 1, + '', workflowInput, null, null, null) + + then: "Ensure that the workflow has started and the first task is in scheduled state" + with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { + status == Workflow.WorkflowStatus.COMPLETED + tasks.size() == 2 + tasks[0].status == Task.Status.COMPLETED + tasks[0].taskType == 'LAMBDA' + tasks[1].status == Task.Status.COMPLETED + tasks[1].taskType == 'TERMINATE' + } + } + + def "Test workflow with a terminate task when the status is failed"() { + given: "workflow input" + def workflowInput = new HashMap() + workflowInput['a'] = 1 + + when: "Start the workflow which has the terminate task" + def workflowInstanceId = workflowExecutor.startWorkflow(WORKFLOW_WITH_TERMINATE_TASK_FAILED, 1, + '', workflowInput, null, null, null) + + then: "Verify that the workflow has failed" + with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { + status == Workflow.WorkflowStatus.FAILED + tasks.size() == 2 + tasks[0].status == Task.Status.COMPLETED + tasks[0].taskType == 'LAMBDA' + tasks[1].status == Task.Status.COMPLETED + tasks[1].taskType == 'TERMINATE' + output + def failedWorkflowId = output['conductor.failure_workflow'] as String + with(workflowExecutionService.getExecutionStatus(failedWorkflowId, true)) { + status == Workflow.WorkflowStatus.COMPLETED + input['workflowId'] == workflowInstanceId + tasks.size() == 1 + tasks[0].taskType == 'LAMBDA' + } + } + } + + def "Test workflow with a terminate task when the workflow has a subworkflow"() { + given: "workflow input" + def workflowInput = new HashMap() + workflowInput['a'] = 1 + + when: "Start the workflow which has the terminate task" + def workflowInstanceId = workflowExecutor.startWorkflow(PARENT_WORKFLOW_WITH_TERMINATE_TASK, 1, 
+ '', workflowInput, null, null, null) + + then: "verify that the workflow has started and the tasks are as expected" + with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { + status == Workflow.WorkflowStatus.RUNNING + tasks.size() == 6 + tasks[0].status == Task.Status.COMPLETED + tasks[0].taskType == 'FORK' + tasks[1].status == Task.Status.COMPLETED + tasks[1].taskType == 'LAMBDA' + tasks[1].referenceTaskName == 'lambdaTask1' + tasks[2].status == Task.Status.COMPLETED + tasks[2].taskType == 'LAMBDA' + tasks[2].referenceTaskName == 'lambdaTask2' + tasks[3].status == Task.Status.IN_PROGRESS + tasks[3].taskType == 'JOIN' + tasks[4].status == Task.Status.SCHEDULED || tasks[4].status == Task.Status.IN_PROGRESS + tasks[4].taskType == 'SUB_WORKFLOW' + tasks[5].status == Task.Status.IN_PROGRESS + tasks[5].taskType == 'WAIT' + } + + when: "Complete the WAIT task that should cause the TERMINATE task to execute" + def waitTask = workflowExecutionService.getExecutionStatus(workflowInstanceId, true).tasks[5] + waitTask.status = Task.Status.COMPLETED + workflowExecutor.updateTask(new TaskResult(waitTask)) + + then: "Verify that the workflow has completed and the SUB_WORKFLOW is not still IN_PROGRESS (should be SKIPPED)" + with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { + status == Workflow.WorkflowStatus.COMPLETED + tasks.size() == 7 + tasks[0].status == Task.Status.COMPLETED + tasks[0].taskType == 'FORK' + tasks[1].status == Task.Status.COMPLETED + tasks[1].taskType == 'LAMBDA' + tasks[1].referenceTaskName == 'lambdaTask1' + tasks[2].status == Task.Status.COMPLETED + tasks[2].taskType == 'LAMBDA' + tasks[2].referenceTaskName == 'lambdaTask2' + tasks[3].status == Task.Status.SKIPPED + tasks[3].taskType == 'JOIN' + tasks[4].status == Task.Status.SKIPPED + tasks[4].taskType == 'SUB_WORKFLOW' + tasks[5].status == Task.Status.COMPLETED + tasks[5].taskType == 'WAIT' + tasks[6].status == Task.Status.COMPLETED + tasks[6].taskType == 'TERMINATE' + } + } + + def "Test workflow with lambda task"() { + given: "workflow input" + def workflowInput = new HashMap() + workflowInput['a'] = 1 + + when: "Start the workflow which has the terminate task" + def workflowInstanceId = workflowExecutor.startWorkflow(WORKFLOW_WITH_LAMBDA_TASK, 1, + '', workflowInput, null, null, null) + + then: "verify that the task is completed" + with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { + status == Workflow.WorkflowStatus.COMPLETED + tasks.size() == 1 + tasks[0].status == Task.Status.COMPLETED + tasks[0].taskType == 'LAMBDA' + tasks[0].outputData as String == "[result:[testvalue:true]]" + } + } +} diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/SetVariableTaskSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/SetVariableTaskSpec.groovy index 6908920837..4f314f7e28 100644 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/SetVariableTaskSpec.groovy +++ b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/SetVariableTaskSpec.groovy @@ -1,58 +1,33 @@ /* * Copyright 2020 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *
+ * http://www.apache.org/licenses/LICENSE-2.0 + *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. */ - package com.netflix.conductor.test.integration import com.netflix.conductor.common.metadata.tasks.Task import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.core.execution.WorkflowExecutor -import com.netflix.conductor.service.ExecutionService -import com.netflix.conductor.test.util.WorkflowTestUtil -import com.netflix.conductor.tests.utils.TestModule -import com.netflix.governator.guice.test.ModulesForTesting +import com.netflix.conductor.test.base.AbstractSpecification import spock.lang.Shared -import spock.lang.Specification - -import javax.inject.Inject - -@ModulesForTesting([TestModule.class]) -class SetVariableTaskSpec extends Specification { - - @Inject - ExecutionService workflowExecutionService - @Inject - WorkflowExecutor workflowExecutor - - @Inject - WorkflowTestUtil workflowTestUtil +class SetVariableTaskSpec extends AbstractSpecification { @Shared def SET_VARIABLE_WF = 'test_set_variable_wf' def setup() { workflowTestUtil.registerWorkflows( - 'simple_set_variable_workflow_integration_test.json' + 'simple_set_variable_workflow_integration_test.json' ) } - def cleanup() { - workflowTestUtil.clearWorkflows() - } - def "Test workflow with set variable task"() { given: "workflow input" def workflowInput = new HashMap() @@ -62,7 +37,7 @@ class SetVariableTaskSpec extends Specification { def workflowInstanceId = workflowExecutor.startWorkflow(SET_VARIABLE_WF, 1, '', workflowInput, null, null, null) - then:"verify that the task is completed and variables were set" + then: "verify that the task is completed and variables were set" with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { status == Workflow.WorkflowStatus.COMPLETED tasks.size() == 1 @@ -76,21 +51,20 @@ class SetVariableTaskSpec extends Specification { def "Test workflow with set variable task passing variables payload size threshold"() { given: "workflow input" def workflowInput = new HashMap() - // Threshold is defined in MockConfiguration under getMaxWorkflowVariablesPayloadSizeThresholdKB long maxThreshold = 2 workflowInput['var'] = String.join("", - Collections.nCopies(1 + ((int)(maxThreshold * 1024 / 8)), "01234567" )); + Collections.nCopies(1 + ((int) (maxThreshold * 1024 / 8)), "01234567")) when: "Start the workflow which has the set variable task" def workflowInstanceId = workflowExecutor.startWorkflow(SET_VARIABLE_WF, 1, - '', workflowInput, null, null, null) + '', workflowInput, null, null, null) def EXTRA_HASHMAP_SIZE = 17 def expectedErrorMessage = - String.format( - "The variables payload size: %dB of workflow: %s is greater than the permissible limit: %dKB", - EXTRA_HASHMAP_SIZE + maxThreshold * 1024 + 1, workflowInstanceId, maxThreshold); + String.format( + "The variables payload size: %dB of workflow: %s is greater than the permissible limit: %dKB", + EXTRA_HASHMAP_SIZE + maxThreshold * 1024 + 1, workflowInstanceId, maxThreshold) - then:"verify that the task is completed and variables were set" + then: "verify that the task is completed and variables were set" with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { status == Workflow.WorkflowStatus.FAILED tasks.size() == 1 diff --git 
a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/SimpleWorkflowSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/SimpleWorkflowSpec.groovy index 43506a535b..dbe3457525 100644 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/SimpleWorkflowSpec.groovy +++ b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/SimpleWorkflowSpec.groovy @@ -1,60 +1,37 @@ /* * Copyright 2020 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *
+ * http://www.apache.org/licenses/LICENSE-2.0 + *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.test.integration import com.netflix.conductor.common.metadata.tasks.Task import com.netflix.conductor.common.metadata.tasks.TaskDef import com.netflix.conductor.common.metadata.tasks.TaskResult -import com.netflix.conductor.common.metadata.workflow.TaskType +import com.netflix.conductor.common.metadata.tasks.TaskType import com.netflix.conductor.common.metadata.workflow.WorkflowDef import com.netflix.conductor.common.metadata.workflow.WorkflowTask import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.core.execution.ApplicationException -import com.netflix.conductor.core.execution.WorkflowExecutor +import com.netflix.conductor.core.exception.ApplicationException import com.netflix.conductor.dao.QueueDAO -import com.netflix.conductor.service.ExecutionService -import com.netflix.conductor.service.MetadataService -import com.netflix.conductor.test.util.WorkflowTestUtil -import com.netflix.conductor.tests.utils.TestModule -import com.netflix.governator.guice.test.ModulesForTesting +import com.netflix.conductor.test.base.AbstractSpecification import org.apache.commons.lang3.StringUtils +import org.springframework.beans.factory.annotation.Autowired import spock.lang.Shared -import spock.lang.Specification -import javax.inject.Inject - -import static com.netflix.conductor.core.execution.ApplicationException.Code.CONFLICT +import static com.netflix.conductor.core.exception.ApplicationException.Code.CONFLICT import static com.netflix.conductor.test.util.WorkflowTestUtil.verifyPolledAndAcknowledgedTask -@ModulesForTesting([TestModule.class]) -class SimpleWorkflowSpec extends Specification { - - @Inject - ExecutionService workflowExecutionService - - @Inject - MetadataService metadataService - - @Inject - WorkflowExecutor workflowExecutor +class SimpleWorkflowSpec extends AbstractSpecification { - @Inject - WorkflowTestUtil workflowTestUtil - - @Inject + @Autowired QueueDAO queueDAO @Shared @@ -70,10 +47,6 @@ class SimpleWorkflowSpec extends Specification { 'simple_workflow_with_resp_time_out_integration_test.json') } - def cleanup() { - workflowTestUtil.clearWorkflows() - } - def "Test simple workflow completion"() { given: "An existing simple workflow definition" @@ -150,7 +123,7 @@ class SimpleWorkflowSpec extends Specification { null, null, null) then: "verify the workflow has started and the input params have propagated" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)){ + with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { status == Workflow.WorkflowStatus.RUNNING tasks.size() == 1 input['param2'] == null @@ -160,14 +133,14 @@ class SimpleWorkflowSpec extends Specification { } when: "'integration_task_1' is polled and completed with output data" - def pollAndCompleteTask1Try1= workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', - ['someOtherKey': ['a':1, 'A': null], 'someKey': null]) + def pollAndCompleteTask1Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', + ['someOtherKey': ['a': 1, 'A': null], 'someKey': null]) then: "verify that the 'integration_task_1' was polled and acknowledged" 
verifyPolledAndAcknowledgedTask(pollAndCompleteTask1Try1) and: "verify that the task is completed and the output data has propagated as input data to 'integration_task_2'" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)){ + with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { status == Workflow.WorkflowStatus.RUNNING tasks.size() == 2 tasks[0].taskType == 'integration_task_1' @@ -181,10 +154,11 @@ class SimpleWorkflowSpec extends Specification { } def "Test simple workflow terminal error condition"() { - setup:"Modify the task definition and the workflow output definition" + setup: "Modify the task definition and the workflow output definition" def persistedTask1Definition = workflowTestUtil.getPersistedTaskDefinition('integration_task_1').get() def modifiedTask1Definition = new TaskDef(persistedTask1Definition.name, persistedTask1Definition.description, - 1, persistedTask1Definition.timeoutSeconds) + persistedTask1Definition.ownerEmail, 1, persistedTask1Definition.timeoutSeconds, + persistedTask1Definition.responseTimeoutSeconds) metadataService.updateTaskDef(modifiedTask1Definition) def workflowDef = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1) @@ -203,7 +177,7 @@ class SimpleWorkflowSpec extends Specification { null, null, null) then: "verify that the workflow has started" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)){ + with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { status == Workflow.WorkflowStatus.RUNNING tasks.size() == 1 } @@ -334,17 +308,23 @@ class SimpleWorkflowSpec extends Specification { verifyPolledAndAcknowledgedTask(polledIntegrationTask2Try1) and: "verify that the workflow is in a completed state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)){ + with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { status == Workflow.WorkflowStatus.COMPLETED } } - def "Test if the workflow definitions with and without schema version can be registered"() { given: "A workflow definition with no schema version" def workflowDef1 = new WorkflowDef() workflowDef1.name = 'Test_schema_version1' workflowDef1.version = 1 + workflowDef1.ownerEmail = "test@harness.com" + + and: "A new workflow task is created" + def workflowTask = new WorkflowTask() + workflowTask.name = 'integration_task_1' + workflowTask.taskReferenceName = 't1' + workflowDef1.tasks.add(workflowTask) and: "The workflow definition with no schema version is saved" metadataService.updateWorkflowDef(workflowDef1) @@ -354,6 +334,8 @@ class SimpleWorkflowSpec extends Specification { workflowDef2.name = 'Test_schema_version2' workflowDef2.version = 1 workflowDef2.schemaVersion = 2 + workflowDef2.ownerEmail = "test@harness.com" + workflowDef2.tasks.add(workflowTask) and: "The workflow definition with schema version is persisted" metadataService.updateWorkflowDef(workflowDef2) @@ -369,24 +351,24 @@ class SimpleWorkflowSpec extends Specification { foundWorkflowDef2.schemaVersion == 2 } - def "Test Simple workflow restart without using the latest definition"() { - setup:"Register a task definition with no retries" + setup: "Register a task definition with no retries" def persistedTask1Definition = workflowTestUtil.getPersistedTaskDefinition('integration_task_1').get() def modifiedTaskDefinition = new TaskDef(persistedTask1Definition.name, persistedTask1Definition.description, - 0, persistedTask1Definition.timeoutSeconds) + persistedTask1Definition.ownerEmail, 0, 
persistedTask1Definition.timeoutSeconds, + persistedTask1Definition.responseTimeoutSeconds) metadataService.updateTaskDef(modifiedTaskDefinition) - when:"Get the workflow definition associated with the simple workflow" + when: "Get the workflow definition associated with the simple workflow" WorkflowDef workflowDefinition = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1) - then:"Ensure that there is a workflow definition" + then: "Ensure that there is a workflow definition" workflowDefinition workflowDefinition.failureWorkflow StringUtils.isNotBlank(workflowDefinition.failureWorkflow) - when:"Start a simple workflow with non null params" - def correlationId = 'integration_test_1'+ UUID.randomUUID().toString() + when: "Start a simple workflow with non null params" + def correlationId = 'integration_test_1' + UUID.randomUUID().toString() def workflowInput = new HashMap() String inputParam1 = 'p1 value' workflowInput['param1'] = inputParam1 @@ -396,34 +378,34 @@ class SimpleWorkflowSpec extends Specification { correlationId, workflowInput, null, null, null) - then:"A workflow instance has started" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)){ + then: "A workflow instance has started" + with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { status == Workflow.WorkflowStatus.RUNNING } - when:"poll the task that is queued and fail the task" + when: "poll the task that is queued and fail the task" def polledIntegrationTask1Try1 = workflowTestUtil.pollAndFailTask('integration_task_1', 'task1.integration.worker', 'failed..') - then:"The workflow ends up in a failed state" + then: "The workflow ends up in a failed state" verifyPolledAndAcknowledgedTask(polledIntegrationTask1Try1) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)){ + with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { status == Workflow.WorkflowStatus.FAILED tasks[0].status == Task.Status.FAILED tasks[0].taskType == 'integration_task_1' } - when:"Rewind the workflow which is in the failed state without the latest definition" + when: "Rewind the workflow which is in the failed state without the latest definition" workflowExecutor.rewind(workflowInstanceId, false) - then:"verify that the rewound workflow is in a running state" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)){ + then: "verify that the rewound workflow is in a running state" + with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { status == Workflow.WorkflowStatus.RUNNING } - when:"Poll for the 'integration_task_1' " + when: "Poll for the 'integration_task_1' " def polledIntegrationTask1Try2 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker') - then:"verify that the task is polled and the workflow is in a running state" + then: "verify that the task is polled and the workflow is in a running state" verifyPolledAndAcknowledgedTask(polledIntegrationTask1Try2) with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { status == Workflow.WorkflowStatus.RUNNING @@ -448,22 +430,23 @@ class SimpleWorkflowSpec extends Specification { def "Test Simple workflow restart with the latest definition"() { - setup:"Register a task definition with no retries" + setup: "Register a task definition with no retries" def persistedTask1Definition = workflowTestUtil.getPersistedTaskDefinition('integration_task_1').get() def modifiedTaskDefinition = new 
TaskDef(persistedTask1Definition.name, persistedTask1Definition.description, - 0, persistedTask1Definition.timeoutSeconds) + persistedTask1Definition.ownerEmail, 0, persistedTask1Definition.timeoutSeconds, + persistedTask1Definition.responseTimeoutSeconds) metadataService.updateTaskDef(modifiedTaskDefinition) - when:"Get the workflow definition associated with the simple workflow" + when: "Get the workflow definition associated with the simple workflow" WorkflowDef workflowDefinition = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1) - then:"Ensure that there is a workflow definition" + then: "Ensure that there is a workflow definition" workflowDefinition workflowDefinition.failureWorkflow StringUtils.isNotBlank(workflowDefinition.failureWorkflow) - when:"Start a simple workflow with non null params" - def correlationId = 'integration_test_1'+ UUID.randomUUID().toString() + when: "Start a simple workflow with non null params" + def correlationId = 'integration_test_1' + UUID.randomUUID().toString() def workflowInput = new HashMap() String inputParam1 = 'p1 value' workflowInput['param1'] = inputParam1 @@ -473,15 +456,15 @@ class SimpleWorkflowSpec extends Specification { correlationId, workflowInput, null, null, null) - then:"A workflow instance has started" - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)){ + then: "A workflow instance has started" + with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { status == Workflow.WorkflowStatus.RUNNING } - when:"poll the task that is queued and fail the task" + when: "poll the task that is queued and fail the task" def polledIntegrationTask1Try1 = workflowTestUtil.pollAndFailTask('integration_task_1', 'task1.integration.worker', 'failed..') - then:"the workflow ends up in a failed state" + then: "the workflow ends up in a failed state" verifyPolledAndAcknowledgedTask(polledIntegrationTask1Try1) with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { status == Workflow.WorkflowStatus.FAILED @@ -489,7 +472,7 @@ class SimpleWorkflowSpec extends Specification { tasks[0].taskType == 'integration_task_1' } - when:"A new version of the workflow definition is registered" + when: "A new version of the workflow definition is registered" WorkflowTask workflowTask = new WorkflowTask() workflowTask.name = 'integration_task_20' workflowTask.taskReferenceName = 'task_added' @@ -499,18 +482,18 @@ class SimpleWorkflowSpec extends Specification { workflowDefinition.version = 2 metadataService.updateWorkflowDef(workflowDefinition) - and:"rewind/restart the workflow with the latest workflow definition" + and: "rewind/restart the workflow with the latest workflow definition" workflowExecutor.rewind(workflowInstanceId, true) - then:"verify that the rewound workflow is in a running state" + then: "verify that the rewound workflow is in a running state" with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { status == Workflow.WorkflowStatus.RUNNING } - when:"Poll and complete the 'integration_task_1' " + when: "Poll and complete the 'integration_task_1' " def polledIntegrationTask1Try2 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker') - then:"verify that the task is polled and the workflow is in a running state" + then: "verify that the task is polled and the workflow is in a running state" verifyPolledAndAcknowledgedTask(polledIntegrationTask1Try2) with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { 
status == Workflow.WorkflowStatus.RUNNING @@ -518,7 +501,7 @@ class SimpleWorkflowSpec extends Specification { tasks[0].taskType == 'integration_task_1' } - when:"Poll and complete the 'integration_task_2' " + when: "Poll and complete the 'integration_task_2' " def polledIntegrationTask2 = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task1.integration.worker') then: "verify that the task is polled and acknowledged" @@ -530,7 +513,7 @@ class SimpleWorkflowSpec extends Specification { when: "Poll and complete the 'integration_task_20' " def polledIntegrationTask20Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_20', 'task1.integration.worker') - then:"verify that the task is polled and acknowledged" + then: "verify that the task is polled and acknowledged" verifyPolledAndAcknowledgedTask(polledIntegrationTask20Try1) def polledIntegrationTask20 = polledIntegrationTask20Try1[0] as Task polledIntegrationTask20.workflowInstanceId == workflowInstanceId @@ -546,14 +529,15 @@ class SimpleWorkflowSpec extends Specification { } def "Test simple workflow with task retries"() { - setup:"Change the task definition to ensure that it has retries and delay between retries" + setup: "Change the task definition to ensure that it has retries and delay between retries" def integrationTask2Definition = workflowTestUtil.getPersistedTaskDefinition('integration_task_2').get() def modifiedTaskDefinition = new TaskDef(integrationTask2Definition.name, integrationTask2Definition.description, - 3, integrationTask2Definition.timeoutSeconds) + integrationTask2Definition.ownerEmail, 3, integrationTask2Definition.timeoutSeconds, + integrationTask2Definition.responseTimeoutSeconds) modifiedTaskDefinition.retryDelaySeconds = 2 metadataService.updateTaskDef(modifiedTaskDefinition) - when:"A new simple workflow is started" + when: "A new simple workflow is started" def correlationId = 'integration_test_1' def workflowInput = new HashMap() workflowInput['param1'] = 'p1 value' @@ -562,20 +546,20 @@ class SimpleWorkflowSpec extends Specification { correlationId, workflowInput, null, null, null) - then:"verify that the workflow has started" + then: "verify that the workflow has started" workflowInstanceId def workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true) workflow.status == Workflow.WorkflowStatus.RUNNING - when:"Poll for the first task and complete the task" + when: "Poll for the first task and complete the task" def polledIntegrationTask1 = workflowExecutionService.poll('integration_task_1', 'task1.integration.worker') def ackPolledIntegrationTask1 = workflowExecutionService.ackTaskReceived(polledIntegrationTask1.taskId) polledIntegrationTask1.status = Task.Status.COMPLETED - def polledIntegrationTask1Output = "task1.output -> " + polledIntegrationTask1.inputData['p1'] +"."+ polledIntegrationTask1.inputData['p2'] + def polledIntegrationTask1Output = "task1.output -> " + polledIntegrationTask1.inputData['p1'] + "." 
+ polledIntegrationTask1.inputData['p2'] polledIntegrationTask1.outputData['op'] = polledIntegrationTask1Output workflowExecutionService.updateTask(polledIntegrationTask1) - then:"verify that the 'integration_task_1' is polled and completed" + then: "verify that the 'integration_task_1' is polled and completed" ackPolledIntegrationTask1 with(polledIntegrationTask1) { inputData.containsKey('p1') @@ -585,22 +569,22 @@ class SimpleWorkflowSpec extends Specification { } //Need to figure out how to use expect and where here - when:" 'integration_task_2' is polled and marked as failed for the first time" + when: " 'integration_task_2' is polled and marked as failed for the first time" Tuple polledAndFailedTaskTry1 = workflowTestUtil.pollAndFailTask('integration_task_2', 'task2.integration.worker', 'failure...0', null, 2) - then:"verify that the task was polled and the input params of the tasks are as expected" + then: "verify that the task was polled and the input params of the tasks are as expected" verifyPolledAndAcknowledgedTask(polledAndFailedTaskTry1, ['tp2': polledIntegrationTask1Output, 'tp1': 'p1 value']) - when:" 'integration_task_2' is polled and marked as failed for the second time" - Tuple polledAndFailedTaskTry2 = workflowTestUtil.pollAndFailTask('integration_task_2', 'task2.integration.worker', 'failure...0', null,2) + when: " 'integration_task_2' is polled and marked as failed for the second time" + Tuple polledAndFailedTaskTry2 = workflowTestUtil.pollAndFailTask('integration_task_2', 'task2.integration.worker', 'failure...0', null, 2) - then:"verify that the task was polled and the input params of the tasks are as expected" + then: "verify that the task was polled and the input params of the tasks are as expected" verifyPolledAndAcknowledgedTask(polledAndFailedTaskTry2, ['tp2': polledIntegrationTask1Output, 'tp1': 'p1 value']) - when:"'integration_task_2' is polled and marked as completed for the third time" + when: "'integration_task_2' is polled and marked as completed for the third time" def polledAndCompletedTry3 = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker') - then:"verify that the task was polled and the input params of the tasks are as expected" + then: "verify that the task was polled and the input params of the tasks are as expected" verifyPolledAndAcknowledgedTask(polledAndCompletedTry3, ['tp2': polledIntegrationTask1Output, 'tp1': 'p1 value']) with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { status == Workflow.WorkflowStatus.COMPLETED @@ -622,26 +606,27 @@ class SimpleWorkflowSpec extends Specification { } def "Test simple workflow with retry at workflow level"() { - setup:"Change the task definition to ensure that it has retries and no delay between retries" + setup: "Change the task definition to ensure that it has retries and no delay between retries" def integrationTask1Definition = workflowTestUtil.getPersistedTaskDefinition('integration_task_1').get() def modifiedTaskDefinition = new TaskDef(integrationTask1Definition.name, integrationTask1Definition.description, - 1, integrationTask1Definition.timeoutSeconds) + integrationTask1Definition.ownerEmail, 1, integrationTask1Definition.timeoutSeconds, + integrationTask1Definition.responseTimeoutSeconds) modifiedTaskDefinition.retryDelaySeconds = 0 metadataService.updateTaskDef(modifiedTaskDefinition) - when:"Start a simple workflow with non null params" - def correlationId = 'retry_test'+ UUID.randomUUID().toString() + when: "Start a simple workflow 
with non null params" + def correlationId = 'retry_test' + UUID.randomUUID().toString() def workflowInput = new HashMap() String inputParam1 = 'p1 value' workflowInput['param1'] = inputParam1 workflowInput['param2'] = 'p2 value' - and:"start a simple workflow with input params" + and: "start a simple workflow with input params" def workflowInstanceId = workflowExecutor.startWorkflow(LINEAR_WORKFLOW_T1_T2, 1, correlationId, workflowInput, null, null, null) - then:"verify that the workflow has started and the next task is scheduled" + then: "verify that the workflow has started and the next task is scheduled" workflowInstanceId with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { status == Workflow.WorkflowStatus.RUNNING @@ -654,10 +639,10 @@ class SimpleWorkflowSpec extends Specification { StringUtils.isNotBlank(failureWorkflow) } - when:"The first task 'integration_task_1' is polled and failed" + when: "The first task 'integration_task_1' is polled and failed" Tuple polledAndFailedTask1Try1 = workflowTestUtil.pollAndFailTask('integration_task_1', 'task1.integration.worker', 'failure...0') - then:"verify that the task was polled and acknowledged and the workflow is still in a running state" + then: "verify that the task was polled and acknowledged and the workflow is still in a running state" verifyPolledAndAcknowledgedTask(polledAndFailedTask1Try1) with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { status == Workflow.WorkflowStatus.RUNNING @@ -667,10 +652,10 @@ class SimpleWorkflowSpec extends Specification { tasks[1].getInputData().get("p3") == tasks[1].getTaskId() } - when:"The first task 'integration_task_1' is polled and failed for the second time" + when: "The first task 'integration_task_1' is polled and failed for the second time" Tuple polledAndFailedTask1Try2 = workflowTestUtil.pollAndFailTask('integration_task_1', 'task1.integration.worker', 'failure...0') - then:"verify that the task was polled and acknowledged and the workflow is still in a running state" + then: "verify that the task was polled and acknowledged and the workflow is still in a running state" verifyPolledAndAcknowledgedTask(polledAndFailedTask1Try2) with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { status == Workflow.WorkflowStatus.FAILED @@ -679,7 +664,7 @@ class SimpleWorkflowSpec extends Specification { tasks[1].status == Task.Status.FAILED } - when:"The workflow is retried" + when: "The workflow is retried" workflowExecutor.retry(workflowInstanceId) then: @@ -692,10 +677,10 @@ class SimpleWorkflowSpec extends Specification { tasks[2].getInputData().get("p3") == tasks[2].getTaskId() } - when:"The 'integration_task_1' task is polled and is completed" + when: "The 'integration_task_1' task is polled and is completed" def polledAndCompletedTry3 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task2.integration.worker') - then:"verify that the task was polled and acknowledged" + then: "verify that the task was polled and acknowledged" verifyPolledAndAcknowledgedTask(polledAndCompletedTry3) with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { status == Workflow.WorkflowStatus.RUNNING @@ -704,10 +689,10 @@ class SimpleWorkflowSpec extends Specification { tasks[3].status == Task.Status.SCHEDULED } - when:"The 'integration_task_2' task is polled and is completed" + when: "The 'integration_task_2' task is polled and is completed" def polledAndCompletedTaskTry1 = 
workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker') - then:"verify that the task was polled and acknowledged" + then: "verify that the task was polled and acknowledged" verifyPolledAndAcknowledgedTask(polledAndCompletedTaskTry1) with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { status == Workflow.WorkflowStatus.COMPLETED @@ -721,13 +706,13 @@ class SimpleWorkflowSpec extends Specification { } def "Test Long running simple workflow"() { - given:"A new simple workflow is started" + given: "A new simple workflow is started" def correlationId = 'integration_test_1' def workflowInput = new HashMap() workflowInput['param1'] = 'p1 value' workflowInput['param2'] = 'p2 value' - when:"start a new workflow with the input" + when: "start a new workflow with the input" def workflowInstanceId = workflowExecutor.startWorkflow(LINEAR_WORKFLOW_T1_T2, 1, correlationId, workflowInput, null, null, null) @@ -747,47 +732,47 @@ class SimpleWorkflowSpec extends Specification { pollTaskTry1.status = Task.Status.IN_PROGRESS workflowExecutionService.updateTask(pollTaskTry1) - then:"verify that the task is polled and acknowledged" + then: "verify that the task is polled and acknowledged" pollTaskTry1 ackReceivedTaskTry1 - and:"the input data of the data is as expected" + and: "the input data of the data is as expected" pollTaskTry1.inputData.containsKey('p1') pollTaskTry1.inputData['p1'] == 'p1 value' pollTaskTry1.inputData.containsKey('p2') pollTaskTry1.inputData['p1'] == 'p1 value' - and:"the task queue reflects the presence of 'integration_task_1' " + and: "the task queue reflects the presence of 'integration_task_1' " workflowExecutionService.getTaskQueueSizes(['integration_task_1']).get('integration_task_1') == 1 - when:"the 'integration_task_1' task is polled again" + when: "the 'integration_task_1' task is polled again" def pollTaskTry2 = workflowExecutionService.poll('integration_task_1', 'task1.integration.worker') - then:"verify that there was no task polled" + then: "verify that there was no task polled" !pollTaskTry2 - when:"the 'integration_task_1' is polled again after a delay of 5 seconds and completed" + when: "the 'integration_task_1' is polled again after a delay of 5 seconds and completed" Thread.sleep(5000) def task1Try3Tuple = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - then:"verify that the task is polled and acknowledged" + then: "verify that the task is polled and acknowledged" verifyPolledAndAcknowledgedTask(task1Try3Tuple, [:]) - and:"verify that the workflow is updated with the latest task" + and: "verify that the workflow is updated with the latest task" with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { tasks[0].status == Task.Status.COMPLETED tasks[0].taskType == 'integration_task_1' tasks[0].outputData['op'] == 'task1.done' } - when:"the 'integration_task_1' is polled and completed" + when: "the 'integration_task_1' is polled and completed" def task2Try1Tuple = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker') - then:"verify that the task was polled and completed with the expected inputData for the task that was polled" + then: "verify that the task was polled and completed with the expected inputData for the task that was polled" verifyPolledAndAcknowledgedTask(task2Try1Tuple, ['tp2': 'task1.done', 'tp1': 'p1 value']) - and:"The workflow is in a completed state and reflects the tasks that are 
completed" + and: "The workflow is in a completed state and reflects the tasks that are completed" with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { status == Workflow.WorkflowStatus.COMPLETED tasks.size() == 2 @@ -801,13 +786,13 @@ class SimpleWorkflowSpec extends Specification { def "Test simple workflow when the task's call back after seconds are reset"() { - given:"A new simple workflow is started" + given: "A new simple workflow is started" def correlationId = 'integration_test_1' def workflowInput = new HashMap() workflowInput['param1'] = 'p1 value' workflowInput['param2'] = 'p2 value' - when:"start a new workflow with the input" + when: "start a new workflow with the input" def workflowInstanceId = workflowExecutor.startWorkflow(LINEAR_WORKFLOW_T1_T2, 1, correlationId, workflowInput, null, null, null) @@ -829,56 +814,56 @@ class SimpleWorkflowSpec extends Specification { pollTaskTry1.status = Task.Status.IN_PROGRESS workflowExecutionService.updateTask(pollTaskTry1) - then:"verify that the task is polled and acknowledged" + then: "verify that the task is polled and acknowledged" pollTaskTry1 ackReceivedTaskTry1 - and:"the input data of the data is as expected" + and: "the input data of the data is as expected" pollTaskTry1.inputData.containsKey('p1') pollTaskTry1.inputData['p1'] == 'p1 value' pollTaskTry1.inputData.containsKey('p2') pollTaskTry1.inputData['p1'] == 'p1 value' - and:"the task queue reflects the presence of 'integration_task_1' " + and: "the task queue reflects the presence of 'integration_task_1' " workflowExecutionService.getTaskQueueSizes(['integration_task_1']).get('integration_task_1') == 1 - when:"the 'integration_task_1' task is polled again" + when: "the 'integration_task_1' task is polled again" def pollTaskTry2 = workflowExecutionService.poll('integration_task_1', 'task1.integration.worker') - then:"verify that there was no task polled" + then: "verify that there was no task polled" !pollTaskTry2 - when:"the 'integration_task_1' task is polled again" + when: "the 'integration_task_1' task is polled again" def pollTaskTry3 = workflowExecutionService.poll('integration_task_1', 'task1.integration.worker') - then:"verify that there was no task polled" + then: "verify that there was no task polled" !pollTaskTry3 - when:"The callbackSeconds of the tasks in progress for the workflow are reset" + when: "The callbackSeconds of the tasks in progress for the workflow are reset" workflowExecutor.resetCallbacksForWorkflow(workflowInstanceId) - and:"the 'integration_task_1' is polled again after all the in progress tasks are reset" + and: "the 'integration_task_1' is polled again after all the in progress tasks are reset" def task1Try4Tuple = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - then:"verify that the task is polled and acknowledged" + then: "verify that the task is polled and acknowledged" verifyPolledAndAcknowledgedTask(task1Try4Tuple) - and:"verify that the workflow is updated with the latest task" + and: "verify that the workflow is updated with the latest task" with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { tasks[0].status == Task.Status.COMPLETED tasks[0].taskType == 'integration_task_1' tasks[0].outputData['op'] == 'task1.done' } - when:"the 'integration_task_1' is polled and completed" + when: "the 'integration_task_1' is polled and completed" def task2Try1Tuple = workflowTestUtil.pollAndCompleteTask('integration_task_2', 
'task2.integration.worker') - then:"verify that the task was polled and completed with the expected inputData for the task that was polled" + then: "verify that the task was polled and completed with the expected inputData for the task that was polled" verifyPolledAndAcknowledgedTask(task2Try1Tuple, ['tp2': 'task1.done', 'tp1': 'p1 value']) - and:"The workflow is in a completed state and reflects the tasks that are completed" + and: "The workflow is in a completed state and reflects the tasks that are completed" with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { status == Workflow.WorkflowStatus.COMPLETED tasks.size() == 2 @@ -890,10 +875,11 @@ class SimpleWorkflowSpec extends Specification { } def "Test non restartable simple workflow"() { - setup:"Change the task definition to ensure that it has no retries and register a non restartable workflow" + setup: "Change the task definition to ensure that it has no retries and register a non restartable workflow" def integrationTask1Definition = workflowTestUtil.getPersistedTaskDefinition('integration_task_1').get() def modifiedTaskDefinition = new TaskDef(integrationTask1Definition.name, integrationTask1Definition.description, - 0, integrationTask1Definition.timeoutSeconds) + integrationTask1Definition.ownerEmail, 0, integrationTask1Definition.timeoutSeconds, + integrationTask1Definition.responseTimeoutSeconds) metadataService.updateTaskDef(modifiedTaskDefinition) def simpleWorkflowDefinition = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1) @@ -901,7 +887,7 @@ class SimpleWorkflowSpec extends Specification { simpleWorkflowDefinition.restartable = false metadataService.updateWorkflowDef(simpleWorkflowDefinition) - when:"A non restartable workflow is started" + when: "A non restartable workflow is started" def correlationId = 'integration_test_1' def workflowInput = new HashMap() workflowInput['param1'] = 'p1 value' @@ -911,40 +897,40 @@ class SimpleWorkflowSpec extends Specification { correlationId, workflowInput, null, null, null) - and:"the 'integration_task_1' is polled and failed" + and: "the 'integration_task_1' is polled and failed" Tuple polledAndFailedTaskTry1 = workflowTestUtil.pollAndFailTask('integration_task_1', 'task1.integration.worker', 'failure...0') - then:"verify that the task was polled and acknowledged" + then: "verify that the task was polled and acknowledged" verifyPolledAndAcknowledgedTask(polledAndFailedTaskTry1) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)){ + with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { status == Workflow.WorkflowStatus.FAILED tasks[0].status == Task.Status.FAILED tasks[0].taskType == 'integration_task_1' } - when:"The failed workflow is rewound" + when: "The failed workflow is rewound" workflowExecutor.rewind(workflowInstanceId, false) - and:"The first task 'integration_task_1' is polled and completed" + and: "The first task 'integration_task_1' is polled and completed" def task1Try2 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker', ['op': 'task1.done']) - then:"Verify that the task is polled and acknowledged" + then: "Verify that the task is polled and acknowledged" verifyPolledAndAcknowledgedTask(task1Try2) - with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)){ + with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { status == Workflow.WorkflowStatus.RUNNING tasks[0].status == Task.Status.COMPLETED tasks[0].taskType 
== 'integration_task_1' } - when:"The second task 'integration_task_2' is polled and completed" + when: "The second task 'integration_task_2' is polled and completed" def task2Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker') - then:"Verify that the task was polled and acknowledged" + then: "Verify that the task was polled and acknowledged" verifyPolledAndAcknowledgedTask(task2Try1, ['tp2': 'task1.done', 'tp1': 'p1 value']) - and:"The workflow is in a completed state and reflects the tasks that are completed" + and: "The workflow is in a completed state and reflects the tasks that are completed" with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { status == Workflow.WorkflowStatus.COMPLETED tasks.size() == 2 @@ -955,14 +941,14 @@ class SimpleWorkflowSpec extends Specification { output['o3'] == 'task1.done' } - when:"The successfully completed non restartable workflow is rewound" + when: "The successfully completed non restartable workflow is rewound" workflowExecutor.rewind(workflowInstanceId, false) - then:"Ensure that an exception is thrown" + then: "Ensure that an exception is thrown" def exceptionThrown = thrown(ApplicationException) exceptionThrown - cleanup:"clean up the changes made to the task and workflow definition during start up" + cleanup: "clean up the changes made to the task and workflow definition during start up" metadataService.updateTaskDef(integrationTask1Definition) simpleWorkflowDefinition.name = LINEAR_WORKFLOW_T1_T2 simpleWorkflowDefinition.restartable = true diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/SubWorkflowSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/SubWorkflowSpec.groovy index 30d2e93311..0c0ecc2f14 100644 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/SubWorkflowSpec.groovy +++ b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/SubWorkflowSpec.groovy @@ -13,51 +13,31 @@ package com.netflix.conductor.test.integration import com.netflix.conductor.common.metadata.tasks.Task -import com.netflix.conductor.common.metadata.workflow.TaskType +import com.netflix.conductor.common.metadata.tasks.TaskType import com.netflix.conductor.common.metadata.workflow.WorkflowDef import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.core.execution.WorkflowExecutor import com.netflix.conductor.core.execution.WorkflowRepairService import com.netflix.conductor.core.execution.WorkflowSweeper -import com.netflix.conductor.core.execution.tasks.SystemTaskWorkerCoordinator +import com.netflix.conductor.core.execution.tasks.SubWorkflow import com.netflix.conductor.core.execution.tasks.WorkflowSystemTask import com.netflix.conductor.dao.QueueDAO -import com.netflix.conductor.service.ExecutionService -import com.netflix.conductor.service.MetadataService -import com.netflix.conductor.test.util.WorkflowTestUtil -import com.netflix.conductor.tests.utils.TestModule -import com.netflix.governator.guice.test.ModulesForTesting +import com.netflix.conductor.test.base.AbstractSpecification +import org.springframework.beans.factory.annotation.Autowired import spock.lang.Shared -import spock.lang.Specification - -import javax.inject.Inject import static com.netflix.conductor.test.util.WorkflowTestUtil.verifyPolledAndAcknowledgedTask -@ModulesForTesting([TestModule.class]) -class SubWorkflowSpec extends Specification { - - @Inject - ExecutionService workflowExecutionService - - @Inject - 
MetadataService metadataService +class SubWorkflowSpec extends AbstractSpecification { - @Inject - WorkflowExecutor workflowExecutor + @Autowired + QueueDAO queueDAO - @Inject + @Autowired WorkflowSweeper workflowSweeper - @Inject + @Autowired WorkflowRepairService workflowRepairService - @Inject - WorkflowTestUtil workflowTestUtil - - @Inject - QueueDAO queueDAO - @Shared def WORKFLOW_WITH_SUBWORKFLOW = 'integration_test_wf_with_sub_wf' @@ -69,10 +49,6 @@ class SubWorkflowSpec extends Specification { 'workflow_with_sub_workflow_1_integration_test.json') } - def cleanup() { - workflowTestUtil.clearWorkflows() - } - def "Test retrying a subworkflow where parent workflow timed out due to workflowTimeout"() { setup: "Register a workflow definition with a timeout policy set to timeout workflow" @@ -85,6 +61,7 @@ class SubWorkflowSpec extends Specification { modifiedWorkflowDefinition.outputParameters = persistedWorkflowDefinition.outputParameters modifiedWorkflowDefinition.timeoutPolicy = WorkflowDef.TimeoutPolicy.TIME_OUT_WF modifiedWorkflowDefinition.timeoutSeconds = 10 + modifiedWorkflowDefinition.ownerEmail = persistedWorkflowDefinition.ownerEmail metadataService.updateWorkflowDef([modifiedWorkflowDefinition]) and: "an existing workflow with subworkflow and registered definitions" @@ -128,8 +105,7 @@ class SubWorkflowSpec extends Specification { when: "the subworkflow is started by issuing a system task call" List polledTaskIds = queueDAO.pop("SUB_WORKFLOW", 1, 200) - WorkflowSystemTask systemTask = SystemTaskWorkerCoordinator.taskNameWorkflowTaskMapping.get("SUB_WORKFLOW") - workflowExecutor.executeSystemTask(systemTask, polledTaskIds.get(0), 30) + workflowExecutor.executeSystemTask(WorkflowSystemTask.get(SubWorkflow.NAME), polledTaskIds.get(0), 30) then: "verify that the 'sub_workflow_task' is in a IN_PROGRESS" with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { @@ -271,9 +247,8 @@ class SubWorkflowSpec extends Specification { } when: "Polled for and executed subworkflow task" - List polledTaskIds = queueDAO.pop("SUB_WORKFLOW", 1, 200); - WorkflowSystemTask systemTask = SystemTaskWorkerCoordinator.taskNameWorkflowTaskMapping.get("SUB_WORKFLOW") - workflowExecutor.executeSystemTask(systemTask, polledTaskIds.get(0), 30) + List polledTaskIds = queueDAO.pop("SUB_WORKFLOW", 1, 200) + workflowExecutor.executeSystemTask(WorkflowSystemTask.get(SubWorkflow.NAME), polledTaskIds.get(0), 30) def workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true) def subWorkflowId = workflow.tasks[1].subWorkflowId @@ -296,7 +271,7 @@ class SubWorkflowSpec extends Specification { } when: "subworkflow is terminated" - def terminateReason = "terminating from a test case" + def terminateReason = "terminating from a test case" workflowExecutor.terminateWorkflow(subWorkflowId, terminateReason) then: "verify that sub workflow is in terminated state" diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/SystemTaskSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/SystemTaskSpec.groovy index 41b241af73..5e9b292e95 100644 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/SystemTaskSpec.groovy +++ b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/SystemTaskSpec.groovy @@ -15,52 +15,28 @@ package com.netflix.conductor.test.integration import com.netflix.conductor.common.metadata.tasks.Task import com.netflix.conductor.common.metadata.tasks.TaskResult import 
com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.core.execution.WorkflowExecutor import com.netflix.conductor.core.execution.WorkflowRepairService import com.netflix.conductor.core.execution.WorkflowSweeper import com.netflix.conductor.core.execution.tasks.SystemTaskWorkerCoordinator import com.netflix.conductor.core.execution.tasks.WorkflowSystemTask import com.netflix.conductor.dao.QueueDAO -import com.netflix.conductor.service.ExecutionService -import com.netflix.conductor.service.MetadataService -import com.netflix.conductor.test.util.WorkflowTestUtil -import com.netflix.conductor.tests.utils.TestModule -import com.netflix.conductor.tests.utils.UserTask -import com.netflix.governator.guice.test.ModulesForTesting +import com.netflix.conductor.test.base.AbstractSpecification +import org.springframework.beans.factory.annotation.Autowired import spock.lang.Shared -import spock.lang.Specification - -import javax.inject.Inject import static com.netflix.conductor.test.util.WorkflowTestUtil.verifyPolledAndAcknowledgedTask -@ModulesForTesting([TestModule.class]) -class SystemTaskSpec extends Specification { - - @Inject - ExecutionService workflowExecutionService - - @Inject - MetadataService metadataService - - @Inject - WorkflowExecutor workflowExecutor - - @Inject - WorkflowTestUtil workflowTestUtil +class SystemTaskSpec extends AbstractSpecification { - @Inject + @Autowired QueueDAO queueDAO - @Inject + @Autowired WorkflowSweeper workflowSweeper - @Inject + @Autowired WorkflowRepairService workflowRepairService - @Inject - UserTask userTask - @Shared def ASYNC_COMPLETE_SYSTEM_TASK_WORKFLOW = 'async_complete_integration_test_wf' @@ -68,10 +44,6 @@ class SystemTaskSpec extends Specification { workflowTestUtil.registerWorkflows('simple_workflow_with_async_complete_system_task_integration_test.json') } - def cleanup() { - workflowTestUtil.clearWorkflows() - } - def "Test system task with asyncComplete set to true"() { given: "An existing workflow definition with async complete system task" @@ -89,7 +61,7 @@ class SystemTaskSpec extends Specification { correlationId, input, null, null, null) then: "ensure that the workflow has started" - with (workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { + with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { status == Workflow.WorkflowStatus.RUNNING tasks.size() == 1 tasks[0].taskType == 'integration_task_1' diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/TaskLimitsWorkflowSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/TaskLimitsWorkflowSpec.groovy index 7f213576c1..75b90386b5 100644 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/TaskLimitsWorkflowSpec.groovy +++ b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/TaskLimitsWorkflowSpec.groovy @@ -1,53 +1,32 @@ /* * Copyright 2020 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *
+ * http://www.apache.org/licenses/LICENSE-2.0 + *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.test.integration import com.netflix.conductor.common.metadata.tasks.Task import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.core.execution.WorkflowExecutor +import com.netflix.conductor.core.execution.tasks.WorkflowSystemTask import com.netflix.conductor.dao.QueueDAO -import com.netflix.conductor.service.ExecutionService -import com.netflix.conductor.test.util.WorkflowTestUtil -import com.netflix.conductor.tests.utils.TestModule -import com.netflix.conductor.tests.utils.UserTask -import com.netflix.governator.guice.test.ModulesForTesting -import spock.lang.Specification - -import javax.inject.Inject +import com.netflix.conductor.test.base.AbstractSpecification +import com.netflix.conductor.test.utils.UserTask +import org.springframework.beans.factory.annotation.Autowired import static com.netflix.conductor.test.util.WorkflowTestUtil.verifyPolledAndAcknowledgedTask -@ModulesForTesting([TestModule.class]) -class TaskLimitsWorkflowSpec extends Specification { - - @Inject - ExecutionService workflowExecutionService - - @Inject - WorkflowExecutor workflowExecutor - - @Inject - WorkflowTestUtil workflowTestUtil +class TaskLimitsWorkflowSpec extends AbstractSpecification { - @Inject + @Autowired QueueDAO queueDAO - @Inject - UserTask userTask - def RATE_LIMITED_SYSTEM_TASK_WORKFLOW = 'test_rate_limit_system_task_workflow' def RATE_LIMITED_SIMPLE_TASK_WORKFLOW = 'test_rate_limit_simple_task_workflow' def CONCURRENCY_EXECUTION_LIMITED_WORKFLOW = 'test_concurrency_limits_workflow' @@ -60,10 +39,6 @@ class TaskLimitsWorkflowSpec extends Specification { ) } - def cleanup() { - workflowTestUtil.clearWorkflows() - } - def "Verify that the rate limiting for system tasks is honored"() { when: "Start a workflow that has a rate limited system task in it" def workflowInstanceId = workflowExecutor.startWorkflow(RATE_LIMITED_SYSTEM_TASK_WORKFLOW, 1, @@ -79,7 +54,7 @@ class TaskLimitsWorkflowSpec extends Specification { when: "Execute the user task" def scheduledTask1 = workflowExecutionService.getExecutionStatus(workflowInstanceId, true).tasks[0] - workflowExecutor.executeSystemTask(userTask, scheduledTask1.taskId, 30) + workflowExecutor.executeSystemTask(WorkflowSystemTask.get(UserTask.NAME), scheduledTask1.taskId, 30) then: "Verify the state of the workflow is completed" with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { @@ -103,7 +78,7 @@ class TaskLimitsWorkflowSpec extends Specification { when: "Execute the user task on the second workflow" def scheduledTask2 = workflowExecutionService.getExecutionStatus(workflowTwoInstanceId, true).tasks[0] - workflowExecutor.executeSystemTask(userTask, scheduledTask2.taskId, 30) + workflowExecutor.executeSystemTask(WorkflowSystemTask.get(UserTask.NAME), scheduledTask2.taskId, 30) then: "Verify the state of the workflow is still in running state" with(workflowExecutionService.getExecutionStatus(workflowTwoInstanceId, true)) { diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/WaitTaskSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/WaitTaskSpec.groovy index bc575894f2..5c767237dc 100644 --- 
a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/WaitTaskSpec.groovy +++ b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/WaitTaskSpec.groovy @@ -1,59 +1,33 @@ /* * Copyright 2020 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *
+ * http://www.apache.org/licenses/LICENSE-2.0 + *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.test.integration import com.netflix.conductor.common.metadata.tasks.Task import com.netflix.conductor.common.metadata.tasks.TaskResult -import com.netflix.conductor.common.metadata.workflow.TaskType +import com.netflix.conductor.common.metadata.tasks.TaskType import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.core.execution.WorkflowExecutor -import com.netflix.conductor.service.ExecutionService -import com.netflix.conductor.test.util.WorkflowTestUtil -import com.netflix.conductor.tests.utils.TestModule -import com.netflix.governator.guice.test.ModulesForTesting +import com.netflix.conductor.test.base.AbstractSpecification import spock.lang.Shared -import spock.lang.Specification - -import javax.inject.Inject import static com.netflix.conductor.test.util.WorkflowTestUtil.verifyPolledAndAcknowledgedTask -@ModulesForTesting([TestModule.class]) -class WaitTaskSpec extends Specification { - - @Inject - ExecutionService workflowExecutionService - - @Inject - WorkflowExecutor workflowExecutor - - @Inject - WorkflowTestUtil workflowTestUtil +class WaitTaskSpec extends AbstractSpecification { @Shared def WAIT_BASED_WORKFLOW = 'test_wait_workflow' def setup() { - workflowTestUtil.registerWorkflows( - 'wait_workflow_integration_test.json' - ) - } - - def cleanup() { - workflowTestUtil.clearWorkflows() + workflowTestUtil.registerWorkflows('wait_workflow_integration_test.json') } def "Verify that a wait based simple workflow is executed"() { @@ -69,12 +43,12 @@ class WaitTaskSpec extends Specification { tasks[0].status == Task.Status.IN_PROGRESS } - when:"The wait task is completed" + when: "The wait task is completed" def waitTask = workflowExecutionService.getExecutionStatus(workflowInstanceId, true).tasks[0] waitTask.status = Task.Status.COMPLETED workflowExecutor.updateTask(new TaskResult(waitTask)) - then:"ensure that the wait task is completed and the next task is scheduled" + then: "ensure that the wait task is completed and the next task is scheduled" with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { status == Workflow.WorkflowStatus.RUNNING tasks.size() == 2 @@ -84,10 +58,10 @@ class WaitTaskSpec extends Specification { tasks[1].status == Task.Status.SCHEDULED } - when:"The integration_task_1 is polled and completed" + when: "The integration_task_1 is polled and completed" def polledAndCompletedTry1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker') - then:"verify that the task was polled and completed and the workflow is in a complete state" + then: "verify that the task was polled and completed and the workflow is in a complete state" verifyPolledAndAcknowledgedTask(polledAndCompletedTry1) with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { status == Workflow.WorkflowStatus.COMPLETED diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/WorkflowAndTaskConfigurationSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/WorkflowAndTaskConfigurationSpec.groovy index 72b86accb1..4862696249 100644 --- 
a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/WorkflowAndTaskConfigurationSpec.groovy +++ b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/WorkflowAndTaskConfigurationSpec.groovy @@ -1,25 +1,22 @@ /* * Copyright 2020 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *
+ * http://www.apache.org/licenses/LICENSE-2.0 + *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.test.integration import com.netflix.conductor.common.metadata.tasks.Task import com.netflix.conductor.common.metadata.tasks.TaskDef import com.netflix.conductor.common.metadata.tasks.TaskResult +import com.netflix.conductor.common.metadata.tasks.TaskType import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest -import com.netflix.conductor.common.metadata.workflow.TaskType import com.netflix.conductor.common.metadata.workflow.WorkflowDef import com.netflix.conductor.common.metadata.workflow.WorkflowTask import com.netflix.conductor.common.run.Workflow @@ -27,42 +24,23 @@ import com.netflix.conductor.core.execution.WorkflowExecutor import com.netflix.conductor.core.execution.WorkflowRepairService import com.netflix.conductor.core.execution.WorkflowSweeper import com.netflix.conductor.dao.QueueDAO -import com.netflix.conductor.service.ExecutionService -import com.netflix.conductor.service.MetadataService -import com.netflix.conductor.test.util.WorkflowTestUtil -import com.netflix.conductor.tests.utils.TestModule -import com.netflix.governator.guice.test.ModulesForTesting +import com.netflix.conductor.test.base.AbstractSpecification +import org.springframework.beans.factory.annotation.Autowired import spock.lang.Shared -import spock.lang.Specification - -import javax.inject.Inject import static com.netflix.conductor.test.util.WorkflowTestUtil.verifyPolledAndAcknowledgedTask -@ModulesForTesting([TestModule.class]) -class WorkflowAndTaskConfigurationSpec extends Specification { - - @Inject - ExecutionService workflowExecutionService - - @Inject - MetadataService metadataService +class WorkflowAndTaskConfigurationSpec extends AbstractSpecification { - @Inject - WorkflowExecutor workflowExecutor + @Autowired + QueueDAO queueDAO - @Inject + @Autowired WorkflowSweeper workflowSweeper - @Inject + @Autowired WorkflowRepairService workflowRepairService - @Inject - WorkflowTestUtil workflowTestUtil - - @Inject - QueueDAO queueDAO - @Shared def LINEAR_WORKFLOW_T1_T2 = 'integration_test_wf' @@ -84,10 +62,6 @@ class WorkflowAndTaskConfigurationSpec extends Specification { 'simple_wait_task_workflow_integration_test.json') } - def cleanup() { - workflowTestUtil.clearWorkflows() - } - def "Test simple workflow which has an optional task"() { given: "A input parameters for a workflow with an optional task" @@ -165,7 +139,7 @@ class WorkflowAndTaskConfigurationSpec extends Specification { setup: "Register a task definition with retry policy on time out" def persistedTask1Definition = workflowTestUtil.getPersistedTaskDefinition('integration_task_1').get() def modifiedTaskDefinition = new TaskDef(persistedTask1Definition.name, persistedTask1Definition.description, - 1, 1) + persistedTask1Definition.ownerEmail, 1, 1, 1) modifiedTaskDefinition.retryDelaySeconds = 0 modifiedTaskDefinition.timeoutPolicy = TaskDef.TimeoutPolicy.RETRY metadataService.updateTaskDef(modifiedTaskDefinition) @@ -846,6 +820,7 @@ class WorkflowAndTaskConfigurationSpec extends Specification { body['outputPath'] = '${workflow.input.outputPath}' httpRequest['body'] = body templatedTask.inputTemplate['http_request'] = httpRequest + templatedTask.ownerEmail = "test@harness.com" 
metadataService.registerTaskDef(Arrays.asList(templatedTask)) and: "set a system property for STACK2" @@ -861,6 +836,7 @@ class WorkflowAndTaskConfigurationSpec extends Specification { templateWorkflowDef.setName("template_workflow") templateWorkflowDef.getTasks().add(workflowTask) templateWorkflowDef.setSchemaVersion(2) + templateWorkflowDef.setOwnerEmail("test@harness.com") metadataService.registerWorkflowDef(templateWorkflowDef) and: "the input to the workflow is curated" @@ -868,7 +844,7 @@ class WorkflowAndTaskConfigurationSpec extends Specification { requestDetails['key1'] = 'value1' requestDetails['key2'] = 42 - def input = new HashMap<>() + Map input = new HashMap<>() input['path1'] = 'file://path1' input['path2'] = 'file://path2' input['outputPath'] = 's3://bucket/outputPath' @@ -896,5 +872,4 @@ class WorkflowAndTaskConfigurationSpec extends Specification { tasks[0].inputData.get('http_request')['uri'] == '/get/something' } } - } diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/resiliency/QueueResiliencySpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/resiliency/QueueResiliencySpec.groovy index 2027a00ecc..a64efa3aae 100644 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/resiliency/QueueResiliencySpec.groovy +++ b/test-harness/src/test/groovy/com/netflix/conductor/test/resiliency/QueueResiliencySpec.groovy @@ -1,17 +1,14 @@ /* * Copyright 2020 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *
+ * http://www.apache.org/licenses/LICENSE-2.0 + *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.test.resiliency @@ -20,16 +17,11 @@ import com.netflix.conductor.common.metadata.tasks.TaskResult import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest import com.netflix.conductor.common.run.Workflow -import com.netflix.conductor.core.execution.ApplicationException -import com.netflix.conductor.dao.QueueDAO -import com.netflix.conductor.server.resources.TaskResource -import com.netflix.conductor.server.resources.WorkflowResource -import com.netflix.conductor.test.util.MockQueueDAOModule -import com.netflix.conductor.test.util.WorkflowTestUtil -import spock.guice.UseModules -import spock.lang.Specification - -import javax.inject.Inject +import com.netflix.conductor.core.exception.ApplicationException +import com.netflix.conductor.rest.controllers.TaskResource +import com.netflix.conductor.rest.controllers.WorkflowResource +import com.netflix.conductor.test.base.AbstractResiliencySpecification +import org.springframework.beans.factory.annotation.Autowired /** * When QueueDAO is unavailable, @@ -38,19 +30,12 @@ import javax.inject.Inject * 2. Succeeds * 3. Doesn't involve QueueDAO */ -@UseModules(MockQueueDAOModule) -class QueueResiliencySpec extends Specification { - - @Inject - WorkflowTestUtil workflowTestUtil +class QueueResiliencySpec extends AbstractResiliencySpecification { - @Inject - QueueDAO queueDAO - - @Inject + @Autowired WorkflowResource workflowResource - @Inject + @Autowired TaskResource taskResource def SIMPLE_TWO_TASK_WORKFLOW = 'integration_test_wf' @@ -61,11 +46,7 @@ class QueueResiliencySpec extends Specification { 'simple_workflow_1_integration_test.json' ) } - - def cleanup() { - workflowTestUtil.clearWorkflows() - } - + /// Workflow Resource endpoints def "Verify Start workflow fails when QueueDAO is unavailable"() { @@ -573,20 +554,6 @@ class QueueResiliencySpec extends Specification { thrown(Exception) } - def "Verify requeue fails when QueueDAO is unavailable"() { - when: "Start a simple workflow" - def workflowInstanceId = workflowResource.startWorkflow(new StartWorkflowRequest() - .withName(SIMPLE_TWO_TASK_WORKFLOW) - .withVersion(1)) - - and: - taskResource.requeue() - - then: - 1 * queueDAO.pushIfNotExists(*_) >> { throw new IllegalStateException("Queue pushIfNotExists failed from Spy") } - thrown(Exception) - } - def "Verify task search is not impacted by QueueDAO"() { when: "We perform a search" taskResource.search(0, 1, "", "", "") diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/resiliency/TaskResiliencySpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/resiliency/TaskResiliencySpec.groovy new file mode 100644 index 0000000000..4bbee65f7b --- /dev/null +++ b/test-harness/src/test/groovy/com/netflix/conductor/test/resiliency/TaskResiliencySpec.groovy @@ -0,0 +1,101 @@ +/* + * Copyright 2020 Netflix, Inc. + *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *
+ * http://www.apache.org/licenses/LICENSE-2.0 + *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package com.netflix.conductor.test.resiliency + +import com.netflix.conductor.common.metadata.tasks.Task +import com.netflix.conductor.common.run.Workflow +import com.netflix.conductor.core.execution.WorkflowRepairService +import com.netflix.conductor.test.base.AbstractResiliencySpecification +import org.springframework.beans.factory.annotation.Autowired +import spock.lang.Shared + +import static com.netflix.conductor.test.util.WorkflowTestUtil.verifyPolledAndAcknowledgedTask + +class TaskResiliencySpec extends AbstractResiliencySpecification { + + @Autowired + WorkflowRepairService workflowRepairService + + @Shared + def SIMPLE_TWO_TASK_WORKFLOW = 'integration_test_wf' + + def setup() { + workflowTestUtil.taskDefinitions() + workflowTestUtil.registerWorkflows( + 'simple_workflow_1_integration_test.json' + ) + } + + def "Verify that a workflow recovers and completes on schedule task failure from queue push failure"() { + when: "Start a simple workflow" + def workflowInstanceId = workflowExecutor.startWorkflow(SIMPLE_TWO_TASK_WORKFLOW, 1, + '', [:], null, null, null) + + then: "Retrieve the workflow" + def workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true) + workflow.status == Workflow.WorkflowStatus.RUNNING + workflow.tasks.size() == 1 + workflow.tasks[0].taskType == 'integration_task_1' + workflow.tasks[0].status == Task.Status.SCHEDULED + def taskId = workflow.tasks[0].taskId + + // Simulate queue push failure when creating a new task, after completing first task + when: "The first task 'integration_task_1' is polled and completed" + def task1Try1 = workflowTestUtil.pollAndCompleteTask('integration_task_1', 'task1.integration.worker') + + then: "Verify that the task was polled and acknowledged" + 1 * queueDAO.pop(_, 1, _) >> Collections.singletonList(taskId) + 1 * queueDAO.ack(*_) >> true + 1 * queueDAO.push(*_) >> { throw new IllegalStateException("Queue push failed from Spy") } + verifyPolledAndAcknowledgedTask(task1Try1) + + and: "Ensure that the next task is SCHEDULED even after failing to push taskId message to queue" + with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { + status == Workflow.WorkflowStatus.RUNNING + tasks.size() == 2 + tasks[0].taskType == 'integration_task_1' + tasks[0].status == Task.Status.COMPLETED + tasks[1].taskType == 'integration_task_2' + tasks[1].status == Task.Status.SCHEDULED + } + + when: "The second task 'integration_task_2' is polled for" + def task1Try2 = workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker') + + then: "Verify that the task was not polled, and the taskId doesn't exist in the queue" + task1Try2[0] == null + with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { + status == Workflow.WorkflowStatus.RUNNING + tasks.size() == 2 + tasks[0].taskType == 'integration_task_1' + tasks[0].status == Task.Status.COMPLETED + tasks[1].taskType == 'integration_task_2' + tasks[1].status == Task.Status.SCHEDULED + def currentTaskId = tasks[1].getTaskId() + !queueDAO.containsMessage("integration_task_2", currentTaskId) + } + + when: "Running a repair and decide on the workflow" + 
workflowRepairService.verifyAndRepairWorkflow(workflowInstanceId, true) + workflowExecutor.decide(workflowInstanceId) + workflowTestUtil.pollAndCompleteTask('integration_task_2', 'task2.integration.worker') + + then: "verify that the next scheduled task can be polled and executed successfully" + with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { + status == Workflow.WorkflowStatus.COMPLETED + tasks.size() == 2 + tasks[1].taskType == 'integration_task_2' + tasks[1].status == Task.Status.COMPLETED + } + } +} diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/util/WorkflowTestUtil.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/util/WorkflowTestUtil.groovy index 7a3dd104c3..0f3c93f044 100644 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/util/WorkflowTestUtil.groovy +++ b/test-harness/src/test/groovy/com/netflix/conductor/test/util/WorkflowTestUtil.groovy @@ -1,36 +1,33 @@ /* * Copyright 2020 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *
+ * http://www.apache.org/licenses/LICENSE-2.0 + *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.test.util +import com.fasterxml.jackson.databind.ObjectMapper import com.netflix.conductor.common.metadata.tasks.Task import com.netflix.conductor.common.metadata.tasks.TaskDef import com.netflix.conductor.common.metadata.workflow.WorkflowDef import com.netflix.conductor.common.run.Workflow import com.netflix.conductor.core.WorkflowContext -import com.netflix.conductor.core.execution.ApplicationException +import com.netflix.conductor.core.exception.ApplicationException import com.netflix.conductor.core.execution.WorkflowExecutor import com.netflix.conductor.dao.QueueDAO import com.netflix.conductor.service.ExecutionService import com.netflix.conductor.service.MetadataService -import com.netflix.conductor.tests.utils.JsonUtils -import org.apache.commons.lang.StringUtils +import org.apache.commons.lang3.StringUtils +import org.springframework.beans.factory.annotation.Autowired +import org.springframework.stereotype.Component import javax.annotation.PostConstruct -import javax.inject.Inject -import javax.inject.Singleton import static com.netflix.conductor.common.metadata.tasks.Task.Status.COMPLETED @@ -46,29 +43,32 @@ import static com.netflix.conductor.common.metadata.tasks.Task.Status.COMPLETED *
  • verifyPolledAndAcknowledgedTask
  • * * - * Usage: Inject this class in any Spock based specification: + * Usage: Autowire this class in any Spock based specification: * - * @Inject - * WorkflowTestUtil workflowTestUtil + * {@literal @}Autowired + * WorkflowTestUtil workflowTestUtil * */ -@Singleton +@Component class WorkflowTestUtil { private final MetadataService metadataService private final ExecutionService workflowExecutionService private final WorkflowExecutor workflowExecutor private final QueueDAO queueDAO + private final ObjectMapper objectMapper private static final int RETRY_COUNT = 1 private static final String TEMP_FILE_PATH = "/input.json" + private static final String DEFAULT_EMAIL_ADDRESS = "test@harness.com" - @Inject + @Autowired WorkflowTestUtil(MetadataService metadataService, ExecutionService workflowExecutionService, - WorkflowExecutor workflowExecutor, QueueDAO queueDAO) { + WorkflowExecutor workflowExecutor, QueueDAO queueDAO, ObjectMapper objectMapper) { this.metadataService = metadataService this.workflowExecutionService = workflowExecutionService this.workflowExecutor = workflowExecutor this.queueDAO = queueDAO + this.objectMapper = objectMapper } /** @@ -80,15 +80,15 @@ class WorkflowTestUtil { (0..20).collect { "integration_task_$it" } .findAll { !getPersistedTaskDefinition(it).isPresent() } - .collect { new TaskDef(it, it, 1, 120) } + .collect { new TaskDef(it, it, DEFAULT_EMAIL_ADDRESS, 1, 120, 120) } .forEach { metadataService.registerTaskDef([it]) } (0..4).collect { "integration_task_0_RT_$it" } .findAll { !getPersistedTaskDefinition(it).isPresent() } - .collect { new TaskDef(it, it, 0, 120) } + .collect { new TaskDef(it, it, DEFAULT_EMAIL_ADDRESS, 0, 120, 120) } .forEach { metadataService.registerTaskDef([it]) } - metadataService.registerTaskDef([new TaskDef('short_time_out', 'short_time_out', 1, 5)]) + metadataService.registerTaskDef([new TaskDef('short_time_out', 'short_time_out', DEFAULT_EMAIL_ADDRESS, 1, 5, 5)]) //This taskWithResponseTimeOut is required by the integration test which exercises the response time out scenarios TaskDef taskWithResponseTimeOut = new TaskDef() @@ -97,6 +97,7 @@ class WorkflowTestUtil { taskWithResponseTimeOut.retryCount = RETRY_COUNT taskWithResponseTimeOut.retryDelaySeconds = 0 taskWithResponseTimeOut.responseTimeoutSeconds = 10 + taskWithResponseTimeOut.ownerEmail = DEFAULT_EMAIL_ADDRESS TaskDef optionalTask = new TaskDef() optionalTask.setName("task_optional") @@ -104,49 +105,61 @@ class WorkflowTestUtil { optionalTask.setRetryCount(1) optionalTask.setTimeoutPolicy(TaskDef.TimeoutPolicy.RETRY) optionalTask.setRetryDelaySeconds(0) + optionalTask.setResponseTimeoutSeconds(5) + optionalTask.setOwnerEmail(DEFAULT_EMAIL_ADDRESS) TaskDef simpleSubWorkflowTask = new TaskDef() simpleSubWorkflowTask.setName('simple_task_in_sub_wf') simpleSubWorkflowTask.setRetryCount(0) + simpleSubWorkflowTask.setOwnerEmail(DEFAULT_EMAIL_ADDRESS) TaskDef subWorkflowTask = new TaskDef() subWorkflowTask.setName('sub_workflow_task') subWorkflowTask.setRetryCount(1) subWorkflowTask.setResponseTimeoutSeconds(5) subWorkflowTask.setRetryDelaySeconds(0) + subWorkflowTask.setOwnerEmail(DEFAULT_EMAIL_ADDRESS) TaskDef waitTimeOutTask = new TaskDef() waitTimeOutTask.name = 'waitTimeout' waitTimeOutTask.timeoutSeconds = 2 + waitTimeOutTask.responseTimeoutSeconds = 2 waitTimeOutTask.retryCount = 1 waitTimeOutTask.timeoutPolicy = TaskDef.TimeoutPolicy.RETRY waitTimeOutTask.retryDelaySeconds = 10 + waitTimeOutTask.ownerEmail = DEFAULT_EMAIL_ADDRESS TaskDef userTask = new TaskDef() 
userTask.setName("user_task") userTask.setTimeoutSeconds(20) + userTask.setResponseTimeoutSeconds(20) userTask.setRetryCount(1) userTask.setTimeoutPolicy(TaskDef.TimeoutPolicy.RETRY) userTask.setRetryDelaySeconds(10) - + userTask.setOwnerEmail(DEFAULT_EMAIL_ADDRESS) TaskDef concurrentExecutionLimitedTask = new TaskDef() concurrentExecutionLimitedTask.name = "test_task_with_concurrency_limit" concurrentExecutionLimitedTask.concurrentExecLimit = 1 + concurrentExecutionLimitedTask.ownerEmail = DEFAULT_EMAIL_ADDRESS TaskDef rateLimitedTask = new TaskDef() rateLimitedTask.name = 'test_task_with_rateLimits' rateLimitedTask.rateLimitFrequencyInSeconds = 10 rateLimitedTask.rateLimitPerFrequency = 1 + rateLimitedTask.ownerEmail = DEFAULT_EMAIL_ADDRESS TaskDef rateLimitedSimpleTask = new TaskDef() rateLimitedSimpleTask.name = 'test_simple_task_with_rateLimits' rateLimitedSimpleTask.rateLimitFrequencyInSeconds = 10 rateLimitedSimpleTask.rateLimitPerFrequency = 1 + rateLimitedSimpleTask.ownerEmail = DEFAULT_EMAIL_ADDRESS TaskDef eventTaskX = new TaskDef() eventTaskX.name = 'eventX' eventTaskX.timeoutSeconds = 1 + eventTaskX.responseTimeoutSeconds = 1 + eventTaskX.ownerEmail = DEFAULT_EMAIL_ADDRESS metadataService.registerTaskDef( [taskWithResponseTimeOut, optionalTask, simpleSubWorkflowTask, @@ -200,14 +213,19 @@ class WorkflowTestUtil { } /** - * A helper methods that registers that workflows based on the paths of the json file representing a workflow definition + * A helper methods that registers workflows based on the paths of the json file representing a workflow definition * @param workflowJsonPaths a comma separated var ags of the paths of the workflow definitions */ void registerWorkflows(String... workflowJsonPaths) { - workflowJsonPaths.collect { JsonUtils.fromJson(it, WorkflowDef.class) } + workflowJsonPaths.collect { readFile(it) } .forEach { metadataService.updateWorkflowDef(it) } } + WorkflowDef readFile(String path) { + InputStream inputStream = getClass().getClassLoader().getResourceAsStream(path) + return objectMapper.readValue(inputStream, WorkflowDef.class) + } + /** * A helper method intended to be used in the when: block of the spock test feature * This method is intended to be used to poll and update the task status as failed @@ -237,7 +255,7 @@ class WorkflowTestUtil { * A helper method to introduce delay and convert the polledIntegrationTask and ackPolledIntegrationTask * into a tuple. 
This method is intended to be used by pollAndFailTask and pollAndCompleteTask * @param waitAtEndSeconds The total seconds of delay before the method returns - * @param polledIntegrationTask instance of polled task + * @param polledIntegrationTask instance of polled task * @param ackPolledIntegrationTask a acknowledgement of a poll * @return A Tuple of polledTask and acknowledgement of the poll */ @@ -292,7 +310,7 @@ class WorkflowTestUtil { * @param expectedTaskInputParams a map of input params that are verified against the polledTask that is part of the completedTaskAndAck tuple */ static void verifyPolledAndAcknowledgedTask(Tuple completedTaskAndAck, Map expectedTaskInputParams = null) { - assert completedTaskAndAck[0] : "The task polled cannot be null" + assert completedTaskAndAck[0]: "The task polled cannot be null" def polledIntegrationTask = completedTaskAndAck[0] as Task def ackPolledIntegrationTask = completedTaskAndAck[1] as boolean assert polledIntegrationTask @@ -307,7 +325,7 @@ class WorkflowTestUtil { } static void verifyPolledAndAcknowledgedLargePayloadTask(Tuple completedTaskAndAck) { - assert completedTaskAndAck[0] : "The task polled cannot be null" + assert completedTaskAndAck[0]: "The task polled cannot be null" def polledIntegrationTask = completedTaskAndAck[0] as Task def ackPolledIntegrationTask = completedTaskAndAck[1] as boolean assert polledIntegrationTask diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractEndToEndTest.java b/test-harness/src/test/java/com/netflix/conductor/test/integration/AbstractEndToEndTest.java similarity index 84% rename from test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractEndToEndTest.java rename to test-harness/src/test/java/com/netflix/conductor/test/integration/AbstractEndToEndTest.java index d5419f232e..ab315dc172 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractEndToEndTest.java +++ b/test-harness/src/test/java/com/netflix/conductor/test/integration/AbstractEndToEndTest.java @@ -1,20 +1,35 @@ -package com.netflix.conductor.tests.integration; +/* + * Copyright 2020 Netflix, Inc. + *
    + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *
    + * http://www.apache.org/licenses/LICENSE-2.0 + *
    + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package com.netflix.conductor.test.integration; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.workflow.TaskType; +import com.netflix.conductor.common.metadata.tasks.TaskType; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.common.run.Workflow; -import org.junit.Test; - import java.util.Arrays; import java.util.LinkedList; import java.util.List; import java.util.Optional; +import org.junit.Test; +import org.springframework.test.context.TestPropertySource; +import org.testcontainers.elasticsearch.ElasticsearchContainer; +import org.testcontainers.utility.DockerImageName; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; - +@TestPropertySource(properties = {"workflow.indexing.enabled=true", "workflow.elasticsearch.version=6"}) public abstract class AbstractEndToEndTest { private static final String TASK_DEFINITION_PREFIX = "task_"; @@ -23,6 +38,10 @@ public abstract class AbstractEndToEndTest { private static final String DEFAULT_NULL_VALUE = "null"; protected static final String DEFAULT_EMAIL_ADDRESS = "test@harness.com"; + protected static final ElasticsearchContainer container = new ElasticsearchContainer(DockerImageName + .parse("docker.elastic.co/elasticsearch/elasticsearch-oss") + .withTag("6.8.12")); // this should match the client version + @Test public void testEphemeralWorkflowsWithStoredTasks() { String workflowExecutionName = "testEphemeralWorkflow"; @@ -140,7 +159,7 @@ protected List createAndRegisterTaskDefinitions(String prefixTaskDefini String prefix = Optional.ofNullable(prefixTaskDefinition).orElse(TASK_DEFINITION_PREFIX); List definitions = new LinkedList<>(); for (int i = 0; i < numberOfTaskDefinitions; i++) { - TaskDef def = new TaskDef(prefix + i, "task " + i + DEFAULT_DESCRIPTION, DEFAULT_EMAIL_ADDRESS, 3, 60 ,60); + TaskDef def = new TaskDef(prefix + i, "task " + i + DEFAULT_DESCRIPTION, DEFAULT_EMAIL_ADDRESS, 3, 60, 60); def.setTimeoutPolicy(TaskDef.TimeoutPolicy.RETRY); definitions.add(def); } diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractGrpcEndToEndTest.java b/test-harness/src/test/java/com/netflix/conductor/test/integration/grpc/AbstractGrpcEndToEndTest.java similarity index 74% rename from test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractGrpcEndToEndTest.java rename to test-harness/src/test/java/com/netflix/conductor/test/integration/grpc/AbstractGrpcEndToEndTest.java index b014fa20d4..f48658fd21 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractGrpcEndToEndTest.java +++ b/test-harness/src/test/java/com/netflix/conductor/test/integration/grpc/AbstractGrpcEndToEndTest.java @@ -1,5 +1,5 @@ -/** - * Copyright 2016 Netflix, Inc. +/* + * Copyright 2020 Netflix, Inc. *

    * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at @@ -10,10 +10,7 @@ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ -/** - * - */ -package com.netflix.conductor.tests.integration; +package com.netflix.conductor.test.integration.grpc; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; @@ -34,20 +31,26 @@ import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.common.run.Workflow.WorkflowStatus; import com.netflix.conductor.common.run.WorkflowSummary; -import com.netflix.conductor.elasticsearch.EmbeddedElasticSearch; +import com.netflix.conductor.test.integration.AbstractEndToEndTest; import java.util.LinkedList; import java.util.List; import org.junit.Test; - -/** - * @author Viren - */ +import org.junit.runner.RunWith; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.TestPropertySource; +import org.springframework.test.context.junit4.SpringRunner; + +@RunWith(SpringRunner.class) +@SpringBootTest(properties = { + "conductor.grpc.server.enabled=true", + "conductor.grpc.server.port=8092" +}) +@TestPropertySource(locations = "classpath:application-integrationtest.properties") public abstract class AbstractGrpcEndToEndTest extends AbstractEndToEndTest { protected static TaskClient taskClient; protected static WorkflowClient workflowClient; protected static MetadataClient metadataClient; - protected static EmbeddedElasticSearch search; @Override protected String startWorkflow(String workflowExecutionName, WorkflowDef workflowDefinition) { @@ -94,7 +97,6 @@ public void testAll() throws Exception { WorkflowTask t0 = createWorkflowTask("t0"); WorkflowTask t1 = createWorkflowTask("t1"); - def.getTasks().add(t0); def.getTasks().add(t1); @@ -110,18 +112,17 @@ public void testAll() throws Exception { String workflowId = workflowClient.startWorkflow(startWf); assertNotNull(workflowId); - System.out.println("Started workflow id=" + workflowId); - Workflow wf = workflowClient.getWorkflow(workflowId, false); - assertEquals(0, wf.getTasks().size()); - assertEquals(workflowId, wf.getWorkflowId()); + Workflow workflow = workflowClient.getWorkflow(workflowId, false); + assertEquals(0, workflow.getTasks().size()); + assertEquals(workflowId, workflow.getWorkflowId()); - wf = workflowClient.getWorkflow(workflowId, true); - assertNotNull(wf); - assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); - assertEquals(1, wf.getTasks().size()); - assertEquals(t0.getTaskReferenceName(), wf.getTasks().get(0).getReferenceTaskName()); - assertEquals(workflowId, wf.getWorkflowId()); + workflow = workflowClient.getWorkflow(workflowId, true); + assertNotNull(workflow); + assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); + assertEquals(1, workflow.getTasks().size()); + assertEquals(t0.getTaskReferenceName(), workflow.getTasks().get(0).getReferenceTaskName()); + assertEquals(workflowId, workflow.getWorkflowId()); List runningIds = workflowClient.getRunningWorkflow(def.getName(), def.getVersion()); assertNotNull(runningIds); @@ -138,8 +139,7 @@ public void testAll() throws Exception { assertEquals(t0.getName(), polled.get(0).getTaskDefName()); Task task = polled.get(0); - Boolean acked = taskClient.ack(task.getTaskId(), "test"); - 
assertNotNull(acked); + boolean acked = taskClient.ack(task.getTaskId(), "test"); assertTrue(acked); task.getOutputData().put("key1", "value1"); @@ -150,30 +150,27 @@ public void testAll() throws Exception { assertNotNull(polled); assertTrue(polled.toString(), polled.isEmpty()); - wf = workflowClient.getWorkflow(workflowId, true); - assertNotNull(wf); - assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); - assertEquals(2, wf.getTasks().size()); - assertEquals(t0.getTaskReferenceName(), wf.getTasks().get(0).getReferenceTaskName()); - assertEquals(t1.getTaskReferenceName(), wf.getTasks().get(1).getReferenceTaskName()); - assertEquals(Status.COMPLETED, wf.getTasks().get(0).getStatus()); - assertEquals(Status.SCHEDULED, wf.getTasks().get(1).getStatus()); + workflow = workflowClient.getWorkflow(workflowId, true); + assertNotNull(workflow); + assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); + assertEquals(2, workflow.getTasks().size()); + assertEquals(t0.getTaskReferenceName(), workflow.getTasks().get(0).getReferenceTaskName()); + assertEquals(t1.getTaskReferenceName(), workflow.getTasks().get(1).getReferenceTaskName()); + assertEquals(Status.COMPLETED, workflow.getTasks().get(0).getStatus()); + assertEquals(Status.SCHEDULED, workflow.getTasks().get(1).getStatus()); Task taskById = taskClient.getTaskDetails(task.getTaskId()); assertNotNull(taskById); assertEquals(task.getTaskId(), taskById.getTaskId()); - List getTasks = taskClient.getPendingTasksByType(t0.getName(), null, 1); assertNotNull(getTasks); assertEquals(0, getTasks.size()); //getTasks only gives pending tasks - getTasks = taskClient.getPendingTasksByType(t1.getName(), null, 1); assertNotNull(getTasks); assertEquals(1, getTasks.size()); - Task pending = taskClient.getPendingTaskForWorkflow(workflowId, t1.getTaskReferenceName()); assertNotNull(pending); assertEquals(t1.getTaskReferenceName(), pending.getReferenceTaskName()); @@ -185,14 +182,14 @@ public void testAll() throws Exception { assertEquals(1, searchResult.getTotalHits()); workflowClient.terminateWorkflow(workflowId, "terminate reason"); - wf = workflowClient.getWorkflow(workflowId, true); - assertNotNull(wf); - assertEquals(WorkflowStatus.TERMINATED, wf.getStatus()); + workflow = workflowClient.getWorkflow(workflowId, true); + assertNotNull(workflow); + assertEquals(WorkflowStatus.TERMINATED, workflow.getStatus()); workflowClient.restart(workflowId, false); - wf = workflowClient.getWorkflow(workflowId, true); - assertNotNull(wf); - assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); - assertEquals(1, wf.getTasks().size()); + workflow = workflowClient.getWorkflow(workflowId, true); + assertNotNull(workflow); + assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); + assertEquals(1, workflow.getTasks().size()); } } diff --git a/test-harness/src/test/java/com/netflix/conductor/test/integration/grpc/GrpcEndToEndTest.java b/test-harness/src/test/java/com/netflix/conductor/test/integration/grpc/GrpcEndToEndTest.java new file mode 100644 index 0000000000..70869df3e4 --- /dev/null +++ b/test-harness/src/test/java/com/netflix/conductor/test/integration/grpc/GrpcEndToEndTest.java @@ -0,0 +1,51 @@ +/* + * Copyright 2020 Netflix, Inc. + *

+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *

+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.test.integration.grpc;
+
+import com.netflix.conductor.client.grpc.MetadataClient;
+import com.netflix.conductor.client.grpc.TaskClient;
+import com.netflix.conductor.client.grpc.WorkflowClient;
+import com.netflix.conductor.dao.IndexDAO;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.springframework.beans.factory.annotation.Autowired;
+
+@SuppressWarnings("SpringJavaInjectionPointsAutowiringInspection")
+public class GrpcEndToEndTest extends AbstractGrpcEndToEndTest {
+
+    @Autowired
+    private IndexDAO indexDAO;
+
+    @BeforeClass
+    public static void setup() {
+        container.start();
+
+        String httpHostAddress = container.getHttpHostAddress();
+        System.setProperty("workflow.elasticsearch.url", "http://" + httpHostAddress);
+    }
+
+    @AfterClass
+    public static void cleanup() {
+        container.stop();
+    }
+
+    @Before
+    public void init() throws Exception {
+        indexDAO.setup();
+
+        taskClient = new TaskClient("localhost", 8092);
+        workflowClient = new WorkflowClient("localhost", 8092);
+        metadataClient = new MetadataClient("localhost", 8092);
+    }
+}
diff --git a/test-harness/src/test/java/com/netflix/conductor/test/integration/grpc/mysql/MySQLGrpcEndToEndTest.java b/test-harness/src/test/java/com/netflix/conductor/test/integration/grpc/mysql/MySQLGrpcEndToEndTest.java
new file mode 100644
index 0000000000..a0783c8d8f
--- /dev/null
+++ b/test-harness/src/test/java/com/netflix/conductor/test/integration/grpc/mysql/MySQLGrpcEndToEndTest.java
@@ -0,0 +1,67 @@
+/*
+ * Copyright 2020 Netflix, Inc.
+ *
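Note: GrpcEndToEndTest pushes the container address into the context by mutating a system property in @BeforeClass. For reference only, and assuming a Spring Framework version that ships @DynamicPropertySource (5.2.5 or later), the same wiring could be expressed without touching global state; this is a sketch of an alternative, not what the change does:

    // Sketch of an alternative wiring: @DynamicPropertySource registers the container
    // address with the test context; a @SpringBootTest subclass would pick it up.
    import org.springframework.test.context.DynamicPropertyRegistry;
    import org.springframework.test.context.DynamicPropertySource;
    import org.testcontainers.elasticsearch.ElasticsearchContainer;
    import org.testcontainers.utility.DockerImageName;

    abstract class ElasticsearchBackedTest {

        static final ElasticsearchContainer ES = new ElasticsearchContainer(
            DockerImageName.parse("docker.elastic.co/elasticsearch/elasticsearch-oss").withTag("6.8.12"));

        static {
            ES.start(); // started once per JVM; Testcontainers cleans it up afterwards
        }

        @DynamicPropertySource
        static void elasticsearchProperties(DynamicPropertyRegistry registry) {
            registry.add("workflow.elasticsearch.url", () -> "http://" + ES.getHttpHostAddress());
        }
    }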

+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *

    + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package com.netflix.conductor.test.integration.grpc.mysql; + +import com.netflix.conductor.client.grpc.MetadataClient; +import com.netflix.conductor.client.grpc.TaskClient; +import com.netflix.conductor.client.grpc.WorkflowClient; +import com.netflix.conductor.dao.IndexDAO; +import com.netflix.conductor.test.integration.grpc.AbstractGrpcEndToEndTest; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.context.TestPropertySource; +import org.springframework.test.context.junit4.SpringRunner; + +@SuppressWarnings("SpringJavaInjectionPointsAutowiringInspection") +@RunWith(SpringRunner.class) +@TestPropertySource(properties = { + "db=mysql", + "conductor.grpc.server.port=8094", + "jdbc.url=jdbc:mysql://localhost:33307/conductor?useUnicode=true&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=UTC", + "jdbc.username=root", + "jdbc.password=root", + "conductor.mysql.connection.pool.size.min=8", + "conductor.mysql.connection.pool.size.max=8", + "conductor.mysql.connection.pool.idle.min=300000", + "spring.flyway.enabled=false" +}) +public class MySQLGrpcEndToEndTest extends AbstractGrpcEndToEndTest { + + @Autowired + private IndexDAO indexDAO; + + @BeforeClass + public static void setup() { + container.start(); + + String httpHostAddress = container.getHttpHostAddress(); + System.setProperty("workflow.elasticsearch.url", "http://" + httpHostAddress); + } + + @AfterClass + public static void cleanup() { + container.stop(); + } + + @Before + public void init() throws Exception { + indexDAO.setup(); + + taskClient = new TaskClient("localhost", 8094); + workflowClient = new WorkflowClient("localhost", 8094); + metadataClient = new MetadataClient("localhost", 8094); + } +} diff --git a/test-harness/src/test/java/com/netflix/conductor/test/integration/grpc/postgres/PostgresGrpcEndToEndTest.java b/test-harness/src/test/java/com/netflix/conductor/test/integration/grpc/postgres/PostgresGrpcEndToEndTest.java new file mode 100644 index 0000000000..921034d4a3 --- /dev/null +++ b/test-harness/src/test/java/com/netflix/conductor/test/integration/grpc/postgres/PostgresGrpcEndToEndTest.java @@ -0,0 +1,67 @@ +/* + * Copyright 2020 Netflix, Inc. + *
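Note: MySQLGrpcEndToEndTest points at a database expected to be running already on localhost:33307 (root/root), presumably provisioned outside the test JVM. Purely as an illustration of the alternative, a Testcontainers-managed MySQL could be provisioned as sketched below; the database name and credentials are illustrative and would still have to be reconciled with the jdbc.* entries declared in @TestPropertySource above, which take precedence over system properties:

    // Sketch only: this is NOT what the change does; it shows how the same endpoint could
    // come from Testcontainers instead of a pre-started local MySQL instance.
    import org.testcontainers.containers.MySQLContainer;

    final class MySqlBackedTestSupport {

        static final MySQLContainer<?> MYSQL = new MySQLContainer<>("mysql:5.7")
            .withDatabaseName("conductor")
            .withUsername("conductor")
            .withPassword("conductor");

        static void startAndExport() {
            MYSQL.start();
            // Exported here only for illustration; the test above reads fixed jdbc.* values.
            System.setProperty("jdbc.url", MYSQL.getJdbcUrl());
            System.setProperty("jdbc.username", MYSQL.getUsername());
            System.setProperty("jdbc.password", MYSQL.getPassword());
        }

        private MySqlBackedTestSupport() {
        }
    }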

+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *

    + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package com.netflix.conductor.test.integration.grpc.postgres; + +import com.netflix.conductor.client.grpc.MetadataClient; +import com.netflix.conductor.client.grpc.TaskClient; +import com.netflix.conductor.client.grpc.WorkflowClient; +import com.netflix.conductor.dao.IndexDAO; +import com.netflix.conductor.test.integration.grpc.AbstractGrpcEndToEndTest; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.context.TestPropertySource; +import org.springframework.test.context.junit4.SpringRunner; + +@SuppressWarnings("SpringJavaInjectionPointsAutowiringInspection") +@RunWith(SpringRunner.class) +@TestPropertySource(properties = { + "db=postgres", + "conductor.grpc.server.port=8098", + "jdbc.url=jdbc:postgresql://localhost:54320/conductor", + "jdbc.username=postgres", + "jdbc.password=postgres", + "conductor.postgres.connection.pool.size.min=8", + "conductor.postgres.connection.pool.size.max=8", + "conductor.postgres.connection.pool.idle.min=300000", + "spring.flyway.enabled=false" +}) +public class PostgresGrpcEndToEndTest extends AbstractGrpcEndToEndTest { + + @Autowired + private IndexDAO indexDAO; + + @BeforeClass + public static void setup() { + container.start(); + + String httpHostAddress = container.getHttpHostAddress(); + System.setProperty("workflow.elasticsearch.url", "http://" + httpHostAddress); + } + + @AfterClass + public static void cleanup() { + container.stop(); + } + + @Before + public void init() throws Exception { + indexDAO.setup(); + + taskClient = new TaskClient("localhost", 8098); + workflowClient = new WorkflowClient("localhost", 8098); + metadataClient = new MetadataClient("localhost", 8098); + } +} diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractHttpEndToEndTest.java b/test-harness/src/test/java/com/netflix/conductor/test/integration/http/AbstractHttpEndToEndTest.java similarity index 90% rename from test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractHttpEndToEndTest.java rename to test-harness/src/test/java/com/netflix/conductor/test/integration/http/AbstractHttpEndToEndTest.java index 052942dba5..f82f4d0b60 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractHttpEndToEndTest.java +++ b/test-harness/src/test/java/com/netflix/conductor/test/integration/http/AbstractHttpEndToEndTest.java @@ -1,5 +1,5 @@ /* - * Copyright 2016 Netflix, Inc. + * Copyright 2020 Netflix, Inc. *

    * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at @@ -10,15 +10,14 @@ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ -package com.netflix.conductor.tests.integration; +package com.netflix.conductor.test.integration.http; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; - -import com.netflix.conductor.client.exceptions.ConductorClientException; +import com.netflix.conductor.client.exception.ConductorClientException; import com.netflix.conductor.client.http.MetadataClient; import com.netflix.conductor.client.http.TaskClient; import com.netflix.conductor.client.http.WorkflowClient; @@ -26,8 +25,8 @@ import com.netflix.conductor.common.metadata.tasks.Task.Status; import com.netflix.conductor.common.metadata.tasks.TaskDef; import com.netflix.conductor.common.metadata.tasks.TaskResult; +import com.netflix.conductor.common.metadata.tasks.TaskType; import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; -import com.netflix.conductor.common.metadata.workflow.TaskType; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.common.run.SearchResult; @@ -35,32 +34,38 @@ import com.netflix.conductor.common.run.Workflow.WorkflowStatus; import com.netflix.conductor.common.run.WorkflowSummary; import com.netflix.conductor.common.validation.ValidationError; -import com.netflix.conductor.elasticsearch.EmbeddedElasticSearch; +import com.netflix.conductor.test.integration.AbstractEndToEndTest; import java.util.ArrayList; import java.util.HashMap; import java.util.List; -import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; - -import org.awaitility.Awaitility; import org.junit.Test; -/** - * @author Viren - */ +import org.junit.runner.RunWith; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.boot.test.context.SpringBootTest.WebEnvironment; +import org.springframework.boot.web.server.LocalServerPort; +import org.springframework.test.context.TestPropertySource; +import org.springframework.test.context.junit4.SpringRunner; + +@RunWith(SpringRunner.class) +@SpringBootTest(webEnvironment = WebEnvironment.RANDOM_PORT) +@TestPropertySource(locations = "classpath:application-integrationtest.properties") public abstract class AbstractHttpEndToEndTest extends AbstractEndToEndTest { + @LocalServerPort + protected int port; + protected static String apiRoot; protected static TaskClient taskClient; protected static WorkflowClient workflowClient; - protected static EmbeddedElasticSearch search; protected static MetadataClient metadataClient; @Override protected String startWorkflow(String workflowExecutionName, WorkflowDef workflowDefinition) { StartWorkflowRequest workflowRequest = new StartWorkflowRequest() - .withName(workflowExecutionName) - .withWorkflowDef(workflowDefinition); + .withName(workflowExecutionName) + .withWorkflowDef(workflowDefinition); return workflowClient.startWorkflow(workflowRequest); } @@ -107,10 +112,10 @@ public void testAll() throws Exception { String correlationId = "test_corr_id"; StartWorkflowRequest 
startWorkflowRequest = new StartWorkflowRequest() - .withName(def.getName()) - .withCorrelationId(correlationId) - .withPriority(50) - .withInput(new HashMap<>()); + .withName(def.getName()) + .withCorrelationId(correlationId) + .withPriority(50) + .withInput(new HashMap<>()); String workflowId = workflowClient.startWorkflow(startWorkflowRequest); assertNotNull(workflowId); @@ -118,14 +123,6 @@ public void testAll() throws Exception { assertEquals(0, workflow.getTasks().size()); assertEquals(workflowId, workflow.getWorkflowId()); - Awaitility.await() - .atMost(5, TimeUnit.SECONDS) - .untilAsserted(() -> { - List workflowList = workflowClient.getWorkflows(def.getName(), correlationId, false, false); - assertEquals(1, workflowList.size()); - assertEquals(workflowId, workflowList.get(0).getWorkflowId()); - }); - workflow = workflowClient.getWorkflow(workflowId, true); assertNotNull(workflow); assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); @@ -270,7 +267,6 @@ public void testUpdateWorkflow() { tasks.add(taskDef); metadataClient.registerTaskDefs(tasks); - WorkflowDef def = new WorkflowDef(); def.setName("testWorkflowDel"); def.setVersion(1); @@ -321,8 +317,8 @@ public void testUpdateTask() { assertFalse(e.isRetryable()); List errors = e.getValidationErrors(); List errorMessages = errors.stream() - .map(ValidationError::getMessage) - .collect(Collectors.toList()); + .map(ValidationError::getMessage) + .collect(Collectors.toList()); assertEquals(2, errors.size()); assertTrue(errorMessages.contains("Workflow Id cannot be null or empty")); throw e; @@ -352,8 +348,8 @@ public void testEmptyCreateWorkflowDef() { assertFalse(e.isRetryable()); List errors = e.getValidationErrors(); List errorMessages = errors.stream() - .map(ValidationError::getMessage) - .collect(Collectors.toList()); + .map(ValidationError::getMessage) + .collect(Collectors.toList()); assertTrue(errorMessages.contains("WorkflowDef name cannot be null or empty")); assertTrue(errorMessages.contains("WorkflowTask list cannot be empty")); throw e; @@ -373,8 +369,8 @@ public void testUpdateWorkflowDef() { assertFalse(e.isRetryable()); List errors = e.getValidationErrors(); List errorMessages = errors.stream() - .map(ValidationError::getMessage) - .collect(Collectors.toList()); + .map(ValidationError::getMessage) + .collect(Collectors.toList()); assertEquals(3, errors.size()); assertTrue(errorMessages.contains("WorkflowTask list cannot be empty")); assertTrue(errorMessages.contains("WorkflowDef name cannot be null or empty")); @@ -427,8 +423,8 @@ public void testCreateInvalidWorkflowDef() { assertFalse(e.isRetryable()); List errors = e.getValidationErrors(); List errorMessages = errors.stream() - .map(ValidationError::getMessage) - .collect(Collectors.toList()); + .map(ValidationError::getMessage) + .collect(Collectors.toList()); assertTrue(errorMessages.contains("WorkflowDef name cannot be null or empty")); assertTrue(errorMessages.contains("WorkflowTask list cannot be empty")); assertTrue(errorMessages.contains("ownerEmail cannot be empty")); @@ -437,19 +433,19 @@ public void testCreateInvalidWorkflowDef() { } @Test(expected = ConductorClientException.class) - public void testUpdateTaskDefNameNull(){ + public void testUpdateTaskDefNameNull() { TaskDef taskDef = new TaskDef(); - try{ + try { metadataClient.updateTaskDef(taskDef); - } catch (ConductorClientException e){ + } catch (ConductorClientException e) { assertEquals(2, e.getValidationErrors().size()); assertEquals(400, e.getStatus()); assertEquals("Validation failed, 
check below errors for detail.", e.getMessage()); assertFalse(e.isRetryable()); List errors = e.getValidationErrors(); List errorMessages = errors.stream() - .map(ValidationError::getMessage) - .collect(Collectors.toList()); + .map(ValidationError::getMessage) + .collect(Collectors.toList()); assertTrue(errorMessages.contains("TaskDef name cannot be null or empty")); assertTrue(errorMessages.contains("ownerEmail cannot be empty")); throw e; @@ -457,31 +453,30 @@ public void testUpdateTaskDefNameNull(){ } @Test(expected = IllegalArgumentException.class) - public void testGetTaskDefNotExisting(){ - metadataClient.getTaskDef(""); + public void testGetTaskDefNotExisting() { + metadataClient.getTaskDef(""); } @Test(expected = ConductorClientException.class) - public void testUpdateWorkflowDefNameNull(){ + public void testUpdateWorkflowDefNameNull() { WorkflowDef workflowDef = new WorkflowDef(); List list = new ArrayList<>(); list.add(workflowDef); - try{ + try { metadataClient.updateWorkflowDefs(list); - } catch (ConductorClientException e){ + } catch (ConductorClientException e) { assertEquals(3, e.getValidationErrors().size()); assertEquals(400, e.getStatus()); assertEquals("Validation failed, check below errors for detail.", e.getMessage()); assertFalse(e.isRetryable()); List errors = e.getValidationErrors(); List errorMessages = errors.stream() - .map(ValidationError::getMessage) - .collect(Collectors.toList()); + .map(ValidationError::getMessage) + .collect(Collectors.toList()); assertTrue(errorMessages.contains("WorkflowDef name cannot be null or empty")); assertTrue(errorMessages.contains("WorkflowTask list cannot be empty")); assertTrue(errorMessages.contains("ownerEmail cannot be empty")); throw e; } } - } diff --git a/test-harness/src/test/java/com/netflix/conductor/test/integration/http/HttpEndToEndTest.java b/test-harness/src/test/java/com/netflix/conductor/test/integration/http/HttpEndToEndTest.java new file mode 100644 index 0000000000..15015cb303 --- /dev/null +++ b/test-harness/src/test/java/com/netflix/conductor/test/integration/http/HttpEndToEndTest.java @@ -0,0 +1,59 @@ +/* + * Copyright 2020 Netflix, Inc. + *
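Note: the validation tests above repeat the same block of ConductorClientException assertions (status 400, the fixed "Validation failed" message, non-retryable, expected error messages). A small helper along these lines, built only from calls already used in the tests, could remove the duplication; it is a sketch, not part of the change:

    // Illustrative helper: consolidates the ConductorClientException checks repeated
    // across testUpdateTask, testEmptyCreateWorkflowDef, testUpdateWorkflowDef, etc.
    import static org.junit.Assert.assertEquals;
    import static org.junit.Assert.assertFalse;
    import static org.junit.Assert.assertTrue;

    import com.netflix.conductor.client.exception.ConductorClientException;
    import com.netflix.conductor.common.validation.ValidationError;
    import java.util.List;
    import java.util.stream.Collectors;

    final class ValidationAssertions {

        static void assertValidationFailure(ConductorClientException e, int expectedErrorCount,
                                            String... expectedMessages) {
            assertEquals(400, e.getStatus());
            assertEquals("Validation failed, check below errors for detail.", e.getMessage());
            assertFalse(e.isRetryable());
            List<ValidationError> errors = e.getValidationErrors();
            assertEquals(expectedErrorCount, errors.size());
            List<String> messages = errors.stream()
                .map(ValidationError::getMessage)
                .collect(Collectors.toList());
            for (String expected : expectedMessages) {
                assertTrue("missing validation message: " + expected, messages.contains(expected));
            }
        }

        private ValidationAssertions() {
        }
    }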

+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *

    + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package com.netflix.conductor.test.integration.http; + +import com.netflix.conductor.client.http.MetadataClient; +import com.netflix.conductor.client.http.TaskClient; +import com.netflix.conductor.client.http.WorkflowClient; +import com.netflix.conductor.dao.IndexDAO; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.springframework.beans.factory.annotation.Autowired; + +@SuppressWarnings("SpringJavaInjectionPointsAutowiringInspection") +public class HttpEndToEndTest extends AbstractHttpEndToEndTest { + + @Autowired + private IndexDAO indexDAO; + + @BeforeClass + public static void setup() { + + container.start(); + + String httpHostAddress = container.getHttpHostAddress(); + System.setProperty("workflow.elasticsearch.url", "http://" + httpHostAddress); + } + + @AfterClass + public static void cleanup() { + container.stop(); + } + + @Before + public void init() throws Exception { + indexDAO.setup(); + + apiRoot = String.format("http://localhost:%d/api/", port); + + taskClient = new TaskClient(); + taskClient.setRootURI(apiRoot); + + workflowClient = new WorkflowClient(); + workflowClient.setRootURI(apiRoot); + + metadataClient = new MetadataClient(); + metadataClient.setRootURI(apiRoot); + } +} diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/listener/WorkflowStatusPublisherIntegrationTest.java b/test-harness/src/test/java/com/netflix/conductor/test/listener/WorkflowStatusPublisherIntegrationTest.java similarity index 62% rename from test-harness/src/test/java/com/netflix/conductor/tests/listener/WorkflowStatusPublisherIntegrationTest.java rename to test-harness/src/test/java/com/netflix/conductor/test/listener/WorkflowStatusPublisherIntegrationTest.java index 2f60c7039a..e0033d15fb 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/listener/WorkflowStatusPublisherIntegrationTest.java +++ b/test-harness/src/test/java/com/netflix/conductor/test/listener/WorkflowStatusPublisherIntegrationTest.java @@ -1,25 +1,16 @@ -/** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. +/* + * Copyright 2020 Netflix, Inc. + *
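Note: HttpEndToEndTest derives apiRoot from the @LocalServerPort injected by the RANDOM_PORT web environment and points the HTTP clients at it. A minimal sketch of the same wiring used to register a task definition, mirroring the TaskDef constructor arguments (name, description, ownerEmail, retryCount, timeoutSeconds, responseTimeoutSeconds) seen in AbstractEndToEndTest; names and values here are illustrative:

    // Illustration of using the randomly assigned port with the HTTP MetadataClient,
    // following the rootURI wiring shown in HttpEndToEndTest above.
    import com.netflix.conductor.client.http.MetadataClient;
    import com.netflix.conductor.common.metadata.tasks.TaskDef;
    import java.util.Collections;

    class ApiSmokeExample {

        static void registerSampleTask(int port) {
            String apiRoot = String.format("http://localhost:%d/api/", port);

            MetadataClient metadataClient = new MetadataClient();
            metadataClient.setRootURI(apiRoot);

            // name, description, ownerEmail, retryCount, timeoutSeconds, responseTimeoutSeconds
            TaskDef taskDef = new TaskDef("sample_task", "sample task", "test@harness.com", 3, 60, 60);
            metadataClient.registerTaskDefs(Collections.singletonList(taskDef));
        }
    }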

+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *

    + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. */ -/** - * - */ -package com.netflix.conductor.tests.listener; - -import static com.netflix.conductor.common.metadata.tasks.Task.Status.COMPLETED; -import static org.junit.Assert.assertEquals; +package com.netflix.conductor.test.listener; import com.fasterxml.jackson.databind.ObjectMapper; import com.netflix.conductor.common.metadata.tasks.Task; @@ -33,6 +24,15 @@ import com.netflix.conductor.dao.QueueDAO; import com.netflix.conductor.service.ExecutionService; import com.netflix.conductor.service.MetadataService; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.TestPropertySource; +import org.springframework.test.context.junit4.SpringRunner; + import java.io.IOException; import java.util.Collections; import java.util.HashMap; @@ -40,53 +40,75 @@ import java.util.List; import java.util.Map; import java.util.stream.Collectors; -import javax.inject.Inject; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -@RunWith(StatusPublisherTestRunner.class) +import static com.netflix.conductor.common.metadata.tasks.Task.Status.COMPLETED; +import static org.junit.Assert.assertEquals; + +@RunWith(SpringRunner.class) +@SpringBootTest(properties = { + "workflow.status.listener.type=queue_status_publisher", + "workflowstatuslistener.publisher.success.queue=dummy", + "workflowstatuslistener.publisher.failure.queue=dummy" +}) +@TestPropertySource(locations = "classpath:application-integrationtest.properties") public class WorkflowStatusPublisherIntegrationTest { - private String CALLBACK_QUEUE = "dummy"; + private final String CALLBACK_QUEUE = "dummy"; private static final String LINEAR_WORKFLOW_T1_T2 = "junit_test_wf"; private static final int WORKFLOW_VERSION = 1; private static final String INCOMPLETION_REASON = "test reason"; + private static final String DEFAULT_OWNER_EMAIL = "test@harness.com"; - @Inject - private ObjectMapper mapper; + @Autowired + private ObjectMapper objectMapper; - @Inject + @Autowired QueueDAO queueDAO; - @Inject + @Autowired protected MetadataService metadataService; - @Inject + @Autowired protected ExecutionService workflowExecutionService; - @Inject + @Autowired protected WorkflowExecutor workflowExecutor; @Before public void setUp() { - TaskDef task = new TaskDef(); - task.setName("junit_task_1"); - task.setTimeoutSeconds(120); - task.setRetryCount(1); - metadataService.registerTaskDef(Collections.singletonList(task)); + TaskDef taskDef = new TaskDef(); + taskDef.setName("junit_task_1"); + taskDef.setTimeoutSeconds(120); + taskDef.setResponseTimeoutSeconds(120); + taskDef.setRetryCount(1); + taskDef.setOwnerEmail(DEFAULT_OWNER_EMAIL); + metadataService.registerTaskDef(Collections.singletonList(taskDef)); + } + + @After + public void cleanUp() { + List workflows = metadataService.getWorkflowDefs().stream() + .map(WorkflowDef::getName) + .collect(Collectors.toList()); + for (String wfName : workflows) { + List running = workflowExecutionService.getRunningWorkflows(wfName, 
WORKFLOW_VERSION); + for (String wfid : running) { + workflowExecutor.terminateWorkflow(wfid, "cleanup"); + } + } + queueDAO.queuesDetail().keySet().forEach(queueDAO::flush); } @Test public void testListenerOnTerminatedWorkflow() throws IOException { - String id = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, "testWorkflowTerminatedListener", new HashMap<>(), null, null); + String id = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, "testWorkflowTerminatedListener", + new HashMap<>()); workflowExecutor.terminateWorkflow(id, INCOMPLETION_REASON); List callbackMessages = queueDAO.pollMessages(CALLBACK_QUEUE, 1, 200); queueDAO.ack(CALLBACK_QUEUE, callbackMessages.get(0).getId()); - WorkflowSummary payload = mapper.readValue(callbackMessages.get(0).getPayload(), WorkflowSummary.class); + WorkflowSummary payload = objectMapper.readValue(callbackMessages.get(0).getPayload(), WorkflowSummary.class); assertEquals(id, callbackMessages.get(0).getId()); assertEquals(LINEAR_WORKFLOW_T1_T2, payload.getWorkflowType()); @@ -97,14 +119,13 @@ public void testListenerOnTerminatedWorkflow() throws IOException { @Test public void testListenerOnCompletedWorkflow() throws IOException, InterruptedException { - clearWorkflows(); - - WorkflowDef def = new WorkflowDef(); - def.setName(LINEAR_WORKFLOW_T1_T2); - def.setDescription(def.getName()); - def.setVersion(WORKFLOW_VERSION); - def.setSchemaVersion(2); - def.setWorkflowStatusListenerEnabled(true); + WorkflowDef workflowDef = new WorkflowDef(); + workflowDef.setName(LINEAR_WORKFLOW_T1_T2); + workflowDef.setDescription(workflowDef.getName()); + workflowDef.setVersion(WORKFLOW_VERSION); + workflowDef.setSchemaVersion(2); + workflowDef.setOwnerEmail(DEFAULT_OWNER_EMAIL); + workflowDef.setWorkflowStatusListenerEnabled(true); LinkedList wftasks = new LinkedList<>(); WorkflowTask wft1 = new WorkflowTask(); @@ -112,11 +133,12 @@ public void testListenerOnCompletedWorkflow() throws IOException, InterruptedExc wft1.setTaskReferenceName("t1"); wftasks.add(wft1); - def.setTasks(wftasks); + workflowDef.setTasks(wftasks); - metadataService.updateWorkflowDef(Collections.singletonList(def)); + metadataService.updateWorkflowDef(Collections.singletonList(workflowDef)); - String id = startOrLoadWorkflowExecution(def.getName(), 1, "testWorkflowCompletedListener", new HashMap<>(), null, null); + String id = startOrLoadWorkflowExecution(workflowDef.getName(), 1, "testWorkflowCompletedListener", + new HashMap<>()); List tasks = workflowExecutionService.getTasks("junit_task_1", null, 1); tasks.get(0).setStatus(COMPLETED); @@ -127,7 +149,7 @@ public void testListenerOnCompletedWorkflow() throws IOException, InterruptedExc List callbackMessages = queueDAO.pollMessages(CALLBACK_QUEUE, 1, 200); queueDAO.ack(CALLBACK_QUEUE, callbackMessages.get(0).getId()); - WorkflowSummary payload = mapper.readValue(callbackMessages.get(0).getPayload(), WorkflowSummary.class); + WorkflowSummary payload = objectMapper.readValue(callbackMessages.get(0).getPayload(), WorkflowSummary.class); assertEquals(id, callbackMessages.get(0).getId()); assertEquals(LINEAR_WORKFLOW_T1_T2, payload.getWorkflowType()); @@ -135,8 +157,7 @@ public void testListenerOnCompletedWorkflow() throws IOException, InterruptedExc assertEquals(Workflow.WorkflowStatus.COMPLETED, payload.getStatus()); } - - + @SuppressWarnings("BusyWait") private void checkIfWorkflowIsCompleted(String id) throws InterruptedException { int statusRetrieveAttempts = 0; while (workflowExecutor.getWorkflow(id, false).getStatus() != 
Workflow.WorkflowStatus.COMPLETED) { @@ -148,28 +169,8 @@ private void checkIfWorkflowIsCompleted(String id) throws InterruptedException { } } - @After - public void clearWorkflows() { - List workflows = metadataService.getWorkflowDefs().stream() - .map(WorkflowDef::getName) - .collect(Collectors.toList()); - for (String wfName : workflows) { - List running = workflowExecutionService.getRunningWorkflows(wfName, WORKFLOW_VERSION); - for (String wfid : running) { - workflowExecutor.terminateWorkflow(wfid, "cleanup"); - } - } - queueDAO.queuesDetail().keySet().forEach(queueDAO::flush); - } - - - private String startOrLoadWorkflowExecution(String workflowName, int version, String correlationId, Map input, String event, Map taskToDomain) { - return startOrLoadWorkflowExecution(workflowName, workflowName, version, correlationId, input, event, taskToDomain); - } - - String startOrLoadWorkflowExecution(String snapshotResourceName, String workflowName, int version, String correlationId, Map input, String event, Map taskToDomain) { - return workflowExecutor.startWorkflow(workflowName, version, correlationId, input, null, event, taskToDomain); + private String startOrLoadWorkflowExecution(String workflowName, int version, String correlationId, + Map input) { + return workflowExecutor.startWorkflow(workflowName, version, correlationId, input, null); } - - } diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/utils/MockExternalPayloadStorage.java b/test-harness/src/test/java/com/netflix/conductor/test/utils/MockExternalPayloadStorage.java similarity index 88% rename from test-harness/src/test/java/com/netflix/conductor/tests/utils/MockExternalPayloadStorage.java rename to test-harness/src/test/java/com/netflix/conductor/test/utils/MockExternalPayloadStorage.java index 8914b93065..9d78a53f1e 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/utils/MockExternalPayloadStorage.java +++ b/test-harness/src/test/java/com/netflix/conductor/test/utils/MockExternalPayloadStorage.java @@ -10,11 +10,17 @@ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. 
*/ -package com.netflix.conductor.tests.utils; +package com.netflix.conductor.test.utils; import com.fasterxml.jackson.databind.ObjectMapper; import com.netflix.conductor.common.run.ExternalStorageLocation; import com.netflix.conductor.common.utils.ExternalPayloadStorage; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.stereotype.Component; + import java.io.ByteArrayInputStream; import java.io.File; import java.io.FileOutputStream; @@ -24,10 +30,8 @@ import java.util.HashMap; import java.util.Map; -import com.netflix.conductor.common.utils.JsonMapperProvider; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - +@ConditionalOnProperty(prefix = "workflow", name = "external.payload.storage", havingValue = "mock") +@Component public class MockExternalPayloadStorage implements ExternalPayloadStorage { private static final Logger LOGGER = LoggerFactory.getLogger(MockExternalPayloadStorage.class); @@ -40,7 +44,13 @@ public class MockExternalPayloadStorage implements ExternalPayloadStorage { public static final String TEMP_FILE_PATH = "/input.json"; - private final ObjectMapper objectMapper = new JsonMapperProvider().get(); + private final ObjectMapper objectMapper; + + @Autowired + public MockExternalPayloadStorage(ObjectMapper objectMapper) { + this.objectMapper = objectMapper; + LOGGER.info("Initialized {}", this.getClass().getCanonicalName()); + } @Override public ExternalStorageLocation getLocation(Operation operation, PayloadType payloadType, String path) { diff --git a/test-harness/src/test/java/com/netflix/conductor/test/utils/UserTask.java b/test-harness/src/test/java/com/netflix/conductor/test/utils/UserTask.java new file mode 100644 index 0000000000..8c89445f3e --- /dev/null +++ b/test-harness/src/test/java/com/netflix/conductor/test/utils/UserTask.java @@ -0,0 +1,76 @@ +/* + * Copyright 2020 Netflix, Inc. + *
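Note: MockExternalPayloadStorage is now a @Component guarded by @ConditionalOnProperty, so it only joins the context when workflow.external.payload.storage=mock is set. A sketch of how a test might opt in, using the same inline-properties style as the other tests in this change; the class name is hypothetical:

    // Illustration only: supplying the guard property enables the mock storage bean.
    import org.junit.runner.RunWith;
    import org.springframework.boot.test.context.SpringBootTest;
    import org.springframework.test.context.junit4.SpringRunner;

    @RunWith(SpringRunner.class)
    @SpringBootTest(properties = "workflow.external.payload.storage=mock")
    public abstract class ExternalPayloadStorageEnabledTest {
        // tests that exercise external payload storage would extend this class
    }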

+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *

    + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package com.netflix.conductor.test.utils; + +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.util.concurrent.Uninterruptibles; +import com.netflix.conductor.common.metadata.tasks.Task; +import com.netflix.conductor.common.metadata.tasks.Task.Status; +import com.netflix.conductor.common.run.Workflow; +import com.netflix.conductor.core.execution.WorkflowExecutor; +import com.netflix.conductor.core.execution.tasks.WorkflowSystemTask; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +@Component +public class UserTask extends WorkflowSystemTask { + + private static final Logger LOGGER = LoggerFactory.getLogger(UserTask.class); + + public static final String NAME = "USER_TASK"; + + private final ObjectMapper objectMapper; + + private static final TypeReference>>> mapStringListObjects = + new TypeReference>>>() { + }; + + @Autowired + public UserTask(ObjectMapper objectMapper) { + super(NAME); + this.objectMapper = objectMapper; + LOGGER.info("Initialized system task - {}", getClass().getCanonicalName()); + } + + @Override + public void start(Workflow workflow, Task task, WorkflowExecutor executor) { + Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); + + if (task.getWorkflowTask().isAsyncComplete()) { + task.setStatus(Status.IN_PROGRESS); + } else { + Map>> map = objectMapper + .convertValue(task.getInputData(), mapStringListObjects); + Map output = new HashMap<>(); + Map> defaultLargeInput = new HashMap<>(); + defaultLargeInput.put("TEST_SAMPLE", Collections.singletonList("testDefault")); + output.put("size", map.getOrDefault("largeInput", defaultLargeInput).get("TEST_SAMPLE").size()); + task.setOutputData(output); + task.setStatus(Status.COMPLETED); + } + } + + @Override + public boolean isAsync() { + return true; + } +} diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractWorkflowServiceTest.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractWorkflowServiceTest.java deleted file mode 100644 index a39f6ec522..0000000000 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractWorkflowServiceTest.java +++ /dev/null @@ -1,7355 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *
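Note: UserTask converts its input into Map<String, Map<String, List<Object>>> and reports the size of largeInput.TEST_SAMPLE, falling back to a single-element default list when largeInput is absent. A small sketch of an input map that exercises that path; the values are illustrative:

    // Illustrative input for the UserTask above: with this map, start() would set
    // outputData.size = 3; omitting "largeInput" would yield the default size of 1.
    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    class UserTaskInputExample {

        static Map<String, Object> sampleInput() {
            Map<String, List<Object>> largeInput = new HashMap<>();
            largeInput.put("TEST_SAMPLE", Arrays.<Object>asList("a", "b", "c"));

            Map<String, Object> taskInput = new HashMap<>();
            taskInput.put("largeInput", largeInput);
            return taskInput;
        }
    }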

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *

    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.tests.integration; - -import static com.netflix.conductor.common.metadata.tasks.Task.Status.COMPLETED; -import static com.netflix.conductor.common.metadata.tasks.Task.Status.COMPLETED_WITH_ERRORS; -import static com.netflix.conductor.common.metadata.tasks.Task.Status.FAILED; -import static com.netflix.conductor.common.metadata.tasks.Task.Status.IN_PROGRESS; -import static com.netflix.conductor.common.metadata.tasks.Task.Status.SCHEDULED; -import static com.netflix.conductor.common.metadata.tasks.Task.Status.TIMED_OUT; -import static com.netflix.conductor.common.metadata.workflow.TaskType.DECISION; -import static com.netflix.conductor.common.metadata.workflow.TaskType.SUB_WORKFLOW; -import static com.netflix.conductor.common.run.Workflow.WorkflowStatus.RUNNING; -import static com.netflix.conductor.common.run.Workflow.WorkflowStatus.TERMINATED; -import static com.netflix.conductor.tests.utils.MockExternalPayloadStorage.INITIAL_WORKFLOW_INPUT_PATH; -import static com.netflix.conductor.tests.utils.MockExternalPayloadStorage.INPUT_PAYLOAD_PATH; -import static com.netflix.conductor.tests.utils.MockExternalPayloadStorage.TASK_OUTPUT_PATH; -import static com.netflix.conductor.tests.utils.MockExternalPayloadStorage.TEMP_FILE_PATH; -import static com.netflix.conductor.tests.utils.MockExternalPayloadStorage.WORKFLOW_OUTPUT_PATH; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.util.concurrent.Uninterruptibles; -import com.netflix.conductor.common.metadata.tasks.PollData; -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.Task.Status; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskDef.RetryLogic; -import com.netflix.conductor.common.metadata.tasks.TaskDef.TimeoutPolicy; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.common.metadata.workflow.DynamicForkJoinTaskList; -import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest; -import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams; -import com.netflix.conductor.common.metadata.workflow.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.common.run.Workflow.WorkflowStatus; -import com.netflix.conductor.common.utils.JsonMapperProvider; -import com.netflix.conductor.common.utils.TaskUtils; -import com.netflix.conductor.core.WorkflowContext; -import com.netflix.conductor.core.execution.ApplicationException; -import com.netflix.conductor.core.execution.WorkflowExecutor; -import com.netflix.conductor.core.execution.WorkflowRepairService; -import com.netflix.conductor.core.execution.WorkflowSweeper; -import com.netflix.conductor.core.execution.tasks.SubWorkflow; -import 
com.netflix.conductor.core.execution.tasks.Terminate; -import com.netflix.conductor.core.metadata.MetadataMapperService; -import com.netflix.conductor.dao.QueueDAO; -import com.netflix.conductor.service.ExecutionService; -import com.netflix.conductor.service.MetadataService; -import com.netflix.conductor.tests.utils.UserTask; -import java.io.FileOutputStream; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.UUID; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.Future; -import java.util.concurrent.TimeUnit; -import java.util.function.Predicate; -import java.util.stream.Collectors; -import javax.inject.Inject; -import org.apache.commons.lang3.StringUtils; -import org.junit.After; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.FixMethodOrder; -import org.junit.Ignore; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.junit.runners.MethodSorters; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@FixMethodOrder(MethodSorters.NAME_ASCENDING) -public abstract class AbstractWorkflowServiceTest { - - private static final Logger logger = LoggerFactory.getLogger(AbstractWorkflowServiceTest.class); - - private static final String COND_TASK_WF = "ConditionalTaskWF"; - private static final String DECISION_WF = "DecisionWorkflow"; - - private static final String FORK_JOIN_NESTED_WF = "FanInOutNestedTest"; - - private static final String FORK_JOIN_WF = "FanInOutTest"; - private static final String FORK_JOIN_DECISION_WF = "ForkConditionalTest"; - - private static final String DO_WHILE_WF = "DoWhileTest"; - - private static final String DYNAMIC_FORK_JOIN_WF = "DynamicFanInOutTest"; - - private static final String DYNAMIC_FORK_JOIN_WF_LEGACY = "DynamicFanInOutTestLegacy"; - - private static final int RETRY_COUNT = 1; - private static final String JUNIT_TEST_WF_NON_RESTARTABLE = "junit_test_wf_non_restartable"; - private static final String WF_WITH_SUB_WF = "WorkflowWithSubWorkflow"; - private static final String WORKFLOW_WITH_OPTIONAL_TASK = "optional_task_wf"; - - @Rule - public final ExpectedException expectedException = ExpectedException.none(); - - @Inject - protected ExecutionService workflowExecutionService; - - @Inject - protected SubWorkflow subworkflow; - - @Inject - protected UserTask userTask; - - @Inject - protected MetadataService metadataService; - - @Inject - protected WorkflowSweeper workflowSweeper; - - @Inject - protected QueueDAO queueDAO; - - @Inject - protected WorkflowExecutor workflowExecutor; - - @Inject - protected WorkflowRepairService workflowRepairService; - - @Inject - protected MetadataMapperService metadataMapperService; - - private ObjectMapper objectMapper = new JsonMapperProvider().get(); - - SubWorkflow dummySubWorkflowSystemTask = new SubWorkflow(objectMapper); - - private static boolean registered; - - private static List taskDefs; - - private static final String LINEAR_WORKFLOW_T1_T2 = "junit_test_wf"; - - private static final String LINEAR_WORKFLOW_T1_T2_SW = "junit_test_wf_sw"; - - private static final String WORKFLOW_MULTI_LEVEL_SW = "junit_test_multi_level_sw"; - - private static final String WORKFLOW_FORK_JOIN_OPTIONAL_SW = "junit_test_fork_join_optional_sw"; - - private static final String LONG_RUNNING = "longRunningWf"; - - private static final String 
TEST_WORKFLOW = "junit_test_wf3"; - - private static final String CONDITIONAL_SYSTEM_WORKFLOW = "junit_conditional_http_wf"; - private static final String WF_T1_SWF_T2 = "junit_t1_sw_t2_wf"; - - @BeforeClass - public static void setup() { - registered = false; - } - - @Before - public void init() { - System.setProperty("EC2_REGION", "us-east-1"); - System.setProperty("EC2_AVAILABILITY_ZONE", "us-east-1c"); - if (registered) { - return; - } - - WorkflowContext.set(new WorkflowContext("junit_app")); - for (int i = 0; i < 21; i++) { - - String name = "junit_task_" + i; - if (notFoundSafeGetTaskDef(name) != null) { - continue; - } - - TaskDef task = new TaskDef(); - task.setName(name); - task.setTimeoutSeconds(120); - task.setRetryCount(RETRY_COUNT); - metadataService.registerTaskDef(Collections.singletonList(task)); - } - - for (int i = 0; i < 5; i++) { - - String name = "junit_task_0_RT_" + i; - if (notFoundSafeGetTaskDef(name) != null) { - continue; - } - - TaskDef task = new TaskDef(); - task.setName(name); - task.setTimeoutSeconds(120); - task.setRetryCount(0); - metadataService.registerTaskDef(Collections.singletonList(task)); - } - - TaskDef task = new TaskDef(); - task.setName("short_time_out"); - task.setTimeoutSeconds(5); - task.setRetryCount(RETRY_COUNT); - metadataService.registerTaskDef(Collections.singletonList(task)); - - WorkflowDef def = new WorkflowDef(); - def.setName(LINEAR_WORKFLOW_T1_T2); - def.setDescription(def.getName()); - def.setVersion(1); - def.setInputParameters(Arrays.asList("param1", "param2")); - Map outputParameters = new HashMap<>(); - outputParameters.put("o1", "${workflow.input.param1}"); - outputParameters.put("o2", "${t2.output.uuid}"); - outputParameters.put("o3", "${t1.output.op}"); - def.setOutputParameters(outputParameters); - def.setFailureWorkflow("$workflow.input.failureWfName"); - def.setSchemaVersion(2); - LinkedList wftasks = new LinkedList<>(); - - WorkflowTask wft1 = new WorkflowTask(); - wft1.setName("junit_task_1"); - Map ip1 = new HashMap<>(); - ip1.put("p1", "${workflow.input.param1}"); - ip1.put("p2", "${workflow.input.param2}"); - ip1.put("someNullKey", null); - wft1.setInputParameters(ip1); - wft1.setTaskReferenceName("t1"); - - WorkflowTask wft2 = new WorkflowTask(); - wft2.setName("junit_task_2"); - Map ip2 = new HashMap<>(); - ip2.put("tp1", "${workflow.input.param1}"); - ip2.put("tp2", "${t1.output.op}"); - wft2.setInputParameters(ip2); - wft2.setTaskReferenceName("t2"); - - wftasks.add(wft1); - wftasks.add(wft2); - def.setTasks(wftasks); - - WorkflowTask wft3 = new WorkflowTask(); - wft3.setName("junit_task_3"); - Map ip3 = new HashMap<>(); - ip3.put("tp1", "${workflow.input.param1}"); - ip3.put("tp2", "${t1.output.op}"); - wft3.setInputParameters(ip3); - wft3.setTaskReferenceName("t3"); - - WorkflowDef def2 = new WorkflowDef(); - def2.setName(TEST_WORKFLOW); - def2.setDescription(def2.getName()); - def2.setVersion(1); - def2.setInputParameters(Arrays.asList("param1", "param2")); - LinkedList wftasks2 = new LinkedList<>(); - - wftasks2.add(wft1); - wftasks2.add(wft2); - wftasks2.add(wft3); - def2.setSchemaVersion(2); - def2.setTasks(wftasks2); - - try { - - WorkflowDef[] wdsf = new WorkflowDef[]{def, def2}; - for (WorkflowDef wd : wdsf) { - metadataService.updateWorkflowDef(wd); - } - createForkJoinWorkflow(); - def.setName(LONG_RUNNING); - metadataService.updateWorkflowDef(def); - } catch (Exception e) { - } - - taskDefs = metadataService.getTaskDefs(); - registered = true; - } - - private TaskDef notFoundSafeGetTaskDef(String 
name) { - try { - return metadataService.getTaskDef(name); - } catch (ApplicationException e) { - if (e.getCode() == ApplicationException.Code.NOT_FOUND) { - return null; - } else { - throw e; - } - } - } - - @Test - public void testWorkflowWithNoTasks() { - - WorkflowDef empty = new WorkflowDef(); - empty.setName("empty_workflow"); - empty.setSchemaVersion(2); - metadataService.registerWorkflowDef(empty); - - String id = startOrLoadWorkflowExecution(empty.getName(), 1, "testWorkflowWithNoTasks", new HashMap<>(), null, null); - assertNotNull(id); - Workflow workflow = workflowExecutionService.getExecutionStatus(id, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); - assertEquals(0, workflow.getTasks().size()); - } - - @Test - public void testTaskDefTemplate() throws Exception { - System.setProperty("STACK2", "test_stack"); - TaskDef templatedTask = new TaskDef(); - templatedTask.setName("templated_task"); - Map httpRequest = new HashMap<>(); - httpRequest.put("method", "GET"); - httpRequest.put("vipStack", "${STACK2}"); - httpRequest.put("uri", "/get/something"); - Map body = new HashMap<>(); - body.put("inputPaths", Arrays.asList("${workflow.input.path1}", "${workflow.input.path2}")); - body.put("requestDetails", "${workflow.input.requestDetails}"); - body.put("outputPath", "${workflow.input.outputPath}"); - httpRequest.put("body", body); - templatedTask.getInputTemplate().put("http_request", httpRequest); - metadataService.registerTaskDef(Arrays.asList(templatedTask)); - - WorkflowDef templateWf = new WorkflowDef(); - templateWf.setName("template_workflow"); - WorkflowTask wft = new WorkflowTask(); - wft.setName(templatedTask.getName()); - wft.setWorkflowTaskType(TaskType.SIMPLE); - wft.setTaskReferenceName("t0"); - templateWf.getTasks().add(wft); - templateWf.setSchemaVersion(2); - metadataService.registerWorkflowDef(templateWf); - - Map requestDetails = new HashMap<>(); - requestDetails.put("key1", "value1"); - requestDetails.put("key2", 42); - - Map input = new HashMap<>(); - input.put("path1", "file://path1"); - input.put("path2", "file://path2"); - input.put("outputPath", "s3://bucket/outputPath"); - input.put("requestDetails", requestDetails); - - String id = startOrLoadWorkflowExecution(templateWf.getName(), 1, "testTaskDefTemplate", input, null, null); - assertNotNull(id); - Workflow workflow = workflowExecutionService.getExecutionStatus(id, true); - assertNotNull(workflow); - assertTrue(workflow.getReasonForIncompletion(), !workflow.getStatus().isTerminal()); - assertEquals(1, workflow.getTasks().size()); - Task task = workflow.getTasks().get(0); - Map taskInput = task.getInputData(); - assertNotNull(taskInput); - assertTrue(taskInput.containsKey("http_request")); - assertTrue(taskInput.get("http_request") instanceof Map); - - //Use the commented sysout to get the string value - //System.out.println(om.writeValueAsString(om.writeValueAsString(taskInput))); - String expected = "{\"http_request\":{\"method\":\"GET\",\"vipStack\":\"test_stack\",\"body\":{\"requestDetails\":{\"key1\":\"value1\",\"key2\":42},\"outputPath\":\"s3://bucket/outputPath\",\"inputPaths\":[\"file://path1\",\"file://path2\"]},\"uri\":\"/get/something\"}}"; - assertEquals(expected, objectMapper.writeValueAsString(taskInput)); - } - - @Test - public void testKafkaTaskDefTemplateSuccess() throws Exception { - - try { - registerKafkaWorkflow(); - } catch (ApplicationException e) { - - } - - Map input = getKafkaInput(); - String workflowInstanceId = 
startOrLoadWorkflowExecution("template_kafka_workflow", 1, "testTaskDefTemplate", input, null, null); - - assertNotNull(workflowInstanceId); - - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true); - - assertNotNull(workflow); - assertTrue(workflow.getReasonForIncompletion(), !workflow.getStatus().isTerminal()); - assertEquals(1, workflow.getTasks().size()); - - Task task = workflow.getTasks().get(0); - Map taskInput = task.getInputData(); - - assertNotNull(taskInput); - assertTrue(taskInput.containsKey("kafka_request")); - assertTrue(taskInput.get("kafka_request") instanceof Map); - - String expected = "{\"kafka_request\":{\"topic\":\"test_kafka_topic\",\"bootStrapServers\":\"localhost:9092\",\"value\":{\"requestDetails\":{\"key1\":\"value1\",\"key2\":42},\"outputPath\":\"s3://bucket/outputPath\",\"inputPaths\":[\"file://path1\",\"file://path2\"]}}}"; - - assertEquals(expected, objectMapper.writeValueAsString(taskInput)); - - TaskResult taskResult = new TaskResult(task); - - taskResult.setStatus(TaskResult.Status.COMPLETED); - - - // Polling for the first task - Task task1 = workflowExecutionService.poll("KAFKA_PUBLISH", "test"); - assertNotNull(task1); - - assertTrue(workflowExecutionService.ackTaskReceived(task1.getTaskId())); - assertEquals(workflowInstanceId, task1.getWorkflowInstanceId()); - - workflowExecutionService.updateTask(taskResult); - - workflowExecutor.decide(workflowInstanceId); - - workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); - } - - @Test - public void testKafkaTaskDefTemplateFailure() throws Exception { - - try { - registerKafkaWorkflow(); - } catch (ApplicationException e) { - - } - Map input = getKafkaInput(); - String workflowInstanceId = startOrLoadWorkflowExecution("template_kafka_workflow", 1, "testTaskDefTemplate", input, null, null); - - assertNotNull(workflowInstanceId); - - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true); - - assertNotNull(workflow); - assertTrue(workflow.getReasonForIncompletion(), !workflow.getStatus().isTerminal()); - assertEquals(1, workflow.getTasks().size()); - - Task task = workflow.getTasks().get(0); - Map taskInput = task.getInputData(); - - assertNotNull(taskInput); - assertTrue(taskInput.containsKey("kafka_request")); - assertTrue(taskInput.get("kafka_request") instanceof Map); - - String expected = "{\"kafka_request\":{\"topic\":\"test_kafka_topic\",\"bootStrapServers\":\"localhost:9092\",\"value\":{\"requestDetails\":{\"key1\":\"value1\",\"key2\":42},\"outputPath\":\"s3://bucket/outputPath\",\"inputPaths\":[\"file://path1\",\"file://path2\"]}}}"; - - assertEquals(expected, objectMapper.writeValueAsString(taskInput)); - - TaskResult taskResult = new TaskResult(task); - taskResult.setReasonForIncompletion("NON TRANSIENT ERROR OCCURRED: An integration point required to complete the task is down"); - taskResult.setStatus(TaskResult.Status.FAILED); - taskResult.addOutputData("TERMINAL_ERROR", "Integration endpoint down: FOOBAR"); - taskResult.addOutputData("ErrorMessage", "There was a terminal error"); - - - // Polling for the first task - Task task1 = workflowExecutionService.poll("KAFKA_PUBLISH", "test"); - assertNotNull(task1); - - assertTrue(workflowExecutionService.ackTaskReceived(task1.getTaskId())); - assertEquals(workflowInstanceId, task1.getWorkflowInstanceId()); - - workflowExecutionService.updateTask(taskResult); - - 
workflowExecutor.decide(workflowInstanceId); - - workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.FAILED, workflow.getStatus()); - } - - private Map getKafkaInput() { - Map requestDetails = new HashMap<>(); - requestDetails.put("key1", "value1"); - requestDetails.put("key2", 42); - - Map input = new HashMap<>(); - input.put("path1", "file://path1"); - input.put("path2", "file://path2"); - input.put("outputPath", "s3://bucket/outputPath"); - input.put("requestDetails", requestDetails); - return input; - } - - private void registerKafkaWorkflow() { - System.setProperty("STACK_KAFKA", "test_kafka_topic"); - TaskDef templatedTask = new TaskDef(); - templatedTask.setName("templated_kafka_task"); - templatedTask.setRetryCount(0); - Map kafkaRequest = new HashMap<>(); - kafkaRequest.put("topic", "${STACK_KAFKA}"); - kafkaRequest.put("bootStrapServers", "localhost:9092"); - - Map value = new HashMap<>(); - value.put("inputPaths", Arrays.asList("${workflow.input.path1}", "${workflow.input.path2}")); - value.put("requestDetails", "${workflow.input.requestDetails}"); - value.put("outputPath", "${workflow.input.outputPath}"); - kafkaRequest.put("value", value); - templatedTask.getInputTemplate().put("kafka_request", kafkaRequest); - metadataService.registerTaskDef(Arrays.asList(templatedTask)); - - WorkflowDef templateWf = new WorkflowDef(); - - templateWf.setName("template_kafka_workflow"); - WorkflowTask wft = new WorkflowTask(); - wft.setName(templatedTask.getName()); - wft.setWorkflowTaskType(TaskType.KAFKA_PUBLISH); - wft.setTaskReferenceName("t0"); - templateWf.getTasks().add(wft); - templateWf.setSchemaVersion(2); - metadataService.registerWorkflowDef(templateWf); - } - - @Test - public void testWorkflowSchemaVersion() { - WorkflowDef ver2 = new WorkflowDef(); - ver2.setSchemaVersion(2); - ver2.setName("Test_schema_version2"); - ver2.setVersion(1); - - WorkflowDef ver1 = new WorkflowDef(); - ver1.setName("Test_schema_version1"); - ver1.setVersion(1); - - metadataService.updateWorkflowDef(ver1); - metadataService.updateWorkflowDef(ver2); - - WorkflowDef found = metadataService.getWorkflowDef(ver2.getName(), 1); - assertEquals(2, found.getSchemaVersion()); - - WorkflowDef found1 = metadataService.getWorkflowDef(ver1.getName(), 1); - assertEquals(2, found1.getSchemaVersion()); - } - - @SuppressWarnings("ConstantConditions") - @Test - public void testForkJoin() throws Exception { - createForkJoinWorkflow(); - - String taskName = "junit_task_1"; - TaskDef taskDef = notFoundSafeGetTaskDef(taskName); - taskDef.setRetryCount(0); - taskDef.setTimeoutSeconds(0); - metadataService.updateTaskDef(taskDef); - - taskName = "junit_task_2"; - taskDef = notFoundSafeGetTaskDef(taskName); - taskDef.setRetryCount(0); - taskDef.setTimeoutSeconds(0); - metadataService.updateTaskDef(taskDef); - - taskName = "junit_task_3"; - taskDef = notFoundSafeGetTaskDef(taskName); - taskDef.setRetryCount(0); - taskDef.setTimeoutSeconds(0); - metadataService.updateTaskDef(taskDef); - - taskName = "junit_task_4"; - taskDef = notFoundSafeGetTaskDef(taskName); - taskDef.setRetryCount(0); - taskDef.setTimeoutSeconds(0); - metadataService.updateTaskDef(taskDef); - - Map input = new HashMap<>(); - String workflowId = startOrLoadWorkflowExecution(FORK_JOIN_WF, 1, "fanouttest", input, null, null); - assertNotNull(workflowId); - printTaskStatuses(workflowId, "initiated"); - - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, 
true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - assertEquals(4, workflow.getTasks().size()); - - Task task1 = workflowExecutionService.poll("junit_task_1", "test"); - assertNotNull(task1); - assertTrue(workflowExecutionService.ackTaskReceived(task1.getTaskId())); - - Task task2 = workflowExecutionService.poll("junit_task_2", "test"); - assertNotNull(task2); - assertTrue(workflowExecutionService.ackTaskReceived(task2.getTaskId())); - - Task task3 = workflowExecutionService.poll("junit_task_3", "test"); - assertNull(task3); - - task1.setStatus(COMPLETED); - workflowExecutionService.updateTask(task1); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals("Found " + workflow.getTasks(), RUNNING, workflow.getStatus()); - printTaskStatuses(workflow, "T1 completed"); - - task3 = workflowExecutionService.poll("junit_task_3", "test"); - assertNotNull(task3); - - task2.setStatus(COMPLETED); - task3.setStatus(COMPLETED); - - ExecutorService executorService = Executors.newFixedThreadPool(2); - Future future1 = executorService.submit(() -> { - try { - workflowExecutionService.updateTask(task2); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - future1.get(); - - final Task _t3 = task3; - Future future2 = executorService.submit(() -> { - try { - workflowExecutionService.updateTask(_t3); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - future2.get(); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - printTaskStatuses(workflow, "T2 T3 completed"); - assertEquals("Found " + workflow.getTasks(), RUNNING, workflow.getStatus()); - assertEquals("Found " + workflow.getTasks().stream().map(Task::getTaskType).collect(Collectors.toList()), 6, workflow.getTasks().size()); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals("Found " + workflow.getTasks(), RUNNING, workflow.getStatus()); - assertTrue("Found " + workflow.getTasks().stream().map(t -> t.getReferenceTaskName() + "." 
+ t.getStatus()).collect(Collectors.toList()), workflow.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t4"))); - - Task t4 = workflowExecutionService.poll("junit_task_4", "test"); - assertNotNull(t4); - t4.setStatus(COMPLETED); - workflowExecutionService.updateTask(t4); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals("Found " + workflow.getTasks(), WorkflowStatus.COMPLETED, workflow.getStatus()); - printTaskStatuses(workflow, "All completed"); - } - - @Test - public void testDoWhileSingleIteration() throws Exception { - try { - createDoWhileWorkflowWithIteration(1, false, true); - } catch (Exception e) { - } - TaskDef taskDef = new TaskDef(); - taskDef.setName("http1"); - taskDef.setTimeoutSeconds(2); - taskDef.setRetryCount(1); - taskDef.setTimeoutPolicy(TimeoutPolicy.RETRY); - taskDef.setRetryDelaySeconds(10); - metadataService.registerTaskDef(Arrays.asList(taskDef)); - - TaskDef taskDef2 = new TaskDef(); - taskDef2.setName("http0"); - taskDef2.setTimeoutSeconds(2); - taskDef2.setRetryCount(1); - taskDef2.setTimeoutPolicy(TimeoutPolicy.RETRY); - taskDef2.setRetryDelaySeconds(10); - metadataService.registerTaskDef(Arrays.asList(taskDef2)); - - TaskDef taskDef1 = new TaskDef(); - taskDef1.setName("http2"); - taskDef1.setTimeoutSeconds(2); - taskDef1.setRetryCount(1); - taskDef1.setTimeoutPolicy(TimeoutPolicy.RETRY); - taskDef1.setRetryDelaySeconds(10); - metadataService.registerTaskDef(Arrays.asList(taskDef1)); - - Map input = new HashMap<>(); - String workflowId = startOrLoadWorkflowExecution(DO_WHILE_WF + "_1", 1, "looptest", input, null, null); - System.out.println("testDoWhile.wfid=" + workflowId); - printTaskStatuses(workflowId, "initiated"); - - Task task = workflowExecutionService.poll("HTTP", "test"); - assertNotNull(task); - assertTrue(task.getReferenceTaskName().endsWith(TaskUtils.getLoopOverTaskRefNameSuffix(task.getIteration()))); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - task = workflowExecutionService.poll("FORK_JOIN", "test"); - assertNull(task); // fork task is completed - - task = workflowExecutionService.poll("HTTP", "test"); - assertNotNull(task); - assertTrue(task.getReferenceTaskName().endsWith(TaskUtils.getLoopOverTaskRefNameSuffix(task.getIteration()))); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - task = workflowExecutionService.poll("HTTP", "test"); - assertNotNull(task); - assertTrue(task.getReferenceTaskName().endsWith(TaskUtils.getLoopOverTaskRefNameSuffix(task.getIteration()))); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - task = workflowExecutionService.poll("JOIN", "test"); - assertNull(task); // Both HTTP task completed. 
- - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals("Found " + workflow.getTasks(), WorkflowStatus.COMPLETED, workflow.getStatus()); - printTaskStatuses(workflow, "All completed"); - } - - @Test - public void testDoWhileTwoIteration() throws Exception { - try { - createDoWhileWorkflowWithIteration(2, false, true); - } catch (Exception e) { - } - - TaskDef taskDef = new TaskDef(); - taskDef.setName("http1"); - taskDef.setTimeoutSeconds(5); - taskDef.setRetryCount(1); - taskDef.setTimeoutPolicy(TimeoutPolicy.RETRY); - taskDef.setRetryDelaySeconds(10); - metadataService.registerTaskDef(Arrays.asList(taskDef)); - - TaskDef taskDef2 = new TaskDef(); - taskDef2.setName("http0"); - taskDef2.setTimeoutSeconds(5); - taskDef2.setRetryCount(1); - taskDef2.setTimeoutPolicy(TimeoutPolicy.RETRY); - taskDef2.setRetryDelaySeconds(10); - metadataService.registerTaskDef(Arrays.asList(taskDef2)); - - TaskDef taskDef1 = new TaskDef(); - taskDef1.setName("http2"); - taskDef1.setTimeoutSeconds(5); - taskDef1.setRetryCount(1); - taskDef1.setTimeoutPolicy(TimeoutPolicy.RETRY); - taskDef1.setRetryDelaySeconds(10); - metadataService.registerTaskDef(Arrays.asList(taskDef1)); - - TaskDef taskDef3 = new TaskDef(); - taskDef1.setName("http3"); - taskDef1.setTimeoutSeconds(5); - taskDef1.setRetryCount(1); - taskDef1.setTimeoutPolicy(TimeoutPolicy.RETRY); - taskDef1.setRetryDelaySeconds(10); - metadataService.registerTaskDef(Arrays.asList(taskDef3)); - - Map input = new HashMap<>(); - String workflowId = startOrLoadWorkflowExecution(DO_WHILE_WF + "_2", 1, "looptest", input, null, null); - System.out.println("testDoWhile.wfid=" + workflowId); - printTaskStatuses(workflowId, "initiated"); - - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals("Found " + workflow.getTasks(), RUNNING, workflow.getStatus()); - - Task task = workflowExecutionService.poll("HTTP", "test"); - assertNotNull(task); - assertTrue(task.getReferenceTaskName().endsWith(TaskUtils.getLoopOverTaskRefNameSuffix(task.getIteration()))); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - task = workflowExecutionService.poll("FORK_JOIN", "test"); - assertNull(task); // fork task is completed - - task = workflowExecutionService.poll("HTTP", "test"); - assertNotNull(task); - assertTrue(task.getReferenceTaskName().endsWith(TaskUtils.getLoopOverTaskRefNameSuffix(task.getIteration()))); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - task = workflowExecutionService.poll("HTTP", "test"); - assertNotNull(task); - assertTrue(task.getReferenceTaskName().endsWith(TaskUtils.getLoopOverTaskRefNameSuffix(task.getIteration()))); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - task = workflowExecutionService.poll("JOIN", "test"); - assertNull(task); // Both HTTP task completed. 
- - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals("Found " + workflow.getTasks(), RUNNING, workflow.getStatus()); - - task = workflowExecutionService.poll("HTTP", "test"); - assertNotNull(task); - assertTrue(task.getReferenceTaskName().endsWith(TaskUtils.getLoopOverTaskRefNameSuffix(task.getIteration()))); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - task = workflowExecutionService.poll("FORK_JOIN", "test"); - assertNull(task); // fork task is completed. - - task = workflowExecutionService.poll("HTTP", "test"); - assertNotNull(task); - assertTrue(task.getReferenceTaskName().endsWith(TaskUtils.getLoopOverTaskRefNameSuffix(task.getIteration()))); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - task = workflowExecutionService.poll("HTTP", "test"); - assertNotNull(task); - assertTrue(task.getReferenceTaskName().endsWith(TaskUtils.getLoopOverTaskRefNameSuffix(task.getIteration()))); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - task = workflowExecutionService.poll("JOIN", "test"); - assertNull(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals("Found " + workflow.getTasks(), RUNNING, workflow.getStatus()); - - task = workflowExecutionService.poll("HTTP", "test"); - assertNotNull(task); - assertTrue(task.getReferenceTaskName().endsWith(TaskUtils.getLoopOverTaskRefNameSuffix(task.getIteration()))); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals("Found " + workflow.getTasks(), WorkflowStatus.COMPLETED, workflow.getStatus()); - } - - @Test - public void testLoopConditionWithInputParamter() throws Exception { - try { - createDoWhileWorkflowWithIteration(2, true, true); - } catch (Exception e) { - } - - TaskDef taskDef = new TaskDef(); - taskDef.setName("http1"); - taskDef.setTimeoutSeconds(2); - taskDef.setRetryCount(1); - taskDef.setTimeoutPolicy(TimeoutPolicy.RETRY); - taskDef.setRetryDelaySeconds(10); - metadataService.registerTaskDef(Arrays.asList(taskDef)); - - TaskDef taskDef2 = new TaskDef(); - taskDef2.setName("http0"); - taskDef2.setTimeoutSeconds(2); - taskDef2.setRetryCount(1); - taskDef2.setTimeoutPolicy(TimeoutPolicy.RETRY); - taskDef2.setRetryDelaySeconds(10); - metadataService.registerTaskDef(Arrays.asList(taskDef2)); - - TaskDef taskDef1 = new TaskDef(); - taskDef1.setName("http2"); - taskDef1.setTimeoutSeconds(2); - taskDef1.setRetryCount(1); - taskDef1.setTimeoutPolicy(TimeoutPolicy.RETRY); - taskDef1.setRetryDelaySeconds(10); - metadataService.registerTaskDef(Arrays.asList(taskDef1)); - - Map input = new HashMap<>(); - String workflowId = startOrLoadWorkflowExecution(DO_WHILE_WF + "_3", 1, "looptest", input, null, null); - System.out.println("testDoWhile.wfid=" + workflowId); - printTaskStatuses(workflowId, "initiated"); - - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals("Found " + workflow.getTasks(), RUNNING, 
workflow.getStatus()); - - Task task = workflowExecutionService.poll("HTTP", "test"); - assertNotNull(task); - assertTrue(task.getReferenceTaskName().endsWith(TaskUtils.getLoopOverTaskRefNameSuffix(task.getIteration()))); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - task = workflowExecutionService.poll("FORK_JOIN", "test"); - assertNull(task); // fork task is completed - - task = workflowExecutionService.poll("HTTP", "test"); - assertNotNull(task); - assertTrue(task.getReferenceTaskName().endsWith(TaskUtils.getLoopOverTaskRefNameSuffix(task.getIteration()))); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - task = workflowExecutionService.poll("HTTP", "test"); - assertNotNull(task); - assertTrue(task.getReferenceTaskName().endsWith(TaskUtils.getLoopOverTaskRefNameSuffix(task.getIteration()))); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - task = workflowExecutionService.poll("JOIN", "test"); - assertNull(task); // Both HTTP task completed. - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals("Found " + workflow.getTasks(), WorkflowStatus.COMPLETED, workflow.getStatus()); - } - - @Test - public void testLoopConditionWithInputParamterWithDef() throws Exception { - testLoopConditionWithInputParamter(true); - } - - @Test - public void testLoopConditionWithInputParamterNoDef() throws Exception { - testLoopConditionWithInputParamter(false); - } - - private void testLoopConditionWithInputParamter(boolean useDef) throws Exception { - try { - createDoWhileWorkflowWithIteration(2, true, useDef); - } catch (Exception e) { - } - - TaskDef taskDef = new TaskDef(); - taskDef.setName("http1"); - taskDef.setTimeoutSeconds(2); - taskDef.setRetryCount(1); - taskDef.setTimeoutPolicy(TimeoutPolicy.RETRY); - taskDef.setRetryDelaySeconds(10); - metadataService.registerTaskDef(Arrays.asList(taskDef)); - - TaskDef taskDef2 = new TaskDef(); - taskDef2.setName("http0"); - taskDef2.setTimeoutSeconds(2); - taskDef2.setRetryCount(1); - taskDef2.setTimeoutPolicy(TimeoutPolicy.RETRY); - taskDef2.setRetryDelaySeconds(10); - metadataService.registerTaskDef(Arrays.asList(taskDef2)); - - TaskDef taskDef1 = new TaskDef(); - taskDef1.setName("http2"); - taskDef1.setTimeoutSeconds(2); - taskDef1.setRetryCount(1); - taskDef1.setTimeoutPolicy(TimeoutPolicy.RETRY); - taskDef1.setRetryDelaySeconds(10); - metadataService.registerTaskDef(Arrays.asList(taskDef1)); - - Map input = new HashMap<>(); - String workflowId = startOrLoadWorkflowExecution(DO_WHILE_WF + "_3", 1, "looptest", input, null, null); - System.out.println("testDoWhile.wfid=" + workflowId); - printTaskStatuses(workflowId, "initiated"); - - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals("Found " + workflow.getTasks(), RUNNING, workflow.getStatus()); - - Task task = workflowExecutionService.poll("HTTP", "test"); - assertNotNull(task); - assertTrue(task.getReferenceTaskName().endsWith(TaskUtils.getLoopOverTaskRefNameSuffix(task.getIteration()))); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - task = 
workflowExecutionService.poll("FORK_JOIN", "test"); - assertNull(task); // fork task is completed - - task = workflowExecutionService.poll("HTTP", "test"); - assertNotNull(task); - assertTrue(task.getReferenceTaskName().endsWith(TaskUtils.getLoopOverTaskRefNameSuffix(task.getIteration()))); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - task = workflowExecutionService.poll("HTTP", "test"); - assertNotNull(task); - assertTrue(task.getReferenceTaskName().endsWith(TaskUtils.getLoopOverTaskRefNameSuffix(task.getIteration()))); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - task = workflowExecutionService.poll("JOIN", "test"); - assertNull(task); // Both HTTP task completed. - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals("Found " + workflow.getTasks(), WorkflowStatus.COMPLETED, workflow.getStatus()); - } - - @Test - public void testForkJoinNestedSchemaVersion1() { - createForkJoinNestedWorkflow(1); - - - Map input = new HashMap<>(); - input.put("case", "a"); //This should execute t16 and t19 - String wfid = startOrLoadWorkflowExecution("forkJoinNested", FORK_JOIN_NESTED_WF, 1, "fork_join_nested_test", input, null, null); - System.out.println("testForkJoinNested.wfid=" + wfid); - - Workflow wf = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(wf); - assertEquals(RUNNING, wf.getStatus()); - - assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t11"))); - assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t12"))); - assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t13"))); - assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("fork1"))); - assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("fork2"))); - assertFalse(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t16"))); - assertFalse(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t1"))); - assertFalse(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t2"))); - - - Task t1 = workflowExecutionService.poll("junit_task_11", "test"); - assertTrue(workflowExecutionService.ackTaskReceived(t1.getTaskId())); - - Task t2 = workflowExecutionService.poll("junit_task_12", "test"); - assertTrue(workflowExecutionService.ackTaskReceived(t2.getTaskId())); - - Task t3 = workflowExecutionService.poll("junit_task_13", "test"); - assertTrue(workflowExecutionService.ackTaskReceived(t3.getTaskId())); - - assertNotNull(t1); - assertNotNull(t2); - assertNotNull(t3); - - t1.setStatus(COMPLETED); - t2.setStatus(COMPLETED); - t3.setStatus(COMPLETED); - - workflowExecutionService.updateTask(t1); - workflowExecutionService.updateTask(t2); - workflowExecutionService.updateTask(t3); - Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); - - wf = workflowExecutionService.getExecutionStatus(wfid, true); - - assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t16"))); - assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t14"))); - - String[] tasks = new String[]{"junit_task_14", "junit_task_16"}; - for (String tt : tasks) { - Task polled = workflowExecutionService.poll(tt, "test"); - 
assertNotNull("poll resulted empty for task: " + tt, polled); - polled.setStatus(COMPLETED); - workflowExecutionService.updateTask(polled); - } - - wf = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(wf); - assertEquals(RUNNING, wf.getStatus()); - - assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t19"))); - assertFalse(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t15"))); //Not there yet - assertFalse(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t20"))); //Not there yet - - Task task19 = workflowExecutionService.poll("junit_task_19", "test"); - assertNotNull(task19); - task19.setStatus(COMPLETED); - workflowExecutionService.updateTask(task19); - - Task task20 = workflowExecutionService.poll("junit_task_20", "test"); - assertNotNull(task20); - task20.setStatus(COMPLETED); - workflowExecutionService.updateTask(task20); - Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); - wf = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(wf); - assertEquals(RUNNING, wf.getStatus()); - - Set pendingTasks = wf.getTasks().stream().filter(t -> !t.getStatus().isTerminal()).map(t -> t.getReferenceTaskName()).collect(Collectors.toSet()); - assertTrue("Found only this: " + pendingTasks, wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("join1"))); - - pendingTasks = wf.getTasks().stream().filter(t -> !t.getStatus().isTerminal()).map(t -> t.getReferenceTaskName()).collect(Collectors.toSet()); - assertTrue("Found only this: " + pendingTasks, wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t15"))); - - Task task15 = workflowExecutionService.poll("junit_task_15", "test"); - assertNotNull(task15); - task15.setStatus(COMPLETED); - workflowExecutionService.updateTask(task15); - - wf = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(wf); - assertEquals(WorkflowStatus.COMPLETED, wf.getStatus()); - - } - - @Test - public void testForkJoinNestedSchemaVersion2() { - createForkJoinNestedWorkflow(2); - - Map input = new HashMap<>(); - input.put("case", "a"); //This should execute t16 and t19 - String wfid = startOrLoadWorkflowExecution("forkJoinNested", FORK_JOIN_NESTED_WF, 1, "fork_join_nested_test", input, null, null); - System.out.println("testForkJoinNested.wfid=" + wfid); - - Workflow wf = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(wf); - assertEquals(RUNNING, wf.getStatus()); - - assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t11"))); - assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t12"))); - assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t13"))); - assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("fork1"))); - assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("fork2"))); - assertFalse(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t16"))); - assertFalse(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t1"))); - assertFalse(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t2"))); - - - Task t1 = workflowExecutionService.poll("junit_task_11", "test"); - assertTrue(workflowExecutionService.ackTaskReceived(t1.getTaskId())); - - Task t2 = workflowExecutionService.poll("junit_task_12", "test"); - assertTrue(workflowExecutionService.ackTaskReceived(t2.getTaskId())); 
- - Task t3 = workflowExecutionService.poll("junit_task_13", "test"); - assertTrue(workflowExecutionService.ackTaskReceived(t3.getTaskId())); - - assertNotNull(t1); - assertNotNull(t2); - assertNotNull(t3); - - t1.setStatus(COMPLETED); - t2.setStatus(COMPLETED); - t3.setStatus(COMPLETED); - - workflowExecutionService.updateTask(t1); - workflowExecutionService.updateTask(t2); - workflowExecutionService.updateTask(t3); - Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); - - wf = workflowExecutionService.getExecutionStatus(wfid, true); - - assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t16"))); - assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t14"))); - - String[] tasks = new String[]{"junit_task_14", "junit_task_16"}; - for (String tt : tasks) { - Task polled = workflowExecutionService.poll(tt, "test"); - assertNotNull("poll resulted empty for task: " + tt, polled); - polled.setStatus(COMPLETED); - workflowExecutionService.updateTask(polled); - } - - wf = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(wf); - assertEquals(RUNNING, wf.getStatus()); - - assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t19"))); - assertFalse(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t15"))); //Not there yet - assertFalse(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t20"))); //Not there yet - - Task task19 = workflowExecutionService.poll("junit_task_19", "test"); - assertNotNull(task19); - task19.setStatus(COMPLETED); - workflowExecutionService.updateTask(task19); - - Task task20 = workflowExecutionService.poll("junit_task_20", "test"); - assertNotNull(task20); - task20.setStatus(COMPLETED); - workflowExecutionService.updateTask(task20); - Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); - wf = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(wf); - assertEquals(RUNNING, wf.getStatus()); - - Set pendingTasks = wf.getTasks().stream().filter(t -> !t.getStatus().isTerminal()).map(t -> t.getReferenceTaskName()).collect(Collectors.toSet()); - assertTrue("Found only this: " + pendingTasks, wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("join1"))); - - pendingTasks = wf.getTasks().stream().filter(t -> !t.getStatus().isTerminal()).map(t -> t.getReferenceTaskName()).collect(Collectors.toSet()); - assertTrue("Found only this: " + pendingTasks, wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t15"))); - - Task task15 = workflowExecutionService.poll("junit_task_15", "test"); - assertNotNull(task15); - task15.setStatus(COMPLETED); - workflowExecutionService.updateTask(task15); - - wf = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(wf); - assertEquals(WorkflowStatus.COMPLETED, wf.getStatus()); - } - - @Test - public void testForkJoinNestedWithSubWorkflow() { - - createForkJoinNestedWorkflowWithSubworkflow(1); - - Map input = new HashMap<>(); - input.put("case", "a"); //This should execute t16 and t19 - String wfid = startOrLoadWorkflowExecution(FORK_JOIN_NESTED_WF, 1, "fork_join_nested_test", input, null, null); - System.out.println("testForkJoinNested.wfid=" + wfid); - - Workflow workflow = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - - assertTrue(workflow.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t11"))); - 
assertTrue(workflow.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t12"))); - assertTrue(workflow.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t13"))); - assertTrue(workflow.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("sw1"))); - assertTrue(workflow.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("fork1"))); - assertTrue(workflow.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("fork2"))); - assertFalse(workflow.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t16"))); - assertFalse(workflow.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t1"))); - assertFalse(workflow.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t2"))); - - - Task t1 = workflowExecutionService.poll("junit_task_11", "test"); - assertTrue(workflowExecutionService.ackTaskReceived(t1.getTaskId())); - - Task t2 = workflowExecutionService.poll("junit_task_12", "test"); - assertTrue(workflowExecutionService.ackTaskReceived(t2.getTaskId())); - - Task t3 = workflowExecutionService.poll("junit_task_13", "test"); - assertTrue(workflowExecutionService.ackTaskReceived(t3.getTaskId())); - - assertNotNull(t1); - assertNotNull(t2); - assertNotNull(t3); - - t1.setStatus(COMPLETED); - t2.setStatus(COMPLETED); - t3.setStatus(COMPLETED); - - workflowExecutionService.updateTask(t1); - workflowExecutionService.updateTask(t2); - workflowExecutionService.updateTask(t3); - Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); - - // Simulating SystemTaskWorkerCoordinator to execute async system tasks - String subWorkflowTaskId = workflow.getTaskByRefName("sw1").getTaskId(); - workflowExecutor.executeSystemTask(dummySubWorkflowSystemTask, subWorkflowTaskId, 1); - - workflow = workflowExecutionService.getExecutionStatus(wfid, true); - - assertTrue(workflow.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t16"))); - assertTrue(workflow.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t14"))); - - String[] tasks = new String[]{"junit_task_1", "junit_task_2", "junit_task_14", "junit_task_16"}; - for (String tt : tasks) { - Task polled = workflowExecutionService.poll(tt, "test"); - assertNotNull("poll resulted empty for task: " + tt, polled); - polled.setStatus(COMPLETED); - workflowExecutionService.updateTask(polled); - } - - // Execute again to complete the Subworkflow task. 
- workflowExecutor.executeSystemTask(dummySubWorkflowSystemTask, subWorkflowTaskId, 1); - - workflow = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - - assertTrue(workflow.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t19"))); - assertFalse(workflow.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t15"))); //Not there yet - assertFalse(workflow.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t20"))); //Not there yet - - Task task19 = workflowExecutionService.poll("junit_task_19", "test"); - assertNotNull(task19); - task19.setStatus(COMPLETED); - workflowExecutionService.updateTask(task19); - - Task task20 = workflowExecutionService.poll("junit_task_20", "test"); - assertNotNull(task20); - task20.setStatus(COMPLETED); - workflowExecutionService.updateTask(task20); - Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); - workflow = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - - Set pendingTasks = workflow.getTasks().stream().filter(t -> !t.getStatus().isTerminal()).map(t -> t.getReferenceTaskName()).collect(Collectors.toSet()); - assertTrue("Found only this: " + pendingTasks, workflow.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("join1"))); - - pendingTasks = workflow.getTasks().stream().filter(t -> !t.getStatus().isTerminal()).map(t -> t.getReferenceTaskName()).collect(Collectors.toSet()); - assertTrue("Found only this: " + pendingTasks, workflow.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t15"))); - Task task15 = workflowExecutionService.poll("junit_task_15", "test"); - assertNotNull(task15); - task15.setStatus(COMPLETED); - workflowExecutionService.updateTask(task15); - - workflow = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); - - } - - @Test - public void testForkJoinFailure() { - - try { - createForkJoinWorkflow(); - } catch (Exception e) { - } - - String taskName = "junit_task_2"; - TaskDef taskDef = notFoundSafeGetTaskDef(taskName); - int retryCount = taskDef.getRetryCount(); - taskDef.setRetryCount(0); - metadataService.updateTaskDef(taskDef); - - - Map input = new HashMap(); - String wfid = startOrLoadWorkflowExecution(FORK_JOIN_WF, 1, "fanouttest", input, null, null); - System.out.println("testForkJoinFailure.wfid=" + wfid); - - Task t1 = workflowExecutionService.poll("junit_task_2", "test"); - assertNotNull(t1); - assertTrue(workflowExecutionService.ackTaskReceived(t1.getTaskId())); - - Task t2 = workflowExecutionService.poll("junit_task_1", "test"); - assertTrue(workflowExecutionService.ackTaskReceived(t2.getTaskId())); - - Task t3 = workflowExecutionService.poll("junit_task_3", "test"); - assertNull(t3); - - assertNotNull(t1); - assertNotNull(t2); - t1.setStatus(FAILED); - t2.setStatus(COMPLETED); - - workflowExecutionService.updateTask(t2); - Workflow wf = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(wf); - assertEquals("Found " + wf.getTasks(), RUNNING, wf.getStatus()); - - t3 = workflowExecutionService.poll("junit_task_3", "test"); - assertNotNull(t3); - - - workflowExecutionService.updateTask(t1); - wf = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(wf); - assertEquals("Found " + wf.getTasks(), WorkflowStatus.FAILED, wf.getStatus()); - - - 
taskDef = notFoundSafeGetTaskDef(taskName); - taskDef.setRetryCount(retryCount); - metadataService.updateTaskDef(taskDef); - } - - @SuppressWarnings("unchecked") - @Test - public void testDynamicForkJoinLegacy() { - try { - createDynamicForkJoinWorkflowDefsLegacy(1); - } catch (Exception e) { - } - - Map input = new HashMap(); - String wfid = startOrLoadWorkflowExecution(DYNAMIC_FORK_JOIN_WF_LEGACY, 1, "dynfanouttest1", input, null, null); - System.out.println("testDynamicForkJoinLegacy.wfid=" + wfid); - - Task t1 = workflowExecutionService.poll("junit_task_1", "test"); - //assertTrue(ess.ackTaskRecieved(t1.getTaskId(), "test")); - - DynamicForkJoinTaskList dynamicForkJoinTasks = new DynamicForkJoinTaskList(); - - input = new HashMap(); - input.put("k1", "v1"); - dynamicForkJoinTasks.add("junit_task_2", null, "xdt1", input); - - HashMap input2 = new HashMap(); - input2.put("k2", "v2"); - dynamicForkJoinTasks.add("junit_task_3", null, "xdt2", input2); - - t1.getOutputData().put("dynamicTasks", dynamicForkJoinTasks); - t1.setStatus(COMPLETED); - - workflowExecutionService.updateTask(t1); - - Task t2 = workflowExecutionService.poll("junit_task_2", "test"); - assertTrue(workflowExecutionService.ackTaskReceived(t2.getTaskId())); - assertEquals("xdt1", t2.getReferenceTaskName()); - assertTrue(t2.getInputData().containsKey("k1")); - assertEquals("v1", t2.getInputData().get("k1")); - Map output = new HashMap(); - output.put("ok1", "ov1"); - t2.setOutputData(output); - t2.setStatus(COMPLETED); - workflowExecutionService.updateTask(t2); - - Task t3 = workflowExecutionService.poll("junit_task_3", "test"); - assertTrue(workflowExecutionService.ackTaskReceived(t3.getTaskId())); - assertEquals("xdt2", t3.getReferenceTaskName()); - assertTrue(t3.getInputData().containsKey("k2")); - assertEquals("v2", t3.getInputData().get("k2")); - - output = new HashMap<>(); - output.put("ok1", "ov1"); - t3.setOutputData(output); - t3.setStatus(COMPLETED); - workflowExecutionService.updateTask(t3); - - Workflow wf = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(wf); - assertEquals(WorkflowStatus.COMPLETED, wf.getStatus()); - - // Check the output - Task joinTask = wf.getTaskByRefName("dynamicfanouttask_join"); - assertEquals("Found:" + joinTask.getOutputData(), 2, joinTask.getOutputData().keySet().size()); - Set joinTaskOutput = joinTask.getOutputData().keySet(); - System.out.println("joinTaskOutput=" + joinTaskOutput); - for (String key : joinTask.getOutputData().keySet()) { - assertTrue(key.equals("xdt1") || key.equals("xdt2")); - assertEquals("ov1", ((Map) joinTask.getOutputData().get(key)).get("ok1")); - } - } - - @SuppressWarnings("unchecked") - @Test - public void testDynamicForkJoin() { - - createDynamicForkJoinWorkflowDefs(); - - String taskName = "junit_task_2"; - TaskDef taskDef = notFoundSafeGetTaskDef(taskName); - int retryCount = taskDef.getRetryCount(); - taskDef.setRetryCount(2); - taskDef.setRetryDelaySeconds(0); - taskDef.setRetryLogic(RetryLogic.FIXED); - metadataService.updateTaskDef(taskDef); - - Map workflowInput = new HashMap<>(); - String workflowId = startOrLoadWorkflowExecution(DYNAMIC_FORK_JOIN_WF, 1, "dynfanouttest1", workflowInput, null, null); - System.out.println("testDynamicForkJoin.wfid=" + workflowId); - Workflow workflow = workflowExecutor.getWorkflow(workflowId, true); - assertNotNull(workflow); - assertEquals(workflow.getReasonForIncompletion(), RUNNING, workflow.getStatus()); - assertEquals(1, workflow.getTasks().size()); - - Task task1 = 
workflowExecutionService.poll("junit_task_1", "test"); - assertNotNull(task1); - assertTrue(workflowExecutionService.ackTaskReceived(task1.getTaskId())); - assertEquals("dt1", task1.getReferenceTaskName()); - - Map inputParams2 = new HashMap<>(); - inputParams2.put("k1", "v1"); - WorkflowTask workflowTask2 = new WorkflowTask(); - workflowTask2.setName("junit_task_2"); - workflowTask2.setTaskReferenceName("xdt1"); - - Map inputParams3 = new HashMap<>(); - inputParams3.put("k2", "v2"); - WorkflowTask workflowTask3 = new WorkflowTask(); - workflowTask3.setName("junit_task_3"); - workflowTask3.setTaskReferenceName("xdt2"); - - HashMap dynamicTasksInput = new HashMap<>(); - dynamicTasksInput.put("xdt1", inputParams2); - dynamicTasksInput.put("xdt2", inputParams3); - task1.getOutputData().put("dynamicTasks", Arrays.asList(workflowTask2, workflowTask3)); - task1.getOutputData().put("dynamicTasksInput", dynamicTasksInput); - task1.setStatus(COMPLETED); - - workflowExecutionService.updateTask(task1); - workflow = workflowExecutor.getWorkflow(workflowId, true); - assertNotNull(workflow); - assertEquals("Found " + workflow.getTasks().stream().map(Task::getTaskType).collect(Collectors.toList()), 5, workflow.getTasks().size()); - - Task task2 = workflowExecutionService.poll("junit_task_2", "test"); - assertTrue(workflowExecutionService.ackTaskReceived(task2.getTaskId())); - assertEquals("xdt1", task2.getReferenceTaskName()); - assertTrue(task2.getInputData().containsKey("k1")); - assertEquals("v1", task2.getInputData().get("k1")); - Map output = new HashMap<>(); - output.put("ok1", "ov1"); - task2.setOutputData(output); - task2.setStatus(FAILED); - workflowExecutionService.updateTask(task2); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(workflow.getReasonForIncompletion(), RUNNING, workflow.getStatus()); - assertEquals(2, workflow.getTasks().stream().filter(t -> t.getTaskType().equals("junit_task_2")).count()); - assertTrue(workflow.getTasks().stream().filter(t -> t.getTaskType().equals("junit_task_2")).allMatch(t -> t.getWorkflowTask() != null)); - assertEquals("Found " + workflow.getTasks().stream().map(Task::getTaskType).collect(Collectors.toList()), 6, workflow.getTasks().size()); - - task2 = workflowExecutionService.poll("junit_task_2", "test"); - assertTrue(workflowExecutionService.ackTaskReceived(task2.getTaskId())); - assertEquals("xdt1", task2.getReferenceTaskName()); - assertTrue(task2.getInputData().containsKey("k1")); - assertEquals("v1", task2.getInputData().get("k1")); - task2.setOutputData(output); - task2.setStatus(COMPLETED); - workflowExecutionService.updateTask(task2); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals("Found " + workflow.getTasks().stream().map(Task::getTaskType).collect(Collectors.toList()), 6, workflow.getTasks().size()); - - Task task3 = workflowExecutionService.poll("junit_task_3", "test"); - assertTrue(workflowExecutionService.ackTaskReceived(task3.getTaskId())); - assertEquals("xdt2", task3.getReferenceTaskName()); - assertTrue(task3.getInputData().containsKey("k2")); - assertEquals("v2", task3.getInputData().get("k2")); - output = new HashMap<>(); - output.put("ok1", "ov1"); - task3.setOutputData(output); - task3.setStatus(COMPLETED); - workflowExecutionService.updateTask(task3); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - 
assertEquals(workflow.getReasonForIncompletion(), RUNNING, workflow.getStatus()); - assertEquals("Found " + workflow.getTasks().stream().map(Task::getTaskType).collect(Collectors.toList()), 7, workflow.getTasks().size()); - - Task task4 = workflowExecutionService.poll("junit_task_4", "test"); - assertTrue(workflowExecutionService.ackTaskReceived(task4.getTaskId())); - assertEquals("task4", task4.getReferenceTaskName()); - task4.setStatus(COMPLETED); - workflowExecutionService.updateTask(task4); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(workflow.getReasonForIncompletion(), WorkflowStatus.COMPLETED, workflow.getStatus()); - assertEquals("Found " + workflow.getTasks().stream().map(Task::getTaskType).collect(Collectors.toList()), 7, workflow.getTasks().size()); - - // Check the output - Task joinTask = workflow.getTaskByRefName("dynamicfanouttask_join"); - assertEquals("Found:" + joinTask.getOutputData(), 2, joinTask.getOutputData().keySet().size()); - Set joinTaskOutput = joinTask.getOutputData().keySet(); - System.out.println("joinTaskOutput=" + joinTaskOutput); - for (String key : joinTask.getOutputData().keySet()) { - assertTrue(key.equals("xdt1") || key.equals("xdt2")); - assertEquals("ov1", ((Map) joinTask.getOutputData().get(key)).get("ok1")); - } - - // reset the task def - taskDef = notFoundSafeGetTaskDef(taskName); - taskDef.setRetryCount(retryCount); - taskDef.setRetryDelaySeconds(1); - metadataService.updateTaskDef(taskDef); - } - - @Test - public void testForkJoinDecisionWorkflow() { - createForkJoinDecisionWorkflow(); - - Map input = new HashMap<>(); - input.put("param1", "p1"); - input.put("param2", "p2"); - input.put("case", "c"); - String workflowId = startOrLoadWorkflowExecution(FORK_JOIN_DECISION_WF, 1, "forkjoin_conditional", input, null, null); - assertNotNull(workflowId); - - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - assertEquals(5, workflow.getTasks().size()); - - // poll task 10 - Task task = workflowExecutionService.poll("junit_task_10", "task10.worker"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - // update to COMPLETED - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - assertEquals(5, workflow.getTasks().size()); - - // poll task 1 - task = workflowExecutionService.poll("junit_task_1", "task1.worker"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - // update to COMPLETED - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - assertEquals(6, workflow.getTasks().size()); - - // poll task 2 - task = workflowExecutionService.poll("junit_task_2", "task2.worker"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - // update to COMPLETED - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - assertEquals(7, 
workflow.getTasks().size()); - - // poll task 20 - task = workflowExecutionService.poll("junit_task_20", "task20.worker"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - // update to COMPLETED - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); - assertEquals(7, workflow.getTasks().size()); - } - - private void createForkJoinWorkflow() { - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName(FORK_JOIN_WF); - workflowDef.setDescription(workflowDef.getName()); - workflowDef.setVersion(1); - workflowDef.setInputParameters(Arrays.asList("param1", "param2")); - - // fork task - WorkflowTask fanoutTask = new WorkflowTask(); - fanoutTask.setType(TaskType.FORK_JOIN.name()); - fanoutTask.setTaskReferenceName("fanouttask"); - - Map inputParams1 = new HashMap<>(); - inputParams1.put("p1", "workflow.input.param1"); - inputParams1.put("p2", "workflow.input.param2"); - - // left fork - WorkflowTask workflowTask1 = new WorkflowTask(); - workflowTask1.setName("junit_task_1"); - workflowTask1.setInputParameters(inputParams1); - workflowTask1.setTaskReferenceName("t1"); - - WorkflowTask workflowTask3 = new WorkflowTask(); - workflowTask3.setName("junit_task_3"); - workflowTask3.setInputParameters(inputParams1); - workflowTask3.setTaskReferenceName("t3"); - - // right fork - WorkflowTask workflowTask2 = new WorkflowTask(); - workflowTask2.setName("junit_task_2"); - Map inputParams2 = new HashMap<>(); - inputParams2.put("tp1", "workflow.input.param1"); - workflowTask2.setInputParameters(inputParams2); - workflowTask2.setTaskReferenceName("t2"); - - fanoutTask.getForkTasks().add(Arrays.asList(workflowTask1, workflowTask3)); - fanoutTask.getForkTasks().add(Collections.singletonList(workflowTask2)); - workflowDef.getTasks().add(fanoutTask); - - // join task - WorkflowTask joinTask = new WorkflowTask(); - joinTask.setType(TaskType.JOIN.name()); - joinTask.setTaskReferenceName("fanouttask_join"); - joinTask.setJoinOn(Arrays.asList("t3", "t2")); - - workflowDef.getTasks().add(joinTask); - - // simple task - WorkflowTask workflowTask4 = new WorkflowTask(); - workflowTask4.setName("junit_task_4"); - workflowTask4.setInputParameters(inputParams2); - workflowTask4.setTaskReferenceName("t4"); - - workflowDef.getTasks().add(workflowTask4); - metadataService.updateWorkflowDef(workflowDef); - } - - private void createForkJoinDecisionWorkflow() { - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName(FORK_JOIN_DECISION_WF); - workflowDef.setDescription(workflowDef.getName()); - workflowDef.setVersion(1); - - Map inputParams = new HashMap<>(); - inputParams.put("p1", "${workflow.input.param1}"); - inputParams.put("p2", "${workflow.input.param2}"); - - // left decision - WorkflowTask leftCaseTask1 = new WorkflowTask(); - leftCaseTask1.setName("junit_task_1"); - leftCaseTask1.setInputParameters(inputParams); - leftCaseTask1.setTaskReferenceName("t1"); - - WorkflowTask leftCaseTask2 = new WorkflowTask(); - leftCaseTask2.setName("junit_task_2"); - leftCaseTask2.setInputParameters(inputParams); - leftCaseTask2.setTaskReferenceName("t2"); - - // default decision - WorkflowTask defaultCaseTask5 = new WorkflowTask(); - defaultCaseTask5.setName("junit_task_5"); - defaultCaseTask5.setInputParameters(inputParams); - defaultCaseTask5.setTaskReferenceName("t5"); - - // 
left fork - // decision task - Map<String, Object> decisionInput = new HashMap<>(); - decisionInput.put("case", "${workflow.input.case}"); - - WorkflowTask decisionTask = new WorkflowTask(); - decisionTask.setType(TaskType.DECISION.name()); - decisionTask.setCaseValueParam("case"); - decisionTask.setName("decisionTask"); - decisionTask.setTaskReferenceName("decisionTask"); - decisionTask.setInputParameters(decisionInput); - Map<String, List<WorkflowTask>> decisionCases = new HashMap<>(); - decisionCases.put("c", Arrays.asList(leftCaseTask1, leftCaseTask2)); - decisionTask.setDefaultCase(Collections.singletonList(defaultCaseTask5)); - decisionTask.setDecisionCases(decisionCases); - - WorkflowTask workflowTask20 = new WorkflowTask(); - workflowTask20.setName("junit_task_20"); - workflowTask20.setInputParameters(inputParams); - workflowTask20.setTaskReferenceName("t20"); - - // right fork - WorkflowTask rightForkTask10 = new WorkflowTask(); - rightForkTask10.setName("junit_task_10"); - rightForkTask10.setInputParameters(inputParams); - rightForkTask10.setTaskReferenceName("t10"); - - // fork task - WorkflowTask forkTask = new WorkflowTask(); - forkTask.setName("forkTask"); - forkTask.setType(TaskType.FORK_JOIN.name()); - forkTask.setTaskReferenceName("forkTask"); - forkTask.getForkTasks().add(Arrays.asList(decisionTask, workflowTask20)); - forkTask.getForkTasks().add(Collections.singletonList(rightForkTask10)); - - // join task - WorkflowTask joinTask = new WorkflowTask(); - joinTask.setName("joinTask"); - joinTask.setType(TaskType.JOIN.name()); - joinTask.setTaskReferenceName("joinTask"); - joinTask.setJoinOn(Arrays.asList("t20", "t10")); - - workflowDef.getTasks().add(forkTask); - workflowDef.getTasks().add(joinTask); - - metadataService.updateWorkflowDef(workflowDef); - } - - @Test - public void testDecisionWorkflow() { - createDecisionWorkflow(); - - Map<String, Object> input = new HashMap<>(); - input.put("param1", "p1"); - input.put("param2", "p2"); - input.put("case", "c"); - String workflowId = startOrLoadWorkflowExecution(DECISION_WF, 1, "decision", input, null, null); - assertNotNull(workflowId); - - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - assertEquals(2, workflow.getTasks().size()); - - // poll task 1 - Task task = workflowExecutionService.poll("junit_task_1", "task1.worker"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - // update to COMPLETED - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - assertEquals(3, workflow.getTasks().size()); - - // poll task 2 - task = workflowExecutionService.poll("junit_task_2", "task2.worker"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - // update to COMPLETED - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - assertEquals(4, workflow.getTasks().size()); - - // poll task 20 - task = workflowExecutionService.poll("junit_task_20", "task20.worker"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - // update to COMPLETED - task.setStatus(COMPLETED); - 
workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); - } - - private void createDecisionWorkflow() { - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName(DECISION_WF); - workflowDef.setDescription(workflowDef.getName()); - workflowDef.setVersion(1); - - Map<String, Object> inputParams = new HashMap<>(); - inputParams.put("p1", "${workflow.input.param1}"); - inputParams.put("p2", "${workflow.input.param2}"); - - // left decision - WorkflowTask leftCaseTask1 = new WorkflowTask(); - leftCaseTask1.setName("junit_task_1"); - leftCaseTask1.setInputParameters(inputParams); - leftCaseTask1.setTaskReferenceName("t1"); - - WorkflowTask leftCaseTask2 = new WorkflowTask(); - leftCaseTask2.setName("junit_task_2"); - leftCaseTask2.setInputParameters(inputParams); - leftCaseTask2.setTaskReferenceName("t2"); - - // default decision - WorkflowTask defaultCaseTask5 = new WorkflowTask(); - defaultCaseTask5.setName("junit_task_5"); - defaultCaseTask5.setInputParameters(inputParams); - defaultCaseTask5.setTaskReferenceName("t5"); - - // decision task - Map<String, Object> decisionInput = new HashMap<>(); - decisionInput.put("case", "${workflow.input.case}"); - WorkflowTask decisionTask = new WorkflowTask(); - decisionTask.setType(TaskType.DECISION.name()); - decisionTask.setCaseValueParam("case"); - decisionTask.setName("decisionTask"); - decisionTask.setTaskReferenceName("decisionTask"); - decisionTask.setInputParameters(decisionInput); - Map<String, List<WorkflowTask>> decisionCases = new HashMap<>(); - decisionCases.put("c", Arrays.asList(leftCaseTask1, leftCaseTask2)); - decisionTask.setDefaultCase(Collections.singletonList(defaultCaseTask5)); - decisionTask.setDecisionCases(decisionCases); - - WorkflowTask workflowTask20 = new WorkflowTask(); - workflowTask20.setName("junit_task_20"); - workflowTask20.setInputParameters(inputParams); - workflowTask20.setTaskReferenceName("t20"); - - workflowDef.getTasks().add(decisionTask); - workflowDef.getTasks().add(workflowTask20); - - metadataService.updateWorkflowDef(workflowDef); - } - - private void createDoWhileWorkflowWithIteration(int iteration, boolean isInputParameter, boolean useTaskDef) { - WorkflowDef workflowDef = new WorkflowDef(); - if (isInputParameter) { - workflowDef.setName(DO_WHILE_WF + "_3"); - } else { - workflowDef.setName(DO_WHILE_WF + "_" + iteration); - } - workflowDef.setDescription(workflowDef.getName()); - workflowDef.setVersion(1); - workflowDef.setInputParameters(Arrays.asList("param1", "param2")); - - WorkflowTask loopTask = new WorkflowTask(); - loopTask.setType(TaskType.DO_WHILE.name()); - loopTask.setTaskReferenceName("loopTask"); - loopTask.setName("loopTask"); - loopTask.setWorkflowTaskType(TaskType.DO_WHILE); - Map<String, Object> input = new HashMap<>(); - input.put("value", "${workflow.input.loop}"); - loopTask.setInputParameters(input); - - if(useTaskDef) { - TaskDef taskDef = new TaskDef(); - taskDef.setName("loopTask"); - taskDef.setTimeoutSeconds(200); - taskDef.setRetryCount(1); - taskDef.setTimeoutPolicy(TimeoutPolicy.RETRY); - taskDef.setRetryDelaySeconds(10); - metadataService.registerTaskDef(Arrays.asList(taskDef)); - } - - Map<String, Object> inputParams1 = new HashMap<>(); - inputParams1.put("p1", "workflow.input.param1"); - inputParams1.put("p2", "workflow.input.param2"); - - WorkflowTask http1 = new WorkflowTask(); - http1.setName("http1"); - http1.setInputParameters(inputParams1); - http1.setTaskReferenceName("http1"); - 
http1.setWorkflowTaskType(TaskType.HTTP); - - WorkflowTask http2 = new WorkflowTask(); - http2.setName("http2"); - http2.setInputParameters(inputParams1); - http2.setTaskReferenceName("http2"); - http2.setWorkflowTaskType(TaskType.HTTP); - - WorkflowTask fork = new WorkflowTask(); - fork.setName("fork"); - fork.setInputParameters(inputParams1); - fork.setTaskReferenceName("fork"); - fork.setWorkflowTaskType(TaskType.FORK_JOIN); - fork.setForkTasks(Arrays.asList(Arrays.asList(http1),Arrays.asList(http2))); - - WorkflowTask join = new WorkflowTask(); - join.setName("join"); - join.setInputParameters(inputParams1); - join.setTaskReferenceName("join"); - join.setWorkflowTaskType(TaskType.JOIN); - - WorkflowTask http0 = new WorkflowTask(); - http0.setName("http0"); - http0.setInputParameters(inputParams1); - http0.setTaskReferenceName("http0"); - http0.setWorkflowTaskType(TaskType.HTTP); - - loopTask.getLoopOver().add(http0); - loopTask.getLoopOver().add(fork); - loopTask.getLoopOver().add(join); - if (isInputParameter) { - loopTask.setLoopCondition("if ($.loopTask['iteration'] < $.value) { true; } else { false; }"); - } else { - loopTask.setLoopCondition("if ($.loopTask['iteration'] < " + iteration + " ) { true;} else {false;} "); - } - - workflowDef.getTasks().add(loopTask); - - if (iteration == 2 && isInputParameter == false) { - if(useTaskDef) { - TaskDef taskDef2 = new TaskDef(); - taskDef2.setName("loopTask2"); - taskDef2.setTimeoutSeconds(200); - taskDef2.setRetryCount(3); - taskDef2.setTimeoutPolicy(TimeoutPolicy.RETRY); - taskDef2.setRetryDelaySeconds(10); - metadataService.registerTaskDef(Arrays.asList(taskDef2)); - } - WorkflowTask loopTask2 = new WorkflowTask(); - loopTask2.setType(TaskType.DO_WHILE.name()); - loopTask2.setTaskReferenceName("loopTask2"); - loopTask2.setName("loopTask2"); - loopTask2.setWorkflowTaskType(TaskType.DO_WHILE); - loopTask2.setInputParameters(input); - WorkflowTask http3 = new WorkflowTask(); - http3.setName("http3"); - http3.setInputParameters(inputParams1); - http3.setTaskReferenceName("http3"); - http3.setWorkflowTaskType(TaskType.HTTP); - loopTask2.setLoopCondition("if ($.loopTask2['iteration'] < 1) { true; } else { false; }"); - loopTask2.getLoopOver().add(http3); - workflowDef.getTasks().add(loopTask2); - } - metadataService.registerWorkflowDef(workflowDef); - } - - - private void createForkJoinWorkflowWithZeroRetry() { - WorkflowDef def = new WorkflowDef(); - def.setName(FORK_JOIN_WF + "_2"); - def.setDescription(def.getName()); - def.setVersion(1); - def.setInputParameters(Arrays.asList("param1", "param2")); - - WorkflowTask fanout = new WorkflowTask(); - fanout.setType(TaskType.FORK_JOIN.name()); - fanout.setTaskReferenceName("fanouttask"); - - WorkflowTask wft1 = new WorkflowTask(); - wft1.setName("junit_task_0_RT_1"); - Map ip1 = new HashMap<>(); - ip1.put("p1", "workflow.input.param1"); - ip1.put("p2", "workflow.input.param2"); - wft1.setInputParameters(ip1); - wft1.setTaskReferenceName("t1"); - - WorkflowTask wft3 = new WorkflowTask(); - wft3.setName("junit_task_0_RT_3"); - wft3.setInputParameters(ip1); - wft3.setTaskReferenceName("t3"); - - WorkflowTask wft2 = new WorkflowTask(); - wft2.setName("junit_task_0_RT_2"); - Map ip2 = new HashMap<>(); - ip2.put("tp1", "workflow.input.param1"); - wft2.setInputParameters(ip2); - wft2.setTaskReferenceName("t2"); - - WorkflowTask wft4 = new WorkflowTask(); - wft4.setName("junit_task_0_RT_4"); - wft4.setInputParameters(ip2); - wft4.setTaskReferenceName("t4"); - - 
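// Sketch (not original code) of the DO_WHILE contract used above: the loop condition is a
// JavaScript expression evaluated after each pass, where $.<taskRefName>['iteration'] is the
// iteration that just finished and $.value comes from the task's input. With value = 2 the
// loopOver tasks run twice and the loop then completes. The sketchLoop names are illustrative.
Map<String, Object> sketchLoopInput = new HashMap<>();
sketchLoopInput.put("value", "${workflow.input.loop}");
WorkflowTask sketchLoop = new WorkflowTask();
sketchLoop.setType(TaskType.DO_WHILE.name());
sketchLoop.setTaskReferenceName("sketchLoop");
sketchLoop.setInputParameters(sketchLoopInput);
sketchLoop.setLoopCondition("if ($.sketchLoop['iteration'] < $.value) { true; } else { false; }");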
fanout.getForkTasks().add(Arrays.asList(wft1, wft3)); - fanout.getForkTasks().add(Arrays.asList(wft2)); - - def.getTasks().add(fanout); - - WorkflowTask join = new WorkflowTask(); - join.setType(TaskType.JOIN.name()); - join.setTaskReferenceName("fanouttask_join"); - join.setJoinOn(Arrays.asList("t3", "t2")); - - def.getTasks().add(join); - def.getTasks().add(wft4); - metadataService.updateWorkflowDef(def); - } - - private void createForkJoinNestedWorkflow(int schemaVersion) { - WorkflowDef def = new WorkflowDef(); - def.setName(FORK_JOIN_NESTED_WF); - def.setDescription(def.getName()); - def.setVersion(1); - def.setSchemaVersion(schemaVersion); - def.setInputParameters(Arrays.asList("param1", "param2")); - - WorkflowTask[] tasks = new WorkflowTask[21]; - - Map ip1 = new HashMap<>(); - if (schemaVersion <= 1) { - ip1.put("p1", "workflow.input.param1"); - ip1.put("p2", "workflow.input.param2"); - ip1.put("case", "workflow.input.case"); - } else { - ip1.put("p1", "${workflow.input.param1}"); - ip1.put("p2", "${workflow.input.param2}"); - ip1.put("case", "${workflow.input.case}"); - } - - for (int i = 10; i < 21; i++) { - WorkflowTask wft = new WorkflowTask(); - wft.setName("junit_task_" + i); - wft.setInputParameters(ip1); - wft.setTaskReferenceName("t" + i); - tasks[i] = wft; - } - - WorkflowTask d1 = new WorkflowTask(); - d1.setType(TaskType.DECISION.name()); - d1.setName("Decision"); - d1.setTaskReferenceName("d1"); - d1.setInputParameters(ip1); - d1.setDefaultCase(Arrays.asList(tasks[18], tasks[20])); - d1.setCaseValueParam("case"); - Map> decisionCases = new HashMap<>(); - decisionCases.put("a", Arrays.asList(tasks[16], tasks[19], tasks[20])); - decisionCases.put("b", Arrays.asList(tasks[17], tasks[20])); - d1.setDecisionCases(decisionCases); - - WorkflowTask fork2 = new WorkflowTask(); - fork2.setType(TaskType.FORK_JOIN.name()); - fork2.setName("fork2"); - fork2.setTaskReferenceName("fork2"); - fork2.getForkTasks().add(Arrays.asList(tasks[12], tasks[14])); - fork2.getForkTasks().add(Arrays.asList(tasks[13], d1)); - - WorkflowTask join2 = new WorkflowTask(); - join2.setType(TaskType.JOIN.name()); - join2.setTaskReferenceName("join2"); - join2.setJoinOn(Arrays.asList("t14", "t20")); - - WorkflowTask fork1 = new WorkflowTask(); - fork1.setType(TaskType.FORK_JOIN.name()); - fork1.setTaskReferenceName("fork1"); - fork1.getForkTasks().add(Arrays.asList(tasks[11])); - fork1.getForkTasks().add(Arrays.asList(fork2, join2)); - - WorkflowTask join1 = new WorkflowTask(); - join1.setType(TaskType.JOIN.name()); - join1.setTaskReferenceName("join1"); - join1.setJoinOn(Arrays.asList("t11", "join2")); - - def.getTasks().add(fork1); - def.getTasks().add(join1); - def.getTasks().add(tasks[15]); - - metadataService.updateWorkflowDef(def); - } - - private void createForkJoinNestedWorkflowWithSubworkflow(int schemaVersion) { - WorkflowDef def = new WorkflowDef(); - def.setName(FORK_JOIN_NESTED_WF); - def.setDescription(def.getName()); - def.setSchemaVersion(1); - def.setVersion(1); - def.setInputParameters(Arrays.asList("param1", "param2")); - - Map ip1 = new HashMap<>(); - ip1.put("p1", "workflow.input.param1"); - ip1.put("p2", "workflow.input.param2"); - ip1.put("case", "workflow.input.case"); - - WorkflowTask[] tasks = new WorkflowTask[21]; - - for (int i = 10; i < 21; i++) { - WorkflowTask wft = new WorkflowTask(); - wft.setName("junit_task_" + i); - wft.setInputParameters(ip1); - wft.setTaskReferenceName("t" + i); - tasks[i] = wft; - } - - WorkflowTask d1 = new WorkflowTask(); - 
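// Sketch (not original code) of the JOIN contract used above: the task completes only after every
// reference name listed in joinOn has reached a terminal state, and its output collects those
// tasks' outputs keyed by reference name. Joining on "t3" and "t2" therefore waits for the tail
// task of each fork branch rather than for every forked task.
WorkflowTask sketchJoin = new WorkflowTask();
sketchJoin.setType(TaskType.JOIN.name());
sketchJoin.setTaskReferenceName("sketch_join");
sketchJoin.setJoinOn(Arrays.asList("t3", "t2"));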
d1.setType(TaskType.DECISION.name()); - d1.setName("Decision"); - d1.setTaskReferenceName("d1"); - d1.setInputParameters(ip1); - d1.setDefaultCase(Arrays.asList(tasks[18], tasks[20])); - d1.setCaseValueParam("case"); - Map> decisionCases = new HashMap<>(); - decisionCases.put("a", Arrays.asList(tasks[16], tasks[19], tasks[20])); - decisionCases.put("b", Arrays.asList(tasks[17], tasks[20])); - d1.setDecisionCases(decisionCases); - - WorkflowTask subWorkflow = new WorkflowTask(); - subWorkflow.setType(SUB_WORKFLOW.name()); - SubWorkflowParams sw = new SubWorkflowParams(); - sw.setName(LINEAR_WORKFLOW_T1_T2); - subWorkflow.setSubWorkflowParam(sw); - subWorkflow.setTaskReferenceName("sw1"); - - WorkflowTask fork2 = new WorkflowTask(); - fork2.setType(TaskType.FORK_JOIN.name()); - fork2.setName("fork2"); - fork2.setTaskReferenceName("fork2"); - fork2.getForkTasks().add(Arrays.asList(tasks[12], tasks[14])); - fork2.getForkTasks().add(Arrays.asList(tasks[13], d1)); - - WorkflowTask join2 = new WorkflowTask(); - join2.setType(TaskType.JOIN.name()); - join2.setTaskReferenceName("join2"); - join2.setJoinOn(Arrays.asList("t14", "t20")); - - WorkflowTask fork1 = new WorkflowTask(); - fork1.setType(TaskType.FORK_JOIN.name()); - fork1.setTaskReferenceName("fork1"); - fork1.getForkTasks().add(Arrays.asList(tasks[11])); - fork1.getForkTasks().add(Arrays.asList(fork2, join2)); - fork1.getForkTasks().add(Arrays.asList(subWorkflow)); - - - WorkflowTask join1 = new WorkflowTask(); - join1.setType(TaskType.JOIN.name()); - join1.setTaskReferenceName("join1"); - join1.setJoinOn(Arrays.asList("t11", "join2", "sw1")); - - def.getTasks().add(fork1); - def.getTasks().add(join1); - def.getTasks().add(tasks[15]); - - metadataService.updateWorkflowDef(def); - } - - private void createDynamicForkJoinWorkflowDefs() { - - WorkflowDef def = new WorkflowDef(); - def.setName(DYNAMIC_FORK_JOIN_WF); - def.setDescription(def.getName()); - def.setVersion(1); - def.setSchemaVersion(2); - def.setInputParameters(Arrays.asList("param1", "param2")); - - WorkflowTask workflowTask1 = new WorkflowTask(); - workflowTask1.setName("junit_task_1"); - Map ip1 = new HashMap<>(); - ip1.put("p1", "${workflow.input.param1}"); - ip1.put("p2", "${workflow.input.param2}"); - workflowTask1.setInputParameters(ip1); - workflowTask1.setTaskReferenceName("dt1"); - - WorkflowTask fanout = new WorkflowTask(); - fanout.setType(TaskType.FORK_JOIN_DYNAMIC.name()); - fanout.setTaskReferenceName("dynamicfanouttask"); - fanout.setDynamicForkTasksParam("dynamicTasks"); - fanout.setDynamicForkTasksInputParamName("dynamicTasksInput"); - fanout.getInputParameters().put("dynamicTasks", "${dt1.output.dynamicTasks}"); - fanout.getInputParameters().put("dynamicTasksInput", "${dt1.output.dynamicTasksInput}"); - - WorkflowTask join = new WorkflowTask(); - join.setType(TaskType.JOIN.name()); - join.setTaskReferenceName("dynamicfanouttask_join"); - - WorkflowTask workflowTask4 = new WorkflowTask(); - workflowTask4.setName("junit_task_4"); - workflowTask4.setTaskReferenceName("task4"); - - def.getTasks().add(workflowTask1); - def.getTasks().add(fanout); - def.getTasks().add(join); - def.getTasks().add(workflowTask4); - - metadataMapperService.populateTaskDefinitions(def); - - metadataService.updateWorkflowDef(def); - } - - @SuppressWarnings("deprecation") - private void createDynamicForkJoinWorkflowDefsLegacy(int schemaVersion) { - WorkflowDef def = new WorkflowDef(); - def.setName(DYNAMIC_FORK_JOIN_WF_LEGACY); - def.setDescription(def.getName()); - def.setVersion(1); - 
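// Sketch (not original code) of the output "dt1" must produce for the dynamic fork defined above
// to expand, based on the parameter names it wires up (dynamicTasks / dynamicTasksInput). The
// sketch* names and "xdt1" reference are illustrative.
WorkflowTask sketchForked = new WorkflowTask();
sketchForked.setName("junit_task_2");
sketchForked.setTaskReferenceName("xdt1");
Map<String, Object> sketchDynamicTasksInput = new HashMap<>();
sketchDynamicTasksInput.put("xdt1", Collections.singletonMap("k1", "v1"));
Map<String, Object> sketchDt1Output = new HashMap<>();
sketchDt1Output.put("dynamicTasks", Collections.singletonList(sketchForked));  // tasks scheduled in parallel
sketchDt1Output.put("dynamicTasksInput", sketchDynamicTasksInput);             // their inputs, keyed by reference name
// The JOIN that follows ("dynamicfanouttask_join") then waits on every dynamically forked reference name.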
def.setSchemaVersion(schemaVersion); - def.setInputParameters(Arrays.asList("param1", "param2")); - - WorkflowTask wft1 = new WorkflowTask(); - wft1.setName("junit_task_1"); - Map ip1 = new HashMap<>(); - if (schemaVersion <= 1) { - ip1.put("p1", "workflow.input.param1"); - ip1.put("p2", "workflow.input.param2"); - } else { - ip1.put("p1", "${workflow.input.param1}"); - ip1.put("p2", "${workflow.input.param2}"); - } - - wft1.setInputParameters(ip1); - wft1.setTaskReferenceName("dt1"); - - WorkflowTask fanout = new WorkflowTask(); - fanout.setType(TaskType.FORK_JOIN_DYNAMIC.name()); - fanout.setTaskReferenceName("dynamicfanouttask"); - fanout.setDynamicForkJoinTasksParam("dynamicTasks"); - if (schemaVersion <= 1) { - fanout.getInputParameters().put("dynamicTasks", "dt1.output.dynamicTasks"); - fanout.getInputParameters().put("dynamicTasksInput", "dt1.output.dynamicTasksInput"); - } else { - fanout.getInputParameters().put("dynamicTasks", "${dt1.output.dynamicTasks}"); - fanout.getInputParameters().put("dynamicTasksInput", "${dt1.output.dynamicTasksInput}"); - } - WorkflowTask join = new WorkflowTask(); - join.setType(TaskType.JOIN.name()); - join.setTaskReferenceName("dynamicfanouttask_join"); - - def.getTasks().add(wft1); - def.getTasks().add(fanout); - def.getTasks().add(join); - - metadataMapperService.populateTaskDefinitions(def); - - metadataService.updateWorkflowDef(def); - - } - - private void createConditionalWF(int schemaVersion) { - WorkflowTask workflowTask1 = new WorkflowTask(); - workflowTask1.setName("junit_task_1"); - Map ip1 = new HashMap<>(); - - if (schemaVersion <= 1) { - ip1.put("p1", "workflow.input.param1"); - ip1.put("p2", "workflow.input.param2"); - } else { - ip1.put("p1", "${workflow.input.param1}"); - ip1.put("p2", "${workflow.input.param2}"); - } - - workflowTask1.setInputParameters(ip1); - workflowTask1.setTaskReferenceName("t1"); - - WorkflowTask workflowTask2 = new WorkflowTask(); - workflowTask2.setName("junit_task_2"); - Map ip2 = new HashMap<>(); - if (schemaVersion <= 1) { - ip2.put("tp1", "workflow.input.param1"); - } else { - ip2.put("tp1", "${workflow.input.param1}"); - } - workflowTask2.setInputParameters(ip2); - workflowTask2.setTaskReferenceName("t2"); - - WorkflowTask workflowTask3 = new WorkflowTask(); - workflowTask3.setName("junit_task_3"); - Map ip3 = new HashMap<>(); - ip3.put("tp3", "workflow.input.param2"); - workflowTask3.setInputParameters(ip3); - workflowTask3.setTaskReferenceName("t3"); - - WorkflowTask workflowTask10 = new WorkflowTask(); - workflowTask10.setName("junit_task_10"); - Map ip10 = new HashMap<>(); - ip10.put("tp10", "workflow.input.param2"); - workflowTask10.setInputParameters(ip10); - workflowTask10.setTaskReferenceName("t10"); - - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName(COND_TASK_WF); - workflowDef.setDescription(COND_TASK_WF); - workflowDef.setInputParameters(Arrays.asList("param1", "param2")); - - WorkflowTask nestedCondition = new WorkflowTask(); - nestedCondition.setType(TaskType.DECISION.name()); - nestedCondition.setCaseValueParam("case"); - nestedCondition.setName("nestedCondition"); - nestedCondition.setTaskReferenceName("nestedCondition"); - Map> dc = new HashMap<>(); - dc.put("one", Collections.singletonList(workflowTask1)); - dc.put("two", Collections.singletonList(workflowTask2)); - nestedCondition.setDecisionCases(dc); - - WorkflowTask condition = new WorkflowTask(); - Map finalCaseInput = new HashMap<>(); - - if (schemaVersion <= 1) { - condition.getInputParameters().put("case", 
"workflow.input.param1"); - nestedCondition.getInputParameters().put("case", "workflow.input.param2"); - finalCaseInput.put("finalCase", "workflow.input.finalCase"); - } else { - condition.getInputParameters().put("case", "${workflow.input.param1}"); - nestedCondition.getInputParameters().put("case", "${workflow.input.param2}"); - finalCaseInput.put("finalCase", "${workflow.input.finalCase}"); - } - - condition.setType(TaskType.DECISION.name()); - condition.setCaseValueParam("case"); - condition.setName("conditional"); - condition.setTaskReferenceName("conditional"); - Map> decisionCases = new HashMap<>(); - decisionCases.put("nested", Collections.singletonList(nestedCondition)); - decisionCases.put("three", Collections.singletonList(workflowTask3)); - condition.setDecisionCases(decisionCases); - condition.getDefaultCase().add(workflowTask10); - workflowDef.getTasks().add(condition); - - WorkflowTask notifyTask = new WorkflowTask(); - notifyTask.setName("junit_task_4"); - notifyTask.setTaskReferenceName("junit_task_4"); - - WorkflowTask finalTask = new WorkflowTask(); - finalTask.setName("finalcondition"); - finalTask.setTaskReferenceName("finalCase"); - finalTask.setType(TaskType.DECISION.name()); - finalTask.setCaseValueParam("finalCase"); - finalTask.setInputParameters(finalCaseInput); - finalTask.getDecisionCases().put("notify", Collections.singletonList(notifyTask)); - - workflowDef.setSchemaVersion(schemaVersion); - workflowDef.getTasks().add(finalTask); - metadataService.updateWorkflowDef(workflowDef); - } - - @Test - public void testForkJoinWithOptionalSubworkflows() { - createForkJoinWorkflowWithOptionalSubworkflowForks(); - - Map workflowInput = new HashMap<>(); - workflowInput.put("param1", "p1 value"); - workflowInput.put("param2", "p2 value"); - String workflowId = startOrLoadWorkflowExecution(WORKFLOW_FORK_JOIN_OPTIONAL_SW, 1, "", workflowInput, null, null); - assertNotNull(workflowId); - - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - assertEquals("found " + workflow.getTasks().stream().map(Task::toString).collect(Collectors.toList()), 4, workflow.getTasks().size()); - - // Simulating SystemTaskWorkerCoordinator to execute async system tasks - String subWorkflowTaskId1 = workflow.getTaskByRefName("st1").getTaskId(); - workflowExecutor.executeSystemTask(dummySubWorkflowSystemTask, subWorkflowTaskId1, 1); - String subWorkflowTaskId2 = workflow.getTaskByRefName("st2").getTaskId(); - workflowExecutor.executeSystemTask(dummySubWorkflowSystemTask, subWorkflowTaskId2, 1); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - - String subWorkflowId1 = workflow.getTasks().get(1).getSubWorkflowId(); - Workflow subWorkflow1 = workflowExecutionService.getExecutionStatus(subWorkflowId1, true); - assertNotNull(subWorkflow1); - assertEquals(RUNNING, subWorkflow1.getStatus()); - assertEquals(1, subWorkflow1.getTasks().size()); - - String subWorkflowId2 = workflow.getTasks().get(2).getSubWorkflowId(); - Workflow subWorkflow2 = workflowExecutionService.getExecutionStatus(subWorkflowId2, true); - assertNotNull(subWorkflow2); - assertEquals(RUNNING, subWorkflow2.getStatus()); - assertEquals(1, subWorkflow2.getTasks().size()); - - // fail sub-workflow 1 - Task task = new Task(); - while (!subWorkflowId1.equals(task.getWorkflowInstanceId())) { - task = workflowExecutionService.poll("simple_task_in_sub_wf", "junit.worker"); - } - assertNotNull(task); - 
assertEquals("simple_task_in_sub_wf", task.getTaskType()); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - assertEquals(subWorkflowId1, task.getWorkflowInstanceId()); - - TaskResult taskResult = new TaskResult(task); - taskResult.setReasonForIncompletion("fail task 1"); - taskResult.setStatus(TaskResult.Status.FAILED); - workflowExecutionService.updateTask(taskResult); - - subWorkflow1 = workflowExecutionService.getExecutionStatus(subWorkflowId1, true); - assertNotNull(subWorkflow1); - assertEquals(WorkflowStatus.FAILED, subWorkflow1.getStatus()); - - subWorkflow2 = workflowExecutionService.getExecutionStatus(subWorkflowId2, true); - assertNotNull(subWorkflow2); - assertEquals(RUNNING, subWorkflow2.getStatus()); - - // Execute again to re-evaluate the Subworkflow task. - workflowExecutor.executeSystemTask(dummySubWorkflowSystemTask, subWorkflowTaskId1, 1); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - assertEquals(COMPLETED_WITH_ERRORS, workflow.getTasks().get(1).getStatus()); - assertEquals(IN_PROGRESS, workflow.getTasks().get(2).getStatus()); - - // fail sub workflow 2 - task = new Task(); - while (!subWorkflowId2.equals(task.getWorkflowInstanceId())) { - task = workflowExecutionService.poll("simple_task_in_sub_wf", "junit.worker"); - } - assertNotNull(task); - assertEquals("simple_task_in_sub_wf", task.getTaskType()); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - assertEquals(subWorkflowId2, task.getWorkflowInstanceId()); - - taskResult = new TaskResult(task); - taskResult.setReasonForIncompletion("fail task 2"); - taskResult.setStatus(TaskResult.Status.FAILED); - workflowExecutionService.updateTask(taskResult); - - subWorkflow1 = workflowExecutionService.getExecutionStatus(subWorkflowId1, true); - assertNotNull(subWorkflow1); - assertEquals(WorkflowStatus.FAILED, subWorkflow1.getStatus()); - - subWorkflow2 = workflowExecutionService.getExecutionStatus(subWorkflowId2, true); - assertNotNull(subWorkflow2); - assertEquals(WorkflowStatus.FAILED, subWorkflow2.getStatus()); - - // Execute again to re-evaluate the Subworkflow task. 
- workflowExecutor.executeSystemTask(dummySubWorkflowSystemTask, subWorkflowTaskId2, 1); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); - assertEquals(4, workflow.getTasks().size()); - assertEquals(COMPLETED_WITH_ERRORS, workflow.getTasks().get(1).getStatus()); - assertEquals(COMPLETED_WITH_ERRORS, workflow.getTasks().get(2).getStatus()); - } - - - @Test - public void testDefDAO() { - List<TaskDef> taskDefs = metadataService.getTaskDefs(); - assertNotNull(taskDefs); - assertFalse(taskDefs.isEmpty()); - } - - @Test - public void testSimpleWorkflowFailureWithTerminalError() throws Exception { - clearWorkflows(); - - TaskDef taskDef = notFoundSafeGetTaskDef("junit_task_1"); - taskDef.setRetryCount(1); - metadataService.updateTaskDef(taskDef); - - WorkflowDef workflowDef = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1); - assertNotNull(workflowDef); - Map<String, Object> outputParameters = workflowDef.getOutputParameters(); - outputParameters.put("validationErrors", "${t1.output.ErrorMessage}"); - metadataService.updateWorkflowDef(workflowDef); - - String correlationId = "unit_test_1"; - Map<String, Object> input = new HashMap<>(); - input.put("param1", "p1 value"); - input.put("param2", "p2 value"); - String workflowInstanceId = startOrLoadWorkflowExecution("simpleWorkflowFailureWithTerminalError", LINEAR_WORKFLOW_T1_T2, 1, correlationId, input, null, null); - assertNotNull(workflowInstanceId); - - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true); - assertNotNull(workflow); - assertEquals(workflow.getReasonForIncompletion(), RUNNING, workflow.getStatus()); - assertEquals(1, workflow.getTasks().size()); //The very first task is the one that should be scheduled. 
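// Sketch (not original code) of the worker-side contract this test exercises: reporting
// FAILED_WITH_TERMINAL_ERROR fails the task and the workflow immediately, without attempting the
// retries configured on the TaskDef (hence the later assertions of retryCount 1 configured vs 0 used).
Task polledTask = workflowExecutionService.poll("junit_task_1", "task1.junit.worker");
TaskResult terminalError = new TaskResult(polledTask);
terminalError.setStatus(TaskResult.Status.FAILED_WITH_TERMINAL_ERROR);
terminalError.setReasonForIncompletion("integration endpoint permanently unavailable");
workflowExecutionService.updateTask(terminalError);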
- - boolean failed = false; - try { - workflowExecutor.rewind(workflowInstanceId, false); - } catch (ApplicationException ae) { - failed = true; - } - assertTrue(failed); - - // Polling for the first task should return the same task as before - Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); - assertNotNull(task); - assertEquals("junit_task_1", task.getTaskType()); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - assertEquals(workflowInstanceId, task.getWorkflowInstanceId()); - - TaskResult taskResult = new TaskResult(task); - taskResult.setReasonForIncompletion("NON TRANSIENT ERROR OCCURRED: An integration point required to complete the task is down"); - taskResult.setStatus(TaskResult.Status.FAILED_WITH_TERMINAL_ERROR); - taskResult.addOutputData("TERMINAL_ERROR", "Integration endpoint down: FOOBAR"); - taskResult.addOutputData("ErrorMessage", "There was a terminal error"); - - workflowExecutionService.updateTask(taskResult); - workflowExecutor.decide(workflowInstanceId); - - workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true); - TaskDef junit_task_1 = notFoundSafeGetTaskDef("junit_task_1"); - Task t1 = workflow.getTaskByRefName("t1"); - assertNotNull(workflow); - assertEquals(WorkflowStatus.FAILED, workflow.getStatus()); - assertEquals("NON TRANSIENT ERROR OCCURRED: An integration point required to complete the task is down", workflow.getReasonForIncompletion()); - assertEquals(1, junit_task_1.getRetryCount()); //Configured retries at the task definition level - assertEquals(0, t1.getRetryCount()); //Actual retries done on the task - assertTrue(workflow.getOutput().containsKey("o1")); - assertEquals("p1 value", workflow.getOutput().get("o1")); - assertEquals(workflow.getOutput().get("validationErrors").toString(), "There was a terminal error"); - - outputParameters.remove("validationErrors"); - metadataService.updateWorkflowDef(workflowDef); - } - - @Test - public void testSimpleWorkflow() throws Exception { - - clearWorkflows(); - - metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1); - - String correlationId = "unit_test_1"; - Map input = new HashMap<>(); - String inputParam1 = "p1 value"; - input.put("param1", inputParam1); - input.put("param2", "p2 value"); - String workflowInstanceId = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input, null, null); - logger.info("testSimpleWorkflow.wfid= {}", workflowInstanceId); - assertNotNull(workflowInstanceId); - - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - assertEquals(1, workflow.getTasks().size()); //The very first task is the one that should be scheduled. 
- - boolean failed = false; - try { - workflowExecutor.rewind(workflowInstanceId, false); - } catch (ApplicationException ae) { - failed = true; - } - assertTrue(failed); - - // Polling for the first task - Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); - assertNotNull(task); - assertEquals("junit_task_1", task.getTaskType()); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - assertEquals(workflowInstanceId, task.getWorkflowInstanceId()); - - workflowExecutor.decide(workflowInstanceId); - - String task1Op = "task1.Done"; - List tasks = workflowExecutionService.getTasks(task.getTaskType(), null, 1); - assertNotNull(tasks); - assertEquals(1, tasks.size()); - task = tasks.get(0); - assertEquals(workflowInstanceId, task.getWorkflowInstanceId()); - task.getOutputData().put("op", task1Op); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, false); - assertNotNull(workflow); - assertNotNull(workflow.getOutput()); - - task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); - assertNotNull(task); - assertEquals("junit_task_2", task.getTaskType()); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - String task2Input = (String) task.getInputData().get("tp2"); - assertNotNull("Found=" + task.getInputData(), task2Input); - assertEquals(task1Op, task2Input); - - task2Input = (String) task.getInputData().get("tp1"); - assertNotNull(task2Input); - assertEquals(inputParam1, task2Input); - - task.setStatus(COMPLETED); - task.setReasonForIncompletion("unit test failure"); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); - tasks = workflow.getTasks(); - assertNotNull(tasks); - assertEquals(2, tasks.size()); - - assertTrue("Found " + workflow.getOutput().toString(), workflow.getOutput().containsKey("o3")); - assertEquals("task1.Done", workflow.getOutput().get("o3")); - } - - @Test - public void testSimpleWorkflowNullInputOutputs() throws Exception { - clearWorkflows(); - - WorkflowDef workflowDefinition = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1); - // Assert null keys are preserved in task definition's input parameters. - assertTrue(workflowDefinition.getTasks().get(0).getInputParameters().containsKey("someNullKey")); - - String correlationId = "unit_test_1"; - Map input = new HashMap<>(); - String inputParam1 = "p1 value"; - input.put("param1", inputParam1); - input.put("param2", null); - String workflowInstanceId = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input, null, null); - logger.info("testSimpleWorkflow.wfid= {}", workflowInstanceId); - assertNotNull(workflowInstanceId); - - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - assertEquals(1, workflow.getTasks().size()); //The very first task is the one that should be scheduled. - - // Assert null values are passed through workflow input. - assertNull(workflow.getInput().get("param2")); - // Assert null values are carried from task definition to task execution. 
- assertNull(workflow.getTasks().get(0).getInputData().get("someNullKey")); - - // Polling for the first task - Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); - assertNotNull(task); - assertEquals("junit_task_1", task.getTaskType()); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - assertEquals(workflowInstanceId, task.getWorkflowInstanceId()); - - task.setStatus(COMPLETED); - task.getOutputData().put("someKey", null); - Map someOtherKey = new HashMap<>(); - someOtherKey.put("a", 1); - someOtherKey.put("A", null); - task.getOutputData().put("someOtherKey", someOtherKey); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true); - assertNotNull(workflow); - - task = workflow.getTasks().get(0); - // Assert null keys are preserved in task outputs. - assertTrue(task.getOutputData().containsKey("someKey")); - assertNull(task.getOutputData().get("someKey")); - someOtherKey = (Map) task.getOutputData().get("someOtherKey"); - assertTrue(someOtherKey.containsKey("A")); - assertNull(someOtherKey.get("A")); - } - - @Test - public void testTerminateMultiLevelWorkflow() { - createWorkflowWthMultiLevelSubWorkflows(); - - Map workflowInput = new HashMap<>(); - workflowInput.put("param1", "p1 value"); - workflowInput.put("param2", "p2 value"); - String workflowId = startOrLoadWorkflowExecution(WORKFLOW_MULTI_LEVEL_SW, 1, "", workflowInput, null, null); - assertNotNull(workflowId); - - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - assertEquals(1, workflow.getTasks().size()); - - // Simulating SystemTaskWorkerCoordinator to execute async system tasks - String subWorkflowTaskIdLevel1 = workflow.getTaskByRefName("junit_sw_level_1_task").getTaskId(); - workflowExecutor.executeSystemTask(dummySubWorkflowSystemTask, subWorkflowTaskIdLevel1, 1); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - - String level1SubWorkflowId = workflow.getTasks().get(0).getSubWorkflowId(); - Workflow level1SubWorkflow = workflowExecutionService.getExecutionStatus(level1SubWorkflowId, true); - assertNotNull(level1SubWorkflow); - assertEquals(RUNNING, level1SubWorkflow.getStatus()); - assertEquals(1, level1SubWorkflow.getTasks().size()); - - String subWorkflowTaskIdLevel2 = level1SubWorkflow.getTaskByRefName("junit_sw_level_2_task").getTaskId(); - workflowExecutor.executeSystemTask(dummySubWorkflowSystemTask, subWorkflowTaskIdLevel2, 1); - - level1SubWorkflow = workflowExecutionService.getExecutionStatus(level1SubWorkflowId, true); - - String level2SubWorkflowId = level1SubWorkflow.getTasks().get(0).getSubWorkflowId(); - Workflow level2SubWorkflow = workflowExecutionService.getExecutionStatus(level2SubWorkflowId, true); - assertNotNull(level2SubWorkflow); - assertEquals(RUNNING, level2SubWorkflow.getStatus()); - assertEquals(1, level2SubWorkflow.getTasks().size()); - - String subWorkflowTaskIdLevel3 = level2SubWorkflow.getTaskByRefName("junit_sw_level_3_task").getTaskId(); - workflowExecutor.executeSystemTask(dummySubWorkflowSystemTask, subWorkflowTaskIdLevel3, 1); - - level2SubWorkflow = workflowExecutionService.getExecutionStatus(level2SubWorkflowId, true); - - String level3SubWorkflowId = level2SubWorkflow.getTasks().get(0).getSubWorkflowId(); - Workflow level3SubWorkflow = workflowExecutionService.getExecutionStatus(level3SubWorkflowId, true); - 
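// Illustrative note (not original code): SUB_WORKFLOW is an asynchronous system task that a
// running server executes via SystemTaskWorkerCoordinator; these tests simulate that worker by
// invoking the executor directly with the task id of the sub-workflow task to be evaluated, as in
// this sketch (sketchSubWorkflowTaskId is an illustrative name).
String sketchSubWorkflowTaskId = workflow.getTaskByRefName("junit_sw_level_1_task").getTaskId();
workflowExecutor.executeSystemTask(dummySubWorkflowSystemTask, sketchSubWorkflowTaskId, 1);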
assertNotNull(level3SubWorkflow); - assertEquals(RUNNING, level3SubWorkflow.getStatus()); - assertEquals(1, level3SubWorkflow.getTasks().size()); - assertEquals("junit_task_3", level3SubWorkflow.getTasks().get(0).getTaskType()); - - // terminate the top-level parent workflow - workflowExecutor.terminateWorkflow(workflow.getWorkflowId(), "terminate_test"); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertEquals(TERMINATED, workflow.getStatus()); - - level1SubWorkflow = workflowExecutionService.getExecutionStatus(level1SubWorkflowId, true); - assertEquals(TERMINATED, level1SubWorkflow.getStatus()); - - level2SubWorkflow = workflowExecutionService.getExecutionStatus(level2SubWorkflowId, true); - assertEquals(TERMINATED, level2SubWorkflow.getStatus()); - - level3SubWorkflow = workflowExecutionService.getExecutionStatus(level3SubWorkflowId, true); - assertEquals(TERMINATED, level3SubWorkflow.getStatus()); - } - - @Test - public void testSimpleWorkflowWithResponseTimeout() throws Exception { - - createWFWithResponseTimeout(); - - String correlationId = "unit_test_1"; - Map workflowInput = new HashMap<>(); - String inputParam1 = "p1 value"; - workflowInput.put("param1", inputParam1); - workflowInput.put("param2", "p2 value"); - String workflowId = startOrLoadWorkflowExecution("RTOWF", 1, correlationId, workflowInput, null, null); - logger.debug("testSimpleWorkflowWithResponseTimeout.wfid=" + workflowId); - assertNotNull(workflowId); - - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - assertEquals(1, workflow.getTasks().size()); //The very first task is the one that should be scheduled. - assertEquals(1, queueDAO.getSize("task_rt")); - - // Polling for the first task should return the first task - Task task = workflowExecutionService.poll("task_rt", "task1.junit.worker.testTimeout"); - assertNotNull(task); - assertEquals("task_rt", task.getTaskType()); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - assertEquals(workflowId, task.getWorkflowInstanceId()); - - // As the task_rt is out of the queue, the next poll should not get it - Task nullTask = workflowExecutionService.poll("task_rt", "task1.junit.worker.testTimeout"); - assertNull(nullTask); - - Thread.sleep(10000); - workflowExecutor.decide(workflowId); - assertEquals(1, queueDAO.getSize("task_rt")); - - // The first task would be timed_out and a new task will be scheduled - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - assertEquals(2, workflow.getTasks().size()); - assertTrue(workflow.getTasks().stream().allMatch(t -> t.getReferenceTaskName().equals("task_rt_t1"))); - assertEquals(TIMED_OUT, workflow.getTasks().get(0).getStatus()); - assertEquals(SCHEDULED, workflow.getTasks().get(1).getStatus()); - - // Polling now should get the same task back because it should have been put back in the queue - Task taskAgain = workflowExecutionService.poll("task_rt", "task1.junit.worker"); - assertNotNull(taskAgain); - - // update task with callback after seconds greater than the response timeout - taskAgain.setStatus(IN_PROGRESS); - taskAgain.setCallbackAfterSeconds(2); - workflowExecutionService.updateTask(taskAgain); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.RUNNING, 
workflow.getStatus()); - assertEquals(2, workflow.getTasks().size()); - assertEquals(SCHEDULED, workflow.getTasks().get(1).getStatus()); - - // wait for callback after seconds which is longer than response timeout seconds and then call decide - Thread.sleep(2010); - // Ensure unacks are processed. - queueDAO.processUnacks(taskAgain.getTaskDefName()); - workflowExecutor.decide(workflowId); - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - - // Poll for task again - taskAgain = workflowExecutionService.poll("task_rt", "task1.junit.worker"); - assertNotNull(taskAgain); - - taskAgain.getOutputData().put("op", "task1.Done"); - taskAgain.setStatus(COMPLETED); - workflowExecutionService.updateTask(taskAgain); - - // poll for next task - task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker.testTimeout"); - assertNotNull(task); - assertEquals("junit_task_2", task.getTaskType()); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - task.setStatus(COMPLETED); - task.setReasonForIncompletion("unit test failure"); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); - } - - @Test - public void testWorkflowRerunWithSubWorkflows() throws Exception { - // Execute a workflow with sub-workflow - String workflowId = this.runWorkflowWithSubworkflow(); - // Check it completed - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); - assertEquals(2, workflow.getTasks().size()); - - // Now lets pickup the first task in the sub workflow and rerun it from there - String subWorkflowId = null; - for (Task task : workflow.getTasks()) { - if (task.getTaskType().equalsIgnoreCase(SubWorkflow.NAME)) { - subWorkflowId = task.getSubWorkflowId(); - } - } - assertNotNull(subWorkflowId); - Workflow subWorkflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); - Task subWorkflowTask1 = null; - for (Task task : subWorkflow.getTasks()) { - if (task.getTaskDefName().equalsIgnoreCase("junit_task_1")) { - subWorkflowTask1 = task; - } - } - assertNotNull(subWorkflowTask1); - - RerunWorkflowRequest rerunWorkflowRequest = new RerunWorkflowRequest(); - - Map newInput = new HashMap<>(); - newInput.put("p1", "1"); - newInput.put("p2", "2"); - rerunWorkflowRequest.setTaskInput(newInput); - - String correlationId = "unit_test_sw_new"; - Map input = new HashMap<>(); - input.put("param1", "New p1 value"); - input.put("param2", "New p2 value"); - rerunWorkflowRequest.setCorrelationId(correlationId); - rerunWorkflowRequest.setWorkflowInput(input); - - rerunWorkflowRequest.setReRunFromWorkflowId(workflowId); - rerunWorkflowRequest.setReRunFromTaskId(subWorkflowTask1.getTaskId()); - // Rerun - workflowExecutor.rerun(rerunWorkflowRequest); - - // The main WF and the sub WF should be in RUNNING state - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - assertEquals(2, workflow.getTasks().size()); - assertEquals(correlationId, workflow.getCorrelationId()); - assertEquals("New p1 value", workflow.getInput().get("param1")); - assertEquals("New p2 value", workflow.getInput().get("param2")); - - subWorkflow = 
workflowExecutionService.getExecutionStatus(subWorkflowId, true); - assertNotNull(subWorkflow); - assertEquals(RUNNING, subWorkflow.getStatus()); - // Since we are re running from the sub workflow task, there - // should be only 1 task that is SCHEDULED - assertEquals(1, subWorkflow.getTasks().size()); - assertEquals(SCHEDULED, subWorkflow.getTasks().get(0).getStatus()); - - // Now execute the task - Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - assertEquals(task.getInputData().get("p1").toString(), "1"); - assertEquals(task.getInputData().get("p2").toString(), "2"); - task.getOutputData().put("op", "junit_task_1.done"); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - subWorkflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); - assertNotNull(subWorkflow); - assertEquals(RUNNING, subWorkflow.getStatus()); - assertEquals(2, subWorkflow.getTasks().size()); - - // Poll for second task of the sub workflow and execute it - task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - task.getOutputData().put("op", "junit_task_2.done"); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - // Now the sub workflow and the main workflow must have finished - subWorkflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); - assertNotNull(subWorkflow); - assertEquals(WorkflowStatus.COMPLETED, subWorkflow.getStatus()); - assertEquals(2, subWorkflow.getTasks().size()); - - // Execute again to re-evaluate the Subworkflow task. - workflowExecutor.executeSystemTask(dummySubWorkflowSystemTask, subWorkflow.getParentWorkflowTaskId(), 1); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); - assertEquals(2, workflow.getTasks().size()); - } - - @Test - public void testSimpleWorkflowWithTaskSpecificDomain() throws Exception { - - long startTimeTimestamp = System.currentTimeMillis(); - - clearWorkflows(); - createWorkflowWithSubWorkflow(); - - metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2_SW, 1); - - String correlationId = "unit_test_sw"; - Map input = new HashMap<>(); - String inputParam1 = "p1 value"; - input.put("param1", inputParam1); - input.put("param2", "p2 value"); - Map taskToDomain = new HashMap<>(); - taskToDomain.put("junit_task_3", "domain1"); - taskToDomain.put("junit_task_2", "domain1"); - - // Poll before so that a polling for this task is "active" - Task task = workflowExecutionService.poll("junit_task_3", "task1.junit.worker", "domain1"); - assertNull(task); - task = workflowExecutionService.poll("junit_task_2", "task1.junit.worker", "domain1"); - assertNull(task); - - String workflowId = startOrLoadWorkflowExecution("simpleWorkflowWithTaskSpecificDomain", LINEAR_WORKFLOW_T1_T2_SW, 1, correlationId, input, null, taskToDomain); - assertNotNull(workflowId); - - Workflow workflow = workflowExecutor.getWorkflow(workflowId, false); - assertNotNull(workflow); - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(workflow.getReasonForIncompletion(), RUNNING, workflow.getStatus()); - assertEquals(RUNNING, workflow.getStatus()); - assertEquals(1, workflow.getTasks().size()); //The very 
first task is the one that should be scheduled. - - // Check Size - Map sizes = workflowExecutionService.getTaskQueueSizes(Arrays.asList("domain1:junit_task_3", "junit_task_3")); - assertEquals(sizes.get("domain1:junit_task_3").intValue(), 1); - assertEquals(sizes.get("junit_task_3").intValue(), 0); - - // Polling for the first task - task = workflowExecutionService.poll("junit_task_3", "task1.junit.worker"); - assertNull(task); - task = workflowExecutionService.poll("junit_task_3", "task1.junit.worker", "domain1"); - assertNotNull(task); - assertEquals("junit_task_3", task.getTaskType()); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - assertEquals(workflowId, task.getWorkflowInstanceId()); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - - List tasks = workflowExecutionService.getTasks(task.getTaskType(), null, 10); - assertNotNull(tasks); - assertEquals(1, tasks.size()); - task = tasks.get(0); - assertEquals(workflowId, task.getWorkflowInstanceId()); - - String task1Op = "task1.Done"; - task.getOutputData().put("op", task1Op); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - - // Simulating SystemTaskWorkerCoordinator to execute async system tasks - String subWorkflowTaskId = workflow.getTaskByRefName("sw1").getTaskId(); - workflowExecutor.executeSystemTask(dummySubWorkflowSystemTask, subWorkflowTaskId, 1); - - task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); - assertNotNull(task); - assertEquals("junit_task_1", task.getTaskType()); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, false); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - assertNotNull(workflow.getTaskToDomain()); - assertEquals(workflow.getTaskToDomain().size(), 2); - - task.setStatus(COMPLETED); - task.setReasonForIncompletion("unit test failure"); - workflowExecutionService.updateTask(task); - - task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker", "domain1"); - assertNotNull(task); - assertEquals("junit_task_2", task.getTaskType()); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - task.setStatus(COMPLETED); - task.setReasonForIncompletion("unit test failure"); - workflowExecutionService.updateTask(task); - - // Execute again to re-evaluate the Subworkflow task. 
- workflowExecutor.executeSystemTask(dummySubWorkflowSystemTask, subWorkflowTaskId, 1); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); - tasks = workflow.getTasks(); - assertNotNull(tasks); - assertEquals(2, tasks.size()); - assertTrue("Found " + workflow.getOutput().toString(), workflow.getOutput().containsKey("o3")); - assertEquals("task1.Done", workflow.getOutput().get("o3")); - - Predicate pollDataWithinTestTimes = pollData -> pollData.getLastPollTime() != 0 && pollData.getLastPollTime() > startTimeTimestamp; - - List pollData = workflowExecutionService.getPollData("junit_task_3").stream() - .filter(pollDataWithinTestTimes) - .collect(Collectors.toList()); - assertEquals(2, pollData.size()); - for (PollData pd : pollData) { - assertEquals(pd.getQueueName(), "junit_task_3"); - assertEquals(pd.getWorkerId(), "task1.junit.worker"); - assertTrue(pd.getLastPollTime() != 0); - if (pd.getDomain() != null) { - assertEquals(pd.getDomain(), "domain1"); - } - } - - List pdList = workflowExecutionService.getAllPollData().stream() - .filter(pollDataWithinTestTimes) - .collect(Collectors.toList()); - int count = 0; - for (PollData pd : pdList) { - if (pd.getQueueName().equals("junit_task_3")) { - count++; - } - } - assertEquals(2, count); - } - - @Test - public void testSimpleWorkflowWithAllTaskInOneDomain() throws Exception { - - clearWorkflows(); - createWorkflowWithSubWorkflow(); - - WorkflowDef def = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2_SW, 1); - - String correlationId = "unit_test_sw"; - Map input = new HashMap(); - String inputParam1 = "p1 value"; - input.put("param1", inputParam1); - input.put("param2", "p2 value"); - Map taskToDomain = new HashMap(); - taskToDomain.put("*", "domain11,, domain12"); - - // Poll before so that a polling for this task is "active" - Task task = workflowExecutionService.poll("junit_task_3", "task1.junit.worker", "domain11"); - assertNull(task); - task = workflowExecutionService.poll("junit_task_2", "task1.junit.worker", "domain12"); - assertNull(task); - - String workflowId = startOrLoadWorkflowExecution("simpleWorkflowWithTasksInOneDomain", LINEAR_WORKFLOW_T1_T2_SW, 1, correlationId, input, null, taskToDomain); - assertNotNull(workflowId); - - Workflow workflow = workflowExecutor.getWorkflow(workflowId, false); - assertNotNull(workflow); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(workflow.getReasonForIncompletion(), RUNNING, workflow.getStatus()); - assertEquals(RUNNING, workflow.getStatus()); - assertEquals(1, workflow.getTasks().size()); //The very first task is the one that should be scheduled. 
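// Sketch (assumption about the intent of "domain11,, domain12", not original code): mapping "*"
// applies the domain setting to every task type, and the comma-separated value is a fallback list
// tried in order based on where active pollers exist; the empty middle entry stands for the plain,
// domain-less queue.
Map<String, String> sketchTaskToDomain = new HashMap<>();
sketchTaskToDomain.put("*", "domain11,, domain12");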
- - // Check Size - Map sizes = workflowExecutionService.getTaskQueueSizes(Arrays.asList("domain11:junit_task_3", "junit_task_3")); - assertEquals(sizes.get("domain11:junit_task_3").intValue(), 1); - assertEquals(sizes.get("junit_task_3").intValue(), 0); - - // Polling for the first task - task = workflowExecutionService.poll("junit_task_3", "task1.junit.worker"); - assertNull(task); - task = workflowExecutionService.poll("junit_task_3", "task1.junit.worker", "domain11"); - assertNotNull(task); - assertEquals("junit_task_3", task.getTaskType()); - assertEquals("domain11", task.getDomain()); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - assertEquals(workflowId, task.getWorkflowInstanceId()); - - List tasks = workflowExecutionService.getTasks(task.getTaskType(), null, 1); - assertNotNull(tasks); - assertEquals(1, tasks.size()); - task = tasks.get(0); - - String task1Op = "task1.Done"; - assertEquals(workflowId, task.getWorkflowInstanceId()); - task.getOutputData().put("op", task1Op); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertEquals(workflowId, task.getWorkflowInstanceId()); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - assertEquals(2, workflow.getTasks().size()); - - // Simulating SystemTaskWorkerCoordinator to execute async system tasks - String subWorkflowTaskId = workflow.getTaskByRefName("sw1").getTaskId(); - workflowExecutor.executeSystemTask(dummySubWorkflowSystemTask, subWorkflowTaskId, 1); - - task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); - assertNull(task); - task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker", "domain12"); - assertNotNull(task); - assertEquals("junit_task_1", task.getTaskType()); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, false); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - assertNotNull(workflow.getTaskToDomain()); - assertEquals(workflow.getTaskToDomain().size(), 1); - - task.setStatus(COMPLETED); - task.setReasonForIncompletion("unit test failure"); - workflowExecutionService.updateTask(task); - - - task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker", "domain11"); - assertNull(task); - task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker", "domain12"); - assertNotNull(task); - assertEquals("junit_task_2", task.getTaskType()); - assertEquals("domain12", task.getDomain()); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - task.setStatus(COMPLETED); - task.setReasonForIncompletion("unit test failure"); - workflowExecutionService.updateTask(task); - - // Execute again to re-evaluate the Subworkflow task. 
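// Sketch (not original code) of the queue naming the size check above relies on: when a task is
// mapped to a domain, it is enqueued on "<domain>:<taskType>" and only polls that pass the same
// domain receive it, while the undecorated "<taskType>" queue stays empty. The sketch* names and
// "sketch.worker" id are illustrative.
Map<String, Integer> sketchSizes =
        workflowExecutionService.getTaskQueueSizes(Arrays.asList("domain11:junit_task_3", "junit_task_3"));
Task sketchTask = workflowExecutionService.poll("junit_task_3", "sketch.worker", "domain11");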
- workflowExecutor.executeSystemTask(dummySubWorkflowSystemTask, subWorkflowTaskId, 1); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); - tasks = workflow.getTasks(); - assertNotNull(tasks); - assertEquals(2, tasks.size()); - assertTrue("Found " + workflow.getOutput().toString(), workflow.getOutput().containsKey("o3")); - assertEquals("task1.Done", workflow.getOutput().get("o3")); - } - - @After - public void clearWorkflows() throws Exception { - List workflowsWithVersion = metadataService.getWorkflowDefs().stream() - .map(def -> def.getName() + ":" + def.getVersion()) - .collect(Collectors.toList()); - for (String workflowWithVersion : workflowsWithVersion) { - String workflowName = StringUtils.substringBefore(workflowWithVersion, ":"); - int version = Integer.parseInt(StringUtils.substringAfter(workflowWithVersion, ":")); - List running = workflowExecutionService.getRunningWorkflows(workflowName, version); - for (String wfid : running) { - Workflow workflow = workflowExecutor.getWorkflow(wfid, false); - if (!workflow.getStatus().isTerminal()) { - workflowExecutor.terminateWorkflow(wfid, "cleanup"); - } - } - } - queueDAO.queuesDetail().keySet().forEach(queueDAO::flush); - - new FileOutputStream(this.getClass().getResource(TEMP_FILE_PATH).getPath()).close(); - } - - @Test - public void testLongRunning() throws Exception { - - clearWorkflows(); - - metadataService.getWorkflowDef(LONG_RUNNING, 1); - - String correlationId = "unit_test_1"; - Map input = new HashMap<>(); - String inputParam1 = "p1 value"; - input.put("param1", inputParam1); - input.put("param2", "p2 value"); - - String workflowId = startOrLoadWorkflowExecution(LONG_RUNNING, 1, correlationId, input, null, null); - System.out.println("testLongRunning.wfid=" + workflowId); - assertNotNull(workflowId); - - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - - // Check the queue - assertEquals(Integer.valueOf(1), workflowExecutionService.getTaskQueueSizes(Collections.singletonList("junit_task_1")).get("junit_task_1")); - - Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - String param1 = (String) task.getInputData().get("p1"); - String param2 = (String) task.getInputData().get("p2"); - - assertNotNull(param1); - assertNotNull(param2); - assertEquals("p1 value", param1); - assertEquals("p2 value", param2); - - String task1Output = "task1.In.Progress"; - task.getOutputData().put("op", task1Output); - task.setStatus(IN_PROGRESS); - task.setCallbackAfterSeconds(5); - workflowExecutionService.updateTask(task); - String taskId = task.getTaskId(); - - // Check the queue - assertEquals(Integer.valueOf(1), workflowExecutionService.getTaskQueueSizes(Collections.singletonList("junit_task_1")).get("junit_task_1")); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - - // Polling for next task should not return anything - Task task2 = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); - assertNull(task2); - - task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); - assertNull(task); - - Uninterruptibles.sleepUninterruptibly(5, TimeUnit.SECONDS); - // 
Polling for the first task should return the same task as before - task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - assertEquals(task.getTaskId(), taskId); - - task1Output = "task1.Done"; - List tasks = workflowExecutionService.getTasks(task.getTaskType(), null, 1); - assertNotNull(tasks); - assertEquals(1, tasks.size()); - assertEquals(workflowId, task.getWorkflowInstanceId()); - task = tasks.get(0); - task.getOutputData().put("op", task1Output); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - String task2Input = (String) task.getInputData().get("tp2"); - assertNotNull(task2Input); - assertEquals(task1Output, task2Input); - - task2Input = (String) task.getInputData().get("tp1"); - assertNotNull(task2Input); - assertEquals(inputParam1, task2Input); - - task.setStatus(COMPLETED); - task.setReasonForIncompletion("unit test failure"); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); - tasks = workflow.getTasks(); - assertNotNull(tasks); - assertEquals(2, tasks.size()); - } - - @Test - public void testResetWorkflowInProgressTasks() { - WorkflowDef workflowDef = metadataService.getWorkflowDef(LONG_RUNNING, 1); - assertNotNull(workflowDef); - - String correlationId = "unit_test_1"; - Map input = new HashMap<>(); - String inputParam1 = "p1 value"; - input.put("param1", inputParam1); - input.put("param2", "p2 value"); - String workflowId = startOrLoadWorkflowExecution(LONG_RUNNING, 1, correlationId, input, null, null); - assertNotNull(workflowId); - - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - - // Check the queue - assertEquals(Integer.valueOf(1), workflowExecutionService.getTaskQueueSizes(Collections.singletonList("junit_task_1")).get("junit_task_1")); - - Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - // Verify the task - String param1 = (String) task.getInputData().get("p1"); - String param2 = (String) task.getInputData().get("p2"); - assertNotNull(param1); - assertNotNull(param2); - assertEquals("p1 value", param1); - assertEquals("p2 value", param2); - - // Update the task with callbackAfterSeconds - String task1Output = "task1.In.Progress"; - task.getOutputData().put("op", task1Output); - task.setStatus(IN_PROGRESS); - task.setCallbackAfterSeconds(3600); - workflowExecutionService.updateTask(task); - String taskId = task.getTaskId(); - - // Check the queue - assertEquals(Integer.valueOf(1), workflowExecutionService.getTaskQueueSizes(Collections.singletonList("junit_task_1")).get("junit_task_1")); - - // Check the workflow - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - assertEquals(1, workflow.getTasks().size()); - assertEquals(SCHEDULED, workflow.getTasks().get(0).getStatus()); - - // Polling for next task should not return anything - Task task2 = 
workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); - assertNull(task2); - - task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); - assertNull(task); - - // Reset the callbackAfterSeconds - workflowExecutor.resetCallbacksForWorkflow(workflowId); - - // Now Polling for the first task should return the same task as before - task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - assertEquals(task.getTaskId(), taskId); - assertEquals(task.getCallbackAfterSeconds(), 0); - - // update task with COMPLETED status - task1Output = "task1.Done"; - task.getOutputData().put("op", task1Output); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - // poll for next task - task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - String task2Input = (String) task.getInputData().get("tp2"); - assertNotNull(task2Input); - assertEquals(task1Output, task2Input); - - task2Input = (String) task.getInputData().get("tp1"); - assertNotNull(task2Input); - assertEquals(inputParam1, task2Input); - - // update task with COMPLETED status - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); - assertEquals(2, workflow.getTasks().size()); - } - - @Test - public void testConcurrentWorkflowExecutions() { - - int count = 3; - - metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1); - - String correlationId = "unit_test_concurrrent"; - Map input = new HashMap(); - String inputParam1 = "p1 value"; - input.put("param1", inputParam1); - input.put("param2", "p2 value"); - String[] wfids = new String[count]; - - for (int i = 0; i < count; i++) { - String wfid = startOrLoadWorkflowExecution("concurrentWorkflowExecutions", LINEAR_WORKFLOW_T1_T2, 1, correlationId, input, null, null); - System.out.println("testConcurrentWorkflowExecutions.wfid=" + wfid); - assertNotNull(wfid); - - List ids = workflowExecutionService.getRunningWorkflows(LINEAR_WORKFLOW_T1_T2, 1); - assertNotNull(ids); - assertTrue("found no ids: " + ids, ids.size() > 0); //if there are concurrent tests running, this would be more than 1 - boolean foundId = false; - for (String id : ids) { - if (id.equals(wfid)) { - foundId = true; - } - } - assertTrue(foundId); - wfids[i] = wfid; - } - - - String task1Op = ""; - for (int i = 0; i < count; i++) { - - Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - String param1 = (String) task.getInputData().get("p1"); - String param2 = (String) task.getInputData().get("p2"); - - assertNotNull(param1); - assertNotNull(param2); - assertEquals("p1 value", param1); - assertEquals("p2 value", param2); - - task1Op = "task1.output->" + param1 + "." 
+ param2; - task.getOutputData().put("op", task1Op); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - } - - for (int i = 0; i < count; i++) { - Task task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - String task2Input = (String) task.getInputData().get("tp2"); - assertNotNull(task2Input); - assertEquals(task1Op, task2Input); - - task2Input = (String) task.getInputData().get("tp1"); - assertNotNull(task2Input); - assertEquals(inputParam1, task2Input); - - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - } - - List wfs = workflowExecutionService.getWorkflowInstances(LINEAR_WORKFLOW_T1_T2, correlationId, false, false); - wfs.forEach(wf -> { - assertEquals(WorkflowStatus.COMPLETED, wf.getStatus()); - }); - } - - @Test - public void testCaseStatementsSchemaVersion1() { - createConditionalWF(1); - runConditionalWorkflowTest(); - } - - - @Test - public void testCaseStatementsSchemaVersion2() { - createConditionalWF(2); - runConditionalWorkflowTest(); - } - - private void runConditionalWorkflowTest() { - String correlationId = "testCaseStatements: " + System.currentTimeMillis(); - Map input = new HashMap<>(); - - //default case - input.put("param1", "xxx"); - input.put("param2", "two"); - String workflowId = startOrLoadWorkflowExecution(COND_TASK_WF, 1, correlationId, input, null, null); - assertNotNull(workflowId); - - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - - Task task = workflowExecutionService.poll("junit_task_10", "junit"); - assertNotNull(task); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); - assertEquals(3, workflow.getTasks().size()); - - //nested - one - input.put("param1", "nested"); - input.put("param2", "one"); - workflowId = startOrLoadWorkflowExecution(COND_TASK_WF + 2, COND_TASK_WF, 1, correlationId, input, null, null); - assertNotNull(workflowId); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - assertEquals(3, workflow.getTasks().size()); - assertEquals("junit_task_1", workflow.getTasks().get(2).getTaskDefName()); - - //nested - two - input.put("param1", "nested"); - input.put("param2", "two"); - workflowId = startOrLoadWorkflowExecution(COND_TASK_WF + 3, COND_TASK_WF, 1, correlationId, input, null, null); - assertNotNull(workflowId); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - assertEquals(3, workflow.getTasks().size()); - assertEquals("junit_task_2", workflow.getTasks().get(2).getTaskDefName()); - - //three - input.put("param1", "three"); - input.put("param2", "two"); - input.put("finalCase", "notify"); - workflowId = startOrLoadWorkflowExecution(COND_TASK_WF + 4, COND_TASK_WF, 1, correlationId, input, null, null); - assertNotNull(workflowId); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - assertEquals(2, workflow.getTasks().size()); - assertEquals("junit_task_3", 
workflow.getTasks().get(1).getTaskDefName()); - - task = workflowExecutionService.poll("junit_task_3", "junit"); - assertNotNull(task); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - assertEquals(4, workflow.getTasks().size()); - assertEquals("junit_task_4", workflow.getTasks().get(3).getTaskDefName()); - - task = workflowExecutionService.poll("junit_task_4", "junit"); - assertNotNull(task); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); - } - - private Task getTask(String taskType) { - Task task; - int count = 2; - do { - task = workflowExecutionService.poll(taskType, "junit"); - if (task == null) { - count--; - } - if (count < 0) { - break; - } - - } while (task == null); - if (task != null) { - workflowExecutionService.ackTaskReceived(task.getTaskId()); - } - return task; - } - - @Test - public void testRetries() { - - String taskName = "junit_task_2"; - TaskDef taskDef = notFoundSafeGetTaskDef(taskName); - taskDef.setRetryCount(2); - taskDef.setRetryDelaySeconds(1); - metadataService.updateTaskDef(taskDef); - - metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1); - - String correlationId = "unit_test_1"; - Map input = new HashMap(); - String inputParam1 = "p1 value"; - input.put("param1", inputParam1); - input.put("param2", "p2 value"); - String wfid = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input, null, null); - System.out.println("testRetries.wfid=" + wfid); - assertNotNull(wfid); - - List ids = workflowExecutionService.getRunningWorkflows(LINEAR_WORKFLOW_T1_T2, 1); - assertNotNull(ids); - assertTrue("found no ids: " + ids, ids.size() > 0); //if there are concurrent tests running, this would be more than 1 - boolean foundId = false; - for (String id : ids) { - if (id.equals(wfid)) { - foundId = true; - } - } - assertTrue(foundId); - - Workflow es = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(es); - assertEquals(RUNNING, es.getStatus()); - - Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - String param1 = (String) task.getInputData().get("p1"); - String param2 = (String) task.getInputData().get("p2"); - - assertNotNull(param1); - assertNotNull(param2); - assertEquals("p1 value", param1); - assertEquals("p2 value", param2); - - String task1Op = "task1.output->" + param1 + "." 
+ param2; - task.getOutputData().put("op", task1Op); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - //fail the task twice and then succeed - verify(inputParam1, wfid, task1Op, true); - Uninterruptibles.sleepUninterruptibly(2, TimeUnit.SECONDS); - verify(inputParam1, wfid, task1Op, false); - - es = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(es); - assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); - assertEquals(3, es.getTasks().size()); //task 1, and 2 of the task 2 - - assertEquals("junit_task_1", es.getTasks().get(0).getTaskType()); - assertEquals("junit_task_2", es.getTasks().get(1).getTaskType()); - assertEquals("junit_task_2", es.getTasks().get(2).getTaskType()); - assertEquals(COMPLETED, es.getTasks().get(0).getStatus()); - assertEquals(FAILED, es.getTasks().get(1).getStatus()); - assertEquals(COMPLETED, es.getTasks().get(2).getStatus()); - assertEquals(es.getTasks().get(1).getTaskId(), es.getTasks().get(2).getRetriedTaskId()); - - - } - - @Test - public void testSuccess() { - - metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1); - - String correlationId = "unit_test_1" + UUID.randomUUID().toString(); - Map input = new HashMap(); - String inputParam1 = "p1 value"; - input.put("param1", inputParam1); - input.put("param2", "p2 value"); - String wfid = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input, null, null); - assertNotNull(wfid); - - List ids = workflowExecutionService.getRunningWorkflows(LINEAR_WORKFLOW_T1_T2, 1); - assertNotNull(ids); - assertTrue("found no ids: " + ids, ids.size() > 0); //if there are concurrent tests running, this would be more than 1 - boolean foundId = false; - for (String id : ids) { - if (id.equals(wfid)) { - foundId = true; - } - } - assertTrue(foundId); - - /* - * @correlationId - List byCorrelationId = ess.getWorkflowInstances(LINEAR_WORKFLOW_T1_T2, correlationId, false, false); - assertNotNull(byCorrelationId); - assertTrue(!byCorrelationId.isEmpty()); - assertEquals(1, byCorrelationId.size()); - */ - - Workflow es = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(es); - assertEquals(RUNNING, es.getStatus()); - // The first task would be marked as scheduled - assertEquals(1, es.getTasks().size()); - assertEquals(SCHEDULED, es.getTasks().get(0).getStatus()); - - // decideNow should be idempotent if re-run on the same state! - es = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(es); - assertEquals(RUNNING, es.getStatus()); - assertEquals(1, es.getTasks().size()); - Task t = es.getTasks().get(0); - assertEquals(SCHEDULED, t.getStatus()); - - Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - assertNotNull(task); - assertEquals(t.getTaskId(), task.getTaskId()); - es = workflowExecutionService.getExecutionStatus(wfid, true); - t = es.getTasks().get(0); - assertEquals(IN_PROGRESS, t.getStatus()); - String taskId = t.getTaskId(); - - String param1 = (String) task.getInputData().get("p1"); - String param2 = (String) task.getInputData().get("p2"); - - assertNotNull(param1); - assertNotNull(param2); - assertEquals("p1 value", param1); - assertEquals("p2 value", param2); - - String task1Op = "task1.output->" + param1 + "." 
+ param2; - task.getOutputData().put("op", task1Op); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - // If we get the full workflow here then, last task should be completed and the next task should be scheduled - es = workflowExecutionService.getExecutionStatus(wfid, true); - es.getTasks().forEach(wfTask -> { - if (wfTask.getTaskId().equals(taskId)) { - assertEquals(COMPLETED, wfTask.getStatus()); - } else { - assertEquals(SCHEDULED, wfTask.getStatus()); - } - }); - - task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - assertNotNull(task); - String task2Input = (String) task.getInputData().get("tp2"); - assertNotNull(task2Input); - assertEquals(task1Op, task2Input); - - task2Input = (String) task.getInputData().get("tp1"); - assertNotNull(task2Input); - assertEquals(inputParam1, task2Input); - - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - es = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(es); - assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); - // Check the tasks, at this time there should be 2 task - assertEquals(es.getTasks().size(), 2); - es.getTasks().forEach(wfTask -> { - assertEquals(wfTask.getStatus(), COMPLETED); - }); - - System.out.println("Total tasks=" + es.getTasks().size()); - assertTrue(es.getTasks().size() < 10); - - - } - - @Test - public void testDeciderUpdate() { - - metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1); - - String correlationId = "unit_test_1" + UUID.randomUUID().toString(); - Map input = new HashMap(); - String inputParam1 = "p1 value"; - input.put("param1", inputParam1); - input.put("param2", "p2 value"); - String wfid = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input, null, null); - assertNotNull(wfid); - - Workflow workflow = workflowExecutor.getWorkflow(wfid, false); - long updated1 = workflow.getUpdateTime(); - Uninterruptibles.sleepUninterruptibly(100, TimeUnit.MILLISECONDS); - workflowExecutor.decide(wfid); - workflow = workflowExecutor.getWorkflow(wfid, false); - long updated2 = workflow.getUpdateTime(); - assertEquals(updated1, updated2); - - Uninterruptibles.sleepUninterruptibly(100, TimeUnit.MILLISECONDS); - workflowExecutor.terminateWorkflow(wfid, "done"); - workflow = workflowExecutor.getWorkflow(wfid, false); - updated2 = workflow.getUpdateTime(); - assertTrue("updated1[" + updated1 + "] >? 
updated2[" + updated2 + "]", updated2 > updated1); - - } - - @Test - @Ignore - //Ignore for now, will improve this in the future - public void testFailurePoints() { - - metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1); - - String correlationId = "unit_test_1" + UUID.randomUUID().toString(); - Map input = new HashMap(); - String inputParam1 = "p1 value"; - input.put("param1", inputParam1); - input.put("param2", "p2 value"); - String wfid = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input, null, null); - assertNotNull(wfid); - - Workflow es = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(es); - assertEquals(RUNNING, es.getStatus()); - // The first task would be marked as scheduled - assertEquals(1, es.getTasks().size()); - assertEquals(SCHEDULED, es.getTasks().get(0).getStatus()); - - Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - String taskId = task.getTaskId(); - - String task1Op = "task1.output"; - task.getOutputData().put("op", task1Op); - task.setStatus(COMPLETED); - try { - workflowExecutionService.updateTask(task); - } catch (Exception e) { - workflowExecutionService.updateTask(task); - } - - // If we get the full workflow here then, last task should be completed and the next task should be scheduled - es = workflowExecutionService.getExecutionStatus(wfid, true); - es.getTasks().forEach(wfTask -> { - if (wfTask.getTaskId().equals(taskId)) { - assertEquals(COMPLETED, wfTask.getStatus()); - } else { - assertEquals(SCHEDULED, wfTask.getStatus()); - } - }); - - task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - assertNotNull(task); - String task2Input = (String) task.getInputData().get("tp2"); - assertNotNull(task2Input); - assertEquals(task1Op, task2Input); - - task2Input = (String) task.getInputData().get("tp1"); - assertNotNull(task2Input); - assertEquals(inputParam1, task2Input); - - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - es = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(es); - assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); - // Check the tasks, at this time there should be 2 task - assertEquals(es.getTasks().size(), 2); - es.getTasks().forEach(wfTask -> { - assertEquals(wfTask.getStatus(), COMPLETED); - }); - - System.out.println("Total tasks=" + es.getTasks().size()); - assertTrue(es.getTasks().size() < 10); - - - } - - @Test - public void testDeciderMix() throws Exception { - - ExecutorService executors = Executors.newFixedThreadPool(3); - - metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1); - - String correlationId = "unit_test_1" + UUID.randomUUID().toString(); - Map input = new HashMap(); - String inputParam1 = "p1 value"; - input.put("param1", inputParam1); - input.put("param2", "p2 value"); - String wfid = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input, null, null); - assertNotNull(wfid); - - List ids = workflowExecutionService.getRunningWorkflows(LINEAR_WORKFLOW_T1_T2, 1); - assertNotNull(ids); - assertTrue("found no ids: " + ids, ids.size() > 0); //if there are concurrent tests running, this would be more than 1 - boolean foundId = false; - for (String id : ids) { - if (id.equals(wfid)) { - foundId = true; - } - } - assertTrue(foundId); - - Workflow es = 
workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(es); - assertEquals(RUNNING, es.getStatus()); - // The first task would be marked as scheduled - assertEquals(1, es.getTasks().size()); - assertEquals(SCHEDULED, es.getTasks().get(0).getStatus()); - - List> futures = new LinkedList<>(); - for (int i = 0; i < 10; i++) { - futures.add(executors.submit(() -> { - workflowExecutor.decide(wfid); - return null; - })); - } - for (Future future : futures) { - future.get(); - } - futures.clear(); - - es = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(es); - assertEquals(RUNNING, es.getStatus()); - // The first task would be marked as scheduled - assertEquals(1, es.getTasks().size()); - assertEquals(SCHEDULED, es.getTasks().get(0).getStatus()); - - - // decideNow should be idempotent if re-run on the same state! - workflowExecutor.decide(wfid); - es = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(es); - assertEquals(RUNNING, es.getStatus()); - assertEquals(1, es.getTasks().size()); - Task t = es.getTasks().get(0); - assertEquals(SCHEDULED, t.getStatus()); - - Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - assertNotNull(task); - assertEquals(t.getTaskId(), task.getTaskId()); - es = workflowExecutionService.getExecutionStatus(wfid, true); - t = es.getTasks().get(0); - assertEquals(IN_PROGRESS, t.getStatus()); - String taskId = t.getTaskId(); - - String param1 = (String) task.getInputData().get("p1"); - String param2 = (String) task.getInputData().get("p2"); - - assertNotNull(param1); - assertNotNull(param2); - assertEquals("p1 value", param1); - assertEquals("p2 value", param2); - - String task1Op = "task1.output->" + param1 + "." + param2; - task.getOutputData().put("op", task1Op); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - // If we get the full workflow here then, last task should be completed and the next task should be scheduled - es = workflowExecutionService.getExecutionStatus(wfid, true); - es.getTasks().forEach(wfTask -> { - if (wfTask.getTaskId().equals(taskId)) { - assertEquals(COMPLETED, wfTask.getStatus()); - } else { - assertEquals(SCHEDULED, wfTask.getStatus()); - } - }); - - //Run sweep 10 times! 
- for (int i = 0; i < 10; i++) { - futures.add(executors.submit(() -> { - long s = System.currentTimeMillis(); - workflowExecutor.decide(wfid); - System.out.println("Took " + (System.currentTimeMillis() - s) + " ms to run decider"); - return null; - })); - } - for (Future future : futures) { - future.get(); - } - futures.clear(); - - es = workflowExecutionService.getExecutionStatus(wfid, true); - assertEquals(RUNNING, es.getStatus()); - assertEquals(2, es.getTasks().size()); - - System.out.println("Workflow tasks=" + es.getTasks()); - - task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - assertNotNull(task); - String task2Input = (String) task.getInputData().get("tp2"); - assertNotNull(task2Input); - assertEquals(task1Op, task2Input); - - task2Input = (String) task.getInputData().get("tp1"); - assertNotNull(task2Input); - assertEquals(inputParam1, task2Input); - - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - es = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(es); - assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); - // Check the tasks, at this time there should be 2 task - assertEquals(es.getTasks().size(), 2); - es.getTasks().forEach(wfTask -> { - assertEquals(wfTask.getStatus(), COMPLETED); - }); - - System.out.println("Total tasks=" + es.getTasks().size()); - assertTrue(es.getTasks().size() < 10); - } - - @Test - public void testFailures() { - metadataService.getWorkflowDef(FORK_JOIN_WF, 1); - - String taskName = "junit_task_1"; - TaskDef taskDef = notFoundSafeGetTaskDef(taskName); - taskDef.setRetryCount(0); - metadataService.updateTaskDef(taskDef); - - WorkflowDef found = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1); - assertNotNull(found.getFailureWorkflow()); - assertFalse(StringUtils.isBlank(found.getFailureWorkflow())); - - String correlationId = "unit_test_1" + UUID.randomUUID().toString(); - Map input = new HashMap(); - String inputParam1 = "p1 value"; - input.put("param1", inputParam1); - input.put("param2", "p2 value"); - input.put("failureWfName", "FanInOutTest"); - String wfid = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input, null, null); - assertNotNull(wfid); - - Task task = getTask("junit_task_1"); - assertNotNull(task); - task.setStatus(FAILED); - workflowExecutionService.updateTask(task); - - // If we get the full workflow here then, last task should be completed and the next task should be scheduled - Workflow es = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(es); - assertEquals(WorkflowStatus.FAILED, es.getStatus()); - - taskDef.setRetryCount(RETRY_COUNT); - metadataService.updateTaskDef(taskDef); - - } - - @Test - public void testRetryWithForkJoin() throws Exception { - String workflowId = this.runAFailedForkJoinWF(); - workflowExecutor.retry(workflowId); - - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(workflow.getStatus(), RUNNING); - - printTaskStatuses(workflow, "After retry called"); - - Task t2 = workflowExecutionService.poll("junit_task_0_RT_2", "test"); - assertNotNull(t2); - assertTrue(workflowExecutionService.ackTaskReceived(t2.getTaskId())); - - Task t3 = workflowExecutionService.poll("junit_task_0_RT_3", "test"); - assertNotNull(t3); - assertTrue(workflowExecutionService.ackTaskReceived(t3.getTaskId())); - - t2.setStatus(COMPLETED); - 
t3.setStatus(COMPLETED); - - ExecutorService es = Executors.newFixedThreadPool(2); - Future future1 = es.submit(() -> { - try { - workflowExecutionService.updateTask(t2); - } catch (Exception e) { - throw new RuntimeException(e); - } - - }); - final Task _t3 = t3; - Future future2 = es.submit(() -> { - try { - workflowExecutionService.updateTask(_t3); - } catch (Exception e) { - throw new RuntimeException(e); - } - - }); - future1.get(); - future2.get(); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - - printTaskStatuses(workflow, "T2, T3 complete"); - workflowExecutor.decide(workflowId); - - Task t4 = workflowExecutionService.poll("junit_task_0_RT_4", "test"); - assertNotNull(t4); - t4.setStatus(COMPLETED); - workflowExecutionService.updateTask(t4); - - printTaskStatuses(workflowId, "After complete"); - } - - @Test - public void testRetryWithDoWhile() throws Exception { - String workflowId = this.runAFailedDoWhileWF(); - workflowExecutor.retry(workflowId); - - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(workflow.getStatus(), RUNNING); - - printTaskStatuses(workflow, "After retry called"); - - Task t2 = workflowExecutionService.poll("junit_task_0_RT_2", "test"); - assertNotNull(t2); - assertTrue(workflowExecutionService.ackTaskReceived(t2.getTaskId())); - - t2.setStatus(COMPLETED); - - ExecutorService es = Executors.newFixedThreadPool(2); - Future future1 = es.submit(() -> { - try { - workflowExecutionService.updateTask(t2); - } catch (Exception e) { - throw new RuntimeException(e); - } - - }); - future1.get(); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - - printTaskStatuses(workflow, "T2, T3 complete"); - workflowExecutor.decide(workflowId); - - printTaskStatuses(workflowId, "After complete"); - } - - @Test - public void testRetry() { - String taskName = "junit_task_1"; - TaskDef taskDef = notFoundSafeGetTaskDef(taskName); - int retryCount = taskDef.getRetryCount(); - taskDef.setRetryCount(1); - int retryDelay = taskDef.getRetryDelaySeconds(); - taskDef.setRetryDelaySeconds(0); - metadataService.updateTaskDef(taskDef); - - WorkflowDef workflowDef = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1); - assertNotNull(workflowDef.getFailureWorkflow()); - assertFalse(StringUtils.isBlank(workflowDef.getFailureWorkflow())); - - String correlationId = "retry_test_" + UUID.randomUUID().toString(); - Map input = new HashMap<>(); - input.put("param1", "p1 value"); - input.put("param2", "p2 value"); - String workflowId = startOrLoadWorkflowExecution("retry", LINEAR_WORKFLOW_T1_T2, 1, correlationId, input, null, null); - assertNotNull(workflowId); - printTaskStatuses(workflowId, "initial"); - - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertEquals(RUNNING, workflow.getStatus()); - assertEquals(1, workflow.getTasks().size()); - - Task task = getTask("junit_task_1"); - assertNotNull(task); - task.setStatus(FAILED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - assertEquals(2, workflow.getTasks().size()); - - task = getTask("junit_task_1"); - assertNotNull(task); - task.setStatus(FAILED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, 
true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.FAILED, workflow.getStatus()); - assertEquals(2, workflow.getTasks().size()); - - printTaskStatuses(workflowId, "before retry"); - - workflowExecutor.retry(workflowId); - - printTaskStatuses(workflowId, "after retry"); - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - assertEquals(3, workflow.getTasks().size()); - - task = getTask("junit_task_1"); - assertNotNull(task); - assertEquals(workflowId, task.getWorkflowInstanceId()); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - assertEquals(4, workflow.getTasks().size()); - - task = getTask("junit_task_2"); - assertNotNull(task); - assertEquals(workflowId, task.getWorkflowInstanceId()); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); - assertEquals(4, workflow.getTasks().size()); - assertEquals(3, workflow.getTasks().stream().filter(t -> t.getTaskType().equals("junit_task_1")).count()); - - taskDef.setRetryCount(retryCount); - taskDef.setRetryDelaySeconds(retryDelay); - metadataService.updateTaskDef(taskDef); - - printTaskStatuses(workflowId, "final"); - } - - @Test - public void testNonRestartartableWorkflows() { - String taskName = "junit_task_1"; - TaskDef taskDef = notFoundSafeGetTaskDef(taskName); - taskDef.setRetryCount(0); - metadataService.updateTaskDef(taskDef); - - WorkflowDef found = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1); - found.setName(JUNIT_TEST_WF_NON_RESTARTABLE); - found.setRestartable(false); - metadataService.updateWorkflowDef(found); - - assertNotNull(found); - assertNotNull(found.getFailureWorkflow()); - assertFalse(StringUtils.isBlank(found.getFailureWorkflow())); - - String correlationId = "unit_test_1" + UUID.randomUUID().toString(); - Map input = new HashMap<>(); - String inputParam1 = "p1 value"; - input.put("param1", inputParam1); - input.put("param2", "p2 value"); - String workflowId = startOrLoadWorkflowExecution(JUNIT_TEST_WF_NON_RESTARTABLE, 1, correlationId, input, null, null); - assertNotNull(workflowId); - - Task task = getTask("junit_task_1"); - task.setStatus(FAILED); - workflowExecutionService.updateTask(task); - - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.FAILED, workflow.getStatus()); - - workflowExecutor.rewind(workflow.getWorkflowId(), false); - - // Polling for the first task should return the same task as before - task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); - assertNotNull(task); - assertEquals("junit_task_1", task.getTaskType()); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - assertEquals(workflowId, task.getWorkflowInstanceId()); - - List tasks = workflowExecutionService.getTasks(task.getTaskType(), null, 1); - assertNotNull(tasks); - assertEquals(1, tasks.size()); - task = tasks.get(0); - assertEquals(workflowId, task.getWorkflowInstanceId()); - - String task1Op = "task1.Done"; - task.getOutputData().put("op", task1Op); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); 
- - workflow = workflowExecutionService.getExecutionStatus(workflowId, false); - assertNotNull(workflow); - assertNotNull(workflow.getOutput()); - - task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); - assertNotNull(task); - assertEquals("junit_task_2", task.getTaskType()); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - String task2Input = (String) task.getInputData().get("tp2"); - assertNotNull("Found=" + task.getInputData(), task2Input); - assertEquals(task1Op, task2Input); - - task2Input = (String) task.getInputData().get("tp1"); - assertNotNull(task2Input); - assertEquals(inputParam1, task2Input); - - task.setStatus(COMPLETED); - task.setReasonForIncompletion("unit test failure"); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); - tasks = workflow.getTasks(); - assertNotNull(tasks); - assertEquals(2, tasks.size()); - assertTrue("Found " + workflow.getOutput().toString(), workflow.getOutput().containsKey("o3")); - assertEquals("task1.Done", workflow.getOutput().get("o3")); - - expectedException.expect(ApplicationException.class); - expectedException.expectMessage(String.format("%s is non-restartable", workflow)); - workflowExecutor.rewind(workflow.getWorkflowId(), false); - } - - - @Test - public void testRestart() { - String taskName = "junit_task_1"; - TaskDef taskDef = notFoundSafeGetTaskDef(taskName); - taskDef.setRetryCount(0); - metadataService.updateTaskDef(taskDef); - - WorkflowDef workflowDef = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1); - assertNotNull(workflowDef.getFailureWorkflow()); - assertFalse(StringUtils.isBlank(workflowDef.getFailureWorkflow())); - - String correlationId = "unit_test_1" + UUID.randomUUID().toString(); - Map input = new HashMap<>(); - String inputParam1 = "p1 value"; - input.put("param1", inputParam1); - input.put("param2", "p2 value"); - String workflowId = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input, null, null); - assertNotNull(workflowId); - - Task task = getTask("junit_task_1"); - task.setStatus(FAILED); - workflowExecutionService.updateTask(task); - - // If we get the full workflow here then, last task should be completed and the next task should be scheduled - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.FAILED, workflow.getStatus()); - - workflowExecutor.rewind(workflow.getWorkflowId(), false); - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - - task = getTask("junit_task_1"); - assertNotNull(task); - assertEquals(workflowId, task.getWorkflowInstanceId()); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - - task = getTask("junit_task_2"); - assertNotNull(task); - assertEquals(workflowId, task.getWorkflowInstanceId()); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); - - // Add a new version of the definition with an 
additional task - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setName("junit_task_20"); - workflowTask.setTaskReferenceName("task_added"); - workflowTask.setWorkflowTaskType(TaskType.SIMPLE); - - workflowDef.getTasks().add(workflowTask); - workflowDef.setVersion(2); - metadataService.updateWorkflowDef(workflowDef); - - // restart with the latest definition - workflowExecutor.rewind(workflow.getWorkflowId(), true); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - - task = getTask("junit_task_1"); - assertNotNull(task); - assertEquals(workflowId, task.getWorkflowInstanceId()); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - - task = getTask("junit_task_2"); - assertNotNull(task); - assertEquals(workflowId, task.getWorkflowInstanceId()); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - - task = getTask("junit_task_20"); - assertNotNull(task); - assertEquals(workflowId, task.getWorkflowInstanceId()); - assertEquals("task_added", task.getReferenceTaskName()); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); - assertEquals(3, workflow.getTasks().size()); - - // cleanup - metadataService.unregisterWorkflowDef(workflowDef.getName(), 2); - } - - - @Test - public void testTaskTimeout() throws Exception { - - String taskName = "junit_task_1"; - TaskDef taskDef = notFoundSafeGetTaskDef(taskName); - taskDef.setRetryCount(1); - taskDef.setTimeoutSeconds(1); - taskDef.setRetryDelaySeconds(0); - taskDef.setTimeoutPolicy(TimeoutPolicy.RETRY); - metadataService.updateTaskDef(taskDef); - - WorkflowDef found = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1); - assertNotNull(found.getFailureWorkflow()); - assertFalse(StringUtils.isBlank(found.getFailureWorkflow())); - - String correlationId = "unit_test_1" + UUID.randomUUID().toString(); - Map input = new HashMap<>(); - input.put("param1", "p1 value"); - input.put("param2", "p2 value"); - input.put("failureWfName", "FanInOutTest"); - String workflowId = startOrLoadWorkflowExecution("timeout", LINEAR_WORKFLOW_T1_T2, 1, correlationId, input, null, null); - assertNotNull(workflowId); - - //Ensure that we have a workflow queued up for evaluation here... 
- long size = queueDAO.getSize(WorkflowExecutor.DECIDER_QUEUE); - assertEquals(1, size); - - // If we get the full workflow here then, last task should be completed and the next task should be scheduled - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - assertEquals("found: " + workflow.getTasks().stream().map(Task::toString).collect(Collectors.toList()), 1, workflow.getTasks().size()); - - Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); - assertNotNull(task); - assertEquals(workflowId, task.getWorkflowInstanceId()); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - //Ensure that we have a workflow queued up for evaluation here... - size = queueDAO.getSize(WorkflowExecutor.DECIDER_QUEUE); - assertEquals(1, size); - - Uninterruptibles.sleepUninterruptibly(3, TimeUnit.SECONDS); - workflowSweeper.sweep(Collections.singletonList(workflowId), workflowExecutor, workflowRepairService); - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals("found: " + workflow.getTasks().stream().map(Task::toString).collect(Collectors.toList()), 2, workflow.getTasks().size()); - - Task task1 = workflow.getTasks().get(0); - assertEquals(TIMED_OUT, task1.getStatus()); - Task task2 = workflow.getTasks().get(1); - assertEquals(SCHEDULED, task2.getStatus()); - - task = workflowExecutionService.poll(task2.getTaskDefName(), "task1.junit.worker"); - assertNotNull(task); - assertEquals(workflowId, task.getWorkflowInstanceId()); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - Uninterruptibles.sleepUninterruptibly(3, TimeUnit.SECONDS); - workflowExecutor.decide(workflowId); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(2, workflow.getTasks().size()); - - assertEquals(TIMED_OUT, workflow.getTasks().get(0).getStatus()); - assertEquals(TIMED_OUT, workflow.getTasks().get(1).getStatus()); - assertEquals(WorkflowStatus.TIMED_OUT, workflow.getStatus()); - - assertEquals(1, queueDAO.getSize(WorkflowExecutor.DECIDER_QUEUE)); - - taskDef.setTimeoutSeconds(0); - taskDef.setRetryCount(RETRY_COUNT); - metadataService.updateTaskDef(taskDef); - } - - @Test - public void testWorkflowTimeouts() throws Exception { - WorkflowDef workflowDef = metadataService.getWorkflowDef(TEST_WORKFLOW, 1); - workflowDef.setTimeoutPolicy(WorkflowDef.TimeoutPolicy.TIME_OUT_WF); - workflowDef.setTimeoutSeconds(5); - metadataService.updateWorkflowDef(workflowDef); - - String correlationId = "test_workflow_timeout" + UUID.randomUUID().toString(); - Map input = new HashMap<>(); - String inputParam1 = "p1 value"; - input.put("param1", inputParam1); - input.put("param2", "p2 value"); - String workflowId = startOrLoadWorkflowExecution(TEST_WORKFLOW, 1, correlationId, input, null, null); - assertNotNull(workflowId); - - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - assertEquals(1, workflow.getTasks().size()); - - Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - Uninterruptibles.sleepUninterruptibly(6, TimeUnit.SECONDS); - workflowSweeper.sweep(Collections.singletonList(workflowId), 
workflowExecutor, workflowRepairService); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.TIMED_OUT, workflow.getStatus()); - - workflowDef.setTimeoutSeconds(0); - workflowDef.setTimeoutPolicy(WorkflowDef.TimeoutPolicy.ALERT_ONLY); - metadataService.updateWorkflowDef(workflowDef); - } - - @Test - public void testReruns() { - - metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1); - - String correlationId = "unit_test_1" + UUID.randomUUID().toString(); - Map input = new HashMap(); - String inputParam1 = "p1 value"; - input.put("param1", inputParam1); - input.put("param2", "p2 value"); - String wfid = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input, null, null); - assertNotNull(wfid); - - Workflow es = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(es); - assertEquals(RUNNING, es.getStatus()); - // Check the tasks, at this time there should be 1 task - assertEquals(es.getTasks().size(), 1); - Task t = es.getTasks().get(0); - assertEquals(SCHEDULED, t.getStatus()); - - Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - assertEquals(t.getTaskId(), task.getTaskId()); - - String param1 = (String) task.getInputData().get("p1"); - String param2 = (String) task.getInputData().get("p2"); - - assertNotNull(param1); - assertNotNull(param2); - assertEquals("p1 value", param1); - assertEquals("p2 value", param2); - - String task1Op = "task1.output->" + param1 + "." + param2; - task.getOutputData().put("op", task1Op); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - // If we get the full workflow here then, last task should be completed and the next task should be scheduled - es = workflowExecutionService.getExecutionStatus(wfid, true); - es.getTasks().forEach(wfTask -> { - if (wfTask.getTaskId().equals(t.getTaskId())) { - assertEquals(wfTask.getStatus(), COMPLETED); - } else { - assertEquals(wfTask.getStatus(), SCHEDULED); - } - }); - - task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - String task2Input = (String) task.getInputData().get("tp2"); - assertNotNull(task2Input); - assertEquals(task1Op, task2Input); - - task2Input = (String) task.getInputData().get("tp1"); - assertNotNull(task2Input); - assertEquals(inputParam1, task2Input); - - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - es = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(es); - assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); - - // Now rerun lets rerun the workflow from the second task - RerunWorkflowRequest request = new RerunWorkflowRequest(); - request.setReRunFromWorkflowId(wfid); - request.setReRunFromTaskId(es.getTasks().get(1).getTaskId()); - - String reRunwfid = workflowExecutor.rerun(request); - - Workflow esRR = workflowExecutionService.getExecutionStatus(reRunwfid, true); - assertNotNull(esRR); - assertEquals(esRR.getReasonForIncompletion(), RUNNING, esRR.getStatus()); - // Check the tasks, at this time there should be 2 tasks - // first one is skipped and the second one is scheduled - assertEquals(esRR.getTasks().toString(), 2, esRR.getTasks().size()); - assertEquals(COMPLETED, esRR.getTasks().get(0).getStatus()); - Task tRR = 
esRR.getTasks().get(1); - assertEquals(esRR.getTasks().toString(), SCHEDULED, tRR.getStatus()); - assertEquals(tRR.getTaskType(), "junit_task_2"); - - task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - task2Input = (String) task.getInputData().get("tp2"); - assertNotNull(task2Input); - assertEquals(task1Op, task2Input); - - task2Input = (String) task.getInputData().get("tp1"); - assertNotNull(task2Input); - assertEquals(inputParam1, task2Input); - - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - es = workflowExecutionService.getExecutionStatus(reRunwfid, true); - assertNotNull(es); - assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); - - ////////////////////// - // Now rerun the entire workflow - RerunWorkflowRequest request1 = new RerunWorkflowRequest(); - request1.setReRunFromWorkflowId(wfid); - - String reRunwfid1 = workflowExecutor.rerun(request1); - - es = workflowExecutionService.getExecutionStatus(reRunwfid1, true); - assertNotNull(es); - assertEquals(RUNNING, es.getStatus()); - // Check the tasks, at this time there should be 1 task - assertEquals(es.getTasks().size(), 1); - assertEquals(SCHEDULED, es.getTasks().get(0).getStatus()); - - task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - task.getOutputData().put("op", task1Op); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - es = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(es); - assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); - } - - @Test - public void testTaskSkipping() { - - String taskName = "junit_task_1"; - TaskDef taskDef = notFoundSafeGetTaskDef(taskName); - taskDef.setRetryCount(0); - taskDef.setTimeoutSeconds(0); - metadataService.updateTaskDef(taskDef); - - - metadataService.getWorkflowDef(TEST_WORKFLOW, 1); - - String correlationId = "unit_test_1" + UUID.randomUUID().toString(); - Map input = new HashMap(); - String inputParam1 = "p1 value"; - input.put("param1", inputParam1); - input.put("param2", "p2 value"); - String wfid = startOrLoadWorkflowExecution(TEST_WORKFLOW, 1, correlationId, input, null, null); - assertNotNull(wfid); - - // Now Skip the second task - workflowExecutor.skipTaskFromWorkflow(wfid, "t2", null); - - Workflow es = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(es); - assertEquals(RUNNING, es.getStatus()); - // Check the tasks, at this time there should be 3 task - assertEquals(2, es.getTasks().size()); - - assertEquals(SCHEDULED, es.getTasks().stream().filter(task -> "t1".equals(task.getReferenceTaskName())).findFirst().orElse(null).getStatus()); - assertEquals(Status.SKIPPED, es.getTasks().stream().filter(task -> "t2".equals(task.getReferenceTaskName())).findFirst().orElse(null).getStatus()); - - Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - assertEquals("t1", task.getReferenceTaskName()); - - String param1 = (String) 
task.getInputData().get("p1"); - String param2 = (String) task.getInputData().get("p2"); - - assertNotNull(param1); - assertNotNull(param2); - assertEquals("p1 value", param1); - assertEquals("p2 value", param2); - - String task1Op = "task1.output->" + param1 + "." + param2; - task.getOutputData().put("op", task1Op); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - // If we get the full workflow here then, last task should be completed and the next task should be scheduled - es = workflowExecutionService.getExecutionStatus(wfid, true); - es.getTasks().forEach(wfTask -> { - if (wfTask.getReferenceTaskName().equals("t1")) { - assertEquals(COMPLETED, wfTask.getStatus()); - } else if (wfTask.getReferenceTaskName().equals("t2")) { - assertEquals(Status.SKIPPED, wfTask.getStatus()); - } else { - assertEquals(SCHEDULED, wfTask.getStatus()); - } - }); - - task = workflowExecutionService.poll("junit_task_3", "task3.junit.worker"); - assertNotNull(task); - assertEquals(IN_PROGRESS, task.getStatus()); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - es = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(es); - assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); - } - - @Test - public void testPauseResume() { - - metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1); - - String correlationId = "unit_test_1" + System.nanoTime(); - Map input = new HashMap<>(); - String inputParam1 = "p1 value"; - input.put("param1", inputParam1); - input.put("param2", "p2 value"); - String wfid = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input, null, null); - - assertNotNull(wfid); - - List ids = workflowExecutionService.getRunningWorkflows(LINEAR_WORKFLOW_T1_T2, 1); - assertNotNull(ids); - assertTrue("found no ids: " + ids, ids.size() > 0); //if there are concurrent tests running, this would be more than 1 - boolean foundId = false; - for (String id : ids) { - if (id.equals(wfid)) { - foundId = true; - } - } - assertTrue(foundId); - - Workflow es = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(es); - assertEquals(RUNNING, es.getStatus()); - - es = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(es); - assertEquals(RUNNING, es.getStatus()); - Task t = es.getTasks().get(0); - assertEquals(SCHEDULED, t.getStatus()); - - // PAUSE - workflowExecutor.pauseWorkflow(wfid); - - // The workflow is paused but the scheduled task should be pollable - - Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - assertEquals(t.getTaskId(), task.getTaskId()); - - String param1 = (String) task.getInputData().get("p1"); - String param2 = (String) task.getInputData().get("p2"); - - assertNotNull(param1); - assertNotNull(param2); - assertEquals("p1 value", param1); - assertEquals("p2 value", param2); - - String task1Op = "task1.output->" + param1 + "." 
+ param2; - task.getOutputData().put("op", task1Op); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - // This decide should not schedule the next task - //ds.decideNow(wfid, task); - - // If we get the full workflow here then, last task should be completed and the rest (including PAUSE task) should be scheduled - es = workflowExecutionService.getExecutionStatus(wfid, true); - es.getTasks().forEach(wfTask -> { - if (wfTask.getTaskId().equals(t.getTaskId())) { - assertEquals(wfTask.getStatus(), COMPLETED); - } - }); - - // This should return null as workflow is paused - task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); - assertNull("Found: " + task, task); - - // Even if decide is run again the next task will not be scheduled as the workflow is still paused-- - workflowExecutor.decide(wfid); - - task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); - assertTrue(task == null); - - // RESUME - workflowExecutor.resumeWorkflow(wfid); - - // Now polling should get the second task - task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - - String task2Input = (String) task.getInputData().get("tp2"); - assertNotNull(task2Input); - assertEquals(task1Op, task2Input); - - Task byRefName = workflowExecutionService.getPendingTaskForWorkflow("t2", wfid); - assertNotNull(byRefName); - assertEquals(task.getTaskId(), byRefName.getTaskId()); - - task2Input = (String) task.getInputData().get("tp1"); - assertNotNull(task2Input); - assertEquals(inputParam1, task2Input); - - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - es = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(es); - assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); - } - - @Test - public void testSubWorkflow() { - - createSubWorkflow(); - metadataService.getWorkflowDef(WF_WITH_SUB_WF, 1); - Map input = new HashMap<>(); - input.put("param1", "param 1 value"); - input.put("param3", "param 2 value"); - input.put("wfName", LINEAR_WORKFLOW_T1_T2); - String wfId = startOrLoadWorkflowExecution(WF_WITH_SUB_WF, 1, "test", input, null, null); - assertNotNull(wfId); - - Workflow workflow = workflowExecutionService.getExecutionStatus(wfId, true); - assertNotNull(workflow); - - Task simpleTask = workflowExecutionService.poll("junit_task_5", "test"); - assertNotNull(simpleTask); - simpleTask.setStatus(COMPLETED); - workflowExecutionService.updateTask(simpleTask); - Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); - - workflow = workflowExecutionService.getExecutionStatus(wfId, true); - assertNotNull(workflow); - assertNotNull(workflow.getTasks()); - - // Simulating SystemTaskWorkerCoordinator to execute async system tasks - String subWorkflowTaskId = workflow.getTaskByRefName("a2").getTaskId(); - workflowExecutor.executeSystemTask(dummySubWorkflowSystemTask, subWorkflowTaskId, 1); - Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); - - workflow = workflowExecutionService.getExecutionStatus(wfId, true); - assertNotNull(workflow); - assertNotNull(workflow.getTasks()); - - Task subWorkflowTask = workflow.getTasks().stream().filter(t -> t.getTaskType().equals(SUB_WORKFLOW.name())).findAny().get(); - assertNotNull(subWorkflowTask); - assertNotNull(subWorkflowTask.getOutputData()); - assertNotNull(subWorkflowTask.getInputData()); - assertNotNull("Output: " + subWorkflowTask.getSubWorkflowId() + 
", status: " + subWorkflowTask.getStatus(), subWorkflowTask.getSubWorkflowId()); - assertTrue(subWorkflowTask.getInputData().containsKey("workflowInput")); - assertEquals(42, ((Map) subWorkflowTask.getInputData().get("workflowInput")).get("param2")); - String subWorkflowId = subWorkflowTask.getSubWorkflowId(); - - workflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); - assertNotNull(workflow); - assertNotNull(workflow.getTasks()); - assertEquals(wfId, workflow.getParentWorkflowId()); - assertEquals(RUNNING, workflow.getStatus()); - - simpleTask = workflowExecutionService.poll("junit_task_1", "test"); - simpleTask.setStatus(COMPLETED); - workflowExecutionService.updateTask(simpleTask); - - simpleTask = workflowExecutionService.poll("junit_task_2", "test"); - assertEquals(subWorkflowId, simpleTask.getWorkflowInstanceId()); - String uuid = UUID.randomUUID().toString(); - simpleTask.getOutputData().put("uuid", uuid); - simpleTask.setStatus(COMPLETED); - workflowExecutionService.updateTask(simpleTask); - - workflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); - assertEquals(LINEAR_WORKFLOW_T1_T2, workflow.getWorkflowName()); - assertNotNull(workflow.getOutput()); - assertTrue(workflow.getOutput().containsKey("o1")); - assertTrue(workflow.getOutput().containsKey("o2")); - assertEquals("sub workflow input param1", workflow.getOutput().get("o1")); - assertEquals(uuid, workflow.getOutput().get("o2")); - - // Simulating SystemTaskWorkerCoordinator to execute async system tasks - workflowExecutor.executeSystemTask(dummySubWorkflowSystemTask, subWorkflowTaskId, 1); - - subWorkflowTask = workflowExecutionService.poll("junit_task_6", "test"); - assertNotNull(subWorkflowTask); - subWorkflowTask.setStatus(COMPLETED); - workflowExecutionService.updateTask(subWorkflowTask); - - workflow = workflowExecutionService.getExecutionStatus(wfId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); - } - - @Test - public void testSubWorkflowFailure() { - - TaskDef taskDef = notFoundSafeGetTaskDef("junit_task_1"); - assertNotNull(taskDef); - taskDef.setRetryCount(0); - taskDef.setTimeoutSeconds(2); - metadataService.updateTaskDef(taskDef); - - - createSubWorkflow(); - metadataService.getWorkflowDef(WF_WITH_SUB_WF, 1); - - Map input = new HashMap<>(); - input.put("param1", "param 1 value"); - input.put("param3", "param 2 value"); - input.put("wfName", LINEAR_WORKFLOW_T1_T2); - String workflowId = startOrLoadWorkflowExecution(WF_WITH_SUB_WF, 1, "test", input, null, null); - assertNotNull(workflowId); - - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - - Task task = workflowExecutionService.poll("junit_task_5", "test"); - assertNotNull(task); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - - // Simulating SystemTaskWorkerCoordinator to execute async system tasks - String subWorkflowTaskId = workflow.getTaskByRefName("a2").getTaskId(); - workflowExecutor.executeSystemTask(dummySubWorkflowSystemTask, subWorkflowTaskId, 1); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertNotNull(workflow.getTasks()); - task = workflow.getTasks().stream().filter(t -> t.getTaskType().equals(SUB_WORKFLOW.name())).findAny().get(); - 
assertNotNull(task); - assertNotNull(task.getOutputData()); - assertNotNull(task.getSubWorkflowId()); - String subWorkflowId = task.getSubWorkflowId(); - - Workflow subWorkflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); - assertNotNull(subWorkflow); - assertNotNull(subWorkflow.getTasks()); - - assertEquals(workflowId, subWorkflow.getParentWorkflowId()); - assertEquals(RUNNING, subWorkflow.getStatus()); - - task = workflowExecutionService.poll("junit_task_1", "test"); - assertNotNull(task); - task.setStatus(FAILED); - workflowExecutionService.updateTask(task); - - subWorkflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); - assertNotNull(subWorkflow); - assertEquals(WorkflowStatus.FAILED, subWorkflow.getStatus()); - workflowExecutor.executeSystemTask(subworkflow, subWorkflow.getParentWorkflowTaskId(), 1); - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertEquals(WorkflowStatus.FAILED, workflow.getStatus()); - task = workflow.getTasks().stream().filter(t -> t.getTaskType().equals(SUB_WORKFLOW.name())).findAny().get(); - assertEquals(FAILED, task.getStatus()); - } - - @Test - public void testSubWorkflowFailureInverse() { - - TaskDef taskDef = notFoundSafeGetTaskDef("junit_task_1"); - assertNotNull(taskDef); - taskDef.setRetryCount(0); - taskDef.setTimeoutSeconds(2); - metadataService.updateTaskDef(taskDef); - - - createSubWorkflow(); - - WorkflowDef found = metadataService.getWorkflowDef(WF_WITH_SUB_WF, 1); - assertNotNull(found); - Map input = new HashMap<>(); - input.put("param1", "param 1 value"); - input.put("param3", "param 2 value"); - input.put("wfName", LINEAR_WORKFLOW_T1_T2); - String workflowId = startOrLoadWorkflowExecution(WF_WITH_SUB_WF, 1, "test", input, null, null); - assertNotNull(workflowId); - - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - - Task task = workflowExecutionService.poll("junit_task_5", "test"); - assertNotNull(task); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - - // Simulating SystemTaskWorkerCoordinator to execute async system tasks - String subWorkflowTaskId = workflow.getTaskByRefName("a2").getTaskId(); - workflowExecutor.executeSystemTask(dummySubWorkflowSystemTask, subWorkflowTaskId, 1); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertNotNull(workflow.getTasks()); - task = workflow.getTasks().stream().filter(t -> t.getTaskType().equals(SUB_WORKFLOW.name())).findAny().get(); - assertNotNull(task); - assertNotNull(task.getOutputData()); - assertNotNull(task.getSubWorkflowId()); - String subWorkflowId = task.getSubWorkflowId(); - - workflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); - assertNotNull(workflow); - assertNotNull(workflow.getTasks()); - assertEquals(workflowId, workflow.getParentWorkflowId()); - assertEquals(RUNNING, workflow.getStatus()); - - workflowExecutor.terminateWorkflow(workflowId, "fail"); - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertEquals(WorkflowStatus.TERMINATED, workflow.getStatus()); - - workflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); - assertEquals(WorkflowStatus.TERMINATED, workflow.getStatus()); - - } - - @Test - public void testSubWorkflowRetry() { - String taskName = "junit_task_1"; - TaskDef taskDef = 
notFoundSafeGetTaskDef(taskName); - int retryCount = notFoundSafeGetTaskDef(taskName).getRetryCount(); - taskDef.setRetryCount(0); - metadataService.updateTaskDef(taskDef); - - // create a workflow with sub-workflow - createSubWorkflow(); - WorkflowDef found = metadataService.getWorkflowDef(WF_WITH_SUB_WF, 1); - - WorkflowTask workflowTask = found.getTasks().stream().filter(t -> t.getType().equals(SUB_WORKFLOW.name())).findAny().orElse(null); - - // Set subworkflow task retry count to 1. - TaskDef subWorkflowTaskDef = new TaskDef(); - subWorkflowTaskDef.setRetryCount(1); - subWorkflowTaskDef.setName("test_subworkflow_task"); - subWorkflowTaskDef.setOwnerEmail("test@qbc.com"); - workflowTask.setTaskDefinition(subWorkflowTaskDef); - - metadataService.updateWorkflowDef(found); - - // start the workflow - Map workflowInputParams = new HashMap<>(); - workflowInputParams.put("param1", "param 1"); - workflowInputParams.put("param3", "param 2"); - workflowInputParams.put("wfName", LINEAR_WORKFLOW_T1_T2); - String workflowId = startOrLoadWorkflowExecution(WF_WITH_SUB_WF, 1, "test", workflowInputParams, null, null); - assertNotNull(workflowId); - - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - - // poll and complete first task - Task task = workflowExecutionService.poll("junit_task_5", "test"); - assertNotNull(task); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - - // Simulating SystemTaskWorkerCoordinator to execute async system tasks - String subWorkflowTaskId = workflow.getTaskByRefName("a2").getTaskId(); - workflowExecutor.executeSystemTask(dummySubWorkflowSystemTask, subWorkflowTaskId, 1); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertNotNull(workflow.getTasks()); - assertEquals(2, workflow.getTasks().size()); - - task = workflow.getTasks().stream().filter(t -> t.getTaskType().equals(SUB_WORKFLOW.name())).findAny().orElse(null); - assertNotNull(task); - assertNotNull(task.getOutputData()); - assertNotNull("Output: " + task.getOutputData().toString() + ", status: " + task.getStatus(), task.getSubWorkflowId()); - String subWorkflowId = task.getSubWorkflowId(); - - workflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); - assertNotNull(workflow); - assertNotNull(workflow.getTasks()); - assertEquals(workflowId, workflow.getParentWorkflowId()); - assertEquals(RUNNING, workflow.getStatus()); - - // poll and fail the first task in sub-workflow - task = workflowExecutionService.poll("junit_task_1", "test"); - task.setStatus(FAILED); - workflowExecutionService.updateTask(task); - - Workflow subWorkflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); - assertNotNull(subWorkflow); - assertEquals(WorkflowStatus.FAILED, subWorkflow.getStatus()); - subWorkflowTaskId = subWorkflow.getParentWorkflowTaskId(); - - workflowExecutor.executeSystemTask(subworkflow, subWorkflowTaskId, 1); - - // Ensure failed Subworkflow task is rescheduled. 
- workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - - task = workflow.getTasks().stream() - .filter(t -> t.getTaskType().equals(SUB_WORKFLOW.name())) - .filter(t -> t.getStatus().equals(SCHEDULED)) - .findAny().orElse(null); - assertNotNull(task); - subWorkflowTaskId = task.getTaskId(); - - workflowExecutor.executeSystemTask(subworkflow, task.getTaskId(), 1); - - // Get the latest workflow and task, and then acquire latest subWorkflowId - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - - task = workflow.getTasks().stream() - .filter(t -> t.getTaskType().equals(SUB_WORKFLOW.name())) - .filter(t -> t.getStatus().equals(IN_PROGRESS)) - .findAny().orElse(null); - assertNotNull(task); - assertNotNull("Retried task in scheduled state shouldn't have a SubworkflowId yet", task.getSubWorkflowId()); - subWorkflowId = task.getSubWorkflowId(); - - // poll and fail the first task in sub-workflow - task = workflowExecutionService.poll("junit_task_1", "test"); - task.setStatus(FAILED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.FAILED, workflow.getStatus()); - - // Retry the failed sub workflow - workflowExecutor.retry(subWorkflowId); - task = workflowExecutionService.poll("junit_task_1", "test"); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - subWorkflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); - assertNotNull(subWorkflow); - assertEquals(RUNNING, subWorkflow.getStatus()); - - task = workflowExecutionService.poll("junit_task_2", "test"); - assertEquals(subWorkflowId, task.getWorkflowInstanceId()); - String uuid = UUID.randomUUID().toString(); - task.getOutputData().put("uuid", uuid); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - subWorkflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); - assertNotNull(subWorkflow); - assertEquals(WorkflowStatus.COMPLETED, subWorkflow.getStatus()); - assertNotNull(subWorkflow.getOutput()); - assertTrue(subWorkflow.getOutput().containsKey("o1")); - assertTrue(subWorkflow.getOutput().containsKey("o2")); - assertEquals("sub workflow input param1", subWorkflow.getOutput().get("o1")); - assertEquals(uuid, subWorkflow.getOutput().get("o2")); - - // Simulating SystemTaskWorkerCoordinator - workflowExecutor.executeSystemTask(subworkflow, subWorkflow.getParentWorkflowTaskId(), 1); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - - task = workflowExecutionService.poll("junit_task_6", "test"); - assertNotNull(task); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); - - // reset retry count - taskDef = notFoundSafeGetTaskDef(taskName); - taskDef.setRetryCount(retryCount); - metadataService.updateTaskDef(taskDef); - - workflowTask = found.getTasks().stream().filter(t -> t.getType().equals(SUB_WORKFLOW.name())).findAny().orElse(null); - workflowTask.setTaskDefinition(null); - metadataService.updateWorkflowDef(found); - } - - @Test - public void testWait() { - - WorkflowDef workflowDef = new 
WorkflowDef();
-        workflowDef.setName("test_wait");
-        workflowDef.setSchemaVersion(2);
-
-        WorkflowTask waitWorkflowTask = new WorkflowTask();
-        waitWorkflowTask.setWorkflowTaskType(TaskType.WAIT);
-        waitWorkflowTask.setName("wait");
-        waitWorkflowTask.setTaskReferenceName("wait0");
-
-        WorkflowTask workflowTask = new WorkflowTask();
-        workflowTask.setName("junit_task_1");
-        workflowTask.setTaskReferenceName("t1");
-
-        workflowDef.getTasks().add(waitWorkflowTask);
-        workflowDef.getTasks().add(workflowTask);
-        metadataService.registerWorkflowDef(workflowDef);
-
-        String workflowId = startOrLoadWorkflowExecution(workflowDef.getName(), workflowDef.getVersion(), "", new HashMap<>(), null, null);
-        Workflow workflow = workflowExecutor.getWorkflow(workflowId, true);
-        assertNotNull(workflow);
-        assertEquals(1, workflow.getTasks().size());
-        assertEquals(RUNNING, workflow.getStatus());
-
-        Task waitTask = workflow.getTasks().get(0);
-        assertEquals(TaskType.WAIT.name(), waitTask.getTaskType());
-        waitTask.setStatus(COMPLETED);
-        workflowExecutor.updateTask(new TaskResult(waitTask));
-
-        Task task = workflowExecutionService.poll("junit_task_1", "test");
-        assertNotNull(task);
-        task.setStatus(Status.COMPLETED);
-        workflowExecutionService.updateTask(task);
-
-        workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
-        assertNotNull(workflow);
-        assertEquals("tasks:" + workflow.getTasks(), WorkflowStatus.COMPLETED, workflow.getStatus());
-    }
-
-    @Test
-    public void testWaitTimeout() throws Exception {
-
-        TaskDef taskDef = new TaskDef();
-        taskDef.setName("waitTimeout");
-        taskDef.setTimeoutSeconds(2);
-        taskDef.setRetryCount(1);
-        taskDef.setTimeoutPolicy(TimeoutPolicy.RETRY);
-        taskDef.setRetryDelaySeconds(10);
-        metadataService.registerTaskDef(Collections.singletonList(taskDef));
-
-        WorkflowDef workflowDef = new WorkflowDef();
-        workflowDef.setName("test_wait_timeout");
-        workflowDef.setSchemaVersion(2);
-
-        WorkflowTask waitWorkflowTask = new WorkflowTask();
-        waitWorkflowTask.setWorkflowTaskType(TaskType.WAIT);
-        waitWorkflowTask.setName("waitTimeout");
-        waitWorkflowTask.setTaskReferenceName("wait0");
-
-        WorkflowTask workflowTask = new WorkflowTask();
-        workflowTask.setName("junit_task_1");
-        workflowTask.setTaskReferenceName("t1");
-
-        workflowDef.getTasks().add(waitWorkflowTask);
-        workflowDef.getTasks().add(workflowTask);
-        metadataService.registerWorkflowDef(workflowDef);
-
-        String workflowId = startOrLoadWorkflowExecution(workflowDef.getName(), workflowDef.getVersion(), "", new HashMap<>(), null, null);
-        Workflow workflow = workflowExecutor.getWorkflow(workflowId, true);
-        assertNotNull(workflow);
-        assertEquals(1, workflow.getTasks().size());
-        assertEquals(RUNNING, workflow.getStatus());
-
-        // timeout the wait task and ensure it is retried
-        Thread.sleep(3000);
-        workflowExecutor.decide(workflowId);
-        workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
-        assertEquals(RUNNING, workflow.getStatus());
-        assertEquals(2, workflow.getTasks().size());
-        assertEquals(TIMED_OUT, workflow.getTasks().get(0).getStatus());
-        assertEquals(IN_PROGRESS, workflow.getTasks().get(1).getStatus());
-
-        Task waitTask = workflow.getTasks().get(1);
-        assertEquals(TaskType.WAIT.name(), waitTask.getTaskType());
-        waitTask.setStatus(COMPLETED);
-        workflowExecutor.updateTask(new TaskResult(waitTask));
-
-        Task task = workflowExecutionService.poll("junit_task_1", "test");
-        assertNotNull(task);
-        task.setStatus(Status.COMPLETED);
-        workflowExecutionService.updateTask(task);
- - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals("tasks:" + workflow.getTasks(), WorkflowStatus.COMPLETED, workflow.getStatus()); - } - - - @Test - public void testLambda() { - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("test_lambda_wf"); - workflowDef.setSchemaVersion(2); - - Map inputParams = new HashMap<>(); - inputParams.put("input", "${workflow.input}"); - inputParams.put("scriptExpression", "if ($.input.a==1){return {testvalue: true}} else{return {testvalue: false} }"); - WorkflowTask lambdaWorkflowTask = new WorkflowTask(); - lambdaWorkflowTask.setWorkflowTaskType(TaskType.LAMBDA); - lambdaWorkflowTask.setName("lambda"); - lambdaWorkflowTask.setInputParameters(inputParams); - lambdaWorkflowTask.setTaskReferenceName("lambda0"); - - workflowDef.getTasks().add(lambdaWorkflowTask); - - assertNotNull(workflowDef); - metadataService.registerWorkflowDef(workflowDef); - - Map inputs = new HashMap<>(); - inputs.put("a", 1); - String workflowId = startOrLoadWorkflowExecution(workflowDef.getName(), workflowDef.getVersion(), "", inputs, null, null); - Workflow workflow = workflowExecutor.getWorkflow(workflowId, true); - - assertNotNull(workflow); - assertEquals(1, workflow.getTasks().size()); - - workflowExecutor.decide(workflowId); - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - Task lambda_task = workflow.getTasks().get(0); - - assertEquals(lambda_task.getOutputData().toString(), "{result={testvalue=true}}"); - assertNotNull(workflow); - assertEquals("tasks:" + workflow.getTasks(), WorkflowStatus.COMPLETED, workflow.getStatus()); - } - - @Test - public void testTerminateTaskWithCompletedStatus() { - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("test_terminate_task_wf"); - workflowDef.setSchemaVersion(2); - - Map lambdaTaskInputParams = new HashMap<>(); - lambdaTaskInputParams.put("input", "${workflow.input}"); - lambdaTaskInputParams.put("scriptExpression", "if ($.input.a==1){return {testvalue: true}} else{return {testvalue: false}}"); - - WorkflowTask lambdaWorkflowTask = new WorkflowTask(); - lambdaWorkflowTask.setWorkflowTaskType(TaskType.LAMBDA); - lambdaWorkflowTask.setName("lambda"); - lambdaWorkflowTask.setInputParameters(lambdaTaskInputParams); - lambdaWorkflowTask.setTaskReferenceName("lambda0"); - - Map terminateTaskInputParams = new HashMap<>(); - terminateTaskInputParams.put(Terminate.getTerminationStatusParameter(), "COMPLETED"); - terminateTaskInputParams.put(Terminate.getTerminationWorkflowOutputParameter(), "${lambda0.output}"); - - WorkflowTask terminateWorkflowTask = new WorkflowTask(); - terminateWorkflowTask.setType(TaskType.TASK_TYPE_TERMINATE); - terminateWorkflowTask.setName("terminate"); - terminateWorkflowTask.setInputParameters(terminateTaskInputParams); - terminateWorkflowTask.setTaskReferenceName("terminate0"); - - WorkflowTask workflowTask2 = new WorkflowTask(); - workflowTask2.setName("junit_task_2"); - workflowTask2.setTaskReferenceName("t2"); - - workflowDef.getTasks().addAll(Arrays.asList(lambdaWorkflowTask, terminateWorkflowTask, workflowTask2)); - - assertNotNull(workflowDef); - metadataService.registerWorkflowDef(workflowDef); - - Map wfInput = new HashMap<>(); - wfInput.put("a", 1); - String workflowId = startOrLoadWorkflowExecution(workflowDef.getName(), workflowDef.getVersion(), "", wfInput, null, null); - Workflow workflow = workflowExecutor.getWorkflow(workflowId, true); - - assertNotNull(workflow); - 
assertEquals(2, workflow.getTasks().size()); - - workflowExecutor.decide(workflowId); - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - - assertNotNull(workflow); - assertEquals("tasks:" + workflow.getTasks(), WorkflowStatus.COMPLETED, workflow.getStatus()); - assertEquals(TaskType.TASK_TYPE_LAMBDA, workflow.getTasks().get(0).getTaskType()); - assertEquals(TaskType.TASK_TYPE_TERMINATE, workflow.getTasks().get(1).getTaskType()); - assertEquals(workflow.getTasks().get(1).getOutputData(), workflow.getOutput()); - - metadataService.unregisterWorkflowDef("test_terminate_task_wf", 1); - } - - @Test - public void testTerminateTaskWithFailedStatus() { - String failureWorkflowName = "failure_workflow"; - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("test_terminate_task_wf"); - workflowDef.setSchemaVersion(2); - - Map lambdaTaskInputParams = new HashMap<>(); - lambdaTaskInputParams.put("input", "${workflow.input}"); - lambdaTaskInputParams.put("scriptExpression", "if ($.input.a==1){return {testvalue: true}} else{return {testvalue: false}}"); - - WorkflowTask lambdaWorkflowTask = new WorkflowTask(); - lambdaWorkflowTask.setWorkflowTaskType(TaskType.LAMBDA); - lambdaWorkflowTask.setName("lambda"); - lambdaWorkflowTask.setInputParameters(lambdaTaskInputParams); - lambdaWorkflowTask.setTaskReferenceName("lambda0"); - - Map terminateTaskInputParams = new HashMap<>(); - terminateTaskInputParams.put(Terminate.getTerminationStatusParameter(), "FAILED"); - terminateTaskInputParams.put(Terminate.getTerminationWorkflowOutputParameter(), "${lambda0.output}"); - - WorkflowTask terminateWorkflowTask = new WorkflowTask(); - terminateWorkflowTask.setType(TaskType.TASK_TYPE_TERMINATE); - terminateWorkflowTask.setName("terminate"); - terminateWorkflowTask.setInputParameters(terminateTaskInputParams); - terminateWorkflowTask.setTaskReferenceName("terminate0"); - - WorkflowTask workflowTask2 = new WorkflowTask(); - workflowTask2.setName("junit_task_2"); - workflowTask2.setTaskReferenceName("t2"); - - workflowDef.getTasks().addAll(Arrays.asList(lambdaWorkflowTask, terminateWorkflowTask, workflowTask2)); - - WorkflowDef failureWorkflowDef = new WorkflowDef(); - failureWorkflowDef.setName(failureWorkflowName); - failureWorkflowDef.setTasks(Collections.singletonList(lambdaWorkflowTask)); - - workflowDef.setFailureWorkflow(failureWorkflowName); - - metadataService.registerWorkflowDef(failureWorkflowDef); - metadataService.registerWorkflowDef(workflowDef); - - Map wfInput = new HashMap<>(); - wfInput.put("a", 1); - String workflowId = startOrLoadWorkflowExecution(workflowDef.getName(), workflowDef.getVersion(), "", wfInput, null, null); - Workflow workflow = workflowExecutor.getWorkflow(workflowId, true); - - assertNotNull(workflow); - assertEquals(2, workflow.getTasks().size()); - - workflowExecutor.decide(workflowId); - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - - assertNotNull(workflow); - assertEquals("tasks:" + workflow.getTasks(), WorkflowStatus.FAILED, workflow.getStatus()); - assertEquals(TaskType.TASK_TYPE_LAMBDA, workflow.getTasks().get(0).getTaskType()); - assertEquals(TaskType.TASK_TYPE_TERMINATE, workflow.getTasks().get(1).getTaskType()); - assertNotNull(workflow.getOutput()); - assertNotNull(workflow.getOutput().get("conductor.failure_workflow")); - - String failureWorkflowId = (String)workflow.getOutput().get("conductor.failure_workflow"); - Workflow failureWorkflow = 
workflowExecutionService.getExecutionStatus(failureWorkflowId, true); - assertNotNull(failureWorkflow); - assertEquals(failureWorkflowName, failureWorkflow.getWorkflowName()); - assertEquals(workflowId, failureWorkflow.getInput().get("workflowId")); - assertEquals(WorkflowStatus.COMPLETED, failureWorkflow.getStatus()); - assertEquals(1, failureWorkflow.getTasks().size()); - assertEquals(TaskType.TASK_TYPE_LAMBDA, failureWorkflow.getTasks().get(0).getTaskType()); - - metadataService.unregisterWorkflowDef("test_terminate_task_wf", 1); - metadataService.unregisterWorkflowDef(failureWorkflowName, 1); - } - - @Test - public void testEventWorkflow() { - - TaskDef taskDef = new TaskDef(); - taskDef.setName("eventX"); - taskDef.setTimeoutSeconds(1); - - metadataService.registerTaskDef(Collections.singletonList(taskDef)); - - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("test_event"); - workflowDef.setSchemaVersion(2); - - WorkflowTask eventWorkflowTask = new WorkflowTask(); - eventWorkflowTask.setWorkflowTaskType(TaskType.EVENT); - eventWorkflowTask.setName("eventX"); - eventWorkflowTask.setTaskReferenceName("wait0"); - eventWorkflowTask.setSink("conductor"); - - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setName("junit_task_1"); - workflowTask.setTaskReferenceName("t1"); - - workflowDef.getTasks().add(eventWorkflowTask); - workflowDef.getTasks().add(workflowTask); - metadataService.registerWorkflowDef(workflowDef); - - String workflowId = startOrLoadWorkflowExecution(workflowDef.getName(), workflowDef.getVersion(), "", new HashMap<>(), null, null); - Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); - Workflow workflow = workflowExecutor.getWorkflow(workflowId, true); - assertNotNull(workflow); - - Task eventTask = workflow.getTasks().get(0); - assertEquals(TaskType.EVENT.name(), eventTask.getTaskType()); - assertEquals(COMPLETED, eventTask.getStatus()); - assertFalse(eventTask.getOutputData().isEmpty()); - assertNotNull(eventTask.getOutputData().get("event_produced")); - - Task task = workflowExecutionService.poll("junit_task_1", "test"); - assertNotNull(task); - task.setStatus(Status.COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals("tasks:" + workflow.getTasks(), WorkflowStatus.COMPLETED, workflow.getStatus()); - } - - @Test - public void testTaskWithCallbackAfterSecondsInWorkflow() throws InterruptedException { - WorkflowDef workflowDef = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1); - assertNotNull(workflowDef); - - String workflowId = startOrLoadWorkflowExecution(workflowDef.getName(), workflowDef.getVersion(), "", new HashMap<>(), null, null); - Workflow workflow = workflowExecutor.getWorkflow(workflowId, true); - assertNotNull(workflow); - - Task task = workflowExecutionService.poll("junit_task_1", "test"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - String taskId = task.getTaskId(); - task.setStatus(IN_PROGRESS); - task.setCallbackAfterSeconds(2L); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(1, workflow.getTasks().size()); - - // task should not be available - task = workflowExecutionService.poll("junit_task_1", "test"); - assertNull(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - 
assertNotNull(workflow); - assertEquals(1, workflow.getTasks().size()); - - Thread.sleep(2050); - queueDAO.processUnacks("junit_task_1"); - task = workflowExecutionService.poll("junit_task_1", "test"); - assertNotNull(task); - assertEquals(taskId, task.getTaskId()); - - task.setStatus(Status.COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(2, workflow.getTasks().size()); - - task = workflowExecutionService.poll("junit_task_2", "test"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - taskId = task.getTaskId(); - task.setStatus(IN_PROGRESS); - task.setCallbackAfterSeconds(5L); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(2, workflow.getTasks().size()); - - // task should not be available - task = workflowExecutionService.poll("junit_task_1", "test"); - assertNull(task); - - Uninterruptibles.sleepUninterruptibly(5, TimeUnit.SECONDS); - - task = workflowExecutionService.poll("junit_task_2", "test"); - assertNotNull(task); - assertEquals(taskId, task.getTaskId()); - - task.setStatus(Status.COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(2, workflow.getTasks().size()); - assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); - } - - @Test - public void testWorkflowUsingExternalPayloadStorage() { - WorkflowDef found = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1); - assertNotNull(found); - - Map outputParameters = found.getOutputParameters(); - outputParameters.put("workflow_output", "${t1.output.op}"); - metadataService.updateWorkflowDef(found); - - String workflowInputPath = INITIAL_WORKFLOW_INPUT_PATH; - String correlationId = "wf_external_storage"; - String workflowId = workflowExecutor.startWorkflow(LINEAR_WORKFLOW_T1_T2, 1, correlationId, null, workflowInputPath, null, null); - assertNotNull(workflowId); - - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertTrue("The workflow input should not be persisted", workflow.getInput().isEmpty()); - assertEquals(workflowInputPath, workflow.getExternalInputPayloadStoragePath()); - assertEquals(workflow.getReasonForIncompletion(), WorkflowStatus.RUNNING, workflow.getStatus()); - assertEquals(1, workflow.getTasks().size()); - - // Polling for the first task - Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); - assertNotNull(task); - assertEquals("junit_task_1", task.getTaskType()); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - assertEquals(workflowId, task.getWorkflowInstanceId()); - - // update first task with COMPLETED - String taskOutputPath = TASK_OUTPUT_PATH; - task.setOutputData(null); - task.setExternalOutputPayloadStoragePath(taskOutputPath); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertTrue("The workflow input should not be persisted", workflow.getInput().isEmpty()); - assertEquals(workflowInputPath, workflow.getExternalInputPayloadStoragePath()); - assertEquals(workflow.getReasonForIncompletion(), WorkflowStatus.RUNNING, 
workflow.getStatus()); - assertEquals(2, workflow.getTasks().size()); - assertTrue("The first task output should not be persisted", workflow.getTasks().get(0).getOutputData().isEmpty()); - assertTrue("The second task input should not be persisted", workflow.getTasks().get(1).getInputData().isEmpty()); - assertEquals(taskOutputPath, workflow.getTasks().get(0).getExternalOutputPayloadStoragePath()); - assertEquals(INPUT_PAYLOAD_PATH, workflow.getTasks().get(1).getExternalInputPayloadStoragePath()); - - // Polling for the second task - task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); - assertNotNull(task); - assertEquals("junit_task_2", task.getTaskType()); - assertTrue(task.getInputData().isEmpty()); - assertNotNull(task.getExternalInputPayloadStoragePath()); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - assertEquals(workflowId, task.getWorkflowInstanceId()); - - // update second task with COMPLETED - task.getOutputData().put("op", "success_task2"); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertTrue("The workflow input should not be persisted", workflow.getInput().isEmpty()); - assertEquals(workflowInputPath, workflow.getExternalInputPayloadStoragePath()); - assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); - assertEquals(2, workflow.getTasks().size()); - assertTrue("The first task output should not be persisted", workflow.getTasks().get(0).getOutputData().isEmpty()); - assertTrue("The second task input should not be persisted", workflow.getTasks().get(1).getInputData().isEmpty()); - assertEquals(taskOutputPath, workflow.getTasks().get(0).getExternalOutputPayloadStoragePath()); - assertEquals(INPUT_PAYLOAD_PATH, workflow.getTasks().get(1).getExternalInputPayloadStoragePath()); - assertTrue(workflow.getOutput().isEmpty()); - assertNotNull(workflow.getExternalOutputPayloadStoragePath()); - assertEquals(WORKFLOW_OUTPUT_PATH, workflow.getExternalOutputPayloadStoragePath()); - } - - @Test - public void testWorkflowWithConditionalSystemTaskUsingExternalPayloadStorage() { - createConditionalWFWithSystemTask(); - WorkflowDef workflowDef = metadataService.getWorkflowDef(CONDITIONAL_SYSTEM_WORKFLOW, 1); - assertNotNull(workflowDef); - - String workflowInputPath = INITIAL_WORKFLOW_INPUT_PATH; - String correlationId = "conditional_http_external_storage"; - String workflowId = workflowExecutor.startWorkflow(CONDITIONAL_SYSTEM_WORKFLOW, 1, correlationId, null, workflowInputPath, null, null); - assertNotNull(workflowId); - - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertTrue("The workflow input should not be persisted", workflow.getInput().isEmpty()); - assertEquals(workflowInputPath, workflow.getExternalInputPayloadStoragePath()); - assertEquals(workflow.getReasonForIncompletion(), WorkflowStatus.RUNNING, workflow.getStatus()); - assertEquals(1, workflow.getTasks().size()); - - // Polling for the first task - Task task = workflowExecutionService.poll("junit_task_1", "junit.worker.task_1"); - assertNotNull(task); - assertEquals("junit_task_1", task.getTaskType()); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - assertEquals(workflowId, task.getWorkflowInstanceId()); - - // update first task with COMPLETED and using external payload storage for output data - String taskOutputPath = TASK_OUTPUT_PATH; - 
task.setOutputData(null); - task.setExternalOutputPayloadStoragePath(taskOutputPath); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(workflow.getReasonForIncompletion(), WorkflowStatus.RUNNING, workflow.getStatus()); - assertEquals(3, workflow.getTasks().size()); - assertEquals(DECISION.name(), workflow.getTasks().get(1).getTaskType()); - assertEquals(UserTask.NAME, workflow.getTasks().get(2).getTaskType()); - assertEquals(0, workflow.getTasks().get(2).getPollCount()); - - // simulate the SystemTaskWorkerCoordinator action - String taskId = workflow.getTaskByRefName("user_task").getTaskId(); - workflowExecutor.executeSystemTask(userTask, taskId, 1); - - task = workflowExecutionService.getTask(taskId); - assertEquals(COMPLETED, task.getStatus()); - assertEquals(0, workflow.getTasks().get(2).getPollCount()); - assertTrue("The task input should not be persisted", task.getInputData().isEmpty()); - assertEquals(INPUT_PAYLOAD_PATH, task.getExternalInputPayloadStoragePath()); - assertEquals(104, task.getOutputData().get("size")); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(workflow.getReasonForIncompletion(), WorkflowStatus.RUNNING, workflow.getStatus()); - assertEquals(4, workflow.getTasks().size()); - - // Polling for the last task - task = workflowExecutionService.poll("junit_task_3", "junit.worker.task_3"); - assertNotNull(task); - assertEquals("junit_task_3", task.getTaskType()); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - assertEquals(workflowId, task.getWorkflowInstanceId()); - - // update final task with COMPLETED - task.getOutputData().put("op", "success_task3"); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertTrue("The workflow input should not be persisted", workflow.getInput().isEmpty()); - assertEquals(workflowInputPath, workflow.getExternalInputPayloadStoragePath()); - assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); - assertEquals(4, workflow.getTasks().size()); - assertTrue(workflow.getOutput().isEmpty()); - assertNotNull(workflow.getExternalOutputPayloadStoragePath()); - assertEquals(WORKFLOW_OUTPUT_PATH, workflow.getExternalOutputPayloadStoragePath()); - } - - @Test - public void testWorkflowWithForkJoinUsingExternalPayloadStorage() { - createForkJoinWorkflow(); - - WorkflowDef workflowDef = metadataService.getWorkflowDef(FORK_JOIN_WF, 1); - assertNotNull(workflowDef); - - String workflowInputPath = "workflow/input"; - String correlationId = "fork_join_external_storage"; - String workflowId = workflowExecutor.startWorkflow(FORK_JOIN_WF, 1, correlationId, null, workflowInputPath, null, null); - assertNotNull(workflowId); - - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertTrue("The workflow input should not be persisted", workflow.getInput().isEmpty()); - assertEquals(workflowInputPath, workflow.getExternalInputPayloadStoragePath()); - assertEquals(workflow.getReasonForIncompletion(), WorkflowStatus.RUNNING, workflow.getStatus()); - assertEquals(4, workflow.getTasks().size()); - - // Polling for first task from left fork - Task task1 = workflowExecutionService.poll("junit_task_1", "junit.worker.task_1"); - 
assertNotNull(task1); - assertEquals("junit_task_1", task1.getTaskType()); - assertTrue(workflowExecutionService.ackTaskReceived(task1.getTaskId())); - assertEquals(workflowId, task1.getWorkflowInstanceId()); - - // Polling for second task from left fork should not return a task - Task task3 = workflowExecutionService.poll("junit_task_3", "junit.worker.task_3"); - assertNull(task3); - - // Polling for first task from right fork - Task task2 = workflowExecutionService.poll("junit_task_2", "junit.worker.task_2"); - assertNotNull(task2); - assertEquals("junit_task_2", task2.getTaskType()); - assertTrue(workflowExecutionService.ackTaskReceived(task2.getTaskId())); - assertEquals(workflowId, task2.getWorkflowInstanceId()); - - // Update first task of left fork to COMPLETED - task1.setStatus(COMPLETED); - workflowExecutionService.updateTask(task1); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(workflow.getReasonForIncompletion(), WorkflowStatus.RUNNING, workflow.getStatus()); - assertEquals(5, workflow.getTasks().size()); - - // Polling for second task from left fork - task3 = workflowExecutionService.poll("junit_task_3", "junit.worker.task_3"); - assertNotNull(task3); - assertEquals("junit_task_3", task3.getTaskType()); - assertTrue(workflowExecutionService.ackTaskReceived(task3.getTaskId())); - assertEquals(workflowId, task3.getWorkflowInstanceId()); - - // Update both tasks to COMPLETED with output in external storage - task2.setOutputData(null); - task2.setExternalOutputPayloadStoragePath(TASK_OUTPUT_PATH); - task2.setStatus(COMPLETED); - workflowExecutionService.updateTask(task2); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - - task3.setOutputData(null); - task3.setExternalOutputPayloadStoragePath(TASK_OUTPUT_PATH); - task3.setStatus(COMPLETED); - workflowExecutionService.updateTask(task3); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(6, workflow.getTasks().size()); - assertTrue("The JOIN task output should not be persisted", workflow.getTasks().get(3).getOutputData().isEmpty()); - assertEquals(TASK_OUTPUT_PATH, workflow.getTasks().get(3).getExternalOutputPayloadStoragePath()); - - // Polling for task after the JOIN task - Task task4 = workflowExecutionService.poll("junit_task_4", "junit.worker.task_4"); - assertNotNull(task4); - assertEquals("junit_task_4", task4.getTaskType()); - assertTrue(workflowExecutionService.ackTaskReceived(task4.getTaskId())); - assertEquals(workflowId, task4.getWorkflowInstanceId()); - - task4.setStatus(COMPLETED); - workflowExecutionService.updateTask(task4); - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertTrue("The workflow input should not be persisted", workflow.getInput().isEmpty()); - assertEquals(workflowInputPath, workflow.getExternalInputPayloadStoragePath()); - assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); - assertEquals(6, workflow.getTasks().size()); - assertTrue("The task_2 output should not be persisted", workflow.getTasks().get(2).getOutputData().isEmpty()); - assertEquals(TASK_OUTPUT_PATH, workflow.getTasks().get(3).getExternalOutputPayloadStoragePath()); - assertTrue("The JOIN task output should not be persisted", workflow.getTasks().get(3).getOutputData().isEmpty()); - assertEquals(TASK_OUTPUT_PATH, 
workflow.getTasks().get(3).getExternalOutputPayloadStoragePath()); - assertTrue("The task_3 output should not be persisted", workflow.getTasks().get(4).getOutputData().isEmpty()); - assertEquals(TASK_OUTPUT_PATH, workflow.getTasks().get(3).getExternalOutputPayloadStoragePath()); - } - - @Test - public void testWorkflowWithSubWorkflowUsingExternalPayloadStorage() { - createWorkflow_TaskSubworkflowTask(); - WorkflowDef workflowDef = metadataService.getWorkflowDef(WF_T1_SWF_T2, 1); - assertNotNull(workflowDef); - - String workflowInputPath = INITIAL_WORKFLOW_INPUT_PATH; - String correlationId = "subwf_external_storage"; - String workflowId = workflowExecutor.startWorkflow(WF_T1_SWF_T2, 1, correlationId, null, workflowInputPath, null, null); - assertNotNull(workflowId); - - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertTrue("The workflow input should not be persisted", workflow.getInput().isEmpty()); - assertEquals(workflowInputPath, workflow.getExternalInputPayloadStoragePath()); - assertEquals(workflow.getReasonForIncompletion(), WorkflowStatus.RUNNING, workflow.getStatus()); - assertEquals(1, workflow.getTasks().size()); - - // Polling for the first task - Task task = workflowExecutionService.poll("junit_task_1", "junit.worker.task_1"); - assertNotNull(task); - assertEquals("junit_task_1", task.getTaskType()); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - assertEquals(workflowId, task.getWorkflowInstanceId()); - - // update first task with COMPLETED and using external payload storage for output data - String taskOutputPath = TASK_OUTPUT_PATH; - task.setOutputData(null); - task.setExternalOutputPayloadStoragePath(taskOutputPath); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertTrue("The workflow input should not be persisted", workflow.getInput().isEmpty()); - assertEquals(workflowInputPath, workflow.getExternalInputPayloadStoragePath()); - assertEquals(workflow.getReasonForIncompletion(), WorkflowStatus.RUNNING, workflow.getStatus()); - assertEquals(2, workflow.getTasks().size()); - assertTrue("The first task output should not be persisted", workflow.getTasks().get(0).getOutputData().isEmpty()); - assertTrue("The second task input should not be persisted", workflow.getTasks().get(1).getInputData().isEmpty()); - assertEquals(taskOutputPath, workflow.getTasks().get(0).getExternalOutputPayloadStoragePath()); - assertEquals(INPUT_PAYLOAD_PATH, workflow.getTasks().get(1).getExternalInputPayloadStoragePath()); - assertEquals(SUB_WORKFLOW.name(), workflow.getTasks().get(1).getTaskType()); - - // Simulating SystemTaskWorkerCoordinator to execute async system tasks - String subWorkflowTaskId = workflow.getTaskByRefName("swt").getTaskId(); - workflowExecutor.executeSystemTask(dummySubWorkflowSystemTask, subWorkflowTaskId, 1); - - // Polling for the task within the sub_workflow - task = workflowExecutionService.poll("junit_task_3", "task3.junit.worker"); - assertNotNull(task); - assertEquals("junit_task_3", task.getTaskType()); - assertEquals(IN_PROGRESS, task.getStatus()); - assertEquals("TEST_SAMPLE", task.getInputData().get("p1")); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - // Get the sub-workflow - String subWorkflowId = task.getWorkflowInstanceId(); - Workflow subWorkflow = 
workflowExecutionService.getExecutionStatus(subWorkflowId, true); - assertNotNull(subWorkflow); - assertEquals(workflowId, subWorkflow.getParentWorkflowId()); - assertTrue("The sub-workflow input should not be persisted", subWorkflow.getInput().isEmpty()); - assertEquals(INPUT_PAYLOAD_PATH, subWorkflow.getExternalInputPayloadStoragePath()); - assertEquals(subWorkflow.getReasonForIncompletion(), WorkflowStatus.RUNNING, subWorkflow.getStatus()); - assertEquals(1, subWorkflow.getTasks().size()); - - // update the task within sub-workflow to COMPLETED - taskOutputPath = TASK_OUTPUT_PATH; - task.setOutputData(null); - task.setExternalOutputPayloadStoragePath(taskOutputPath); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - // Execute again to re-evaluate the Subworkflow task. - workflowExecutor.executeSystemTask(dummySubWorkflowSystemTask, subWorkflowTaskId, 1); - - // check the sub workflow - subWorkflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); - assertNotNull(subWorkflow); - assertEquals(1, subWorkflow.getTasks().size()); - assertEquals(WorkflowStatus.COMPLETED, subWorkflow.getStatus()); - assertTrue(subWorkflow.getOutput().isEmpty()); - assertNotNull(subWorkflow.getExternalOutputPayloadStoragePath()); - assertEquals(WORKFLOW_OUTPUT_PATH, subWorkflow.getExternalOutputPayloadStoragePath()); - - // check the workflow - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); - assertEquals(3, workflow.getTasks().size()); - - // Check if subworkflow task has external payload path copied from subworkflow - Task subWorkflowTask = workflow.getTasks().stream() - .filter(wtask -> wtask.getTaskType().equals(SUB_WORKFLOW.name())) - .collect(Collectors.toList()).get(0); - - assertEquals(subWorkflowTask.getStatus(), COMPLETED); - assertTrue(subWorkflowTask.getOutputData().isEmpty()); - assertNotNull(subWorkflowTask.getExternalOutputPayloadStoragePath()); - - // Polling for the last task - task = workflowExecutionService.poll("junit_task_2", "junit.worker.task_2"); - assertNotNull(task); - assertEquals("junit_task_2", task.getTaskType()); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - assertEquals(workflowId, task.getWorkflowInstanceId()); - assertTrue("The task input should not be persisted", task.getInputData().isEmpty()); - assertEquals(INPUT_PAYLOAD_PATH, task.getExternalInputPayloadStoragePath()); - - // update last task with COMPLETED and using external payload storage for output data - taskOutputPath = TASK_OUTPUT_PATH; - task.setOutputData(null); - task.setExternalOutputPayloadStoragePath(taskOutputPath); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - // check the parent workflow - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); - assertEquals(3, workflow.getTasks().size()); - assertTrue(workflow.getOutput().isEmpty()); - assertNotNull(workflow.getExternalOutputPayloadStoragePath()); - assertEquals(WORKFLOW_OUTPUT_PATH, workflow.getExternalOutputPayloadStoragePath()); - } - - @Test - public void testExecutionTimes() { - - String taskName = "junit_task_1"; - TaskDef taskDef = notFoundSafeGetTaskDef(taskName); - taskDef.setTimeoutSeconds(10); - metadataService.updateTaskDef(taskDef); - - metadataService.registerTaskDef(Collections.singletonList(taskDef)); - - 
WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("test_execution_times_wf"); - workflowDef.setSchemaVersion(2); - - WorkflowTask workflowTask1 = new WorkflowTask(); - workflowTask1.setName("junit_task_1"); - workflowTask1.setTaskReferenceName("task1"); - - WorkflowTask workflowTask2 = new WorkflowTask(); - workflowTask2.setName("junit_task_1"); - workflowTask2.setTaskReferenceName("task2"); - - WorkflowTask workflowTask3 = new WorkflowTask(); - workflowTask3.setName("junit_task_1"); - workflowTask3.setTaskReferenceName("task3"); - - WorkflowTask forkTask = new WorkflowTask(); - forkTask.setType(TaskType.FORK_JOIN.name()); - forkTask.setName("forktask1"); - forkTask.setTaskReferenceName("forktask1"); - - forkTask.getForkTasks().add(Collections.singletonList(workflowTask2)); - forkTask.getForkTasks().add(Collections.singletonList(workflowTask3)); - - WorkflowTask joinTask = new WorkflowTask(); - joinTask.setType(TaskType.JOIN.name()); - joinTask.setTaskReferenceName("jointask"); - joinTask.setJoinOn(Arrays.asList("task2", "task3")); - - Map decisionInputParameters = new HashMap<>(); - decisionInputParameters.put("case", "a"); - - WorkflowTask decisionTask = new WorkflowTask(); - decisionTask.setType(TaskType.DECISION.name()); - decisionTask.setName("decision1"); - decisionTask.setTaskReferenceName("decision1"); - decisionTask.setInputParameters(decisionInputParameters); - decisionTask.setDefaultCase(Collections.singletonList(workflowTask1)); - decisionTask.setCaseValueParam("case"); - Map> decisionCases = new HashMap<>(); - decisionCases.put("a", Arrays.asList(forkTask, joinTask)); - decisionTask.setDecisionCases(decisionCases); - - workflowDef.getTasks().add(decisionTask); - - assertNotNull(workflowDef); - - metadataService.registerWorkflowDef(workflowDef); - - Map workflowInput = Collections.emptyMap(); - //noinspection unchecked - String workflowId = startOrLoadWorkflowExecution(workflowDef.getName(), workflowDef.getVersion(), "test", workflowInput, null, null); - Workflow workflow = workflowExecutor.getWorkflow(workflowId, true); - - assertNotNull(workflow); - assertEquals(5, workflow.getTasks().size()); - - Task task = workflowExecutionService.poll("junit_task_1", "test"); - assertNotNull(task); - task.setStatus(Status.COMPLETED); - workflowExecutionService.updateTask(task); - - task = workflowExecutionService.poll("junit_task_1", "test"); - assertNotNull(task); - task.setStatus(Status.COMPLETED); - workflowExecutionService.updateTask(task); - - workflowExecutor.decide(workflowId); - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); - - workflow.getTasks().forEach(workflowTask -> { - assertTrue(workflowTask.getScheduledTime() <= workflowTask.getStartTime()); - assertTrue("" + (workflowTask.getStartTime() - workflowTask.getEndTime()), workflowTask.getStartTime() <= workflowTask.getEndTime()); - }); - - assertEquals("decision1", workflow.getTasks().get(0).getReferenceTaskName()); - assertEquals("forktask1", workflow.getTasks().get(1).getReferenceTaskName()); - assertEquals("task2", workflow.getTasks().get(2).getReferenceTaskName()); - assertEquals("task3", workflow.getTasks().get(3).getReferenceTaskName()); - assertEquals("jointask", workflow.getTasks().get(4).getReferenceTaskName()); - - metadataService.unregisterWorkflowDef(workflowDef.getName(), 1); - - } - - @Test - public void testRetryWorkflowUsingExternalPayloadStorage() { - WorkflowDef found = 
metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1); - assertNotNull(found); - - Map outputParameters = found.getOutputParameters(); - outputParameters.put("workflow_output", "${t1.output.op}"); - metadataService.updateWorkflowDef(found); - - String taskName = "junit_task_2"; - TaskDef taskDef = metadataService.getTaskDef(taskName); - taskDef.setRetryCount(2); - taskDef.setRetryDelaySeconds(0); - metadataService.updateTaskDef(taskDef); - - String workflowInputPath = INITIAL_WORKFLOW_INPUT_PATH; - String correlationId = "wf_external_storage"; - String workflowId = workflowExecutor.startWorkflow(LINEAR_WORKFLOW_T1_T2, 1, correlationId, null, workflowInputPath, null, null); - assertNotNull(workflowId); - - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertTrue("The workflow input should not be persisted", workflow.getInput().isEmpty()); - assertEquals(workflowInputPath, workflow.getExternalInputPayloadStoragePath()); - assertEquals(workflow.getReasonForIncompletion(), WorkflowStatus.RUNNING, workflow.getStatus()); - assertEquals(1, workflow.getTasks().size()); - - // Polling for the first task - Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); - assertNotNull(task); - assertEquals("junit_task_1", task.getTaskType()); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - assertEquals(workflowId, task.getWorkflowInstanceId()); - - // update first task with COMPLETED - String taskOutputPath = TASK_OUTPUT_PATH; - task.setOutputData(null); - task.setExternalOutputPayloadStoragePath(taskOutputPath); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - // Polling for the second task - task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); - assertNotNull(task); - assertEquals("junit_task_2", task.getTaskType()); - assertTrue(task.getInputData().isEmpty()); - assertNotNull(task.getExternalInputPayloadStoragePath()); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - assertEquals(workflowId, task.getWorkflowInstanceId()); - - // update second task with FAILED - task.getOutputData().put("op", "failed_task2"); - task.setStatus(FAILED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertTrue("The workflow input should not be persisted", workflow.getInput().isEmpty()); - assertEquals(workflowInputPath, workflow.getExternalInputPayloadStoragePath()); - assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); - - // Polling again for the second task - task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); - assertNotNull(task); - assertEquals("junit_task_2", task.getTaskType()); - assertTrue(task.getInputData().isEmpty()); - assertNotNull(task.getExternalInputPayloadStoragePath()); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - assertEquals(workflowId, task.getWorkflowInstanceId()); - - // update second task with COMPLETED - task.getOutputData().put("op", "success_task2"); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertTrue("The workflow input should not be persisted", workflow.getInput().isEmpty()); - assertEquals(workflowInputPath, workflow.getExternalInputPayloadStoragePath()); - 
assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); - assertEquals(3, workflow.getTasks().size()); - assertTrue("The first task output should not be persisted", workflow.getTasks().get(0).getOutputData().isEmpty()); - assertTrue("The second task input should not be persisted", workflow.getTasks().get(1).getInputData().isEmpty()); - assertTrue("The second task input should not be persisted", workflow.getTasks().get(2).getInputData().isEmpty()); - assertEquals(taskOutputPath, workflow.getTasks().get(0).getExternalOutputPayloadStoragePath()); - assertEquals(INPUT_PAYLOAD_PATH, workflow.getTasks().get(1).getExternalInputPayloadStoragePath()); - assertEquals(INPUT_PAYLOAD_PATH, workflow.getTasks().get(2).getExternalInputPayloadStoragePath()); - assertTrue(workflow.getOutput().isEmpty()); - assertNotNull(workflow.getExternalOutputPayloadStoragePath()); - assertEquals(WORKFLOW_OUTPUT_PATH, workflow.getExternalOutputPayloadStoragePath()); - } - - @Test - public void testRateLimiting() { - // Create a dynamic workflow definition with one simple task - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("test_concurrency_limits"); - workflowDef.setVersion(1); - - TaskDef taskDef = new TaskDef(); - taskDef.setName("test_task_with_ratelimits"); - taskDef.setRateLimitFrequencyInSeconds(600); - taskDef.setRateLimitPerFrequency(1); - - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setTaskReferenceName("test_task_with_ratelimits"); - workflowTask.setName("test_task_with_ratelimits"); - workflowTask.setType(UserTask.NAME); - workflowTask.setTaskDefinition(taskDef); - Map userIP = new HashMap<>(); - - workflowDef.setTasks(Arrays.asList(workflowTask)); - - String workflowInstanceId1 = workflowExecutor.startWorkflow(workflowDef, new HashMap<>(), - "", - "", - 0, - "", - "", - "", - new HashMap<>()); - - assertNotNull(workflowInstanceId1); - - Workflow workflow1 = workflowExecutionService.getExecutionStatus(workflowInstanceId1, true); - assertNotNull(workflow1); - assertEquals(RUNNING, workflow1.getStatus()); - assertEquals(1, workflow1.getTasks().size()); //The very first task is the one that should be scheduled. - - UserTask userTask = new UserTask(); - - Task task = workflow1.getTasks().get(0); - workflowExecutor.executeSystemTask(userTask, task.getTaskId(), 30); - - workflow1 = workflowExecutionService.getExecutionStatus(workflowInstanceId1, true); - - String workflowInstanceId2 = workflowExecutor.startWorkflow(workflowDef, new HashMap<>(), - "", - "", - 0, - "", - "", - "", - new HashMap<>()); - - assertNotNull(workflowInstanceId2); - - Workflow workflow2 = workflowExecutionService.getExecutionStatus(workflowInstanceId2, true); - assertNotNull(workflow2); - assertEquals(RUNNING, workflow2.getStatus()); - assertEquals(1, workflow2.getTasks().size()); //The very first task is the one that should be scheduled. 
- - // Try to execute second task - Task task2 = workflow2.getTasks().get(0); - workflowExecutor.executeSystemTask(userTask, task2.getTaskId(), 30); - workflow2 = workflowExecutionService.getExecutionStatus(workflowInstanceId2, true); - task2 = workflow2.getTasks().get(0); - assertEquals(SCHEDULED, task2.getStatus()); - } - - @Test - public void testSimpleWorkflowWithOptionalTask() throws Exception { - createOptionalTaskWorkflow(); - - metadataService.getWorkflowDef(WORKFLOW_WITH_OPTIONAL_TASK, 1); - - String correlationId = "unit_test_1"; - Map workflowInput = new HashMap<>(); - String inputParam1 = "p1 value"; - workflowInput.put("param1", inputParam1); - workflowInput.put("param2", "p2 value"); - String workflowId = startOrLoadWorkflowExecution(WORKFLOW_WITH_OPTIONAL_TASK, 1, correlationId, workflowInput, null, null); - logger.debug("testSimpleWorkflowWithOptionalTask.wfid=" + workflowId); - assertNotNull(workflowId); - - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - assertEquals(1, workflow.getTasks().size()); //The very first task is the one that should be scheduled. - assertEquals(1, queueDAO.getSize("task_optional")); - - // Polling for the first task should return the first task - Task task = workflowExecutionService.poll("task_optional", "task1.junit.worker.optional"); - assertNotNull(task); - assertEquals("task_optional", task.getTaskType()); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - assertEquals(workflowId, task.getWorkflowInstanceId()); - - // As the task_optional is out of the queue, the next poll should not get it - Task nullTask = workflowExecutionService.poll("task_optional", "task1.junit.worker.optional"); - assertNull(nullTask); - - TaskResult taskResult = new TaskResult(task); - taskResult.setReasonForIncompletion("NETWORK ERROR"); - taskResult.setStatus(TaskResult.Status.FAILED); - - workflowExecutionService.updateTask(taskResult); - - workflowExecutor.decide(workflowId); - assertEquals(1, queueDAO.getSize("task_optional")); - - // The first task would be failed and a new task will be scheduled - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - assertEquals(2, workflow.getTasks().size()); - assertTrue(workflow.getTasks().stream().allMatch(t -> t.getReferenceTaskName().equals("task_optional_t1"))); - assertEquals(FAILED, workflow.getTasks().get(0).getStatus()); - assertEquals(SCHEDULED, workflow.getTasks().get(1).getStatus()); - - // Polling now should get the same task back because it should have been put back in the queue - Task taskAgain = workflowExecutionService.poll("task_optional", "task1.junit.worker"); - assertNotNull(taskAgain); - - Thread.sleep(5000); - - // The second task would be timed-out and completed with errors - workflowExecutor.decide(workflowId); - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - - assertEquals(0, queueDAO.getSize("task_optional")); - assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); - System.out.println(workflow.getTasks()); - System.out.println(workflow.getTasks().get(1)); - System.out.println(workflow.getTasks().get(2)); - assertEquals(3, workflow.getTasks().size()); - assertEquals(COMPLETED_WITH_ERRORS, workflow.getTasks().get(1).getStatus()); - - // poll for next task - task = workflowExecutionService.poll("junit_task_2", 
"task2.junit.worker.testTimeout"); - assertNotNull(task); - assertEquals("junit_task_2", task.getTaskType()); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - task.setStatus(COMPLETED); - task.setReasonForIncompletion("unit test failure"); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); - } - - private void createOptionalTaskWorkflow() { - TaskDef task = new TaskDef(); - task.setName("task_optional"); - task.setTimeoutSeconds(5); - task.setRetryCount(RETRY_COUNT); - task.setTimeoutPolicy(TimeoutPolicy.RETRY); - task.setRetryDelaySeconds(0); - - metadataService.registerTaskDef(Collections.singletonList(task)); - - WorkflowDef def = new WorkflowDef(); - def.setName(WORKFLOW_WITH_OPTIONAL_TASK); - def.setDescription(def.getName()); - def.setVersion(1); - def.setInputParameters(Arrays.asList("param1", "param2")); - Map outputParameters = new HashMap<>(); - outputParameters.put("o1", "${workflow.input.param1}"); - outputParameters.put("o2", "${t2.output.uuid}"); - outputParameters.put("o3", "${t1.output.op}"); - def.setOutputParameters(outputParameters); - def.setSchemaVersion(2); - LinkedList wftasks = new LinkedList<>(); - - WorkflowTask wft1 = new WorkflowTask(); - wft1.setName("task_optional"); - Map ip1 = new HashMap<>(); - ip1.put("p1", "${workflow.input.param1}"); - ip1.put("p2", "${workflow.input.param2}"); - wft1.setInputParameters(ip1); - wft1.setOptional(true); - wft1.setTaskReferenceName("task_optional_t1"); - - WorkflowTask wft2 = new WorkflowTask(); - wft2.setName("junit_task_2"); - Map ip2 = new HashMap<>(); - ip2.put("tp1", "${workflow.input.param1}"); - ip2.put("tp2", "${t1.output.op}"); - wft2.setInputParameters(ip2); - wft2.setTaskReferenceName("t2"); - - wftasks.add(wft1); - wftasks.add(wft2); - def.setTasks(wftasks); - - metadataService.updateWorkflowDef(def); - } - - @Test - public void testSubWorkflowTaskToDomain() { - Map taskToDomain = new HashMap<>(); - taskToDomain.put("junit_task_1", "unittest1"); - taskToDomain.put("junit_task_2", "unittest2"); - createSubWorkflow(taskToDomain); - metadataService.getWorkflowDef(WF_WITH_SUB_WF, 1); - Map input = new HashMap<>(); - input.put("param1", "param 1 value"); - input.put("param3", "param 2 value"); - - input.put("wfName", LINEAR_WORKFLOW_T1_T2); - String workflowId = startOrLoadWorkflowExecution(WF_WITH_SUB_WF, 1, "test", input, null, null); - assertNotNull(workflowId); - - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - - Task task = workflowExecutionService.poll("junit_task_5", "test"); - assertNotNull(task); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertNotNull(workflow.getTasks()); - - // Simulating SystemTaskWorkerCoordinator to execute async system tasks - String subWorkflowTaskId = workflow.getTaskByRefName("a2").getTaskId(); - workflowExecutor.executeSystemTask(dummySubWorkflowSystemTask, subWorkflowTaskId, 1); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - - task = workflow.getTasks().stream().filter(t -> t.getTaskType().equals(SUB_WORKFLOW.name())).findAny().get(); - assertNotNull(task); - assertNotNull(task.getOutputData()); - 
assertNotNull("Output: " + task.getOutputData().toString() + ", status: " + task.getStatus(), task.getSubWorkflowId()); - assertNotNull(task.getInputData()); - assertTrue(task.getInputData().containsKey("workflowInput")); - assertEquals(42, ((Map) task.getInputData().get("workflowInput")).get("param2")); - String subWorkflowId = task.getSubWorkflowId(); - - Workflow subWorkflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); - assertNotNull(subWorkflow); - assertNotNull(subWorkflow.getTasks()); - assertEquals(workflowId, subWorkflow.getParentWorkflowId()); - assertEquals(RUNNING, subWorkflow.getStatus()); - - task = workflowExecutionService.poll("junit_task_1", "test", "unittest1"); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - task = workflowExecutionService.poll("junit_task_2", "test", "unittest2"); - assertEquals(subWorkflowId, task.getWorkflowInstanceId()); - String uuid = UUID.randomUUID().toString(); - task.getOutputData().put("uuid", uuid); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - subWorkflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); - assertNotNull(subWorkflow); - assertEquals(WorkflowStatus.COMPLETED, subWorkflow.getStatus()); - assertNotNull(subWorkflow.getOutput()); - assertTrue(subWorkflow.getOutput().containsKey("o1")); - assertTrue(subWorkflow.getOutput().containsKey("o2")); - assertEquals("sub workflow input param1", subWorkflow.getOutput().get("o1")); - assertEquals(uuid, subWorkflow.getOutput().get("o2")); - assertEquals(taskToDomain, subWorkflow.getTaskToDomain()); - - // Execute again to re-evaluate the Subworkflow task. - workflowExecutor.executeSystemTask(dummySubWorkflowSystemTask, subWorkflowTaskId, 1); - - task = workflowExecutionService.poll("junit_task_6", "test"); - assertNotNull(task); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); - } - - @Test - public void testSubWorkflowTaskToDomainWildcard() { - Map taskToDomain = new HashMap<>(); - taskToDomain.put("*", "unittest"); - createSubWorkflow(taskToDomain); - metadataService.getWorkflowDef(WF_WITH_SUB_WF, 1); - Map input = new HashMap<>(); - input.put("param1", "param 1 value"); - input.put("param3", "param 2 value"); - - input.put("wfName", LINEAR_WORKFLOW_T1_T2); - String workflowId = startOrLoadWorkflowExecution(WF_WITH_SUB_WF, 1, "test", input, null, null); - assertNotNull(workflowId); - - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - - Task task = workflowExecutionService.poll("junit_task_5", "test"); - assertNotNull(task); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertNotNull(workflow.getTasks()); - - // Simulating SystemTaskWorkerCoordinator to execute async system tasks - String subWorkflowTaskId = workflow.getTaskByRefName("a2").getTaskId(); - workflowExecutor.executeSystemTask(dummySubWorkflowSystemTask, subWorkflowTaskId, 1); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - - task = workflow.getTasks().stream().filter(t -> t.getTaskType().equals(SUB_WORKFLOW.name())).findAny().get(); - 
assertNotNull(task); - assertNotNull(task.getOutputData()); - assertNotNull("Output: " + task.getOutputData().toString() + ", status: " + task.getStatus(), task.getSubWorkflowId()); - assertNotNull(task.getInputData()); - assertTrue(task.getInputData().containsKey("workflowInput")); - assertEquals(42, ((Map) task.getInputData().get("workflowInput")).get("param2")); - String subWorkflowId = task.getSubWorkflowId(); - - Workflow subWorkflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); - assertNotNull(subWorkflow); - assertNotNull(subWorkflow.getTasks()); - assertEquals(workflowId, subWorkflow.getParentWorkflowId()); - assertEquals(RUNNING, subWorkflow.getStatus()); - - task = workflowExecutionService.poll("junit_task_1", "test", "unittest"); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - task = workflowExecutionService.poll("junit_task_2", "test", "unittest"); - assertEquals(subWorkflowId, task.getWorkflowInstanceId()); - String uuid = UUID.randomUUID().toString(); - task.getOutputData().put("uuid", uuid); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - subWorkflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); - assertNotNull(subWorkflow); - assertEquals(WorkflowStatus.COMPLETED, subWorkflow.getStatus()); - assertNotNull(subWorkflow.getOutput()); - assertTrue(subWorkflow.getOutput().containsKey("o1")); - assertTrue(subWorkflow.getOutput().containsKey("o2")); - assertEquals("sub workflow input param1", subWorkflow.getOutput().get("o1")); - assertEquals(uuid, subWorkflow.getOutput().get("o2")); - assertEquals(taskToDomain, subWorkflow.getTaskToDomain()); - - // Execute again to re-evaluate the Subworkflow task. - workflowExecutor.executeSystemTask(dummySubWorkflowSystemTask, subWorkflowTaskId, 1); - - task = workflowExecutionService.poll("junit_task_6", "test"); - assertNotNull(task); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); - } - - /** - * This test verifies that a Subworkflow with Terminate task calls decide on parent, and helps progress it immediately. 
- */ - @Test - public void testTerminateTaskInASubworkflow() { - WorkflowDef subWorkflowDef = new WorkflowDef(); - subWorkflowDef.setName("test_terminate_task_wf"); - subWorkflowDef.setSchemaVersion(2); - subWorkflowDef.setVersion(1); - - Map lambdaTaskInputParams = new HashMap<>(); - lambdaTaskInputParams.put("input", "${workflow.input}"); - lambdaTaskInputParams.put("scriptExpression", "if ($.input.a==1){return {testvalue: true}} else{return {testvalue: false}}"); - - WorkflowTask lambdaWorkflowTask = new WorkflowTask(); - lambdaWorkflowTask.setWorkflowTaskType(TaskType.LAMBDA); - lambdaWorkflowTask.setName("lambda"); - lambdaWorkflowTask.setInputParameters(lambdaTaskInputParams); - lambdaWorkflowTask.setTaskReferenceName("lambda0"); - - Map terminateTaskInputParams = new HashMap<>(); - terminateTaskInputParams.put(Terminate.getTerminationStatusParameter(), "COMPLETED"); - terminateTaskInputParams.put(Terminate.getTerminationWorkflowOutputParameter(), "${lambda0.output}"); - - WorkflowTask terminateWorkflowTask = new WorkflowTask(); - terminateWorkflowTask.setType(TaskType.TASK_TYPE_TERMINATE); - terminateWorkflowTask.setName("terminate"); - terminateWorkflowTask.setInputParameters(terminateTaskInputParams); - terminateWorkflowTask.setTaskReferenceName("terminate0"); - - WorkflowTask workflowTask2 = new WorkflowTask(); - workflowTask2.setName("junit_task_2"); - workflowTask2.setTaskReferenceName("t2"); - - subWorkflowDef.getTasks().addAll(Arrays.asList(lambdaWorkflowTask, terminateWorkflowTask, workflowTask2)); - - assertNotNull(subWorkflowDef); - metadataService.registerWorkflowDef(subWorkflowDef); - - // Create Parent workflow - WorkflowDef parentWorkflowDef = new WorkflowDef(); - parentWorkflowDef.setName("test_parent_wf_for_terminate_task_subwf"); - parentWorkflowDef.setSchemaVersion(2); - - WorkflowTask subWorkflowTask = new WorkflowTask(); - subWorkflowTask.setWorkflowTaskType(SUB_WORKFLOW); - subWorkflowTask.setName("subWF"); - subWorkflowTask.setTaskReferenceName("subWF"); - SubWorkflowParams subWorkflowParams = new SubWorkflowParams(); - subWorkflowParams.setName(subWorkflowDef.getName()); - subWorkflowParams.setVersion(subWorkflowDef.getVersion()); - subWorkflowTask.setSubWorkflowParam(subWorkflowParams); - - parentWorkflowDef.getTasks().addAll(Arrays.asList(subWorkflowTask)); - - assertNotNull(parentWorkflowDef); - metadataService.registerWorkflowDef(parentWorkflowDef); - - Map wfInput = Collections.singletonMap("a", 1); - String workflowId = startOrLoadWorkflowExecution(parentWorkflowDef.getName(), parentWorkflowDef.getVersion(), "", wfInput, null, null); - Workflow workflow = workflowExecutor.getWorkflow(workflowId, true); - - assertNotNull(workflow); - assertEquals(1, workflow.getTasks().size()); - - SubWorkflow subWorkflowSystemTask = new SubWorkflow(); - // Simulating SystemTaskWorkerCoordinator to execute async system tasks - String subWorkflowTaskId = workflow.getTaskByRefName("subWF").getTaskId(); - workflowExecutor.executeSystemTask(subWorkflowSystemTask, subWorkflowTaskId, 1); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - Task task = workflow.getTaskByRefName("subWF"); - - Workflow subWorkflow = workflowExecutionService.getExecutionStatus(task.getSubWorkflowId(), true); - - assertNotNull(workflow); - assertNotNull(task); - assertEquals(COMPLETED, task.getStatus()); - assertNotNull(subWorkflow); - assertEquals("tasks:" + workflow.getTasks(), WorkflowStatus.COMPLETED, workflow.getStatus()); - assertEquals("tasks:" + 
subWorkflow.getTasks(), WorkflowStatus.COMPLETED, subWorkflow.getStatus()); - assertEquals(TaskType.TASK_TYPE_LAMBDA, subWorkflow.getTasks().get(0).getTaskType()); - assertEquals(TaskType.TASK_TYPE_TERMINATE, subWorkflow.getTasks().get(1).getTaskType()); - assertEquals(subWorkflow.getTasks().get(1).getOutputData(), subWorkflow.getOutput()); - assertEquals(SUB_WORKFLOW.name(), workflow.getTasks().get(0).getTaskType()); - - metadataService.unregisterWorkflowDef(parentWorkflowDef.getName(), parentWorkflowDef.getVersion()); - metadataService.unregisterWorkflowDef(subWorkflowDef.getName(), subWorkflowDef.getVersion()); - } - - @Test - public void testPollWithConcurrentExecutionLimits() { - // Create a dynamic workflow definition with one simple task - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("test_concurrency_limits"); - workflowDef.setVersion(1); - - TaskDef taskDef = new TaskDef(); - taskDef.setName("test_task_with_cl"); - taskDef.setConcurrentExecLimit(1); - - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setTaskReferenceName("test_task_with_cl"); - workflowTask.setName("test_task_with_cl"); - workflowTask.setType("SIMPLE"); - workflowTask.setTaskDefinition(taskDef); - - workflowDef.setTasks(Arrays.asList(workflowTask)); - - String workflowInstanceId1 = workflowExecutor.startWorkflow(workflowDef, new HashMap<>(), - "", - "", - 0, - "", - "", - "", - new HashMap<>()); - - assertNotNull(workflowInstanceId1); - - Workflow workflow1 = workflowExecutionService.getExecutionStatus(workflowInstanceId1, true); - assertNotNull(workflow1); - assertEquals(RUNNING, workflow1.getStatus()); - assertEquals(1, workflow1.getTasks().size()); //The very first task is the one that should be scheduled. - - // Polling for the first task - Task task = workflowExecutionService.poll("test_task_with_cl", "test.worker"); - assertNotNull(task); - assertEquals("test_task_with_cl", task.getTaskType()); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - assertEquals(workflowInstanceId1, task.getWorkflowInstanceId()); - - String workflowInstanceId2 = workflowExecutor.startWorkflow(workflowDef, new HashMap<>(), - "", - "", - 0, - "", - "", - "", - new HashMap<>()); - - assertNotNull(workflowInstanceId2); - - Workflow workflow2 = workflowExecutionService.getExecutionStatus(workflowInstanceId2, true); - assertNotNull(workflow2); - assertEquals(RUNNING, workflow2.getStatus()); - assertEquals(1, workflow2.getTasks().size()); //The very first task is the one that should be scheduled. - - // Polling for the second task - Task task2 = workflowExecutionService.poll("test_task_with_cl", "test.worker"); - assertNull("Polling for the task shouldn't return anything, as concurrency limit is met.", task2); - - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - // Reset task offset time to make it available for poll again. 
- queueDAO.resetOffsetTime("test_task_with_cl", workflow2.getTasks().get(0).getTaskId()); - - // Polling for the second task - task2 = workflowExecutionService.poll("test_task_with_cl", "test.worker"); - assertNotNull(task2); - assertEquals("test_task_with_cl", task2.getTaskType()); - assertTrue(workflowExecutionService.ackTaskReceived(task2.getTaskId())); - assertEquals(workflowInstanceId2, task2.getWorkflowInstanceId()); - } - - private void createSubWorkflow() { - createSubWorkflow(null); - } - - private void createSubWorkflow(Map subWorkflowTaskToDomain) { - - WorkflowTask wft1 = new WorkflowTask(); - wft1.setName("junit_task_5"); - Map ip1 = new HashMap<>(); - ip1.put("p1", "${workflow.input.param1}"); - ip1.put("p2", "${workflow.input.param2}"); - wft1.setInputParameters(ip1); - wft1.setTaskReferenceName("a1"); - - WorkflowTask wft2 = new WorkflowTask(); - wft2.setName("subWorkflowTask"); - wft2.setType(SUB_WORKFLOW.name()); - SubWorkflowParams swp = new SubWorkflowParams(); - swp.setName(LINEAR_WORKFLOW_T1_T2); - swp.setTaskToDomain(subWorkflowTaskToDomain); - wft2.setSubWorkflowParam(swp); - Map ip2 = new HashMap<>(); - ip2.put("test", "test value"); - ip2.put("param1", "sub workflow input param1"); - ip2.put("param2", 42); - wft2.setInputParameters(ip2); - wft2.setTaskReferenceName("a2"); - - WorkflowTask wft3 = new WorkflowTask(); - wft3.setName("junit_task_6"); - Map ip3 = new HashMap<>(); - ip3.put("p1", "${workflow.input.param1}"); - ip3.put("p2", "${workflow.input.param2}"); - wft3.setInputParameters(ip3); - wft3.setTaskReferenceName("a3"); - - WorkflowDef main = new WorkflowDef(); - main.setSchemaVersion(2); - main.setInputParameters(Arrays.asList("param1", "param2")); - main.setName(WF_WITH_SUB_WF); - main.getTasks().addAll(Arrays.asList(wft1, wft2, wft3)); - - metadataService.updateWorkflowDef(Collections.singletonList(main)); - - } - - private void verify(String inputParam1, String wfid, String task1Op, boolean fail) { - Task task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - String task2Input = (String) task.getInputData().get("tp2"); - assertNotNull(task2Input); - assertEquals(task1Op, task2Input); - task2Input = (String) task.getInputData().get("tp1"); - assertNotNull(task2Input); - assertEquals(inputParam1, task2Input); - if (fail) { - task.setStatus(FAILED); - task.setReasonForIncompletion("failure...0"); - } else { - task.setStatus(COMPLETED); - } - - workflowExecutionService.updateTask(task); - - Workflow es = workflowExecutionService.getExecutionStatus(wfid, false); - assertNotNull(es); - if (fail) { - assertEquals(RUNNING, es.getStatus()); - } else { - assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); - } - } - - @Before - public void flushAllTaskQueues() { - queueDAO.queuesDetail().keySet().forEach(queueName -> { - queueDAO.flush(queueName); - }); - - if (taskDefs == null) { - return; - } - for (TaskDef td : taskDefs) { - queueDAO.flush(td.getName()); - } - } - - private void createWorkflow_TaskSubworkflowTask() { - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName(WF_T1_SWF_T2); - workflowDef.setDescription(workflowDef.getName()); - workflowDef.setVersion(1); - workflowDef.setInputParameters(Arrays.asList("param1", "param2")); - Map outputParameters = new HashMap<>(); - outputParameters.put("o3", "${t1.output.op}"); - workflowDef.setOutputParameters(outputParameters); - workflowDef.setSchemaVersion(2); - LinkedList 
wftasks = new LinkedList<>(); - - WorkflowTask wft1 = new WorkflowTask(); - wft1.setName("junit_task_1"); - Map ip1 = new HashMap<>(); - ip1.put("tp11", "${workflow.input.param1}"); - ip1.put("tp12", "${workflow.input.param2}"); - wft1.setInputParameters(ip1); - wft1.setTaskReferenceName("t1"); - - // create the sub-workflow def - WorkflowDef subWorkflowDef = new WorkflowDef(); - subWorkflowDef.setName("one_task_workflow"); - subWorkflowDef.setVersion(1); - subWorkflowDef.setInputParameters(Arrays.asList("imageType", "op")); - outputParameters = new HashMap<>(); - outputParameters.put("op", "${t3.output.op}"); - subWorkflowDef.setOutputParameters(outputParameters); - subWorkflowDef.setSchemaVersion(2); - LinkedList subWfTasks = new LinkedList<>(); - - WorkflowTask wft3 = new WorkflowTask(); - wft3.setName("junit_task_3"); - Map ip3 = new HashMap<>(); - ip3.put("p1", "${workflow.input.imageType}"); - wft3.setInputParameters(ip3); - wft3.setTaskReferenceName("t3"); - - subWfTasks.add(wft3); - subWorkflowDef.setTasks(subWfTasks); - metadataService.updateWorkflowDef(subWorkflowDef); - - // create the sub workflow task - WorkflowTask subWorkflow = new WorkflowTask(); - subWorkflow.setType(SUB_WORKFLOW.name()); - SubWorkflowParams sw = new SubWorkflowParams(); - sw.setName("one_task_workflow"); - subWorkflow.setSubWorkflowParam(sw); - subWorkflow.setTaskReferenceName("swt"); - Map ipsw = new HashMap<>(); - ipsw.put("imageType", "${t1.output.imageType}"); - ipsw.put("op", "${t1.output.op}"); - subWorkflow.setInputParameters(ipsw); - - WorkflowTask wft2 = new WorkflowTask(); - wft2.setName("junit_task_2"); - Map ip2 = new HashMap<>(); - ip2.put("op", "${t1.output.op}"); - wft2.setInputParameters(ip2); - wft2.setTaskReferenceName("t2"); - - wftasks.add(wft1); - wftasks.add(subWorkflow); - wftasks.add(wft2); - workflowDef.setTasks(wftasks); - - metadataService.updateWorkflowDef(workflowDef); - } - - private void createWorkflowWithSubWorkflow() { - WorkflowDef defSW = new WorkflowDef(); - defSW.setName(LINEAR_WORKFLOW_T1_T2_SW); - defSW.setDescription(defSW.getName()); - defSW.setVersion(1); - defSW.setInputParameters(Arrays.asList("param1", "param2")); - Map outputParameters = new HashMap<>(); - outputParameters.put("o1", "${workflow.input.param1}"); - outputParameters.put("o2", "${t2.output.uuid}"); - outputParameters.put("o3", "${t1.output.op}"); - defSW.setOutputParameters(outputParameters); - defSW.setFailureWorkflow("$workflow.input.failureWfName"); - defSW.setSchemaVersion(2); - LinkedList wftasks = new LinkedList<>(); - - WorkflowTask wft1 = new WorkflowTask(); - wft1.setName("junit_task_3"); - Map ip1 = new HashMap<>(); - ip1.put("p1", "${workflow.input.param1}"); - ip1.put("p2", "${workflow.input.param2}"); - wft1.setInputParameters(ip1); - wft1.setTaskReferenceName("t1"); - - WorkflowTask subWorkflow = new WorkflowTask(); - subWorkflow.setType(SUB_WORKFLOW.name()); - SubWorkflowParams sw = new SubWorkflowParams(); - sw.setName(LINEAR_WORKFLOW_T1_T2); - subWorkflow.setSubWorkflowParam(sw); - subWorkflow.setTaskReferenceName("sw1"); - Map ip2 = new HashMap<>(); - ip2.put("tp1", "${workflow.input.param1}"); - ip2.put("tp2", "${t1.output.op}"); - subWorkflow.setInputParameters(ip2); - - wftasks.add(wft1); - wftasks.add(subWorkflow); - defSW.setTasks(wftasks); - - try { - metadataService.updateWorkflowDef(defSW); - } catch (Exception e) { - } - } - - private void createConditionalWFWithSystemTask() { - WorkflowDef defConditionalHttp = new WorkflowDef(); - 
defConditionalHttp.setName(CONDITIONAL_SYSTEM_WORKFLOW); - defConditionalHttp.setDescription(defConditionalHttp.getName()); - defConditionalHttp.setVersion(1); - defConditionalHttp.setInputParameters(Arrays.asList("param1", "param2")); - Map outputParameters = new HashMap<>(); - outputParameters.put("o2", "${t1.output.op}"); - defConditionalHttp.setOutputParameters(outputParameters); - defConditionalHttp.setSchemaVersion(2); - LinkedList wftasks = new LinkedList<>(); - - WorkflowTask wft1 = new WorkflowTask(); - wft1.setName("junit_task_1"); - Map ip1 = new HashMap<>(); - ip1.put("tp11", "${workflow.input.param1}"); - ip1.put("tp12", "${workflow.input.param2}"); - wft1.setInputParameters(ip1); - wft1.setTaskReferenceName("t1"); - - TaskDef taskDef = new TaskDef(); - taskDef.setName("user_task"); - taskDef.setTimeoutSeconds(20); - taskDef.setRetryCount(1); - taskDef.setTimeoutPolicy(TimeoutPolicy.RETRY); - taskDef.setRetryDelaySeconds(10); - metadataService.registerTaskDef(Collections.singletonList(taskDef)); - - WorkflowTask userTask = new WorkflowTask(); - userTask.setName(taskDef.getName()); - userTask.setType(UserTask.NAME); - Map userIP = new HashMap<>(); - userIP.put("largeInput", "${t1.output.op}"); - userTask.setInputParameters(userIP); - userTask.setTaskReferenceName("user_task"); - - WorkflowTask wft2 = new WorkflowTask(); - wft2.setName("junit_task_2"); - Map ip2 = new HashMap<>(); - ip2.put("tp21", "${workflow.input.param1}"); - wft2.setInputParameters(ip2); - wft2.setTaskReferenceName("t2"); - - WorkflowTask decisionTask = new WorkflowTask(); - decisionTask.setType(TaskType.DECISION.name()); - decisionTask.setCaseValueParam("case"); - decisionTask.setName("conditional2"); - decisionTask.setTaskReferenceName("conditional2"); - Map decisionIP = new HashMap<>(); - decisionIP.put("case", "${t1.output.case}"); - decisionTask.setInputParameters(decisionIP); - Map> decisionCases = new HashMap<>(); - decisionCases.put("one", Collections.singletonList(wft2)); - decisionCases.put("two", Collections.singletonList(userTask)); - decisionTask.setDecisionCases(decisionCases); - - WorkflowTask wft3 = new WorkflowTask(); - wft3.setName("junit_task_3"); - Map ip3 = new HashMap<>(); - ip3.put("tp31", "${workflow.input.param2}"); - wft3.setInputParameters(ip3); - wft3.setTaskReferenceName("t3"); - - wftasks.add(wft1); - wftasks.add(decisionTask); - wftasks.add(wft3); - defConditionalHttp.setTasks(wftasks); - - metadataService.updateWorkflowDef(defConditionalHttp); - } - - private void createWFWithResponseTimeout() { - TaskDef task = new TaskDef(); - task.setName("task_rt"); - task.setTimeoutSeconds(120); - task.setRetryCount(RETRY_COUNT); - task.setRetryDelaySeconds(0); - task.setResponseTimeoutSeconds(10); - metadataService.registerTaskDef(Collections.singletonList(task)); - - WorkflowDef def = new WorkflowDef(); - def.setName("RTOWF"); - def.setDescription(def.getName()); - def.setVersion(1); - def.setInputParameters(Arrays.asList("param1", "param2")); - Map outputParameters = new HashMap<>(); - outputParameters.put("o1", "${workflow.input.param1}"); - outputParameters.put("o2", "${t2.output.uuid}"); - outputParameters.put("o3", "${t1.output.op}"); - def.setOutputParameters(outputParameters); - def.setFailureWorkflow("$workflow.input.failureWfName"); - def.setSchemaVersion(2); - LinkedList wftasks = new LinkedList<>(); - - WorkflowTask wft1 = new WorkflowTask(); - wft1.setName("task_rt"); - Map ip1 = new HashMap<>(); - ip1.put("p1", "${workflow.input.param1}"); - ip1.put("p2", 
"${workflow.input.param2}"); - wft1.setInputParameters(ip1); - wft1.setTaskReferenceName("task_rt_t1"); - - WorkflowTask wft2 = new WorkflowTask(); - wft2.setName("junit_task_2"); - Map ip2 = new HashMap<>(); - ip2.put("tp1", "${workflow.input.param1}"); - ip2.put("tp2", "${t1.output.op}"); - wft2.setInputParameters(ip2); - wft2.setTaskReferenceName("t2"); - - wftasks.add(wft1); - wftasks.add(wft2); - def.setTasks(wftasks); - - metadataService.updateWorkflowDef(def); - } - - private void createWorkflowWthMultiLevelSubWorkflows() { - final String subWorkflowLevel1 = "junit_sw_level_1"; - final String subWorkflowLevel2 = "junit_sw_level_2"; - final String subWorkflowLevel3 = "junit_sw_level_3"; - - // level 3 - WorkflowDef workflowDef_level3 = new WorkflowDef(); - workflowDef_level3.setName(subWorkflowLevel3); - workflowDef_level3.setDescription(workflowDef_level3.getName()); - workflowDef_level3.setVersion(1); - workflowDef_level3.setSchemaVersion(2); - - LinkedList workflowTasks_level3 = new LinkedList<>(); - WorkflowTask simpleWorkflowTask = new WorkflowTask(); - simpleWorkflowTask.setName("junit_task_3"); - simpleWorkflowTask.setInputParameters(new HashMap<>()); - simpleWorkflowTask.setTaskReferenceName("t1"); - workflowTasks_level3.add(simpleWorkflowTask); - workflowDef_level3.setTasks(workflowTasks_level3); - - metadataService.updateWorkflowDef(workflowDef_level3); - - // level 2 - WorkflowDef workflowDef_level2 = new WorkflowDef(); - workflowDef_level2.setName(subWorkflowLevel2); - workflowDef_level2.setDescription(workflowDef_level2.getName()); - workflowDef_level2.setVersion(1); - workflowDef_level2.setSchemaVersion(2); - - LinkedList workflowTasks_level2 = new LinkedList<>(); - workflowTasks_level2.add(createSubWorkflowTask(subWorkflowLevel3)); - workflowDef_level2.setTasks(workflowTasks_level2); - - metadataService.updateWorkflowDef(workflowDef_level2); - - // level 1 - WorkflowDef workflowDef_level1 = new WorkflowDef(); - workflowDef_level1.setName(subWorkflowLevel1); - workflowDef_level1.setDescription(workflowDef_level1.getName()); - workflowDef_level1.setVersion(1); - workflowDef_level1.setSchemaVersion(2); - - LinkedList workflowTasks_level1 = new LinkedList<>(); - workflowTasks_level1.add(createSubWorkflowTask(subWorkflowLevel2)); - workflowDef_level1.setTasks(workflowTasks_level1); - - metadataService.updateWorkflowDef(workflowDef_level1); - - // top-level parent workflow - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName(WORKFLOW_MULTI_LEVEL_SW); - workflowDef.setDescription(workflowDef.getName()); - workflowDef.setVersion(1); - workflowDef.setInputParameters(Arrays.asList("param1", "param2")); - workflowDef.setSchemaVersion(2); - - LinkedList workflowTasks = new LinkedList<>(); - workflowTasks.add(createSubWorkflowTask(subWorkflowLevel1)); - workflowDef.setTasks(workflowTasks); - - metadataService.updateWorkflowDef(workflowDef); - } - - private WorkflowTask createSubWorkflowTask(String subWorkflowName) { - WorkflowTask subWorkflowTask = new WorkflowTask(); - subWorkflowTask.setType(SUB_WORKFLOW.name()); - SubWorkflowParams subWorkflowParams = new SubWorkflowParams(); - subWorkflowParams.setName(subWorkflowName); - subWorkflowTask.setSubWorkflowParam(subWorkflowParams); - subWorkflowTask.setTaskReferenceName(subWorkflowName + "_task"); - return subWorkflowTask; - } - - private void createForkJoinWorkflowWithOptionalSubworkflowForks() { - String taskName = "simple_task_in_sub_wf"; - TaskDef task = new TaskDef(); - task.setName(taskName); - 
task.setRetryCount(0); - metadataService.registerTaskDef(Collections.singletonList(task)); - - // sub workflow - WorkflowDef subworkflow_def = new WorkflowDef(); - subworkflow_def.setName("sub_workflow"); - subworkflow_def.setDescription(subworkflow_def.getName()); - subworkflow_def.setVersion(1); - subworkflow_def.setSchemaVersion(2); - - LinkedList subworkflowDef_Task = new LinkedList<>(); - WorkflowTask simpleTask = new WorkflowTask(); - simpleTask.setName(taskName); - simpleTask.setInputParameters(new HashMap<>()); - simpleTask.setTaskReferenceName("t1"); - subworkflowDef_Task.add(simpleTask); - subworkflow_def.setTasks(subworkflowDef_Task); - - metadataService.updateWorkflowDef(subworkflow_def); - - // parent workflow - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName(WORKFLOW_FORK_JOIN_OPTIONAL_SW); - workflowDef.setDescription(workflowDef.getName()); - workflowDef.setVersion(1); - workflowDef.setInputParameters(Arrays.asList("param1", "param2")); - - // fork task - WorkflowTask fanoutTask = new WorkflowTask(); - fanoutTask.setType(TaskType.FORK_JOIN.name()); - fanoutTask.setTaskReferenceName("fanouttask"); - - // sub workflow tasks - WorkflowTask subWorkflowTask1 = new WorkflowTask(); - subWorkflowTask1.setType(SUB_WORKFLOW.name()); - SubWorkflowParams subWorkflowParams = new SubWorkflowParams(); - subWorkflowParams.setName("sub_workflow"); - subWorkflowTask1.setSubWorkflowParam(subWorkflowParams); - subWorkflowTask1.setTaskReferenceName("st1"); - subWorkflowTask1.setOptional(true); - - WorkflowTask subWorkflowTask2 = new WorkflowTask(); - subWorkflowTask2.setType(SUB_WORKFLOW.name()); - subWorkflowParams = new SubWorkflowParams(); - subWorkflowParams.setName("sub_workflow"); - subWorkflowTask2.setSubWorkflowParam(subWorkflowParams); - subWorkflowTask2.setTaskReferenceName("st2"); - subWorkflowTask2.setOptional(true); - - // join task - WorkflowTask joinTask = new WorkflowTask(); - joinTask.setType(TaskType.JOIN.name()); - joinTask.setTaskReferenceName("fanouttask_join"); - joinTask.setJoinOn(Arrays.asList("st1", "st2")); - - fanoutTask.getForkTasks().add(Collections.singletonList(subWorkflowTask1)); - fanoutTask.getForkTasks().add(Collections.singletonList(subWorkflowTask2)); - - workflowDef.getTasks().add(fanoutTask); - workflowDef.getTasks().add(joinTask); - metadataService.updateWorkflowDef(workflowDef); - } - - private String runWorkflowWithSubworkflow() throws Exception { - clearWorkflows(); - createWorkflowWithSubWorkflow(); - - metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2_SW, 1); - - String correlationId = "unit_test_sw"; - Map input = new HashMap<>(); - String inputParam1 = "p1 value"; - input.put("param1", inputParam1); - input.put("param2", "p2 value"); - - String workflowId = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2_SW, 1, correlationId, input, null, null); - System.out.println("testSimpleWorkflow.wfid=" + workflowId); - assertNotNull(workflowId); - - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - assertEquals(1, workflow.getTasks().size()); //The very first task is the one that should be scheduled. 
- - // Poll for first task and execute it - Task task = workflowExecutionService.poll("junit_task_3", "task3.junit.worker"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - task.getOutputData().put("op", "junit_task_3.done"); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - assertEquals(2, workflow.getTasks().size()); - - // Simulating SystemTaskWorkerCoordinator to execute async system tasks - String subWorkflowTaskId = workflow.getTaskByRefName("sw1").getTaskId(); - workflowExecutor.executeSystemTask(dummySubWorkflowSystemTask, subWorkflowTaskId, 1); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - - // Get the sub workflow id - String subWorkflowId = null; - for (Task t : workflow.getTasks()) { - if (t.getTaskType().equalsIgnoreCase("SUB_WORKFLOW")) { - subWorkflowId = t.getSubWorkflowId(); - } - } - assertNotNull(subWorkflowId); - - Workflow subWorkflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); - assertNotNull(subWorkflow); - assertEquals(RUNNING, subWorkflow.getStatus()); - assertEquals(1, subWorkflow.getTasks().size()); - - // Now the Sub workflow is triggered - // Poll for first task of the sub workflow and execute it - task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - task.getOutputData().put("op", "junit_task_1.done"); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - subWorkflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); - assertNotNull(subWorkflow); - assertEquals(RUNNING, subWorkflow.getStatus()); - assertEquals(2, subWorkflow.getTasks().size()); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(RUNNING, workflow.getStatus()); - assertEquals(2, workflow.getTasks().size()); - - // Poll for second task of the sub workflow and execute it - task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - task.getOutputData().put("op", "junit_task_2.done"); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - // Now the sub workflow and the main workflow must have finished - subWorkflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); - assertNotNull(subWorkflow); - assertEquals(WorkflowStatus.COMPLETED, subWorkflow.getStatus()); - assertEquals(2, subWorkflow.getTasks().size()); - - // Execute again to re-evaluate the Subworkflow task. 
- workflowExecutor.executeSystemTask(dummySubWorkflowSystemTask, subWorkflowTaskId, 1); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); - assertEquals(2, workflow.getTasks().size()); - - return workflowId; - } - - private String runAFailedForkJoinWF() throws Exception { - try { - this.createForkJoinWorkflowWithZeroRetry(); - } catch (Exception e) { - } - - Map input = new HashMap<>(); - String workflowId = startOrLoadWorkflowExecution(FORK_JOIN_WF + "_2", 1, "fanouttest", input, null, null); - System.out.println("testForkJoin.wfid=" + workflowId); - Task t1 = workflowExecutionService.poll("junit_task_0_RT_1", "test"); - assertTrue(workflowExecutionService.ackTaskReceived(t1.getTaskId())); - - Task t2 = workflowExecutionService.poll("junit_task_0_RT_2", "test"); - assertTrue(workflowExecutionService.ackTaskReceived(t2.getTaskId())); - assertNotNull(t1); - assertNotNull(t2); - - t1.setStatus(COMPLETED); - workflowExecutionService.updateTask(t1); - - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals("Found " + workflow.getTasks(), RUNNING, workflow.getStatus()); - printTaskStatuses(workflow, "Initial"); - - t2.setStatus(FAILED); - - ExecutorService executorService = Executors.newFixedThreadPool(2); - Future future1 = executorService.submit(() -> { - try { - workflowExecutionService.updateTask(t2); - } catch (Exception e) { - throw new RuntimeException(e); - } - - }); - future1.get(); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.FAILED, workflow.getStatus()); - - return workflowId; - } - - private String runAFailedDoWhileWF() throws Exception { - try { - this.createForkJoinWorkflowWithZeroRetry(); - } catch (Exception e) { - } - - Map input = new HashMap<>(); - String workflowId = startOrLoadWorkflowExecution(FORK_JOIN_WF + "_2", 1, "fanouttest", input, null, null); - System.out.println("testForkJoin.wfid=" + workflowId); - Task t1 = workflowExecutionService.poll("junit_task_0_RT_1", "test"); - assertTrue(workflowExecutionService.ackTaskReceived(t1.getTaskId())); - - Task t2 = workflowExecutionService.poll("junit_task_0_RT_2", "test"); - assertTrue(workflowExecutionService.ackTaskReceived(t2.getTaskId())); - assertNotNull(t1); - assertNotNull(t2); - - t1.setStatus(COMPLETED); - workflowExecutionService.updateTask(t1); - - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals("Found " + workflow.getTasks(), RUNNING, workflow.getStatus()); - printTaskStatuses(workflow, "Initial"); - - t2.setStatus(FAILED); - workflowExecutionService.updateTask(t2); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.FAILED, workflow.getStatus()); - - return workflowId; - } - - private void printTaskStatuses(String wfid, String message) { - Workflow wf = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(wf); - printTaskStatuses(wf, message); - } - - private String startOrLoadWorkflowExecution(String workflowName, int version, String correlationId, Map input, String event, Map taskToDomain) { - return startOrLoadWorkflowExecution(workflowName, workflowName, version, correlationId, input, event, taskToDomain); - } - - abstract String 
startOrLoadWorkflowExecution(String snapshotResourceName, String workflowName, int version, String correlationId, Map input, String event, Map taskToDomain); - - private boolean printWFTaskDetails = false; - - private void printTaskStatuses(Workflow wf, String message) { - if (printWFTaskDetails) { - System.out.println(message + " >>> Workflow status " + wf.getStatus().name()); - wf.getTasks().forEach(t -> { - System.out.println("Task " + String.format("%-15s", t.getTaskType()) + "\t" + String.format("%-15s", t.getReferenceTaskName()) + "\t" + String.format("%-15s", t.getWorkflowTask().getType()) + "\t" + t.getSeq() + "\t" + t.getStatus() + "\t" + t.getTaskId()); - }); - System.out.println(); - } - } -} diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/ESRestClientHttpEndToEndTest.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/ESRestClientHttpEndToEndTest.java deleted file mode 100644 index 0d84713045..0000000000 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/ESRestClientHttpEndToEndTest.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Copyright 2016 Netflix, Inc. - *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.tests.integration; - -import com.google.inject.Guice; -import com.google.inject.Injector; -import com.netflix.conductor.bootstrap.BootstrapModule; -import com.netflix.conductor.bootstrap.ModulesProvider; -import com.netflix.conductor.client.http.MetadataClient; -import com.netflix.conductor.client.http.TaskClient; -import com.netflix.conductor.client.http.WorkflowClient; -import com.netflix.conductor.elasticsearch.ElasticSearchConfiguration; -import com.netflix.conductor.elasticsearch.ElasticSearchRestClientProvider; -import com.netflix.conductor.elasticsearch.EmbeddedElasticSearchProvider; -import com.netflix.conductor.elasticsearch.SystemPropertiesElasticSearchConfiguration; -import com.netflix.conductor.jetty.server.JettyServer; -import com.netflix.conductor.tests.utils.TestEnvironment; -import java.util.HashMap; -import java.util.Map; -import org.elasticsearch.client.RestClient; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class ESRestClientHttpEndToEndTest extends AbstractHttpEndToEndTest { - - private static final Logger logger = - LoggerFactory.getLogger(ESRestClientHttpEndToEndTest.class); - - private static final int SERVER_PORT = 8083; - - private static RestClient elasticSearchAdminClient; - - @BeforeClass - public static void setup() throws Exception { - TestEnvironment.setup(); - System.setProperty(ElasticSearchConfiguration.EMBEDDED_PORT_PROPERTY_NAME, "9203"); - System.setProperty(ElasticSearchConfiguration.ELASTIC_SEARCH_URL_PROPERTY_NAME, "http://localhost:9203"); - System.setProperty(ElasticSearchConfiguration.ELASTIC_SEARCH_INDEX_BATCH_SIZE_PROPERTY_NAME, "1"); - - Injector bootInjector = Guice.createInjector(new BootstrapModule()); - Injector serverInjector = Guice.createInjector(bootInjector.getInstance(ModulesProvider.class).get()); - - search = serverInjector.getInstance(EmbeddedElasticSearchProvider.class).get().get(); - search.start(); - - SystemPropertiesElasticSearchConfiguration configuration = new SystemPropertiesElasticSearchConfiguration(); - ElasticSearchRestClientProvider restClientProvider = new ElasticSearchRestClientProvider(configuration); - elasticSearchAdminClient = restClientProvider.get(); - - waitForGreenCluster(); - - JettyServer server = new JettyServer(SERVER_PORT, false); - server.start(); - - apiRoot = String.format("http://localhost:%d/api/", SERVER_PORT); - - taskClient = new TaskClient(); - taskClient.setRootURI(apiRoot); - - workflowClient = new WorkflowClient(); - workflowClient.setRootURI(apiRoot); - - metadataClient = new MetadataClient(); - metadataClient.setRootURI(apiRoot); - } - - @AfterClass - public static void teardown() throws Exception { - TestEnvironment.teardown(); - search.stop(); - } - - private static void waitForGreenCluster() throws Exception { - long startTime = System.currentTimeMillis(); - - Map params = new HashMap<>(); - params.put("wait_for_status", "green"); - params.put("timeout", "30s"); - - elasticSearchAdminClient.performRequest("GET", "/_cluster/health", params); - logger.info("Elasticsearch Cluster ready in {} ms", System.currentTimeMillis() - startTime); - } - -} diff 
--git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/ExclusiveJoinEndToEndTest.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/ExclusiveJoinEndToEndTest.java deleted file mode 100644 index 185add296d..0000000000 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/ExclusiveJoinEndToEndTest.java +++ /dev/null @@ -1,272 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.netflix.conductor.tests.integration; - -import com.google.inject.Guice; -import com.google.inject.Injector; -import com.netflix.conductor.bootstrap.BootstrapModule; -import com.netflix.conductor.bootstrap.ModulesProvider; -import com.netflix.conductor.client.http.MetadataClient; -import com.netflix.conductor.client.http.TaskClient; -import com.netflix.conductor.client.http.WorkflowClient; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.core.config.Configuration; -import com.netflix.conductor.elasticsearch.ElasticSearchConfiguration; -import com.netflix.conductor.elasticsearch.EmbeddedElasticSearch; -import com.netflix.conductor.elasticsearch.EmbeddedElasticSearchProvider; -import com.netflix.conductor.jetty.server.JettyServer; -import com.netflix.conductor.tests.integration.model.TaskWrapper; -import com.netflix.conductor.tests.utils.JsonUtils; -import com.netflix.conductor.tests.utils.TestEnvironment; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Ignore; -import org.junit.Test; - -import java.util.HashMap; -import java.util.Map; - -import static org.junit.Assert.assertEquals; - -@Ignore("This is taken care in the spock test, to de deleted once verified") -public class ExclusiveJoinEndToEndTest { - - private static TaskClient taskClient; - - private static WorkflowClient workflowClient; - - private static MetadataClient metadataClient; - - private static EmbeddedElasticSearch search; - - private static final int SERVER_PORT = 8093; - - private static String CONDUCTOR_WORKFLOW_DEF_NAME = "ExclusiveJoinTestWorkflow"; - - private static Map workflowInput = new HashMap<>(); - - private static Map taskOutput = new HashMap<>(); - - @BeforeClass - public static void setUp() throws Exception { - TestEnvironment.setup(); - System.setProperty(ElasticSearchConfiguration.EMBEDDED_PORT_PROPERTY_NAME, "9205"); - System.setProperty(ElasticSearchConfiguration.ELASTIC_SEARCH_URL_PROPERTY_NAME, "localhost:9305"); - System.setProperty(Configuration.EXECUTION_LOCK_ENABLED_PROPERTY_NAME, "false"); - - Injector bootInjector = Guice.createInjector(new BootstrapModule()); - Injector serverInjector = Guice.createInjector(bootInjector.getInstance(ModulesProvider.class).get()); - - search = 
serverInjector.getInstance(EmbeddedElasticSearchProvider.class).get().get(); - search.start(); - - JettyServer server = new JettyServer(SERVER_PORT, false); - server.start(); - String apiRoot = String.format("http://localhost:%d/api/", SERVER_PORT); - taskClient = new TaskClient(); - taskClient.setRootURI(apiRoot); - workflowClient = new WorkflowClient(); - workflowClient.setRootURI(apiRoot); - metadataClient = new MetadataClient(); - metadataClient.setRootURI(apiRoot); - } - - @Before - public void registerWorkflows() throws Exception { - registerWorkflowDefinitions(); - } - - @Test - public void testDecision1Default() { - workflowInput.put("decision_1", "null"); - - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest().withName(CONDUCTOR_WORKFLOW_DEF_NAME) - .withCorrelationId("").withInput(workflowInput).withVersion(1); - String wfInstanceId = workflowClient.startWorkflow(startWorkflowRequest); - - String taskId = taskClient.getPendingTaskForWorkflow(wfInstanceId, "task1").getTaskId(); - taskOutput.put("taskReferenceName", "task1"); - TaskResult taskResult = setTaskResult(wfInstanceId, taskId, TaskResult.Status.COMPLETED, taskOutput); - taskClient.updateTask(taskResult); - - Workflow workflow = workflowClient.getWorkflow(wfInstanceId, true); - String taskReferenceName = workflow.getTaskByRefName("exclusiveJoin").getOutputData().get("taskReferenceName") - .toString(); - - assertEquals("task1", taskReferenceName); - assertEquals(Workflow.WorkflowStatus.COMPLETED, workflow.getStatus()); - } - - @Test - public void testDecision1TrueAndDecision2Default() { - workflowInput.put("decision_1", "true"); - workflowInput.put("decision_2", "null"); - - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest().withName(CONDUCTOR_WORKFLOW_DEF_NAME) - .withCorrelationId("").withInput(workflowInput).withVersion(1); - String wfInstanceId = workflowClient.startWorkflow(startWorkflowRequest); - - String taskId = taskClient.getPendingTaskForWorkflow(wfInstanceId, "task1").getTaskId(); - taskOutput.put("taskReferenceName", "task1"); - TaskResult taskResult = setTaskResult(wfInstanceId, taskId, TaskResult.Status.COMPLETED, taskOutput); - taskClient.updateTask(taskResult); - - taskId = taskClient.getPendingTaskForWorkflow(wfInstanceId, "task2").getTaskId(); - taskOutput.put("taskReferenceName", "task2"); - taskResult = setTaskResult(wfInstanceId, taskId, TaskResult.Status.COMPLETED, taskOutput); - taskClient.updateTask(taskResult); - - Workflow workflow = workflowClient.getWorkflow(wfInstanceId, true); - String taskReferenceName = workflow.getTaskByRefName("exclusiveJoin").getOutputData().get("taskReferenceName") - .toString(); - - assertEquals("task2", taskReferenceName); - assertEquals(Workflow.WorkflowStatus.COMPLETED, workflow.getStatus()); - } - - @Test - public void testDecision1TrueAndDecision2True() { - workflowInput.put("decision_1", "true"); - workflowInput.put("decision_2", "true"); - - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest().withName(CONDUCTOR_WORKFLOW_DEF_NAME) - .withCorrelationId("").withInput(workflowInput).withVersion(1); - String wfInstanceId = workflowClient.startWorkflow(startWorkflowRequest); - - String taskId = taskClient.getPendingTaskForWorkflow(wfInstanceId, "task1").getTaskId(); - taskOutput.put("taskReferenceName", "task1"); - TaskResult taskResult = setTaskResult(wfInstanceId, taskId, TaskResult.Status.COMPLETED, taskOutput); - taskClient.updateTask(taskResult); - - taskId = 
taskClient.getPendingTaskForWorkflow(wfInstanceId, "task2").getTaskId(); - taskOutput.put("taskReferenceName", "task2"); - taskResult = setTaskResult(wfInstanceId, taskId, TaskResult.Status.COMPLETED, taskOutput); - taskClient.updateTask(taskResult); - - taskId = taskClient.getPendingTaskForWorkflow(wfInstanceId, "task3").getTaskId(); - taskOutput.put("taskReferenceName", "task3"); - taskResult = setTaskResult(wfInstanceId, taskId, TaskResult.Status.COMPLETED, taskOutput); - taskClient.updateTask(taskResult); - - Workflow workflow = workflowClient.getWorkflow(wfInstanceId, true); - String taskReferenceName = workflow.getTaskByRefName("exclusiveJoin").getOutputData().get("taskReferenceName") - .toString(); - - assertEquals("task3", taskReferenceName); - assertEquals(Workflow.WorkflowStatus.COMPLETED, workflow.getStatus()); - } - - @Test - public void testDecision1FalseAndDecision3Default() { - workflowInput.put("decision_1", "false"); - workflowInput.put("decision_3", "null"); - - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest().withName(CONDUCTOR_WORKFLOW_DEF_NAME) - .withCorrelationId("").withInput(workflowInput).withVersion(1); - String wfInstanceId = workflowClient.startWorkflow(startWorkflowRequest); - - String taskId = taskClient.getPendingTaskForWorkflow(wfInstanceId, "task1").getTaskId(); - taskOutput.put("taskReferenceName", "task1"); - TaskResult taskResult = setTaskResult(wfInstanceId, taskId, TaskResult.Status.COMPLETED, taskOutput); - taskClient.updateTask(taskResult); - - taskId = taskClient.getPendingTaskForWorkflow(wfInstanceId, "task4").getTaskId(); - taskOutput.put("taskReferenceName", "task4"); - taskResult = setTaskResult(wfInstanceId, taskId, TaskResult.Status.COMPLETED, taskOutput); - taskClient.updateTask(taskResult); - - Workflow workflow = workflowClient.getWorkflow(wfInstanceId, true); - String taskReferenceName = workflow.getTaskByRefName("exclusiveJoin").getOutputData().get("taskReferenceName") - .toString(); - - assertEquals("task4", taskReferenceName); - assertEquals(Workflow.WorkflowStatus.COMPLETED, workflow.getStatus()); - } - - @Test - public void testDecision1FalseAndDecision3True() { - workflowInput.put("decision_1", "false"); - workflowInput.put("decision_3", "true"); - - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest().withName(CONDUCTOR_WORKFLOW_DEF_NAME) - .withCorrelationId("").withInput(workflowInput).withVersion(1); - String wfInstanceId = workflowClient.startWorkflow(startWorkflowRequest); - - String taskId = taskClient.getPendingTaskForWorkflow(wfInstanceId, "task1").getTaskId(); - taskOutput.put("taskReferenceName", "task1"); - TaskResult taskResult = setTaskResult(wfInstanceId, taskId, TaskResult.Status.COMPLETED, taskOutput); - taskClient.updateTask(taskResult); - - taskId = taskClient.getPendingTaskForWorkflow(wfInstanceId, "task4").getTaskId(); - taskOutput.put("taskReferenceName", "task4"); - taskResult = setTaskResult(wfInstanceId, taskId, TaskResult.Status.COMPLETED, taskOutput); - taskClient.updateTask(taskResult); - - taskId = taskClient.getPendingTaskForWorkflow(wfInstanceId, "task5").getTaskId(); - taskOutput.put("taskReferenceName", "task5"); - taskResult = setTaskResult(wfInstanceId, taskId, TaskResult.Status.COMPLETED, taskOutput); - taskClient.updateTask(taskResult); - - Workflow workflow = workflowClient.getWorkflow(wfInstanceId, true); - String taskReferenceName = workflow.getTaskByRefName("exclusiveJoin").getOutputData().get("taskReferenceName") - .toString(); - - 
assertEquals("task5", taskReferenceName); - assertEquals(Workflow.WorkflowStatus.COMPLETED, workflow.getStatus()); - } - - private TaskResult setTaskResult(String workflowInstanceId, String taskId, TaskResult.Status status, - Map output) { - TaskResult taskResult = new TaskResult(); - taskResult.setTaskId(taskId); - taskResult.setWorkflowInstanceId(workflowInstanceId); - taskResult.setStatus(status); - taskResult.setOutputData(output); - return taskResult; - } - - private void registerWorkflowDefinitions() throws Exception { - TaskWrapper taskWrapper = JsonUtils.fromJson("integration/scenarios/legacy/ExclusiveJoinTaskDef.json", TaskWrapper.class); - metadataClient.registerTaskDefs(taskWrapper.getTaskDefs()); - - WorkflowDef conductorWorkflowDef = JsonUtils.fromJson("integration/scenarios/legacy/ExclusiveJoinWorkflowDef.json", - WorkflowDef.class); - metadataClient.registerWorkflowDef(conductorWorkflowDef); - } - - private void unRegisterWorkflowDefinitions() throws Exception { - WorkflowDef conductorWorkflowDef = JsonUtils.fromJson("integration/scenarios/legacy/ExclusiveJoinWorkflowDef.json", - WorkflowDef.class); - metadataClient.unregisterWorkflowDef(conductorWorkflowDef.getName(), conductorWorkflowDef.getVersion()); - } - - @After - public void unRegisterWorkflows() throws Exception { - unRegisterWorkflowDefinitions(); - } - - @AfterClass - public static void teardown() throws Exception { - TestEnvironment.teardown(); - search.stop(); - } -} \ No newline at end of file diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/GrpcEndToEndTest.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/GrpcEndToEndTest.java deleted file mode 100644 index 8536256906..0000000000 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/GrpcEndToEndTest.java +++ /dev/null @@ -1,76 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -/** - * - */ -package com.netflix.conductor.tests.integration; - -import com.google.inject.Guice; -import com.google.inject.Injector; -import com.netflix.conductor.bootstrap.BootstrapModule; -import com.netflix.conductor.bootstrap.ModulesProvider; -import com.netflix.conductor.client.grpc.MetadataClient; -import com.netflix.conductor.client.grpc.TaskClient; -import com.netflix.conductor.client.grpc.WorkflowClient; -import com.netflix.conductor.core.config.Configuration; -import com.netflix.conductor.elasticsearch.ElasticSearchConfiguration; -import com.netflix.conductor.elasticsearch.EmbeddedElasticSearchProvider; -import com.netflix.conductor.grpc.server.GRPCServer; -import com.netflix.conductor.grpc.server.GRPCServerConfiguration; -import com.netflix.conductor.grpc.server.GRPCServerProvider; -import com.netflix.conductor.tests.utils.TestEnvironment; -import org.junit.AfterClass; -import org.junit.BeforeClass; - -import java.util.Optional; - -import static org.junit.Assert.assertTrue; - -/** - * @author Viren - */ -public class GrpcEndToEndTest extends AbstractGrpcEndToEndTest { - - private static final int SERVER_PORT = 8092; - - @BeforeClass - public static void setup() throws Exception { - TestEnvironment.setup(); - System.setProperty(GRPCServerConfiguration.ENABLED_PROPERTY_NAME, "true"); - System.setProperty(GRPCServerConfiguration.PORT_PROPERTY_NAME, "8092"); - System.setProperty(ElasticSearchConfiguration.EMBEDDED_PORT_PROPERTY_NAME, "9202"); - System.setProperty(ElasticSearchConfiguration.ELASTIC_SEARCH_URL_PROPERTY_NAME, "localhost:9302"); - System.setProperty(Configuration.EXECUTION_LOCK_ENABLED_PROPERTY_NAME, "false"); - - Injector bootInjector = Guice.createInjector(new BootstrapModule()); - Injector serverInjector = Guice.createInjector(bootInjector.getInstance(ModulesProvider.class).get()); - - search = serverInjector.getInstance(EmbeddedElasticSearchProvider.class).get().get(); - search.start(); - - Optional server = serverInjector.getInstance(GRPCServerProvider.class).get(); - assertTrue("failed to instantiate GRPCServer", server.isPresent()); - server.get().start(); - - taskClient = new TaskClient("localhost", SERVER_PORT); - workflowClient = new WorkflowClient("localhost", SERVER_PORT); - metadataClient = new MetadataClient("localhost", SERVER_PORT); - } - - @AfterClass - public static void teardown() throws Exception { - TestEnvironment.teardown(); - search.stop(); - } - -} diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/HttpEndToEndTest.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/HttpEndToEndTest.java deleted file mode 100644 index d2fb3d0ea6..0000000000 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/HttpEndToEndTest.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright 2016 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.tests.integration; - -import com.google.inject.Guice; -import com.google.inject.Injector; -import com.netflix.conductor.bootstrap.BootstrapModule; -import com.netflix.conductor.bootstrap.ModulesProvider; -import com.netflix.conductor.client.http.MetadataClient; -import com.netflix.conductor.client.http.TaskClient; -import com.netflix.conductor.client.http.WorkflowClient; -import com.netflix.conductor.core.config.Configuration; -import com.netflix.conductor.elasticsearch.ElasticSearchConfiguration; -import com.netflix.conductor.elasticsearch.EmbeddedElasticSearchProvider; -import com.netflix.conductor.jetty.server.JettyServer; -import com.netflix.conductor.tests.utils.TestEnvironment; -import org.junit.AfterClass; -import org.junit.BeforeClass; - -/** - * @author Viren - */ -public class HttpEndToEndTest extends AbstractHttpEndToEndTest { - - private static final int SERVER_PORT = 8080; - - @BeforeClass - public static void setup() throws Exception { - TestEnvironment.setup(); - System.setProperty(ElasticSearchConfiguration.EMBEDDED_PORT_PROPERTY_NAME, "9201"); - System.setProperty(ElasticSearchConfiguration.ELASTIC_SEARCH_URL_PROPERTY_NAME, "localhost:9301"); - System.setProperty(Configuration.EXECUTION_LOCK_ENABLED_PROPERTY_NAME, "false"); - - Injector bootInjector = Guice.createInjector(new BootstrapModule()); - Injector serverInjector = Guice.createInjector(bootInjector.getInstance(ModulesProvider.class).get()); - - search = serverInjector.getInstance(EmbeddedElasticSearchProvider.class).get().get(); - search.start(); - - JettyServer server = new JettyServer(SERVER_PORT, false); - server.start(); - - apiRoot = String.format("http://localhost:%d/api/", SERVER_PORT); - - taskClient = new TaskClient(); - taskClient.setRootURI(apiRoot); - - workflowClient = new WorkflowClient(); - workflowClient.setRootURI(apiRoot); - - metadataClient = new MetadataClient(); - metadataClient.setRootURI(apiRoot); - } - - @AfterClass - public static void teardown() throws Exception { - TestEnvironment.teardown(); - search.stop(); - } -} diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/MySQLGrpcEndToEndTest.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/MySQLGrpcEndToEndTest.java deleted file mode 100644 index 63bb25ff09..0000000000 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/MySQLGrpcEndToEndTest.java +++ /dev/null @@ -1,79 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -/** - * - */ -package com.netflix.conductor.tests.integration; - -import com.google.inject.Guice; -import com.google.inject.Injector; -import com.netflix.conductor.bootstrap.BootstrapModule; -import com.netflix.conductor.bootstrap.ModulesProvider; -import com.netflix.conductor.client.grpc.MetadataClient; -import com.netflix.conductor.client.grpc.TaskClient; -import com.netflix.conductor.client.grpc.WorkflowClient; -import com.netflix.conductor.core.config.Configuration; -import com.netflix.conductor.elasticsearch.ElasticSearchConfiguration; -import com.netflix.conductor.elasticsearch.EmbeddedElasticSearchProvider; -import com.netflix.conductor.grpc.server.GRPCServer; -import com.netflix.conductor.grpc.server.GRPCServerConfiguration; -import com.netflix.conductor.grpc.server.GRPCServerProvider; -import com.netflix.conductor.tests.utils.MySQLTestRunner; -import com.netflix.conductor.tests.utils.TestEnvironment; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.runner.RunWith; - -import java.util.Optional; - -import static org.junit.Assert.assertTrue; - -/** - * @author Viren - */ -@RunWith(MySQLTestRunner.class) -public class MySQLGrpcEndToEndTest extends AbstractGrpcEndToEndTest { - - private static final int SERVER_PORT = 8094; - - @BeforeClass - public static void setup() throws Exception { - TestEnvironment.setup(); - System.setProperty(GRPCServerConfiguration.ENABLED_PROPERTY_NAME, "true"); - System.setProperty(GRPCServerConfiguration.PORT_PROPERTY_NAME, "8094"); - System.setProperty(ElasticSearchConfiguration.EMBEDDED_PORT_PROPERTY_NAME, "9204"); - System.setProperty(ElasticSearchConfiguration.ELASTIC_SEARCH_URL_PROPERTY_NAME, "localhost:9304"); - System.setProperty(Configuration.EXECUTION_LOCK_ENABLED_PROPERTY_NAME, "false"); - - Injector bootInjector = Guice.createInjector(new BootstrapModule()); - Injector serverInjector = Guice.createInjector(bootInjector.getInstance(ModulesProvider.class).get()); - - search = serverInjector.getInstance(EmbeddedElasticSearchProvider.class).get().get(); - search.start(); - - Optional server = serverInjector.getInstance(GRPCServerProvider.class).get(); - assertTrue("failed to instantiate GRPCServer", server.isPresent()); - server.get().start(); - - taskClient = new TaskClient("localhost", SERVER_PORT); - workflowClient = new WorkflowClient("localhost", SERVER_PORT); - metadataClient = new MetadataClient("localhost", SERVER_PORT); - } - - @AfterClass - public static void teardown() throws Exception { - TestEnvironment.teardown(); - search.stop(); - } - -} diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/MySQLWorkflowServiceTest.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/MySQLWorkflowServiceTest.java deleted file mode 100644 index 74a6deb640..0000000000 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/MySQLWorkflowServiceTest.java +++ /dev/null @@ -1,13 +0,0 @@ - -package com.netflix.conductor.tests.integration; - -import java.util.Map; - -public class MySQLWorkflowServiceTest { - - - String startOrLoadWorkflowExecution(String snapshotResourceName, String workflowName, int version, String correlationId, Map input, 
String event, Map taskToDomain) { - // return workflowExecutor.startWorkflow(workflowName, version, correlationId, input, null, event, taskToDomain); - return null; - } -} diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/PostgresGrpcEndToEndTest.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/PostgresGrpcEndToEndTest.java deleted file mode 100644 index 06be6fec1e..0000000000 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/PostgresGrpcEndToEndTest.java +++ /dev/null @@ -1,76 +0,0 @@ -package com.netflix.conductor.tests.integration; - -import com.google.inject.Guice; -import com.google.inject.Injector; -import com.netflix.conductor.bootstrap.BootstrapModule; -import com.netflix.conductor.bootstrap.ModulesProvider; -import com.netflix.conductor.client.grpc.MetadataClient; -import com.netflix.conductor.client.grpc.TaskClient; -import com.netflix.conductor.client.grpc.WorkflowClient; -import com.netflix.conductor.elasticsearch.EmbeddedElasticSearchProvider; -import com.netflix.conductor.grpc.server.GRPCServer; -import com.netflix.conductor.grpc.server.GRPCServerProvider; -import com.netflix.conductor.tests.utils.TestEnvironment; -import org.junit.AfterClass; -import org.junit.BeforeClass; - -import java.util.Optional; - -import static com.netflix.conductor.core.config.Configuration.DB_PROPERTY_NAME; -import static com.netflix.conductor.elasticsearch.ElasticSearchConfiguration.ELASTIC_SEARCH_URL_PROPERTY_NAME; -import static com.netflix.conductor.elasticsearch.ElasticSearchConfiguration.EMBEDDED_PORT_PROPERTY_NAME; -import static com.netflix.conductor.grpc.server.GRPCServerConfiguration.ENABLED_PROPERTY_NAME; -import static com.netflix.conductor.grpc.server.GRPCServerConfiguration.PORT_PROPERTY_NAME; -import static com.netflix.conductor.postgres.PostgresConfiguration.CONNECTION_POOL_MAX_SIZE_PROPERTY_NAME; -import static com.netflix.conductor.postgres.PostgresConfiguration.CONNECTION_POOL_MINIMUM_IDLE_PROPERTY_NAME; -import static com.netflix.conductor.postgres.PostgresConfiguration.JDBC_PASSWORD_PROPERTY_NAME; -import static com.netflix.conductor.postgres.PostgresConfiguration.JDBC_URL_PROPERTY_NAME; -import static com.netflix.conductor.postgres.PostgresConfiguration.JDBC_USER_NAME_PROPERTY_NAME; -import static org.junit.Assert.assertTrue; - -public class PostgresGrpcEndToEndTest extends AbstractGrpcEndToEndTest { - - private static final int SERVER_PORT = 8098; - protected static Optional server; - - @BeforeClass - public static void setup() throws Exception { - TestEnvironment.setup(); - - System.setProperty("workflow.namespace.prefix", "conductor" + System.getProperty("user.name")); - System.setProperty(DB_PROPERTY_NAME, "postgres"); - System.setProperty(ENABLED_PROPERTY_NAME, "true"); - System.setProperty(PORT_PROPERTY_NAME, "8098"); - System.setProperty(EMBEDDED_PORT_PROPERTY_NAME, "9208"); - System.setProperty(ELASTIC_SEARCH_URL_PROPERTY_NAME, "localhost:9308"); - System.setProperty(JDBC_URL_PROPERTY_NAME, "jdbc:postgresql://localhost:54320/conductor"); - System.setProperty(JDBC_USER_NAME_PROPERTY_NAME, "postgres"); - System.setProperty(JDBC_PASSWORD_PROPERTY_NAME, "postgres"); - - System.setProperty(CONNECTION_POOL_MINIMUM_IDLE_PROPERTY_NAME, "8"); - System.setProperty(CONNECTION_POOL_MAX_SIZE_PROPERTY_NAME, "8"); - System.setProperty(CONNECTION_POOL_MINIMUM_IDLE_PROPERTY_NAME, "300000"); - - Injector bootInjector = Guice.createInjector(new BootstrapModule()); - Injector serverInjector = 
Guice.createInjector(bootInjector.getInstance(ModulesProvider.class).get()); - - search = serverInjector.getInstance(EmbeddedElasticSearchProvider .class).get().get(); - search.start(); - - server = serverInjector.getInstance(GRPCServerProvider.class).get(); - assertTrue("failed to instantiate GRPCServer", server.isPresent()); - server.get().start(); - - taskClient = new TaskClient("localhost", SERVER_PORT); - workflowClient = new WorkflowClient("localhost", SERVER_PORT); - metadataClient = new MetadataClient("localhost", SERVER_PORT); - } - - @AfterClass - public static void teardown() throws Exception { - TestEnvironment.teardown(); - search.stop(); - server.ifPresent(GRPCServer::stop); - } - -} \ No newline at end of file diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowLegacyMigrationTest.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowLegacyMigrationTest.java deleted file mode 100644 index 3e5983d305..0000000000 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowLegacyMigrationTest.java +++ /dev/null @@ -1,155 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
    - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** - * - */ -package com.netflix.conductor.tests.integration; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.io.Resources; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.core.config.Configuration; -import com.netflix.conductor.core.execution.WorkflowExecutor; -import com.netflix.conductor.core.orchestration.ExecutionDAOFacade; -import com.netflix.conductor.core.utils.IDGenerator; -import com.netflix.conductor.dao.ExecutionDAO; -import com.netflix.conductor.tests.utils.TestRunner; -import org.junit.Ignore; -import org.junit.Test; -import org.junit.runner.RunWith; - -import javax.inject.Inject; -import java.nio.charset.StandardCharsets; -import java.util.Map; - -import static org.junit.Assert.fail; - -@RunWith(TestRunner.class) -public class WorkflowLegacyMigrationTest extends AbstractWorkflowServiceTest { - - private static final String WORKFLOW_SCENARIOS_PATH_PREFIX = "/integration/scenarios/legacy/"; - private static final String WORKFLOW_SCENARIO_EXTENSION = ".json"; - private static final String WORKFLOW_INSTANCE_ID_PLACEHOLDER = "WORKFLOW_INSTANCE_ID"; - - @Inject - private ExecutionDAO executionDAO; - - @Inject - private ObjectMapper objectMapper; - - @Inject - private Configuration configuration; - - @Inject - ExecutionDAOFacade executionDAOFacade; - - @Override - public String startOrLoadWorkflowExecution(String snapshotResourceName, String workflowName, - int version, String correlationId, Map input, - String event, Map taskToDomain) { - Workflow workflow = null; - try { - workflow = loadWorkflowSnapshot(getWorkflowResourcePath(snapshotResourceName)); - } catch (Exception e) { - fail("Error loading workflow scenario " + snapshotResourceName); - } - - final String workflowId = workflow.getWorkflowId(); - - workflow.setCorrelationId(correlationId); - workflow.setInput(input); - workflow.setEvent(event); - workflow.setTaskToDomain(taskToDomain); - workflow.setVersion(version); - - workflow.getTasks().forEach(task -> { - task.setTaskId(IDGenerator.generate()); - task.setWorkflowInstanceId(workflowId); - task.setCorrelationId(correlationId); - }); - - executionDAOFacade.createWorkflow(workflow); - executionDAOFacade.createTasks(workflow.getTasks()); - - /* - * Apart from loading a workflow snapshot, - * in order to represent a workflow on the system, we need to populate the - * respective queues related to tasks in progress or decisions. 
- */ - workflow.getTasks().forEach(task -> { - workflowExecutor.addTaskToQueue(task); - queueDAO.push(WorkflowExecutor.DECIDER_QUEUE, workflowId, configuration.getSweepFrequency()); - }); - - return workflow.getWorkflowId(); - } - - private Workflow loadWorkflowSnapshot(String resourcePath) throws Exception { - - String content = Resources.toString(WorkflowLegacyMigrationTest.class.getResource(resourcePath), StandardCharsets.UTF_8); - String workflowId = IDGenerator.generate(); - content = content.replace(WORKFLOW_INSTANCE_ID_PLACEHOLDER, workflowId); - - Workflow workflow = objectMapper.readValue(content, Workflow.class); - workflow.setWorkflowId(workflowId); - - return workflow; - } - - private String getWorkflowResourcePath(String workflowName) { - return WORKFLOW_SCENARIOS_PATH_PREFIX + workflowName + WORKFLOW_SCENARIO_EXTENSION; - } - - @Ignore - @Test - @Override - /* - * This scenario cannot be recreated loading a workflow snapshot. - * ForkJoins are also tested on testForkJoin() - */ - public void testForkJoinNestedWithSubWorkflow() { - } - - @Ignore - @Test - @Override - public void testTerminateTaskWithFailedStatus() { - } - - @Ignore - @Test - @Override - public void testTerminateTaskWithCompletedStatus() { - } - - @Ignore - @Test - @Override - public void testTerminateMultiLevelWorkflow() { - } - - @Ignore - @Test - @Override - public void testForkJoinWithOptionalSubworkflows() { - } - - @Ignore - @Test - @Override - public void testTerminateTaskInASubworkflow() { - } -} diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowServiceTest.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowServiceTest.java deleted file mode 100644 index b41c30ed08..0000000000 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowServiceTest.java +++ /dev/null @@ -1,328 +0,0 @@ -/* - * Copyright 2020 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
    - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.netflix.conductor.tests.integration; - -import com.google.common.util.concurrent.Uninterruptibles; -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.tests.utils.TestRunner; -import org.junit.Test; -import org.junit.runner.RunWith; - -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.UUID; -import java.util.concurrent.TimeUnit; - -import static com.netflix.conductor.common.metadata.tasks.Task.Status.COMPLETED; -import static com.netflix.conductor.common.metadata.tasks.Task.Status.IN_PROGRESS; -import static com.netflix.conductor.common.metadata.workflow.TaskType.SUB_WORKFLOW; -import static com.netflix.conductor.common.run.Workflow.WorkflowStatus.RUNNING; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; - -@RunWith(TestRunner.class) -public class WorkflowServiceTest extends AbstractWorkflowServiceTest { - private static final String WF_WITH_INLINE_SUB_WF = "WorkflowWithInlineSubWorkflow"; - private static final String WF_WITH_SUB_WF_WITH_INLINE_SUB_WF = "WorkflowWithSubWorkflowWithInlineSubWorkflow"; - private static final String WF_WITH_INLINE_SUB_WF_WITH_INLINE_SUB_WF = "WorkflowWithInlineSubWorkflowWithInlineSubWorkflow"; - - @Override - String startOrLoadWorkflowExecution(String snapshotResourceName, String workflowName, int version, - String correlationId, Map input, String event, - Map taskToDomain) { - return workflowExecutor.startWorkflow(workflowName, version, - correlationId, input, null, event, taskToDomain); - } - - @Test - public void testSubWorkflowWithInlineWorkflowDefinition() { - registerWorkflow(createWorkflowWithInlineSubWorkflow()); - metadataService.getWorkflowDef(WF_WITH_INLINE_SUB_WF, 2); - - Map input = new HashMap<>(); - input.put("param1", "param 1 value"); - input.put("param3", "param 3 value"); - String wfId = startOrLoadWorkflowExecution(WF_WITH_INLINE_SUB_WF, WF_WITH_INLINE_SUB_WF, - 2, "test", input, null, null); - assertNotNull(wfId); - - validateWorkflowWithInlineSubWorkflowExecution(wfId); - } - - @Test - public void testWorkflowWithSubWorkflowWithInlineSubWorkflow() { - createWorkflowWithSubWorkflowWithInlineSubWorkflow(); - metadataService.getWorkflowDef(WF_WITH_SUB_WF_WITH_INLINE_SUB_WF, 1); - - Map input = new HashMap<>(); - input.put("param1", "parent param 1 value"); - input.put("param3", "parent param 3 value"); - - String wfId = startOrLoadWorkflowExecution(WF_WITH_SUB_WF_WITH_INLINE_SUB_WF, - WF_WITH_SUB_WF_WITH_INLINE_SUB_WF, 1, "test", input, null, null); - assertNotNull(wfId); - - validateWorkflowWithSubWorkflowWithInlineSubWorkflowExecution(wfId); - } - - @Test - public void testWorkflowWithInlineSubWorkflowWithInlineSubWorkflow() { - createWorkflowWithInlineSubWorkflowWithInlineSubWorkflow(); - 
metadataService.getWorkflowDef(WF_WITH_INLINE_SUB_WF_WITH_INLINE_SUB_WF, 1); - - Map input = new HashMap<>(); - input.put("param1", "parent param 1 value"); - input.put("param3", "parent param 3 value"); - - String wfId = startOrLoadWorkflowExecution(WF_WITH_INLINE_SUB_WF_WITH_INLINE_SUB_WF, - WF_WITH_INLINE_SUB_WF_WITH_INLINE_SUB_WF, 1, "test", input, null, null); - assertNotNull(wfId); - - validateWorkflowWithSubWorkflowWithInlineSubWorkflowExecution(wfId); - } - - private WorkflowDef createInlineSubWorkflow() { - // create inline subworkflow - WorkflowDef subWorkflowDef = new WorkflowDef(); - subWorkflowDef.setName("inline_sw_1"); - subWorkflowDef.setDescription(subWorkflowDef.getName()); - subWorkflowDef.setVersion(3); - subWorkflowDef.setSchemaVersion(2); - Map outputParameters = new HashMap<>(); - outputParameters.put("o1", "${workflow.input.param1}"); - outputParameters.put("o2", "${isw_t1.output.uuid}"); - subWorkflowDef.setOutputParameters(outputParameters); - - WorkflowTask wft1 = new WorkflowTask(); - wft1.setName("junit_task_1"); - wft1.setTaskReferenceName("isw_t1"); - subWorkflowDef.setTasks(Collections.singletonList(wft1)); - - return subWorkflowDef; - } - - private void registerWorkflow(WorkflowDef def) { - metadataService.updateWorkflowDef(Collections.singletonList(def)); - } - - private WorkflowDef createWorkflowWithInlineSubWorkflow() { - WorkflowDef subWorkflowDef = createInlineSubWorkflow(); - - WorkflowTask subWfTask = new WorkflowTask(); - subWfTask.setName("subWorkflowTask"); - subWfTask.setType(SUB_WORKFLOW.name()); - SubWorkflowParams swp = new SubWorkflowParams(); - swp.setName("does-not-existing-wf"); - swp.setWorkflowDefinition(subWorkflowDef); - subWfTask.setSubWorkflowParam(swp); - Map inputParam = new HashMap<>(); - inputParam.put("test", "test value"); - inputParam.put("param1", "sub workflow input param1"); - inputParam.put("param2", subWorkflowDef.getVersion()); - subWfTask.setInputParameters(inputParam); - subWfTask.setTaskReferenceName("sw1"); - - WorkflowDef main = new WorkflowDef(); - main.setVersion(2); - main.setSchemaVersion(2); - main.setInputParameters(Arrays.asList("param1", "param2")); - main.setName(WF_WITH_INLINE_SUB_WF); - main.getTasks().add(subWfTask); - - return main; - } - - private void createWorkflowWithSubWorkflowWithInlineSubWorkflow() { - registerWorkflow(createWorkflowWithInlineSubWorkflow()); - - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setName("subWorkflowTask"); - workflowTask.setType(SUB_WORKFLOW.name()); - SubWorkflowParams swp = new SubWorkflowParams(); - swp.setName(WF_WITH_INLINE_SUB_WF); - workflowTask.setSubWorkflowParam(swp); - Map input = new HashMap<>(); - input.put("test", "test value"); - input.put("param1", "sub workflow task input param1"); - input.put("param2", 21); - workflowTask.setInputParameters(input); - workflowTask.setTaskReferenceName("sw2"); - - WorkflowDef main = new WorkflowDef(); - main.setSchemaVersion(2); - main.setInputParameters(Arrays.asList("param1", "param2")); - main.setName(WF_WITH_SUB_WF_WITH_INLINE_SUB_WF); - main.getTasks().add(workflowTask); - - Map outputParameters = new HashMap<>(); - outputParameters.put("o1", "${workflow.input.param1}"); - outputParameters.put("o2", "${sw2.output.o2}"); - main.setOutputParameters(outputParameters); - - metadataService.updateWorkflowDef(Collections.singletonList(main)); - } - - private void createWorkflowWithInlineSubWorkflowWithInlineSubWorkflow() { - WorkflowDef subWorkflowDef = createWorkflowWithInlineSubWorkflow(); - - 
WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setName("subWorkflowTask"); - workflowTask.setType(SUB_WORKFLOW.name()); - SubWorkflowParams swp = new SubWorkflowParams(); - swp.setName("dummy-name"); - swp.setWorkflowDef(subWorkflowDef); - workflowTask.setSubWorkflowParam(swp); - Map input = new HashMap<>(); - input.put("test", "test value"); - input.put("param1", "sub workflow task input param1"); - input.put("param2", 21); - workflowTask.setInputParameters(input); - workflowTask.setTaskReferenceName("sw2"); - - WorkflowDef main = new WorkflowDef(); - main.setSchemaVersion(2); - main.setInputParameters(Arrays.asList("param1", "param2")); - main.setName(WF_WITH_INLINE_SUB_WF_WITH_INLINE_SUB_WF); - main.getTasks().add(workflowTask); - - Map outputParameters = new HashMap<>(); - outputParameters.put("o1", "${workflow.input.param1}"); - outputParameters.put("o2", "${sw2.output.o2}"); - main.setOutputParameters(outputParameters); - - metadataService.updateWorkflowDef(Collections.singletonList(main)); - } - - private void validateWorkflowWithInlineSubWorkflowExecution(String wfId) { - Workflow workflow = workflowExecutionService.getExecutionStatus(wfId, true); - assertNotNull(workflow); - assertNotNull(workflow.getTasks()); - assertEquals(RUNNING, workflow.getStatus()); - - // Simulating SystemTaskWorkerCoordinator to execute async system tasks - String subWorkflowTaskId = workflow.getTaskByRefName("sw1").getTaskId(); - workflowExecutor.executeSystemTask(dummySubWorkflowSystemTask, subWorkflowTaskId, 1); - Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); - - workflow = workflowExecutionService.getExecutionStatus(wfId, true); - assertNotNull(workflow); - assertNotNull(workflow.getTasks()); - - Task subWorkflowTask = workflow.getTasks().stream().filter(t -> t.getTaskType().equals(SUB_WORKFLOW.name())).findAny().get(); - assertNotNull(subWorkflowTask); - assertNotNull(subWorkflowTask.getOutputData()); - assertNotNull(subWorkflowTask.getInputData()); - assertNotNull("Output: " + subWorkflowTask.getSubWorkflowId() + ", status: " + subWorkflowTask.getStatus(), subWorkflowTask.getSubWorkflowId()); - assertTrue(subWorkflowTask.getInputData().containsKey("workflowInput")); - assertEquals(3, ((Map) subWorkflowTask.getInputData().get("workflowInput")).get("param2")); - assertEquals("inline_sw_1", subWorkflowTask.getInputData().get("subWorkflowName")); - assertEquals(3, subWorkflowTask.getInputData().get("subWorkflowVersion")); - assertEquals(IN_PROGRESS, subWorkflowTask.getStatus()); - - String subWorkflowId = subWorkflowTask.getSubWorkflowId(); - workflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); - assertNotNull(workflow); - assertNotNull(workflow.getTasks()); - assertEquals(wfId, workflow.getParentWorkflowId()); - assertEquals(RUNNING, workflow.getStatus()); - - Task simpleTask = workflowExecutionService.poll("junit_task_1", "test"); - String uuid = UUID.nameUUIDFromBytes("hello".getBytes()).toString(); - simpleTask.getOutputData().put("uuid", uuid); - simpleTask.setStatus(COMPLETED); - workflowExecutionService.updateTask(simpleTask); - - workflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); - assertNotNull(workflow); - assertEquals(Workflow.WorkflowStatus.COMPLETED, workflow.getStatus()); - assertEquals("inline_sw_1", workflow.getWorkflowName()); - assertNotNull(workflow.getOutput()); - assertTrue(workflow.getOutput().containsKey("o1")); - assertTrue(workflow.getOutput().containsKey("o2")); - assertEquals("sub workflow input 
param1", workflow.getOutput().get("o1")); - assertEquals(uuid, workflow.getOutput().get("o2")); - - // Simulating SystemTaskWorkerCoordinator to execute async system tasks - workflowExecutor.executeSystemTask(dummySubWorkflowSystemTask, subWorkflowTaskId, 1); - - workflow = workflowExecutionService.getExecutionStatus(wfId, true); - assertNotNull(workflow); - assertEquals(Workflow.WorkflowStatus.COMPLETED, workflow.getStatus()); - assertEquals("sub workflow input param1", workflow.getOutput().get("o1")); - assertEquals(uuid, workflow.getOutput().get("o2")); - - subWorkflowTask = workflow.getTaskByRefName("sw1"); - assertEquals(COMPLETED, subWorkflowTask.getStatus()); - assertEquals("sub workflow input param1", subWorkflowTask.getOutputData().get("o1")); - assertEquals(uuid, subWorkflowTask.getOutputData().get("o2")); - } - - private void validateWorkflowWithSubWorkflowWithInlineSubWorkflowExecution(String wfId) { - Workflow workflow = workflowExecutionService.getExecutionStatus(wfId, true); - assertNotNull(workflow); - assertNotNull(workflow.getTasks()); - assertEquals(RUNNING, workflow.getStatus()); - - // Simulating SystemTaskWorkerCoordinator to execute async system tasks - String subWorkflowTaskId = workflow.getTaskByRefName("sw2").getTaskId(); - workflowExecutor.executeSystemTask(dummySubWorkflowSystemTask, subWorkflowTaskId, 1); - Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); - - workflow = workflowExecutionService.getExecutionStatus(wfId, true); - assertNotNull(workflow); - assertNotNull(workflow.getTasks()); - - Task subWorkflowTask = workflow.getTasks().stream().filter(t -> t.getTaskType().equals(SUB_WORKFLOW.name())).findAny().get(); - assertNotNull(subWorkflowTask); - assertNotNull(subWorkflowTask.getOutputData()); - assertNotNull(subWorkflowTask.getInputData()); - assertNotNull("Output: " + subWorkflowTask.getSubWorkflowId() + ", status: " + subWorkflowTask.getStatus(), subWorkflowTask.getSubWorkflowId()); - assertTrue(subWorkflowTask.getInputData().containsKey("workflowInput")); - assertEquals(21, ((Map) subWorkflowTask.getInputData().get("workflowInput")).get("param2")); - assertEquals(WF_WITH_INLINE_SUB_WF, subWorkflowTask.getInputData().get("subWorkflowName")); - assertEquals(2, subWorkflowTask.getInputData().get("subWorkflowVersion")); - assertEquals(IN_PROGRESS, subWorkflowTask.getStatus()); - - String subWorkflowId = subWorkflowTask.getSubWorkflowId(); - workflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); - assertEquals(wfId, workflow.getParentWorkflowId()); - - validateWorkflowWithInlineSubWorkflowExecution(subWorkflowId); - - // Simulating SystemTaskWorkerCoordinator to execute async system tasks - workflowExecutor.executeSystemTask(dummySubWorkflowSystemTask, subWorkflowTaskId, 1); - - workflow = workflowExecutionService.getExecutionStatus(wfId, true); - assertNotNull(workflow); - assertEquals(Workflow.WorkflowStatus.COMPLETED, workflow.getStatus()); - assertEquals(Workflow.WorkflowStatus.COMPLETED, workflow.getStatus()); - assertEquals("parent param 1 value", workflow.getOutput().get("o1")); - String uuid = UUID.nameUUIDFromBytes("hello".getBytes()).toString(); - assertEquals(uuid, workflow.getOutput().get("o2")); - - subWorkflowTask = workflow.getTaskByRefName("sw2"); - assertEquals(COMPLETED, subWorkflowTask.getStatus()); - assertEquals("sub workflow input param1", subWorkflowTask.getOutputData().get("o1")); - assertEquals(uuid, subWorkflowTask.getOutputData().get("o2")); - } -} diff --git 
a/test-harness/src/test/java/com/netflix/conductor/tests/integration/model/TaskWrapper.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/model/TaskWrapper.java deleted file mode 100644 index ed39b2efe9..0000000000 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/model/TaskWrapper.java +++ /dev/null @@ -1,39 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.netflix.conductor.tests.integration.model; - -import java.util.List; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; - -public class TaskWrapper { - - private List taskDefs; - - public List getTaskDefs() { - return taskDefs; - } - - public void setTaskDefs(List taskDefs) { - this.taskDefs = taskDefs; - } - - @Override - public String toString() { - return "TaskWrapper{" + "taskDefs=" + taskDefs + '}'; - } -} \ No newline at end of file diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/listener/StatusPublisherTestRunner.java b/test-harness/src/test/java/com/netflix/conductor/tests/listener/StatusPublisherTestRunner.java deleted file mode 100644 index 8e27824e64..0000000000 --- a/test-harness/src/test/java/com/netflix/conductor/tests/listener/StatusPublisherTestRunner.java +++ /dev/null @@ -1,52 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -/** - * - */ -package com.netflix.conductor.tests.listener; - -import com.google.inject.Guice; -import com.google.inject.Injector; -import com.google.inject.util.Modules; -import com.netflix.conductor.contribs.DynoQueueStatusPublisherModule; -import com.netflix.conductor.tests.utils.TestModule; -import org.junit.runners.BlockJUnit4ClassRunner; - -public class StatusPublisherTestRunner extends BlockJUnit4ClassRunner { - - private Injector dependenciesInjector; - - static { - System.setProperty("EC2_REGION", "us-east-1"); - System.setProperty("EC2_AVAILABILITY_ZONE", "us-east-1c"); - } - - - public StatusPublisherTestRunner(Class klass) throws Exception { - super(klass); - System.setProperty("workflow.namespace.prefix", "conductor" + System.getProperty("user.name")); - dependenciesInjector = Guice.createInjector(Modules.override(new TestModule()).with(new DynoQueueStatusPublisherModule())); - } - - @Override - protected Object createTest() throws Exception { - Object test = super.createTest(); - dependenciesInjector.injectMembers(test); - return test; - } - - -} diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/utils/EmbeddedTestElasticSearch.java b/test-harness/src/test/java/com/netflix/conductor/tests/utils/EmbeddedTestElasticSearch.java deleted file mode 100644 index 4a4c497bf9..0000000000 --- a/test-harness/src/test/java/com/netflix/conductor/tests/utils/EmbeddedTestElasticSearch.java +++ /dev/null @@ -1,52 +0,0 @@ -package com.netflix.conductor.tests.utils; - -import com.netflix.conductor.elasticsearch.EmbeddedElasticSearch; -import com.netflix.conductor.elasticsearch.EmbeddedElasticSearchProvider; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import javax.annotation.PostConstruct; -import javax.annotation.PreDestroy; -import javax.inject.Inject; -import javax.inject.Singleton; - - -/** - * A helper class to be used only during tests, this class has Guice hooks to start the embedded elastic search on construction - * and stop before destruction - * - */ -@Singleton -public class EmbeddedTestElasticSearch { - - private static final Logger logger = LoggerFactory.getLogger(EmbeddedTestElasticSearch.class); - - private final EmbeddedElasticSearch embeddedElasticSearch; - - @Inject - public EmbeddedTestElasticSearch(EmbeddedElasticSearchProvider embeddedElasticSearchV5Provider) { - embeddedElasticSearch = embeddedElasticSearchV5Provider.get() - .orElseThrow(() -> new RuntimeException("Unable to load in memory elastic search")); - } - - @PostConstruct - public void init() { - try { - embeddedElasticSearch.start(); - } catch (Exception e) { - logger.error("Error starting the Embedded elastic search", e); - throw new RuntimeException(e); - } - } - - @PreDestroy - public void cleanup() { - try { - embeddedElasticSearch.stop(); - } catch (Exception e) { - logger.error("Error stopping the Embedded elastic search", e); - throw new RuntimeException(e); - } - } - -} diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/utils/IntegrationTestModule.java b/test-harness/src/test/java/com/netflix/conductor/tests/utils/IntegrationTestModule.java deleted file mode 100644 index bcf3a7402b..0000000000 --- a/test-harness/src/test/java/com/netflix/conductor/tests/utils/IntegrationTestModule.java +++ /dev/null @@ -1,20 +0,0 @@ -package com.netflix.conductor.tests.utils; - -import com.google.inject.AbstractModule; -import com.google.inject.Guice; -import com.google.inject.Provides; -import com.netflix.conductor.bootstrap.ModulesProvider; -import 
com.netflix.conductor.elasticsearch.EmbeddedElasticSearchProvider; - -public class IntegrationTestModule extends AbstractModule { - - @Override - protected void configure() { - - } - - @Provides - public EmbeddedElasticSearchProvider getElasticSearchProvider(ModulesProvider modulesProvider) { - return Guice.createInjector(modulesProvider.get()).getInstance(EmbeddedElasticSearchProvider.class); - } -} diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/utils/JsonUtils.java b/test-harness/src/test/java/com/netflix/conductor/tests/utils/JsonUtils.java deleted file mode 100644 index a63651ddc7..0000000000 --- a/test-harness/src/test/java/com/netflix/conductor/tests/utils/JsonUtils.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.netflix.conductor.tests.utils; - -import java.io.InputStream; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.netflix.conductor.common.utils.JsonMapperProvider; - -public class JsonUtils { - - public static T fromJson(String fileName, Class classObject) throws Exception { - - ObjectMapper objectMapper = new JsonMapperProvider().get(); - - InputStream inputStream = ClassLoader.getSystemResourceAsStream(fileName); - return objectMapper.readValue(inputStream, classObject); - - } -} diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/utils/MockConfiguration.java b/test-harness/src/test/java/com/netflix/conductor/tests/utils/MockConfiguration.java deleted file mode 100644 index 26a7dfe402..0000000000 --- a/test-harness/src/test/java/com/netflix/conductor/tests/utils/MockConfiguration.java +++ /dev/null @@ -1,168 +0,0 @@ -/* - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.netflix.conductor.tests.utils; - -import com.google.inject.AbstractModule; -import com.netflix.conductor.core.config.Configuration; - -import java.net.InetAddress; -import java.net.UnknownHostException; -import java.util.Collections; -import java.util.List; -import java.util.Map; - -public class MockConfiguration implements Configuration { - - @Override - public int getSweepFrequency() { - return 30; - } - - @Override - public boolean disableSweep() { - return false; - } - - @Override - public boolean disableAsyncWorkers() { - return false; - } - - @Override - public boolean isEventMessageIndexingEnabled() { - return true; - } - - @Override - public boolean isEventExecutionIndexingEnabled() { - return true; - } - - @Override - public String getServerId() { - try { - return InetAddress.getLocalHost().getHostName(); - } catch (UnknownHostException e) { - return "unknown"; - } - } - - @Override - public String getEnvironment() { - return "test"; - } - - @Override - public String getStack() { - return "test"; - } - - @Override - public String getAppId() { - return "conductor"; - } - - @Override - public String getProperty(String string, String def) { - return "dummy"; - } - - @Override - public String getAvailabilityZone() { - return "us-east-1c"; - } - - @Override - public int getIntProperty(String string, int def) { - return 100; - } - - @Override - public String getRegion() { - return "us-east-1"; - } - - @Override - public Long getWorkflowInputPayloadSizeThresholdKB() { - return 10L; - } - - @Override - public Long getMaxWorkflowInputPayloadSizeThresholdKB() { - return 10240L; - } - - @Override - public Long getWorkflowOutputPayloadSizeThresholdKB() { - return 10L; - } - - @Override - public Long getMaxWorkflowOutputPayloadSizeThresholdKB() { - return 10240L; - } - - @Override - public Long getMaxWorkflowVariablesPayloadSizeThresholdKB() { - return 2L; - } - - @Override - public Long getTaskInputPayloadSizeThresholdKB() { - return 10L; - } - - @Override - public Long getMaxTaskInputPayloadSizeThresholdKB() { - return 10240L; - } - - @Override - public Long getTaskOutputPayloadSizeThresholdKB() { - return 10L; - } - - @Override - public Long getMaxTaskOutputPayloadSizeThresholdKB() { - return 10240L; - } - - @Override - public Map getAll() { - return null; - } - - @Override - public long getLongProperty(String name, long defaultValue) { - return 1000000L; - } - - @Override - public boolean getBooleanProperty(String name, boolean defaultValue) { - return defaultValue; - } - - @Override - public boolean getBoolProperty(String name, boolean defaultValue) { - return defaultValue; - } - - @Override - public List getAdditionalModules() { - return Collections.emptyList(); - } -} diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/utils/MockIndexDAO.java b/test-harness/src/test/java/com/netflix/conductor/tests/utils/MockIndexDAO.java deleted file mode 100644 index 52b76b41d0..0000000000 --- a/test-harness/src/test/java/com/netflix/conductor/tests/utils/MockIndexDAO.java +++ /dev/null @@ -1,151 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** - * - */ -package com.netflix.conductor.tests.utils; - -import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.CompletableFuture; - -import com.netflix.conductor.common.metadata.events.EventExecution; -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskExecLog; -import com.netflix.conductor.common.run.SearchResult; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.core.events.queue.Message; -import com.netflix.conductor.dao.IndexDAO; - -/** - * @author Viren - * - */ -public class MockIndexDAO implements IndexDAO { - - @Override - public void setup() { - } - - @Override - public void indexWorkflow(Workflow workflow) { - } - - @Override - public CompletableFuture asyncIndexWorkflow(Workflow workflow) { - return null; - } - - @Override - public SearchResult searchWorkflows(String query, String freeText, int start, int count, List sort) { - return new SearchResult<>(0, new ArrayList<>()); - } - - @Override - public SearchResult searchTasks(String query, String freeText, int start, int count, List sort) { - return new SearchResult<>(0, new ArrayList<>()); - } - - @Override - public void removeWorkflow(String workflowId) { - } - - @Override - public CompletableFuture asyncRemoveWorkflow(String workflowId) { - return null; - } - - @Override - public void updateWorkflow(String workflowInstanceId, String[] key, Object[] value) { - - } - - @Override - public CompletableFuture asyncUpdateWorkflow(String workflowInstanceId, String[] keys, Object[] values) { - return null; - } - - @Override - public void indexTask(Task task) { - - } - - @Override - public CompletableFuture asyncIndexTask(Task task) { - return null; - } - - @Override - public void addTaskExecutionLogs(List logs) { - - } - - @Override - public CompletableFuture asyncAddTaskExecutionLogs(List logs) { - return null; - } - - @Override - public void addEventExecution(EventExecution eventExecution) { - - } - - @Override - public List getEventExecutions(String event) { - return null; - } - - @Override - public CompletableFuture asyncAddEventExecution(EventExecution eventExecution) { - return null; - } - - @Override - public void addMessage(String queue, Message msg) { - - } - - @Override - public CompletableFuture asyncAddMessage(String queue, Message message) { - return null; - } - - @Override - public List getMessages(String queue) { - return null; - } - - @Override - public String get(String workflowInstanceId, String key) { - return null; - } - - - @Override - public List getTaskExecutionLogs(String taskId) { - return null; - } - - @Override - public List searchArchivableWorkflows(String indexName, long archiveTtlDays) { - return null; - } - - @Override - public List searchRecentRunningWorkflows(int lastModifiedHoursAgoFrom, int lastModifiedHoursAgoTo) { - return null; - } -} diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/utils/MySQLTestModule.java b/test-harness/src/test/java/com/netflix/conductor/tests/utils/MySQLTestModule.java deleted file mode 100644 index 
f451e5b9e1..0000000000 --- a/test-harness/src/test/java/com/netflix/conductor/tests/utils/MySQLTestModule.java +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Copyright 2019 Netflix, Inc. - *

- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.tests.utils; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.inject.AbstractModule; -import com.google.inject.Provides; -import com.google.inject.Scopes; -import com.google.inject.Singleton; -import com.netflix.conductor.common.utils.ExternalPayloadStorage; -import com.netflix.conductor.common.utils.JsonMapperProvider; -import com.netflix.conductor.core.config.Configuration; -import com.netflix.conductor.core.config.CoreModule; -import com.netflix.conductor.core.config.EventModule; -import com.netflix.conductor.core.execution.WorkflowStatusListener; -import com.netflix.conductor.core.execution.WorkflowStatusListenerStub; -import com.netflix.conductor.core.utils.NoopLockModule; -import com.netflix.conductor.dao.EventHandlerDAO; -import com.netflix.conductor.dao.ExecutionDAO; -import com.netflix.conductor.dao.IndexDAO; -import com.netflix.conductor.dao.MetadataDAO; -import com.netflix.conductor.dao.PollDataDAO; -import com.netflix.conductor.dao.QueueDAO; -import com.netflix.conductor.dao.RateLimitingDAO; -import com.netflix.conductor.dao.mysql.MySQLExecutionDAO; -import com.netflix.conductor.dao.mysql.MySQLMetadataDAO; -import com.netflix.conductor.dao.mysql.MySQLQueueDAO; -import com.netflix.conductor.mysql.MySQLConfiguration; -import com.netflix.conductor.mysql.MySQLDataSourceProvider; -import com.netflix.conductor.mysql.SystemPropertiesMySQLConfiguration; -import com.netflix.conductor.service.MetadataService; -import com.netflix.conductor.service.MetadataServiceImpl; - -import javax.sql.DataSource; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.atomic.AtomicInteger; - -/** - * @author jvemugunta - */ -public class MySQLTestModule extends AbstractModule { - - private int maxThreads = 50; - - private ExecutorService executorService; - - @Override - protected void configure() { - - - bind(Configuration.class).to(SystemPropertiesMySQLConfiguration.class).in(Singleton.class); - bind(MySQLConfiguration.class).to(SystemPropertiesMySQLConfiguration.class).in(Singleton.class); - - bind(DataSource.class).toProvider(MySQLDataSourceProvider.class).in(Scopes.SINGLETON); - bind(MetadataDAO.class).to(MySQLMetadataDAO.class); - bind(EventHandlerDAO.class).to(MySQLMetadataDAO.class); - bind(ExecutionDAO.class).to(MySQLExecutionDAO.class); - bind(RateLimitingDAO.class).to(MySQLExecutionDAO.class); - bind(PollDataDAO.class).to(MySQLExecutionDAO.class); - bind(QueueDAO.class).to(MySQLQueueDAO.class); - bind(IndexDAO.class).to(MockIndexDAO.class); - bind(WorkflowStatusListener.class).to(WorkflowStatusListenerStub.class); - - install(new CoreModule()); - install(new EventModule()); - bind(UserTask.class).asEagerSingleton(); - bind(ObjectMapper.class).toProvider(JsonMapperProvider.class); - bind(ExternalPayloadStorage.class).to(MockExternalPayloadStorage.class); - - bind(MetadataService.class).to(MetadataServiceImpl.class); - install(new NoopLockModule()); - } - - - @Provides - public ExecutorService getExecutorService() { - return this.executorService; - } - - private void configureExecutorService() { - AtomicInteger count = new AtomicInteger(0); - this.executorService = 
java.util.concurrent.Executors.newFixedThreadPool(maxThreads, runnable -> { - Thread workflowWorkerThread = new Thread(runnable); - workflowWorkerThread.setName(String.format("workflow-worker-%d", count.getAndIncrement())); - return workflowWorkerThread; - }); - } -} diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/utils/MySQLTestRunner.java b/test-harness/src/test/java/com/netflix/conductor/tests/utils/MySQLTestRunner.java deleted file mode 100644 index 45e8f9228a..0000000000 --- a/test-harness/src/test/java/com/netflix/conductor/tests/utils/MySQLTestRunner.java +++ /dev/null @@ -1,69 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** - * - */ -package com.netflix.conductor.tests.utils; - -import com.google.inject.Guice; -import com.google.inject.Injector; -import org.junit.runners.BlockJUnit4ClassRunner; - -/** - * @author Viren - * - */ -public class MySQLTestRunner extends BlockJUnit4ClassRunner { - - private Injector injector; - - static { - System.setProperty("EC2_REGION", "us-east-1"); - System.setProperty("EC2_AVAILABILITY_ZONE", "us-east-1c"); - - System.setProperty("conductor.workflow.input.payload.threshold.kb", "10"); - System.setProperty("conductor.max.workflow.input.payload.threshold.kb", "10240"); - System.setProperty("conductor.workflow.output.payload.threshold.kb", "10"); - System.setProperty("conductor.max.workflow.output.payload.threshold.kb", "10240"); - System.setProperty("conductor.task.input.payload.threshold.kb", "1"); - System.setProperty("conductor.max.task.input.payload.threshold.kb", "10240"); - System.setProperty("conductor.task.output.payload.threshold.kb", "10"); - System.setProperty("conductor.max.task.output.payload.threshold.kb", "10240"); - - // jdbc properties - - System.setProperty("jdbc.url", "jdbc:mysql://localhost:33307/conductor?useUnicode=true&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=UTC"); - System.setProperty("jdbc.username", "root"); - System.setProperty("jdbc.password", "root"); - System.setProperty("conductor.mysql.connection.pool.size.min", "8"); - System.setProperty("conductor.mysql.connection.pool.size.max", "8"); - System.setProperty("conductor.mysql.connection.pool.idle.min", "300000"); - - } - - public MySQLTestRunner(Class klass) throws Exception { - super(klass); - System.setProperty("workflow.namespace.prefix", "conductor" + System.getProperty("user.name")); - injector = Guice.createInjector(new MySQLTestModule()); - } - - @Override - protected Object createTest() throws Exception { - Object test = super.createTest(); - injector.injectMembers(test); - return test; - } -} diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestEnvironment.java b/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestEnvironment.java deleted file mode 100644 index daf4d16130..0000000000 --- a/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestEnvironment.java +++ /dev/null @@ -1,21 +0,0 @@ 
-package com.netflix.conductor.tests.utils; - -public class TestEnvironment { - private TestEnvironment() {} - - private static void setupSystemProperties() { - System.setProperty("EC2_REGION", "us-east-1"); - System.setProperty("EC2_AVAILABILITY_ZONE", "us-east-1c"); - System.setProperty("workflow.elasticsearch.index.name", "conductor"); - System.setProperty("workflow.namespace.prefix", "integration-test"); - System.setProperty("db", "memory"); - } - - public static void setup() { - setupSystemProperties(); - } - - public static void teardown() { - System.setProperties(null); - } -} diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestJettyServer.java b/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestJettyServer.java deleted file mode 100644 index 6e471c6b19..0000000000 --- a/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestJettyServer.java +++ /dev/null @@ -1,67 +0,0 @@ -package com.netflix.conductor.tests.utils; - -import com.netflix.conductor.client.http.MetadataClient; -import com.netflix.conductor.client.http.TaskClient; -import com.netflix.conductor.client.http.WorkflowClient; -import com.netflix.conductor.jetty.server.JettyServer; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import javax.annotation.PostConstruct; -import javax.annotation.PreDestroy; -import javax.inject.Singleton; - -@Singleton -public class TestJettyServer { - - private static final Logger logger = LoggerFactory.getLogger(TestJettyServer.class); - - public static final int SERVER_PORT = 8903; - public static final String API_ROOT = String.format("http://localhost:%d/api/", SERVER_PORT); - - private final JettyServer jettyServer; - - @PostConstruct - public void init() { - try { - jettyServer.start(); - } catch (Exception e) { - logger.error("Error starting the server ", e); - throw new RuntimeException(e); - } - } - - @PreDestroy - public void cleanup() { - try { - jettyServer.stop(); - } catch (Exception e) { - logger.error("Error stopping the server ", e); - throw new RuntimeException(e); - } - } - - public TestJettyServer() { - jettyServer = new JettyServer(SERVER_PORT, false); - } - - - public static TaskClient getTaskClient() { - TaskClient taskClient = new TaskClient(); - taskClient.setRootURI(API_ROOT); - return taskClient; - } - - public static WorkflowClient getWorkflowClient() { - WorkflowClient workflowClient = new WorkflowClient(); - workflowClient.setRootURI(API_ROOT); - return workflowClient; - } - - public static MetadataClient getMetaDataClient() { - MetadataClient metadataClient = new MetadataClient(); - metadataClient.setRootURI(API_ROOT); - return metadataClient; - } - -} diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestModule.java b/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestModule.java deleted file mode 100644 index c4d633528f..0000000000 --- a/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestModule.java +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Copyright 2016 Netflix, Inc. - *
    - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *
    - * http://www.apache.org/licenses/LICENSE-2.0 - *
    - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package com.netflix.conductor.tests.utils; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.inject.AbstractModule; -import com.google.inject.Provides; -import com.netflix.conductor.common.utils.ExternalPayloadStorage; -import com.netflix.conductor.common.utils.JsonMapperProvider; -import com.netflix.conductor.core.config.Configuration; -import com.netflix.conductor.core.config.CoreModule; -import com.netflix.conductor.core.config.EventModule; -import com.netflix.conductor.core.execution.WorkflowStatusListener; -import com.netflix.conductor.core.execution.WorkflowStatusListenerStub; -import com.netflix.conductor.core.utils.LocalOnlyLockModule; -import com.netflix.conductor.dao.EventHandlerDAO; -import com.netflix.conductor.dao.ExecutionDAO; -import com.netflix.conductor.dao.IndexDAO; -import com.netflix.conductor.dao.MetadataDAO; -import com.netflix.conductor.dao.PollDataDAO; -import com.netflix.conductor.dao.QueueDAO; -import com.netflix.conductor.dao.RateLimitingDAO; -import com.netflix.conductor.dao.dynomite.RedisEventHandlerDAO; -import com.netflix.conductor.dao.dynomite.RedisExecutionDAO; -import com.netflix.conductor.dao.dynomite.RedisMetadataDAO; -import com.netflix.conductor.dao.dynomite.RedisPollDataDAO; -import com.netflix.conductor.dao.dynomite.RedisRateLimitingDAO; -import com.netflix.conductor.dao.dynomite.queue.DynoQueueDAO; -import com.netflix.conductor.dyno.RedisQueuesProvider; -import com.netflix.conductor.dyno.RedisQueuesShardingStrategyProvider; -import com.netflix.conductor.server.LocalRedisModule; -import com.netflix.conductor.service.MetadataService; -import com.netflix.conductor.service.MetadataServiceImpl; -import com.netflix.dyno.queues.redis.RedisQueues; -import com.netflix.dyno.queues.redis.sharding.ShardingStrategy; - -import java.util.concurrent.ExecutorService; -import java.util.concurrent.atomic.AtomicInteger; - -/** - * @author Viren - */ -public class TestModule extends AbstractModule { - private int maxThreads = 50; - - private ExecutorService executorService; - - @Override - protected void configure() { - - System.setProperty("workflow.system.task.worker.callback.seconds", "0"); - System.setProperty("workflow.system.task.worker.queue.size", "10000"); - System.setProperty("workflow.system.task.worker.thread.count", "10"); - - configureExecutorService(); - - MockConfiguration config = new MockConfiguration(); - bind(Configuration.class).toInstance(config); - install(new LocalRedisModule()); - bind(ShardingStrategy.class).toProvider(RedisQueuesShardingStrategyProvider.class).asEagerSingleton(); - bind(RedisQueues.class).toProvider(RedisQueuesProvider.class); - - bind(MetadataDAO.class).to(RedisMetadataDAO.class); - bind(ExecutionDAO.class).to(RedisExecutionDAO.class); - bind(RateLimitingDAO.class).to(RedisRateLimitingDAO.class); - bind(EventHandlerDAO.class).to(RedisEventHandlerDAO.class); - bind(PollDataDAO.class).to(RedisPollDataDAO.class); - bind(IndexDAO.class).to(MockIndexDAO.class); - configureQueueDAO(); - - bind(WorkflowStatusListener.class).to(WorkflowStatusListenerStub.class); - - bind(MetadataService.class).to(MetadataServiceImpl.class); - - install(new CoreModule()); - install(new EventModule()); - 
bind(UserTask.class).asEagerSingleton(); - bind(ObjectMapper.class).toProvider(JsonMapperProvider.class); - bind(ExternalPayloadStorage.class).to(MockExternalPayloadStorage.class); - install(new LocalOnlyLockModule()); - } - - @Provides - public ExecutorService getExecutorService() { - return this.executorService; - } - - private void configureExecutorService() { - AtomicInteger count = new AtomicInteger(0); - this.executorService = java.util.concurrent.Executors.newFixedThreadPool(maxThreads, runnable -> { - Thread workflowWorkerThread = new Thread(runnable); - workflowWorkerThread.setName(String.format("workflow-worker-%d", count.getAndIncrement())); - return workflowWorkerThread; - }); - } - - public void configureQueueDAO() { - bind(QueueDAO.class).to(DynoQueueDAO.class); - } -} diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestRunner.java b/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestRunner.java deleted file mode 100644 index 87d0fadcee..0000000000 --- a/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestRunner.java +++ /dev/null @@ -1,54 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** - * - */ -package com.netflix.conductor.tests.utils; - -import org.junit.runners.BlockJUnit4ClassRunner; - -import com.google.inject.Guice; -import com.google.inject.Injector; - -/** - * @author Viren - * - */ -public class TestRunner extends BlockJUnit4ClassRunner { - - private Injector injector; - - static { - System.setProperty("EC2_REGION", "us-east-1"); - System.setProperty("EC2_AVAILABILITY_ZONE", "us-east-1c"); - } - - - public TestRunner(Class klass) throws Exception { - super(klass); - System.setProperty("workflow.namespace.prefix", "conductor" + System.getProperty("user.name")); - injector = Guice.createInjector(new TestModule()); - } - - @Override - protected Object createTest() throws Exception { - Object test = super.createTest(); - injector.injectMembers(test); - return test; - } - - -} diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/utils/UserTask.java b/test-harness/src/test/java/com/netflix/conductor/tests/utils/UserTask.java deleted file mode 100644 index 5b647e02b5..0000000000 --- a/test-harness/src/test/java/com/netflix/conductor/tests/utils/UserTask.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright 2017 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.netflix.conductor.tests.utils; - -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.util.concurrent.Uninterruptibles; -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.Task.Status; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.common.utils.JsonMapperProvider; -import com.netflix.conductor.core.execution.WorkflowExecutor; -import com.netflix.conductor.core.execution.tasks.WorkflowSystemTask; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.TimeUnit; - -/** - * @author Viren - * - */ -public class UserTask extends WorkflowSystemTask { - - public static final String NAME = "USER_TASK"; - - private final ObjectMapper objectMapper = new JsonMapperProvider().get(); - - private static final TypeReference<Map<String, Map<String, List<Object>>>> mapStringListObjects = - new TypeReference<Map<String, Map<String, List<Object>>>>() {}; - - public UserTask() { - super(NAME); - } - - @Override - public void start(Workflow workflow, Task task, WorkflowExecutor executor) { - Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); - - if (task.getWorkflowTask().isAsyncComplete()) { - task.setStatus(Status.IN_PROGRESS); - } else { - Map<String, Map<String, List<Object>>> map = objectMapper.convertValue(task.getInputData(), mapStringListObjects); - Map<String, Object> output = new HashMap<>(); - Map<String, List<Object>> defaultLargeInput = new HashMap<>(); - defaultLargeInput.put("TEST_SAMPLE", Collections.singletonList("testDefault")); - output.put("size", map.getOrDefault("largeInput", defaultLargeInput).get("TEST_SAMPLE").size()); - task.setOutputData(output); - task.setStatus(Status.COMPLETED); - } - } - - @Override - public boolean isAsync() { - return true; - } -} diff --git a/test-harness/src/test/resources/application-integrationtest.properties b/test-harness/src/test/resources/application-integrationtest.properties new file mode 100644 index 0000000000..a7244eaf99 --- /dev/null +++ b/test-harness/src/test/resources/application-integrationtest.properties @@ -0,0 +1,46 @@ + +environment=test +STACK=test +APP_ID=conductor +EC2_AVAILABILITY_ZONE=us-east-1c +EC2_REGION=us-east-1 + +db=memory + +decider.sweep.disable=false +decider.sweep.frequency.seconds=30 + +conductor.disable.async.workers=false + +workflow.system.task.worker.callback.seconds=0 +workflow.system.task.worker.queue.size=10000 +workflow.system.task.worker.thread.count=10 + +workflow.event.message.indexing.enabled=true +workflow.event.execution.indexing.enabled=true + +conductor.workflow.input.payload.threshold.kb=10 +conductor.max.workflow.input.payload.threshold.kb=10240 +conductor.workflow.output.payload.threshold.kb=10 +conductor.max.workflow.output.payload.threshold.kb=10240 +conductor.task.input.payload.threshold.kb=10 +conductor.max.task.input.payload.threshold.kb=10240 +conductor.task.output.payload.threshold.kb=10 +conductor.max.task.output.payload.threshold.kb=10240 +conductor.max.workflow.variables.payload.threshold.kb=2 + +workflow.decider.locking.server=LOCAL_ONLY + +workflow.indexing.enabled=false + +workflow.repairservice.enabled=false + +workflow.decider.locking.enabled=false + +workflow.external.payload.storage=mock + +workflow.namespace.prefix=integration-test +workflow.namespace.queue.prefix=integtest + +workflow.elasticsearch.index.name=conductor +workflow.elasticsearch.cluster.health.color=yellow diff --git
a/test-harness/src/test/resources/concurrency_limited_task_workflow_integration_test.json b/test-harness/src/test/resources/concurrency_limited_task_workflow_integration_test.json index 3d47a50af1..b63724743a 100644 --- a/test-harness/src/test/resources/concurrency_limited_task_workflow_integration_test.json +++ b/test-harness/src/test/resources/concurrency_limited_task_workflow_integration_test.json @@ -1,26 +1,29 @@ { - "name" : "test_concurrency_limits_workflow", - "version" : 1, - "tasks" : [ { - "name" : "test_task_with_concurrency_limit", - "taskReferenceName" : "test_task_with_concurrency_limit", - "inputParameters" : { }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], - "inputParameters" : [ ], - "outputParameters" : { }, - "schemaVersion" : 2, - "restartable" : true, - "workflowStatusListenerEnabled" : false, - "timeoutPolicy" : "ALERT_ONLY", - "timeoutSeconds" : 0 + "name": "test_concurrency_limits_workflow", + "version": 1, + "tasks": [ + { + "name": "test_task_with_concurrency_limit", + "taskReferenceName": "test_task_with_concurrency_limit", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "inputParameters": [], + "outputParameters": {}, + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "ownerEmail": "test@harness.com" } \ No newline at end of file diff --git a/test-harness/src/test/resources/conditional_system_task_workflow_integration_test.json b/test-harness/src/test/resources/conditional_system_task_workflow_integration_test.json index 4a71ef3587..275f928f86 100644 --- a/test-harness/src/test/resources/conditional_system_task_workflow_integration_test.json +++ b/test-harness/src/test/resources/conditional_system_task_workflow_integration_test.json @@ -2,93 +2,104 @@ "name": "ConditionalSystemWorkflow", "description": "ConditionalSystemWorkflow", "version": 1, - "tasks": [{ - "name": "integration_task_1", - "taskReferenceName": "t1", - "inputParameters": { - "tp11": "${workflow.input.param1}", - "tp12": "${workflow.input.param2}" + "tasks": [ + { + "name": "integration_task_1", + "taskReferenceName": "t1", + "inputParameters": { + "tp11": "${workflow.input.param1}", + "tp12": "${workflow.input.param2}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, { - "name": "decision", - "taskReferenceName": "decision", - "inputParameters": { - "case": "${t1.output.case}" + { + "name": "decision", + "taskReferenceName": "decision", + "inputParameters": { + "case": "${t1.output.case}" + }, + "type": "DECISION", + "caseValueParam": "case", + "decisionCases": { + "one": [ + { + "name": "integration_task_2", + "taskReferenceName": "t2", + "inputParameters": { + "tp21": "${workflow.input.param1}" + }, + "type": "SIMPLE", 
+ "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "two": [ + { + "name": "user_task", + "taskReferenceName": "user_task", + "inputParameters": { + "largeInput": "${t1.output.op}" + }, + "type": "USER_TASK", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ] + }, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] }, - "type": "DECISION", - "caseValueParam": "case", - "decisionCases": { - "one": [{ - "name": "integration_task_2", - "taskReferenceName": "t2", - "inputParameters": { - "tp21": "${workflow.input.param1}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }], - "two": [{ - "name": "user_task", - "taskReferenceName": "user_task", - "inputParameters": { - "largeInput": "${t1.output.op}" - }, - "type": "USER_TASK", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }] - }, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, { - "name": "integration_task_3", - "taskReferenceName": "t3", - "inputParameters": { - "tp31": "${workflow.input.param2}" - }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }], - "inputParameters": ["param1", "param2"], + { + "name": "integration_task_3", + "taskReferenceName": "t3", + "inputParameters": { + "tp31": "${workflow.input.param2}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "inputParameters": [ + "param1", + "param2" + ], "outputParameters": { "o2": "${t1.output.op}" }, @@ -96,5 +107,6 @@ "restartable": true, "workflowStatusListenerEnabled": false, "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0 + "timeoutSeconds": 0, + "ownerEmail": "test@harness.com" } \ No newline at end of file diff --git a/test-harness/src/test/resources/conditional_task_workflow_integration_test.json b/test-harness/src/test/resources/conditional_task_workflow_integration_test.json index 2c4e081e01..bc9c59de05 100644 --- a/test-harness/src/test/resources/conditional_task_workflow_integration_test.json +++ b/test-harness/src/test/resources/conditional_task_workflow_integration_test.json @@ -1,151 +1,170 @@ { - "name" : "ConditionalTaskWF", - "description" : "ConditionalTaskWF", - "version" : 1, - "tasks" : [ { - "name" : "conditional", - "taskReferenceName" : "conditional", - "inputParameters" : { - "case" : "${workflow.input.param1}" - }, - "type" : "DECISION", - "caseValueParam" : "case", - "decisionCases" : { - "nested" : [ { - "name" : "nestedCondition", - 
"taskReferenceName" : "nestedCondition", - "inputParameters" : { - "case" : "${workflow.input.param2}" - }, - "type" : "DECISION", - "caseValueParam" : "case", - "decisionCases" : { - "one" : [ { - "name" : "integration_task_1", - "taskReferenceName" : "t1", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}" + "name": "ConditionalTaskWF", + "description": "ConditionalTaskWF", + "version": 1, + "tasks": [ + { + "name": "conditional", + "taskReferenceName": "conditional", + "inputParameters": { + "case": "${workflow.input.param1}" + }, + "type": "DECISION", + "caseValueParam": "case", + "decisionCases": { + "nested": [ + { + "name": "nestedCondition", + "taskReferenceName": "nestedCondition", + "inputParameters": { + "case": "${workflow.input.param2}" }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], - "two" : [ { - "name" : "integration_task_2", - "taskReferenceName" : "t2", - "inputParameters" : { - "tp1" : "${workflow.input.param1}" + "type": "DECISION", + "caseValueParam": "case", + "decisionCases": { + "one": [ + { + "name": "integration_task_1", + "taskReferenceName": "t1", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "two": [ + { + "name": "integration_task_2", + "taskReferenceName": "t2", + "inputParameters": { + "tp1": "${workflow.input.param1}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ] }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ] - }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], - "three" : [ { - "name" : "integration_task_3", - "taskReferenceName" : "t3", - "inputParameters" : { - "tp3" : "workflow.input.param2" - }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ] - }, - "defaultCase" : [ { - "name" : "integration_task_10", - "taskReferenceName" : "t10", - "inputParameters" : { - "tp10" : "workflow.input.param2" + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "three": [ + { + "name": "integration_task_3", + "taskReferenceName": "t3", + "inputParameters": { + "tp3": "workflow.input.param2" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ] }, - "type" : 
"SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "name" : "finalcondition", - "taskReferenceName" : "finalCase", - "inputParameters" : { - "finalCase" : "${workflow.input.finalCase}" + "defaultCase": [ + { + "name": "integration_task_10", + "taskReferenceName": "t10", + "inputParameters": { + "tp10": "workflow.input.param2" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] }, - "type" : "DECISION", - "caseValueParam" : "finalCase", - "decisionCases" : { - "notify" : [ { - "name" : "integration_task_4", - "taskReferenceName" : "integration_task_4", - "inputParameters" : { }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ] - }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], - "inputParameters" : [ "param1", "param2" ], - "outputParameters" : { }, - "schemaVersion" : 2, - "restartable" : true, - "workflowStatusListenerEnabled" : false, - "timeoutPolicy" : "ALERT_ONLY", - "timeoutSeconds" : 0 + { + "name": "finalcondition", + "taskReferenceName": "finalCase", + "inputParameters": { + "finalCase": "${workflow.input.finalCase}" + }, + "type": "DECISION", + "caseValueParam": "finalCase", + "decisionCases": { + "notify": [ + { + "name": "integration_task_4", + "taskReferenceName": "integration_task_4", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ] + }, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "inputParameters": [ + "param1", + "param2" + ], + "outputParameters": {}, + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "ownerEmail": "test@harness.com" } \ No newline at end of file diff --git a/test-harness/src/test/resources/decision_and_fork_join_integration_test.json b/test-harness/src/test/resources/decision_and_fork_join_integration_test.json index f80634101b..d2fb055d28 100644 --- a/test-harness/src/test/resources/decision_and_fork_join_integration_test.json +++ b/test-harness/src/test/resources/decision_and_fork_join_integration_test.json @@ -1,145 +1,165 @@ { - "name" : "ForkConditionalTest", - "description" : "ForkConditionalTest", - "version" : 1, - "tasks" : [ { - "name" : "forkTask", - "taskReferenceName" : "forkTask", - "inputParameters" : { }, - "type" : 
"FORK_JOIN", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ [ { - "name" : "decisionTask", - "taskReferenceName" : "decisionTask", - "inputParameters" : { - "case" : "${workflow.input.case}" - }, - "type" : "DECISION", - "caseValueParam" : "case", - "decisionCases" : { - "c" : [ { - "name" : "integration_task_1", - "taskReferenceName" : "t1", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}" + "name": "ForkConditionalTest", + "description": "ForkConditionalTest", + "version": 1, + "tasks": [ + { + "name": "forkTask", + "taskReferenceName": "forkTask", + "inputParameters": {}, + "type": "FORK_JOIN", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [ + [ + { + "name": "decisionTask", + "taskReferenceName": "decisionTask", + "inputParameters": { + "case": "${workflow.input.case}" + }, + "type": "DECISION", + "caseValueParam": "case", + "decisionCases": { + "c": [ + { + "name": "integration_task_1", + "taskReferenceName": "t1", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + }, + { + "name": "integration_task_2", + "taskReferenceName": "t2", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ] + }, + "defaultCase": [ + { + "name": "integration_task_5", + "taskReferenceName": "t5", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "name" : "integration_task_2", - "taskReferenceName" : "t2", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}" - }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ] - }, - "defaultCase" : [ { - "name" : "integration_task_5", - "taskReferenceName" : "t5", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}" - }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "name" : 
"integration_task_20", - "taskReferenceName" : "t20", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}" - }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], [ { - "name" : "integration_task_10", - "taskReferenceName" : "t10", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}" - }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ] ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "name" : "joinTask", - "taskReferenceName" : "joinTask", - "inputParameters" : { }, - "type" : "JOIN", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ "t20", "t10" ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], - "inputParameters" : [ ], - "outputParameters" : { }, - "schemaVersion" : 2, - "restartable" : true, - "workflowStatusListenerEnabled" : false, - "timeoutPolicy" : "ALERT_ONLY", - "timeoutSeconds" : 0 + { + "name": "integration_task_20", + "taskReferenceName": "t20", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + [ + { + "name": "integration_task_10", + "taskReferenceName": "t10", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ] + ], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + }, + { + "name": "joinTask", + "taskReferenceName": "joinTask", + "inputParameters": {}, + "type": "JOIN", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [ + "t20", + "t10" + ], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "inputParameters": [], + "outputParameters": {}, + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "ownerEmail": "test@harness.com" } \ No newline at end of file diff --git a/test-harness/src/test/resources/do_while_as_subtask_integration_test.json b/test-harness/src/test/resources/do_while_as_subtask_integration_test.json index fb0b00a468..2fc471efcb 100644 --- a/test-harness/src/test/resources/do_while_as_subtask_integration_test.json +++ b/test-harness/src/test/resources/do_while_as_subtask_integration_test.json @@ -96,7 +96,10 @@ "defaultCase": [], "forkTasks": [], "startDelay": 0, - "joinOn": ["loopTask", "integration_task_2"], + "joinOn": [ + "loopTask", + "integration_task_2" + ], 
"optional": false, "defaultExclusiveJoinTask": [], "asyncComplete": false, @@ -109,5 +112,6 @@ "restartable": true, "workflowStatusListenerEnabled": false, "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0 + "timeoutSeconds": 0, + "ownerEmail": "test@harness.com" } diff --git a/test-harness/src/test/resources/do_while_integration_test.json b/test-harness/src/test/resources/do_while_integration_test.json index 2d91db174b..e6723a3ed2 100644 --- a/test-harness/src/test/resources/do_while_integration_test.json +++ b/test-harness/src/test/resources/do_while_integration_test.json @@ -94,7 +94,10 @@ "defaultCase": [], "forkTasks": [], "startDelay": 0, - "joinOn": ["integration_task_1", "integration_task_2"], + "joinOn": [ + "integration_task_1", + "integration_task_2" + ], "optional": false, "defaultExclusiveJoinTask": [], "asyncComplete": false, @@ -109,5 +112,6 @@ "restartable": true, "workflowStatusListenerEnabled": false, "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0 + "timeoutSeconds": 0, + "ownerEmail": "test@harness.com" } \ No newline at end of file diff --git a/test-harness/src/test/resources/do_while_multiple_integration_test.json b/test-harness/src/test/resources/do_while_multiple_integration_test.json index 74078e2903..1da5bc2db1 100644 --- a/test-harness/src/test/resources/do_while_multiple_integration_test.json +++ b/test-harness/src/test/resources/do_while_multiple_integration_test.json @@ -94,7 +94,10 @@ "defaultCase": [], "forkTasks": [], "startDelay": 0, - "joinOn": ["integration_task_1", "integration_task_2"], + "joinOn": [ + "integration_task_1", + "integration_task_2" + ], "optional": false, "defaultExclusiveJoinTask": [], "asyncComplete": false, @@ -143,5 +146,6 @@ "restartable": true, "workflowStatusListenerEnabled": false, "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0 + "timeoutSeconds": 0, + "ownerEmail": "test@harness.com" } \ No newline at end of file diff --git a/test-harness/src/test/resources/dynamic_fork_join_integration_test.json b/test-harness/src/test/resources/dynamic_fork_join_integration_test.json index 2a2bf45dc0..ec5f11fecc 100644 --- a/test-harness/src/test/resources/dynamic_fork_join_integration_test.json +++ b/test-harness/src/test/resources/dynamic_fork_join_integration_test.json @@ -1,109 +1,120 @@ { - "name" : "DynamicFanInOutTest", - "description" : "DynamicFanInOutTest", - "version" : 1, - "tasks" : [ { - "name" : "integration_task_1", - "taskReferenceName" : "dt1", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}" + "name": "DynamicFanInOutTest", + "description": "DynamicFanInOutTest", + "version": 1, + "tasks": [ + { + "name": "integration_task_1", + "taskReferenceName": "dt1", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "taskDefinition": { + "createTime": 1590000191803, + "createdBy": "integration_app", + "name": "integration_task_1", + "description": "integration_task_1", + "retryCount": 1, + "timeoutSeconds": 120, + "inputKeys": [], + "outputKeys": [], + "timeoutPolicy": "TIME_OUT_WF", + "retryLogic": "FIXED", + "retryDelaySeconds": 60, + "responseTimeoutSeconds": 3600, + "inputTemplate": {}, + "rateLimitPerFrequency": 0, + "rateLimitFrequencyInSeconds": 1 + }, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] }, - "type" : "SIMPLE", - "decisionCases" : { }, - 
"defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "taskDefinition" : { - "createTime" : 1590000191803, - "createdBy" : "integration_app", - "name" : "integration_task_1", - "description" : "integration_task_1", - "retryCount" : 1, - "timeoutSeconds" : 120, - "inputKeys" : [ ], - "outputKeys" : [ ], - "timeoutPolicy" : "TIME_OUT_WF", - "retryLogic" : "FIXED", - "retryDelaySeconds" : 60, - "responseTimeoutSeconds" : 3600, - "inputTemplate" : { }, - "rateLimitPerFrequency" : 0, - "rateLimitFrequencyInSeconds" : 1 + { + "name": "fork", + "taskReferenceName": "dynamicfanouttask", + "inputParameters": { + "dynamicTasks": "${dt1.output.dynamicTasks}", + "dynamicTasksInput": "${dt1.output.dynamicTasksInput}" + }, + "type": "FORK_JOIN_DYNAMIC", + "decisionCases": {}, + "dynamicForkTasksParam": "dynamicTasks", + "dynamicForkTasksInputParamName": "dynamicTasksInput", + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] }, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "taskReferenceName" : "dynamicfanouttask", - "inputParameters" : { - "dynamicTasks" : "${dt1.output.dynamicTasks}", - "dynamicTasksInput" : "${dt1.output.dynamicTasksInput}" + { + "name": "join", + "taskReferenceName": "dynamicfanouttask_join", + "inputParameters": {}, + "type": "JOIN", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] }, - "type" : "FORK_JOIN_DYNAMIC", - "decisionCases" : { }, - "dynamicForkTasksParam" : "dynamicTasks", - "dynamicForkTasksInputParamName" : "dynamicTasksInput", - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "taskReferenceName" : "dynamicfanouttask_join", - "inputParameters" : { }, - "type" : "JOIN", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "name" : "integration_task_4", - "taskReferenceName" : "task4", - "inputParameters" : { }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "taskDefinition" : { - "createTime" : 1590000191806, - "createdBy" : "integration_app", - "name" : "integration_task_4", - "description" : "integration_task_4", - "retryCount" : 1, - "timeoutSeconds" : 120, - "inputKeys" : [ ], - "outputKeys" : [ ], - "timeoutPolicy" : "TIME_OUT_WF", - "retryLogic" : "FIXED", - "retryDelaySeconds" : 60, - "responseTimeoutSeconds" : 3600, - "inputTemplate" : { }, - "rateLimitPerFrequency" : 0, - "rateLimitFrequencyInSeconds" : 1 - }, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], - "inputParameters" : [ "param1", "param2" ], - "outputParameters" : { }, - "schemaVersion" : 2, - "restartable" : true, - "workflowStatusListenerEnabled" : false, - "timeoutPolicy" : "ALERT_ONLY", - "timeoutSeconds" : 0 + { + "name": "integration_task_4", + "taskReferenceName": "task4", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + 
"startDelay": 0, + "joinOn": [], + "optional": false, + "taskDefinition": { + "createTime": 1590000191806, + "createdBy": "integration_app", + "name": "integration_task_4", + "description": "integration_task_4", + "retryCount": 1, + "timeoutSeconds": 120, + "inputKeys": [], + "outputKeys": [], + "timeoutPolicy": "TIME_OUT_WF", + "retryLogic": "FIXED", + "retryDelaySeconds": 60, + "responseTimeoutSeconds": 3600, + "inputTemplate": {}, + "rateLimitPerFrequency": 0, + "rateLimitFrequencyInSeconds": 1 + }, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "inputParameters": [ + "param1", + "param2" + ], + "outputParameters": {}, + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "ownerEmail": "test@harness.com" } \ No newline at end of file diff --git a/test-harness/src/test/resources/event_workflow_integration_test.json b/test-harness/src/test/resources/event_workflow_integration_test.json index 74ac73ff8b..d7aa466a8b 100644 --- a/test-harness/src/test/resources/event_workflow_integration_test.json +++ b/test-harness/src/test/resources/event_workflow_integration_test.json @@ -1,41 +1,45 @@ { - "name" : "test_event_workflow", - "version" : 1, - "tasks" : [ { - "name" : "eventX", - "taskReferenceName" : "wait0", - "inputParameters" : { }, - "type" : "EVENT", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "sink" : "conductor", - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "name" : "integration_task_1", - "taskReferenceName" : "t1", - "inputParameters" : { }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], - "inputParameters" : [ ], - "outputParameters" : { }, - "schemaVersion" : 2, - "restartable" : true, - "workflowStatusListenerEnabled" : false, - "timeoutPolicy" : "ALERT_ONLY", - "timeoutSeconds" : 0 + "name": "test_event_workflow", + "version": 1, + "tasks": [ + { + "name": "eventX", + "taskReferenceName": "wait0", + "inputParameters": {}, + "type": "EVENT", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "sink": "conductor", + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + }, + { + "name": "integration_task_1", + "taskReferenceName": "t1", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "inputParameters": [], + "outputParameters": {}, + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "ownerEmail": "test@harness.com" } \ No newline at end of file diff --git a/test-harness/src/test/resources/exclusive_join_integration_test.json b/test-harness/src/test/resources/exclusive_join_integration_test.json new file mode 100644 index 0000000000..17b6671dff --- /dev/null +++ b/test-harness/src/test/resources/exclusive_join_integration_test.json @@ -0,0 +1,114 @@ +{ + "name": "ExclusiveJoinTestWorkflow", + "description": "Exclusive Join Test Workflow", + "version": 
1, + "tasks": [ + { + "name": "integration_task_1", + "taskReferenceName": "task1", + "inputParameters": { + "payload": "${workflow.input.payload}" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + { + "name": "decide_task", + "taskReferenceName": "decision1", + "inputParameters": { + "decision_1": "${workflow.input.decision_1}" + }, + "type": "DECISION", + "caseValueParam": "decision_1", + "decisionCases": { + "true": [ + { + "name": "integration_task_2", + "taskReferenceName": "task2", + "inputParameters": { + "payload": "${task1.output.payload}" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + { + "name": "decide_task", + "taskReferenceName": "decision2", + "inputParameters": { + "decision_2": "${workflow.input.decision_2}" + }, + "type": "DECISION", + "caseValueParam": "decision_2", + "decisionCases": { + "true": [ + { + "name": "integration_task_3", + "taskReferenceName": "task3", + "inputParameters": { + "payload": "${task2.output.payload}" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ] + } + } + ], + "false": [ + { + "name": "integration_task_4", + "taskReferenceName": "task4", + "inputParameters": { + "payload": "${task1.output.payload}" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + { + "name": "decide_task", + "taskReferenceName": "decision3", + "inputParameters": { + "decision_3": "${workflow.input.decision_3}" + }, + "type": "DECISION", + "caseValueParam": "decision_3", + "decisionCases": { + "true": [ + { + "name": "integration_task_5", + "taskReferenceName": "task5", + "inputParameters": { + "payload": "${task4.output.payload}" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ] + } + } + ] + } + }, + { + "name": "exclusive_join", + "taskReferenceName": "exclusiveJoin", + "type": "EXCLUSIVE_JOIN", + "joinOn": [ + "task3", + "task5" + ], + "defaultExclusiveJoinTask": [ + "task2", + "task4", + "task1" + ] + } + ], + "schemaVersion": 2, + "ownerEmail": "test@harness.com" +} \ No newline at end of file diff --git a/test-harness/src/test/resources/failure_workflow_for_terminate_task_workflow.json b/test-harness/src/test/resources/failure_workflow_for_terminate_task_workflow.json index c88bd13934..c0ad47d05b 100644 --- a/test-harness/src/test/resources/failure_workflow_for_terminate_task_workflow.json +++ b/test-harness/src/test/resources/failure_workflow_for_terminate_task_workflow.json @@ -1,29 +1,32 @@ { - "name" : "failure_workflow", - "version" : 1, - "tasks" : [ { - "name" : "lambda", - "taskReferenceName" : "lambda0", - "inputParameters" : { - "input" : "${workflow.input}", - "scriptExpression" : "if ($.input.a==1){return {testvalue: true}} else{return {testvalue: false}}" - }, - "type" : "LAMBDA", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], - "inputParameters" : [ ], - "outputParameters" : { }, - "schemaVersion" : 2, - "restartable" : true, - "workflowStatusListenerEnabled" : false, - "timeoutPolicy" : "ALERT_ONLY", - "timeoutSeconds" : 0 + "name": "failure_workflow", + "version": 1, + "tasks": [ + { + "name": "lambda", + "taskReferenceName": "lambda0", + "inputParameters": { + "input": "${workflow.input}", + "scriptExpression": "if ($.input.a==1){return {testvalue: true}} else{return {testvalue: false}}" + }, + "type": "LAMBDA", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + 
"startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "inputParameters": [], + "outputParameters": {}, + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "ownerEmail": "test@harness.com" } \ No newline at end of file diff --git a/test-harness/src/test/resources/fork_join_integration_test.json b/test-harness/src/test/resources/fork_join_integration_test.json index 16cd531e07..7e99338405 100644 --- a/test-harness/src/test/resources/fork_join_integration_test.json +++ b/test-harness/src/test/resources/fork_join_integration_test.json @@ -1,105 +1,126 @@ { - "name" : "FanInOutTest", - "description" : "FanInOutTest", - "version" : 1, - "tasks" : [ { - "taskReferenceName" : "fanouttask", - "inputParameters" : { }, - "type" : "FORK_JOIN", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ [ { - "name" : "integration_task_1", - "taskReferenceName" : "t1", - "inputParameters" : { - "p1" : "workflow.input.param1", - "p2" : "workflow.input.param2" - }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "name" : "integration_task_3", - "taskReferenceName" : "t3", - "inputParameters" : { - "p1" : "workflow.input.param1", - "p2" : "workflow.input.param2" - }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], [ { - "name" : "integration_task_2", - "taskReferenceName" : "t2", - "inputParameters" : { - "tp1" : "workflow.input.param1" - }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ] ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "taskReferenceName" : "fanouttask_join", - "inputParameters" : { }, - "type" : "JOIN", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ "t3", "t2" ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "name" : "integration_task_4", - "taskReferenceName" : "t4", - "inputParameters" : { - "tp1" : "workflow.input.param1" + "name": "FanInOutTest", + "description": "FanInOutTest", + "version": 1, + "tasks": [ + { + "name": "fork", + "taskReferenceName": "fanouttask", + "inputParameters": {}, + "type": "FORK_JOIN", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [ + [ + { + "name": "integration_task_1", + "taskReferenceName": "t1", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + }, + { + "name": "integration_task_3", + "taskReferenceName": "t3", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2" + }, + "type": 
"SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + [ + { + "name": "integration_task_2", + "taskReferenceName": "t2", + "inputParameters": { + "tp1": "workflow.input.param1" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ] + ], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], - "inputParameters" : [ "param1", "param2" ], - "outputParameters" : { }, - "schemaVersion" : 2, - "restartable" : true, - "workflowStatusListenerEnabled" : false, - "timeoutPolicy" : "ALERT_ONLY", - "timeoutSeconds" : 0 + { + "name": "join", + "taskReferenceName": "fanouttask_join", + "inputParameters": {}, + "type": "JOIN", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [ + "t3", + "t2" + ], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + }, + { + "name": "integration_task_4", + "taskReferenceName": "t4", + "inputParameters": { + "tp1": "workflow.input.param1" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "inputParameters": [ + "param1", + "param2" + ], + "outputParameters": {}, + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "ownerEmail": "test@harness.com" } \ No newline at end of file diff --git a/test-harness/src/test/resources/fork_join_sub_workflow.json b/test-harness/src/test/resources/fork_join_sub_workflow.json index 10600d5709..1401751636 100644 --- a/test-harness/src/test/resources/fork_join_sub_workflow.json +++ b/test-harness/src/test/resources/fork_join_sub_workflow.json @@ -2,74 +2,91 @@ "name": "integration_test_fork_join_sw", "description": "integration_test_fork_join_sw", "version": 1, - "tasks": [{ - "taskReferenceName": "fanouttask", - "inputParameters": {}, - "type": "FORK_JOIN", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [ - [{ - "name": "sub_workflow_task", - "taskReferenceName": "st1", - "inputParameters": {}, - "type": "SUB_WORKFLOW", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_workflow" - }, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }], [{ - "name" : "integration_task_2", - "taskReferenceName" : "t2", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}" - }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }] - ], - "startDelay": 0, - "joinOn": [], - "optional": false, 
- "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, { - "taskReferenceName": "fanouttask_join", - "inputParameters": {}, - "type": "JOIN", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": ["st1", "t2"], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }], - "inputParameters": ["param1", "param2"], + "tasks": [ + { + "name": "fork", + "taskReferenceName": "fanouttask", + "inputParameters": {}, + "type": "FORK_JOIN", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [ + [ + { + "name": "sub_workflow_task", + "taskReferenceName": "st1", + "inputParameters": {}, + "type": "SUB_WORKFLOW", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "subWorkflowParam": { + "name": "sub_workflow" + }, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + [ + { + "name": "integration_task_2", + "taskReferenceName": "t2", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ] + ], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + }, + { + "name": "join", + "taskReferenceName": "fanouttask_join", + "inputParameters": {}, + "type": "JOIN", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [ + "st1", + "t2" + ], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "inputParameters": [ + "param1", + "param2" + ], "outputParameters": {}, "schemaVersion": 2, "restartable": true, "workflowStatusListenerEnabled": false, "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0 + "timeoutSeconds": 0, + "ownerEmail": "test@harness.com" } \ No newline at end of file diff --git a/test-harness/src/test/resources/fork_join_with_no_task_retry_integration_test.json b/test-harness/src/test/resources/fork_join_with_no_task_retry_integration_test.json index 8069c6006a..ffdaf97f1a 100644 --- a/test-harness/src/test/resources/fork_join_with_no_task_retry_integration_test.json +++ b/test-harness/src/test/resources/fork_join_with_no_task_retry_integration_test.json @@ -1,105 +1,126 @@ { - "name" : "FanInOutTest_2", - "description" : "FanInOutTest_2", - "version" : 1, - "tasks" : [ { - "taskReferenceName" : "fanouttask", - "inputParameters" : { }, - "type" : "FORK_JOIN", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ [ { - "name" : "integration_task_0_RT_1", - "taskReferenceName" : "t1", - "inputParameters" : { - "p1" : "workflow.input.param1", - "p2" : "workflow.input.param2" - }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "name" : "integration_task_0_RT_3", - "taskReferenceName" : "t3", - "inputParameters" : { - "p1" : "workflow.input.param1", - "p2" : "workflow.input.param2" - }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - 
"defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], [ { - "name" : "integration_task_0_RT_2", - "taskReferenceName" : "t2", - "inputParameters" : { - "tp1" : "workflow.input.param1" - }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ] ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "taskReferenceName" : "fanouttask_join", - "inputParameters" : { }, - "type" : "JOIN", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ "t3", "t2" ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "name" : "integration_task_0_RT_4", - "taskReferenceName" : "t4", - "inputParameters" : { - "tp1" : "workflow.input.param1" + "name": "FanInOutTest_2", + "description": "FanInOutTest_2", + "version": 1, + "tasks": [ + { + "name": "fork", + "taskReferenceName": "fanouttask", + "inputParameters": {}, + "type": "FORK_JOIN", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [ + [ + { + "name": "integration_task_0_RT_1", + "taskReferenceName": "t1", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + }, + { + "name": "integration_task_0_RT_3", + "taskReferenceName": "t3", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + [ + { + "name": "integration_task_0_RT_2", + "taskReferenceName": "t2", + "inputParameters": { + "tp1": "workflow.input.param1" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ] + ], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], - "inputParameters" : [ "param1", "param2" ], - "outputParameters" : { }, - "schemaVersion" : 2, - "restartable" : true, - "workflowStatusListenerEnabled" : false, - "timeoutPolicy" : "ALERT_ONLY", - "timeoutSeconds" : 0 + { + "name": "join", + "taskReferenceName": "fanouttask_join", + "inputParameters": {}, + "type": "JOIN", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [ + "t3", + "t2" + ], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + }, + { + "name": "integration_task_0_RT_4", + "taskReferenceName": "t4", + "inputParameters": { + "tp1": "workflow.input.param1" + }, + "type": "SIMPLE", + "decisionCases": {}, + 
"defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "inputParameters": [ + "param1", + "param2" + ], + "outputParameters": {}, + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "ownerEmail": "test@harness.com" } \ No newline at end of file diff --git a/test-harness/src/test/resources/fork_join_with_optional_sub_workflow_forks_integration_test.json b/test-harness/src/test/resources/fork_join_with_optional_sub_workflow_forks_integration_test.json index 624b3dcf41..35ea60d7d7 100644 --- a/test-harness/src/test/resources/fork_join_with_optional_sub_workflow_forks_integration_test.json +++ b/test-harness/src/test/resources/fork_join_with_optional_sub_workflow_forks_integration_test.json @@ -1,71 +1,92 @@ { - "name" : "integration_test_fork_join_optional_sw", - "description" : "integration_test_fork_join_optional_sw", - "version" : 1, - "tasks" : [ { - "taskReferenceName" : "fanouttask", - "inputParameters" : { }, - "type" : "FORK_JOIN", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ [ { - "taskReferenceName" : "st1", - "inputParameters" : { }, - "type" : "SUB_WORKFLOW", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "subWorkflowParam" : { - "name" : "sub_workflow" - }, - "joinOn" : [ ], - "optional" : true, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], [ { - "taskReferenceName" : "st2", - "inputParameters" : { }, - "type" : "SUB_WORKFLOW", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "subWorkflowParam" : { - "name" : "sub_workflow" - }, - "joinOn" : [ ], - "optional" : true, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ] ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "taskReferenceName" : "fanouttask_join", - "inputParameters" : { }, - "type" : "JOIN", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ "st1", "st2" ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], - "inputParameters" : [ "param1", "param2" ], - "outputParameters" : { }, - "schemaVersion" : 2, - "restartable" : true, - "workflowStatusListenerEnabled" : false, - "timeoutPolicy" : "ALERT_ONLY", - "timeoutSeconds" : 0 + "name": "integration_test_fork_join_optional_sw", + "description": "integration_test_fork_join_optional_sw", + "version": 1, + "tasks": [ + { + "name": "fork", + "taskReferenceName": "fanouttask", + "inputParameters": {}, + "type": "FORK_JOIN", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [ + [ + { + "name": "st1", + "taskReferenceName": "st1", + "inputParameters": {}, + "type": "SUB_WORKFLOW", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "subWorkflowParam": { + "name": "sub_workflow" + }, + "joinOn": [], + "optional": true, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + [ + { + "name": "st2", + "taskReferenceName": "st2", + "inputParameters": {}, + "type": "SUB_WORKFLOW", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "subWorkflowParam": { + "name": 
"sub_workflow" + }, + "joinOn": [], + "optional": true, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ] + ], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + }, + { + "name": "join", + "taskReferenceName": "fanouttask_join", + "inputParameters": {}, + "type": "JOIN", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [ + "st1", + "st2" + ], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "inputParameters": [ + "param1", + "param2" + ], + "outputParameters": {}, + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "ownerEmail": "test@harness.com" } \ No newline at end of file diff --git a/test-harness/src/test/resources/nested_fork_join_integration_test.json b/test-harness/src/test/resources/nested_fork_join_integration_test.json index ed54e4b384..17f607ae66 100644 --- a/test-harness/src/test/resources/nested_fork_join_integration_test.json +++ b/test-harness/src/test/resources/nested_fork_join_integration_test.json @@ -1,304 +1,348 @@ { - "name" : "FanInOutNestedTest", - "description" : "FanInOutNestedTest", - "version" : 1, - "tasks" : [ { - "taskReferenceName" : "fork1", - "inputParameters" : { }, - "type" : "FORK_JOIN", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ [ { - "name" : "integration_task_11", - "taskReferenceName" : "t11", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}", - "case" : "${workflow.input.case}" - }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], [ { - "name" : "fork2", - "taskReferenceName" : "fork2", - "inputParameters" : { }, - "type" : "FORK_JOIN", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ [ { - "name" : "integration_task_12", - "taskReferenceName" : "t12", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}", - "case" : "${workflow.input.case}" - }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "name" : "integration_task_14", - "taskReferenceName" : "t14", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}", - "case" : "${workflow.input.case}" - }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], [ { - "name" : "integration_task_13", - "taskReferenceName" : "t13", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}", - "case" : "${workflow.input.case}" - }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "name" : "Decision", - "taskReferenceName" : "d1", - 
"inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}", - "case" : "${workflow.input.case}" - }, - "type" : "DECISION", - "caseValueParam" : "case", - "decisionCases" : { - "a" : [ { - "name" : "integration_task_16", - "taskReferenceName" : "t16", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}", - "case" : "${workflow.input.case}" - }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "name" : "integration_task_19", - "taskReferenceName" : "t19", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}", - "case" : "${workflow.input.case}" - }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "name" : "integration_task_20", - "taskReferenceName" : "t20", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}", - "case" : "${workflow.input.case}" - }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], - "b" : [ { - "name" : "integration_task_17", - "taskReferenceName" : "t17", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}", - "case" : "${workflow.input.case}" + "name": "FanInOutNestedTest", + "description": "FanInOutNestedTest", + "version": 1, + "tasks": [ + { + "name": "fork1", + "taskReferenceName": "fork1", + "inputParameters": {}, + "type": "FORK_JOIN", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [ + [ + { + "name": "integration_task_11", + "taskReferenceName": "t11", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}", + "case": "${workflow.input.case}" }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "name" : "integration_task_20", - "taskReferenceName" : "t20", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}", - "case" : "${workflow.input.case}" - }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ] - }, - "defaultCase" : [ { - "name" : "integration_task_18", - "taskReferenceName" : "t18", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}", - "case" : "${workflow.input.case}" - }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "name" : "integration_task_20", - "taskReferenceName" : "t20", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}", - "case" 
: "${workflow.input.case}" + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + [ + { + "name": "fork2", + "taskReferenceName": "fork2", + "inputParameters": {}, + "type": "FORK_JOIN", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [ + [ + { + "name": "integration_task_12", + "taskReferenceName": "t12", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}", + "case": "${workflow.input.case}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + }, + { + "name": "integration_task_14", + "taskReferenceName": "t14", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}", + "case": "${workflow.input.case}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + [ + { + "name": "integration_task_13", + "taskReferenceName": "t13", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}", + "case": "${workflow.input.case}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + }, + { + "name": "Decision", + "taskReferenceName": "d1", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}", + "case": "${workflow.input.case}" + }, + "type": "DECISION", + "caseValueParam": "case", + "decisionCases": { + "a": [ + { + "name": "integration_task_16", + "taskReferenceName": "t16", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}", + "case": "${workflow.input.case}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + }, + { + "name": "integration_task_19", + "taskReferenceName": "t19", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}", + "case": "${workflow.input.case}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + }, + { + "name": "integration_task_20", + "taskReferenceName": "t20", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}", + "case": "${workflow.input.case}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "b": [ + { + "name": "integration_task_17", + "taskReferenceName": "t17", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}", + "case": "${workflow.input.case}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 
0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + }, + { + "name": "integration_task_20", + "taskReferenceName": "t20b", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}", + "case": "${workflow.input.case}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ] + }, + "defaultCase": [ + { + "name": "integration_task_18", + "taskReferenceName": "t18", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}", + "case": "${workflow.input.case}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + }, + { + "name": "integration_task_20", + "taskReferenceName": "t20def", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}", + "case": "${workflow.input.case}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ] + ], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ] ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "taskReferenceName" : "join2", - "inputParameters" : { }, - "type" : "JOIN", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ "t14", "t20" ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ] ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "taskReferenceName" : "join1", - "inputParameters" : { }, - "type" : "JOIN", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ "t11", "join2" ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "name" : "integration_task_15", - "taskReferenceName" : "t15", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}", - "case" : "${workflow.input.case}" + { + "name": "join2", + "taskReferenceName": "join2", + "inputParameters": {}, + "type": "JOIN", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [ + "t14", + "t20" + ], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": 
false, + "loopOver": [] + } + ] + ], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], - "inputParameters" : [ "param1", "param2" ], - "outputParameters" : { }, - "schemaVersion" : 2, - "restartable" : true, - "workflowStatusListenerEnabled" : false, - "timeoutPolicy" : "ALERT_ONLY", - "timeoutSeconds" : 0 + { + "name": "join1", + "taskReferenceName": "join1", + "inputParameters": {}, + "type": "JOIN", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [ + "t11", + "join2" + ], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + }, + { + "name": "integration_task_15", + "taskReferenceName": "t15", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}", + "case": "${workflow.input.case}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "inputParameters": [ + "param1", + "param2" + ], + "outputParameters": {}, + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "ownerEmail": "test@harness.com" } \ No newline at end of file diff --git a/test-harness/src/test/resources/nested_fork_join_with_sub_workflow_integration_test.json b/test-harness/src/test/resources/nested_fork_join_with_sub_workflow_integration_test.json index 080e851e2f..6c9cbbc7b3 100644 --- a/test-harness/src/test/resources/nested_fork_join_with_sub_workflow_integration_test.json +++ b/test-harness/src/test/resources/nested_fork_join_with_sub_workflow_integration_test.json @@ -1,320 +1,369 @@ { - "name" : "FanInOutNestedSubWorkflowTest", - "description" : "FanInOutNestedSubWorkflowTest", - "version" : 1, - "tasks" : [ { - "taskReferenceName" : "fork1", - "inputParameters" : { }, - "type" : "FORK_JOIN", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ [ { - "name" : "integration_task_11", - "taskReferenceName" : "t11", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}", - "case" : "${workflow.input.case}" - }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], [ { - "name" : "fork2", - "taskReferenceName" : "fork2", - "inputParameters" : { }, - "type" : "FORK_JOIN", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ [ { - "name" : "integration_task_12", - "taskReferenceName" : "t12", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}", - "case" : "${workflow.input.case}" - }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "name" : "integration_task_14", - "taskReferenceName" : "t14", - "inputParameters" : { - "p1" : 
"${workflow.input.param1}", - "p2" : "${workflow.input.param2}", - "case" : "${workflow.input.case}" - }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], [ { - "name" : "integration_task_13", - "taskReferenceName" : "t13", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}", - "case" : "${workflow.input.case}" - }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "name" : "Decision", - "taskReferenceName" : "d1", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}", - "case" : "${workflow.input.case}" - }, - "type" : "DECISION", - "caseValueParam" : "case", - "decisionCases" : { - "a" : [ { - "name" : "integration_task_16", - "taskReferenceName" : "t16", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}", - "case" : "${workflow.input.case}" - }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "name" : "integration_task_19", - "taskReferenceName" : "t19", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}", - "case" : "${workflow.input.case}" - }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "name" : "integration_task_20", - "taskReferenceName" : "t20", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}", - "case" : "${workflow.input.case}" - }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], - "b" : [ { - "name" : "integration_task_17", - "taskReferenceName" : "t17", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}", - "case" : "${workflow.input.case}" - }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "name" : "integration_task_20", - "taskReferenceName" : "t20", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}", - "case" : "${workflow.input.case}" + "name": "FanInOutNestedSubWorkflowTest", + "description": "FanInOutNestedSubWorkflowTest", + "version": 1, + "tasks": [ + { + "name": "fork1", + "taskReferenceName": "fork1", + "inputParameters": {}, + "type": "FORK_JOIN", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [ + [ + { + "name": "integration_task_11", + "taskReferenceName": "t11", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}", + "case": "${workflow.input.case}" }, - "type" : 
"SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ] - }, - "defaultCase" : [ { - "name" : "integration_task_18", - "taskReferenceName" : "t18", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}", - "case" : "${workflow.input.case}" + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + [ + { + "name": "fork2", + "taskReferenceName": "fork2", + "inputParameters": {}, + "type": "FORK_JOIN", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [ + [ + { + "name": "integration_task_12", + "taskReferenceName": "t12", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}", + "case": "${workflow.input.case}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + }, + { + "name": "integration_task_14", + "taskReferenceName": "t14", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}", + "case": "${workflow.input.case}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + [ + { + "name": "integration_task_13", + "taskReferenceName": "t13", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}", + "case": "${workflow.input.case}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + }, + { + "name": "Decision", + "taskReferenceName": "d1", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}", + "case": "${workflow.input.case}" + }, + "type": "DECISION", + "caseValueParam": "case", + "decisionCases": { + "a": [ + { + "name": "integration_task_16", + "taskReferenceName": "t16", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}", + "case": "${workflow.input.case}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + }, + { + "name": "integration_task_19", + "taskReferenceName": "t19", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}", + "case": "${workflow.input.case}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + }, + { + "name": "integration_task_20", + "taskReferenceName": "t20", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}", + "case": "${workflow.input.case}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + 
"joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "b": [ + { + "name": "integration_task_17", + "taskReferenceName": "t17", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}", + "case": "${workflow.input.case}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + }, + { + "name": "integration_task_20", + "taskReferenceName": "t20b", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}", + "case": "${workflow.input.case}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ] + }, + "defaultCase": [ + { + "name": "integration_task_18", + "taskReferenceName": "t18", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}", + "case": "${workflow.input.case}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + }, + { + "name": "integration_task_20", + "taskReferenceName": "t20def", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}", + "case": "${workflow.input.case}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ] + ], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "name" : "integration_task_20", - "taskReferenceName" : "t20", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}", - "case" : "${workflow.input.case}" - }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ] ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "taskReferenceName" : "join2", - "inputParameters" : { }, - "type" : "JOIN", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ "t14", "t20" ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], [ { - "taskReferenceName" : "sw1", - "inputParameters" : { }, - "type" : 
"SUB_WORKFLOW", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "subWorkflowParam" : { - "name" : "integration_test_wf" - }, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ] ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "taskReferenceName" : "join1", - "inputParameters" : { }, - "type" : "JOIN", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ "t11", "join2", "sw1" ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "name" : "integration_task_15", - "taskReferenceName" : "t15", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}", - "case" : "${workflow.input.case}" + { + "name": "join2", + "taskReferenceName": "join2", + "inputParameters": {}, + "type": "JOIN", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [ + "t14", + "t20" + ], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + [ + { + "name": "sw1", + "taskReferenceName": "sw1", + "inputParameters": {}, + "type": "SUB_WORKFLOW", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "subWorkflowParam": { + "name": "integration_test_wf" + }, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ] + ], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], - "inputParameters" : [ "param1", "param2" ], - "outputParameters" : { }, - "schemaVersion" : 2, - "restartable" : true, - "workflowStatusListenerEnabled" : false, - "timeoutPolicy" : "ALERT_ONLY", - "timeoutSeconds" : 0 + { + "name": "join1", + "taskReferenceName": "join1", + "inputParameters": {}, + "type": "JOIN", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [ + "t11", + "join2", + "sw1" + ], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + }, + { + "name": "integration_task_15", + "taskReferenceName": "t15", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}", + "case": "${workflow.input.case}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "inputParameters": [ + "param1", + "param2" + ], + "outputParameters": {}, + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "ownerEmail": "test@harness.com" } \ No newline at end of file diff --git a/test-harness/src/test/resources/rate_limited_simple_task_workflow_integration_test.json b/test-harness/src/test/resources/rate_limited_simple_task_workflow_integration_test.json index 61295599e3..5a04ca0c13 100644 --- 
a/test-harness/src/test/resources/rate_limited_simple_task_workflow_integration_test.json +++ b/test-harness/src/test/resources/rate_limited_simple_task_workflow_integration_test.json @@ -1,26 +1,29 @@ { - "name" : "test_rate_limit_simple_task_workflow", - "version" : 1, - "tasks" : [ { - "name" : "test_simple_task_with_rateLimits", - "taskReferenceName" : "test_simple_task_with_rateLimits", - "inputParameters" : { }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], - "inputParameters" : [ ], - "outputParameters" : { }, - "schemaVersion" : 2, - "restartable" : true, - "workflowStatusListenerEnabled" : false, - "timeoutPolicy" : "ALERT_ONLY", - "timeoutSeconds" : 0 + "name": "test_rate_limit_simple_task_workflow", + "version": 1, + "tasks": [ + { + "name": "test_simple_task_with_rateLimits", + "taskReferenceName": "test_simple_task_with_rateLimits", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "inputParameters": [], + "outputParameters": {}, + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "ownerEmail": "test@harness.com" } \ No newline at end of file diff --git a/test-harness/src/test/resources/rate_limited_system_task_workflow_integration_test.json b/test-harness/src/test/resources/rate_limited_system_task_workflow_integration_test.json index 465d880339..29690b6530 100644 --- a/test-harness/src/test/resources/rate_limited_system_task_workflow_integration_test.json +++ b/test-harness/src/test/resources/rate_limited_system_task_workflow_integration_test.json @@ -1,26 +1,29 @@ { - "name" : "test_rate_limit_system_task_workflow", - "version" : 1, - "tasks" : [ { - "name" : "test_task_with_rateLimits", - "taskReferenceName" : "test_task_with_rateLimits", - "inputParameters" : { }, - "type" : "USER_TASK", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], - "inputParameters" : [ ], - "outputParameters" : { }, - "schemaVersion" : 2, - "restartable" : true, - "workflowStatusListenerEnabled" : false, - "timeoutPolicy" : "ALERT_ONLY", - "timeoutSeconds" : 0 + "name": "test_rate_limit_system_task_workflow", + "version": 1, + "tasks": [ + { + "name": "test_task_with_rateLimits", + "taskReferenceName": "test_task_with_rateLimits", + "inputParameters": {}, + "type": "USER_TASK", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "inputParameters": [], + "outputParameters": {}, + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "ownerEmail": "test@harness.com" } \ No newline at end of file diff --git a/test-harness/src/test/resources/simple_decision_task_integration_test.json b/test-harness/src/test/resources/simple_decision_task_integration_test.json index aa44fe1120..3e69ada86a 100644 --- 
a/test-harness/src/test/resources/simple_decision_task_integration_test.json +++ b/test-harness/src/test/resources/simple_decision_task_integration_test.json @@ -1,100 +1,109 @@ { - "name" : "DecisionWorkflow", - "description" : "DecisionWorkflow", - "version" : 1, - "tasks" : [ { - "name" : "decisionTask", - "taskReferenceName" : "decisionTask", - "inputParameters" : { - "case" : "${workflow.input.case}" - }, - "type" : "DECISION", - "caseValueParam" : "case", - "decisionCases" : { - "c" : [ { - "name" : "integration_task_1", - "taskReferenceName" : "t1", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}" - }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "name" : "integration_task_2", - "taskReferenceName" : "t2", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}" - }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ] - }, - "defaultCase" : [ { - "name" : "integration_task_5", - "taskReferenceName" : "t5", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}" + "name": "DecisionWorkflow", + "description": "DecisionWorkflow", + "version": 1, + "tasks": [ + { + "name": "decisionTask", + "taskReferenceName": "decisionTask", + "inputParameters": { + "case": "${workflow.input.case}" }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "name" : "integration_task_20", - "taskReferenceName" : "t20", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}" + "type": "DECISION", + "caseValueParam": "case", + "decisionCases": { + "c": [ + { + "name": "integration_task_1", + "taskReferenceName": "t1", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + }, + { + "name": "integration_task_2", + "taskReferenceName": "t2", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ] + }, + "defaultCase": [ + { + "name": "integration_task_5", + "taskReferenceName": "t5", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + 
], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], - "inputParameters" : [ ], - "outputParameters" : { }, - "schemaVersion" : 2, - "restartable" : true, - "workflowStatusListenerEnabled" : false, - "timeoutPolicy" : "ALERT_ONLY", - "timeoutSeconds" : 0 + { + "name": "integration_task_20", + "taskReferenceName": "t20", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "inputParameters": [], + "outputParameters": {}, + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "ownerEmail": "test@harness.com" } \ No newline at end of file diff --git a/test-harness/src/test/resources/simple_json_jq_transform_integration_test.json b/test-harness/src/test/resources/simple_json_jq_transform_integration_test.json index 60135a8631..dc394775db 100644 --- a/test-harness/src/test/resources/simple_json_jq_transform_integration_test.json +++ b/test-harness/src/test/resources/simple_json_jq_transform_integration_test.json @@ -1,29 +1,32 @@ { - "name" : "test_json_jq_transform_wf", - "version" : 1, - "tasks" : [ { - "name" : "jq", - "taskReferenceName" : "jq_1", - "inputParameters" : { - "input" : "${workflow.input}", - "queryExpression": ".input as $_ | { out: ($_.in1.array + $_.in2.array) }" - }, - "type" : "JSON_JQ_TRANSFORM", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], - "inputParameters" : [ ], - "outputParameters" : { }, - "schemaVersion" : 2, - "restartable" : true, - "workflowStatusListenerEnabled" : false, - "timeoutPolicy" : "ALERT_ONLY", - "timeoutSeconds" : 0 + "name": "test_json_jq_transform_wf", + "version": 1, + "tasks": [ + { + "name": "jq", + "taskReferenceName": "jq_1", + "inputParameters": { + "input": "${workflow.input}", + "queryExpression": ".input as $_ | { out: ($_.in1.array + $_.in2.array) }" + }, + "type": "JSON_JQ_TRANSFORM", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "inputParameters": [], + "outputParameters": {}, + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "ownerEmail": "test@harness.com" } \ No newline at end of file diff --git a/test-harness/src/test/resources/simple_lambda_workflow_integration_test.json b/test-harness/src/test/resources/simple_lambda_workflow_integration_test.json index c142d83df5..1496e56b74 100644 --- a/test-harness/src/test/resources/simple_lambda_workflow_integration_test.json +++ b/test-harness/src/test/resources/simple_lambda_workflow_integration_test.json @@ -1,29 +1,32 @@ { - "name" : "test_lambda_wf", - "version" : 1, - 
"tasks" : [ { - "name" : "lambda", - "taskReferenceName" : "lambda0", - "inputParameters" : { - "input" : "${workflow.input}", - "scriptExpression" : "if ($.input.a==1){return {testvalue: true}} else{return {testvalue: false} }" - }, - "type" : "LAMBDA", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], - "inputParameters" : [ ], - "outputParameters" : { }, - "schemaVersion" : 2, - "restartable" : true, - "workflowStatusListenerEnabled" : false, - "timeoutPolicy" : "ALERT_ONLY", - "timeoutSeconds" : 0 + "name": "test_lambda_wf", + "version": 1, + "tasks": [ + { + "name": "lambda", + "taskReferenceName": "lambda0", + "inputParameters": { + "input": "${workflow.input}", + "scriptExpression": "if ($.input.a==1){return {testvalue: true}} else{return {testvalue: false} }" + }, + "type": "LAMBDA", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "inputParameters": [], + "outputParameters": {}, + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "ownerEmail": "test@harness.com" } \ No newline at end of file diff --git a/test-harness/src/test/resources/simple_one_task_sub_workflow_integration_test.json b/test-harness/src/test/resources/simple_one_task_sub_workflow_integration_test.json index 3dd18046a8..1cfcb8debe 100644 --- a/test-harness/src/test/resources/simple_one_task_sub_workflow_integration_test.json +++ b/test-harness/src/test/resources/simple_one_task_sub_workflow_integration_test.json @@ -1,27 +1,30 @@ { - "name" : "sub_workflow", - "description" : "sub_workflow", - "version" : 1, - "tasks" : [ { - "name" : "simple_task_in_sub_wf", - "taskReferenceName" : "t1", - "inputParameters" : { }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], - "inputParameters" : [ ], - "outputParameters" : { }, - "schemaVersion" : 2, - "restartable" : true, - "workflowStatusListenerEnabled" : false, - "timeoutPolicy" : "ALERT_ONLY", - "timeoutSeconds" : 0 + "name": "sub_workflow", + "description": "sub_workflow", + "version": 1, + "tasks": [ + { + "name": "simple_task_in_sub_wf", + "taskReferenceName": "t1", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "inputParameters": [], + "outputParameters": {}, + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "ownerEmail": "test@harness.com" } \ No newline at end of file diff --git a/test-harness/src/test/resources/simple_set_variable_workflow_integration_test.json b/test-harness/src/test/resources/simple_set_variable_workflow_integration_test.json index 022aa756d8..69e66ac1df 100644 --- a/test-harness/src/test/resources/simple_set_variable_workflow_integration_test.json +++ b/test-harness/src/test/resources/simple_set_variable_workflow_integration_test.json @@ -1,30 +1,33 
@@ { - "name" : "test_set_variable_wf", - "version" : 1, - "tasks" : [ { - "name" : "set_variable", - "taskReferenceName" : "set_variable_1", - "inputParameters" : { - "var" : "${workflow.input.var}" - }, - "type" : "SET_VARIABLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], - "inputParameters" : [ ], - "outputParameters" : { + "name": "test_set_variable_wf", + "version": 1, + "tasks": [ + { + "name": "set_variable", + "taskReferenceName": "set_variable_1", + "inputParameters": { + "var": "${workflow.input.var}" + }, + "type": "SET_VARIABLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "inputParameters": [], + "outputParameters": { "variables": "${workflow.variables}" }, - "schemaVersion" : 2, - "restartable" : true, - "workflowStatusListenerEnabled" : false, - "timeoutPolicy" : "ALERT_ONLY", - "timeoutSeconds" : 0 + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "ownerEmail": "test@harness.com" } \ No newline at end of file diff --git a/test-harness/src/test/resources/simple_wait_task_workflow_integration_test.json b/test-harness/src/test/resources/simple_wait_task_workflow_integration_test.json index d72ffeb49d..f6968a9a64 100644 --- a/test-harness/src/test/resources/simple_wait_task_workflow_integration_test.json +++ b/test-harness/src/test/resources/simple_wait_task_workflow_integration_test.json @@ -1,40 +1,44 @@ { - "name" : "test_wait_timeout", - "version" : 1, - "tasks" : [ { - "name" : "waitTimeout", - "taskReferenceName" : "wait0", - "inputParameters" : { }, - "type" : "WAIT", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "name" : "integration_task_1", - "taskReferenceName" : "t1", - "inputParameters" : { }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], - "inputParameters" : [ ], - "outputParameters" : { }, - "schemaVersion" : 2, - "restartable" : true, - "workflowStatusListenerEnabled" : false, - "timeoutPolicy" : "ALERT_ONLY", - "timeoutSeconds" : 0 + "name": "test_wait_timeout", + "version": 1, + "tasks": [ + { + "name": "waitTimeout", + "taskReferenceName": "wait0", + "inputParameters": {}, + "type": "WAIT", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + }, + { + "name": "integration_task_1", + "taskReferenceName": "t1", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "inputParameters": [], + "outputParameters": {}, + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "timeoutPolicy": "ALERT_ONLY", + 
"timeoutSeconds": 0, + "ownerEmail": "test@harness.com" } \ No newline at end of file diff --git a/test-harness/src/test/resources/simple_workflow_1_integration_test.json b/test-harness/src/test/resources/simple_workflow_1_integration_test.json index 3df1606e9c..7336e4f9e9 100644 --- a/test-harness/src/test/resources/simple_workflow_1_integration_test.json +++ b/test-harness/src/test/resources/simple_workflow_1_integration_test.json @@ -1,55 +1,62 @@ { - "name" : "integration_test_wf", - "description" : "integration_test_wf", - "version" : 1, - "tasks" : [ { - "name" : "integration_task_1", - "taskReferenceName" : "t1", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}", - "p3" : "${CPEWF_TASK_ID}", - "someNullKey" : null + "name": "integration_test_wf", + "description": "integration_test_wf", + "version": 1, + "tasks": [ + { + "name": "integration_task_1", + "taskReferenceName": "t1", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}", + "p3": "${CPEWF_TASK_ID}", + "someNullKey": null + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "name" : "integration_task_2", - "taskReferenceName" : "t2", - "inputParameters" : { - "tp1" : "${workflow.input.param1}", - "tp2" : "${t1.output.op}", - "tp3" : "${CPEWF_TASK_ID}" - }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], - "inputParameters" : [ "param1", "param2" ], - "outputParameters" : { - "o1" : "${workflow.input.param1}", - "o2" : "${t2.output.uuid}", - "o3" : "${t1.output.op}" + { + "name": "integration_task_2", + "taskReferenceName": "t2", + "inputParameters": { + "tp1": "${workflow.input.param1}", + "tp2": "${t1.output.op}", + "tp3": "${CPEWF_TASK_ID}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "inputParameters": [ + "param1", + "param2" + ], + "outputParameters": { + "o1": "${workflow.input.param1}", + "o2": "${t2.output.uuid}", + "o3": "${t1.output.op}" }, - "failureWorkflow" : "$workflow.input.failureWfName", - "schemaVersion" : 2, - "restartable" : true, - "workflowStatusListenerEnabled" : false, - "timeoutPolicy" : "ALERT_ONLY", - "timeoutSeconds" : 0 + "failureWorkflow": "$workflow.input.failureWfName", + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "ownerEmail": "test@harness.com" } \ No newline at end of file diff --git a/test-harness/src/test/resources/simple_workflow_3_integration_test.json b/test-harness/src/test/resources/simple_workflow_3_integration_test.json index 7bee58513e..4d5e687c51 100644 --- a/test-harness/src/test/resources/simple_workflow_3_integration_test.json +++ b/test-harness/src/test/resources/simple_workflow_3_integration_test.json @@ -1,65 +1,73 
@@ { - "name" : "integration_test_wf3", - "description" : "integration_test_wf3", - "version" : 1, - "tasks" : [ { - "name" : "integration_task_1", - "taskReferenceName" : "t1", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}", - "someNullKey" : null + "name": "integration_test_wf3", + "description": "integration_test_wf3", + "version": 1, + "tasks": [ + { + "name": "integration_task_1", + "taskReferenceName": "t1", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}", + "someNullKey": null + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "name" : "integration_task_2", - "taskReferenceName" : "t2", - "inputParameters" : { - "tp1" : "${workflow.input.param1}", - "tp2" : "${t1.output.op}" + { + "name": "integration_task_2", + "taskReferenceName": "t2", + "inputParameters": { + "tp1": "${workflow.input.param1}", + "tp2": "${t1.output.op}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "name" : "integration_task_3", - "taskReferenceName" : "t3", - "inputParameters" : { - "tp1" : "${workflow.input.param1}", - "tp2" : "${t1.output.op}" - }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], - "inputParameters" : [ "param1", "param2" ], - "outputParameters" : { }, - "schemaVersion" : 2, - "restartable" : true, - "workflowStatusListenerEnabled" : false, - "timeoutPolicy" : "ALERT_ONLY", - "timeoutSeconds" : 0 + { + "name": "integration_task_3", + "taskReferenceName": "t3", + "inputParameters": { + "tp1": "${workflow.input.param1}", + "tp2": "${t1.output.op}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "inputParameters": [ + "param1", + "param2" + ], + "outputParameters": {}, + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "ownerEmail": "test@harness.com" } \ No newline at end of file diff --git a/test-harness/src/test/resources/simple_workflow_with_async_complete_system_task_integration_test.json b/test-harness/src/test/resources/simple_workflow_with_async_complete_system_task_integration_test.json index 0b7687b5d7..d085bd33b5 100644 --- a/test-harness/src/test/resources/simple_workflow_with_async_complete_system_task_integration_test.json +++ 
b/test-harness/src/test/resources/simple_workflow_with_async_complete_system_task_integration_test.json @@ -1,52 +1,59 @@ { - "name" : "async_complete_integration_test_wf", - "description" : "async_complete_integration_test_wf", - "version" : 1, - "tasks" : [ { - "name" : "integration_task_1", - "taskReferenceName" : "t1", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}", - "p3" : "${CPEWF_TASK_ID}", - "someNullKey" : null + "name": "async_complete_integration_test_wf", + "description": "async_complete_integration_test_wf", + "version": 1, + "tasks": [ + { + "name": "integration_task_1", + "taskReferenceName": "t1", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}", + "p3": "${CPEWF_TASK_ID}", + "someNullKey": null + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "name": "user_task", - "taskReferenceName": "user_task", - "inputParameters": { - "input": "${t1.output.op}" - }, - "type": "USER_TASK", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": true, - "loopOver": [] - }], - "inputParameters" : [ "param1", "param2" ], - "outputParameters" : { - "o1" : "${workflow.input.param1}", - "o2" : "${user_task.output.uuid}", - "o3" : "${t1.output.op}" + { + "name": "user_task", + "taskReferenceName": "user_task", + "inputParameters": { + "input": "${t1.output.op}" + }, + "type": "USER_TASK", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": true, + "loopOver": [] + } + ], + "inputParameters": [ + "param1", + "param2" + ], + "outputParameters": { + "o1": "${workflow.input.param1}", + "o2": "${user_task.output.uuid}", + "o3": "${t1.output.op}" }, - "schemaVersion" : 2, - "restartable" : true, - "workflowStatusListenerEnabled" : false, - "timeoutPolicy" : "ALERT_ONLY", - "timeoutSeconds" : 0 + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "ownerEmail": "test@harness.com" } \ No newline at end of file diff --git a/test-harness/src/test/resources/simple_workflow_with_optional_task_integration_test.json b/test-harness/src/test/resources/simple_workflow_with_optional_task_integration_test.json index f1d4c53e5d..de280d601d 100644 --- a/test-harness/src/test/resources/simple_workflow_with_optional_task_integration_test.json +++ b/test-harness/src/test/resources/simple_workflow_with_optional_task_integration_test.json @@ -1,51 +1,58 @@ { - "name" : "optional_task_wf", - "description" : "optional_task_wf", - "version" : 1, - "tasks" : [ { - "name" : "task_optional", - "taskReferenceName" : "task_optional_t1", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}" + "name": "optional_task_wf", + "description": "optional_task_wf", + "version": 1, + "tasks": [ + { + "name": "task_optional", + "taskReferenceName": "t1", + 
"inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": true, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : true, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "name" : "integration_task_2", - "taskReferenceName" : "t2", - "inputParameters" : { - "tp1" : "${workflow.input.param1}", - "tp2" : "${t1.output.op}" - }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], - "inputParameters" : [ "param1", "param2" ], - "outputParameters" : { - "o1" : "${workflow.input.param1}", - "o2" : "${t2.output.uuid}", - "o3" : "${t1.output.op}" + { + "name": "integration_task_2", + "taskReferenceName": "t2", + "inputParameters": { + "tp1": "${workflow.input.param1}", + "tp2": "${t1.output.op}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "inputParameters": [ + "param1", + "param2" + ], + "outputParameters": { + "o1": "${workflow.input.param1}", + "o2": "${t2.output.uuid}", + "o3": "${t1.output.op}" }, - "schemaVersion" : 2, - "restartable" : true, - "workflowStatusListenerEnabled" : false, - "timeoutPolicy" : "ALERT_ONLY", - "timeoutSeconds" : 0 + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "ownerEmail": "test@harness.com" } \ No newline at end of file diff --git a/test-harness/src/test/resources/simple_workflow_with_resp_time_out_integration_test.json b/test-harness/src/test/resources/simple_workflow_with_resp_time_out_integration_test.json index 3428d40a78..812d9b5b83 100644 --- a/test-harness/src/test/resources/simple_workflow_with_resp_time_out_integration_test.json +++ b/test-harness/src/test/resources/simple_workflow_with_resp_time_out_integration_test.json @@ -1,52 +1,59 @@ { - "name" : "RTOWF", - "description" : "RTOWF", - "version" : 1, - "tasks" : [ { - "name" : "task_rt", - "taskReferenceName" : "task_rt_t1", - "inputParameters" : { - "p1" : "${workflow.input.param1}", - "p2" : "${workflow.input.param2}" + "name": "RTOWF", + "description": "RTOWF", + "version": 1, + "tasks": [ + { + "name": "task_rt", + "taskReferenceName": "t1", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "name" : "integration_task_2", - "taskReferenceName" : "t2", - "inputParameters" : { - "tp1" : "${workflow.input.param1}", - "tp2" : "${t1.output.op}" - }, - "type" : 
"SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], - "inputParameters" : [ "param1", "param2" ], - "outputParameters" : { - "o1" : "${workflow.input.param1}", - "o2" : "${t2.output.uuid}", - "o3" : "${t1.output.op}" + { + "name": "integration_task_2", + "taskReferenceName": "t2", + "inputParameters": { + "tp1": "${workflow.input.param1}", + "tp2": "${t1.output.op}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "inputParameters": [ + "param1", + "param2" + ], + "outputParameters": { + "o1": "${workflow.input.param1}", + "o2": "${t2.output.uuid}", + "o3": "${t1.output.op}" }, - "failureWorkflow" : "$workflow.input.failureWfName", - "schemaVersion" : 2, - "restartable" : true, - "workflowStatusListenerEnabled" : false, - "timeoutPolicy" : "ALERT_ONLY", - "timeoutSeconds" : 0 + "failureWorkflow": "$workflow.input.failureWfName", + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "ownerEmail": "test@harness.com" } \ No newline at end of file diff --git a/test-harness/src/test/resources/simple_workflow_with_sub_workflow_inline_def_integration_test.json b/test-harness/src/test/resources/simple_workflow_with_sub_workflow_inline_def_integration_test.json index 61cf1d1f96..de3d6dd67f 100644 --- a/test-harness/src/test/resources/simple_workflow_with_sub_workflow_inline_def_integration_test.json +++ b/test-harness/src/test/resources/simple_workflow_with_sub_workflow_inline_def_integration_test.json @@ -2,91 +2,104 @@ "name": "WorkflowWithInlineSubWorkflow", "description": "WorkflowWithInlineSubWorkflow", "version": 1, - "tasks": [{ - "name": "integration_task_1", - "taskReferenceName": "t1", - "inputParameters": { - "tp11": "${workflow.input.param1}", - "tp12": "${workflow.input.param2}" + "tasks": [ + { + "name": "integration_task_1", + "taskReferenceName": "t1", + "inputParameters": { + "tp11": "${workflow.input.param1}", + "tp12": "${workflow.input.param2}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, { - "taskReferenceName": "swt", - "inputParameters": { - "op": "${t1.output.op}", - "imageType": "${t1.output.imageType}" - }, - "type": "SUB_WORKFLOW", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "subWorkflowParam": { - "name": "one_task_workflow", - "version": 1, - "workflowDefinition": { + { + "name": "swt", + "taskReferenceName": "swt", + "inputParameters": { + "op": "${t1.output.op}", + "imageType": "${t1.output.imageType}" + }, + "type": "SUB_WORKFLOW", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "subWorkflowParam": { "name": "one_task_workflow", "version": 1, - "tasks": [{ - "name": "integration_task_3", - "taskReferenceName": "t3", - "inputParameters": { - "p1": 
"${workflow.input.imageType}" + "workflowDefinition": { + "name": "one_task_workflow", + "version": 1, + "tasks": [ + { + "name": "integration_task_3", + "taskReferenceName": "t3", + "inputParameters": { + "p1": "${workflow.input.imageType}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "inputParameters": [ + "imageType", + "op" + ], + "outputParameters": { + "op": "${t3.output.op}" }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }], - "inputParameters": ["imageType", "op"], - "outputParameters": { - "op": "${t3.output.op}" - }, - "schemaVersion": 2, - "restartable": true, - "workflowStatusListenerEnabled": false, - "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0 - } - }, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }, { - "name": "integration_task_2", - "taskReferenceName": "t2", - "inputParameters": { - "op": "${t1.output.op}" + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0 + } + }, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] }, - "type": "SIMPLE", - "decisionCases": {}, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "optional": false, - "defaultExclusiveJoinTask": [], - "asyncComplete": false, - "loopOver": [] - }], - "inputParameters": ["param1", "param2"], + { + "name": "integration_task_2", + "taskReferenceName": "t2", + "inputParameters": { + "op": "${t1.output.op}" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "inputParameters": [ + "param1", + "param2" + ], "outputParameters": { "o3": "${t1.output.op}" }, @@ -94,5 +107,6 @@ "restartable": true, "workflowStatusListenerEnabled": false, "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0 + "timeoutSeconds": 0, + "ownerEmail": "test@harness.com" } \ No newline at end of file diff --git a/test-harness/src/test/resources/terminate_task_completed_workflow_integration_test.json b/test-harness/src/test/resources/terminate_task_completed_workflow_integration_test.json index 1fe0f0f6c9..ee91b9928b 100644 --- a/test-harness/src/test/resources/terminate_task_completed_workflow_integration_test.json +++ b/test-harness/src/test/resources/terminate_task_completed_workflow_integration_test.json @@ -1,61 +1,66 @@ { - "name" : "test_terminate_task_wf", - "version" : 1, - "tasks" : [ { - "name" : "lambda", - "taskReferenceName" : "lambda0", - "inputParameters" : { - "input" : "${workflow.input}", - "scriptExpression" : "if ($.input.a==1){return {testvalue: true}} else{return {testvalue: false}}" + "name": "test_terminate_task_wf", + "version": 1, + "tasks": [ + { + "name": "lambda", + "taskReferenceName": "lambda0", + "inputParameters": { + "input": "${workflow.input}", + "scriptExpression": "if ($.input.a==1){return {testvalue: true}} else{return {testvalue: false}}" + }, + "type": "LAMBDA", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": 
[], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] }, - "type" : "LAMBDA", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "name" : "terminate", - "taskReferenceName" : "terminate0", - "inputParameters" : { - "terminationStatus" : "COMPLETED", - "workflowOutput" : "${lambda0.output}" + { + "name": "terminate", + "taskReferenceName": "terminate0", + "inputParameters": { + "terminationStatus": "COMPLETED", + "workflowOutput": "${lambda0.output}" + }, + "type": "TERMINATE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] }, - "type" : "TERMINATE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "name" : "integration_task_2", - "taskReferenceName" : "t2", - "inputParameters" : { }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], - "inputParameters" : [ ], - "outputParameters" : { }, - "failureWorkflow" : "failure_workflow", - "schemaVersion" : 2, - "restartable" : true, - "workflowStatusListenerEnabled" : false, - "timeoutPolicy" : "ALERT_ONLY", - "timeoutSeconds" : 0 + { + "name": "integration_task_2", + "taskReferenceName": "t2", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "inputParameters": [], + "outputParameters": {}, + "failureWorkflow": "failure_workflow", + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "ownerEmail": "test@harness.com" } \ No newline at end of file diff --git a/test-harness/src/test/resources/terminate_task_failed_workflow_integration.json b/test-harness/src/test/resources/terminate_task_failed_workflow_integration.json index 6bd83769d7..68be1fc674 100644 --- a/test-harness/src/test/resources/terminate_task_failed_workflow_integration.json +++ b/test-harness/src/test/resources/terminate_task_failed_workflow_integration.json @@ -1,61 +1,66 @@ { - "name" : "test_terminate_task_failed_wf", - "version" : 1, - "tasks" : [ { - "name" : "lambda", - "taskReferenceName" : "lambda0", - "inputParameters" : { - "input" : "${workflow.input}", - "scriptExpression" : "if ($.input.a==1){return {testvalue: true}} else{return {testvalue: false}}" + "name": "test_terminate_task_failed_wf", + "version": 1, + "tasks": [ + { + "name": "lambda", + "taskReferenceName": "lambda0", + "inputParameters": { + "input": "${workflow.input}", + "scriptExpression": "if ($.input.a==1){return {testvalue: true}} else{return {testvalue: false}}" + }, + "type": "LAMBDA", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + 
"loopOver": [] }, - "type" : "LAMBDA", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "name" : "terminate", - "taskReferenceName" : "terminate0", - "inputParameters" : { - "terminationStatus" : "FAILED", - "workflowOutput" : "${lambda0.output}" + { + "name": "terminate", + "taskReferenceName": "terminate0", + "inputParameters": { + "terminationStatus": "FAILED", + "workflowOutput": "${lambda0.output}" + }, + "type": "TERMINATE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] }, - "type" : "TERMINATE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "name" : "integration_task_2", - "taskReferenceName" : "t2", - "inputParameters" : { }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], - "inputParameters" : [ ], - "outputParameters" : { }, - "failureWorkflow" : "failure_workflow", - "schemaVersion" : 2, - "restartable" : true, - "workflowStatusListenerEnabled" : false, - "timeoutPolicy" : "ALERT_ONLY", - "timeoutSeconds" : 0 + { + "name": "integration_task_2", + "taskReferenceName": "t2", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "inputParameters": [], + "outputParameters": {}, + "failureWorkflow": "failure_workflow", + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "ownerEmail": "test@harness.com" } \ No newline at end of file diff --git a/test-harness/src/test/resources/terminate_task_parent_workflow.json b/test-harness/src/test/resources/terminate_task_parent_workflow.json index 9268ba4717..f38790e66f 100644 --- a/test-harness/src/test/resources/terminate_task_parent_workflow.json +++ b/test-harness/src/test/resources/terminate_task_parent_workflow.json @@ -2,75 +2,76 @@ "name": "test_terminate_task_parent_wf", "version": 1, "tasks": [ - { - "name": "test_forkjoin", - "taskReferenceName": "forkx", - "type": "FORK_JOIN", - "forkTasks": [ - [ - { - "name": "test_lambda_task1", - "taskReferenceName": "lambdaTask1", - "inputParameters": { - "lambdaValue": "${workflow.input.lambdaValue}", - "scriptExpression": "var i = 10; if ($.lambdaValue == 1){ return {testvalue: 'Lambda value was 1', iValue: i} } else { return {testvalue: 'Lambda value was NOT 1', iValue: i + 3} }" - }, - "type": "LAMBDA" - }, - { - "name": "test_terminate_subworkflow", - "taskReferenceName": "test_terminate_subworkflow", - "inputParameters": { - }, - "type": "SUB_WORKFLOW", - "subWorkflowParam": { - "name": "test_terminate_task_sub_wf" - } - } - ], - [ - { - "name": "test_lambda_task2", - "taskReferenceName": "lambdaTask2", - "inputParameters": { - "lambdaValue": "${workflow.input.lambdaValue}", - "scriptExpression": "var i = 10; if ($.lambdaValue == 
1){ return {testvalue: 'Lambda value was 1', iValue: i} } else { return {testvalue: 'Lambda value was NOT 1', iValue: i + 3} }" - }, - "type": "LAMBDA" - }, - { - "name": "test_wait_task", - "taskReferenceName": "basicJavaA", - "type": "WAIT" - }, - { - "name": "terminate", - "taskReferenceName": "terminate0", - "inputParameters": { - "terminationStatus": "COMPLETED", - "workflowOutput": "some output" - }, - "type": "TERMINATE", - "startDelay": 0, - "optional": false - }, - { - "name": "test_second_wait_task", - "taskReferenceName": "basicJavaB", - "type": "WAIT" - } - ] - ] - }, - { - "name": "join", - "taskReferenceName": "thejoin", - "type": "JOIN", - "joinOn": [ - "basicJavaA", - "basicJavaB" - ] - } + { + "name": "test_forkjoin", + "taskReferenceName": "forkx", + "type": "FORK_JOIN", + "forkTasks": [ + [ + { + "name": "test_lambda_task1", + "taskReferenceName": "lambdaTask1", + "inputParameters": { + "lambdaValue": "${workflow.input.lambdaValue}", + "scriptExpression": "var i = 10; if ($.lambdaValue == 1){ return {testvalue: 'Lambda value was 1', iValue: i} } else { return {testvalue: 'Lambda value was NOT 1', iValue: i + 3} }" + }, + "type": "LAMBDA" + }, + { + "name": "test_terminate_subworkflow", + "taskReferenceName": "test_terminate_subworkflow", + "inputParameters": { + }, + "type": "SUB_WORKFLOW", + "subWorkflowParam": { + "name": "test_terminate_task_sub_wf" + } + } + ], + [ + { + "name": "test_lambda_task2", + "taskReferenceName": "lambdaTask2", + "inputParameters": { + "lambdaValue": "${workflow.input.lambdaValue}", + "scriptExpression": "var i = 10; if ($.lambdaValue == 1){ return {testvalue: 'Lambda value was 1', iValue: i} } else { return {testvalue: 'Lambda value was NOT 1', iValue: i + 3} }" + }, + "type": "LAMBDA" + }, + { + "name": "test_wait_task", + "taskReferenceName": "basicJavaA", + "type": "WAIT" + }, + { + "name": "terminate", + "taskReferenceName": "terminate0", + "inputParameters": { + "terminationStatus": "COMPLETED", + "workflowOutput": "some output" + }, + "type": "TERMINATE", + "startDelay": 0, + "optional": false + }, + { + "name": "test_second_wait_task", + "taskReferenceName": "basicJavaB", + "type": "WAIT" + } + ] + ] + }, + { + "name": "join", + "taskReferenceName": "thejoin", + "type": "JOIN", + "joinOn": [ + "basicJavaA", + "basicJavaB" + ] + } ], - "schemaVersion": 2 + "schemaVersion": 2, + "ownerEmail": "test@harness.com" } \ No newline at end of file diff --git a/test-harness/src/test/resources/terminate_task_sub_workflow.json b/test-harness/src/test/resources/terminate_task_sub_workflow.json index 2d29b8bc16..71d83305de 100644 --- a/test-harness/src/test/resources/terminate_task_sub_workflow.json +++ b/test-harness/src/test/resources/terminate_task_sub_workflow.json @@ -2,11 +2,12 @@ "name": "test_terminate_task_sub_wf", "version": 1, "tasks": [ - { - "name": "test_third_wait_task", - "taskReferenceName": "basicJavaC", - "type": "WAIT" - } + { + "name": "test_third_wait_task", + "taskReferenceName": "basicJavaC", + "type": "WAIT" + } ], - "schemaVersion": 2 + "schemaVersion": 2, + "ownerEmail": "test@harness.com" } \ No newline at end of file diff --git a/test-harness/src/test/resources/wait_workflow_integration_test.json b/test-harness/src/test/resources/wait_workflow_integration_test.json index 3c8663a248..8cc567e881 100644 --- a/test-harness/src/test/resources/wait_workflow_integration_test.json +++ b/test-harness/src/test/resources/wait_workflow_integration_test.json @@ -1,40 +1,44 @@ { - "name" : "test_wait_workflow", - "version" : 1, - 
"tasks" : [ { - "name" : "wait", - "taskReferenceName" : "wait0", - "inputParameters" : { }, - "type" : "WAIT", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - }, { - "name" : "integration_task_1", - "taskReferenceName" : "t1", - "inputParameters" : { }, - "type" : "SIMPLE", - "decisionCases" : { }, - "defaultCase" : [ ], - "forkTasks" : [ ], - "startDelay" : 0, - "joinOn" : [ ], - "optional" : false, - "defaultExclusiveJoinTask" : [ ], - "asyncComplete" : false, - "loopOver" : [ ] - } ], - "inputParameters" : [ ], - "outputParameters" : { }, - "schemaVersion" : 2, - "restartable" : true, - "workflowStatusListenerEnabled" : false, - "timeoutPolicy" : "ALERT_ONLY", - "timeoutSeconds" : 0 + "name": "test_wait_workflow", + "version": 1, + "tasks": [ + { + "name": "wait", + "taskReferenceName": "wait0", + "inputParameters": {}, + "type": "WAIT", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + }, + { + "name": "integration_task_1", + "taskReferenceName": "t1", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "inputParameters": [], + "outputParameters": {}, + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "ownerEmail": "test@harness.com" } \ No newline at end of file diff --git a/test-harness/src/test/resources/workflow_with_sub_workflow_1_integration_test.json b/test-harness/src/test/resources/workflow_with_sub_workflow_1_integration_test.json index daff7fcd5b..36a30db9b5 100644 --- a/test-harness/src/test/resources/workflow_with_sub_workflow_1_integration_test.json +++ b/test-harness/src/test/resources/workflow_with_sub_workflow_1_integration_test.json @@ -50,5 +50,6 @@ "restartable": true, "workflowStatusListenerEnabled": false, "timeoutPolicy": "ALERT_ONLY", - "timeoutSeconds": 0 + "timeoutSeconds": 0, + "ownerEmail": "test@harness.com" } \ No newline at end of file diff --git a/zookeeper-lock/build.gradle b/zookeeper-lock/build.gradle index 1a9782fc40..92da549d3f 100644 --- a/zookeeper-lock/build.gradle +++ b/zookeeper-lock/build.gradle @@ -4,7 +4,7 @@ dependencies { implementation 'org.springframework.boot:spring-boot-starter' implementation "javax.inject:javax.inject:1" - implementation "org.apache.commons:commons-lang3:${revCommonsLang3}" + implementation "org.apache.commons:commons-lang3" implementation "org.apache.curator:curator-recipes:${revCuratorRecipes}" testImplementation "org.apache.curator:curator-test:${revCuratorTest}" diff --git a/zookeeper-lock/src/test/java/com/netflix/conductor/zookeeper/lock/ZookeeperLockTest.java b/zookeeper-lock/src/test/java/com/netflix/conductor/zookeeper/lock/ZookeeperLockTest.java index 3517c9126b..369ce26bb7 100644 --- a/zookeeper-lock/src/test/java/com/netflix/conductor/zookeeper/lock/ZookeeperLockTest.java +++ b/zookeeper-lock/src/test/java/com/netflix/conductor/zookeeper/lock/ZookeeperLockTest.java @@ -12,31 +12,34 @@ */ package com.netflix.conductor.zookeeper.lock; +import static org.junit.Assert.assertFalse; +import static 
org.junit.Assert.assertTrue; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + import com.netflix.conductor.core.config.ConductorProperties; import com.netflix.conductor.core.sync.Lock; import com.netflix.conductor.service.ExecutionLockService; import com.netflix.conductor.zookeeper.config.ZookeeperProperties; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.Semaphore; +import java.util.concurrent.TimeUnit; +import javax.inject.Provider; import org.apache.curator.framework.CuratorFrameworkFactory; import org.apache.curator.test.TestingServer; import org.junit.After; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; - -import javax.inject.Provider; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.concurrent.Semaphore; -import java.util.concurrent.TimeUnit; - -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class ZookeeperLockTest { + private static final Logger LOGGER = LoggerFactory.getLogger(ZookeeperLockTest.class); + TestingServer zkServer; ZookeeperProperties properties; Provider mockProvider; @@ -187,7 +190,7 @@ public void run() { lock.releaseLock(lockID); iterations++; if (iterations % 10 == 0) { - System.out.println("Finished iterations:" + iterations); + LOGGER.info("Finished iterations: {}", iterations); } } finishedSuccessfully = true;
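For context on the last hunk: the change swaps System.out.println for SLF4J parameterized logging, which is the pattern the rest of the Conductor codebase uses. Below is a minimal, self-contained sketch of that pattern, not taken from the repository; the class name, loop, and message are illustrative, and it assumes only slf4j-api (plus any binding) on the classpath.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggingSketch {

    // One static logger per class, keyed by the class itself.
    private static final Logger LOGGER = LoggerFactory.getLogger(LoggingSketch.class);

    public static void main(String[] args) {
        int iterations = 0;
        for (int i = 0; i < 50; i++) {
            iterations++;
            if (iterations % 10 == 0) {
                // Parameterized message: the "{}" placeholder is only formatted when
                // INFO is enabled, avoiding the eager string concatenation that the
                // replaced println incurred on every call.
                LOGGER.info("Finished iterations: {}", iterations);
            }
        }
    }
}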