diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/AbstractMasterIntegrationTestCase.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/AbstractMasterIntegrationTestCase.java
index e5e6bb336dd1..efb70adc5d4e 100644
--- a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/AbstractMasterIntegrationTestCase.java
+++ b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/AbstractMasterIntegrationTestCase.java
@@ -25,12 +25,26 @@
import org.apache.dolphinscheduler.server.master.integration.WorkflowOperator;
import org.apache.dolphinscheduler.server.master.integration.WorkflowTestCaseContextFactory;
+import java.util.UUID;
+import java.util.concurrent.ThreadLocalRandom;
+import java.util.concurrent.atomic.AtomicInteger;
+
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.AfterEach;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.test.context.DynamicPropertyRegistry;
+import org.springframework.test.context.DynamicPropertySource;
+import org.springframework.test.context.TestExecutionListeners;
+import org.springframework.test.context.event.EventPublishingTestExecutionListener;
+import org.springframework.test.context.jdbc.SqlScriptsTestExecutionListener;
+import org.springframework.test.context.support.DependencyInjectionTestExecutionListener;
+import org.springframework.test.context.support.DirtiesContextBeforeModesTestExecutionListener;
+import org.springframework.test.context.support.DirtiesContextTestExecutionListener;
import org.springframework.test.annotation.DirtiesContext;
+import org.springframework.test.context.transaction.TransactionalTestExecutionListener;
+import org.springframework.test.context.web.ServletTestExecutionListener;
import io.micrometer.core.instrument.MeterRegistry;
import io.micrometer.core.instrument.Metrics;
@@ -45,8 +59,19 @@
MasterServer.class,
DaoConfiguration.class})
@DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_EACH_TEST_METHOD)
+@TestExecutionListeners(listeners = {
+ ServletTestExecutionListener.class,
+ DirtiesContextBeforeModesTestExecutionListener.class,
+ DependencyInjectionTestExecutionListener.class,
+ DirtiesContextTestExecutionListener.class,
+ TransactionalTestExecutionListener.class,
+ SqlScriptsTestExecutionListener.class,
+ EventPublishingTestExecutionListener.class})
public abstract class AbstractMasterIntegrationTestCase {
+ private static final AtomicInteger NEXT_MASTER_LISTEN_PORT =
+ new AtomicInteger(ThreadLocalRandom.current().nextInt(20_000, 50_000));
+
@Autowired
protected WorkflowTestCaseContextFactory workflowTestCaseContextFactory;
@@ -68,6 +93,17 @@ public abstract class AbstractMasterIntegrationTestCase {
@Autowired
private MeterRegistry meterRegistry;
+ @DynamicPropertySource
+ static void registerParallelSafeProperties(DynamicPropertyRegistry registry) {
+ final String databaseName = "dolphinscheduler_" + UUID.randomUUID().toString().replace("-", "");
+ final int masterListenPort = NEXT_MASTER_LISTEN_PORT.getAndIncrement();
+
+ registry.add("spring.datasource.url", () -> "jdbc:h2:mem:" + databaseName
+ + ";MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true;");
+ registry.add("master.listen-port", () -> masterListenPort);
+ registry.add("server.port", () -> 0);
+ }
+
/**
* Unbind this test method's {@link MeterRegistry} from {@link Metrics#globalRegistry}.
*
diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/WorkflowTestCaseContextFactory.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/WorkflowTestCaseContextFactory.java
index b84f789b2b89..2bb2a1f2d02e 100644
--- a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/WorkflowTestCaseContextFactory.java
+++ b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/WorkflowTestCaseContextFactory.java
@@ -39,6 +39,7 @@
import org.apache.dolphinscheduler.dao.repository.WorkflowDefinitionLogDao;
import org.apache.dolphinscheduler.dao.repository.WorkflowInstanceDao;
import org.apache.dolphinscheduler.dao.repository.WorkflowTaskRelationLogDao;
+import org.apache.dolphinscheduler.server.master.config.MasterConfig;
import org.apache.commons.collections4.CollectionUtils;
@@ -51,6 +52,8 @@
@Component
public class WorkflowTestCaseContextFactory {
+ private static final String DEFAULT_MASTER_ADDRESS = "127.0.0.1:5678";
+
@Autowired
private ProjectDao projectDao;
@@ -84,8 +87,12 @@ public class WorkflowTestCaseContextFactory {
@Autowired
private IEnvironmentDao environmentDao;
+ @Autowired
+ private MasterConfig masterConfig;
+
public WorkflowTestCaseContext initializeContextFromYaml(final String yamlPath) {
final WorkflowTestCaseContext workflowTestCaseContext = YamlFactory.load(yamlPath);
+ normalizeDefaultMasterAddress(workflowTestCaseContext);
initializeProjectToDB(workflowTestCaseContext.getProject());
initializeWorkflowDefinitionToDB(workflowTestCaseContext.getWorkflows());
initializeTaskDefinitionsToDB(workflowTestCaseContext.getTasks());
@@ -98,6 +105,33 @@ public WorkflowTestCaseContext initializeContextFromYaml(final String yamlPath)
return workflowTestCaseContext;
}
+ private void normalizeDefaultMasterAddress(final WorkflowTestCaseContext workflowTestCaseContext) {
+ normalizeWorkflowInstanceHost(workflowTestCaseContext.getWorkflowInstances());
+ normalizeTaskInstanceHost(workflowTestCaseContext.getTaskInstances());
+ }
+
+ private void normalizeWorkflowInstanceHost(final List<WorkflowInstance> workflowInstances) {
+ if (CollectionUtils.isEmpty(workflowInstances)) {
+ return;
+ }
+ for (final WorkflowInstance workflowInstance : workflowInstances) {
+ if (DEFAULT_MASTER_ADDRESS.equals(workflowInstance.getHost())) {
+ workflowInstance.setHost(masterConfig.getMasterAddress());
+ }
+ }
+ }
+
+ private void normalizeTaskInstanceHost(final List<TaskInstance> taskInstances) {
+ if (CollectionUtils.isEmpty(taskInstances)) {
+ return;
+ }
+ for (final TaskInstance taskInstance : taskInstances) {
+ if (DEFAULT_MASTER_ADDRESS.equals(taskInstance.getHost())) {
+ taskInstance.setHost(masterConfig.getMasterAddress());
+ }
+ }
+ }
+
private void initializeTaskInstancesToDB(List<TaskInstance> taskInstances) {
if (CollectionUtils.isEmpty(taskInstances)) {
return;
From 0b57bbef9c72b49fa11e47e43c580cfa20bc9690 Mon Sep 17 00:00:00 2001
From: SbloodyS <460888207@qq.com>
Date: Sat, 2 May 2026 23:42:35 +0800
Subject: [PATCH 05/10] ut optimize
---
dolphinscheduler-master/pom.xml | 2 +-
.../server/master/AbstractMasterIntegrationTestCase.java | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/dolphinscheduler-master/pom.xml b/dolphinscheduler-master/pom.xml
index 46a4858632fa..17c1ed4f496e 100644
--- a/dolphinscheduler-master/pom.xml
+++ b/dolphinscheduler-master/pom.xml
@@ -386,10 +386,10 @@
spring-boot-integration-test
- test
test
+ test
**/integration/cases/*TestCase.java
diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/AbstractMasterIntegrationTestCase.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/AbstractMasterIntegrationTestCase.java
index efb70adc5d4e..09c68f815b81 100644
--- a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/AbstractMasterIntegrationTestCase.java
+++ b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/AbstractMasterIntegrationTestCase.java
@@ -34,6 +34,7 @@
import org.junit.jupiter.api.AfterEach;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.DynamicPropertyRegistry;
import org.springframework.test.context.DynamicPropertySource;
import org.springframework.test.context.TestExecutionListeners;
@@ -42,7 +43,6 @@
import org.springframework.test.context.support.DependencyInjectionTestExecutionListener;
import org.springframework.test.context.support.DirtiesContextBeforeModesTestExecutionListener;
import org.springframework.test.context.support.DirtiesContextTestExecutionListener;
-import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.transaction.TransactionalTestExecutionListener;
import org.springframework.test.context.web.ServletTestExecutionListener;
From 8735f6be07048ec7b202b8158db30c40aaf714b9 Mon Sep 17 00:00:00 2001
From: SbloodyS <460888207@qq.com>
Date: Mon, 4 May 2026 16:21:38 +0800
Subject: [PATCH 06/10] ut optimize
---
.../AbstractMasterIntegrationTestCase.java | 20 -----
...tegrationTestContextCustomizerFactory.java | 88 +++++++++++++++++++
.../test/resources/META-INF/spring.factories | 2 +
3 files changed, 90 insertions(+), 20 deletions(-)
create mode 100644 dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/MasterIntegrationTestContextCustomizerFactory.java
create mode 100644 dolphinscheduler-master/src/test/resources/META-INF/spring.factories
diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/AbstractMasterIntegrationTestCase.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/AbstractMasterIntegrationTestCase.java
index 09c68f815b81..a9090851af8a 100644
--- a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/AbstractMasterIntegrationTestCase.java
+++ b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/AbstractMasterIntegrationTestCase.java
@@ -25,18 +25,12 @@
import org.apache.dolphinscheduler.server.master.integration.WorkflowOperator;
import org.apache.dolphinscheduler.server.master.integration.WorkflowTestCaseContextFactory;
-import java.util.UUID;
-import java.util.concurrent.ThreadLocalRandom;
-import java.util.concurrent.atomic.AtomicInteger;
-
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.AfterEach;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.annotation.DirtiesContext;
-import org.springframework.test.context.DynamicPropertyRegistry;
-import org.springframework.test.context.DynamicPropertySource;
import org.springframework.test.context.TestExecutionListeners;
import org.springframework.test.context.event.EventPublishingTestExecutionListener;
import org.springframework.test.context.jdbc.SqlScriptsTestExecutionListener;
@@ -69,9 +63,6 @@
EventPublishingTestExecutionListener.class})
public abstract class AbstractMasterIntegrationTestCase {
- private static final AtomicInteger NEXT_MASTER_LISTEN_PORT =
- new AtomicInteger(ThreadLocalRandom.current().nextInt(20_000, 50_000));
-
@Autowired
protected WorkflowTestCaseContextFactory workflowTestCaseContextFactory;
@@ -93,17 +84,6 @@ public abstract class AbstractMasterIntegrationTestCase {
@Autowired
private MeterRegistry meterRegistry;
- @DynamicPropertySource
- static void registerParallelSafeProperties(DynamicPropertyRegistry registry) {
- final String databaseName = "dolphinscheduler_" + UUID.randomUUID().toString().replace("-", "");
- final int masterListenPort = NEXT_MASTER_LISTEN_PORT.getAndIncrement();
-
- registry.add("spring.datasource.url", () -> "jdbc:h2:mem:" + databaseName
- + ";MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true;");
- registry.add("master.listen-port", () -> masterListenPort);
- registry.add("server.port", () -> 0);
- }
-
/**
* Unbind this test method's {@link MeterRegistry} from {@link Metrics#globalRegistry}.
*
diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/MasterIntegrationTestContextCustomizerFactory.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/MasterIntegrationTestContextCustomizerFactory.java
new file mode 100644
index 000000000000..2be16b99458c
--- /dev/null
+++ b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/MasterIntegrationTestContextCustomizerFactory.java
@@ -0,0 +1,88 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.dolphinscheduler.server.master.integration;
+
+import org.apache.dolphinscheduler.server.master.AbstractMasterIntegrationTestCase;
+
+import java.util.List;
+import java.util.Objects;
+import java.util.UUID;
+import java.util.concurrent.ThreadLocalRandom;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.springframework.boot.test.util.TestPropertyValues;
+import org.springframework.context.ConfigurableApplicationContext;
+import org.springframework.test.context.ContextConfigurationAttributes;
+import org.springframework.test.context.ContextCustomizer;
+import org.springframework.test.context.ContextCustomizerFactory;
+import org.springframework.test.context.MergedContextConfiguration;
+
+public class MasterIntegrationTestContextCustomizerFactory implements ContextCustomizerFactory {
+
+ private static final AtomicInteger NEXT_MASTER_LISTEN_PORT =
+ new AtomicInteger(ThreadLocalRandom.current().nextInt(20_000, 50_000));
+
+ @Override
+ public ContextCustomizer createContextCustomizer(final Class<?> testClass,
+ final List<ContextConfigurationAttributes> configAttributes) {
+ if (!AbstractMasterIntegrationTestCase.class.isAssignableFrom(testClass)) {
+ return null;
+ }
+ return new MasterIntegrationTestContextCustomizer(testClass.getName());
+ }
+
+ private static final class MasterIntegrationTestContextCustomizer implements ContextCustomizer {
+
+ private final String testClassName;
+
+ private MasterIntegrationTestContextCustomizer(final String testClassName) {
+ this.testClassName = testClassName;
+ }
+
+ @Override
+ public void customizeContext(final ConfigurableApplicationContext context,
+ final MergedContextConfiguration mergedConfig) {
+ final String databaseName = "dolphinscheduler_" + UUID.randomUUID().toString().replace("-", "");
+ final int masterListenPort = NEXT_MASTER_LISTEN_PORT.getAndIncrement();
+
+ TestPropertyValues.of(
+ "spring.datasource.url=jdbc:h2:mem:" + databaseName
+ + ";MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true;",
+ "master.listen-port=" + masterListenPort,
+ "server.port=0")
+ .applyTo(context);
+ }
+
+ @Override
+ public boolean equals(final Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (!(o instanceof MasterIntegrationTestContextCustomizer)) {
+ return false;
+ }
+ MasterIntegrationTestContextCustomizer that = (MasterIntegrationTestContextCustomizer) o;
+ return Objects.equals(testClassName, that.testClassName);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(testClassName);
+ }
+ }
+}
diff --git a/dolphinscheduler-master/src/test/resources/META-INF/spring.factories b/dolphinscheduler-master/src/test/resources/META-INF/spring.factories
new file mode 100644
index 000000000000..1f47242b471b
--- /dev/null
+++ b/dolphinscheduler-master/src/test/resources/META-INF/spring.factories
@@ -0,0 +1,2 @@
+org.springframework.test.context.ContextCustomizerFactory=\
+org.apache.dolphinscheduler.server.master.integration.MasterIntegrationTestContextCustomizerFactory
From bd0460232cf37c23f466e14e329ab00f5bcfc505 Mon Sep 17 00:00:00 2001
From: SbloodyS <460888207@qq.com>
Date: Mon, 4 May 2026 16:23:54 +0800
Subject: [PATCH 07/10] ut optimize
---
.../test/resources/META-INF/spring.factories | 17 +++++++++++++++++
1 file changed, 17 insertions(+)
diff --git a/dolphinscheduler-master/src/test/resources/META-INF/spring.factories b/dolphinscheduler-master/src/test/resources/META-INF/spring.factories
index 1f47242b471b..71fa1be30f98 100644
--- a/dolphinscheduler-master/src/test/resources/META-INF/spring.factories
+++ b/dolphinscheduler-master/src/test/resources/META-INF/spring.factories
@@ -1,2 +1,19 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
org.springframework.test.context.ContextCustomizerFactory=\
org.apache.dolphinscheduler.server.master.integration.MasterIntegrationTestContextCustomizerFactory
From fbb8892c18b52c41af86d2e5ad8c516a4e343202 Mon Sep 17 00:00:00 2001
From: SbloodyS <460888207@qq.com>
Date: Mon, 4 May 2026 17:07:02 +0800
Subject: [PATCH 08/10] ut optimize
---
dolphinscheduler-master/pom.xml | 17 ++--
...tegrationTestContextCustomizerFactory.java | 88 -------------------
.../test/resources/META-INF/spring.factories | 19 ----
3 files changed, 12 insertions(+), 112 deletions(-)
delete mode 100644 dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/MasterIntegrationTestContextCustomizerFactory.java
delete mode 100644 dolphinscheduler-master/src/test/resources/META-INF/spring.factories
diff --git a/dolphinscheduler-master/pom.xml b/dolphinscheduler-master/pom.xml
index 17c1ed4f496e..5b215754f86b 100644
--- a/dolphinscheduler-master/pom.xml
+++ b/dolphinscheduler-master/pom.xml
@@ -382,6 +382,9 @@
**/integration/cases/*TestCase.java
+
+ junit.jupiter.execution.parallel.enabled = false
+
@@ -394,13 +397,17 @@
**/integration/cases/*TestCase.java
+ 4
+ true
+
+ junit.jupiter.execution.parallel.enabled = false
+
${project.build.directory}/jacoco.exec
- true
- concurrent
- same_thread
- fixed
- 4
+ jdbc:h2:mem:dolphinscheduler-${surefire.forkNumber};MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true;
+ 567${surefire.forkNumber}
+ 0
+ 1d
diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/MasterIntegrationTestContextCustomizerFactory.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/MasterIntegrationTestContextCustomizerFactory.java
deleted file mode 100644
index 2be16b99458c..000000000000
--- a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/MasterIntegrationTestContextCustomizerFactory.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.server.master.integration;
-
-import org.apache.dolphinscheduler.server.master.AbstractMasterIntegrationTestCase;
-
-import java.util.List;
-import java.util.Objects;
-import java.util.UUID;
-import java.util.concurrent.ThreadLocalRandom;
-import java.util.concurrent.atomic.AtomicInteger;
-
-import org.springframework.boot.test.util.TestPropertyValues;
-import org.springframework.context.ConfigurableApplicationContext;
-import org.springframework.test.context.ContextConfigurationAttributes;
-import org.springframework.test.context.ContextCustomizer;
-import org.springframework.test.context.ContextCustomizerFactory;
-import org.springframework.test.context.MergedContextConfiguration;
-
-public class MasterIntegrationTestContextCustomizerFactory implements ContextCustomizerFactory {
-
- private static final AtomicInteger NEXT_MASTER_LISTEN_PORT =
- new AtomicInteger(ThreadLocalRandom.current().nextInt(20_000, 50_000));
-
- @Override
- public ContextCustomizer createContextCustomizer(final Class<?> testClass,
- final List<ContextConfigurationAttributes> configAttributes) {
- if (!AbstractMasterIntegrationTestCase.class.isAssignableFrom(testClass)) {
- return null;
- }
- return new MasterIntegrationTestContextCustomizer(testClass.getName());
- }
-
- private static final class MasterIntegrationTestContextCustomizer implements ContextCustomizer {
-
- private final String testClassName;
-
- private MasterIntegrationTestContextCustomizer(final String testClassName) {
- this.testClassName = testClassName;
- }
-
- @Override
- public void customizeContext(final ConfigurableApplicationContext context,
- final MergedContextConfiguration mergedConfig) {
- final String databaseName = "dolphinscheduler_" + UUID.randomUUID().toString().replace("-", "");
- final int masterListenPort = NEXT_MASTER_LISTEN_PORT.getAndIncrement();
-
- TestPropertyValues.of(
- "spring.datasource.url=jdbc:h2:mem:" + databaseName
- + ";MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true;",
- "master.listen-port=" + masterListenPort,
- "server.port=0")
- .applyTo(context);
- }
-
- @Override
- public boolean equals(final Object o) {
- if (this == o) {
- return true;
- }
- if (!(o instanceof MasterIntegrationTestContextCustomizer)) {
- return false;
- }
- MasterIntegrationTestContextCustomizer that = (MasterIntegrationTestContextCustomizer) o;
- return Objects.equals(testClassName, that.testClassName);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(testClassName);
- }
- }
-}
diff --git a/dolphinscheduler-master/src/test/resources/META-INF/spring.factories b/dolphinscheduler-master/src/test/resources/META-INF/spring.factories
deleted file mode 100644
index 71fa1be30f98..000000000000
--- a/dolphinscheduler-master/src/test/resources/META-INF/spring.factories
+++ /dev/null
@@ -1,19 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-org.springframework.test.context.ContextCustomizerFactory=\
-org.apache.dolphinscheduler.server.master.integration.MasterIntegrationTestContextCustomizerFactory
From 325dae39848a37b3d0a5b824535668775f14e698 Mon Sep 17 00:00:00 2001
From: SbloodyS <460888207@qq.com>
Date: Mon, 4 May 2026 17:39:12 +0800
Subject: [PATCH 09/10] ut optimize
---
.../AbstractMasterIntegrationTestCase.java | 16 ----------------
1 file changed, 16 deletions(-)
diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/AbstractMasterIntegrationTestCase.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/AbstractMasterIntegrationTestCase.java
index a9090851af8a..e5e6bb336dd1 100644
--- a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/AbstractMasterIntegrationTestCase.java
+++ b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/AbstractMasterIntegrationTestCase.java
@@ -31,14 +31,6 @@
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.annotation.DirtiesContext;
-import org.springframework.test.context.TestExecutionListeners;
-import org.springframework.test.context.event.EventPublishingTestExecutionListener;
-import org.springframework.test.context.jdbc.SqlScriptsTestExecutionListener;
-import org.springframework.test.context.support.DependencyInjectionTestExecutionListener;
-import org.springframework.test.context.support.DirtiesContextBeforeModesTestExecutionListener;
-import org.springframework.test.context.support.DirtiesContextTestExecutionListener;
-import org.springframework.test.context.transaction.TransactionalTestExecutionListener;
-import org.springframework.test.context.web.ServletTestExecutionListener;
import io.micrometer.core.instrument.MeterRegistry;
import io.micrometer.core.instrument.Metrics;
@@ -53,14 +45,6 @@
MasterServer.class,
DaoConfiguration.class})
@DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_EACH_TEST_METHOD)
-@TestExecutionListeners(listeners = {
- ServletTestExecutionListener.class,
- DirtiesContextBeforeModesTestExecutionListener.class,
- DependencyInjectionTestExecutionListener.class,
- DirtiesContextTestExecutionListener.class,
- TransactionalTestExecutionListener.class,
- SqlScriptsTestExecutionListener.class,
- EventPublishingTestExecutionListener.class})
public abstract class AbstractMasterIntegrationTestCase {
@Autowired
From 20f0f8564aa11c02339f2ad7139702443d4f33a7 Mon Sep 17 00:00:00 2001
From: SbloodyS <460888207@qq.com>
Date: Mon, 4 May 2026 17:46:40 +0800
Subject: [PATCH 10/10] ut optimize
---
.../cases/WorkflowStartBasicTestCase.java | 283 +++
.../cases/WorkflowStartConditionTestCase.java | 268 +++
.../WorkflowStartDispatchPolicyTestCase.java | 200 ++
.../cases/WorkflowStartGraphTestCase.java | 460 ++++
.../cases/WorkflowStartParameterTestCase.java | 261 +++
.../WorkflowStartSerialStrategyTestCase.java | 225 ++
.../WorkflowStartSubWorkflowTestCase.java | 196 ++
.../cases/WorkflowStartSwitchTestCase.java | 131 ++
.../cases/WorkflowStartTaskGroupTestCase.java | 82 +
.../cases/WorkflowStartTestCase.java | 1967 -----------------
.../cases/WorkflowStartTimeoutTestCase.java | 252 +++
11 files changed, 2358 insertions(+), 1967 deletions(-)
create mode 100644 dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartBasicTestCase.java
create mode 100644 dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartConditionTestCase.java
create mode 100644 dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartDispatchPolicyTestCase.java
create mode 100644 dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartGraphTestCase.java
create mode 100644 dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartParameterTestCase.java
create mode 100644 dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartSerialStrategyTestCase.java
create mode 100644 dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartSubWorkflowTestCase.java
create mode 100644 dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartSwitchTestCase.java
create mode 100644 dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartTaskGroupTestCase.java
delete mode 100644 dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartTestCase.java
create mode 100644 dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartTimeoutTestCase.java
diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartBasicTestCase.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartBasicTestCase.java
new file mode 100644
index 000000000000..5331548e7a25
--- /dev/null
+++ b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartBasicTestCase.java
@@ -0,0 +1,283 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.dolphinscheduler.server.master.integration.cases;
+
+import static com.google.common.truth.Truth.assertThat;
+import static org.awaitility.Awaitility.await;
+
+import org.apache.dolphinscheduler.common.enums.Flag;
+import org.apache.dolphinscheduler.common.enums.WorkflowExecutionStatus;
+import org.apache.dolphinscheduler.dao.entity.TaskInstance;
+import org.apache.dolphinscheduler.dao.entity.WorkflowDefinition;
+import org.apache.dolphinscheduler.extract.master.command.RunWorkflowCommandParam;
+import org.apache.dolphinscheduler.plugin.task.api.enums.TaskExecutionStatus;
+import org.apache.dolphinscheduler.server.master.AbstractMasterIntegrationTestCase;
+import org.apache.dolphinscheduler.server.master.integration.WorkflowOperator;
+import org.apache.dolphinscheduler.server.master.integration.WorkflowTestCaseContext;
+
+import org.apache.commons.lang3.time.DateUtils;
+
+import java.time.Duration;
+import java.util.List;
+
+import org.assertj.core.api.Assertions;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Test;
+
+/**
+ * Integration tests for basic workflow start scenarios.
+ */
+public class WorkflowStartBasicTestCase extends AbstractMasterIntegrationTestCase {
+
+ @Test
+ @DisplayName("Test start a workflow with one fake task(A) success")
+ public void testStartWorkflow_with_oneSuccessTask() {
+ final String yaml = "/it/start/workflow_with_one_fake_task_success.yaml";
+ final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+ final WorkflowDefinition workflow = context.getOneWorkflow();
+
+ final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+ .workflowDefinition(workflow)
+ .runWorkflowCommandParam(new RunWorkflowCommandParam())
+ .build();
+ final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+ await()
+ .atMost(Duration.ofMinutes(1))
+ .untilAsserted(() -> {
+ Assertions
+ .assertThat(repository.queryWorkflowInstance(workflowInstanceId))
+ .matches(
+ workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.SUCCESS)
+ .matches(
+ workflowInstance -> workflowInstance.getDryRun() == Flag.NO.getCode());
+ Assertions
+ .assertThat(repository.queryTaskInstance(workflow))
+ .satisfiesExactly(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("A");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+ assertThat(taskInstance.getDryRun()).isEqualTo(Flag.NO.getCode());
+ });
+ });
+
+ masterContainer.assertAllResourceReleased();
+ }
+
+ @Test
+ @DisplayName("Test start a workflow with one fake task(A) dry run success")
+ public void testStartWorkflow_with_oneSuccessTaskDryRun() {
+ final String yaml = "/it/start/workflow_with_one_fake_task_success.yaml";
+ final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+ final WorkflowDefinition workflow = context.getOneWorkflow();
+
+ final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+ .workflowDefinition(workflow)
+ .runWorkflowCommandParam(new RunWorkflowCommandParam())
+ .dryRun(Flag.YES)
+ .build();
+ final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+ await()
+ .atMost(Duration.ofMinutes(1))
+ .untilAsserted(() -> {
+ Assertions
+ .assertThat(repository.queryWorkflowInstance(workflowInstanceId))
+ .matches(
+ workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.SUCCESS)
+ .matches(
+ workflowInstance -> workflowInstance.getDryRun() == Flag.YES.getCode());
+ Assertions
+ .assertThat(repository.queryTaskInstance(workflow))
+ .satisfiesExactly(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("A");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+ assertThat(taskInstance.getDryRun()).isEqualTo(Flag.YES.getCode());
+ });
+ });
+
+ masterContainer.assertAllResourceReleased();
+ }
+
+ @Test
+ @DisplayName("Test start a workflow with two fake task(A) has the same name")
+ public void testStartWorkflow_contains_duplicateTaskName() {
+ final String yaml = "/it/start/workflow_with_duplicate_task_name.yaml";
+ final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+ final WorkflowDefinition workflow = context.getOneWorkflow();
+
+ final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+ .workflowDefinition(workflow)
+ .runWorkflowCommandParam(new RunWorkflowCommandParam())
+ .build();
+ final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+ await()
+ .atMost(Duration.ofMinutes(1))
+ .untilAsserted(() -> {
+ assertThat(repository.queryWorkflowInstance(workflowInstanceId).getState())
+ .isEqualTo(WorkflowExecutionStatus.FAILURE);
+ assertThat(repository.queryTaskInstance(workflowInstanceId)).isEmpty();
+ });
+
+ masterContainer.assertAllResourceReleased();
+ }
+
+ @Test
+ @DisplayName("Test start a workflow with one fake task(A) using environment config")
+ public void testStartWorkflow_with_oneSuccessTaskUsingEnvironmentConfig() {
+ final String yaml = "/it/start/workflow_with_one_fake_task_using_environment_success.yaml";
+ final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+ final WorkflowDefinition workflow = context.getOneWorkflow();
+
+ final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+ .workflowDefinition(workflow)
+ .runWorkflowCommandParam(new RunWorkflowCommandParam())
+ .build();
+
+ final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+ await()
+ .atMost(Duration.ofMinutes(1))
+ .untilAsserted(() -> {
+ Assertions
+ .assertThat(repository.queryWorkflowInstance(workflowInstanceId))
+ .matches(
+ workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.SUCCESS);
+ Assertions
+ .assertThat(repository.queryTaskInstance(workflow))
+ .satisfiesExactly(taskInstance -> {
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+ });
+ });
+
+ masterContainer.assertAllResourceReleased();
+ }
+
+ @Test
+ @DisplayName("Test start a workflow with one fake task(A) failed")
+ public void testStartWorkflow_with_oneFailedTask() {
+ final String yaml = "/it/start/workflow_with_one_fake_task_failed.yaml";
+ final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+ final WorkflowDefinition workflow = context.getOneWorkflow();
+
+ final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+ .workflowDefinition(workflow)
+ .runWorkflowCommandParam(new RunWorkflowCommandParam())
+ .build();
+ workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+ await()
+ .atMost(Duration.ofMinutes(1))
+ .untilAsserted(() -> {
+ Assertions
+ .assertThat(repository.queryWorkflowInstance(workflow))
+ .satisfiesExactly(workflowInstance -> assertThat(workflowInstance.getState())
+ .isEqualTo(WorkflowExecutionStatus.FAILURE));
+ Assertions
+ .assertThat(repository.queryTaskInstance(workflow))
+ .satisfiesExactly(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("A");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.FAILURE);
+ });
+ });
+ masterContainer.assertAllResourceReleased();
+ }
+
+ @Test
+ @DisplayName("Test start a workflow with one fake task(A) fatal")
+ public void testStartWorkflow_with_oneFatalTask() {
+ final String yaml = "/it/start/workflow_with_one_fake_task_fatal.yaml";
+ final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+ final WorkflowDefinition workflow = context.getOneWorkflow();
+
+ final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+ .workflowDefinition(workflow)
+ .runWorkflowCommandParam(new RunWorkflowCommandParam())
+ .build();
+ workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+ await()
+ .atMost(Duration.ofMinutes(1))
+ .untilAsserted(() -> {
+ Assertions
+ .assertThat(repository.queryWorkflowInstance(workflow))
+ .satisfiesExactly(workflowInstance -> assertThat(workflowInstance.getState())
+ .isEqualTo(WorkflowExecutionStatus.FAILURE));
+ Assertions
+ .assertThat(repository.queryTaskInstance(workflow))
+ .satisfiesExactly(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("A");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.FAILURE);
+ });
+ });
+ masterContainer.assertAllResourceReleased();
+ }
+
+ @Test
+ @DisplayName("Test start a workflow with one fake task(A) failed with retry")
+ public void testStartWorkflow_with_oneFailedTaskWithRetry() {
+ final String yaml = "/it/start/workflow_with_one_fake_task_failed_with_retry.yaml";
+ final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+ final WorkflowDefinition workflow = context.getOneWorkflow();
+
+ final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+ .workflowDefinition(workflow)
+ .runWorkflowCommandParam(new RunWorkflowCommandParam())
+ .build();
+ workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+ await()
+ .atMost(Duration.ofMinutes(3))
+ .untilAsserted(() -> {
+ Assertions
+ .assertThat(repository.queryWorkflowInstance(workflow))
+ .satisfiesExactly(workflowInstance -> assertThat(workflowInstance.getState())
+ .isEqualTo(WorkflowExecutionStatus.FAILURE));
+
+ final List<TaskInstance> taskInstances = repository.queryTaskInstance(workflow);
+ Assertions
+ .assertThat(taskInstances)
+ .allSatisfy(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("A");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.FAILURE);
+ })
+ .hasSize(2);
+
+ final TaskInstance taskInstance = taskInstances.get(0);
+ Assertions
+ .assertThat(taskInstance)
+ .matches(task -> task.getRetryTimes() == 0)
+ .matches(task -> task.getFlag() == Flag.NO)
+ .isNotNull();
+
+ final TaskInstance latestTaskInstance = taskInstances.get(1);
+ Assertions
+ .assertThat(latestTaskInstance)
+ .matches(task -> task.getRetryTimes() == 1)
+ .matches(task -> task.getFlag() == Flag.YES)
+ .isNotNull();
+ assertThat(latestTaskInstance.getFirstSubmitTime()).isEqualTo(taskInstance.getFirstSubmitTime());
+ assertThat(latestTaskInstance.getSubmitTime())
+ .isAtLeast(DateUtils.addSeconds(taskInstance.getSubmitTime(), -65));
+ assertThat(latestTaskInstance.getSubmitTime())
+ .isAtMost(DateUtils.addMinutes(taskInstance.getSubmitTime(), 65));
+ });
+ masterContainer.assertAllResourceReleased();
+ }
+
+}
diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartConditionTestCase.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartConditionTestCase.java
new file mode 100644
index 000000000000..6ce370527901
--- /dev/null
+++ b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartConditionTestCase.java
@@ -0,0 +1,268 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.dolphinscheduler.server.master.integration.cases;
+
+import static com.google.common.truth.Truth.assertThat;
+import static org.awaitility.Awaitility.await;
+
+import org.apache.dolphinscheduler.common.enums.WorkflowExecutionStatus;
+import org.apache.dolphinscheduler.dao.entity.WorkflowDefinition;
+import org.apache.dolphinscheduler.extract.master.command.RunWorkflowCommandParam;
+import org.apache.dolphinscheduler.plugin.task.api.enums.TaskExecutionStatus;
+import org.apache.dolphinscheduler.server.master.AbstractMasterIntegrationTestCase;
+import org.apache.dolphinscheduler.server.master.integration.WorkflowOperator;
+import org.apache.dolphinscheduler.server.master.integration.WorkflowTestCaseContext;
+
+import java.time.Duration;
+
+import org.assertj.core.api.Assertions;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Test;
+
+/**
+ * Integration tests for workflow start condition task scenarios.
+ */
+public class WorkflowStartConditionTestCase extends AbstractMasterIntegrationTestCase {
+
+ @Test
+ @DisplayName("Test start a workflow with one condition task(B) when one fake predecessor task(A) run success")
+ void testStartWorkflow_with_oneConditionTaskWithOneFakePredecessor_runSuccess() {
+ final String yaml = "/it/start/workflow_with_one_condition_task_with_one_fake_predecessor_success.yaml";
+ final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+ final WorkflowDefinition parentWorkflow = context.getOneWorkflow();
+
+ final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+ .workflowDefinition(parentWorkflow)
+ .runWorkflowCommandParam(new RunWorkflowCommandParam())
+ .build();
+ final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+ await()
+ .atMost(Duration.ofMinutes(1))
+ .untilAsserted(() -> {
+ Assertions
+ .assertThat(repository.queryWorkflowInstance(workflowInstanceId))
+ .matches(
+ workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.SUCCESS);
+
+ Assertions
+ .assertThat(repository.queryTaskInstance(workflowInstanceId))
+ .hasSize(3)
+ .anySatisfy(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("A");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+ })
+ .anySatisfy(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("B");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+ })
+ .anySatisfy(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("C");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+ });
+ });
+ masterContainer.assertAllResourceReleased();
+ }
+
+ @Test
+ @DisplayName("Test start a workflow with one condition task(B) which is forbidden when one fake predecessor task(A) run success")
+ void testStartWorkflow_with_oneForbiddenConditionTaskWithOneFakePredecessor_runSuccess() {
+ final String yaml =
+ "/it/start/workflow_with_one_forbidden_condition_task_with_one_fake_predecessor_success.yaml";
+ final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+ final WorkflowDefinition parentWorkflow = context.getOneWorkflow();
+
+ final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+ .workflowDefinition(parentWorkflow)
+ .runWorkflowCommandParam(new RunWorkflowCommandParam())
+ .build();
+ final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+ await()
+ .atMost(Duration.ofMinutes(1))
+ .untilAsserted(() -> {
+ Assertions
+ .assertThat(repository.queryWorkflowInstance(workflowInstanceId))
+ .matches(
+ workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.SUCCESS);
+ Assertions
+ .assertThat(repository.queryTaskInstance(workflowInstanceId))
+ .hasSize(3)
+ .anySatisfy(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("A");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+ })
+ .anySatisfy(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("C");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+ })
+ .anySatisfy(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("D");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+ });
+ });
+ masterContainer.assertAllResourceReleased();
+ }
+
+ @Test
+ @DisplayName("Test start a workflow with one condition task(B) when one fake predecessor task(A) run failed")
+ void testStartWorkflow_with_oneConditionTaskWithOneFakePredecessor_runFailed() {
+ final String yaml = "/it/start/workflow_with_one_condition_task_with_one_fake_predecessor_failed.yaml";
+ final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+ final WorkflowDefinition parentWorkflow = context.getOneWorkflow();
+
+ final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+ .workflowDefinition(parentWorkflow)
+ .runWorkflowCommandParam(new RunWorkflowCommandParam())
+ .build();
+ final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+ await()
+ .atMost(Duration.ofMinutes(1))
+ .untilAsserted(() -> {
+ Assertions
+ .assertThat(repository.queryWorkflowInstance(workflowInstanceId))
+ .matches(
+ workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.SUCCESS);
+
+ Assertions
+ .assertThat(repository.queryTaskInstance(workflowInstanceId))
+ .hasSize(3)
+ .anySatisfy(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("A");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.FAILURE);
+ })
+ .anySatisfy(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("B");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+ })
+ .anySatisfy(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("D");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+ });
+ });
+ masterContainer.assertAllResourceReleased();
+ }
+
+ @Test
+ @DisplayName("Test start a workflow with one condition task(B) when one fake predecessor task(A) run fatal")
+ void testStartWorkflow_with_oneConditionTaskWithOneFakePredecessor_runFatal() {
+ final String yaml = "/it/start/workflow_with_one_condition_task_with_one_fake_predecessor_fatal.yaml";
+ final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+ final WorkflowDefinition parentWorkflow = context.getOneWorkflow();
+
+ final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+ .workflowDefinition(parentWorkflow)
+ .runWorkflowCommandParam(new RunWorkflowCommandParam())
+ .build();
+ final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+ await()
+ .atMost(Duration.ofMinutes(1))
+ .untilAsserted(() -> {
+ Assertions
+ .assertThat(repository.queryWorkflowInstance(workflowInstanceId))
+ .matches(
+ workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.SUCCESS);
+
+ Assertions
+ .assertThat(repository.queryTaskInstance(workflowInstanceId))
+ .hasSize(3)
+ .anySatisfy(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("A");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.FAILURE);
+ })
+ .anySatisfy(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("B");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+ })
+ .anySatisfy(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("D");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+ });
+ });
+ masterContainer.assertAllResourceReleased();
+ }
+
+ @Test
+ @DisplayName("Test start a workflow with one condition task(B) which is forbidden when one fake predecessor task(A) run failed")
+ void testStartWorkflow_with_oneForbiddenConditionTaskWithOneFakePredecessor_runFailed() {
+ final String yaml =
+ "/it/start/workflow_with_one_forbidden_condition_task_with_one_fake_predecessor_failed.yaml";
+ final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+ final WorkflowDefinition parentWorkflow = context.getOneWorkflow();
+
+ final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+ .workflowDefinition(parentWorkflow)
+ .runWorkflowCommandParam(new RunWorkflowCommandParam())
+ .build();
+ final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+ await()
+ .atMost(Duration.ofMinutes(1))
+ .untilAsserted(() -> {
+ Assertions
+ .assertThat(repository.queryWorkflowInstance(workflowInstanceId))
+ .matches(
+ workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.FAILURE);
+
+ Assertions
+ .assertThat(repository.queryTaskInstance(workflowInstanceId))
+ .hasSize(1)
+ .satisfiesExactly(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("A");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.FAILURE);
+ });
+ });
+ masterContainer.assertAllResourceReleased();
+ }
+
+ @Test
+ @DisplayName("Test start a workflow with one condition task(B) which is forbidden when one fake predecessor task(A) run fatal")
+ void testStartWorkflow_with_oneForbiddenConditionTaskWithOneFakePredecessor_runFatal() {
+ final String yaml =
+ "/it/start/workflow_with_one_forbidden_condition_task_with_one_fake_predecessor_fatal.yaml";
+ final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+ final WorkflowDefinition parentWorkflow = context.getOneWorkflow();
+
+ final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+ .workflowDefinition(parentWorkflow)
+ .runWorkflowCommandParam(new RunWorkflowCommandParam())
+ .build();
+ final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+ await()
+ .atMost(Duration.ofMinutes(1))
+ .untilAsserted(() -> {
+ Assertions
+ .assertThat(repository.queryWorkflowInstance(workflowInstanceId))
+ .matches(
+ workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.FAILURE);
+
+ Assertions
+ .assertThat(repository.queryTaskInstance(workflowInstanceId))
+ .hasSize(1)
+ .satisfiesExactly(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("A");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.FAILURE);
+ });
+ });
+ masterContainer.assertAllResourceReleased();
+ }
+
+}
diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartDispatchPolicyTestCase.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartDispatchPolicyTestCase.java
new file mode 100644
index 000000000000..d7ec68a056eb
--- /dev/null
+++ b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartDispatchPolicyTestCase.java
@@ -0,0 +1,200 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.dolphinscheduler.server.master.integration.cases;
+
+import static com.google.common.truth.Truth.assertThat;
+import static org.awaitility.Awaitility.await;
+
+import org.apache.dolphinscheduler.common.enums.WorkflowExecutionStatus;
+import org.apache.dolphinscheduler.dao.entity.WorkflowDefinition;
+import org.apache.dolphinscheduler.extract.master.command.RunWorkflowCommandParam;
+import org.apache.dolphinscheduler.plugin.task.api.enums.TaskExecutionStatus;
+import org.apache.dolphinscheduler.server.master.AbstractMasterIntegrationTestCase;
+import org.apache.dolphinscheduler.server.master.config.TaskDispatchPolicy;
+import org.apache.dolphinscheduler.server.master.integration.WorkflowOperator;
+import org.apache.dolphinscheduler.server.master.integration.WorkflowTestCaseContext;
+
+import java.time.Duration;
+
+import org.assertj.core.api.Assertions;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Test;
+
+/**
+ * Integration tests for workflow start dispatch policy scenarios.
+ */
+public class WorkflowStartDispatchPolicyTestCase extends AbstractMasterIntegrationTestCase {
+
+ @Test
+ @DisplayName("Test start a workflow whose task specifies a non-existent worker group when dispatch timeout is enabled")
+ public void testTaskFail_with_workerGroupNotFoundAndTimeoutEnabled() {
+ TaskDispatchPolicy taskDispatchPolicy = new TaskDispatchPolicy();
+ taskDispatchPolicy.setDispatchTimeoutEnabled(true);
+ taskDispatchPolicy.setMaxTaskDispatchDuration(Duration.ofSeconds(10));
+ this.masterConfig.setTaskDispatchPolicy(taskDispatchPolicy);
+
+ final String yaml = "/it/start/workflow_with_worker_group_not_found.yaml";
+ final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+ final WorkflowDefinition workflow = context.getOneWorkflow();
+
+ final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+ .workflowDefinition(workflow)
+ .runWorkflowCommandParam(new RunWorkflowCommandParam())
+ .build();
+ workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+ await()
+ .atMost(Duration.ofSeconds(30))
+ .untilAsserted(() -> {
+ Assertions
+ .assertThat(repository.queryTaskInstance(workflow))
+ .hasSize(1)
+ .anySatisfy(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("A");
+ assertThat(taskInstance.getWorkerGroup()).isEqualTo("workerGroupNotFound");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.FAILURE);
+ });
+
+ Assertions
+ .assertThat(repository.queryWorkflowInstance(workflow))
+ .satisfiesExactly(workflowInstance -> assertThat(workflowInstance.getState())
+ .isEqualTo(WorkflowExecutionStatus.FAILURE));
+ });
+
+ masterContainer.assertAllResourceReleased();
+ }
+
+ @Test
+ @DisplayName("Test start a workflow whose task specifies a non-existent worker group when dispatch timeout is disabled")
+ public void testTaskRemainsSubmittedSuccess_with_workerGroupNotFoundAndTimeoutDisabled() {
+ TaskDispatchPolicy policy = new TaskDispatchPolicy();
+ policy.setDispatchTimeoutEnabled(false);
+ this.masterConfig.setTaskDispatchPolicy(policy);
+
+ final String yaml = "/it/start/workflow_with_worker_group_not_found.yaml";
+ final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+ final WorkflowDefinition workflow = context.getOneWorkflow();
+
+ final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+ .workflowDefinition(workflow)
+ .runWorkflowCommandParam(new RunWorkflowCommandParam())
+ .build();
+ workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+ await()
+ .atMost(Duration.ofSeconds(30))
+ .untilAsserted(() -> {
+ Assertions
+ .assertThat(repository.queryTaskInstance(workflow))
+ .hasSize(1)
+ .anySatisfy(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("A");
+ assertThat(taskInstance.getWorkerGroup()).isEqualTo("workerGroupNotFound");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUBMITTED_SUCCESS);
+ });
+
+ Assertions
+ .assertThat(repository.queryWorkflowInstance(workflow))
+ .satisfiesExactly(workflowInstance -> assertThat(workflowInstance.getState())
+ .isEqualTo(WorkflowExecutionStatus.RUNNING_EXECUTION));
+
+ });
+
+ // This test intentionally leaves the workflow running, so we skip the resource cleanup check.
+ // masterContainer.assertAllResourceReleased();
+ }
+
+ @Test
+ @DisplayName("Test start a workflow when no available worker and dispatch timeout is enabled")
+ public void testTaskFail_with_noAvailableWorkerAndTimeoutEnabled() {
+ TaskDispatchPolicy taskDispatchPolicy = new TaskDispatchPolicy();
+ taskDispatchPolicy.setDispatchTimeoutEnabled(true);
+ taskDispatchPolicy.setMaxTaskDispatchDuration(Duration.ofSeconds(10));
+ this.masterConfig.setTaskDispatchPolicy(taskDispatchPolicy);
+
+ final String yaml = "/it/start/workflow_with_no_available_worker.yaml";
+ final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+ final WorkflowDefinition workflow = context.getOneWorkflow();
+
+ final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+ .workflowDefinition(workflow)
+ .runWorkflowCommandParam(new RunWorkflowCommandParam())
+ .build();
+ workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+ await()
+ .atMost(Duration.ofSeconds(30))
+ .untilAsserted(() -> {
+ Assertions
+ .assertThat(repository.queryTaskInstance(workflow))
+ .hasSize(1)
+ .anySatisfy(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("A");
+ assertThat(taskInstance.getWorkerGroup()).isEqualTo("default");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.FAILURE);
+ });
+
+ Assertions
+ .assertThat(repository.queryWorkflowInstance(workflow))
+ .satisfiesExactly(workflowInstance -> assertThat(workflowInstance.getState())
+ .isEqualTo(WorkflowExecutionStatus.FAILURE));
+ });
+
+ masterContainer.assertAllResourceReleased();
+ }
+
+ @Test
+ @DisplayName("Test start a workflow when no available worker and dispatch timeout is disabled")
+ public void testTaskRemainsSubmittedSuccess_with_noAvailableWorkerAndTimeoutDisabled() {
+ TaskDispatchPolicy policy = new TaskDispatchPolicy();
+ policy.setDispatchTimeoutEnabled(false);
+ this.masterConfig.setTaskDispatchPolicy(policy);
+
+ final String yaml = "/it/start/workflow_with_no_available_worker.yaml";
+ final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+ final WorkflowDefinition workflow = context.getOneWorkflow();
+
+ final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+ .workflowDefinition(workflow)
+ .runWorkflowCommandParam(new RunWorkflowCommandParam())
+ .build();
+ workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+ await()
+ .atMost(Duration.ofSeconds(30))
+ .untilAsserted(() -> {
+ Assertions
+ .assertThat(repository.queryTaskInstance(workflow))
+ .hasSize(1)
+ .anySatisfy(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("A");
+ assertThat(taskInstance.getWorkerGroup()).isEqualTo("default");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUBMITTED_SUCCESS);
+ });
+
+ Assertions
+ .assertThat(repository.queryWorkflowInstance(workflow))
+ .satisfiesExactly(workflowInstance -> assertThat(workflowInstance.getState())
+ .isEqualTo(WorkflowExecutionStatus.RUNNING_EXECUTION));
+ });
+
+ // This test intentionally leaves the workflow running, so we skip the resource cleanup check.
+ // masterContainer.assertAllResourceReleased();
+ }
+
+}
diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartGraphTestCase.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartGraphTestCase.java
new file mode 100644
index 000000000000..c3b9137e4393
--- /dev/null
+++ b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartGraphTestCase.java
@@ -0,0 +1,460 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.dolphinscheduler.server.master.integration.cases;
+
+import static com.google.common.truth.Truth.assertThat;
+import static org.awaitility.Awaitility.await;
+
+import org.apache.dolphinscheduler.common.enums.FailureStrategy;
+import org.apache.dolphinscheduler.common.enums.TaskDependType;
+import org.apache.dolphinscheduler.common.enums.WorkflowExecutionStatus;
+import org.apache.dolphinscheduler.dao.entity.TaskInstance;
+import org.apache.dolphinscheduler.dao.entity.WorkflowDefinition;
+import org.apache.dolphinscheduler.extract.master.command.RunWorkflowCommandParam;
+import org.apache.dolphinscheduler.plugin.task.api.enums.TaskExecutionStatus;
+import org.apache.dolphinscheduler.server.master.AbstractMasterIntegrationTestCase;
+import org.apache.dolphinscheduler.server.master.integration.WorkflowOperator;
+import org.apache.dolphinscheduler.server.master.integration.WorkflowTestCaseContext;
+
+import java.time.Duration;
+import java.util.List;
+import java.util.function.Consumer;
+
+import org.assertj.core.api.Assertions;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Test;
+
+import com.google.common.collect.Lists;
+
+/**
+ * Integration tests for workflow start graph traversal scenarios.
+ */
+public class WorkflowStartGraphTestCase extends AbstractMasterIntegrationTestCase {
+
+ @Test
+ @DisplayName("Test start a workflow with three fake task(A) using end failure strategy")
+ public void testStartWorkflow_with_threeFakeTask_usingFailureStrategyEnd() {
+ final String yaml = "/it/start/workflow_with_three_parallel_fake_task_using_failure_strategy.yaml";
+ final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+ final WorkflowDefinition workflow = context.getOneWorkflow();
+
+ final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+ .workflowDefinition(workflow)
+ .runWorkflowCommandParam(new RunWorkflowCommandParam())
+ .failureStrategy(FailureStrategy.END)
+ .build();
+ final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+ await()
+ .atMost(Duration.ofMinutes(1))
+ .untilAsserted(() -> {
+ assertThat(repository.queryWorkflowInstance(workflowInstanceId).getState())
+ .isEqualTo(WorkflowExecutionStatus.FAILURE);
+ Assertions.assertThat(repository.queryTaskInstance(workflow))
+ .hasSize(3)
+ .anySatisfy(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("A");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.KILL);
+ })
+ .anySatisfy(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("B");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.KILL);
+ })
+ .anySatisfy(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("C");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.FAILURE);
+ });
+
+ });
+
+ masterContainer.assertAllResourceReleased();
+ }
+
+ @Test
+ @DisplayName("Test start a workflow with one fake task(A) with multiple predecessors run success")
+ void testStartWorkflow_with_oneTaskWithMultiplePredecessors_runSuccess() {
+ final String yaml = "/it/start/workflow_with_one_fake_task_with_multiple_predecessors_success.yaml";
+ final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+ final WorkflowDefinition parentWorkflow = context.getOneWorkflow();
+
+ final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+ .workflowDefinition(parentWorkflow)
+ .runWorkflowCommandParam(new RunWorkflowCommandParam())
+ .build();
+ final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+ await()
+ .atMost(Duration.ofMinutes(1))
+ .untilAsserted(() -> {
+ Assertions
+ .assertThat(repository.queryWorkflowInstance(workflowInstanceId))
+ .matches(
+ workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.SUCCESS);
+
+ Assertions
+ .assertThat(repository.queryTaskInstance(workflowInstanceId))
+ .hasSize(4)
+ .anySatisfy(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("A");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+ })
+ .anySatisfy(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("B");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+ })
+ .anySatisfy(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("C");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+ })
+ .anySatisfy(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("D");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+ });
+ });
+ masterContainer.assertAllResourceReleased();
+ }
+
+ @Test
+ @DisplayName("Test start a workflow with one fake task(A) with multiple predecessors run failed")
+ void testStartWorkflow_with_oneTaskWithMultiplePredecessors_runFailed() {
+ final String yaml = "/it/start/workflow_with_one_fake_task_with_multiple_predecessors_failed.yaml";
+ final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+ final WorkflowDefinition parentWorkflow = context.getOneWorkflow();
+
+ final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+ .workflowDefinition(parentWorkflow)
+ .runWorkflowCommandParam(new RunWorkflowCommandParam())
+ .failureStrategy(FailureStrategy.CONTINUE)
+ .build();
+ final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+ await()
+ .atMost(Duration.ofMinutes(1))
+ .untilAsserted(() -> {
+ Assertions
+ .assertThat(repository.queryWorkflowInstance(workflowInstanceId))
+ .matches(
+ workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.FAILURE);
+
+ Assertions
+ .assertThat(repository.queryTaskInstance(workflowInstanceId))
+ .hasSize(3)
+ .anySatisfy(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("A");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+ })
+ .anySatisfy(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("B");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.FAILURE);
+ })
+ .anySatisfy(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("C");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+ });
+ });
+ masterContainer.assertAllResourceReleased();
+ }
+
+ @Test
+ @DisplayName("Test start a workflow with shared downstream task when failed predecessor finishes first using continue failure strategy")
+ void testStartWorkflow_with_sharedDownstreamTask_whenFailedPredecessorFinishFirst_usingFailureStrategyContinue() {
+ final String yaml =
+ "/it/start/workflow_with_shared_downstream_task_when_failed_predecessor_finish_first.yaml";
+ final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+ final WorkflowDefinition parentWorkflow = context.getOneWorkflow();
+
+ final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+ .workflowDefinition(parentWorkflow)
+ .runWorkflowCommandParam(new RunWorkflowCommandParam())
+ .failureStrategy(FailureStrategy.CONTINUE)
+ .build();
+ final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+ await()
+ .atMost(Duration.ofMinutes(1))
+ .untilAsserted(() -> {
+ Assertions
+ .assertThat(repository.queryWorkflowInstance(workflowInstanceId))
+ .matches(
+ workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.FAILURE)
+ .matches(workflowInstance -> workflowInstance.getEndTime() != null);
+
+ Assertions
+ .assertThat(repository.queryTaskInstance(workflowInstanceId))
+ .hasSize(2)
+ .anySatisfy(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("A");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.FAILURE);
+ })
+ .anySatisfy(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("B");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+ });
+ });
+ masterContainer.assertAllResourceReleased();
+ }
+
+    @Test
+    @DisplayName("Test start a workflow with two parallel fake tasks(A, B) success")
+    public void testStartWorkflow_with_twoParallelSuccessTask() {
+        final String yaml = "/it/start/workflow_with_two_parallel_fake_task_success.yaml";
+        final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+        final WorkflowDefinition workflow = context.getOneWorkflow();
+
+        final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+                .workflowDefinition(workflow)
+                .runWorkflowCommandParam(new RunWorkflowCommandParam())
+                .build();
+        workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+        await()
+                .atMost(Duration.ofMinutes(1))
+                .untilAsserted(() -> {
+                    Assertions
+                            .assertThat(repository.queryWorkflowInstance(workflow))
+                            .satisfiesExactly(workflowInstance -> assertThat(workflowInstance.getState())
+                                    .isEqualTo(WorkflowExecutionStatus.SUCCESS));
+
+                    final List<TaskInstance> taskInstances = repository.queryTaskInstance(workflow);
+                    Assertions
+                            .assertThat(taskInstances)
+                            .hasSize(2)
+                            .anySatisfy(taskInstance -> {
+                                assertThat(taskInstance.getName()).isEqualTo("A");
+                                assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+                            })
+                            .anySatisfy(taskInstance -> {
+                                assertThat(taskInstance.getName()).isEqualTo("B");
+                                assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+                            });
+                });
+        masterContainer.assertAllResourceReleased();
+    }
+
+    @Test
+    @DisplayName("Test start a workflow with two parallel fake tasks(A(failed), B(failed)) failed")
+    public void testStartWorkflow_with_twoParallelFailedTask() {
+        final String yaml = "/it/start/workflow_with_two_parallel_fake_task_failed.yaml";
+        final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+        final WorkflowDefinition workflow = context.getOneWorkflow();
+
+        final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+                .workflowDefinition(workflow)
+                .runWorkflowCommandParam(new RunWorkflowCommandParam())
+                .build();
+        workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+        await()
+                .atMost(Duration.ofMinutes(1))
+                .untilAsserted(() -> {
+                    Assertions
+                            .assertThat(repository.queryWorkflowInstance(workflow))
+                            .filteredOn(
+                                    workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.FAILURE)
+                            .hasSize(1);
+
+                    final List<TaskInstance> taskInstances = repository.queryTaskInstance(workflow);
+                    Assertions
+                            .assertThat(taskInstances)
+                            .hasSize(2)
+                            .anySatisfy(taskInstance -> {
+                                assertThat(taskInstance.getName()).isEqualTo("A");
+                                assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.FAILURE);
+                            })
+                            .anySatisfy(taskInstance -> {
+                                assertThat(taskInstance.getName()).isEqualTo("B");
+                                assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.FAILURE);
+                            });
+                });
+        masterContainer.assertAllResourceReleased();
+    }
+
+    @Test
+    @DisplayName("Test start a workflow with three parallel three fake tasks(A1->A2->A3, B1->B2->B3, C1->C2->C3) success")
+    public void testStartWorkflow_with_threeParallelSuccessTask() {
+        final String yaml = "/it/start/workflow_with_three_parallel_three_fake_task_success.yaml";
+        final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+        final WorkflowDefinition workflow = context.getOneWorkflow();
+
+        final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+                .workflowDefinition(workflow)
+                .runWorkflowCommandParam(new RunWorkflowCommandParam())
+                .build();
+        workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+        await()
+                .atMost(Duration.ofMinutes(1))
+                .untilAsserted(() -> {
+                    Assertions
+                            .assertThat(repository.queryWorkflowInstance(workflow))
+                            .filteredOn(
+                                    workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.SUCCESS)
+                            .hasSize(1);
+
+                    final List<TaskInstance> taskInstances = repository.queryTaskInstance(workflow);
+                    Assertions
+                            .assertThat(taskInstances)
+                            .hasSize(9)
+                            .anySatisfy(taskInstance -> {
+                                assertThat(taskInstance.getName()).isEqualTo("A1");
+                                assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+                            })
+                            .anySatisfy(taskInstance -> {
+                                assertThat(taskInstance.getName()).isEqualTo("A2");
+                                assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+                            })
+                            .anySatisfy(taskInstance -> {
+                                assertThat(taskInstance.getName()).isEqualTo("A3");
+                                assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+                            })
+                            .anySatisfy(taskInstance -> {
+                                assertThat(taskInstance.getName()).isEqualTo("B1");
+                                assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+                            })
+                            .anySatisfy(taskInstance -> {
+                                assertThat(taskInstance.getName()).isEqualTo("B2");
+                                assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+                            })
+                            .anySatisfy(taskInstance -> {
+                                assertThat(taskInstance.getName()).isEqualTo("B3");
+                                assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+                            })
+                            .anySatisfy(taskInstance -> {
+                                assertThat(taskInstance.getName()).isEqualTo("C1");
+                                assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+                            })
+                            .anySatisfy(taskInstance -> {
+                                assertThat(taskInstance.getName()).isEqualTo("C2");
+                                assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+                            })
+                            .anySatisfy(taskInstance -> {
+                                assertThat(taskInstance.getName()).isEqualTo("C3");
+                                assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+                            });
+                });
+        masterContainer.assertAllResourceReleased();
+    }
+
+    @Test
+    @DisplayName("Test start a workflow from start node C2 with three parallel three fake tasks(A1->A2->A3, B1->B2->B3, C1->C2->C3) success")
+    public void testStartWorkflowFromStartNodes_with_threeParallelSuccessTask() {
+        final String yaml = "/it/start/workflow_with_three_parallel_three_fake_task_success.yaml";
+        final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+        final WorkflowDefinition workflow = context.getOneWorkflow();
+
+        final RunWorkflowCommandParam runWorkflowCommandParam = RunWorkflowCommandParam.builder()
+                .startNodes(Lists.newArrayList(6L))
+                .build();
+        final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+                .workflowDefinition(workflow)
+                .runWorkflowCommandParam(runWorkflowCommandParam)
+                .build();
+        workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+        await()
+                .atMost(Duration.ofMinutes(1))
+                .untilAsserted(() -> {
+                    Assertions
+                            .assertThat(repository.queryWorkflowInstance(workflow))
+                            .filteredOn(
+                                    workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.SUCCESS)
+                            .hasSize(1);
+
+                    final List<TaskInstance> taskInstances = repository.queryTaskInstance(workflow);
+                    Assertions
+                            .assertThat(taskInstances)
+                            .hasSize(2)
+                            .anySatisfy(taskInstance -> {
+                                assertThat(taskInstance.getName()).isEqualTo("C2");
+                                assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+                            })
+                            .anySatisfy(taskInstance -> {
+                                assertThat(taskInstance.getName()).isEqualTo("C3");
+                                assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+                            });
+                });
+        masterContainer.assertAllResourceReleased();
+    }
+
+ @Test
+ @DisplayName("Test start a workflow with task depend type TASK_ONLY")
+ public void testStartWorkflow_withTaskOnlyStrategy() {
+ final String yaml = "/it/start/workflow_with_task_only_strategy.yaml";
+ final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+ final WorkflowDefinition workflow = context.getOneWorkflow();
+
+ final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+ .workflowDefinition(workflow)
+ .runWorkflowCommandParam(new RunWorkflowCommandParam().withStartNodes(Lists.newArrayList(1L)))
+ .taskDependType(TaskDependType.TASK_ONLY)
+ .build();
+ workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+ await()
+ .atMost(Duration.ofMinutes(1))
+ .untilAsserted(() -> {
+ Assertions
+ .assertThat(repository.queryWorkflowInstance(workflow))
+ .satisfiesExactly(workflowInstance -> assertThat(workflowInstance.getState())
+ .isEqualTo(WorkflowExecutionStatus.SUCCESS));
+ Assertions
+ .assertThat(repository.queryTaskInstance(workflow))
+ .hasSize(1)
+ .satisfiesExactly(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("A");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+ });
+ });
+ masterContainer.assertAllResourceReleased();
+ }
+
+    @Test
+    @DisplayName("Test start a workflow with task which successors is forbidden")
+    public void testStartWorkflow_withTaskSuccessorsIsForbidden() {
+        final String yaml = "/it/start/workflow_with_task_successors_is_forbidden.yaml";
+        final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+        final WorkflowDefinition workflow = context.getOneWorkflow();
+
+        final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+                .workflowDefinition(workflow)
+                .runWorkflowCommandParam(new RunWorkflowCommandParam())
+                .build();
+        workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+        await()
+                .atMost(Duration.ofMinutes(1))
+                .untilAsserted(() -> {
+                    Assertions
+                            .assertThat(repository.queryWorkflowInstance(workflow))
+                            .satisfiesExactly(workflowInstance -> assertThat(workflowInstance.getState())
+                                    .isEqualTo(WorkflowExecutionStatus.SUCCESS));
+                    Assertions
+                            .assertThat(repository.queryTaskInstance(workflow))
+                            .hasSize(2)
+                            .satisfiesExactly(taskInstance -> {
+                                assertThat(taskInstance.getName()).isEqualTo("A");
+                                assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+                            }, (Consumer<TaskInstance>) taskInstance -> {
+                                assertThat(taskInstance.getName()).isEqualTo("C1");
+                                assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+                            });
+                });
+        masterContainer.assertAllResourceReleased();
+    }
+
+}
diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartParameterTestCase.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartParameterTestCase.java
new file mode 100644
index 000000000000..d1406e4eadff
--- /dev/null
+++ b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartParameterTestCase.java
@@ -0,0 +1,261 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.dolphinscheduler.server.master.integration.cases;
+
+import static com.google.common.truth.Truth.assertThat;
+import static org.awaitility.Awaitility.await;
+
+import org.apache.dolphinscheduler.common.enums.WorkflowExecutionStatus;
+import org.apache.dolphinscheduler.dao.entity.WorkflowDefinition;
+import org.apache.dolphinscheduler.extract.master.command.RunWorkflowCommandParam;
+import org.apache.dolphinscheduler.plugin.task.api.enums.DataType;
+import org.apache.dolphinscheduler.plugin.task.api.enums.Direct;
+import org.apache.dolphinscheduler.plugin.task.api.enums.TaskExecutionStatus;
+import org.apache.dolphinscheduler.plugin.task.api.model.Property;
+import org.apache.dolphinscheduler.plugin.task.api.utils.VarPoolUtils;
+import org.apache.dolphinscheduler.server.master.AbstractMasterIntegrationTestCase;
+import org.apache.dolphinscheduler.server.master.integration.WorkflowOperator;
+import org.apache.dolphinscheduler.server.master.integration.WorkflowTestCaseContext;
+
+import java.time.Duration;
+import java.util.List;
+
+import org.assertj.core.api.Assertions;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Test;
+
+import com.google.common.collect.Lists;
+
+/**
+ * Integration tests for workflow start parameter scenarios.
+ */
+public class WorkflowStartParameterTestCase extends AbstractMasterIntegrationTestCase {
+
+ @Test
+ @DisplayName("Test start a workflow which using workflow params")
+ public void testStartWorkflow_usingWorkflowParam() {
+ final String yaml = "/it/start/workflow_with_global_param.yaml";
+ final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+ final WorkflowDefinition workflow = context.getOneWorkflow();
+
+ final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+ .workflowDefinition(workflow)
+ .runWorkflowCommandParam(new RunWorkflowCommandParam())
+ .build();
+ workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+ await()
+ .atMost(Duration.ofMinutes(1))
+ .untilAsserted(() -> {
+ Assertions
+ .assertThat(repository.queryWorkflowInstance(workflow))
+ .satisfiesExactly(workflowInstance -> assertThat(workflowInstance.getState())
+ .isEqualTo(WorkflowExecutionStatus.FAILURE));
+ Assertions
+ .assertThat(repository.queryTaskInstance(workflow))
+ .hasSize(2)
+ .anySatisfy(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("A");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+ })
+ .anySatisfy(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("B");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.FAILURE);
+ });
+ });
+
+ masterContainer.assertAllResourceReleased();
+ }
+
+ @Test
+ @DisplayName("Test start a workflow which using command params")
+ public void testStartWorkflow_usingCommandParam() {
+ final String yaml = "/it/start/workflow_with_global_param.yaml";
+ final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+ final WorkflowDefinition workflow = context.getOneWorkflow();
+
+ final RunWorkflowCommandParam runWorkflowCommandParam = RunWorkflowCommandParam.builder()
+ .commandParams(Lists.newArrayList(Property.builder()
+ .prop("name")
+ .direct(Direct.IN)
+ .type(DataType.VARCHAR)
+ .value("commandParam")
+ .build()))
+ .build();
+
+ final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+ .workflowDefinition(workflow)
+ .runWorkflowCommandParam(runWorkflowCommandParam)
+ .build();
+ workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+ await()
+ .atMost(Duration.ofMinutes(1))
+ .untilAsserted(() -> {
+ Assertions
+ .assertThat(repository.queryWorkflowInstance(workflow))
+ .satisfiesExactly(workflowInstance -> assertThat(workflowInstance.getState())
+ .isEqualTo(WorkflowExecutionStatus.FAILURE));
+ Assertions
+ .assertThat(repository.queryTaskInstance(workflow))
+ .hasSize(2)
+ .anySatisfy(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("A");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.FAILURE);
+ })
+ .anySatisfy(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("B");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+ });
+ });
+ masterContainer.assertAllResourceReleased();
+ }
+
+    @Test
+    @DisplayName("Test start a workflow contains fake task using local param will be overwrite by varpool")
+    public void testStartWorkflow_fakeTask_usingLocalParamOverWriteByVarPool() {
+        final String yaml = "/it/start/workflow_with_local_param_overwrite_by_varpool.yaml";
+        final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+        final WorkflowDefinition workflow = context.getOneWorkflow();
+
+        final RunWorkflowCommandParam runWorkflowCommandParam = RunWorkflowCommandParam.builder()
+                .build();
+
+        final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+                .workflowDefinition(workflow)
+                .runWorkflowCommandParam(runWorkflowCommandParam)
+                .build();
+        workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+        final List<Property> assertVarPools = Lists.newArrayList(
+                Property.builder().prop("output").direct(Direct.OUT).type(DataType.VARCHAR).value("1").build());
+        await()
+                .atMost(Duration.ofMinutes(1))
+                .untilAsserted(() -> {
+                    Assertions
+                            .assertThat(repository.queryWorkflowInstance(workflow))
+                            .satisfiesExactly(workflowInstance -> {
+                                assertThat(workflowInstance.getState()).isEqualTo(WorkflowExecutionStatus.SUCCESS);
+                                assertThat(VarPoolUtils.deserializeVarPool(workflowInstance.getVarPool()))
+                                        .isEqualTo(assertVarPools);
+                            });
+                    Assertions
+                            .assertThat(repository.queryTaskInstance(workflow))
+                            .hasSize(3)
+                            .anySatisfy(taskInstance -> {
+                                assertThat(taskInstance.getName()).isEqualTo("A");
+                                assertThat(VarPoolUtils.deserializeVarPool(taskInstance.getVarPool()))
+                                        .isEqualTo(assertVarPools);
+                                assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+                            })
+                            .anySatisfy(taskInstance -> {
+                                assertThat(taskInstance.getName()).isEqualTo("B");
+                                assertThat(VarPoolUtils.deserializeVarPool(taskInstance.getVarPool()))
+                                        .isEqualTo(assertVarPools);
+                                assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+                            })
+                            .anySatisfy(taskInstance -> {
+                                assertThat(taskInstance.getName()).isEqualTo("C");
+                                assertThat(VarPoolUtils.deserializeVarPool(taskInstance.getVarPool()))
+                                        .isEqualTo(assertVarPools);
+                                assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+                            });
+                });
+        masterContainer.assertAllResourceReleased();
+    }
+
+ @Test
+ @DisplayName("Test start a workflow which using null key params")
+ public void testStartWorkflow_usingNullKeyParam() {
+ final String yaml = "/it/start/workflow_with_null_key_param.yaml";
+ final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+ final WorkflowDefinition workflow = context.getOneWorkflow();
+
+ final RunWorkflowCommandParam runWorkflowCommandParam = RunWorkflowCommandParam.builder()
+ .commandParams(Lists.newArrayList(Property.builder()
+ .prop(null)
+ .direct(Direct.IN)
+ .type(DataType.VARCHAR)
+ .value("commandParam")
+ .build()))
+ .build();
+
+ final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+ .workflowDefinition(workflow)
+ .runWorkflowCommandParam(runWorkflowCommandParam)
+ .build();
+ workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+ await()
+ .atMost(Duration.ofMinutes(1))
+ .untilAsserted(() -> {
+ Assertions
+ .assertThat(repository.queryWorkflowInstance(workflow))
+ .satisfiesExactly(workflowInstance -> assertThat(workflowInstance.getState())
+ .isEqualTo(WorkflowExecutionStatus.SUCCESS));
+ Assertions
+ .assertThat(repository.queryTaskInstance(workflow))
+ .hasSize(2)
+ .anySatisfy(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("A");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+ })
+ .anySatisfy(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("B");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+ });
+ });
+ masterContainer.assertAllResourceReleased();
+ }
+
+ @Test
+ @DisplayName("Test start a workflow which using workflow built in params")
+ public void testStartWorkflow_usingWorkflowBuiltInParam() {
+ final String yaml = "/it/start/workflow_with_built_in_param.yaml";
+ final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+ final WorkflowDefinition workflow = context.getOneWorkflow();
+
+ final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+ .workflowDefinition(workflow)
+ .runWorkflowCommandParam(new RunWorkflowCommandParam())
+ .build();
+ workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+ await()
+ .atMost(Duration.ofMinutes(1))
+ .untilAsserted(() -> {
+ Assertions
+ .assertThat(repository.queryWorkflowInstance(workflow))
+ .satisfiesExactly(workflowInstance -> assertThat(workflowInstance.getState())
+ .isEqualTo(WorkflowExecutionStatus.SUCCESS));
+ Assertions
+ .assertThat(repository.queryTaskInstance(workflow))
+ .hasSize(2)
+ .anySatisfy(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("A");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+ })
+ .anySatisfy(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("B");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+ });
+ });
+ masterContainer.assertAllResourceReleased();
+ }
+
+}
diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartSerialStrategyTestCase.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartSerialStrategyTestCase.java
new file mode 100644
index 000000000000..72d2734540ea
--- /dev/null
+++ b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartSerialStrategyTestCase.java
@@ -0,0 +1,225 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.dolphinscheduler.server.master.integration.cases;
+
+import static com.google.common.truth.Truth.assertThat;
+import static org.awaitility.Awaitility.await;
+
+import org.apache.dolphinscheduler.common.enums.WorkflowExecutionStatus;
+import org.apache.dolphinscheduler.dao.entity.TaskInstance;
+import org.apache.dolphinscheduler.dao.entity.WorkflowDefinition;
+import org.apache.dolphinscheduler.dao.entity.WorkflowInstance;
+import org.apache.dolphinscheduler.extract.master.command.RunWorkflowCommandParam;
+import org.apache.dolphinscheduler.plugin.task.api.enums.TaskExecutionStatus;
+import org.apache.dolphinscheduler.server.master.AbstractMasterIntegrationTestCase;
+import org.apache.dolphinscheduler.server.master.integration.WorkflowOperator;
+import org.apache.dolphinscheduler.server.master.integration.WorkflowTestCaseContext;
+
+import org.apache.commons.lang3.time.DateUtils;
+
+import java.time.Duration;
+import java.util.List;
+
+import org.assertj.core.api.Assertions;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Test;
+
+/**
+ * Integration tests for serial workflow start strategy scenarios.
+ */
+public class WorkflowStartSerialStrategyTestCase extends AbstractMasterIntegrationTestCase {
+
+    @Test
+    @DisplayName("Test start a workflow with one fake task(A) using serial wait strategy")
+    public void testStartWorkflow_with_serialWaitStrategy() {
+        final String yaml = "/it/start/workflow_with_serial_wait_strategy.yaml"; // workflow configured with SERIAL_WAIT execution type
+        final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+        final WorkflowDefinition workflow = context.getOneWorkflow();
+
+        final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+                .workflowDefinition(workflow)
+                .runWorkflowCommandParam(new RunWorkflowCommandParam())
+                .build();
+        final Integer workflowInstanceId1 = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO); // trigger the same workflow
+        final Integer workflowInstanceId2 = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO); // three times back-to-back
+        final Integer workflowInstanceId3 = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+        await()
+                .atMost(Duration.ofMinutes(1))
+                .untilAsserted(() -> {
+                    assertThat(repository.queryWorkflowInstance(workflowInstanceId1).getState())
+                            .isEqualTo(WorkflowExecutionStatus.RUNNING_EXECUTION); // only the first instance runs
+                    assertThat(repository.queryWorkflowInstance(workflowInstanceId2).getState())
+                            .isEqualTo(WorkflowExecutionStatus.SERIAL_WAIT); // later instances queue up
+                    assertThat(repository.queryWorkflowInstance(workflowInstanceId3).getState())
+                            .isEqualTo(WorkflowExecutionStatus.SERIAL_WAIT);
+                });
+
+        await()
+                .atMost(Duration.ofMinutes(1))
+                .untilAsserted(() -> {
+                    final WorkflowInstance workflowInstance1 = repository.queryWorkflowInstance(workflowInstanceId1);
+                    final WorkflowInstance workflowInstance2 = repository.queryWorkflowInstance(workflowInstanceId2);
+                    final WorkflowInstance workflowInstance3 = repository.queryWorkflowInstance(workflowInstanceId3);
+                    assertThat(workflowInstance1.getState()).isEqualTo(WorkflowExecutionStatus.SUCCESS);
+                    assertThat(workflowInstance2.getState()).isEqualTo(WorkflowExecutionStatus.SUCCESS);
+                    assertThat(workflowInstance2.getEndTime())
+                            .isAtLeast(DateUtils.addSeconds(workflowInstance1.getEndTime(), 5)); // instance2 ended >=5s after instance1: ran serially
+                    assertThat(workflowInstance3.getState()).isEqualTo(WorkflowExecutionStatus.SUCCESS);
+                    assertThat(workflowInstance3.getEndTime())
+                            .isAtLeast(DateUtils.addSeconds(workflowInstance2.getEndTime(), 5)); // instance3 ended >=5s after instance2
+                });
+
+        masterContainer.assertAllResourceReleased();
+    }
+
+    @Test
+    @DisplayName("Test start a workflow with one fake task(A) using serial discard strategy")
+    public void testStartWorkflow_with_serialDiscardStrategy() {
+        final String yaml = "/it/start/workflow_with_serial_discard_strategy.yaml"; // workflow configured with SERIAL_DISCARD execution type
+        final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+        final WorkflowDefinition workflow = context.getOneWorkflow();
+
+        final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+                .workflowDefinition(workflow)
+                .runWorkflowCommandParam(new RunWorkflowCommandParam())
+                .build();
+        final Integer workflowInstanceId1 = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO); // trigger the same workflow
+        final Integer workflowInstanceId2 = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO); // three times back-to-back
+        final Integer workflowInstanceId3 = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+        await()
+                .atMost(Duration.ofMinutes(1))
+                .untilAsserted(() -> {
+                    final WorkflowInstance workflowInstance1 = repository.queryWorkflowInstance(workflowInstanceId1);
+                    final WorkflowInstance workflowInstance2 = repository.queryWorkflowInstance(workflowInstanceId2);
+                    final WorkflowInstance workflowInstance3 = repository.queryWorkflowInstance(workflowInstanceId3);
+                    assertThat(workflowInstance1.getState()).isEqualTo(WorkflowExecutionStatus.SUCCESS); // first instance runs to completion
+                    assertThat(workflowInstance2.getState()).isEqualTo(WorkflowExecutionStatus.STOP); // later instances are discarded (stopped)
+                    assertThat(workflowInstance2.getEndTime()).isNotNull();
+                    assertThat(workflowInstance2.getEndTime()).isAtLeast(workflowInstance2.getStartTime()); // sane start/end ordering
+                    assertThat(workflowInstance3.getState()).isEqualTo(WorkflowExecutionStatus.STOP);
+                    assertThat(workflowInstance3.getEndTime()).isNotNull();
+                    assertThat(workflowInstance3.getEndTime()).isAtLeast(workflowInstance3.getStartTime());
+                });
+
+        masterContainer.assertAllResourceReleased();
+    }
+
+    @Test
+    @DisplayName("Test start a workflow with one fake task(A) using serial priority strategy")
+    public void testStartWorkflow_with_serialPriorityStrategy() {
+        final String yaml = "/it/start/workflow_with_serial_priority_strategy.yaml"; // workflow configured with SERIAL_PRIORITY execution type
+        final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+        final WorkflowDefinition workflow = context.getOneWorkflow();
+
+        final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+                .workflowDefinition(workflow)
+                .runWorkflowCommandParam(new RunWorkflowCommandParam())
+                .build();
+        final Integer workflowInstanceId1 = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO); // trigger the same workflow
+        final Integer workflowInstanceId2 = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO); // three times back-to-back
+        final Integer workflowInstanceId3 = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+        await()
+                .atMost(Duration.ofMinutes(1))
+                .untilAsserted(() -> {
+                    final WorkflowInstance workflowInstance1 = repository.queryWorkflowInstance(workflowInstanceId1);
+                    final WorkflowInstance workflowInstance2 = repository.queryWorkflowInstance(workflowInstanceId2);
+                    final WorkflowInstance workflowInstance3 = repository.queryWorkflowInstance(workflowInstanceId3);
+                    assertThat(workflowInstance1.getState()).isEqualTo(WorkflowExecutionStatus.STOP); // earlier instances are preempted (stopped)
+                    assertThat(workflowInstance1.getEndTime()).isNotNull();
+                    assertThat(workflowInstance1.getEndTime()).isAtLeast(workflowInstance1.getStartTime()); // sane start/end ordering
+                    assertThat(workflowInstance2.getState()).isEqualTo(WorkflowExecutionStatus.STOP);
+                    assertThat(workflowInstance2.getEndTime()).isNotNull();
+                    assertThat(workflowInstance2.getEndTime()).isAtLeast(workflowInstance2.getStartTime());
+                    assertThat(workflowInstance3.getState()).isEqualTo(WorkflowExecutionStatus.SUCCESS); // only the latest trigger runs to completion
+                });
+
+        masterContainer.assertAllResourceReleased();
+    }
+
+    @Test
+    @DisplayName("Test start a workflow with two serial fake tasks(A -> B) success")
+    public void testStartWorkflow_with_twoSerialSuccessTask() {
+        final String yaml = "/it/start/workflow_with_two_serial_fake_task_success.yaml"; // final: consistent with sibling tests
+        final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+        final WorkflowDefinition workflow = context.getOneWorkflow();
+
+        final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+                .workflowDefinition(workflow)
+                .runWorkflowCommandParam(new RunWorkflowCommandParam())
+                .build();
+        workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+        await()
+                .atMost(Duration.ofMinutes(1))
+                .untilAsserted(() -> {
+                    Assertions
+                            .assertThat(repository.queryWorkflowInstance(workflow))
+                            .satisfiesExactly(workflowInstance -> assertThat(workflowInstance.getState())
+                                    .isEqualTo(WorkflowExecutionStatus.SUCCESS))
+                            .hasSize(1); // exactly one instance created by the single trigger
+
+                    final List<TaskInstance> taskInstances = repository.queryTaskInstance(workflow); // typed list instead of raw List
+                    Assertions
+                            .assertThat(taskInstances)
+                            .hasSize(2) // both serial tasks were executed
+                            .anySatisfy(taskInstance -> {
+                                assertThat(taskInstance.getName()).isEqualTo("A");
+                                assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+                            })
+                            .anySatisfy(taskInstance -> {
+                                assertThat(taskInstance.getName()).isEqualTo("B");
+                                assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+                            });
+                });
+        masterContainer.assertAllResourceReleased();
+    }
+
+    @Test
+    @DisplayName("Test start a workflow with two serial fake tasks(A(failed) -> B) success")
+    public void testStartWorkflow_with_twoSerialFailedTask() {
+        final String yaml = "/it/start/workflow_with_two_serial_fake_task_failed.yaml"; // fixture: task A fails, so B must never run
+        final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+        final WorkflowDefinition workflow = context.getOneWorkflow();
+
+        final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+                .workflowDefinition(workflow)
+                .runWorkflowCommandParam(new RunWorkflowCommandParam())
+                .build();
+        workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+        await()
+                .atMost(Duration.ofMinutes(1))
+                .untilAsserted(() -> {
+                    Assertions
+                            .assertThat(repository.queryWorkflowInstance(workflow))
+                            .satisfiesExactly(workflowInstance -> assertThat(workflowInstance.getState())
+                                    .isEqualTo(WorkflowExecutionStatus.FAILURE)); // failed upstream task fails the whole workflow
+                    Assertions
+                            .assertThat(repository.queryTaskInstance(workflow))
+                            .satisfiesExactly(taskInstance -> { // exactly one task instance: B was never dispatched
+                                assertThat(taskInstance.getName()).isEqualTo("A");
+                                assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.FAILURE);
+                            });
+                });
+        masterContainer.assertAllResourceReleased();
+    }
+
+}
diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartSubWorkflowTestCase.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartSubWorkflowTestCase.java
new file mode 100644
index 000000000000..4e381639e932
--- /dev/null
+++ b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartSubWorkflowTestCase.java
@@ -0,0 +1,196 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.dolphinscheduler.server.master.integration.cases;
+
+import static com.google.common.truth.Truth.assertThat;
+import static org.awaitility.Awaitility.await;
+
+import org.apache.dolphinscheduler.common.enums.Flag;
+import org.apache.dolphinscheduler.common.enums.WorkflowExecutionStatus;
+import org.apache.dolphinscheduler.dao.entity.WorkflowDefinition;
+import org.apache.dolphinscheduler.dao.entity.WorkflowInstance;
+import org.apache.dolphinscheduler.extract.master.command.RunWorkflowCommandParam;
+import org.apache.dolphinscheduler.plugin.task.api.enums.TaskExecutionStatus;
+import org.apache.dolphinscheduler.server.master.AbstractMasterIntegrationTestCase;
+import org.apache.dolphinscheduler.server.master.integration.WorkflowOperator;
+import org.apache.dolphinscheduler.server.master.integration.WorkflowTestCaseContext;
+
+import java.time.Duration;
+import java.util.List;
+
+import org.assertj.core.api.Assertions;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Test;
+
+/**
+ * Integration tests for workflow start sub workflow scenarios.
+ */
+public class WorkflowStartSubWorkflowTestCase extends AbstractMasterIntegrationTestCase {
+
+ @Test
+ @DisplayName("Test start a workflow with one sub workflow task(A) success")
+ public void testStartWorkflow_with_subWorkflowTask_success() {
+ final String yaml = "/it/start/workflow_with_sub_workflow_task_success.yaml";
+ final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+ final WorkflowDefinition parentWorkflow = context.getOneWorkflow();
+
+ final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+ .workflowDefinition(parentWorkflow)
+ .runWorkflowCommandParam(new RunWorkflowCommandParam())
+ .build();
+ final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+ await()
+ .atMost(Duration.ofMinutes(1))
+ .untilAsserted(() -> {
+
+ Assertions
+ .assertThat(repository.queryWorkflowInstance(workflowInstanceId))
+ .matches(
+ workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.SUCCESS)
+ .matches(
+ workflowInstance -> workflowInstance.getIsSubWorkflow() == Flag.NO)
+ .matches(
+ workflowInstance -> workflowInstance.getDryRun() == Flag.NO.getCode());
+
+ final List subWorkflowInstance =
+ repository.queryWorkflowInstance(context.getWorkflows().get(1));
+ Assertions
+ .assertThat(subWorkflowInstance)
+ .hasSize(1)
+ .satisfiesExactly(workflowInstance -> {
+ assertThat(workflowInstance.getState()).isEqualTo(WorkflowExecutionStatus.SUCCESS);
+ assertThat(workflowInstance.getIsSubWorkflow()).isEqualTo(Flag.YES);
+ assertThat(workflowInstance.getDryRun()).isEqualTo(Flag.NO.getCode());
+ });
+
+ Assertions
+ .assertThat(repository.queryTaskInstance(workflowInstanceId))
+ .satisfiesExactly(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("sub_logic_task");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+ });
+
+ Assertions
+ .assertThat(repository.queryTaskInstance(subWorkflowInstance.get(0).getId()))
+ .satisfiesExactly(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("fake_task");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+ });
+ });
+
+ masterContainer.assertAllResourceReleased();
+ }
+
+    @Test
+    @DisplayName("Test start a workflow with one sub workflow task(A) dry run, will not execute")
+    public void testStartWorkflow_with_subWorkflowTask_dryRunSuccess() {
+        final String yaml = "/it/start/workflow_with_sub_workflow_task_success.yaml"; // same fixture as success case, run in dry-run mode
+        final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+        final WorkflowDefinition parentWorkflow = context.getOneWorkflow();
+
+        final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+                .workflowDefinition(parentWorkflow)
+                .runWorkflowCommandParam(new RunWorkflowCommandParam())
+                .dryRun(Flag.YES) // dry-run: tasks are marked success without real execution
+                .build();
+        final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+        await()
+                .atMost(Duration.ofMinutes(1))
+                .untilAsserted(() -> {
+
+                    Assertions
+                            .assertThat(repository.queryWorkflowInstance(workflowInstanceId))
+                            .matches(
+                                    workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.SUCCESS)
+                            .matches(
+                                    workflowInstance -> workflowInstance.getIsSubWorkflow() == Flag.NO)
+                            .matches(
+                                    workflowInstance -> workflowInstance.getDryRun() == Flag.YES.getCode()); // dry-run flag persisted on the instance
+
+                    final List<WorkflowInstance> subWorkflowInstance = // typed list instead of raw List
+                            repository.queryWorkflowInstance(context.getWorkflows().get(1));
+                    Assertions
+                            .assertThat(subWorkflowInstance)
+                            .isEmpty(); // dry-run must not spawn the child workflow instance
+
+                    Assertions
+                            .assertThat(repository.queryTaskInstance(workflowInstanceId))
+                            .satisfiesExactly(taskInstance -> {
+                                assertThat(taskInstance.getName()).isEqualTo("sub_logic_task");
+                                assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+                                assertThat(taskInstance.getDryRun()).isEqualTo(Flag.YES.getCode());
+                            });
+                });
+        masterContainer.assertAllResourceReleased();
+    }
+
+    @Test
+    @DisplayName("Test start a workflow with one sub workflow task(A) failed")
+    public void testStartWorkflow_with_subWorkflowTask_failed() {
+        final String yaml = "/it/start/workflow_with_sub_workflow_task_failed.yaml"; // fixture: child workflow's task fails
+        final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+        final WorkflowDefinition parentWorkflow = context.getOneWorkflow();
+
+        final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+                .workflowDefinition(parentWorkflow)
+                .runWorkflowCommandParam(new RunWorkflowCommandParam())
+                .build();
+        final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+        await()
+                .atMost(Duration.ofMinutes(1))
+                .untilAsserted(() -> {
+
+                    Assertions
+                            .assertThat(repository.queryWorkflowInstance(workflowInstanceId))
+                            .matches(
+                                    workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.FAILURE) // child failure propagates to parent
+                            .matches(
+                                    workflowInstance -> workflowInstance.getIsSubWorkflow() == Flag.NO);
+
+                    final List<WorkflowInstance> subWorkflowInstance = // typed list instead of raw List
+                            repository.queryWorkflowInstance(context.getWorkflows().get(1));
+                    Assertions
+                            .assertThat(subWorkflowInstance)
+                            .hasSize(1)
+                            .satisfiesExactly(workflowInstance -> {
+                                assertThat(workflowInstance.getState()).isEqualTo(WorkflowExecutionStatus.FAILURE);
+                                assertThat(workflowInstance.getIsSubWorkflow()).isEqualTo(Flag.YES);
+                            });
+
+                    Assertions
+                            .assertThat(repository.queryTaskInstance(workflowInstanceId))
+                            .satisfiesExactly(taskInstance -> { // parent's wrapping logic task mirrors the failure
+                                assertThat(taskInstance.getName()).isEqualTo("sub_logic_task");
+                                assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.FAILURE);
+                            });
+
+                    Assertions
+                            .assertThat(repository.queryTaskInstance(subWorkflowInstance.get(0).getId()))
+                            .satisfiesExactly(taskInstance -> {
+                                assertThat(taskInstance.getName()).isEqualTo("fake_task");
+                                assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.FAILURE);
+                            });
+                });
+
+        masterContainer.assertAllResourceReleased();
+    }
+
+}
diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartSwitchTestCase.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartSwitchTestCase.java
new file mode 100644
index 000000000000..67faea327184
--- /dev/null
+++ b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartSwitchTestCase.java
@@ -0,0 +1,131 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.dolphinscheduler.server.master.integration.cases;
+
+import static com.google.common.truth.Truth.assertThat;
+import static org.awaitility.Awaitility.await;
+
+import org.apache.dolphinscheduler.common.enums.FailureStrategy;
+import org.apache.dolphinscheduler.common.enums.Flag;
+import org.apache.dolphinscheduler.common.enums.WorkflowExecutionStatus;
+import org.apache.dolphinscheduler.dao.entity.WorkflowDefinition;
+import org.apache.dolphinscheduler.extract.master.command.RunWorkflowCommandParam;
+import org.apache.dolphinscheduler.plugin.task.api.enums.TaskExecutionStatus;
+import org.apache.dolphinscheduler.server.master.AbstractMasterIntegrationTestCase;
+import org.apache.dolphinscheduler.server.master.integration.WorkflowOperator;
+import org.apache.dolphinscheduler.server.master.integration.WorkflowTestCaseContext;
+
+import java.time.Duration;
+
+import org.assertj.core.api.Assertions;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Test;
+
+/**
+ * Integration tests for workflow start switch task scenarios.
+ */
+public class WorkflowStartSwitchTestCase extends AbstractMasterIntegrationTestCase {
+
+    @Test
+    @DisplayName("Test start a workflow with one success switch task and two fake task")
+    public void testStartWorkflow_with_oneSuccessSwitch_twoFakeTask() {
+        final String yaml = "/it/start/workflow_with_one_success_switch_two_fake_task.yaml"; // switch condition matches the success branch
+        final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+        final WorkflowDefinition parentWorkflow = context.getOneWorkflow();
+
+        final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+                .workflowDefinition(parentWorkflow)
+                .runWorkflowCommandParam(new RunWorkflowCommandParam())
+                .failureStrategy(FailureStrategy.CONTINUE) // keep running remaining branches even if one fails
+                .build();
+        final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+        await()
+                .atMost(Duration.ofMinutes(1))
+                .untilAsserted(() -> {
+
+                    Assertions
+                            .assertThat(repository.queryWorkflowInstance(workflowInstanceId))
+                            .matches(
+                                    workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.SUCCESS)
+                            .matches(
+                                    workflowInstance -> workflowInstance.getIsSubWorkflow() == Flag.NO)
+                            .matches(
+                                    workflowInstance -> workflowInstance.getDryRun() == Flag.NO.getCode());
+
+                    Assertions
+                            .assertThat(repository.queryTaskInstance(workflowInstanceId))
+                            .hasSize(2) // only the switch task and the selected branch ran; the other branch was skipped
+                            .anySatisfy(taskInstance -> {
+                                assertThat(taskInstance.getName()).isEqualTo("switch_task");
+                                assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+                            })
+                            .anySatisfy(taskInstance -> {
+                                assertThat(taskInstance.getName()).isEqualTo("success_branch");
+                                assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+                            });
+
+                });
+
+        masterContainer.assertAllResourceReleased();
+    }
+
+    @Test
+    @DisplayName("Test start a workflow with one failed switch task and two fake task")
+    public void testStartWorkflow_with_oneFailedSwitch_twoFakeTask() {
+        final String yaml = "/it/start/workflow_with_one_failed_switch_two_fake_task.yaml"; // no switch condition matches, default branch is taken
+        final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+        final WorkflowDefinition parentWorkflow = context.getOneWorkflow();
+
+        final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+                .workflowDefinition(parentWorkflow)
+                .runWorkflowCommandParam(new RunWorkflowCommandParam())
+                .build();
+        final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+        await()
+                .atMost(Duration.ofMinutes(1))
+                .untilAsserted(() -> {
+
+                    Assertions
+                            .assertThat(repository.queryWorkflowInstance(workflowInstanceId))
+                            .matches(
+                                    workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.SUCCESS) // falling to default branch is still a success
+                            .matches(
+                                    workflowInstance -> workflowInstance.getIsSubWorkflow() == Flag.NO)
+                            .matches(
+                                    workflowInstance -> workflowInstance.getDryRun() == Flag.NO.getCode());
+
+                    Assertions
+                            .assertThat(repository.queryTaskInstance(workflowInstanceId))
+                            .hasSize(2) // switch task plus the default branch only
+                            .anySatisfy(taskInstance -> {
+                                assertThat(taskInstance.getName()).isEqualTo("switch_task");
+                                assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+                            })
+                            .anySatisfy(taskInstance -> {
+                                assertThat(taskInstance.getName()).isEqualTo("default_branch");
+                                assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+                            });
+
+                });
+
+        masterContainer.assertAllResourceReleased();
+    }
+
+}
diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartTaskGroupTestCase.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartTaskGroupTestCase.java
new file mode 100644
index 000000000000..4fb7bc59377c
--- /dev/null
+++ b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartTaskGroupTestCase.java
@@ -0,0 +1,82 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.dolphinscheduler.server.master.integration.cases;
+
+import static org.awaitility.Awaitility.await;
+
+import org.apache.dolphinscheduler.dao.entity.TaskInstance;
+import org.apache.dolphinscheduler.dao.entity.WorkflowDefinition;
+import org.apache.dolphinscheduler.extract.master.command.RunWorkflowCommandParam;
+import org.apache.dolphinscheduler.plugin.task.api.enums.TaskExecutionStatus;
+import org.apache.dolphinscheduler.server.master.AbstractMasterIntegrationTestCase;
+import org.apache.dolphinscheduler.server.master.integration.WorkflowOperator;
+import org.apache.dolphinscheduler.server.master.integration.WorkflowTestCaseContext;
+
+import java.time.Duration;
+import java.util.List;
+
+import org.assertj.core.api.Assertions;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Test;
+
+/**
+ * Integration tests for workflow start task group scenarios.
+ */
+public class WorkflowStartTaskGroupTestCase extends AbstractMasterIntegrationTestCase {
+
+    @Test
+    @DisplayName("Test start a workflow with two fake task(A) using task group")
+    public void testStartWorkflow_with_successTaskUsingTaskGroup() {
+        final String yaml = "/it/start/workflow_with_fake_tasks_using_task_group.yaml"; // fixture: tasks A/B share a task group of size 1
+        final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+        final WorkflowDefinition workflow = context.getOneWorkflow();
+
+        final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+                .workflowDefinition(workflow)
+                .runWorkflowCommandParam(new RunWorkflowCommandParam())
+                .build();
+
+        workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+        await()
+                .atMost(Duration.ofMinutes(2))
+                .atLeast(Duration.ofSeconds(20)) // serialization through the group makes the run take a while
+                .untilAsserted(() -> {
+                    final List<TaskInstance> taskInstances = repository.queryTaskInstance(workflow); // typed list instead of raw List
+                    Assertions
+                            .assertThat(taskInstances)
+                            .hasSize(2)
+                            .allMatch(taskInstance -> TaskExecutionStatus.SUCCESS.equals(taskInstance.getState()) &&
+                                    taskInstance.getTaskGroupId() == context.getTaskGroups().get(0).getId()); // NOTE(review): if both ids are boxed Integers, use equals — confirm types
+
+                    final TaskInstance taskA = taskInstances.stream()
+                            .filter(t -> "A".equals(t.getName()))
+                            .findFirst().orElseThrow(); // fail loudly instead of unchecked Optional.get()
+                    final TaskInstance taskB = taskInstances.stream()
+                            .filter(t -> "B".equals(t.getName()))
+                            .findFirst().orElseThrow();
+                    // TaskA's task group priority is smaller than B, so B acquires the group slot first
+                    Assertions.assertThat(taskA.getStartTime()).isAfter(taskB.getStartTime());
+                    Assertions.assertThat(taskA.getEndTime()).isAfter(taskB.getEndTime());
+
+                });
+
+        masterContainer.assertAllResourceReleased();
+    }
+
+}
diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartTestCase.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartTestCase.java
deleted file mode 100644
index d62c52f45d4a..000000000000
--- a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartTestCase.java
+++ /dev/null
@@ -1,1967 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.server.master.integration.cases;
-
-import static com.google.common.truth.Truth.assertThat;
-import static org.awaitility.Awaitility.await;
-
-import org.apache.dolphinscheduler.common.enums.AlertType;
-import org.apache.dolphinscheduler.common.enums.FailureStrategy;
-import org.apache.dolphinscheduler.common.enums.Flag;
-import org.apache.dolphinscheduler.common.enums.TaskDependType;
-import org.apache.dolphinscheduler.common.enums.WorkflowExecutionStatus;
-import org.apache.dolphinscheduler.dao.entity.TaskInstance;
-import org.apache.dolphinscheduler.dao.entity.WorkflowDefinition;
-import org.apache.dolphinscheduler.dao.entity.WorkflowInstance;
-import org.apache.dolphinscheduler.extract.master.command.RunWorkflowCommandParam;
-import org.apache.dolphinscheduler.plugin.task.api.enums.DataType;
-import org.apache.dolphinscheduler.plugin.task.api.enums.Direct;
-import org.apache.dolphinscheduler.plugin.task.api.enums.TaskExecutionStatus;
-import org.apache.dolphinscheduler.plugin.task.api.model.Property;
-import org.apache.dolphinscheduler.plugin.task.api.utils.VarPoolUtils;
-import org.apache.dolphinscheduler.server.master.AbstractMasterIntegrationTestCase;
-import org.apache.dolphinscheduler.server.master.config.TaskDispatchPolicy;
-import org.apache.dolphinscheduler.server.master.integration.WorkflowOperator;
-import org.apache.dolphinscheduler.server.master.integration.WorkflowTestCaseContext;
-
-import org.apache.commons.lang3.time.DateUtils;
-
-import java.time.Duration;
-import java.util.List;
-import java.util.function.Consumer;
-
-import org.assertj.core.api.Assertions;
-import org.junit.jupiter.api.DisplayName;
-import org.junit.jupiter.api.Test;
-
-import com.google.common.collect.Lists;
-
-/**
- * The integration test for starting a workflow from workflow definition.
- * In each test method, will create different workflow from yaml, and then trigger it, and do assertions.
- *
 - * The method name should be clear to describe the test scenario.
- */
-public class WorkflowStartTestCase extends AbstractMasterIntegrationTestCase {
-
- @Test
- @DisplayName("Test start a workflow with one fake task(A) success")
- public void testStartWorkflow_with_oneSuccessTask() {
- final String yaml = "/it/start/workflow_with_one_fake_task_success.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition workflow = context.getOneWorkflow();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(workflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .build();
- final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofMinutes(1))
- .untilAsserted(() -> {
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflowInstanceId))
- .matches(
- workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.SUCCESS)
- .matches(
- workflowInstance -> workflowInstance.getDryRun() == Flag.NO.getCode());
- Assertions
- .assertThat(repository.queryTaskInstance(workflow))
- .satisfiesExactly(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("A");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- assertThat(taskInstance.getDryRun()).isEqualTo(Flag.NO.getCode());
- });
- });
-
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow with one fake task(A) dry run success")
- public void testStartWorkflow_with_oneSuccessTaskDryRun() {
- final String yaml = "/it/start/workflow_with_one_fake_task_success.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition workflow = context.getOneWorkflow();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(workflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .dryRun(Flag.YES)
- .build();
- final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofMinutes(1))
- .untilAsserted(() -> {
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflowInstanceId))
- .matches(
- workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.SUCCESS)
- .matches(
- workflowInstance -> workflowInstance.getDryRun() == Flag.YES.getCode());
- Assertions
- .assertThat(repository.queryTaskInstance(workflow))
- .satisfiesExactly(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("A");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- assertThat(taskInstance.getDryRun()).isEqualTo(Flag.YES.getCode());
- });
- });
-
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow with two fake task(A) has the same name")
- public void testStartWorkflow_contains_duplicateTaskName() {
- final String yaml = "/it/start/workflow_with_duplicate_task_name.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition workflow = context.getOneWorkflow();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(workflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .build();
- final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofMinutes(1))
- .untilAsserted(() -> {
- assertThat(repository.queryWorkflowInstance(workflowInstanceId).getState())
- .isEqualTo(WorkflowExecutionStatus.FAILURE);
- assertThat(repository.queryTaskInstance(workflowInstanceId)).isEmpty();
- });
-
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow with one fake task(A) using serial wait strategy")
- public void testStartWorkflow_with_serialWaitStrategy() {
- final String yaml = "/it/start/workflow_with_serial_wait_strategy.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition workflow = context.getOneWorkflow();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(workflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .build();
- final Integer workflowInstanceId1 = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
- final Integer workflowInstanceId2 = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
- final Integer workflowInstanceId3 = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofMinutes(1))
- .untilAsserted(() -> {
- assertThat(repository.queryWorkflowInstance(workflowInstanceId1).getState())
- .isEqualTo(WorkflowExecutionStatus.RUNNING_EXECUTION);
- assertThat(repository.queryWorkflowInstance(workflowInstanceId2).getState())
- .isEqualTo(WorkflowExecutionStatus.SERIAL_WAIT);
- assertThat(repository.queryWorkflowInstance(workflowInstanceId3).getState())
- .isEqualTo(WorkflowExecutionStatus.SERIAL_WAIT);
- });
-
- await()
- .atMost(Duration.ofMinutes(1))
- .untilAsserted(() -> {
- final WorkflowInstance workflowInstance1 = repository.queryWorkflowInstance(workflowInstanceId1);
- final WorkflowInstance workflowInstance2 = repository.queryWorkflowInstance(workflowInstanceId2);
- final WorkflowInstance workflowInstance3 = repository.queryWorkflowInstance(workflowInstanceId3);
- assertThat(workflowInstance1.getState()).isEqualTo(WorkflowExecutionStatus.SUCCESS);
- assertThat(workflowInstance2.getState()).isEqualTo(WorkflowExecutionStatus.SUCCESS);
- assertThat(workflowInstance2.getEndTime())
- .isAtLeast(DateUtils.addSeconds(workflowInstance1.getEndTime(), 5));
- assertThat(workflowInstance3.getState()).isEqualTo(WorkflowExecutionStatus.SUCCESS);
- assertThat(workflowInstance3.getEndTime())
- .isAtLeast(DateUtils.addSeconds(workflowInstance2.getEndTime(), 5));
- });
-
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow with one fake task(A) using serial discard strategy")
- public void testStartWorkflow_with_serialDiscardStrategy() {
- final String yaml = "/it/start/workflow_with_serial_discard_strategy.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition workflow = context.getOneWorkflow();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(workflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .build();
- final Integer workflowInstanceId1 = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
- final Integer workflowInstanceId2 = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
- final Integer workflowInstanceId3 = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofMinutes(1))
- .untilAsserted(() -> {
- final WorkflowInstance workflowInstance1 = repository.queryWorkflowInstance(workflowInstanceId1);
- final WorkflowInstance workflowInstance2 = repository.queryWorkflowInstance(workflowInstanceId2);
- final WorkflowInstance workflowInstance3 = repository.queryWorkflowInstance(workflowInstanceId3);
- assertThat(workflowInstance1.getState()).isEqualTo(WorkflowExecutionStatus.SUCCESS);
- assertThat(workflowInstance2.getState()).isEqualTo(WorkflowExecutionStatus.STOP);
- assertThat(workflowInstance2.getEndTime()).isNotNull();
- assertThat(workflowInstance2.getEndTime()).isAtLeast(workflowInstance2.getStartTime());
- assertThat(workflowInstance3.getState()).isEqualTo(WorkflowExecutionStatus.STOP);
- assertThat(workflowInstance3.getEndTime()).isNotNull();
- assertThat(workflowInstance3.getEndTime()).isAtLeast(workflowInstance3.getStartTime());
- });
-
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow with one fake task(A) using serial priority strategy")
- public void testStartWorkflow_with_serialPriorityStrategy() {
- final String yaml = "/it/start/workflow_with_serial_priority_strategy.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition workflow = context.getOneWorkflow();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(workflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .build();
- final Integer workflowInstanceId1 = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
- final Integer workflowInstanceId2 = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
- final Integer workflowInstanceId3 = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofMinutes(1))
- .untilAsserted(() -> {
- final WorkflowInstance workflowInstance1 = repository.queryWorkflowInstance(workflowInstanceId1);
- final WorkflowInstance workflowInstance2 = repository.queryWorkflowInstance(workflowInstanceId2);
- final WorkflowInstance workflowInstance3 = repository.queryWorkflowInstance(workflowInstanceId3);
- assertThat(workflowInstance1.getState()).isEqualTo(WorkflowExecutionStatus.STOP);
- assertThat(workflowInstance1.getEndTime()).isNotNull();
- assertThat(workflowInstance1.getEndTime()).isAtLeast(workflowInstance1.getStartTime());
- assertThat(workflowInstance2.getState()).isEqualTo(WorkflowExecutionStatus.STOP);
- assertThat(workflowInstance2.getEndTime()).isNotNull();
- assertThat(workflowInstance2.getEndTime()).isAtLeast(workflowInstance2.getStartTime());
- assertThat(workflowInstance3.getState()).isEqualTo(WorkflowExecutionStatus.SUCCESS);
- });
-
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow with three fake task(A) using end failure strategy")
- public void testStartWorkflow_with_threeFakeTask_usingFailureStrategyEnd() {
- final String yaml = "/it/start/workflow_with_three_parallel_fake_task_using_failure_strategy.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition workflow = context.getOneWorkflow();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(workflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .failureStrategy(FailureStrategy.END)
- .build();
- final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofMinutes(1))
- .untilAsserted(() -> {
- assertThat(repository.queryWorkflowInstance(workflowInstanceId).getState())
- .isEqualTo(WorkflowExecutionStatus.FAILURE);
- Assertions.assertThat(repository.queryTaskInstance(workflow))
- .hasSize(3)
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("A");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.KILL);
- })
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("B");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.KILL);
- })
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("C");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.FAILURE);
- });
-
- });
-
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow with two fake task(A) using task group")
- public void testStartWorkflow_with_successTaskUsingTaskGroup() {
- final String yaml = "/it/start/workflow_with_fake_tasks_using_task_group.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition workflow = context.getOneWorkflow();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(workflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .build();
-
- workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofMinutes(2))
- .atLeast(Duration.ofSeconds(20))
- .untilAsserted(() -> {
- final List taskInstances = repository.queryTaskInstance(workflow);
- Assertions
- .assertThat(taskInstances)
- .hasSize(2)
- .allMatch(taskInstance -> TaskExecutionStatus.SUCCESS.equals(taskInstance.getState()) &&
- taskInstance.getTaskGroupId() == context.getTaskGroups().get(0).getId());
-
- final TaskInstance taskA = taskInstances.stream()
- .filter(t -> "A".equals(t.getName()))
- .findFirst().get();
- final TaskInstance taskB = taskInstances.stream()
- .filter(t -> "B".equals(t.getName()))
- .findFirst().get();
- // TaskA's task group priority is smaller than B
- Assertions.assertThat(taskA.getStartTime()).isAfter(taskB.getStartTime());
- Assertions.assertThat(taskA.getEndTime()).isAfter(taskB.getEndTime());
-
- });
-
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow with one fake task(A) using environment config")
- public void testStartWorkflow_with_oneSuccessTaskUsingEnvironmentConfig() {
- final String yaml = "/it/start/workflow_with_one_fake_task_using_environment_success.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition workflow = context.getOneWorkflow();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(workflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .build();
-
- final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofMinutes(1))
- .untilAsserted(() -> {
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflowInstanceId))
- .matches(
- workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.SUCCESS);
- Assertions
- .assertThat(repository.queryTaskInstance(workflow))
- .satisfiesExactly(taskInstance -> {
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- });
- });
-
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow with one success switch task and two fake task")
- public void testStartWorkflow_with_oneSuccessSwitch_twoFakeTask() {
- final String yaml = "/it/start/workflow_with_one_success_switch_two_fake_task.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition parentWorkflow = context.getOneWorkflow();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(parentWorkflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .failureStrategy(FailureStrategy.CONTINUE)
- .build();
- final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofMinutes(1))
- .untilAsserted(() -> {
-
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflowInstanceId))
- .matches(
- workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.SUCCESS)
- .matches(
- workflowInstance -> workflowInstance.getIsSubWorkflow() == Flag.NO)
- .matches(
- workflowInstance -> workflowInstance.getDryRun() == Flag.NO.getCode());
-
- Assertions
- .assertThat(repository.queryTaskInstance(workflowInstanceId))
- .hasSize(2)
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("switch_task");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- })
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("success_branch");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- });
-
- });
-
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow with one failed switch task and two fake task")
- public void testStartWorkflow_with_oneFailedSwitch_twoFakeTask() {
- final String yaml = "/it/start/workflow_with_one_failed_switch_two_fake_task.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition parentWorkflow = context.getOneWorkflow();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(parentWorkflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .build();
- final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofMinutes(1))
- .untilAsserted(() -> {
-
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflowInstanceId))
- .matches(
- workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.SUCCESS)
- .matches(
- workflowInstance -> workflowInstance.getIsSubWorkflow() == Flag.NO)
- .matches(
- workflowInstance -> workflowInstance.getDryRun() == Flag.NO.getCode());
-
- Assertions
- .assertThat(repository.queryTaskInstance(workflowInstanceId))
- .hasSize(2)
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("switch_task");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- })
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("default_branch");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- });
-
- });
-
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow with one sub workflow task(A) success")
- public void testStartWorkflow_with_subWorkflowTask_success() {
- final String yaml = "/it/start/workflow_with_sub_workflow_task_success.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition parentWorkflow = context.getOneWorkflow();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(parentWorkflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .build();
- final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofMinutes(1))
- .untilAsserted(() -> {
-
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflowInstanceId))
- .matches(
- workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.SUCCESS)
- .matches(
- workflowInstance -> workflowInstance.getIsSubWorkflow() == Flag.NO)
- .matches(
- workflowInstance -> workflowInstance.getDryRun() == Flag.NO.getCode());
-
- final List subWorkflowInstance =
- repository.queryWorkflowInstance(context.getWorkflows().get(1));
- Assertions
- .assertThat(subWorkflowInstance)
- .hasSize(1)
- .satisfiesExactly(workflowInstance -> {
- assertThat(workflowInstance.getState()).isEqualTo(WorkflowExecutionStatus.SUCCESS);
- assertThat(workflowInstance.getIsSubWorkflow()).isEqualTo(Flag.YES);
- assertThat(workflowInstance.getDryRun()).isEqualTo(Flag.NO.getCode());
- });
-
- Assertions
- .assertThat(repository.queryTaskInstance(workflowInstanceId))
- .satisfiesExactly(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("sub_logic_task");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- });
-
- Assertions
- .assertThat(repository.queryTaskInstance(subWorkflowInstance.get(0).getId()))
- .satisfiesExactly(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("fake_task");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- });
- });
-
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow with one sub workflow task(A) dry run, will not execute")
- public void testStartWorkflow_with_subWorkflowTask_dryRunSuccess() {
- final String yaml = "/it/start/workflow_with_sub_workflow_task_success.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition parentWorkflow = context.getOneWorkflow();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(parentWorkflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .dryRun(Flag.YES)
- .build();
- final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofMinutes(1))
- .untilAsserted(() -> {
-
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflowInstanceId))
- .matches(
- workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.SUCCESS)
- .matches(
- workflowInstance -> workflowInstance.getIsSubWorkflow() == Flag.NO)
- .matches(
- workflowInstance -> workflowInstance.getDryRun() == Flag.YES.getCode());
-
- final List subWorkflowInstance =
- repository.queryWorkflowInstance(context.getWorkflows().get(1));
- Assertions
- .assertThat(subWorkflowInstance)
- .isEmpty();
-
- Assertions
- .assertThat(repository.queryTaskInstance(workflowInstanceId))
- .satisfiesExactly(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("sub_logic_task");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- assertThat(taskInstance.getDryRun()).isEqualTo(Flag.YES.getCode());
- });
- });
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow with one fake task(A) with multiple predecessors run success")
- void testStartWorkflow_with_oneTaskWithMultiplePredecessors_runSuccess() {
- final String yaml = "/it/start/workflow_with_one_fake_task_with_multiple_predecessors_success.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition parentWorkflow = context.getOneWorkflow();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(parentWorkflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .build();
- final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofMinutes(1))
- .untilAsserted(() -> {
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflowInstanceId))
- .matches(
- workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.SUCCESS);
-
- Assertions
- .assertThat(repository.queryTaskInstance(workflowInstanceId))
- .hasSize(4)
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("A");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- })
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("B");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- })
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("C");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- })
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("D");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- });
- });
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow with one fake task(A) with multiple predecessors run failed")
- void testStartWorkflow_with_oneTaskWithMultiplePredecessors_runFailed() {
- final String yaml = "/it/start/workflow_with_one_fake_task_with_multiple_predecessors_failed.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition parentWorkflow = context.getOneWorkflow();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(parentWorkflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .failureStrategy(FailureStrategy.CONTINUE)
- .build();
- final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofMinutes(1))
- .untilAsserted(() -> {
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflowInstanceId))
- .matches(
- workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.FAILURE);
-
- Assertions
- .assertThat(repository.queryTaskInstance(workflowInstanceId))
- .hasSize(3)
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("A");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- })
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("B");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.FAILURE);
- })
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("C");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- });
- });
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow with shared downstream task when failed predecessor finishes first using continue failure strategy")
- void testStartWorkflow_with_sharedDownstreamTask_whenFailedPredecessorFinishFirst_usingFailureStrategyContinue() {
- final String yaml =
- "/it/start/workflow_with_shared_downstream_task_when_failed_predecessor_finish_first.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition parentWorkflow = context.getOneWorkflow();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(parentWorkflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .failureStrategy(FailureStrategy.CONTINUE)
- .build();
- final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofMinutes(1))
- .untilAsserted(() -> {
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflowInstanceId))
- .matches(
- workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.FAILURE)
- .matches(workflowInstance -> workflowInstance.getEndTime() != null);
-
- Assertions
- .assertThat(repository.queryTaskInstance(workflowInstanceId))
- .hasSize(2)
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("A");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.FAILURE);
- })
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("B");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- });
- });
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow with one sub workflow task(A) failed")
- public void testStartWorkflow_with_subWorkflowTask_failed() {
- final String yaml = "/it/start/workflow_with_sub_workflow_task_failed.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition parentWorkflow = context.getOneWorkflow();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(parentWorkflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .build();
- final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofMinutes(1))
- .untilAsserted(() -> {
-
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflowInstanceId))
- .matches(
- workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.FAILURE)
- .matches(
- workflowInstance -> workflowInstance.getIsSubWorkflow() == Flag.NO);
-
- final List subWorkflowInstance =
- repository.queryWorkflowInstance(context.getWorkflows().get(1));
- Assertions
- .assertThat(subWorkflowInstance)
- .hasSize(1)
- .satisfiesExactly(workflowInstance -> {
- assertThat(workflowInstance.getState()).isEqualTo(WorkflowExecutionStatus.FAILURE);
- assertThat(workflowInstance.getIsSubWorkflow()).isEqualTo(Flag.YES);
- });
-
- Assertions
- .assertThat(repository.queryTaskInstance(workflowInstanceId))
- .satisfiesExactly(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("sub_logic_task");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.FAILURE);
- });
-
- Assertions
- .assertThat(repository.queryTaskInstance(subWorkflowInstance.get(0).getId()))
- .satisfiesExactly(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("fake_task");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.FAILURE);
- });
- });
-
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow which using workflow params")
- public void testStartWorkflow_usingWorkflowParam() {
- final String yaml = "/it/start/workflow_with_global_param.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition workflow = context.getOneWorkflow();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(workflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .build();
- workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofMinutes(1))
- .untilAsserted(() -> {
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflow))
- .satisfiesExactly(workflowInstance -> assertThat(workflowInstance.getState())
- .isEqualTo(WorkflowExecutionStatus.FAILURE));
- Assertions
- .assertThat(repository.queryTaskInstance(workflow))
- .hasSize(2)
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("A");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- })
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("B");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.FAILURE);
- });
- });
-
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow which using command params")
- public void testStartWorkflow_usingCommandParam() {
- final String yaml = "/it/start/workflow_with_global_param.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition workflow = context.getOneWorkflow();
-
- final RunWorkflowCommandParam runWorkflowCommandParam = RunWorkflowCommandParam.builder()
- .commandParams(Lists.newArrayList(Property.builder()
- .prop("name")
- .direct(Direct.IN)
- .type(DataType.VARCHAR)
- .value("commandParam")
- .build()))
- .build();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(workflow)
- .runWorkflowCommandParam(runWorkflowCommandParam)
- .build();
- workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofMinutes(1))
- .untilAsserted(() -> {
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflow))
- .satisfiesExactly(workflowInstance -> assertThat(workflowInstance.getState())
- .isEqualTo(WorkflowExecutionStatus.FAILURE));
- Assertions
- .assertThat(repository.queryTaskInstance(workflow))
- .hasSize(2)
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("A");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.FAILURE);
- })
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("B");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- });
- });
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow contains fake task using local param will be overwrite by varpool")
- public void testStartWorkflow_fakeTask_usingLocalParamOverWriteByVarPool() {
- final String yaml = "/it/start/workflow_with_local_param_overwrite_by_varpool.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition workflow = context.getOneWorkflow();
-
- final RunWorkflowCommandParam runWorkflowCommandParam = RunWorkflowCommandParam.builder()
- .build();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(workflow)
- .runWorkflowCommandParam(runWorkflowCommandParam)
- .build();
- workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- List assertVarPools = Lists.newArrayList(
- Property.builder().prop("output").direct(Direct.OUT).type(DataType.VARCHAR).value("1").build());
- await()
- .atMost(Duration.ofMinutes(1))
- .untilAsserted(() -> {
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflow))
- .satisfiesExactly(workflowInstance -> {
- assertThat(workflowInstance.getState()).isEqualTo(WorkflowExecutionStatus.SUCCESS);
- assertThat(VarPoolUtils.deserializeVarPool(workflowInstance.getVarPool()))
- .isEqualTo(assertVarPools);
- });
- Assertions
- .assertThat(repository.queryTaskInstance(workflow))
- .hasSize(3)
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("A");
- assertThat(VarPoolUtils.deserializeVarPool(taskInstance.getVarPool()))
- .isEqualTo(assertVarPools);
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- })
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("B");
- assertThat(VarPoolUtils.deserializeVarPool(taskInstance.getVarPool()))
- .isEqualTo(assertVarPools);
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- })
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("C");
- assertThat(VarPoolUtils.deserializeVarPool(taskInstance.getVarPool()))
- .isEqualTo(assertVarPools);
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- });
- });
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow which using null key params")
- public void testStartWorkflow_usingNullKeyParam() {
- final String yaml = "/it/start/workflow_with_null_key_param.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition workflow = context.getOneWorkflow();
-
- final RunWorkflowCommandParam runWorkflowCommandParam = RunWorkflowCommandParam.builder()
- .commandParams(Lists.newArrayList(Property.builder()
- .prop(null)
- .direct(Direct.IN)
- .type(DataType.VARCHAR)
- .value("commandParam")
- .build()))
- .build();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(workflow)
- .runWorkflowCommandParam(runWorkflowCommandParam)
- .build();
- workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofMinutes(1))
- .untilAsserted(() -> {
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflow))
- .satisfiesExactly(workflowInstance -> assertThat(workflowInstance.getState())
- .isEqualTo(WorkflowExecutionStatus.SUCCESS));
- Assertions
- .assertThat(repository.queryTaskInstance(workflow))
- .hasSize(2)
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("A");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- })
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("B");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- });
- });
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow with one fake task(A) failed")
- public void testStartWorkflow_with_oneFailedTask() {
- final String yaml = "/it/start/workflow_with_one_fake_task_failed.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition workflow = context.getOneWorkflow();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(workflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .build();
- workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofMinutes(1))
- .untilAsserted(() -> {
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflow))
- .satisfiesExactly(workflowInstance -> assertThat(workflowInstance.getState())
- .isEqualTo(WorkflowExecutionStatus.FAILURE));
- Assertions
- .assertThat(repository.queryTaskInstance(workflow))
- .satisfiesExactly(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("A");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.FAILURE);
- });
- });
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow with one fake task(A) fatal")
- public void testStartWorkflow_with_oneFatalTask() {
- final String yaml = "/it/start/workflow_with_one_fake_task_fatal.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition workflow = context.getOneWorkflow();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(workflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .build();
- workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofMinutes(1))
- .untilAsserted(() -> {
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflow))
- .satisfiesExactly(workflowInstance -> assertThat(workflowInstance.getState())
- .isEqualTo(WorkflowExecutionStatus.FAILURE));
- Assertions
- .assertThat(repository.queryTaskInstance(workflow))
- .satisfiesExactly(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("A");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.FAILURE);
- });
- });
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow with one fake task(A) failed")
- public void testStartWorkflow_with_oneFailedTaskWithRetry() {
- final String yaml = "/it/start/workflow_with_one_fake_task_failed_with_retry.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition workflow = context.getOneWorkflow();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(workflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .build();
- workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofMinutes(3))
- .untilAsserted(() -> {
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflow))
- .satisfiesExactly(workflowInstance -> assertThat(workflowInstance.getState())
- .isEqualTo(WorkflowExecutionStatus.FAILURE));
-
- final List taskInstances = repository.queryTaskInstance(workflow);
- Assertions
- .assertThat(taskInstances)
- .allSatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("A");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.FAILURE);
- })
- .hasSize(2);
-
- final TaskInstance taskInstance = taskInstances.get(0);
- Assertions
- .assertThat(taskInstance)
- .matches(task -> task.getRetryTimes() == 0)
- .matches(task -> task.getFlag() == Flag.NO)
- .isNotNull();
-
- final TaskInstance latestTaskInstance = taskInstances.get(1);
- Assertions
- .assertThat(latestTaskInstance)
- .matches(task -> task.getRetryTimes() == 1)
- .matches(task -> task.getFlag() == Flag.YES)
- .isNotNull();
- assertThat(latestTaskInstance.getFirstSubmitTime()).isEqualTo(taskInstance.getFirstSubmitTime());
- assertThat(latestTaskInstance.getSubmitTime())
- .isAtLeast(DateUtils.addSeconds(taskInstance.getSubmitTime(), -65));
- assertThat(latestTaskInstance.getSubmitTime())
- .isAtMost(DateUtils.addMinutes(taskInstance.getSubmitTime(), 65));
- });
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow with two serial fake tasks(A -> B) success")
- public void testStartWorkflow_with_twoSerialSuccessTask() {
- String yaml = "/it/start/workflow_with_two_serial_fake_task_success.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition workflow = context.getOneWorkflow();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(workflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .build();
- workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofMinutes(1))
- .untilAsserted(() -> {
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflow))
- .satisfiesExactly(workflowInstance -> assertThat(workflowInstance.getState())
- .isEqualTo(WorkflowExecutionStatus.SUCCESS))
- .hasSize(1);
-
- final List taskInstances = repository.queryTaskInstance(workflow);
- Assertions
- .assertThat(taskInstances)
- .hasSize(2)
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("A");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- })
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("B");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- });
- });
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow with two serial fake tasks(A(failed) -> B) success")
- public void testStartWorkflow_with_twoSerialFailedTask() {
- final String yaml = "/it/start/workflow_with_two_serial_fake_task_failed.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition workflow = context.getOneWorkflow();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(workflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .build();
- workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofMinutes(1))
- .untilAsserted(() -> {
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflow))
- .satisfiesExactly(workflowInstance -> assertThat(workflowInstance.getState())
- .isEqualTo(WorkflowExecutionStatus.FAILURE));
- Assertions
- .assertThat(repository.queryTaskInstance(workflow))
- .satisfiesExactly(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("A");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.FAILURE);
- });
- });
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow with two parallel fake tasks(A, B) success")
- public void testStartWorkflow_with_twoParallelSuccessTask() {
- final String yaml = "/it/start/workflow_with_two_parallel_fake_task_success.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition workflow = context.getOneWorkflow();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(workflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .build();
- workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofMinutes(1))
- .untilAsserted(() -> {
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflow))
- .satisfiesExactly(workflowInstance -> assertThat(workflowInstance.getState())
- .isEqualTo(WorkflowExecutionStatus.SUCCESS));
-
- final List taskInstances = repository.queryTaskInstance(workflow);
- Assertions
- .assertThat(taskInstances)
- .hasSize(2)
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("A");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- })
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("B");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- });
- });
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow with two parallel fake tasks(A(failed), B(failed)) success")
- public void testStartWorkflow_with_twoParallelFailedTask() {
- final String yaml = "/it/start/workflow_with_two_parallel_fake_task_failed.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition workflow = context.getOneWorkflow();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(workflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .build();
- workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofMinutes(1))
- .untilAsserted(() -> {
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflow))
- .filteredOn(
- workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.FAILURE)
- .hasSize(1);
-
- final List taskInstances = repository.queryTaskInstance(workflow);
- Assertions
- .assertThat(taskInstances)
- .hasSize(2)
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("A");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.FAILURE);
- })
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("B");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.FAILURE);
- });
- });
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow with three parallel three fake tasks(A1->A2->A3, B1->B2->B3, C1->C2->C3) success")
- public void testStartWorkflow_with_threeParallelSuccessTask() {
- final String yaml = "/it/start/workflow_with_three_parallel_three_fake_task_success.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition workflow = context.getOneWorkflow();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(workflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .build();
- workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofMinutes(1))
- .untilAsserted(() -> {
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflow))
- .filteredOn(
- workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.SUCCESS)
- .hasSize(1);
-
- final List taskInstances = repository.queryTaskInstance(workflow);
- Assertions
- .assertThat(taskInstances)
- .hasSize(9)
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("A1");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- })
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("A2");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- })
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("A3");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- })
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("B1");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- })
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("B2");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- })
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("B3");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- })
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("C1");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- })
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("C2");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- })
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("C3");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- });
- });
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow with three parallel three fake tasks(A1->A2->A3, B1->B2->B3, C1->C2->C3) success")
- public void testStartWorkflowFromStartNodes_with_threeParallelSuccessTask() {
- final String yaml = "/it/start/workflow_with_three_parallel_three_fake_task_success.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition workflow = context.getOneWorkflow();
-
- final RunWorkflowCommandParam runWorkflowCommandParam = RunWorkflowCommandParam.builder()
- .startNodes(Lists.newArrayList(6L))
- .build();
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(workflow)
- .runWorkflowCommandParam(runWorkflowCommandParam)
- .build();
- workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofMinutes(1))
- .untilAsserted(() -> {
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflow))
- .filteredOn(
- workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.SUCCESS)
- .hasSize(1);
-
- final List taskInstances = repository.queryTaskInstance(workflow);
- Assertions
- .assertThat(taskInstances)
- .hasSize(2)
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("C2");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- })
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("C3");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- });
- });
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow which using workflow built in params")
- public void testStartWorkflow_usingWorkflowBuiltInParam() {
- final String yaml = "/it/start/workflow_with_built_in_param.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition workflow = context.getOneWorkflow();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(workflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .build();
- workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofMinutes(1))
- .untilAsserted(() -> {
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflow))
- .satisfiesExactly(workflowInstance -> assertThat(workflowInstance.getState())
- .isEqualTo(WorkflowExecutionStatus.SUCCESS));
- Assertions
- .assertThat(repository.queryTaskInstance(workflow))
- .hasSize(2)
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("A");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- })
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("B");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- });
- });
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow which contains a dep task with timeout kill strategy")
- public void testStartWorkflow_withTimeoutKillTask() {
- final String yaml = "/it/start/workflow_with_timeout_kill_task.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition workflow = context.getWorkflow("workflow_with_timeout_kill_task");
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(workflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .build();
- workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofSeconds(90))
- .untilAsserted(() -> {
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflow))
- .satisfiesExactly(workflowInstance -> assertThat(workflowInstance.getState())
- .isEqualTo(WorkflowExecutionStatus.STOP));
- Assertions
- .assertThat(repository.queryTaskInstance(workflow))
- .hasSize(1)
- .satisfiesExactly(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("dep_task_with_timeout_killed");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.KILL);
- });
- });
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow which contains a dep task will be kill by system timeout")
- public void testStartWorkflow_withSystemTimeoutKillTask() {
- masterConfig.getServerLoadProtection().setMaxTaskInstanceRuntime(Duration.ofMinutes(1));
-
- final String yaml = "/it/start/workflow_with_system_timeout_kill_task.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition workflow = context.getWorkflow("workflow_with_timeout_kill_task");
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(workflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .build();
- workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofSeconds(90))
- .untilAsserted(() -> {
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflow))
- .satisfiesExactly(workflowInstance -> assertThat(workflowInstance.getState())
- .isEqualTo(WorkflowExecutionStatus.STOP));
- Assertions
- .assertThat(repository.queryTaskInstance(workflow))
- .hasSize(1)
- .satisfiesExactly(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("dep_task_with_timeout_killed");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.KILL);
- });
- });
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow with task depend type TASK_ONLY")
- public void testStartWorkflow_withTaskOnlyStrategy() {
- final String yaml = "/it/start/workflow_with_task_only_strategy.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition workflow = context.getOneWorkflow();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(workflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam().withStartNodes(Lists.newArrayList(1L)))
- .taskDependType(TaskDependType.TASK_ONLY)
- .build();
- workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofMinutes(1))
- .untilAsserted(() -> {
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflow))
- .satisfiesExactly(workflowInstance -> assertThat(workflowInstance.getState())
- .isEqualTo(WorkflowExecutionStatus.SUCCESS));
- Assertions
- .assertThat(repository.queryTaskInstance(workflow))
- .hasSize(1)
- .satisfiesExactly(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("A");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- });
- });
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow with task which successors is forbidden")
- public void testStartWorkflow_withTaskSuccessorsIsForbidden() {
- final String yaml = "/it/start/workflow_with_task_successors_is_forbidden.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition workflow = context.getOneWorkflow();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(workflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .build();
- workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofMinutes(1))
- .untilAsserted(() -> {
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflow))
- .satisfiesExactly(workflowInstance -> assertThat(workflowInstance.getState())
- .isEqualTo(WorkflowExecutionStatus.SUCCESS));
- Assertions
- .assertThat(repository.queryTaskInstance(workflow))
- .hasSize(2)
- .satisfiesExactly(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("A");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- }, (Consumer) taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("C1");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- });
- });
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow with one condition task(B) when one fake predecessor task(A) run success")
- void testStartWorkflow_with_oneConditionTaskWithOneFakePredecessor_runSuccess() {
- final String yaml = "/it/start/workflow_with_one_condition_task_with_one_fake_predecessor_success.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition parentWorkflow = context.getOneWorkflow();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(parentWorkflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .build();
- final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofMinutes(1))
- .untilAsserted(() -> {
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflowInstanceId))
- .matches(
- workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.SUCCESS);
-
- Assertions
- .assertThat(repository.queryTaskInstance(workflowInstanceId))
- .hasSize(3)
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("A");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- })
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("B");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- })
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("C");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- });
- });
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow with one condition task(B) which is forbidden when one fake predecessor task(A) run success")
- void testStartWorkflow_with_oneForbiddenConditionTaskWithOneFakePredecessor_runSuccess() {
- final String yaml =
- "/it/start/workflow_with_one_forbidden_condition_task_with_one_fake_predecessor_success.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition parentWorkflow = context.getOneWorkflow();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(parentWorkflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .build();
- final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofMinutes(1))
- .untilAsserted(() -> {
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflowInstanceId))
- .matches(
- workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.SUCCESS);
- Assertions
- .assertThat(repository.queryTaskInstance(workflowInstanceId))
- .hasSize(3)
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("A");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- })
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("C");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- })
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("D");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- });
- });
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow with one condition task(B) when one fake predecessor task(A) run failed")
- void testStartWorkflow_with_oneConditionTaskWithOneFakePredecessor_runFailed() {
- final String yaml = "/it/start/workflow_with_one_condition_task_with_one_fake_predecessor_failed.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition parentWorkflow = context.getOneWorkflow();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(parentWorkflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .build();
- final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofMinutes(1))
- .untilAsserted(() -> {
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflowInstanceId))
- .matches(
- workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.SUCCESS);
-
- Assertions
- .assertThat(repository.queryTaskInstance(workflowInstanceId))
- .hasSize(3)
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("A");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.FAILURE);
- })
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("B");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- })
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("D");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- });
- });
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow with one condition task(B) when one fake predecessor task(A) run fatal")
- void testStartWorkflow_with_oneConditionTaskWithOneFakePredecessor_runFatal() {
- final String yaml = "/it/start/workflow_with_one_condition_task_with_one_fake_predecessor_fatal.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition parentWorkflow = context.getOneWorkflow();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(parentWorkflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .build();
- final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofMinutes(1))
- .untilAsserted(() -> {
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflowInstanceId))
- .matches(
- workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.SUCCESS);
-
- Assertions
- .assertThat(repository.queryTaskInstance(workflowInstanceId))
- .hasSize(3)
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("A");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.FAILURE);
- })
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("B");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- })
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("D");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- });
- });
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow with one condition task(B) which is forbidden when one fake predecessor task(A) run failed")
- void testStartWorkflow_with_oneForbiddenConditionTaskWithOneFakePredecessor_runFailed() {
- final String yaml =
- "/it/start/workflow_with_one_forbidden_condition_task_with_one_fake_predecessor_failed.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition parentWorkflow = context.getOneWorkflow();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(parentWorkflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .build();
- final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofMinutes(1))
- .untilAsserted(() -> {
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflowInstanceId))
- .matches(
- workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.FAILURE);
-
- Assertions
- .assertThat(repository.queryTaskInstance(workflowInstanceId))
- .hasSize(1)
- .satisfiesExactly(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("A");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.FAILURE);
- });
- });
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow with one condition task(B) which is forbidden when one fake predecessor task(A) run fatal")
- void testStartWorkflow_with_oneForbiddenConditionTaskWithOneFakePredecessor_runFatal() {
- final String yaml =
- "/it/start/workflow_with_one_forbidden_condition_task_with_one_fake_predecessor_fatal.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition parentWorkflow = context.getOneWorkflow();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(parentWorkflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .build();
- final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofMinutes(1))
- .untilAsserted(() -> {
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflowInstanceId))
- .matches(
- workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.FAILURE);
-
- Assertions
- .assertThat(repository.queryTaskInstance(workflowInstanceId))
- .hasSize(1)
- .satisfiesExactly(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("A");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.FAILURE);
- });
- });
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow whose task specifies a non-existent worker group when dispatch timeout is enabled")
- public void testTaskFail_with_workerGroupNotFoundAndTimeoutEnabled() {
- TaskDispatchPolicy taskDispatchPolicy = new TaskDispatchPolicy();
- taskDispatchPolicy.setDispatchTimeoutEnabled(true);
- taskDispatchPolicy.setMaxTaskDispatchDuration(Duration.ofSeconds(10));
- this.masterConfig.setTaskDispatchPolicy(taskDispatchPolicy);
-
- final String yaml = "/it/start/workflow_with_worker_group_not_found.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition workflow = context.getOneWorkflow();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(workflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .build();
- workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofSeconds(30))
- .untilAsserted(() -> {
- Assertions
- .assertThat(repository.queryTaskInstance(workflow))
- .hasSize(1)
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("A");
- assertThat(taskInstance.getWorkerGroup()).isEqualTo("workerGroupNotFound");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.FAILURE);
- });
-
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflow))
- .satisfiesExactly(workflowInstance -> assertThat(workflowInstance.getState())
- .isEqualTo(WorkflowExecutionStatus.FAILURE));
- });
-
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow whose task specifies a non-existent worker group when dispatch timeout is disabled")
- public void testTaskRemainsSubmittedSuccess_with_workerGroupNotFoundAndTimeoutDisabled() {
- TaskDispatchPolicy policy = new TaskDispatchPolicy();
- policy.setDispatchTimeoutEnabled(false);
- this.masterConfig.setTaskDispatchPolicy(policy);
-
- final String yaml = "/it/start/workflow_with_worker_group_not_found.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition workflow = context.getOneWorkflow();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(workflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .build();
- workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofSeconds(30))
- .untilAsserted(() -> {
- Assertions
- .assertThat(repository.queryTaskInstance(workflow))
- .hasSize(1)
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("A");
- assertThat(taskInstance.getWorkerGroup()).isEqualTo("workerGroupNotFound");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUBMITTED_SUCCESS);
- });
-
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflow))
- .satisfiesExactly(workflowInstance -> assertThat(workflowInstance.getState())
- .isEqualTo(WorkflowExecutionStatus.RUNNING_EXECUTION));
-
- });
-
- // This test intentionally leaves the workflow running, so we skip the resource cleanup check.
- // masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow when no available worker and dispatch timeout is enabled")
- public void testTaskFail_with_noAvailableWorkerAndTimeoutEnabled() {
- TaskDispatchPolicy taskDispatchPolicy = new TaskDispatchPolicy();
- taskDispatchPolicy.setDispatchTimeoutEnabled(true);
- taskDispatchPolicy.setMaxTaskDispatchDuration(Duration.ofSeconds(10));
- this.masterConfig.setTaskDispatchPolicy(taskDispatchPolicy);
-
- final String yaml = "/it/start/workflow_with_no_available_worker.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition workflow = context.getOneWorkflow();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(workflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .build();
- workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofSeconds(30))
- .untilAsserted(() -> {
- Assertions
- .assertThat(repository.queryTaskInstance(workflow))
- .hasSize(1)
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("A");
- assertThat(taskInstance.getWorkerGroup()).isEqualTo("default");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.FAILURE);
- });
-
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflow))
- .satisfiesExactly(workflowInstance -> assertThat(workflowInstance.getState())
- .isEqualTo(WorkflowExecutionStatus.FAILURE));
- });
-
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow when no available worker and dispatch timeout is disabled")
- public void testTaskRemainsSubmittedSuccess_with_noAvailableWorkerAndTimeoutDisabled() {
- TaskDispatchPolicy policy = new TaskDispatchPolicy();
- policy.setDispatchTimeoutEnabled(false);
- this.masterConfig.setTaskDispatchPolicy(policy);
-
- final String yaml = "/it/start/workflow_with_no_available_worker.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition workflow = context.getOneWorkflow();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(workflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .build();
- workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofSeconds(30))
- .untilAsserted(() -> {
- Assertions
- .assertThat(repository.queryTaskInstance(workflow))
- .hasSize(1)
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("A");
- assertThat(taskInstance.getWorkerGroup()).isEqualTo("default");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUBMITTED_SUCCESS);
- });
-
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflow))
- .satisfiesExactly(workflowInstance -> assertThat(workflowInstance.getState())
- .isEqualTo(WorkflowExecutionStatus.RUNNING_EXECUTION));
- });
-
- // This test intentionally leaves the workflow running, so we skip the resource cleanup check.
- // masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow when timeout should trigger alert when warningGroupId is set")
- public void testWorkflowTimeout_WithAlertGroup_ShouldSendAlert() {
- final String yaml = "/it/start/workflow_with_workflow_timeout_alert.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition workflow = context.getOneWorkflow();
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
- .workflowDefinition(workflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .warningGroupId(workflow.getWarningGroupId())
- .build();
- final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await().atMost(Duration.ofMinutes(2))
- .untilAsserted(() -> {
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflowInstanceId))
- .matches(
- workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.SUCCESS);
- Assertions
- .assertThat(repository.queryTaskInstance(workflow))
- .hasSize(1)
- .anySatisfy(taskInstance -> {
- assertThat(taskInstance.getName()).isEqualTo("long_running_task");
- assertThat(taskInstance.getWorkerGroup()).isEqualTo("default");
- assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
- });
- Assertions
- .assertThat(repository.queryAlert(workflowInstanceId))
- .hasSize(1)
- .anySatisfy(alert -> {
- assertThat(alert.getTitle()).isEqualTo("Workflow Timeout Warn");
- assertThat(alert.getProjectCode()).isEqualTo(1);
- assertThat(alert.getWorkflowDefinitionCode()).isEqualTo(1);
- assertThat(alert.getAlertType()).isEqualTo(AlertType.WORKFLOW_INSTANCE_TIMEOUT);
- });
- });
-
- masterContainer.assertAllResourceReleased();
- }
- @Test
- @DisplayName("Test start a workflow which contains a dep task with timeout warn strategy")
- public void testStartWorkflow_withTimeoutWarnTask() {
- masterConfig.getServerLoadProtection().setEnabled(false);
-
- final String yaml = "/it/start/workflow_with_timeout_warn_task.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition workflow = context.getWorkflow("workflow_with_timeout_warn_task");
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO
- .builder()
- .workflowDefinition(workflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .warningGroupId(workflow.getWarningGroupId())
- .build();
-
- final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofSeconds(90))
- .untilAsserted(() -> {
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflow))
- .satisfiesExactly(workflowInstance -> assertThat(
- workflowInstance.getState())
- .isEqualTo(WorkflowExecutionStatus.RUNNING_EXECUTION));
-
- Assertions
- .assertThat(repository.queryTaskInstance(workflow))
- .hasSize(1)
- .satisfiesExactly(taskInstance -> {
- assertThat(taskInstance.getName())
- .isEqualTo("dep_task_with_timeout_warn");
- assertThat(taskInstance.getState())
- .isEqualTo(TaskExecutionStatus.RUNNING_EXECUTION);
- });
-
- Assertions
- .assertThat(repository.queryAlert(workflowInstanceId))
- .isNotEmpty()
- .anySatisfy(alert -> {
- assertThat(alert.getAlertType())
- .isEqualTo(AlertType.TASK_TIMEOUT);
- });
- });
-
- workflowOperator.stopWorkflowInstance(workflowInstanceId);
- await()
- .atMost(Duration.ofSeconds(30))
- .untilAsserted(() -> Assertions.assertThat(repository.queryWorkflowInstance(workflowInstanceId))
- .matches(w -> w.getState() == WorkflowExecutionStatus.STOP));
- masterContainer.assertAllResourceReleased();
- }
-
- @Test
- @DisplayName("Test start a workflow which contains a dep task with timeout warn failed strategy")
- public void testStartWorkflow_withTimeoutWarnFailedTask() {
- masterConfig.getServerLoadProtection().setEnabled(false);
-
- final String yaml = "/it/start/workflow_with_timeout_warnfailed_task.yaml";
- final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
- final WorkflowDefinition workflow = context.getWorkflow("workflow_with_timeout_warnfailed_task");
-
- final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO
- .builder()
- .workflowDefinition(workflow)
- .runWorkflowCommandParam(new RunWorkflowCommandParam())
- .warningGroupId(workflow.getWarningGroupId())
- .build();
-
- final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
-
- await()
- .atMost(Duration.ofSeconds(90))
- .untilAsserted(() -> {
- Assertions
- .assertThat(repository.queryWorkflowInstance(workflow))
- .satisfiesExactly(workflowInstance -> assertThat(
- workflowInstance.getState())
- .isEqualTo(WorkflowExecutionStatus.STOP));
-
- Assertions
- .assertThat(repository.queryTaskInstance(workflow))
- .hasSize(1)
- .satisfiesExactly(taskInstance -> {
- assertThat(taskInstance.getName())
- .isEqualTo("dep_task_with_timeout_warnfailed");
- assertThat(taskInstance.getState())
- .isEqualTo(TaskExecutionStatus.KILL);
- });
-
- Assertions
- .assertThat(repository.queryAlert(workflowInstanceId))
- .isNotEmpty()
- .anySatisfy(alert -> {
- assertThat(alert.getAlertType())
- .isEqualTo(AlertType.TASK_TIMEOUT);
- });
- });
-
- masterContainer.assertAllResourceReleased();
- }
-
-}
diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartTimeoutTestCase.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartTimeoutTestCase.java
new file mode 100644
index 000000000000..935290104fc2
--- /dev/null
+++ b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/integration/cases/WorkflowStartTimeoutTestCase.java
@@ -0,0 +1,252 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.dolphinscheduler.server.master.integration.cases;
+
+import static com.google.common.truth.Truth.assertThat;
+import static org.awaitility.Awaitility.await;
+
+import org.apache.dolphinscheduler.common.enums.AlertType;
+import org.apache.dolphinscheduler.common.enums.WorkflowExecutionStatus;
+import org.apache.dolphinscheduler.dao.entity.WorkflowDefinition;
+import org.apache.dolphinscheduler.extract.master.command.RunWorkflowCommandParam;
+import org.apache.dolphinscheduler.plugin.task.api.enums.TaskExecutionStatus;
+import org.apache.dolphinscheduler.server.master.AbstractMasterIntegrationTestCase;
+import org.apache.dolphinscheduler.server.master.integration.WorkflowOperator;
+import org.apache.dolphinscheduler.server.master.integration.WorkflowTestCaseContext;
+
+import java.time.Duration;
+
+import org.assertj.core.api.Assertions;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Test;
+
+/**
+ * Integration tests for workflow start timeout and alert scenarios.
+ */
+public class WorkflowStartTimeoutTestCase extends AbstractMasterIntegrationTestCase {
+
+ @Test
+ @DisplayName("Test start a workflow which contains a dep task with timeout kill strategy")
+ public void testStartWorkflow_withTimeoutKillTask() {
+ final String yaml = "/it/start/workflow_with_timeout_kill_task.yaml";
+ final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+ final WorkflowDefinition workflow = context.getWorkflow("workflow_with_timeout_kill_task");
+
+ final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+ .workflowDefinition(workflow)
+ .runWorkflowCommandParam(new RunWorkflowCommandParam())
+ .build();
+ workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+ await()
+ .atMost(Duration.ofSeconds(90))
+ .untilAsserted(() -> {
+ Assertions
+ .assertThat(repository.queryWorkflowInstance(workflow))
+ .satisfiesExactly(workflowInstance -> assertThat(workflowInstance.getState())
+ .isEqualTo(WorkflowExecutionStatus.STOP));
+ Assertions
+ .assertThat(repository.queryTaskInstance(workflow))
+ .hasSize(1)
+ .satisfiesExactly(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("dep_task_with_timeout_killed");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.KILL);
+ });
+ });
+ masterContainer.assertAllResourceReleased();
+ }
+
+ @Test
+    @DisplayName("Test start a workflow which contains a dep task that will be killed by the system timeout")
+ public void testStartWorkflow_withSystemTimeoutKillTask() {
+ masterConfig.getServerLoadProtection().setMaxTaskInstanceRuntime(Duration.ofMinutes(1));
+
+ final String yaml = "/it/start/workflow_with_system_timeout_kill_task.yaml";
+ final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+ final WorkflowDefinition workflow = context.getWorkflow("workflow_with_timeout_kill_task");
+
+ final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+ .workflowDefinition(workflow)
+ .runWorkflowCommandParam(new RunWorkflowCommandParam())
+ .build();
+ workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+ await()
+ .atMost(Duration.ofSeconds(90))
+ .untilAsserted(() -> {
+ Assertions
+ .assertThat(repository.queryWorkflowInstance(workflow))
+ .satisfiesExactly(workflowInstance -> assertThat(workflowInstance.getState())
+ .isEqualTo(WorkflowExecutionStatus.STOP));
+ Assertions
+ .assertThat(repository.queryTaskInstance(workflow))
+ .hasSize(1)
+ .satisfiesExactly(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("dep_task_with_timeout_killed");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.KILL);
+ });
+ });
+ masterContainer.assertAllResourceReleased();
+ }
+
+ @Test
+    @DisplayName("Test start a workflow whose timeout should trigger an alert when warningGroupId is set")
+ public void testWorkflowTimeout_WithAlertGroup_ShouldSendAlert() {
+ final String yaml = "/it/start/workflow_with_workflow_timeout_alert.yaml";
+ final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+ final WorkflowDefinition workflow = context.getOneWorkflow();
+
+ final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO.builder()
+ .workflowDefinition(workflow)
+ .runWorkflowCommandParam(new RunWorkflowCommandParam())
+ .warningGroupId(workflow.getWarningGroupId())
+ .build();
+ final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+ await().atMost(Duration.ofMinutes(2))
+ .untilAsserted(() -> {
+ Assertions
+ .assertThat(repository.queryWorkflowInstance(workflowInstanceId))
+ .matches(
+ workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.SUCCESS);
+ Assertions
+ .assertThat(repository.queryTaskInstance(workflow))
+ .hasSize(1)
+ .anySatisfy(taskInstance -> {
+ assertThat(taskInstance.getName()).isEqualTo("long_running_task");
+ assertThat(taskInstance.getWorkerGroup()).isEqualTo("default");
+ assertThat(taskInstance.getState()).isEqualTo(TaskExecutionStatus.SUCCESS);
+ });
+ Assertions
+ .assertThat(repository.queryAlert(workflowInstanceId))
+ .hasSize(1)
+ .anySatisfy(alert -> {
+ assertThat(alert.getTitle()).isEqualTo("Workflow Timeout Warn");
+ assertThat(alert.getProjectCode()).isEqualTo(1);
+ assertThat(alert.getWorkflowDefinitionCode()).isEqualTo(1);
+ assertThat(alert.getAlertType()).isEqualTo(AlertType.WORKFLOW_INSTANCE_TIMEOUT);
+ });
+ });
+
+ masterContainer.assertAllResourceReleased();
+ }
+
+ @Test
+ @DisplayName("Test start a workflow which contains a dep task with timeout warn strategy")
+ public void testStartWorkflow_withTimeoutWarnTask() {
+ masterConfig.getServerLoadProtection().setEnabled(false);
+
+ final String yaml = "/it/start/workflow_with_timeout_warn_task.yaml";
+ final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+ final WorkflowDefinition workflow = context.getWorkflow("workflow_with_timeout_warn_task");
+
+ final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO
+ .builder()
+ .workflowDefinition(workflow)
+ .runWorkflowCommandParam(new RunWorkflowCommandParam())
+ .warningGroupId(workflow.getWarningGroupId())
+ .build();
+
+ final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+ await()
+ .atMost(Duration.ofSeconds(90))
+ .untilAsserted(() -> {
+ Assertions
+ .assertThat(repository.queryWorkflowInstance(workflow))
+ .satisfiesExactly(workflowInstance -> assertThat(
+ workflowInstance.getState())
+ .isEqualTo(WorkflowExecutionStatus.RUNNING_EXECUTION));
+
+ Assertions
+ .assertThat(repository.queryTaskInstance(workflow))
+ .hasSize(1)
+ .satisfiesExactly(taskInstance -> {
+ assertThat(taskInstance.getName())
+ .isEqualTo("dep_task_with_timeout_warn");
+ assertThat(taskInstance.getState())
+ .isEqualTo(TaskExecutionStatus.RUNNING_EXECUTION);
+ });
+
+ Assertions
+ .assertThat(repository.queryAlert(workflowInstanceId))
+ .isNotEmpty()
+ .anySatisfy(alert -> {
+ assertThat(alert.getAlertType())
+ .isEqualTo(AlertType.TASK_TIMEOUT);
+ });
+ });
+
+ workflowOperator.stopWorkflowInstance(workflowInstanceId);
+ await()
+ .atMost(Duration.ofSeconds(30))
+ .untilAsserted(() -> Assertions.assertThat(repository.queryWorkflowInstance(workflowInstanceId))
+ .matches(w -> w.getState() == WorkflowExecutionStatus.STOP));
+ masterContainer.assertAllResourceReleased();
+ }
+
+ @Test
+ @DisplayName("Test start a workflow which contains a dep task with timeout warn failed strategy")
+ public void testStartWorkflow_withTimeoutWarnFailedTask() {
+ masterConfig.getServerLoadProtection().setEnabled(false);
+
+ final String yaml = "/it/start/workflow_with_timeout_warnfailed_task.yaml";
+ final WorkflowTestCaseContext context = workflowTestCaseContextFactory.initializeContextFromYaml(yaml);
+ final WorkflowDefinition workflow = context.getWorkflow("workflow_with_timeout_warnfailed_task");
+
+ final WorkflowOperator.WorkflowTriggerDTO workflowTriggerDTO = WorkflowOperator.WorkflowTriggerDTO
+ .builder()
+ .workflowDefinition(workflow)
+ .runWorkflowCommandParam(new RunWorkflowCommandParam())
+ .warningGroupId(workflow.getWarningGroupId())
+ .build();
+
+ final Integer workflowInstanceId = workflowOperator.manualTriggerWorkflow(workflowTriggerDTO);
+
+ await()
+ .atMost(Duration.ofSeconds(90))
+ .untilAsserted(() -> {
+ Assertions
+ .assertThat(repository.queryWorkflowInstance(workflow))
+ .satisfiesExactly(workflowInstance -> assertThat(
+ workflowInstance.getState())
+ .isEqualTo(WorkflowExecutionStatus.STOP));
+
+ Assertions
+ .assertThat(repository.queryTaskInstance(workflow))
+ .hasSize(1)
+ .satisfiesExactly(taskInstance -> {
+ assertThat(taskInstance.getName())
+ .isEqualTo("dep_task_with_timeout_warnfailed");
+ assertThat(taskInstance.getState())
+ .isEqualTo(TaskExecutionStatus.KILL);
+ });
+
+ Assertions
+ .assertThat(repository.queryAlert(workflowInstanceId))
+ .isNotEmpty()
+ .anySatisfy(alert -> {
+ assertThat(alert.getAlertType())
+ .isEqualTo(AlertType.TASK_TIMEOUT);
+ });
+ });
+
+ masterContainer.assertAllResourceReleased();
+ }
+
+}