-
Notifications
You must be signed in to change notification settings - Fork 2.3k
Fix rerunWorkflow places synchronous system tasks in the queue #2494
Changes from 1 commit
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -101,6 +101,7 @@ | |
import static com.netflix.conductor.common.metadata.tasks.TaskType.SIMPLE; | ||
import static com.netflix.conductor.common.metadata.tasks.TaskType.SUB_WORKFLOW; | ||
import static com.netflix.conductor.common.metadata.tasks.TaskType.SWITCH; | ||
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_JSON_JQ_TRANSFORM; | ||
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_LAMBDA; | ||
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_SUB_WORKFLOW; | ||
import static com.netflix.conductor.common.metadata.tasks.TaskType.TASK_TYPE_WAIT; | ||
|
@@ -180,6 +181,21 @@ public WorkflowSystemTask http2() { | |
return new WorkflowSystemTaskStub("HTTP2"); | ||
} | ||
|
||
@Bean(TASK_TYPE_JSON_JQ_TRANSFORM) | ||
public WorkflowSystemTask jsonBean() { | ||
return new WorkflowSystemTaskStub("JSON_JQ_TRANSFORM") { | ||
@Override | ||
public boolean isAsync() { | ||
return false; | ||
} | ||
|
||
@Override | ||
public void start(Workflow workflow, Task task, WorkflowExecutor executor) { | ||
task.setStatus(Task.Status.COMPLETED); | ||
} | ||
}; | ||
} | ||
|
||
@Bean | ||
public SystemTaskRegistry systemTaskRegistry(Set<WorkflowSystemTask> tasks) { | ||
return new SystemTaskRegistry(tasks); | ||
|
@@ -1324,6 +1340,59 @@ public void testRerunWorkflowWithTaskId() { | |
assertEquals(new HashSet<>(), workflow.getFailedReferenceTaskNames()); | ||
} | ||
|
||
@Test | ||
public void testRerunWorkflowWithSyncSystemTaskId() { | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. 👍🏼 |
||
//setup | ||
String workflowId = IDGenerator.generate(); | ||
|
||
Task task1 = new Task(); | ||
task1.setTaskType(TaskType.SIMPLE.name()); | ||
task1.setTaskDefName("task1"); | ||
task1.setReferenceTaskName("task1_ref"); | ||
task1.setWorkflowInstanceId(workflowId); | ||
task1.setScheduledTime(System.currentTimeMillis()); | ||
task1.setTaskId(IDGenerator.generate()); | ||
task1.setStatus(Status.COMPLETED); | ||
task1.setWorkflowTask(new WorkflowTask()); | ||
task1.setOutputData(new HashMap<>()); | ||
|
||
Task task2 = new Task(); | ||
task2.setTaskType(TaskType.JSON_JQ_TRANSFORM.name()); | ||
task2.setReferenceTaskName("task2_ref"); | ||
task2.setWorkflowInstanceId(workflowId); | ||
task2.setScheduledTime(System.currentTimeMillis()); | ||
task2.setTaskId("system-task-id"); | ||
task2.setStatus(Status.FAILED); | ||
|
||
Workflow workflow = new Workflow(); | ||
workflow.setWorkflowId(workflowId); | ||
WorkflowDef workflowDef = new WorkflowDef(); | ||
workflowDef.setName("workflow"); | ||
workflowDef.setVersion(1); | ||
workflow.setWorkflowDefinition(workflowDef); | ||
workflow.setOwnerApp("junit_testRerunWorkflowId"); | ||
workflow.setStatus(WorkflowStatus.FAILED); | ||
workflow.setReasonForIncompletion("task2 failed"); | ||
workflow.setFailedReferenceTaskNames(new HashSet<String>() {{ | ||
add("task2_ref"); | ||
}}); | ||
workflow.getTasks().addAll(Arrays.asList(task1, task2)); | ||
//end of setup | ||
|
||
//when: | ||
when(executionDAOFacade.getWorkflowById(workflow.getWorkflowId(), true)).thenReturn(workflow); | ||
RerunWorkflowRequest rerunWorkflowRequest = new RerunWorkflowRequest(); | ||
rerunWorkflowRequest.setReRunFromWorkflowId(workflow.getWorkflowId()); | ||
rerunWorkflowRequest.setReRunFromTaskId(task2.getTaskId()); | ||
workflowExecutor.rerun(rerunWorkflowRequest); | ||
|
||
//then: | ||
assertEquals(Status.COMPLETED, task2.getStatus()); | ||
assertEquals(Workflow.WorkflowStatus.RUNNING, workflow.getStatus()); | ||
assertNull(workflow.getReasonForIncompletion()); | ||
assertEquals(new HashSet<>(), workflow.getFailedReferenceTaskNames()); | ||
} | ||
|
||
@Test | ||
public void testRerunSubWorkflowWithTaskId() { | ||
//setup | ||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -13,6 +13,7 @@ | |
package com.netflix.conductor.test.integration | ||
|
||
import com.netflix.conductor.common.metadata.tasks.Task | ||
import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest | ||
import com.netflix.conductor.common.run.Workflow | ||
import com.netflix.conductor.test.base.AbstractSpecification | ||
import spock.lang.Shared | ||
|
@@ -84,4 +85,66 @@ class JsonJQTransformSpec extends AbstractSpecification { | |
tasks[0].reasonForIncompletion as String == "Cannot index string with string \"array\"" | ||
} | ||
} | ||
|
||
/**
 * Given the following invalid input JSON
 * {
 *   "in1": "a",
 *   "in2": "b"
 * }
 * using the same query from the success test, jq will try to get in1['array']
 * and fail since 'in1' is a string.
 *
 * Re-running the failed system task with the following valid input JSON fixes the workflow
 * {
 *   "in1": { "array": [ "a", "b" ] },
 *   "in2": { "array": [ "c", "d" ] }
 * }
 * and the task is expected to produce the following transform result:
 * {
 *   out: [ "a", "b", "c", "d" ]
 * }
 */
def "Test rerun workflow with failed json jq transform task"() {
    given: "workflow input"
    def invalidInput = new HashMap()
    invalidInput['in1'] = "a"
    invalidInput['in2'] = "b"

    def validInput = new HashMap()
    def input = new HashMap()
    input['in1'] = new HashMap()
    input['in1']['array'] = ["a", "b"]
    input['in2'] = new HashMap()
    input['in2']['array'] = ["c", "d"]
    validInput['input'] = input
    validInput['queryExpression'] = '.input as $_ | { out: ($_.in1.array + $_.in2.array) }'

    when: "workflow which has the json jq transform task started"
    def workflowInstanceId = workflowExecutor.startWorkflow(JSON_JQ_TRANSFORM_WF, 1,
            '', invalidInput, null, null, null)

    then: "verify that the workflow and task failed with expected error"
    with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) {
        status == Workflow.WorkflowStatus.FAILED
        tasks.size() == 1
        tasks[0].status == Task.Status.FAILED
        tasks[0].taskType == 'JSON_JQ_TRANSFORM'
        tasks[0].reasonForIncompletion as String == "Cannot index string with string \"array\""
    }

    when: "workflow which has the json jq transform task is rerun with valid input"
    def reRunWorkflowRequest = new RerunWorkflowRequest()
    reRunWorkflowRequest.reRunFromWorkflowId = workflowInstanceId
    def reRunTaskId = workflowExecutionService.getExecutionStatus(workflowInstanceId, true).tasks[0].taskId
    reRunWorkflowRequest.reRunFromTaskId = reRunTaskId
    reRunWorkflowRequest.taskInput = validInput

    workflowExecutor.rerun(reRunWorkflowRequest)

    then: "verify that the workflow and task are completed with expected output"
    with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) {
        status == Workflow.WorkflowStatus.COMPLETED
        tasks.size() == 1
        tasks[0].status == Task.Status.COMPLETED
        tasks[0].taskType == 'JSON_JQ_TRANSFORM'
        // assert on the map contents rather than its String representation:
        // a Map's toString() key ordering is an implementation detail, so a
        // string comparison is brittle (see review discussion on this PR)
        tasks[0].outputData.containsKey('result')
        tasks[0].outputData['result']['out'] == ['a', 'b', 'c', 'd']
    }
}
} |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
getOutputData() does not return null. Can you please explain the need for a null check here?

There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
The outputData field is reset to null here: https://github.com/Netflix/conductor/blob/main/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java#L1642 I guess we could update that line to just clear the map instead of setting it null, but feel there might be other cases that I might neglected if I changed that. So I added the null check for outputData in the system task instead. And that's the only sync system task that need to add the null check
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
It makes sense to clear the outputData or set it to an empty HashMap during rerun. If you look at the Task, the outputData field is defaulted to an empty map. So it's natural for the rest of the code to expect a non-null value from Task.getOutputData(). DoWhile also expects a non-null output data and would fail if null is returned.

Also, the fewer nulls we introduce, the less chance of an unexpected NullPointerException.

There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
ya, updated it to clear the outputData instead of setting it to null