fix(workflow): consider workflow when completing work items
This fixes two issues:

1) When completing a work item, we didn't consider its associated
   workflow when searching for sibling tasks. This meant that if the
   same task was used in multiple workflows, task flows from any of
   those workflows would be evaluated, not just the ones belonging to
   the work item's own workflow.

2) When completing a "leaf" work item (i.e. one with no attached
   flows), we only checked whether all sibling work items were
   complete. However, the case may only be marked as completed once
   _no_ work item in it is still open ("READY"). Both fixes are
   sketched below.
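
In code, the two fixes boil down to the following condensed sketch
(drawn from the diff below, with the follow-up work item creation and
the closing bookkeeping elided; it assumes the surrounding
post_complete() context, i.e. work_item, case and the caluma models):

    # 1) Only consider flows that belong to the work item's own workflow.
    flow = models.Flow.objects.filter(
        task_flows__task=work_item.task_id,
        task_flows__workflow=work_item.case.workflow.pk,
    ).first()

    if flow:
        # Evaluate flow.next and create the follow-up work items (see diff).
        ...
    else:
        # 2) "Leaf" work item: only close the case if no work item in the
        #    same case is still open.
        has_ready_work_items = work_item.case.work_items.filter(
            status=models.WorkItem.STATUS_READY
        ).exists()
        if not has_ready_work_items:
            case.status = models.Case.STATUS_COMPLETED
            case.save()
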
czosel committed Jun 9, 2020
1 parent 57ee16f commit c024660
Showing 2 changed files with 83 additions and 30 deletions.
68 changes: 39 additions & 29 deletions caluma/caluma_workflow/domain_logic.py
@@ -119,42 +119,52 @@ def post_complete(work_item, user):
         if not CompleteWorkItemLogic._can_continue(work_item, work_item.task):
             return work_item
 
-        flow = models.Flow.objects.filter(task_flows__task=work_item.task_id).first()
-        flow_referenced_tasks = models.Task.objects.filter(task_flows__flow=flow)
+        flow = models.Flow.objects.filter(
+            task_flows__task=work_item.task_id,
+            task_flows__workflow=work_item.case.workflow.pk,
+        ).first()
 
-        all_complete = all(
-            CompleteWorkItemLogic._can_continue(work_item, task)
-            for task in flow_referenced_tasks
-        )
+        if flow:
+            sibling_tasks = models.Task.objects.filter(task_flows__flow=flow)
 
-        if flow and all_complete:
-            jexl = FlowJexl()
-            result = jexl.evaluate(flow.next)
-            if not isinstance(result, list):
-                result = [result]
+            all_siblings_complete = all(
+                CompleteWorkItemLogic._can_continue(work_item, task)
+                for task in sibling_tasks
+            )
 
-            tasks = models.Task.objects.filter(pk__in=result)
+            if all_siblings_complete:
+                jexl = FlowJexl()
+                result = jexl.evaluate(flow.next)
+                if not isinstance(result, list):
+                    result = [result]
 
-            created_work_items = utils.bulk_create_work_items(
-                tasks, case, user, work_item
-            )
+                tasks = models.Task.objects.filter(pk__in=result)
 
-            for created_work_item in created_work_items:  # pragma: no cover
-                send_event(
-                    events.created_work_item,
-                    sender="post_complete_work_item",
-                    work_item=created_work_item,
-                )
-        elif not flow and all_complete:
-            # no more tasks, mark case as complete
-            case.status = models.Case.STATUS_COMPLETED
-            case.closed_at = timezone.now()
-            case.closed_by_user = user.username
-            case.closed_by_group = user.group
-            case.save()
-            send_event(
-                events.completed_case, sender="post_complete_work_item", case=case
-            )
+                created_work_items = utils.bulk_create_work_items(
+                    tasks, case, user, work_item
+                )
+
+                for created_work_item in created_work_items:  # pragma: no cover
+                    send_event(
+                        events.created_work_item,
+                        sender="post_complete_work_item",
+                        work_item=created_work_item,
+                    )
+        else:
+            has_ready_work_items = work_item.case.work_items.filter(
+                status=models.WorkItem.STATUS_READY
+            ).exists()
+
+            if not has_ready_work_items:
+                # no more tasks, mark case as complete
+                case.status = models.Case.STATUS_COMPLETED
+                case.closed_at = timezone.now()
+                case.closed_by_user = user.username
+                case.closed_by_group = user.group
+                case.save()
+                send_event(
+                    events.completed_case, sender="post_complete_work_item", case=case
+                )
 
         send_event(
             events.completed_work_item,
45 changes: 44 additions & 1 deletion caluma/caluma_workflow/tests/test_work_item.py
@@ -302,7 +302,7 @@ def test_complete_multiple_instance_task_form_work_item_next(
     task_next = task_factory(
         type=models.Task.TYPE_SIMPLE, form=None, address_groups='["group-name"]|groups'
     )
-    task_flow = task_flow_factory(task=task)
+    task_flow = task_flow_factory(task=task, workflow=work_item.case.workflow)
     task_flow.flow.next = f"'{task_next.slug}'|task"
     task_flow.flow.save()
 
@@ -1065,3 +1065,46 @@ def test_complete_work_item_parallel(
         assert case.status == models.Case.STATUS_COMPLETED
     else:
         assert case.status == models.Case.STATUS_RUNNING
+
+
+def test_complete_work_item_same_task_multiple_workflows(
+    db,
+    case_factory,
+    work_item_factory,
+    task_factory,
+    flow_factory,
+    task_flow_factory,
+    workflow_factory,
+    schema_executor,
+    admin_user,
+):
+    workflow_1, workflow_2 = workflow_factory.create_batch(2)
+    # create two work items which can be processed in parallel
+    task_1, task_2 = task_factory.create_batch(2, type=models.Task.TYPE_SIMPLE)
+
+    flow = flow_factory(next=f"'{task_2.slug}'|task")
+
+    # workflow 1 consists out of 2 tasks, workflow_2 just out of one
+    task_flow_factory(task=task_1, workflow=workflow_1, flow=flow)
+
+    case_1 = case_factory(workflow=workflow_1)
+    case_2 = case_factory(workflow=workflow_2)
+
+    work_item_1 = work_item_factory(
+        task=task_1, status=models.WorkItem.STATUS_READY, child_case=None, case=case_1
+    )
+    work_item_2 = work_item_factory(
+        task=task_1, status=models.WorkItem.STATUS_READY, child_case=None, case=case_2
+    )
+
+    api.complete_work_item(work_item_1, admin_user)
+    api.complete_work_item(work_item_2, admin_user)
+
+    work_item_1.refresh_from_db()
+    work_item_2.refresh_from_db()
+
+    case_1.refresh_from_db()
+    case_2.refresh_from_db()
+
+    assert case_1.status == models.Case.STATUS_RUNNING
+    assert case_2.status == models.Case.STATUS_COMPLETED