
Commit 7388658

Merge pull request #315 from sartography/feature/new-task-states
Feature/new task states
2 parents eceef15 + a04fdd3 commit 7388658

19 files changed: 888 additions, 130 deletions

SpiffWorkflow/bpmn/PythonScriptEngine.py

Lines changed: 6 additions & 29 deletions
@@ -1,13 +1,11 @@
 # -*- coding: utf-8 -*-
 import ast
-import copy
 import sys
 import traceback
 import warnings
 
 from .PythonScriptEngineEnvironment import TaskDataEnvironment
 from ..exceptions import SpiffWorkflowException, WorkflowTaskException
-from ..operators import Operator
 
 
 # Copyright (C) 2020 Kelly McDonald
@@ -40,18 +38,19 @@ class PythonScriptEngine(object):
     """
 
     def __init__(self, default_globals=None, scripting_additions=None, environment=None):
+
         if default_globals is not None or scripting_additions is not None:
             warnings.warn(f'default_globals and scripting_additions are deprecated. '
                           f'Please provide an environment such as TaskDataEnvrionment',
                           DeprecationWarning, stacklevel=2)
+
         if environment is None:
             environment_globals = {}
             environment_globals.update(default_globals or {})
             environment_globals.update(scripting_additions or {})
             self.environment = TaskDataEnvironment(environment_globals)
         else:
             self.environment = environment
-        self.error_tasks = {}
 
     def validate(self, expression):
         ast.parse(expression)
@@ -62,28 +61,19 @@ def evaluate(self, task, expression, external_methods=None):
         return the result.
         """
         try:
-            if isinstance(expression, Operator):
-                # I am assuming that this takes care of some kind of XML
-                # expression judging from the contents of operators.py
-                return expression._matches(task)
-            else:
-                return self._evaluate(expression, task.data, external_methods)
+            return self._evaluate(expression, task.data, external_methods)
         except SpiffWorkflowException as se:
             se.add_note(f"Error evaluating expression '{expression}'")
             raise se
         except Exception as e:
             raise WorkflowTaskException(f"Error evaluating expression '{expression}'", task=task, exception=e)
 
     def execute(self, task, script, external_methods=None):
-        """
-        Execute the script, within the context of the specified task
-        """
+        """Execute the script, within the context of the specified task."""
         try:
-            self.check_for_overwrite(task, external_methods or {})
-            self._execute(script, task.data, external_methods or {})
+            return self._execute(script, task.data, external_methods or {})
         except Exception as err:
             wte = self.create_task_exec_exception(task, script, err)
-            self.error_tasks[task.id] = wte
             raise wte
 
     def call_service(self, operation_name, operation_params, task_data):
@@ -120,21 +110,8 @@ def get_error_line_number_and_content(self, script, err):
         error_line = script.splitlines()[line_number - 1]
         return line_number, error_line
 
-    def check_for_overwrite(self, task, external_methods):
-        """It's possible that someone will define a variable with the
-        same name as a pre-defined script, rending the script un-callable.
-        This results in a nearly indecipherable error. Better to fail
-        fast with a sensible error message."""
-        func_overwrites = set(self.environment.globals).intersection(task.data)
-        func_overwrites.update(set(external_methods).intersection(task.data))
-        if len(func_overwrites) > 0:
-            msg = f"You have task data that overwrites a predefined " \
-                  f"function(s). Please change the following variable or " \
-                  f"field name(s) to something else: {func_overwrites}"
-            raise WorkflowTaskException(msg, task=task)
-
     def _evaluate(self, expression, context, external_methods=None):
         return self.environment.evaluate(expression, context, external_methods)
 
     def _execute(self, script, context, external_methods=None):
-        self.environment.execute(script, context, external_methods)
+        return self.environment.execute(script, context, external_methods)
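
For context, a minimal usage sketch of the construction path this diff leaves in place: pass an explicit TaskDataEnvironment rather than the deprecated default_globals/scripting_additions arguments. The datetime global below is purely illustrative, not part of this commit.

    import datetime

    from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
    from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment

    # Scripts and expressions evaluated by this engine will see 'datetime'
    # as a predefined global, alongside whatever the task data provides.
    environment = TaskDataEnvironment({'datetime': datetime})
    script_engine = PythonScriptEngine(environment=environment)

The engine is handed to a workflow as before; the only behavioural shifts in this file are that the overwrite check now lives in the environment and the error_tasks bookkeeping is gone.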

SpiffWorkflow/bpmn/PythonScriptEngineEnvironment.py

Lines changed: 20 additions & 2 deletions
@@ -1,6 +1,7 @@
 import copy
 import warnings
 
+
 class BasePythonScriptEngineEnvironment:
     def __init__(self, environment_globals=None):
         self.globals = environment_globals or {}
@@ -11,7 +12,9 @@ def evaluate(self, expression, context, external_methods=None):
     def execute(self, script, context, external_methods=None):
         raise NotImplementedError("Subclass must implement this method")
 
+
 class TaskDataEnvironment(BasePythonScriptEngineEnvironment):
+
     def evaluate(self, expression, context, external_methods=None):
         my_globals = copy.copy(self.globals)  # else we pollute all later evals.
         self._prepare_context(context)
@@ -20,6 +23,7 @@ def evaluate(self, expression, context, external_methods=None):
         return eval(expression, my_globals)
 
     def execute(self, script, context, external_methods=None):
+        self.check_for_overwrite(context, external_methods or {})
         my_globals = copy.copy(self.globals)
         self._prepare_context(context)
         my_globals.update(external_methods or {})
@@ -28,12 +32,12 @@ def execute(self, script, context, external_methods=None):
             exec(script, context)
         finally:
             self._remove_globals_and_functions_from_context(context, external_methods)
+        return True
 
     def _prepare_context(self, context):
         pass
 
-    def _remove_globals_and_functions_from_context(self, context,
-                                                   external_methods=None):
+    def _remove_globals_and_functions_from_context(self, context, external_methods=None):
         """When executing a script, don't leave the globals, functions
         and external methods in the context that we have modified."""
         for k in list(context):
@@ -43,6 +47,20 @@ def _remove_globals_and_functions_from_context(self, context,
                     external_methods and k in external_methods:
                 context.pop(k)
 
+    def check_for_overwrite(self, context, external_methods):
+        """It's possible that someone will define a variable with the
+        same name as a pre-defined script, rendering the script un-callable.
+        This results in a nearly indecipherable error. Better to fail
+        fast with a sensible error message."""
+        func_overwrites = set(self.globals).intersection(context)
+        func_overwrites.update(set(external_methods).intersection(context))
+        if len(func_overwrites) > 0:
+            msg = f"You have task data that overwrites a predefined " \
+                  f"function(s). Please change the following variable or " \
+                  f"field name(s) to something else: {func_overwrites}"
+            raise ValueError(msg)
+
+
 class Box(dict):
     """
     Example:
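
To illustrate the relocated guard: check_for_overwrite now lives on the environment and inspects a plain context dict, so shadowing a predefined global raises a ValueError here rather than the WorkflowTaskException the engine used to raise. The lookup_customer name below is hypothetical.

    from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment

    env = TaskDataEnvironment({'lookup_customer': lambda customer_id: {'id': customer_id}})

    # Task data that shadows the predefined 'lookup_customer' global.
    context = {'lookup_customer': 'oops, a plain string'}
    try:
        env.execute('record = lookup_customer(42)', context)
    except ValueError as exc:
        print(exc)  # names the offending variable(s) so they can be renamed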

SpiffWorkflow/bpmn/serializer/migration/version_1_2.py

Lines changed: 66 additions & 45 deletions
@@ -16,58 +16,64 @@ def convert_timer_expressions(dct):
 
     message = "Unable to convert time specifications for {spec}. This most likely because the values are set during workflow execution."
 
+    # Moving this code into helper functions to make sonarcloud STFU about this file.
+    # Don't really consider this better but whatever.
+
+    def convert_timedate(spec):
+        expr = spec['event_definition'].pop('dateTime')
+        try:
+            dt = eval(expr)
+            if isinstance(dt, datetime):
+                spec['event_definition']['expression'] = f"'{dt.isoformat()}'"
+                spec['event_definition']['typename'] = 'TimeDateEventDefinition'
+            elif isinstance(dt, timedelta):
+                spec['event_definition']['expression'] = f"'{td_to_iso(dt)}'"
+                spec['event_definition']['typename'] = 'DurationTimerEventDefinition'
+        except:
+            raise VersionMigrationError(message.format(spec=spec['name']))
+
+    def convert_cycle(spec, task):
+        expr = spec['event_definition'].pop('cycle_definition')
+        try:
+            repeat, duration = eval(expr)
+            spec['event_definition']['expression'] = f"'R{repeat}/{td_to_iso(duration)}'"
+            if task is not None:
+                cycles_complete = task['data'].pop('repeat_count', 0)
+                start_time = task['internal_data'].pop('start_time', None)
+                if start_time is not None:
+                    dt = datetime.fromisoformat(start_time)
+                    task['internal_data']['event_value'] = {
+                        'cycles': repeat - cycles_complete,
+                        'next': datetime.combine(dt.date(), dt.time(), LOCALTZ).isoformat(),
+                        'duration': duration.total_seconds(),
+                    }
+        except:
+            raise VersionMigrationError(message.format(spec=spec['name']))
+
+        if spec['typename'] == 'StartEvent':
+            spec['outputs'].remove(spec['name'])
+            if task is not None:
+                children = [ dct['tasks'][c] for c in task['children'] ]
+                # Formerly cycles were handled by looping back and reusing the tasks so this removes the extra tasks
+                remove = [ c for c in children if c['task_spec'] == task['task_spec']][0]
+                for task_id in remove['children']:
+                    child = dct['tasks'][task_id]
+                    if child['task_spec'].startswith('return') or child['state'] != TaskState.COMPLETED:
+                        dct['tasks'].pop(task_id)
+                    else:
+                        task['children'].append(task_id)
+                task['children'].remove(remove['id'])
+                dct['tasks'].pop(remove['id'])
+
     has_timer = lambda ts: 'event_definition' in ts and ts['event_definition']['typename'] in [ 'CycleTimerEventDefinition', 'TimerEventDefinition']
     for spec in [ ts for ts in dct['spec']['task_specs'].values() if has_timer(ts) ]:
         spec['event_definition']['name'] = spec['event_definition'].pop('label')
         if spec['event_definition']['typename'] == 'TimerEventDefinition':
-            expr = spec['event_definition'].pop('dateTime')
-            try:
-                dt = eval(expr)
-                if isinstance(dt, datetime):
-                    spec['event_definition']['expression'] = f"'{dt.isoformat()}'"
-                    spec['event_definition']['typename'] = 'TimeDateEventDefinition'
-                elif isinstance(dt, timedelta):
-                    spec['event_definition']['expression'] = f"'{td_to_iso(dt)}'"
-                    spec['event_definition']['typename'] = 'DurationTimerEventDefinition'
-            except:
-                raise VersionMigrationError(message.format(spec=spec['name']))
-
+            convert_timedate(spec)
         if spec['event_definition']['typename'] == 'CycleTimerEventDefinition':
-
             tasks = [ t for t in dct['tasks'].values() if t['task_spec'] == spec['name'] ]
             task = tasks[0] if len(tasks) > 0 else None
-
-            expr = spec['event_definition'].pop('cycle_definition')
-            try:
-                repeat, duration = eval(expr)
-                spec['event_definition']['expression'] = f"'R{repeat}/{td_to_iso(duration)}'"
-                if task is not None:
-                    cycles_complete = task['data'].pop('repeat_count', 0)
-                    start_time = task['internal_data'].pop('start_time', None)
-                    if start_time is not None:
-                        dt = datetime.fromisoformat(start_time)
-                        task['internal_data']['event_value'] = {
-                            'cycles': repeat - cycles_complete,
-                            'next': datetime.combine(dt.date(), dt.time(), LOCALTZ).isoformat(),
-                            'duration': duration.total_seconds(),
-                        }
-            except:
-                raise VersionMigrationError(message.format(spec=spec['name']))
-
-            if spec['typename'] == 'StartEvent':
-                spec['outputs'].remove(spec['name'])
-                if task is not None:
-                    children = [ dct['tasks'][c] for c in task['children'] ]
-                    # Formerly cycles were handled by looping back and reusing the tasks so this removes the extra tasks
-                    remove = [ c for c in children if c['task_spec'] == task['task_spec']][0]
-                    for task_id in remove['children']:
-                        child = dct['tasks'][task_id]
-                        if child['task_spec'].startswith('return') or child['state'] != TaskState.COMPLETED:
-                            dct['tasks'].pop(task_id)
-                        else:
-                            task['children'].append(task_id)
-                    task['children'].remove(remove['id'])
-                    dct['tasks'].pop(remove['id'])
+            convert_cycle(spec, task)
 
 def add_default_condition_to_cond_task_specs(dct):
 
@@ -122,3 +128,18 @@ def remove_loop_reset(dct):
             parent = dct['tasks'].get(task['parent'])
             parent['children'] = [c for c in parent['children'] if c != task['id']]
         dct['spec']['task_specs'].pop(spec['name'])
+
+def update_task_states(dct):
+
+    def update(process):
+        for task in process['tasks'].values():
+            if task['state'] == 32:
+                task['state'] = TaskState.COMPLETED
+            elif task['state'] == 64:
+                task['state'] = TaskState.CANCELLED
+
+    root = dct['tasks'].get(dct['root'])
+    if root['state'] == 32:
+        update(dct)
+        for sp in dct['subprocesses'].values():
+            update(sp)
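
To make the intent of update_task_states concrete: serializations written before this change store COMPLETED as 32 and CANCELLED as 64, and the migration rewrites those literals to the reworked TaskState values. A toy sketch with hypothetical task ids; the serializer normally runs this through the version-1.2 migration chain rather than by direct call.

    from SpiffWorkflow.task import TaskState
    from SpiffWorkflow.bpmn.serializer.migration.version_1_2 import update_task_states

    dct = {
        'root': 'task-1',
        'tasks': {
            'task-1': {'state': 32},   # COMPLETED under the old numbering
            'task-2': {'state': 64},   # CANCELLED under the old numbering
            'task-3': {'state': 16},   # untouched: only 32 and 64 are remapped
        },
        'subprocesses': {},
    }

    update_task_states(dct)
    assert dct['tasks']['task-1']['state'] == TaskState.COMPLETED
    assert dct['tasks']['task-2']['state'] == TaskState.CANCELLED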

SpiffWorkflow/bpmn/serializer/migration/version_migration.py

Lines changed: 3 additions & 0 deletions
@@ -7,6 +7,7 @@
     create_data_objects_and_io_specs,
     check_multiinstance,
     remove_loop_reset,
+    update_task_states,
 )
 
 def from_version_1_1(old):
@@ -36,6 +37,7 @@ def from_version_1_1(old):
     create_data_objects_and_io_specs(new)
     check_multiinstance(new)
     remove_loop_reset(new)
+    update_task_states(new)
     new['VERSION'] = "1.2"
     return new
 
@@ -53,6 +55,7 @@ def from_version_1_0(old):
     attributes based on the task states.
     """
     new = deepcopy(old)
+    new['VERSION'] = "1.1"
     move_subprocesses_to_top(new)
     return from_version_1_1(new)
 

SpiffWorkflow/bpmn/specs/ScriptTask.py

Lines changed: 3 additions & 10 deletions
@@ -18,7 +18,6 @@
 # 02110-1301 USA
 
 from .BpmnSpecMixin import BpmnSpecMixin
-from ...task import TaskState
 from ...specs.Simple import Simple
 
 
@@ -30,13 +29,8 @@ def _execute(self, task):
         pass
 
     def _run_hook(self, task):
-        try:
-            self._execute(task)
-            super(ScriptEngineTask, self)._run_hook(task)
-        except Exception as exc:
-            task._set_state(TaskState.WAITING)
-            raise exc
-        return True
+        return self._execute(task)
+
 
 class ScriptTask(ScriptEngineTask):
 
@@ -54,5 +48,4 @@ def spec_type(self):
         return 'Script Task'
 
     def _execute(self, task):
-        task.workflow.script_engine.execute(task, self.script)
-
+        return task.workflow.script_engine.execute(task, self.script)
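
Combined with the engine changes above, the effect here is that a script task's outcome now propagates through _run_hook as a return value instead of being signalled by catching exceptions and forcing the task into WAITING. A hedged sketch of what that permits: an environment that defers execution and returns something other than True. Both submit_to_worker and the idea that a non-True return leaves the task unfinished are assumptions for illustration; this diff only shows that the value is passed through.

    from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment

    def submit_to_worker(script, context):
        """Hypothetical stand-in for handing the script to an external executor."""
        pass

    class QueueingEnvironment(TaskDataEnvironment):
        """Sketch of an environment that does not finish scripts synchronously."""

        def execute(self, script, context, external_methods=None):
            self.check_for_overwrite(context, external_methods or {})
            submit_to_worker(script, context)
            return False  # assumed: not yet complete, unlike TaskDataEnvironment's True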

SpiffWorkflow/serializer/dict.py

Lines changed: 9 additions & 2 deletions
@@ -21,7 +21,7 @@
 from base64 import b64encode, b64decode
 from ..workflow import Workflow
 from ..util.impl import get_class
-from ..task import Task
+from ..task import Task, TaskState
 from ..operators import (Attrib, PathAttrib, Equal, NotEqual, Operator, GreaterThan, LessThan, Match)
 from ..specs.base import TaskSpec
 from ..specs.AcquireMutex import AcquireMutex
@@ -604,11 +604,18 @@ def deserialize_workflow(self, s_state, wf_class=Workflow, wf_spec=None, **kwarg
         workflow.spec = wf_spec
         workflow.task_tree = self.deserialize_task(workflow, s_state['task_tree'], reset_specs)
 
-        # Re-connect parents
+        # Re-connect parents and update states if necessary
         tasklist = workflow.get_tasks()
+        root = workflow.get_tasks_from_spec_name('Root')[0]
+        update_state = root.state != TaskState.COMPLETED
         for task in tasklist:
             if task.parent is not None:
                 task.parent = workflow.get_task_from_id(task.parent, tasklist)
+            if update_state:
+                if task.state == 32:
+                    task.state = TaskState.COMPLETED
+                elif task.state == 64:
+                    task.state = TaskState.CANCELLED
 
         if workflow.last_task is not None:
             workflow.last_task = workflow.get_task_from_id(s_state['last_task'],tasklist)
