6 changes: 3 additions & 3 deletions SpiffWorkflow/bpmn/FeelLikeScriptEngine.py
@@ -79,8 +79,8 @@ def __eq__(self, other):

def feelConcatenate(*lst):
ilist = []
- for l in lst:
- ilist = ilist + l
+ for list_item in lst:
+ ilist = ilist + list_item
return ilist

def feelAppend(lst,item):
@@ -144,7 +144,7 @@ def feelFilter(var,a,b,op,column=None):
newvar.append({'key':key,'value':var[key]})
var = newvar

- if column!=None:
+ if column is not None:
return [x.get(column) for x in var if opmap[op](x.get(a), b)]
else:
return [x for x in var if opmap[op](x.get(a), b)]
4 changes: 2 additions & 2 deletions SpiffWorkflow/bpmn/PythonScriptEngine.py
@@ -41,8 +41,8 @@ class PythonScriptEngine(object):
def __init__(self, default_globals=None, scripting_additions=None, environment=None):

if default_globals is not None or scripting_additions is not None:
- warnings.warn(f'default_globals and scripting_additions are deprecated. '
- f'Please provide an environment such as TaskDataEnvrionment',
+ warnings.warn('default_globals and scripting_additions are deprecated. '
+ 'Please provide an environment such as TaskDataEnvrionment',
DeprecationWarning, stacklevel=2)

if environment is None:
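Note: a minimal sketch of the replacement construction this deprecation warning points to, assuming the TaskDataEnvironment class defined in SpiffWorkflow/bpmn/PythonScriptEngineEnvironment.py; the specific globals passed here are illustrative only.

```python
import datetime

from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment

# Pass scripting globals through an environment object rather than the
# deprecated default_globals / scripting_additions keyword arguments.
environment = TaskDataEnvironment({'datetime': datetime})
engine = PythonScriptEngine(environment=environment)
```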
2 changes: 1 addition & 1 deletion SpiffWorkflow/bpmn/PythonScriptEngineEnvironment.py
@@ -115,7 +115,7 @@ def __deepcopy__(self, memodict=None):
def __getattr__(self, attr):
try:
output = self[attr]
- except:
+ except Exception:
raise AttributeError(
"Dictionary has no attribute '%s' " % str(attr))
return output
2 changes: 1 addition & 1 deletion SpiffWorkflow/bpmn/parser/BpmnParser.py
@@ -96,7 +96,7 @@ def validate(self, bpmn, filename=None):
except ValidationException as ve:
ve.file_name = filename
ve.line_number = self.validator.error_log.last_error.line
- except LxmlError as le:
+ except LxmlError:
last_error = self.validator.error_log.last_error
raise ValidationException(last_error.message, file_name=filename,
line_number=last_error.line)
18 changes: 10 additions & 8 deletions SpiffWorkflow/bpmn/parser/TaskParser.py
@@ -98,23 +98,25 @@ def _add_loop_task(self, loop_characteristics):
self._copy_task_attrs(original)

def _add_multiinstance_task(self, loop_characteristics):

sequential = loop_characteristics.get('isSequential') == 'true'
prefix = 'bpmn:multiInstanceLoopCharacteristics'
cardinality = self.xpath(f'./{prefix}/bpmn:loopCardinality')
loop_input = self.xpath(f'./{prefix}/bpmn:loopDataInputRef')
if len(cardinality) == 0 and len(loop_input) == 0:
self.raise_validation_exception("A multiinstance task must specify a cardinality or a loop input data reference")
self.raise_validation_exception(
"A multiinstance task must specify a cardinality or a loop input data reference")
elif len(cardinality) > 0 and len(loop_input) > 0:
self.raise_validation_exception("A multiinstance task must specify exactly one of cardinality or loop input data reference")
self.raise_validation_exception(
"A multiinstance task must specify exactly one of cardinality or loop input data reference")
cardinality = cardinality[0].text if len(cardinality) > 0 else None

loop_input = loop_input[0].text if len(loop_input) > 0 else None
if loop_input is not None:
if self.task.io_specification is not None:
try:
loop_input = [v for v in self.task.io_specification.data_inputs if v.name == loop_input][0]
- except:
+ except Exception:
self.raise_validation_exception('The loop input data reference is missing from the IO specification')
else:
loop_input = TaskDataReference(loop_input)
@@ -129,7 +131,7 @@ def _add_multiinstance_task(self, loop_characteristics):
try:
refs = set(self.task.io_specification.data_inputs + self.task.io_specification.data_outputs)
loop_output = [v for v in refs if v.name == loop_output][0]
- except:
+ except Exception:
self.raise_validation_exception('The loop output data reference is missing from the IO specification')
else:
loop_output = TaskDataReference(loop_output)
@@ -142,8 +144,8 @@ def _add_multiinstance_task(self, loop_characteristics):

original = self.spec.task_specs.pop(self.task.name)
params = {
- 'task_spec': '',
- 'cardinality': cardinality,
+ 'task_spec': '',
+ 'cardinality': cardinality,
'data_input': loop_input,
'data_output':loop_output,
'input_item': input_item,
@@ -207,7 +209,7 @@ def parse_node(self):
target_ref = sequence_flow.get('targetRef')
try:
target_node = one(self.doc_xpath('.//bpmn:*[@id="%s"]'% target_ref))
- except:
+ except Exception:
self.raise_validation_exception('When looking for a task spec, we found two items, '
'perhaps a form has the same ID? (%s)' % target_ref)

2 changes: 1 addition & 1 deletion SpiffWorkflow/bpmn/parser/task_parsers.py
@@ -120,6 +120,6 @@ def get_script(self):
return one(self.xpath('.//bpmn:script')).text
except AssertionError as ae:
raise ValidationException(
f"Invalid Script Task. No Script Provided. " + str(ae),
"Invalid Script Task. No Script Provided. " + str(ae),
node=self.node, file_name=self.filename)

11 changes: 6 additions & 5 deletions SpiffWorkflow/bpmn/serializer/migration/version_1_2.py
@@ -48,7 +48,7 @@ def convert_timedate(spec):
elif isinstance(dt, timedelta):
spec['event_definition']['expression'] = f"'{td_to_iso(dt)}'"
spec['event_definition']['typename'] = 'DurationTimerEventDefinition'
- except:
+ except Exception:
raise VersionMigrationError(message.format(spec=spec['name']))

def convert_cycle(spec, task):
@@ -66,7 +66,7 @@ def convert_cycle(spec, task):
'next': datetime.combine(dt.date(), dt.time(), LOCALTZ).isoformat(),
'duration': duration.total_seconds(),
}
- except:
+ except Exception:
raise VersionMigrationError(message.format(spec=spec['name']))

if spec['typename'] == 'StartEvent':
@@ -84,7 +84,8 @@ def convert_cycle(spec, task):
task['children'].remove(remove['id'])
dct['tasks'].pop(remove['id'])

- has_timer = lambda ts: 'event_definition' in ts and ts['event_definition']['typename'] in [ 'CycleTimerEventDefinition', 'TimerEventDefinition']
+ def has_timer(ts):
+ return "event_definition" in ts and ts["event_definition"]["typename"] in ["CycleTimerEventDefinition", "TimerEventDefinition"]
for spec in [ ts for ts in dct['spec']['task_specs'].values() if has_timer(ts) ]:
spec['event_definition']['name'] = spec['event_definition'].pop('label')
if spec['event_definition']['typename'] == 'TimerEventDefinition':
@@ -132,7 +133,7 @@ def update_data_specs(spec):
item['typename'] = 'DataObject'

def check_multiinstance(dct):

specs = [ spec for spec in dct['spec']['task_specs'].values() if 'prevtaskclass' in spec ]
if len(specs) > 0:
raise VersionMigrationError("This workflow cannot be migrated because it contains MultiInstance Tasks")
@@ -225,7 +226,7 @@ def update_task_specs(spec):
if spec['typename'] not in ['BpmnStartTask', 'SimpleBpmnTask', '_EndJoin', '_BoundaryEventParent']:
spec['bpmn_id'] = spec['name']
spec['bpmn_name'] = spec['description'] or None
- if 'event_definition' in spec and spec['event_definition']['typename'] in descriptions:
+ if 'event_definition' in spec and spec['event_definition']['typename'] in descriptions:
spec_desc = descriptions.get(spec['typename'])
event_desc = descriptions.get(spec['event_definition']['typename'])
cancelling = spec.get('cancel_activity')
4 changes: 2 additions & 2 deletions SpiffWorkflow/bpmn/serializer/workflow.py
@@ -153,7 +153,7 @@ def get_version(self, serialization, use_gzip=False):
dct = self.__get_dict(serialization, use_gzip)
if self.VERSION_KEY in dct:
return dct[self.VERSION_KEY]
- except: # Don't bail out trying to get a version, just return none.
+ except Exception: # Don't bail out trying to get a version, just return none.
return None

def workflow_to_dict(self, workflow):
@@ -279,7 +279,7 @@ def task_tree_from_dict(self, process_dct, task_id, parent_task, process, top_le

for child_task_id in task_dict['children']:
if child_task_id in process_dct['tasks']:
- child = process_dct['tasks'][child_task_id]
+ process_dct['tasks'][child_task_id]
self.task_tree_from_dict(process_dct, child_task_id, task, process, top, top_dct)
else:
raise ValueError(f"Task {task_id} ({task_spec.name}) has child {child_task_id}, but no such task exists")
4 changes: 2 additions & 2 deletions SpiffWorkflow/bpmn/specs/bpmn_task_spec.py
@@ -69,7 +69,7 @@ def _update_hook(self, my_task):
data = {}
for var in self.io_specification.data_inputs:
if var.bpmn_id not in my_task.data:
raise WorkflowDataException(f"Missing data input", task=my_task, data_input=var)
raise WorkflowDataException("Missing data input", task=my_task, data_input=var)
data[var.bpmn_id] = my_task.data[var.bpmn_id]
my_task.data = data

@@ -84,7 +84,7 @@ def _on_complete_hook(self, my_task):
data = {}
for var in self.io_specification.data_outputs:
if var.bpmn_id not in my_task.data:
raise WorkflowDataException(f"Missing data ouput", task=my_task, data_output=var)
raise WorkflowDataException("Missing data ouput", task=my_task, data_output=var)
data[var.bpmn_id] = my_task.data[var.bpmn_id]
my_task.data = data

2 changes: 1 addition & 1 deletion SpiffWorkflow/bpmn/specs/mixins/inclusive_gateway.py
@@ -114,6 +114,6 @@ def check(spec):
def _run_hook(self, my_task):
outputs = self._get_matching_outputs(my_task)
if len(outputs) == 0:
- raise WorkflowTaskException(f'No conditions satisfied on gateway', task=my_task)
+ raise WorkflowTaskException('No conditions satisfied on gateway', task=my_task)
my_task._sync_children(outputs, TaskState.FUTURE)
return True
2 changes: 1 addition & 1 deletion SpiffWorkflow/bpmn/specs/mixins/subworkflow_task.py
@@ -119,7 +119,7 @@ def update_data(self, my_task, subworkflow):
for var in subworkflow.spec.io_specification.data_outputs:
if var.bpmn_id not in end[0].data:
raise WorkflowDataException(
f"The Data Output was not available in the subprocess output.",
"The Data Output was not available in the subprocess output.",
task=my_task,
data_output=var,
)
4 changes: 2 additions & 2 deletions SpiffWorkflow/bpmn/workflow.py
@@ -200,8 +200,8 @@ def catch_bpmn_message(self, name, payload):
conversation = task.task_spec.event_definition.conversation()
if not conversation:
raise WorkflowTaskException(
f"The waiting task and message payload can not be matched to any correlation key (conversation topic). "
f"And is therefor unable to respond to the given message.", task)
"The waiting task and message payload can not be matched to any correlation key (conversation topic). "
"And is therefor unable to respond to the given message.", task)
updated_props = self._correlate(conversation, payload, task)
task.task_spec.catch(task, event_definition)
self.refresh_waiting_tasks()
2 changes: 1 addition & 1 deletion SpiffWorkflow/camunda/parser/task_spec.py
@@ -180,5 +180,5 @@ def get_script(self):
return one(self.xpath('.//bpmn:script')).text
except AssertionError as ae:
raise ValidationException(
f"Invalid Script Task. No Script Provided. " + str(ae),
"Invalid Script Task. No Script Provided. " + str(ae),
node=self.node, file_name=self.filename)
2 changes: 1 addition & 1 deletion SpiffWorkflow/dmn/engine/DMNEngine.py
@@ -56,7 +56,7 @@ def result(self, task):
for rule in matched_rules:
rule_output = rule.output_as_dict(task)
for key in rule_output.keys():
- if not key in result:
+ if key not in result:
result[key] = []
result[key].append(rule_output[key])
elif len(matched_rules) > 0:
4 changes: 2 additions & 2 deletions SpiffWorkflow/operators.py
@@ -199,8 +199,8 @@ def valueof(scope, op, default=None):

def is_number(text):
try:
- x = int(text)
- except:
+ int(text)
+ except Exception:
return False
return True

2 changes: 1 addition & 1 deletion SpiffWorkflow/serializer/xml.py
@@ -487,7 +487,7 @@ def serialize_multi_instance(self, spec):

def deserialize_multi_instance(self, wf_spec, elem, cls=None,
**kwargs):
- if cls == None:
+ if cls is None:
cls = MultiInstance
#cls = MultiInstance(wf_spec,elem.find('name'),elem.find('times'))
times = self.deserialize_value(elem.find('times'))
2 changes: 1 addition & 1 deletion SpiffWorkflow/specs/Transform.py
@@ -58,7 +58,7 @@ def _update_hook(self, my_task):

if self.transforms:
for transform in self.transforms:
- logger.debug(f'Execute transform', extra=my_task.log_info({'transform': transform}))
+ logger.debug('Execute transform', extra=my_task.log_info({'transform': transform}))
exec(transform)
return True

1 change: 0 additions & 1 deletion SpiffWorkflow/task.py
@@ -21,7 +21,6 @@

import logging
import time
- import warnings
from uuid import uuid4

from .util.deep_merge import DeepMerge
3 changes: 2 additions & 1 deletion SpiffWorkflow/util/deep_merge.py
@@ -31,7 +31,8 @@ class DeepMerge(object):
@staticmethod
def merge(a, b, path=None):
"merges b into a"
- if path is None: path = []
+ if path is None:
+ path = []
for key in b:
if key in a:
if a[key] == b[key]:
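Note: a small usage sketch of DeepMerge.merge; the recursive handling of nested dictionaries shown in the comment is an assumption based on the docstring and surrounding code, not something this diff spells out.

```python
from SpiffWorkflow.util.deep_merge import DeepMerge

a = {'x': 1, 'nested': {'y': 2}}
b = {'nested': {'z': 3}}

# "merges b into a" in place; assuming the usual recursive behaviour,
# nested dictionaries are combined rather than overwritten.
DeepMerge.merge(a, b)
print(a)  # expected: {'x': 1, 'nested': {'y': 2, 'z': 3}}
```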
2 changes: 1 addition & 1 deletion SpiffWorkflow/workflow.py
@@ -98,7 +98,7 @@ def is_completed(self):
mask = TaskState.NOT_FINISHED_MASK
iter = Task.Iterator(self.task_tree, mask)
try:
- nexttask = next(iter)
+ next(iter)
except StopIteration:
# No waiting tasks found.
return True
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -3,4 +3,4 @@ requires = ["setuptools", "wheel"]
build-backend = "setuptools.build_meta:__legacy__"

[tool.ruff]
- line-length = 120
+ line-length = 140
2 changes: 1 addition & 1 deletion setup.py
@@ -1,10 +1,10 @@
# -*- coding: utf-8 -*-
import pathlib
import sys
+ from setuptools import setup, find_packages

sys.path.insert(0, '.')
sys.path.insert(0, 'SpiffWorkflow')
- from setuptools import setup, find_packages

# The directory containing this file
HERE = pathlib.Path(__file__).parent
Expand Down
6 changes: 4 additions & 2 deletions tests/SpiffWorkflow/bpmn/CustomScriptTest.py
@@ -33,11 +33,13 @@ def testRunThroughSaveRestore(self):
self.actual_test(save_restore=False)

def actual_test(self, save_restore):
- if save_restore: self.save_restore()
+ if save_restore:
+ self.save_restore()
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.complete_subworkflow()
- if save_restore: self.save_restore()
+ if save_restore:
+ self.save_restore()
data = self.workflow.last_task.data
self.assertEqual(data['c1'], 'HELLO')
self.assertEqual(data['c2'], 'GOODBYE')
1 change: 0 additions & 1 deletion tests/SpiffWorkflow/bpmn/ProcessDependencyTest.py
@@ -27,7 +27,6 @@ def testSpiffParser(self):
def actual_test(self, parser):
# We ought to test the parsers in the packages they belong to, not here.
filename = 'call_activity_nested'
- process_name = 'Level1'
base_dir = os.path.join(os.path.dirname(__file__), 'data', filename)
parser.add_bpmn_file(os.path.join(base_dir, 'call_activity_nested.bpmn'))
dependencies = parser.get_dependencies()