
Commit 86cb84d

Merge pull request #393 from sartography/bugfix/data-object-management
Bugfix/data object management
2 parents 32c00e4 + bee5cc6 commit 86cb84d

21 files changed: +1738 -39 lines

SpiffWorkflow/bpmn/parser/ProcessParser.py

Lines changed: 2 additions & 5 deletions
@@ -39,7 +39,6 @@ def __init__(self, p, node, nsmap, data_stores, filename=None, lane=None):
         :param node: the XML node for the process
         :param data_stores: map of ids to data store implementations
         :param filename: the source BPMN filename (optional)
-        :param doc_xpath: an xpath evaluator for the document (optional)
         :param lane: the lane of a subprocess (optional)
         """
         super().__init__(node, nsmap, filename=filename, lane=lane)
@@ -48,7 +47,7 @@ def __init__(self, p, node, nsmap, data_stores, filename=None, lane=None):
         self.spec = None
         self.process_executable = node.get('isExecutable', 'true') == 'true'
         self.data_stores = data_stores
-        self.inherited_data_objects = {}
+        self.parent = None
 
     def get_name(self):
         """
@@ -118,8 +117,6 @@ def _parse(self):
             raise ValidationException(f"Process {self.bpmn_id} is not executable.", node=self.node, file_name=self.filename)
         self.spec = BpmnProcessSpec(name=self.bpmn_id, description=self.get_name(), filename=self.filename)
 
-        self.spec.data_objects.update(self.inherited_data_objects)
-
         # Get the data objects
         for obj in self.xpath('./bpmn:dataObject'):
             data_object = self.parse_data_object(obj)
@@ -147,7 +144,7 @@ def _parse(self):
         split_task.inputs = [self.spec.start]
 
     def parse_data_object(self, obj):
-        return DataObject(obj.get('id'), obj.get('name'))
+        return self.create_data_spec(obj, DataObject)
 
     def get_spec(self):
         """

SpiffWorkflow/bpmn/parser/node_parser.py

Lines changed: 16 additions & 4 deletions
@@ -82,8 +82,9 @@ def parse_incoming_data_references(self):
         specs = []
         for name in self.xpath('./bpmn:dataInputAssociation/bpmn:sourceRef'):
             ref = first(self.doc_xpath(f".//bpmn:dataObjectReference[@id='{name.text}']"))
-            if ref is not None and ref.get('dataObjectRef') in self.process_parser.spec.data_objects:
-                specs.append(self.process_parser.spec.data_objects[ref.get('dataObjectRef')])
+            data_obj = self._resolve_data_object_ref(ref)
+            if data_obj is not None:
+                specs.append(data_obj)
             else:
                 ref = first(self.doc_xpath(f".//bpmn:dataStoreReference[@id='{name.text}']"))
                 if ref is not None and ref.get('dataStoreRef') in self.process_parser.data_stores:
@@ -96,8 +97,9 @@ def parse_outgoing_data_references(self):
         specs = []
         for name in self.xpath('./bpmn:dataOutputAssociation/bpmn:targetRef'):
             ref = first(self.doc_xpath(f".//bpmn:dataObjectReference[@id='{name.text}']"))
-            if ref is not None and ref.get('dataObjectRef') in self.process_parser.spec.data_objects:
-                specs.append(self.process_parser.spec.data_objects[ref.get('dataObjectRef')])
+            data_obj = self._resolve_data_object_ref(ref)
+            if data_obj is not None:
+                specs.append(data_obj)
             else:
                 ref = first(self.doc_xpath(f".//bpmn:dataStoreReference[@id='{name.text}']"))
                 if ref is not None and ref.get('dataStoreRef') in self.process_parser.data_stores:
@@ -124,6 +126,16 @@ def parse_io_spec(self):
             outputs.append(data_refs[ref.text])
         return BpmnIoSpecification(inputs, outputs)
 
+    def _resolve_data_object_ref(self, ref):
+        if ref is not None:
+            current = self.process_parser
+            while current is not None:
+                data_obj = current.spec.data_objects.get(ref.get('dataObjectRef'))
+                if data_obj is None:
+                    current = self.process_parser.parent
+                else:
+                    return data_obj
+
     def create_data_spec(self, item, cls):
         return cls(item.attrib.get('id'), item.attrib.get('name'))
 

SpiffWorkflow/bpmn/parser/task_parsers.py

Lines changed: 1 addition & 1 deletion
@@ -75,7 +75,7 @@ def get_subprocess_spec(task_parser):
         spec_id = task_parser.node.get('id')
         # This parser makes me want to cry
         spec_parser = task_parser.process_parser.parser.process_parsers[spec_id]
-        spec_parser.inherited_data_objects.update(task_parser.process_parser.spec.data_objects)
+        spec_parser.parent = task_parser.process_parser
         return spec_id
 
     @staticmethod
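
Taken together, the parser changes above swap the old approach of copying a parent's data objects into each subprocess parser (inherited_data_objects) for a parent link that is followed at lookup time: get_subprocess_spec records the enclosing ProcessParser on the subprocess parser, and _resolve_data_object_ref walks that chain when resolving a dataObjectReference. A minimal standalone sketch of the lookup pattern, using hypothetical toy classes rather than the SpiffWorkflow parser API:

# Toy sketch of the parent-chain lookup (hypothetical classes, not the
# SpiffWorkflow API): each parser keeps a reference to the parser of its
# enclosing process, and a data object reference is resolved by walking
# that chain until a process that declares the object is found.

class ToyProcessParser:
    def __init__(self, data_objects, parent=None):
        self.data_objects = data_objects  # data objects declared by this process
        self.parent = parent              # parser of the enclosing process, if any

def resolve_data_object(parser, name):
    current = parser
    while current is not None:
        if name in current.data_objects:
            return current.data_objects[name]
        current = current.parent
    return None  # unresolved; the real parser then falls back to data stores

top = ToyProcessParser({'invoice': 'DataObject(invoice)'})
sub = ToyProcessParser({}, parent=top)  # parser for a nested call activity
assert resolve_data_object(sub, 'invoice') == 'DataObject(invoice)'
assert resolve_data_object(top, 'missing') is None

Because the lookup happens on demand, a subprocess no longer receives its own copy of the parent's data object specs, so a given data object has a single spec and, at runtime, a single value.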

SpiffWorkflow/bpmn/serializer/default/workflow.py

Lines changed: 0 additions & 2 deletions
@@ -157,6 +157,4 @@ def subprocesses_from_dict(self, dct, workflow, top_workflow=None):
             sp = self.registry.restore(dct.pop(str(task.id)), task=task, top_workflow=top_workflow)
             top_workflow.subprocesses[task.id] = sp
             sp.completed_event.connect(task.task_spec._on_subworkflow_completed, task)
-            if len(sp.spec.data_objects) > 0:
-                sp.data = task.workflow.data
             self.subprocesses_from_dict(dct, sp, top_workflow)

SpiffWorkflow/bpmn/serializer/migration/version_1_3.py

Lines changed: 37 additions & 1 deletion
@@ -132,4 +132,40 @@ def add_new_typenames(dct):
     for sp in dct['subprocesses'].values():
         sp['typename'] = 'BpmnSubWorkflow'
         for task in sp['tasks'].values():
-            task['typename'] = 'Task'
+            task['typename'] = 'Task'
+
+def update_data_objects(dct):
+
+    def update_spec(parent):
+        children = []
+        for ts in [ts for ts in parent['task_specs'].values() if 'spec' in ts]:
+            child = dct['subprocess_specs'].get(ts['spec'])
+            children.append((child, ts['typename']))
+            update_spec(child)
+        for child in [c for c, spec_type in children if spec_type != 'CallActivity']:
+            for name in parent['data_objects']:
+                child['data_objects'].pop(name, None)
+
+    data_objects = []
+
+    def update_wf(wf, spec):
+
+        data_objects.extend([v for v in spec.get('data_objects', {}) if v not in data_objects])
+
+        for task in [t for t in wf['tasks'].values() if t['id'] in dct['subprocesses']]:
+            ts = spec['task_specs'][task['task_spec']]
+            sp_spec = dct['subprocess_specs'].get(ts['spec'])
+            sp = dct['subprocesses'].get(task['id'])
+            update_wf(sp, sp_spec)
+
+        if len(spec.get('data_objects', {})) > 0:
+            wf['data']['data_objects'] = {}
+
+        for key in list(wf['data']):
+            if key in spec.get('data_objects', {}):
+                wf['data']['data_objects'][key] = wf['data'].pop(key)
+            elif key in data_objects:
+                del wf['data'][key]
+
+    update_spec(dct['spec'])
+    update_wf(dct, dct['spec'])
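
For reference, a hand-built sketch of the serialized-workflow dictionary this migration walks, assuming a single top-level process with one data object and no subprocesses (the key names follow the diff; the values are placeholders, and the import assumes this commit's package layout):

# Hypothetical minimal input for update_data_objects: one top-level process,
# one data object ('invoice'), no subprocesses.
from SpiffWorkflow.bpmn.serializer.migration.version_1_3 import update_data_objects

dct = {
    'spec': {
        'task_specs': {'Start': {'typename': 'StartTask'}},       # no nested 'spec' keys
        'data_objects': {'invoice': {'typename': 'DataObject'}},
    },
    'subprocess_specs': {},
    'subprocesses': {},
    'tasks': {},
    'data': {'invoice': {'total': 100}},
}

update_data_objects(dct)

# The value moves from the workflow's flat data into a dedicated 'data_objects' key.
assert dct['data'] == {'data_objects': {'invoice': {'total': 100}}}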

SpiffWorkflow/bpmn/serializer/migration/version_migration.py

Lines changed: 2 additions & 0 deletions
@@ -33,6 +33,7 @@
     remove_boundary_event_parent,
     remove_root_task,
     add_new_typenames,
+    update_data_objects,
 )
 
 def from_version_1_2(dct):
@@ -41,6 +42,7 @@ def from_version_1_2(dct):
     remove_boundary_event_parent(dct)
     remove_root_task(dct)
     add_new_typenames(dct)
+    update_data_objects(dct)
 
 
 def from_version_1_1(dct):

SpiffWorkflow/bpmn/specs/bpmn_task_spec.py

Lines changed: 1 addition & 1 deletion
@@ -12,7 +12,7 @@ def __init__(self, *args):
         super(_BpmnCondition, self).__init__(*args)
 
     def _matches(self, task):
-        return task.workflow.script_engine.evaluate(task, self.args[0], external_context=task.workflow.data)
+        return task.workflow.script_engine.evaluate(task, self.args[0], external_context=task.workflow.data_objects)
 
 
 class BpmnIoSpecification:

SpiffWorkflow/bpmn/specs/data_spec.py

Lines changed: 16 additions & 3 deletions
@@ -62,18 +62,31 @@ class DataObject(BpmnDataSpecification):
 
     def get(self, my_task):
         """Copy a value form the workflow data to the task data."""
-        if self.bpmn_id not in my_task.workflow.data:
+
+        # Find the spec where the data object is defined and put it there
+        wf = my_task.workflow
+        while wf is not None and self.bpmn_id not in wf.spec.data_objects:
+            wf = wf.parent_workflow
+
+        if wf is None or self.bpmn_id not in wf.data_objects:
             message = f"The data object could not be read; '{self.bpmn_id}' does not exist in the process."
             raise WorkflowDataException(message, my_task, data_input=self)
-        my_task.data[self.bpmn_id] = deepcopy(my_task.workflow.data[self.bpmn_id])
+
+        my_task.data[self.bpmn_id] = deepcopy(wf.data_objects[self.bpmn_id])
         data_log.info(f'Read workflow variable {self.bpmn_id}', extra=my_task.log_info())
 
     def set(self, my_task):
         """Copy a value from the task data to the workflow data"""
+
        if self.bpmn_id not in my_task.data:
             message = f"A data object could not be set; '{self.bpmn_id}' not exist in the task."
             raise WorkflowDataException(message, my_task, data_output=self)
-        my_task.workflow.data[self.bpmn_id] = deepcopy(my_task.data[self.bpmn_id])
+
+        wf = my_task.workflow
+        while wf is not None and self.bpmn_id not in wf.spec.data_objects:
+            wf = wf.parent_workflow
+
+        wf.data_objects[self.bpmn_id] = deepcopy(my_task.data[self.bpmn_id])
         del my_task.data[self.bpmn_id]
         data_log.info(f'Set workflow variable {self.bpmn_id}', extra=my_task.log_info())
 
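
The runtime side of the same change: a data object's value now lives on the workflow whose spec declares it, located by walking parent_workflow, rather than on whichever workflow's data dictionary the task happens to run in. A toy sketch of that ownership rule (hypothetical classes, not the SpiffWorkflow API):

# Toy model of where data object values live after this change: reads and
# writes are routed to the workflow whose spec declares the object, found
# by walking parent_workflow.

from copy import deepcopy

class ToyWorkflow:
    def __init__(self, declared, parent=None):
        self.declared = set(declared)   # data object names declared by this spec
        self.data_objects = {}          # values stored at this level
        self.parent_workflow = parent

def owning_workflow(wf, name):
    while wf is not None and name not in wf.declared:
        wf = wf.parent_workflow
    return wf

top = ToyWorkflow({'invoice'})
sub = ToyWorkflow(set(), parent=top)    # subprocess workflow

# A 'set' from inside the subprocess stores the value on the declaring workflow,
# and a later 'get' anywhere in the hierarchy reads that same single copy.
owning_workflow(sub, 'invoice').data_objects['invoice'] = deepcopy({'total': 100})
assert owning_workflow(sub, 'invoice') is top
assert top.data_objects['invoice'] == {'total': 100}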

SpiffWorkflow/bpmn/specs/event_definitions/conditional.py

Lines changed: 1 addition & 1 deletion
@@ -9,6 +9,6 @@ def __init__(self, expression, **kwargs):
 
     def has_fired(self, my_task):
         my_task._set_internal_data(
-            has_fired=my_task.workflow.script_engine.evaluate(my_task, self.expression, external_context=my_task.workflow.data)
+            has_fired=my_task.workflow.script_engine.evaluate(my_task, self.expression, external_context=my_task.workflow.data_objects)
         )
         return my_task._get_internal_data('has_fired', False)

SpiffWorkflow/bpmn/specs/mixins/subworkflow_task.py

Lines changed: 0 additions & 6 deletions
@@ -58,12 +58,6 @@ def _on_cancel(self, my_task):
         subworkflow.cancel()
 
     def copy_data(self, my_task, subworkflow):
-        # There is only one copy of any given data object, so it should be updated immediately
-        # Doing this is actually a little problematic, because it gives parent processes access to
-        # data objects defined in subprocesses.
-        # But our data management is already hopelessly messed up and in dire needs of reconsideration
-        if len(subworkflow.spec.data_objects) > 0:
-            subworkflow.data = my_task.workflow.data
         start = subworkflow.get_next_task(spec_name='Start')
         start.set_data(**my_task.data)
 
