Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
19 changes: 19 additions & 0 deletions SpiffWorkflow/bpmn/serializer/helpers/encoder.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
import json
from types import ModuleType


def create_encoder(registry, user_encoder_cls=None):
    """Build a ``json.JSONEncoder`` subclass that serializes via *registry*.

    Args:
        registry: object exposing ``typenames`` (a type -> name mapping) and
            ``convert_to_dict`` (a name -> converter-callable mapping)
        user_encoder_cls: optional encoder class to extend instead of
            ``json.JSONEncoder``

    Returns:
        the newly created encoder class
    """
    parent = json.JSONEncoder if user_encoder_cls is None else user_encoder_cls

    class SpiffEncoder(parent):

        def default(self, obj):
            # Types registered with the registry use their own converter.
            name = registry.typenames.get(type(obj))
            if name is not None:
                return registry.convert_to_dict[name](obj)
            # Modules and callables have no JSON form; emit them as null.
            if isinstance(obj, ModuleType) or callable(obj):
                return None
            # Sets have no JSON equivalent; fall back to a list.
            if isinstance(obj, set):
                return list(obj)
            # Defer to the parent encoder (raises TypeError by default).
            return super().default(obj)

    return SpiffEncoder
12 changes: 12 additions & 0 deletions SpiffWorkflow/bpmn/serializer/helpers/registry.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@ class DefaultRegistry(DictionaryConverter):
def __init__(self):

super().__init__()
self._encoder_mode = False
self.register(UUID, lambda v: { 'value': str(v) }, lambda v: UUID(v['value']))
self.register(datetime, lambda v: { 'value': v.isoformat() }, lambda v: datetime.fromisoformat(v['value']))
self.register(timedelta, lambda v: { 'days': v.days, 'seconds': v.seconds }, lambda v: timedelta(**v))
Expand All @@ -46,9 +47,20 @@ def convert(self, obj):
Returns:
the result of `convert` conversion after preprocessing
"""
if self._encoder_mode:
return self._convert_for_encoder(obj)
cleaned = self.clean(obj)
return super().convert(cleaned)

def _convert_for_encoder(self, obj):
    """Shallow conversion used while the registry is in encoder mode.

    Registered types are converted to their dict form immediately; plain
    dicts are only preprocessed with ``clean``; everything else is passed
    through unchanged (the JSON encoder's ``default`` hook handles any
    remaining unserializable values later).
    """
    converter = self.convert_to_dict.get(self.typenames.get(obj.__class__))
    if converter is not None:
        return converter(obj)
    if isinstance(obj, dict):
        return self.clean(obj)
    return obj

def clean(self, obj):
"""A method that can be used to preprocess an object before conversion to a dict.

Expand Down
12 changes: 9 additions & 3 deletions SpiffWorkflow/bpmn/serializer/workflow.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@

from .migration.version_migration import MIGRATIONS
from .helpers import DefaultRegistry
from .helpers.encoder import create_encoder

from .config import DEFAULT_CONFIG

Expand Down Expand Up @@ -97,6 +98,7 @@ def __init__(self, registry=None, version=VERSION, json_encoder_cls=None, json_d
self.json_encoder_cls = json_encoder_cls
self.json_decoder_cls = json_decoder_cls
self.VERSION = version
self._encoder_cls = create_encoder(self.registry, json_encoder_cls)

def serialize_json(self, workflow, use_gzip=False):
    """Serialize the dictionary representation of the workflow to JSON.

    Args:
        workflow: the workflow to serialize
        use_gzip: whether the JSON string should be gzip-compressed

    Returns:
        a JSON dump of the dictionary representation or a gzipped version of it
    """
    # Enter encoder mode so the registry's `convert` defers per-object
    # conversion to the JSON encoder's `default` hook instead of walking
    # the whole structure up front.
    self.registry._encoder_mode = True
    try:
        dct = self.to_dict(workflow)
        dct[self.VERSION_KEY] = self.VERSION
        json_str = json.dumps(dct, cls=self._encoder_cls)
    finally:
        # Always restore normal conversion, even if serialization raises.
        self.registry._encoder_mode = False
    return gzip.compress(json_str.encode('utf-8')) if use_gzip else json_str

def deserialize_json(self, serialization, use_gzip=False):
Expand Down
18 changes: 9 additions & 9 deletions tests/SpiffWorkflow/bpmn/test_performance_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,13 +80,13 @@ def test_performance_20_items(self):

# Measure serialization
start_serialize = time.time()
state = self.serializer.to_dict(workflow)
state = self.serializer.serialize_json(workflow)
end_serialize = time.time()
serialize_time = end_serialize - start_serialize

# Measure deserialization
start_deserialize = time.time()
restored_workflow = self.serializer.from_dict(state)
restored_workflow = self.serializer.deserialize_json(state)
end_deserialize = time.time()
deserialize_time = end_deserialize - start_deserialize

Expand Down Expand Up @@ -119,13 +119,13 @@ def test_performance_100_items(self):

# Measure serialization
start_serialize = time.time()
state = self.serializer.to_dict(workflow)
state = self.serializer.serialize_json(workflow)
end_serialize = time.time()
serialize_time = end_serialize - start_serialize

# Measure deserialization
start_deserialize = time.time()
restored_workflow = self.serializer.from_dict(state)
restored_workflow = self.serializer.deserialize_json(state)
end_deserialize = time.time()
deserialize_time = end_deserialize - start_deserialize

Expand Down Expand Up @@ -158,13 +158,13 @@ def test_performance_200_items(self):

# Measure serialization
start_serialize = time.time()
state = self.serializer.to_dict(workflow)
state = self.serializer.serialize_json(workflow)
end_serialize = time.time()
serialize_time = end_serialize - start_serialize

# Measure deserialization
start_deserialize = time.time()
restored_workflow = self.serializer.from_dict(state)
restored_workflow = self.serializer.deserialize_json(state)
end_deserialize = time.time()
deserialize_time = end_deserialize - start_deserialize

Expand Down Expand Up @@ -197,13 +197,13 @@ def test_performance_300_items(self):

# Measure serialization
start_serialize = time.time()
state = self.serializer.to_dict(workflow)
state = self.serializer.serialize_json(workflow)
end_serialize = time.time()
serialize_time = end_serialize - start_serialize

# Measure deserialization
start_deserialize = time.time()
restored_workflow = self.serializer.from_dict(state)
restored_workflow = self.serializer.deserialize_json(state)
end_deserialize = time.time()
deserialize_time = end_deserialize - start_deserialize

Expand Down Expand Up @@ -237,7 +237,7 @@ def did_complete_task(task):
# Serialize at checkpoints
if tasks_completed % checkpoint_interval == 0:
start_serialize = time.time()
state = self.serializer.to_dict(workflow)
state = self.serializer.serialize_json(workflow)
end_serialize = time.time()
serialize_time = end_serialize - start_serialize

Expand Down
Loading