diff --git a/.gitignore b/.gitignore index b29f65217d..9d0648062f 100644 --- a/.gitignore +++ b/.gitignore @@ -18,6 +18,7 @@ tests/htmlcov/* .python-version *~ *.tmp +.pre-commit-config.yaml # Debian generated files debian/.debhelper/* diff --git a/docs/CONTRIBUTORS.rst b/docs/CONTRIBUTORS.rst index 7c883d9df9..74abd39e19 100644 --- a/docs/CONTRIBUTORS.rst +++ b/docs/CONTRIBUTORS.rst @@ -5,8 +5,8 @@ Note: Development of TUF occurs on the "develop" branch of this repository. Contributions can be made by submitting GitHub pull requests. Submitted code should follow our `code style guidelines -`_, which provide -examples of what to do (or not to do) when writing Python code. +`_, which are +enforced with linters and auto-formatters (details below). Contributors must also indicate acceptance of the `Developer Certificate of Origin `_ (DCO) when making a contribution @@ -105,6 +105,26 @@ To work on the TUF project, it's best to perform a development install. $ pip install -r requirements-dev.txt +Auto-formatting +=============== + +CI/CD will check that new TUF code is formatted with `black +`__ and `isort `__. +Auto-formatting can be done on the command line: +:: + + $ # TODO: configure black and isort args in pyproject.toml (see #1161) + $ black --line-length 80 tuf/api + $ isort --line-length 80 --profile black -p tuf tuf/api + +or via source code editor plugin +[`black `__, +`isort `__] or +`pre-commit `__-powered git hooks +[`black `__, +`isort `__]. + + Testing ======= diff --git a/docs/GOVERNANCE.md b/docs/GOVERNANCE.md index e21db3f394..c7bf70755a 100644 --- a/docs/GOVERNANCE.md +++ b/docs/GOVERNANCE.md @@ -31,6 +31,8 @@ undergo review and automated testing, including, but not limited to: [Tox](https://tox.readthedocs.io/en/latest/). * Static code analysis via [Pylint](https://www.pylint.org/) and [Bandit](https://wiki.openstack.org/wiki/Security/Projects/Bandit). 
+- Auto-formatting with [black](https://black.readthedocs.io/) and +[isort](https://pycqa.github.io/isort/). * Checks for Signed-off-by commits via [Probot: DCO](https://github.com/probot/dco). * Review by one or more [maintainers](MAINTAINERS.txt). diff --git a/requirements-test.txt b/requirements-test.txt index 80a7b09904..65646ca222 100644 --- a/requirements-test.txt +++ b/requirements-test.txt @@ -8,5 +8,7 @@ python-dateutil # additional test tools for linting and coverage measurement coverage +black +isort pylint bandit diff --git a/tox.ini b/tox.ini index b54a221cc0..a2fda18722 100644 --- a/tox.ini +++ b/tox.ini @@ -42,9 +42,14 @@ commands = [testenv:lint] commands = - # Use different pylint configs for legacy and new (tuf/api) code + # Use different configs for new (tuf/api/*) and legacy code + # TODO: configure black and isort args in pyproject.toml (see #1161) + black --check --diff --line-length 80 {toxinidir}/tuf/api + isort --check --diff --line-length 80 --profile black -p tuf {toxinidir}/tuf/api + pylint {toxinidir}/tuf/api --rcfile={toxinidir}/tuf/api/pylintrc + # NOTE: Contrary to what the pylint docs suggest, ignoring full paths does # work, unfortunately each subdirectory has to be ignored explicitly. pylint {toxinidir}/tuf --ignore={toxinidir}/tuf/api,{toxinidir}/tuf/api/serialization - pylint {toxinidir}/tuf/api --rcfile={toxinidir}/tuf/api/pylintrc + bandit -r {toxinidir}/tuf diff --git a/tuf/api/metadata.py b/tuf/api/metadata.py index afd45c64f7..f891388f4f 100644 --- a/tuf/api/metadata.py +++ b/tuf/api/metadata.py @@ -15,25 +15,25 @@ available in the class model. 
""" +import tempfile from datetime import datetime, timedelta from typing import Any, Dict, Mapping, Optional -import tempfile - from securesystemslib.keys import verify_signature +from securesystemslib.signer import Signature, Signer +from securesystemslib.storage import FilesystemBackend, StorageBackendInterface from securesystemslib.util import persist_temp_file -from securesystemslib.signer import Signer, Signature -from securesystemslib.storage import (StorageBackendInterface, - FilesystemBackend) - -from tuf.api.serialization import (MetadataSerializer, MetadataDeserializer, - SignedSerializer) -import tuf.formats import tuf.exceptions +import tuf.formats +from tuf.api.serialization import ( + MetadataDeserializer, + MetadataSerializer, + SignedSerializer, +) -class Metadata(): +class Metadata: """A container for signed TUF metadata. Provides methods to convert to and from dictionary, read and write to and @@ -55,12 +55,13 @@ class Metadata(): ] """ - def __init__(self, signed: 'Signed', signatures: list) -> None: + + def __init__(self, signed: "Signed", signatures: list) -> None: self.signed = signed self.signatures = signatures @classmethod - def from_dict(cls, metadata: Mapping[str, Any]) -> 'Metadata': + def from_dict(cls, metadata: Mapping[str, Any]) -> "Metadata": """Creates Metadata object from its dict representation. Arguments: @@ -78,34 +79,36 @@ def from_dict(cls, metadata: Mapping[str, Any]) -> 'Metadata': """ # Dispatch to contained metadata class on metadata _type field. 
- _type = metadata['signed']['_type'] + _type = metadata["signed"]["_type"] - if _type == 'targets': + if _type == "targets": inner_cls = Targets - elif _type == 'snapshot': + elif _type == "snapshot": inner_cls = Snapshot - elif _type == 'timestamp': + elif _type == "timestamp": inner_cls = Timestamp - elif _type == 'root': + elif _type == "root": inner_cls = Root else: raise ValueError(f'unrecognized metadata type "{_type}"') signatures = [] - for signature in metadata.pop('signatures'): + for signature in metadata.pop("signatures"): signature_obj = Signature.from_dict(signature) signatures.append(signature_obj) return cls( - signed=inner_cls.from_dict(metadata.pop('signed')), - signatures=signatures) + signed=inner_cls.from_dict(metadata.pop("signed")), + signatures=signatures, + ) @classmethod def from_file( - cls, filename: str, + cls, + filename: str, deserializer: Optional[MetadataDeserializer] = None, - storage_backend: Optional[StorageBackendInterface] = None - ) -> 'Metadata': + storage_backend: Optional[StorageBackendInterface] = None, + ) -> "Metadata": """Loads TUF metadata from file storage. Arguments: @@ -130,6 +133,7 @@ def from_file( # Use local scope import to avoid circular import errors # pylint: disable=import-outside-toplevel from tuf.api.serialization.json import JSONDeserializer + deserializer = JSONDeserializer() if storage_backend is None: @@ -147,14 +151,13 @@ def to_dict(self) -> Dict[str, Any]: for sig in self.signatures: signatures.append(sig.to_dict()) - return { - 'signatures': signatures, - 'signed': self.signed.to_dict() - } + return {"signatures": signatures, "signed": self.signed.to_dict()} def to_file( - self, filename: str, serializer: Optional[MetadataSerializer] = None, - storage_backend: Optional[StorageBackendInterface] = None + self, + filename: str, + serializer: Optional[MetadataSerializer] = None, + storage_backend: Optional[StorageBackendInterface] = None, ) -> None: """Writes TUF metadata to file storage. 
@@ -178,6 +181,7 @@ def to_file( # Use local scope import to avoid circular import errors # pylint: disable=import-outside-toplevel from tuf.api.serialization.json import JSONSerializer + serializer = JSONSerializer(compact=True) with tempfile.TemporaryFile() as temp_file: @@ -186,8 +190,10 @@ def to_file( # Signatures. def sign( - self, signer: Signer, append: bool = False, - signed_serializer: Optional[SignedSerializer] = None + self, + signer: Signer, + append: bool = False, + signed_serializer: Optional[SignedSerializer] = None, ) -> Dict[str, Any]: """Creates signature over 'signed' and assigns it to 'signatures'. @@ -216,6 +222,7 @@ def sign( # Use local scope import to avoid circular import errors # pylint: disable=import-outside-toplevel from tuf.api.serialization.json import CanonicalJSONSerializer + signed_serializer = CanonicalJSONSerializer() signature = signer.sign(signed_serializer.serialize(self.signed)) @@ -227,8 +234,11 @@ def sign( return signature - def verify(self, key: Mapping[str, Any], - signed_serializer: Optional[SignedSerializer] = None) -> bool: + def verify( + self, + key: Mapping[str, Any], + signed_serializer: Optional[SignedSerializer] = None, + ) -> bool: """Verifies 'signatures' over 'signed' that match the passed key by id. Arguments: @@ -251,27 +261,31 @@ def verify(self, key: Mapping[str, Any], A boolean indicating if the signature is valid for the passed key. 
""" - signatures_for_keyid = list(filter( - lambda sig: sig.keyid == key['keyid'], self.signatures)) + signatures_for_keyid = list( + filter(lambda sig: sig.keyid == key["keyid"], self.signatures) + ) if not signatures_for_keyid: - raise tuf.exceptions.Error( - f'no signature for key {key["keyid"]}.') + raise tuf.exceptions.Error(f"no signature for key {key['keyid']}.") if len(signatures_for_keyid) > 1: raise tuf.exceptions.Error( - f'{len(signatures_for_keyid)} signatures for key ' - f'{key["keyid"]}, not sure which one to verify.') + f"{len(signatures_for_keyid)} signatures for key " + f"{key['keyid']}, not sure which one to verify." + ) if signed_serializer is None: # Use local scope import to avoid circular import errors # pylint: disable=import-outside-toplevel from tuf.api.serialization.json import CanonicalJSONSerializer + signed_serializer = CanonicalJSONSerializer() return verify_signature( - key, signatures_for_keyid[0].to_dict(), - signed_serializer.serialize(self.signed)) + key, + signatures_for_keyid[0].to_dict(), + signed_serializer.serialize(self.signed), + ) class Signed: @@ -289,12 +303,13 @@ class Signed: expires: The metadata expiration datetime object. """ + # NOTE: Signed is a stupid name, because this might not be signed yet, but # we keep it to match spec terminology (I often refer to this as "payload", # or "inner metadata") def __init__( - self, _type: str, version: int, spec_version: str, - expires: datetime) -> None: + self, _type: str, version: int, spec_version: str, expires: datetime + ) -> None: self._type = _type self.version = version @@ -303,7 +318,7 @@ def __init__( # TODO: Should we separate data validation from constructor? 
if version < 0: - raise ValueError(f'version must be < 0, got {version}') + raise ValueError(f"version must be < 0, got {version}") self.version = version @staticmethod @@ -315,10 +330,10 @@ def _common_fields_from_dict(signed_dict: Mapping[str, Any]) -> list: See '{Root, Timestamp, Snapshot, Targets}.from_dict' methods for usage. """ - _type = signed_dict.pop('_type') - version = signed_dict.pop('version') - spec_version = signed_dict.pop('spec_version') - expires_str = signed_dict.pop('expires') + _type = signed_dict.pop("_type") + version = signed_dict.pop("version") + spec_version = signed_dict.pop("spec_version") + expires_str = signed_dict.pop("expires") # Convert 'expires' TUF metadata string to a datetime object, which is # what the constructor expects and what we store. The inverse operation # is implemented in '_common_fields_to_dict'. @@ -332,10 +347,10 @@ def _common_fields_to_dict(self) -> Dict[str, Any]: """ return { - '_type': self._type, - 'version': self.version, - 'spec_version': self.spec_version, - 'expires': self.expires.isoformat() + 'Z' + "_type": self._type, + "version": self.version, + "spec_version": self.spec_version, + "expires": self.expires.isoformat() + "Z", } # Modification. @@ -382,14 +397,21 @@ class Root(Signed): } """ + # TODO: determine an appropriate value for max-args and fix places where # we violate that. 
This __init__ function takes 7 arguments, whereas the # default max-args value for pylint is 5 # pylint: disable=too-many-arguments def __init__( - self, _type: str, version: int, spec_version: str, - expires: datetime, consistent_snapshot: bool, - keys: Mapping[str, Any], roles: Mapping[str, Any]) -> None: + self, + _type: str, + version: int, + spec_version: str, + expires: datetime, + consistent_snapshot: bool, + keys: Mapping[str, Any], + roles: Mapping[str, Any], + ) -> None: super().__init__(_type, version, spec_version, expires) # TODO: Add classes for keys and roles self.consistent_snapshot = consistent_snapshot @@ -397,39 +419,42 @@ def __init__( self.roles = roles @classmethod - def from_dict(cls, root_dict: Mapping[str, Any]) -> 'Root': + def from_dict(cls, root_dict: Mapping[str, Any]) -> "Root": """Creates Root object from its dict representation. """ common_args = cls._common_fields_from_dict(root_dict) - consistent_snapshot = root_dict.pop('consistent_snapshot') - keys = root_dict.pop('keys') - roles = root_dict.pop('roles') + consistent_snapshot = root_dict.pop("consistent_snapshot") + keys = root_dict.pop("keys") + roles = root_dict.pop("roles") return cls(*common_args, consistent_snapshot, keys, roles) def to_dict(self) -> Dict[str, Any]: """Returns the dict representation of self. """ root_dict = self._common_fields_to_dict() - root_dict.update({ - 'consistent_snapshot': self.consistent_snapshot, - 'keys': self.keys, - 'roles': self.roles - }) + root_dict.update( + { + "consistent_snapshot": self.consistent_snapshot, + "keys": self.keys, + "roles": self.roles, + } + ) return root_dict # Update key for a role. - def add_key(self, role: str, keyid: str, - key_metadata: Mapping[str, Any]) -> None: + def add_key( + self, role: str, keyid: str, key_metadata: Mapping[str, Any] + ) -> None: """Adds new key for 'role' and updates the key store. 
""" - if keyid not in self.roles[role]['keyids']: - self.roles[role]['keyids'].append(keyid) + if keyid not in self.roles[role]["keyids"]: + self.roles[role]["keyids"].append(keyid) self.keys[keyid] = key_metadata # Remove key for a role. def remove_key(self, role: str, keyid: str) -> None: """Removes key for 'role' and updates the key store. """ - if keyid in self.roles[role]['keyids']: - self.roles[role]['keyids'].remove(keyid) + if keyid in self.roles[role]["keyids"]: + self.roles[role]["keyids"].remove(keyid) for keyinfo in self.roles.values(): - if keyid in keyinfo['keyids']: + if keyid in keyinfo["keyids"]: return del self.keys[keyid] @@ -454,36 +479,41 @@ class Timestamp(Signed): } """ + def __init__( - self, _type: str, version: int, spec_version: str, - expires: datetime, meta: Mapping[str, Any]) -> None: + self, + _type: str, + version: int, + spec_version: str, + expires: datetime, + meta: Mapping[str, Any], + ) -> None: super().__init__(_type, version, spec_version, expires) # TODO: Add class for meta self.meta = meta @classmethod - def from_dict(cls, timestamp_dict: Mapping[str, Any]) -> 'Timestamp': + def from_dict(cls, timestamp_dict: Mapping[str, Any]) -> "Timestamp": """Creates Timestamp object from its dict representation. """ common_args = cls._common_fields_from_dict(timestamp_dict) - meta = timestamp_dict.pop('meta') + meta = timestamp_dict.pop("meta") return cls(*common_args, meta) def to_dict(self) -> Dict[str, Any]: """Returns the dict representation of self. """ timestamp_dict = self._common_fields_to_dict() - timestamp_dict.update({ - 'meta': self.meta - }) + timestamp_dict.update({"meta": self.meta}) return timestamp_dict # Modification. - def update(self, version: int, length: int, - hashes: Mapping[str, Any]) -> None: + def update( + self, version: int, length: int, hashes: Mapping[str, Any] + ) -> None: """Assigns passed info about snapshot metadata to meta dict. 
""" - self.meta['snapshot.json'] = { - 'version': version, - 'length': length, - 'hashes': hashes + self.meta["snapshot.json"] = { + "version": version, + "length": length, + "hashes": hashes, } @@ -513,41 +543,49 @@ class Snapshot(Signed): } """ + def __init__( - self, _type: str, version: int, spec_version: str, - expires: datetime, meta: Mapping[str, Any]) -> None: + self, + _type: str, + version: int, + spec_version: str, + expires: datetime, + meta: Mapping[str, Any], + ) -> None: super().__init__(_type, version, spec_version, expires) # TODO: Add class for meta self.meta = meta @classmethod - def from_dict(cls, snapshot_dict: Mapping[str, Any]) -> 'Snapshot': + def from_dict(cls, snapshot_dict: Mapping[str, Any]) -> "Snapshot": """Creates Snapshot object from its dict representation. """ common_args = cls._common_fields_from_dict(snapshot_dict) - meta = snapshot_dict.pop('meta') + meta = snapshot_dict.pop("meta") return cls(*common_args, meta) def to_dict(self) -> Dict[str, Any]: """Returns the dict representation of self. """ snapshot_dict = self._common_fields_to_dict() - snapshot_dict.update({ - 'meta': self.meta - }) + snapshot_dict.update({"meta": self.meta}) return snapshot_dict # Modification. def update( - self, rolename: str, version: int, length: Optional[int] = None, - hashes: Optional[Mapping[str, Any]] = None) -> None: + self, + rolename: str, + version: int, + length: Optional[int] = None, + hashes: Optional[Mapping[str, Any]] = None, + ) -> None: """Assigns passed (delegated) targets role info to meta dict. 
""" - metadata_fn = f'{rolename}.json' + metadata_fn = f"{rolename}.json" - self.meta[metadata_fn] = {'version': version} + self.meta[metadata_fn] = {"version": version} if length is not None: - self.meta[metadata_fn]['length'] = length + self.meta[metadata_fn]["length"] = length if hashes is not None: - self.meta[metadata_fn]['hashes'] = hashes + self.meta[metadata_fn]["hashes"] = hashes class Targets(Signed): @@ -603,14 +641,19 @@ class Targets(Signed): } """ + # TODO: determine an appropriate value for max-args and fix places where # we violate that. This __init__ function takes 7 arguments, whereas the # default max-args value for pylint is 5 # pylint: disable=too-many-arguments def __init__( - self, _type: str, version: int, spec_version: str, - expires: datetime, targets: Mapping[str, Any], - delegations: Mapping[str, Any] + self, + _type: str, + version: int, + spec_version: str, + expires: datetime, + targets: Mapping[str, Any], + delegations: Mapping[str, Any], ) -> None: super().__init__(_type, version, spec_version, expires) # TODO: Add class for meta @@ -618,20 +661,22 @@ def __init__( self.delegations = delegations @classmethod - def from_dict(cls, targets_dict: Mapping[str, Any]) -> 'Targets': + def from_dict(cls, targets_dict: Mapping[str, Any]) -> "Targets": """Creates Targets object from its dict representation. """ common_args = cls._common_fields_from_dict(targets_dict) - targets = targets_dict.pop('targets') - delegations = targets_dict.pop('delegations') + targets = targets_dict.pop("targets") + delegations = targets_dict.pop("delegations") return cls(*common_args, targets, delegations) def to_dict(self) -> Dict[str, Any]: """Returns the dict representation of self. """ targets_dict = self._common_fields_to_dict() - targets_dict.update({ - 'targets': self.targets, - 'delegations': self.delegations, - }) + targets_dict.update( + { + "targets": self.targets, + "delegations": self.delegations, + } + ) return targets_dict # Modification. 
diff --git a/tuf/api/pylintrc b/tuf/api/pylintrc index badef7613d..409a96149f 100644 --- a/tuf/api/pylintrc +++ b/tuf/api/pylintrc @@ -1,12 +1,53 @@ -[MESSAGE_CONTROL] -disable=fixme +# Minimal pylint configuration file for Secure Systems Lab Python Style Guide: +# https://github.com/secure-systems-lab/code-style-guidelines +# +# Based on Google Python Style Guide pylintrc and pylint defaults: +# https://google.github.io/styleguide/pylintrc +# http://pylint.pycqa.org/en/latest/technical_reference/features.html + +[MESSAGES CONTROL] +# Disable the message, report, category or checker with the given id(s). +# NOTE: To keep this config as short as possible we only disable checks that +# are currently in conflict with our code. If new code displeases the linter +# (for good reasons) consider updating this config file, or disable checks with +# 'pylint: disable=XYZ' comments. +disable=fixme, + too-few-public-methods, + too-many-arguments, [BASIC] -good-names=e +good-names=i,j,k,v,e,f,fn,fp,_type +# Regexes for allowed names are copied from the Google pylintrc +# NOTE: Pylint captures regex name groups such as 'snake_case' or 'camel_case'. +# If there are multiple groups it enforces the prevalent naming style inside +# each module. Names in the exempt capturing group are ignored. 
+function-rgx=^(?:(?P<exempt>setUp|tearDown|setUpModule|tearDownModule)|(?P<camel_case>_?[A-Z][a-zA-Z0-9]*)|(?P<snake_case>_?[a-z][a-z0-9_]*))$ +method-rgx=(?x)^(?:(?P<exempt>_[a-z0-9_]+__|runTest|setUp|tearDown|setUpTestCase|tearDownTestCase|setupSelf|tearDownClass|setUpClass|(test|assert)_*[A-Z0-9][a-zA-Z0-9_]*|next)|(?P<camel_case>_{0,2}[A-Z][a-zA-Z0-9_]*)|(?P<snake_case>_{0,2}[a-z][a-z0-9_]*))$ +argument-rgx=^[a-z][a-z0-9_]*$ +attr-rgx=^_{0,2}[a-z][a-z0-9_]*$ +class-attribute-rgx=^(_?[A-Z][A-Z0-9_]*|__[a-z0-9_]+__|_?[a-z][a-z0-9_]*)$ +class-rgx=^_?[A-Z][a-zA-Z0-9]*$ +const-rgx=^(_?[A-Z][A-Z0-9_]*|__[a-z0-9_]+__|_?[a-z][a-z0-9_]*)$ +inlinevar-rgx=^[a-z][a-z0-9_]*$ +module-rgx=^(_?[a-z][a-z0-9_]*|__init__)$ +no-docstring-rgx=(__.*__|main|test.*|.*test|.*Test)$ +variable-rgx=^[a-z][a-z0-9_]*$ +docstring-min-length=10 [FORMAT] +ignore-long-lines=(?x)( + ^\s*(\#\ )?<?https?://\S+>?$| + ^\s*(from\s+\S+\s+)?import\s+.+$) indent-string=" " -max-line-length=79 +indent-after-paren=4 +max-line-length=80 +single-line-if-stmt=yes + +[LOGGING] +logging-format-style=new + +[MISCELLANEOUS] +notes=TODO -[DESIGN] -min-public-methods=0 +[STRING] +check-quote-consistency=yes diff --git a/tuf/api/serialization/json.py b/tuf/api/serialization/json.py index 215a0ad790..3c7828ae9f 100644 --- a/tuf/api/serialization/json.py +++ b/tuf/api/serialization/json.py @@ -18,11 +18,13 @@ # creating default de/serializers there (see metadata local scope imports). # NOTE: A less desirable alternative would be to add more abstraction layers. 
from tuf.api.metadata import Metadata, Signed -from tuf.api.serialization import (MetadataSerializer, - MetadataDeserializer, - SignedSerializer, - SerializationError, - DeserializationError) +from tuf.api.serialization import ( + DeserializationError, + MetadataDeserializer, + MetadataSerializer, + SerializationError, + SignedSerializer, +) class JSONDeserializer(MetadataDeserializer): @@ -34,7 +36,7 @@ def deserialize(self, raw_data: bytes) -> Metadata: json_dict = json.loads(raw_data.decode("utf-8")) metadata_obj = Metadata.from_dict(json_dict) - except Exception as e: # pylint: disable=broad-except + except Exception as e: # pylint: disable=broad-except raise DeserializationError from e return metadata_obj @@ -48,20 +50,23 @@ class JSONSerializer(MetadataSerializer): 'serialize' should be compact by excluding whitespace. """ + def __init__(self, compact: bool = False) -> None: self.compact = compact def serialize(self, metadata_obj: Metadata) -> bytes: """Serialize Metadata object into utf-8 encoded JSON bytes. """ try: - indent = (None if self.compact else 1) - separators = ((',', ':') if self.compact else (',', ': ')) - json_bytes = json.dumps(metadata_obj.to_dict(), - indent=indent, - separators=separators, - sort_keys=True).encode("utf-8") - - except Exception as e: # pylint: disable=broad-except + indent = None if self.compact else 1 + separators = (",", ":") if self.compact else (",", ": ") + json_bytes = json.dumps( + metadata_obj.to_dict(), + indent=indent, + separators=separators, + sort_keys=True, + ).encode("utf-8") + + except Exception as e: # pylint: disable=broad-except raise SerializationError from e return json_bytes @@ -78,7 +83,7 @@ def serialize(self, signed_obj: Signed) -> bytes: signed_dict = signed_obj.to_dict() canonical_bytes = encode_canonical(signed_dict).encode("utf-8") - except Exception as e: # pylint: disable=broad-except + except Exception as e: # pylint: disable=broad-except raise SerializationError from e return canonical_bytes