Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
36 changes: 35 additions & 1 deletion tests/test_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
import tuf.exceptions
from tuf.api.metadata import (
Metadata,
Root,
Snapshot,
Timestamp,
Targets,
Expand Down Expand Up @@ -98,6 +99,7 @@ def tearDownClass(cls):

def test_generic_read(self):
for metadata, inner_metadata_cls in [
('root', Root),
('snapshot', Snapshot),
('timestamp', Timestamp),
('targets', Targets)]:
Expand Down Expand Up @@ -144,7 +146,7 @@ def test_compact_json(self):


def test_read_write_read_compare(self):
for metadata in ['snapshot', 'timestamp', 'targets']:
for metadata in ['root', 'snapshot', 'timestamp', 'targets']:
path = os.path.join(self.repo_dir, 'metadata', metadata + '.json')
metadata_obj = Metadata.from_file(path)

Expand Down Expand Up @@ -258,6 +260,18 @@ def test_metadata_snapshot(self):
snapshot.signed.update('role1', 2, 123, hashes)
self.assertEqual(snapshot.signed.meta, fileinfo)

# Update only version. Length and hashes are optional.
snapshot.signed.update('role2', 3)
fileinfo['role2.json'] = {'version': 3}
self.assertEqual(snapshot.signed.meta, fileinfo)

# Test from_dict and to_dict without hashes and length.
snapshot_dict = snapshot.to_dict()
test_dict = snapshot_dict['signed'].copy()
del test_dict['meta']['role1.json']['length']
del test_dict['meta']['role1.json']['hashes']
snapshot = Snapshot.from_dict(test_dict)
self.assertEqual(snapshot_dict['signed'], snapshot.to_dict())

def test_metadata_timestamp(self):
timestamp_path = os.path.join(
Expand Down Expand Up @@ -293,6 +307,18 @@ def test_metadata_timestamp(self):
timestamp.signed.update(2, 520, hashes)
self.assertEqual(timestamp.signed.meta['snapshot.json'], fileinfo)

# Test from_dict and to_dict without hashes and length.
timestamp_dict = timestamp.to_dict()
test_dict = timestamp_dict['signed'].copy()
del test_dict['meta']['snapshot.json']['length']
del test_dict['meta']['snapshot.json']['hashes']
timestamp_test = Timestamp.from_dict(test_dict)
self.assertEqual(timestamp_dict['signed'], timestamp_test.to_dict())

# Update only version. Length and hashes are optional.
timestamp.signed.update(3)
fileinfo = {'version': 3}
self.assertEqual(timestamp.signed.meta['snapshot.json'], fileinfo)

def test_key_class(self):
keys = {
Expand Down Expand Up @@ -419,6 +445,14 @@ def test_metadata_targets(self):
# Verify that data is updated
self.assertEqual(targets.signed.targets[filename], fileinfo)

# Test from_dict/to_dict Targets without delegations
targets_dict = targets.to_dict()
del targets_dict["signed"]["delegations"]
tmp_dict = targets_dict["signed"].copy()
targets_obj = Targets.from_dict(tmp_dict)
tar_d = targets_obj.to_dict()
self.assertEqual(targets_dict["signed"], targets_obj.to_dict())

def setup_dict_with_unrecognized_field(self, file_path, field, value):
json_dict = {}
with open(file_path) as f:
Expand Down
103 changes: 54 additions & 49 deletions tuf/api/metadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -511,16 +511,19 @@ class Root(Signed):
supports consistent snapshots.
keys: A dictionary that contains a public key store used to verify
top level roles metadata signatures::
{
'<KEYID>': <Key instance>,
...
},

{
'<KEYID>': <Key instance>,
...
},

roles: A dictionary that contains a list of signing keyids and
a signature threshold for each top level role::
{
'<ROLE>': <Role instance>,
...
}

{
'<ROLE>': <Role instance>,
...
}

"""

Expand Down Expand Up @@ -612,7 +615,7 @@ class Timestamp(Signed):
'<HASH ALGO 1>': '<SNAPSHOT METADATA FILE HASH 1>',
'<HASH ALGO 2>': '<SNAPSHOT METADATA FILE HASH 2>',
...
}
} // optional
}
}

Expand Down Expand Up @@ -648,14 +651,19 @@ def to_dict(self) -> Dict[str, Any]:

# Modification.
def update(
    self,
    version: int,
    length: Optional[int] = None,
    hashes: Optional[Mapping[str, Any]] = None,
) -> None:
    """Assigns passed info about snapshot metadata to meta dict.

    Arguments:
        version: Version number of the snapshot metadata.
        length: Optional length in bytes of the snapshot metadata file.
        hashes: Optional dict mapping hash algorithm names to digests of
            the snapshot metadata file.
    """
    # Build the fileinfo once and assign it in a single step, avoiding
    # repeated lookups of the "snapshot.json" key. "length" and "hashes"
    # are optional and omitted entirely when not provided.
    fileinfo: Dict[str, Any] = {"version": version}
    if length is not None:
        fileinfo["length"] = length
    if hashes is not None:
        fileinfo["hashes"] = hashes
    self.meta["snapshot.json"] = fileinfo
Comment on lines +660 to +666
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

nit: Not a huge fan of three separate lookups of "snapshot.json" key... it's not a major deal here, just pointing it out

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I imagine this would be something we would change when we fix #1333.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I don't think that's related: you don't need more than a single lookup even now but it is a small detail



class Snapshot(Signed):
Expand Down Expand Up @@ -755,34 +763,34 @@ class Targets(Signed):
roles and public key store used to verify their metadata
signatures::

{
'keys' : {
'<KEYID>': {
'keytype': '<KEY TYPE>',
'scheme': '<KEY SCHEME>',
'keyid_hash_algorithms': [
'<HASH ALGO 1>',
'<HASH ALGO 2>'
...
],
'keyval': {
'public': '<PUBLIC KEY HEX REPRESENTATION>'
}
{
'keys' : {
'<KEYID>': {
'keytype': '<KEY TYPE>',
'scheme': '<KEY SCHEME>',
'keyid_hash_algorithms': [
'<HASH ALGO 1>',
'<HASH ALGO 2>'
...
],
'keyval': {
'public': '<PUBLIC KEY HEX REPRESENTATION>'
}
},
...
},
'roles': [
{
'name': '<ROLENAME>',
'keyids': ['<SIGNING KEY KEYID>', ...],
'threshold': <SIGNATURE THRESHOLD>,
'terminating': <TERMINATING BOOLEAN>,
'path_hash_prefixes': ['<HEX DIGEST>', ... ], // or
'paths' : ['PATHPATTERN', ... ],
},
...
},
'roles': [
{
'name': '<ROLENAME>',
'keyids': ['<SIGNING KEY KEYID>', ...],
'threshold': <SIGNATURE THRESHOLD>,
'terminating': <TERMINATING BOOLEAN>,
'path_hash_prefixes': ['<HEX DIGEST>', ... ], // or
'paths' : ['PATHPATTERN', ... ],
},
...
]
}
]
}

"""

Expand All @@ -798,7 +806,7 @@ def __init__(
spec_version: str,
expires: datetime,
targets: Mapping[str, Any],
delegations: Mapping[str, Any],
delegations: Optional[Mapping[str, Any]] = None,
unrecognized_fields: Optional[Mapping[str, Any]] = None,
) -> None:
super().__init__(version, spec_version, expires, unrecognized_fields)
Expand All @@ -811,19 +819,16 @@ def from_dict(cls, targets_dict: Mapping[str, Any]) -> "Targets":
"""Creates Targets object from its dict representation."""
common_args = cls._common_fields_from_dict(targets_dict)
targets = targets_dict.pop("targets")
delegations = targets_dict.pop("delegations")
delegations = targets_dict.pop("delegations", None)
# All fields left in the targets_dict are unrecognized.
return cls(*common_args, targets, delegations, targets_dict)

def to_dict(self) -> Dict[str, Any]:
    """Returns the dict representation of self."""
    targets_dict = self._common_fields_to_dict()
    targets_dict["targets"] = self.targets
    # "delegations" is optional: omit the key entirely when there are no
    # delegations, rather than serializing a null/empty value.
    if self.delegations:
        targets_dict["delegations"] = self.delegations
    return targets_dict

# Modification.
Expand Down