Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
111 changes: 107 additions & 4 deletions tests/test_metadata_serialization.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,11 +12,13 @@
import unittest
import copy

from typing import Dict, Callable
from typing import Dict, Callable, Optional, Mapping, Any
from datetime import datetime

from tests import utils

from tuf.api.metadata import (
Signed,
Root,
Snapshot,
Timestamp,
Expand Down Expand Up @@ -50,6 +52,44 @@ def wrapper(test_cls: "TestSerialization"):

class TestSerialization(unittest.TestCase):

# Snapshot instances with meta = {} are valid, but for a full valid
# repository it's required that meta has at least one element inside it.
invalid_signed: DataSet = {
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

There are a lot of test cases here that are unlikely to be useful (e.g., five invalid spec versions is overkill), but since the tests are so fast... I guess no harm done?

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I removed one of the cases (the one-digit spec_version); the others seem relevant to the checks we are making.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

OK, but do keep in mind that the idea isn't to test the lines of code that we happen to have in there right now. The idea is to have test data that covers as much of the possible input space as is practically useful. Since we can't cover 100% of the possibilities, we inevitably make decisions about what's important: we test the inputs we think could be interesting.

"no _type": '{"spec_version": "1.0.0", "expires": "2030-01-01T00:00:00Z", "meta": {}}',
"no spec_version": '{"_type": "signed", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}',
"no version": '{"_type": "signed", "spec_version": "1.0.0", "expires": "2030-01-01T00:00:00Z", "meta": {}}',
"no expires": '{"_type": "signed", "spec_version": "1.0.0", "version": 1, "meta": {}}',
"empty str _type":
'{"_type": "", "spec_version": "1.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}',
"empty str spec_version":
'{"_type": "signed", "spec_version": "", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}',
"_type wrong type":
'{"_type": "foo", "spec_version": "1.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}',
"version wrong type":
'{"_type": "signed", "spec_version": "1.0.0", "version": "a", "expires": "2030-01-01T00:00:00Z", "meta": {}}',
"invalid spec_version str":
'{"_type": "signed", "spec_version": "abc", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}',
"two digit spec_version":
'{"_type": "signed", "spec_version": "1.2.a", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}',
"no digit spec_version":
'{"_type": "signed", "spec_version": "a.b.c", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}',
"different major spec_version":
'{"_type": "signed", "spec_version": "0.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}',
"version 0":
'{"_type": "signed", "spec_version": "1.0.0", "version": 0, "expires": "2030-01-01T00:00:00Z", "meta": {}}',
"version below 0":
'{"_type": "signed", "spec_version": "1.0.0", "version": -1, "expires": "2030-01-01T00:00:00Z", "meta": {}}',
"wrong datetime string":
'{"_type": "signed", "spec_version": "1.0.0", "version": 1, "expires": "abc", "meta": {}}',
}

@run_sub_tests_with_dataset(invalid_signed)
def test_invalid_signed_serialization(self, test_case_data: str):
    """Assert that invalid Signed metadata fails to deserialize.

    test_case_data is one JSON string from the invalid_signed dataset
    (a serialized Signed dict with a missing or malformed field);
    Snapshot.from_dict must reject it with KeyError, ValueError or
    TypeError depending on which field is broken.
    """
    case_dict = json.loads(test_case_data)
    with self.assertRaises((KeyError, ValueError, TypeError)):
        # deepcopy so the failure report can still show the original dict
        Snapshot.from_dict(copy.deepcopy(case_dict))


valid_keys: DataSet = {
"all": '{"keytype": "rsa", "scheme": "rsassa-pss-sha256", \
"keyval": {"public": "foo"}}',
Expand All @@ -60,12 +100,45 @@ class TestSerialization(unittest.TestCase):
}

@run_sub_tests_with_dataset(valid_keys)
def test_key_serialization(self, test_case_data: str):
def test_valid_key_serialization(self, test_case_data: str):
case_dict = json.loads(test_case_data)
key = Key.from_dict("id", copy.copy(case_dict))
self.assertDictEqual(case_dict, key.to_dict())


# Key dicts that Key.from_dict must reject: each case either omits a
# required field (expect KeyError) or gives a field the wrong type
# (expect TypeError).  The "keyid" entry is popped by the test and
# passed to from_dict as a separate argument.
invalid_keys: DataSet = {
    "no keyid": '{"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "abc"}}',
    "no keytype": '{"keyid": "id", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}',
    "no scheme": '{"keyid": "id", "keytype": "rsa", "keyval": {"public": "foo"}}',
    "no keyval": '{"keyid": "id", "keytype": "rsa", "scheme": "rsassa-pss-sha256"}',
    "keyid wrong type": '{"keyid": 1, "keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "abc"}}',
    "keytype wrong type": '{"keyid": "id", "keytype": 1, "scheme": "rsassa-pss-sha256", "keyval": {"public": "abc"}}',
    "scheme wrong type": '{"keyid": "id", "keytype": "rsa", "scheme": 1, "keyval": {"public": "abc"}}',
    "keyval wrong type": '{"keyid": "id", "keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": 1}',
}

@run_sub_tests_with_dataset(invalid_keys)
def test_invalid_key_serialization(self, test_case_data: str):
    """Assert that invalid Key metadata fails to deserialize.

    test_case_data is one JSON string from the invalid_keys dataset;
    Key.from_dict must reject it with TypeError or KeyError.
    """
    case_dict = json.loads(test_case_data)
    # Pop "keyid" OUTSIDE the assertRaises block: in the "no keyid" case
    # a bare pop() would raise KeyError before Key.from_dict is even
    # called, making that subtest pass vacuously.  Defaulting to None
    # instead forces Key.from_dict itself to reject the non-str keyid
    # (it raises TypeError for wrong attribute types).
    keyid = case_dict.pop("keyid", None)
    with self.assertRaises((TypeError, KeyError)):
        Key.from_dict(keyid, copy.copy(case_dict))

# Role dicts that Role.from_dict must reject: missing required fields
# (KeyError), a non-int threshold (TypeError), a threshold below 1 or
# duplicate keyids (ValueError).
invalid_roles: DataSet = {
    "no threshold": '{"keyids": ["keyid"]}',
    "no keyids": '{"threshold": 3}',
    "wrong threshold type": '{"keyids": ["keyid"], "threshold": "a"}',
    "threshold below 1": '{"keyids": ["keyid"], "threshold": 0}',
    "duplicate keyids": '{"keyids": ["keyid", "keyid"], "threshold": 3}',
}

@run_sub_tests_with_dataset(invalid_roles)
def test_invalid_role_serialization(self, test_case_data: str):
    """Assert that invalid Role metadata fails to deserialize.

    test_case_data is one JSON string from the invalid_roles dataset;
    Role.from_dict must reject it with KeyError, TypeError or
    ValueError.  (The annotation is str — each subtest receives a
    single serialized dict, not the whole dataset mapping.)
    """
    case_dict = json.loads(test_case_data)
    with self.assertRaises((KeyError, TypeError, ValueError)):
        Role.from_dict(copy.deepcopy(case_dict))


valid_roles: DataSet = {
"all": '{"keyids": ["keyid"], "threshold": 3}',
"unrecognized field": '{"keyids": ["keyid"], "threshold": 3, "foo": "bar"}',
Expand Down Expand Up @@ -102,12 +175,28 @@ def test_root_serialization(self, test_case_data: str):
root = Root.from_dict(copy.deepcopy(case_dict))
self.assertDictEqual(case_dict, root.to_dict())


# MetaFile dicts that MetaFile.from_dict must reject: a non-int length
# (TypeError), a non-positive length (ValueError), and empty or
# malformed "hashes" values.
invalid_metafiles: DataSet = {
    "wrong length type": '{"version": 1, "length": "a", "hashes": {"sha256" : "abc"}}',
    "length 0": '{"version": 1, "length": 0, "hashes": {"sha256" : "abc"}}',
    "length below 0": '{"version": 1, "length": -1, "hashes": {"sha256" : "abc"}}',
    "empty hashes dict": '{"version": 1, "length": 1, "hashes": {}}',
    "hashes wrong type": '{"version": 1, "length": 1, "hashes": 1}',
    "hashes values wrong type": '{"version": 1, "length": 1, "hashes": {"sha256": 1}}',
}

@run_sub_tests_with_dataset(invalid_metafiles)
def test_invalid_metafile_serialization(self, test_case_data: str):
    """Assert that invalid MetaFile metadata fails to deserialize.

    test_case_data is one JSON string from the invalid_metafiles
    dataset; MetaFile.from_dict must reject it with TypeError,
    ValueError or AttributeError (the latter when "hashes" is not a
    dict at all).
    """
    case_dict = json.loads(test_case_data)
    with self.assertRaises((TypeError, ValueError, AttributeError)):
        MetaFile.from_dict(copy.deepcopy(case_dict))


valid_metafiles: DataSet = {
"all": '{"hashes": {"sha256" : "abc"}, "length": 12, "version": 1}',
"no length": '{"hashes": {"sha256" : "abc"}, "version": 1 }',
"no hashes": '{"length": 12, "version": 1}',
"unrecognized field": '{"hashes": {"sha256" : "abc"}, "length": 12, "version": 1, \
"foo": "bar"}',
"unrecognized field": '{"hashes": {"sha256" : "abc"}, "length": 12, "version": 1, "foo": "bar"}',
}

@run_sub_tests_with_dataset(valid_metafiles)
Expand Down Expand Up @@ -181,6 +270,20 @@ def test_delegation_serialization(self, test_case_data: str):
self.assertDictEqual(case_dict, delegation.to_dict())


# TargetFile dicts that TargetFile.from_dict must reject; both fields
# are mandatory, so a missing one raises KeyError.
invalid_targetfiles: DataSet = {
    "no hashes": '{"length": 1}',
    "no length": '{"hashes": {"sha256": "abc"}}'
    # The remaining cases are the same as for invalid_hashes and
    # invalid_length datasets.
}

@run_sub_tests_with_dataset(invalid_targetfiles)
def test_invalid_targetfile_serialization(self, test_case_data: str):
    """Assert that invalid TargetFile metadata fails to deserialize.

    test_case_data is one JSON string from the invalid_targetfiles
    dataset; every case omits a mandatory field, so
    TargetFile.from_dict must raise KeyError.
    """
    case_dict = json.loads(test_case_data)
    with self.assertRaises(KeyError):
        TargetFile.from_dict(copy.deepcopy(case_dict))


valid_targetfiles: DataSet = {
"all": '{"length": 12, "hashes": {"sha256" : "abc"}, \
"custom" : {"foo": "bar"} }',
Expand Down
2 changes: 1 addition & 1 deletion tuf/api/metadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -427,7 +427,7 @@ def __init__(
if not all(
isinstance(at, str) for at in [keyid, keytype, scheme]
) or not isinstance(keyval, Dict):
raise ValueError("Unexpected Key attributes types!")
raise TypeError("Unexpected Key attributes types!")
self.keyid = keyid
self.keytype = keytype
self.scheme = scheme
Expand Down