From 8dc07ccd9a4f732bf3f6e63d515279d46f1e1bb8 Mon Sep 17 00:00:00 2001 From: Jussi Kukkonen Date: Mon, 11 Jan 2021 17:20:47 +0200 Subject: [PATCH 01/25] imports: Make 'download' imports vendoring-compatible Use "from tuf import " instead of "import tuf.": this makes it possible for vendoring tool to vendor tuf. Fix all references to in the code. Signed-off-by: Jussi Kukkonen --- tuf/client/updater.py | 8 ++++---- tuf/download.py | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/tuf/client/updater.py b/tuf/client/updater.py index 1897b1f599..c3d741f78b 100755 --- a/tuf/client/updater.py +++ b/tuf/client/updater.py @@ -131,7 +131,7 @@ import io import tuf -import tuf.download +from tuf import download import tuf.requests_fetcher import tuf.formats import tuf.settings @@ -1225,7 +1225,7 @@ def _check_file_length(self, file_object, trusted_file_length): Non-public method that ensures the length of 'file_object' is strictly equal to 'trusted_file_length'. This is a deliberately redundant implementation designed to complement - tuf.download._check_downloaded_length(). + download._check_downloaded_length(). file_object: @@ -1322,7 +1322,7 @@ def _get_target_file(self, target_filepath, file_length, file_hashes, for file_mirror in file_mirrors: try: - file_object = tuf.download.safe_download(file_mirror, + file_object = download.safe_download(file_mirror, file_length, self.fetcher) # Verify 'file_object' against the expected length and hashes. 
@@ -1520,7 +1520,7 @@ def _get_metadata_file(self, metadata_role, remote_filename, for file_mirror in file_mirrors: try: - file_object = tuf.download.unsafe_download(file_mirror, + file_object = download.unsafe_download(file_mirror, upperbound_filelength, self.fetcher) file_object.seek(0) diff --git a/tuf/download.py b/tuf/download.py index 2d946ef891..17142d2fb7 100755 --- a/tuf/download.py +++ b/tuf/download.py @@ -53,7 +53,7 @@ def safe_download(url, required_length, fetcher): Given the 'url' and 'required_length' of the desired file, open a connection to 'url', download it, and return the contents of the file. Also ensure the length of the downloaded file matches 'required_length' exactly. - tuf.download.unsafe_download() may be called if an upper download limit is + download.unsafe_download() may be called if an upper download limit is preferred. @@ -101,7 +101,7 @@ def unsafe_download(url, required_length, fetcher): Given the 'url' and 'required_length' of the desired file, open a connection to 'url', download it, and return the contents of the file. Also ensure the length of the downloaded file is up to 'required_length', and no larger. - tuf.download.safe_download() may be called if an exact download limit is + download.safe_download() may be called if an exact download limit is preferred. From 4b078b0975b379b21cc4b2508cc0ce34998a37c9 Mon Sep 17 00:00:00 2001 From: Jussi Kukkonen Date: Mon, 11 Jan 2021 19:10:28 +0200 Subject: [PATCH 02/25] imports: Make 'exceptions' imports vendoring-compatible Use "from tuf import " instead of "import tuf.": this makes it possible for vendoring tool to vendor tuf. Fix all references to in the code. 
Signed-off-by: Jussi Kukkonen --- tuf/api/metadata.py | 6 ++--- tuf/client/updater.py | 60 +++++++++++++++++++++--------------------- tuf/developer_tool.py | 11 ++++---- tuf/download.py | 8 +++--- tuf/formats.py | 3 ++- tuf/keydb.py | 9 ++++--- tuf/log.py | 6 ++--- tuf/repository_lib.py | 40 ++++++++++++++-------------- tuf/repository_tool.py | 24 ++++++++--------- tuf/roledb.py | 9 ++++--- tuf/scripts/client.py | 9 ++++--- tuf/scripts/repo.py | 29 ++++++++++---------- tuf/sig.py | 3 ++- 13 files changed, 112 insertions(+), 105 deletions(-) diff --git a/tuf/api/metadata.py b/tuf/api/metadata.py index f891388f4f..0bbe15412d 100644 --- a/tuf/api/metadata.py +++ b/tuf/api/metadata.py @@ -24,7 +24,7 @@ from securesystemslib.storage import FilesystemBackend, StorageBackendInterface from securesystemslib.util import persist_temp_file -import tuf.exceptions +from tuf import exceptions import tuf.formats from tuf.api.serialization import ( MetadataDeserializer, @@ -266,10 +266,10 @@ def verify( ) if not signatures_for_keyid: - raise tuf.exceptions.Error(f"no signature for key {key['keyid']}.") + raise exceptions.Error(f"no signature for key {key['keyid']}.") if len(signatures_for_keyid) > 1: - raise tuf.exceptions.Error( + raise exceptions.Error( f"{len(signatures_for_keyid)} signatures for key " f"{key['keyid']}, not sure which one to verify." 
) diff --git a/tuf/client/updater.py b/tuf/client/updater.py index c3d741f78b..dc5e625bdf 100755 --- a/tuf/client/updater.py +++ b/tuf/client/updater.py @@ -132,6 +132,7 @@ import tuf from tuf import download +from tuf import exceptions import tuf.requests_fetcher import tuf.formats import tuf.settings @@ -140,7 +141,6 @@ import tuf.mirrors import tuf.roledb import tuf.sig -import tuf.exceptions import securesystemslib.exceptions import securesystemslib.hash @@ -208,7 +208,7 @@ def __init__(self, map_file): self.map_file = securesystemslib.util.load_json_file(map_file) except (securesystemslib.exceptions.Error) as e: - raise tuf.exceptions.Error('Cannot load the map file: ' + str(e)) + raise exceptions.Error('Cannot load the map file: ' + str(e)) # Raise securesystemslib.exceptions.FormatError if the map file is # improperly formatted. @@ -309,14 +309,14 @@ def get_valid_targetinfo(self, target_filename, match_custom_field=True): continue else: - raise tuf.exceptions.UnknownTargetError('The repositories in the' + raise exceptions.UnknownTargetError('The repositories in the' ' mapping do not agree on the target, or none of them have' ' signed for the target, and "terminating" was set to True.') # If we are here, it means either there were no mappings, or none of the # mappings provided the target. 
logger.debug('Did not find valid targetinfo for ' + repr(target_filename)) - raise tuf.exceptions.UnknownTargetError('The repositories in the map' + raise exceptions.UnknownTargetError('The repositories in the map' ' file do not agree on the target, or none of them have signed' ' for the target.') @@ -336,7 +336,7 @@ def _verify_metadata_directories(self, repositories_directory): repository_name) if not os.path.isdir(repository_directory): - raise tuf.exceptions.Error('The metadata directory' + raise exceptions.Error('The metadata directory' ' for ' + repr(repository_name) + ' must exist' ' at ' + repr(repository_directory)) @@ -348,7 +348,7 @@ def _verify_metadata_directories(self, repositories_directory): repository_directory, 'metadata', 'current', 'root.json') if not os.path.isfile(root_file): - raise tuf.exceptions.Error( + raise exceptions.Error( 'The Root file must exist at ' + repr(root_file)) else: @@ -372,7 +372,7 @@ def _matching_targetinfo( targetinfo, updater = self._update_from_repository( repository_name, target_filename) - except (tuf.exceptions.UnknownTargetError, tuf.exceptions.Error): + except (exceptions.UnknownTargetError, exceptions.Error): continue valid_targetinfo[updater] = targetinfo @@ -535,7 +535,7 @@ def _update_from_repository(self, repository_name, target_filename): updater = self.get_updater(repository_name) if not updater: - raise tuf.exceptions.Error( + raise exceptions.Error( 'Cannot load updater for ' + repr(repository_name)) else: @@ -732,7 +732,7 @@ def __init__(self, repository_name, repository_mirrors, fetcher=None): # Ensure the repository metadata directory has been set. if tuf.settings.repositories_directory is None: - raise tuf.exceptions.RepositoryError('The TUF update client' + raise exceptions.RepositoryError('The TUF update client' ' module must specify the directory containing the local repository' ' files. 
"tuf.settings.repositories_directory" MUST be set.') @@ -742,14 +742,14 @@ def __init__(self, repository_name, repository_mirrors, fetcher=None): # raise MissingLocalRepository if the repo does not exist at all. if not os.path.exists(repository_directory): - raise tuf.exceptions.MissingLocalRepositoryError('Local repository ' + + raise exceptions.MissingLocalRepositoryError('Local repository ' + repr(repository_directory) + ' does not exist.') current_path = os.path.join(repository_directory, 'metadata', 'current') # Ensure the current path is valid/exists before saving it. if not os.path.exists(current_path): - raise tuf.exceptions.RepositoryError('Missing' + raise exceptions.RepositoryError('Missing' ' ' + repr(current_path) + '. This path must exist and, at a minimum,' ' contain the Root metadata file.') @@ -760,7 +760,7 @@ def __init__(self, repository_name, repository_mirrors, fetcher=None): # Ensure the previous path is valid/exists. if not os.path.exists(previous_path): - raise tuf.exceptions.RepositoryError('Missing ' + repr(previous_path) + '.' + raise exceptions.RepositoryError('Missing ' + repr(previous_path) + '.' ' This path MUST exist.') self.metadata_directory['previous'] = previous_path @@ -773,7 +773,7 @@ def __init__(self, repository_name, repository_mirrors, fetcher=None): # Raise an exception if the repository is missing the required 'root' # metadata. if 'root' not in self.metadata['current']: - raise tuf.exceptions.RepositoryError('No root of trust!' + raise exceptions.RepositoryError('No root of trust!' 
' Could not find the "root.json" file.') @@ -968,7 +968,7 @@ def _import_delegations(self, parent_role): tuf.keydb.add_key(key, repository_name=self.repository_name) - except tuf.exceptions.KeyAlreadyExistsError: + except exceptions.KeyAlreadyExistsError: pass except (securesystemslib.exceptions.FormatError, securesystemslib.exceptions.Error): @@ -989,7 +989,7 @@ def _import_delegations(self, parent_role): logger.debug('Adding delegated role: ' + str(rolename) + '.') tuf.roledb.add_role(rolename, roleinfo, self.repository_name) - except tuf.exceptions.RoleAlreadyExistsError: + except exceptions.RoleAlreadyExistsError: logger.warning('Role already exists: ' + rolename) except Exception: @@ -1062,7 +1062,7 @@ def refresh(self, unsafely_update_root_if_necessary=True): try: self._ensure_not_expired(root_metadata, 'root') - except tuf.exceptions.ExpiredMetadataError: + except exceptions.ExpiredMetadataError: # Raise 'tuf.exceptions.NoWorkingMirrorError' if a valid (not # expired, properly signed, and valid metadata) 'root.json' cannot be # installed. @@ -1148,7 +1148,7 @@ def neither_403_nor_404(mirror_error): version=next_version) # When we run into HTTP 403/404 error from ALL mirrors, break out of # loop, because the next root metadata file is most likely missing. - except tuf.exceptions.NoWorkingMirrorError as exception: + except exceptions.NoWorkingMirrorError as exception: for mirror_error in exception.mirror_errors.values(): # Otherwise, reraise the error, because it is not a simple HTTP # error. @@ -1254,7 +1254,7 @@ def _check_file_length(self, file_object, trusted_file_length): # ensures that a downloaded file strictly matches a known, or trusted, # file length. 
if observed_length != trusted_file_length: - raise tuf.exceptions.DownloadLengthMismatchError(trusted_file_length, + raise exceptions.DownloadLengthMismatchError(trusted_file_length, observed_length) else: @@ -1341,7 +1341,7 @@ def _get_target_file(self, target_filepath, file_length, file_hashes, logger.debug('Failed to update ' + repr(target_filepath) + ' from' ' all mirrors: ' + repr(file_mirror_errors)) - raise tuf.exceptions.NoWorkingMirrorError(file_mirror_errors) + raise exceptions.NoWorkingMirrorError(file_mirror_errors) @@ -1434,7 +1434,7 @@ def _verify_metadata_file(self, metadata_file_object, metadata_signable = securesystemslib.util.load_json_string(metadata) except Exception as exception: - raise tuf.exceptions.InvalidMetadataJSONError(exception) + raise exceptions.InvalidMetadataJSONError(exception) else: # Ensure the loaded 'metadata_signable' is properly formatted. Raise @@ -1547,7 +1547,7 @@ def _get_metadata_file(self, metadata_role, remote_filename, code_spec_minor_version = int(code_spec_version_split[1]) if metadata_spec_major_version != code_spec_major_version: - raise tuf.exceptions.UnsupportedSpecificationError( + raise exceptions.UnsupportedSpecificationError( 'Downloaded metadata that specifies an unsupported ' 'spec_version. This code supports major version number: ' + repr(code_spec_major_version) + '; however, the obtained ' @@ -1576,7 +1576,7 @@ def _get_metadata_file(self, metadata_role, remote_filename, # Verify that the downloaded version matches the version expected by # the caller. if version_downloaded != expected_version: - raise tuf.exceptions.BadVersionNumberError('Downloaded' + raise exceptions.BadVersionNumberError('Downloaded' ' version number: ' + repr(version_downloaded) + '. 
Version' ' number MUST be: ' + repr(expected_version)) @@ -1594,7 +1594,7 @@ def _get_metadata_file(self, metadata_role, remote_filename, self.metadata['current'][metadata_role]['version'] if version_downloaded < current_version: - raise tuf.exceptions.ReplayedMetadataError(metadata_role, + raise exceptions.ReplayedMetadataError(metadata_role, version_downloaded, current_version) except KeyError: @@ -1619,7 +1619,7 @@ def _get_metadata_file(self, metadata_role, remote_filename, else: logger.debug('Failed to update ' + repr(remote_filename) + ' from all' ' mirrors: ' + repr(file_mirror_errors)) - raise tuf.exceptions.NoWorkingMirrorError(file_mirror_errors) + raise exceptions.NoWorkingMirrorError(file_mirror_errors) @@ -1810,7 +1810,7 @@ def _update_metadata_if_changed(self, metadata_role, # Ensure the referenced metadata has been loaded. The 'root' role may be # updated without having 'snapshot' available. if referenced_metadata not in self.metadata['current']: - raise tuf.exceptions.RepositoryError('Cannot update' + raise exceptions.RepositoryError('Cannot update' ' ' + repr(metadata_role) + ' because ' + referenced_metadata + ' is' ' missing.') @@ -2281,7 +2281,7 @@ def _ensure_not_expired(self, metadata_object, metadata_rolename): if expires_timestamp <= current_time: message = 'Metadata '+repr(metadata_rolename)+' expired on ' + \ expires_datetime.ctime() + ' (UTC).' - raise tuf.exceptions.ExpiredMetadataError(message) + raise exceptions.ExpiredMetadataError(message) @@ -2478,7 +2478,7 @@ def _targets_of_role(self, rolename, targets=None, skip_refresh=False): logger.debug('Getting targets of role: ' + repr(rolename) + '.') if not tuf.roledb.role_exists(rolename, self.repository_name): - raise tuf.exceptions.UnknownRoleError(rolename) + raise exceptions.UnknownRoleError(rolename) # We do not need to worry about the target paths being trusted because # this is enforced before any new metadata is accepted. 
@@ -2580,7 +2580,7 @@ def targets_of_role(self, rolename='targets'): if not tuf.roledb.role_exists(rolename, self.repository_name): - raise tuf.exceptions.UnknownRoleError(rolename) + raise exceptions.UnknownRoleError(rolename) return self._targets_of_role(rolename, skip_refresh=True) @@ -2628,7 +2628,7 @@ def get_one_valid_targetinfo(self, target_filepath): target_filepath = target_filepath.replace('\\', '/') if target_filepath.startswith('/'): - raise tuf.exceptions.FormatError('The requested target file cannot' + raise exceptions.FormatError('The requested target file cannot' ' contain a leading path separator: ' + repr(target_filepath)) # Get target by looking at roles in order of priority tags. @@ -2636,7 +2636,7 @@ def get_one_valid_targetinfo(self, target_filepath): # Raise an exception if the target information could not be retrieved. if target is None: - raise tuf.exceptions.UnknownTargetError(repr(target_filepath) + ' not' + raise exceptions.UnknownTargetError(repr(target_filepath) + ' not' ' found.') # Otherwise, return the found target. 
diff --git a/tuf/developer_tool.py b/tuf/developer_tool.py index e3269b088b..c7c6d9e3fc 100755 --- a/tuf/developer_tool.py +++ b/tuf/developer_tool.py @@ -39,6 +39,7 @@ import json import tuf +from tuf import exceptions import tuf.formats import tuf.keydb import tuf.roledb @@ -378,7 +379,7 @@ def status(self): try: _check_role_keys(delegated_role, self.repository_name) - except tuf.exceptions.InsufficientKeysError: + except exceptions.InsufficientKeysError: insufficient_keys.append(delegated_role) continue @@ -407,7 +408,7 @@ def status(self): try: _check_role_keys(self.rolename, self.repository_name) - except tuf.exceptions.InsufficientKeysError as e: + except exceptions.InsufficientKeysError as e: logger.info(str(e)) return @@ -417,7 +418,7 @@ def status(self): self.repository_name) self._log_status(self.project_name, signable, self.repository_name) - except tuf.exceptions.UnsignedMetadataError as e: + except exceptions.UnsignedMetadataError as e: # This error is raised if the metadata has insufficient signatures to # meet the threshold. self._log_status(self.project_name, e.signable, self.repository_name) @@ -864,7 +865,7 @@ def load_project(project_directory, prefix='', new_targets_location=None, signable = securesystemslib.util.load_json_file(targets_metadata_path) try: tuf.formats.check_signable_object_format(signable) - except tuf.exceptions.UnsignedMetadataError: + except exceptions.UnsignedMetadataError: # Downgrade the error to a warning because a use case exists where # metadata may be generated unsigned on one machine and signed on another. 
logger.warning('Unsigned metadata object: ' + repr(signable)) @@ -977,7 +978,7 @@ def load_project(project_directory, prefix='', new_targets_location=None, try: tuf.keydb.add_key(key_object, repository_name=repository_name) - except tuf.exceptions.KeyAlreadyExistsError: + except exceptions.KeyAlreadyExistsError: pass for role in metadata_object['delegations']['roles']: diff --git a/tuf/download.py b/tuf/download.py index 17142d2fb7..5f93e4f57f 100755 --- a/tuf/download.py +++ b/tuf/download.py @@ -40,7 +40,7 @@ import six import tuf -import tuf.exceptions +from tuf import exceptions import tuf.formats # See 'log.py' to learn how logging is handled in TUF. @@ -299,13 +299,13 @@ def _check_downloaded_length(total_downloaded, required_length, logger.debug('Minimum average download speed: ' + repr(tuf.settings.MIN_AVERAGE_DOWNLOAD_SPEED)) if average_download_speed < tuf.settings.MIN_AVERAGE_DOWNLOAD_SPEED: - raise tuf.exceptions.SlowRetrievalError(average_download_speed) + raise exceptions.SlowRetrievalError(average_download_speed) else: logger.debug('Good average download speed: ' + repr(average_download_speed) + ' bytes per second') - raise tuf.exceptions.DownloadLengthMismatchError(required_length, total_downloaded) + raise exceptions.DownloadLengthMismatchError(required_length, total_downloaded) else: # We specifically disabled strict checking of required length, but we @@ -313,7 +313,7 @@ def _check_downloaded_length(total_downloaded, required_length, # Timestamp or Root metadata, for which we have no signed metadata; so, # we must guess a reasonable required_length for it. 
if average_download_speed < tuf.settings.MIN_AVERAGE_DOWNLOAD_SPEED: - raise tuf.exceptions.SlowRetrievalError(average_download_speed) + raise exceptions.SlowRetrievalError(average_download_speed) else: logger.debug('Good average download speed: ' + diff --git a/tuf/formats.py b/tuf/formats.py index dc51ba9c98..0c59168469 100755 --- a/tuf/formats.py +++ b/tuf/formats.py @@ -73,6 +73,7 @@ import securesystemslib.schema as SCHEMA import tuf +from tuf import exceptions import six @@ -1000,7 +1001,7 @@ def check_signable_object_format(signable): 'Unrecognized type ' + repr(role_type)), error) if not signable['signatures']: - raise tuf.exceptions.UnsignedMetadataError('Signable object of type ' + + raise exceptions.UnsignedMetadataError('Signable object of type ' + repr(role_type) + ' has no signatures ', signable) # 'securesystemslib.exceptions.FormatError' raised if 'signable' does not diff --git a/tuf/keydb.py b/tuf/keydb.py index bc306bac25..c4a85cffb1 100755 --- a/tuf/keydb.py +++ b/tuf/keydb.py @@ -44,6 +44,7 @@ import logging import copy +from tuf import exceptions import tuf.formats import six @@ -133,7 +134,7 @@ def create_keydb_from_root_metadata(root_metadata, repository_name='default'): # Although keyid duplicates should *not* occur (unique dict keys), log a # warning and continue. However, 'key_dict' may have already been # adding to the keydb elsewhere. - except tuf.exceptions.KeyAlreadyExistsError as e: # pragma: no cover + except exceptions.KeyAlreadyExistsError as e: # pragma: no cover logger.warning(e) continue @@ -287,7 +288,7 @@ def add_key(key_dict, keyid=None, repository_name='default'): # available in the key database before returning. 
keyid = key_dict['keyid'] if keyid in _keydb_dict[repository_name]: - raise tuf.exceptions.KeyAlreadyExistsError('Key: ' + keyid) + raise exceptions.KeyAlreadyExistsError('Key: ' + keyid) _keydb_dict[repository_name][keyid] = copy.deepcopy(key_dict) @@ -343,7 +344,7 @@ def get_key(keyid, repository_name='default'): return copy.deepcopy(_keydb_dict[repository_name][keyid]) except KeyError as error: - six.raise_from(tuf.exceptions.UnknownKeyError('Key: ' + keyid), error) + six.raise_from(exceptions.UnknownKeyError('Key: ' + keyid), error) @@ -396,7 +397,7 @@ def remove_key(keyid, repository_name='default'): del _keydb_dict[repository_name][keyid] else: - raise tuf.exceptions.UnknownKeyError('Key: ' + keyid) + raise exceptions.UnknownKeyError('Key: ' + keyid) diff --git a/tuf/log.py b/tuf/log.py index 8a6a84d20a..88182bcd91 100755 --- a/tuf/log.py +++ b/tuf/log.py @@ -74,7 +74,7 @@ import tuf import tuf.settings -import tuf.exceptions +from tuf import exceptions import securesystemslib.formats @@ -249,7 +249,7 @@ def set_filehandler_log_level(log_level=_DEFAULT_FILE_LOG_LEVEL): file_handler.setLevel(log_level) else: - raise tuf.exceptions.Error( + raise exceptions.Error( 'File handler has not been set. Enable file logging' ' before attempting to set its log level') @@ -418,7 +418,7 @@ def enable_file_logging(log_filename=tuf.settings.LOG_FILENAME): logger.addHandler(file_handler) else: - raise tuf.exceptions.Error( + raise exceptions.Error( 'The file handler has already been been set. 
A new file handler' ' can be set by first calling disable_file_logging()') diff --git a/tuf/repository_lib.py b/tuf/repository_lib.py index 1f64f66413..af2947bd5d 100644 --- a/tuf/repository_lib.py +++ b/tuf/repository_lib.py @@ -41,7 +41,7 @@ import tuf import tuf.formats -import tuf.exceptions +from tuf import exceptions import tuf.keydb import tuf.roledb import tuf.sig @@ -232,7 +232,7 @@ def should_write(): repository_name=repository_name) # Note that 'signable' is an argument to tuf.UnsignedMetadataError(). - raise tuf.exceptions.UnsignedMetadataError('Not enough' + raise exceptions.UnsignedMetadataError('Not enough' ' signatures for ' + repr(metadata_filename), signable) # 'rolename' is a delegated role or a top-level role that is partially @@ -305,12 +305,12 @@ def _check_role_keys(rolename, repository_name): # Raise an exception for an invalid threshold of public keys. if total_keyids < threshold: - raise tuf.exceptions.InsufficientKeysError(repr(rolename) + ' role contains' + raise exceptions.InsufficientKeysError(repr(rolename) + ' role contains' ' ' + repr(total_keyids) + ' / ' + repr(threshold) + ' public keys.') # Raise an exception for an invalid threshold of signing keys. 
if total_signatures == 0 and total_signing_keys < threshold: - raise tuf.exceptions.InsufficientKeysError(repr(rolename) + ' role contains' + raise exceptions.InsufficientKeysError(repr(rolename) + ' role contains' ' ' + repr(total_signing_keys) + ' / ' + repr(threshold) + ' signing keys.') @@ -341,7 +341,7 @@ def _remove_invalid_and_duplicate_signatures(signable, repository_name): try: key = tuf.keydb.get_key(keyid, repository_name=repository_name) - except tuf.exceptions.UnknownKeyError: + except exceptions.UnknownKeyError: signable['signatures'].remove(signature) continue @@ -501,7 +501,7 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): signable = securesystemslib.util.load_json_file(root_filename) try: tuf.formats.check_signable_object_format(signable) - except tuf.exceptions.UnsignedMetadataError: + except exceptions.UnsignedMetadataError: # Downgrade the error to a warning because a use case exists where # metadata may be generated unsigned on one machine and signed on another. logger.warning('Unsigned metadata object: ' + repr(signable)) @@ -541,7 +541,7 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): consistent_snapshot = root_metadata['consistent_snapshot'] except securesystemslib.exceptions.StorageError as error: - six.raise_from(tuf.exceptions.RepositoryError('Cannot load the required' + six.raise_from(exceptions.RepositoryError('Cannot load the required' ' root file: ' + repr(root_filename)), error) # Load 'timestamp.json'. A Timestamp role file without a version number is @@ -570,7 +570,7 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): repository_name=repository_name) except securesystemslib.exceptions.StorageError as error: - six.raise_from(tuf.exceptions.RepositoryError('Cannot load the Timestamp ' + six.raise_from(exceptions.RepositoryError('Cannot load the Timestamp ' 'file: ' + repr(timestamp_filename)), error) # Load 'snapshot.json'. 
A consistent snapshot.json must be calculated if @@ -588,7 +588,7 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): signable = securesystemslib.util.load_json_file(snapshot_filename) try: tuf.formats.check_signable_object_format(signable) - except tuf.exceptions.UnsignedMetadataError: + except exceptions.UnsignedMetadataError: # Downgrade the error to a warning because a use case exists where # metadata may be generated unsigned on one machine and signed on another. logger.warning('Unsigned metadata object: ' + repr(signable)) @@ -616,7 +616,7 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): repository_name=repository_name) except securesystemslib.exceptions.StorageError as error: - six.raise_from(tuf.exceptions.RepositoryError('The Snapshot file ' + six.raise_from(exceptions.RepositoryError('The Snapshot file ' 'cannot be loaded: '+ repr(snapshot_filename)), error) # Load 'targets.json'. A consistent snapshot of the Targets role must be @@ -630,7 +630,7 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): signable = securesystemslib.util.load_json_file(targets_filename) try: tuf.formats.check_signable_object_format(signable) - except tuf.exceptions.UnsignedMetadataError: + except exceptions.UnsignedMetadataError: # Downgrade the error to a warning because a use case exists where # metadata may be generated unsigned on one machine and signed on another. 
logger.warning('Unsigned metadata object: ' + repr(signable)) @@ -675,11 +675,11 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): try: tuf.keydb.add_key(key_object, keyid=None, repository_name=repository_name) - except tuf.exceptions.KeyAlreadyExistsError: + except exceptions.KeyAlreadyExistsError: pass except securesystemslib.exceptions.StorageError as error: - six.raise_from(tuf.exceptions.RepositoryError('The Targets file ' + six.raise_from(exceptions.RepositoryError('The Targets file ' 'can not be loaded: ' + repr(targets_filename)), error) return repository, consistent_snapshot @@ -1877,7 +1877,7 @@ def sign_metadata(metadata_object, keyids, filename, repository_name): # is not formatted correctly. try: tuf.formats.check_signable_object_format(signable) - except tuf.exceptions.UnsignedMetadataError: + except exceptions.UnsignedMetadataError: # Downgrade the error to a warning because a use case exists where # metadata may be generated unsigned on one machine and signed on another. logger.warning('Unsigned metadata object: ' + repr(signable)) @@ -2028,7 +2028,7 @@ def _log_status_of_top_level_roles(targets_directory, metadata_directory, try: _check_role_keys(rolename, repository_name) - except tuf.exceptions.InsufficientKeysError as e: + except exceptions.InsufficientKeysError as e: logger.info(str(e)) # Do the top-level roles contain a valid threshold of signatures? Top-level @@ -2053,7 +2053,7 @@ def _log_status_of_top_level_roles(targets_directory, metadata_directory, # 'tuf.exceptions.UnsignedMetadataError' raised if metadata contains an # invalid threshold of signatures. log the valid/threshold message, where # valid < threshold. 
- except tuf.exceptions.UnsignedMetadataError as e: + except exceptions.UnsignedMetadataError as e: _log_status('root', e.signable, repository_name) return @@ -2078,7 +2078,7 @@ def _log_status_of_top_level_roles(targets_directory, metadata_directory, repository_name=repository_name) _log_status('targets', signable, repository_name) - except tuf.exceptions.UnsignedMetadataError as e: + except exceptions.UnsignedMetadataError as e: _log_status('targets', e.signable, repository_name) return @@ -2104,7 +2104,7 @@ def _log_status_of_top_level_roles(targets_directory, metadata_directory, filenames, repository_name=repository_name) _log_status('snapshot', signable, repository_name) - except tuf.exceptions.UnsignedMetadataError as e: + except exceptions.UnsignedMetadataError as e: _log_status('snapshot', e.signable, repository_name) return @@ -2130,7 +2130,7 @@ def _log_status_of_top_level_roles(targets_directory, metadata_directory, False, filenames, repository_name=repository_name) _log_status('timestamp', signable, repository_name) - except tuf.exceptions.UnsignedMetadataError as e: + except exceptions.UnsignedMetadataError as e: _log_status('timestamp', e.signable, repository_name) return @@ -2227,7 +2227,7 @@ def create_tuf_client_directory(repository_directory, client_directory): if e.errno == errno.EEXIST: message = 'Cannot create a fresh client metadata directory: ' +\ repr(client_metadata_directory) + '. Already exists.' - raise tuf.exceptions.RepositoryError(message) + raise exceptions.RepositoryError(message) # Testing of non-errno.EEXIST exceptions have been verified on all # supported OSs. 
An unexpected exception (the '/' directory exists, rather diff --git a/tuf/repository_tool.py b/tuf/repository_tool.py index 1fe6a51e83..ab3a646076 100755 --- a/tuf/repository_tool.py +++ b/tuf/repository_tool.py @@ -47,7 +47,7 @@ import tuf.roledb import tuf.sig import tuf.log -import tuf.exceptions +from tuf import exceptions import tuf.repository_lib as repo_lib import securesystemslib.keys @@ -787,7 +787,7 @@ def add_verification_key(self, key, expires=None): try: tuf.keydb.add_key(key, repository_name=self._repository_name) - except tuf.exceptions.KeyAlreadyExistsError: + except exceptions.KeyAlreadyExistsError: logger.warning('Adding a verification key that has already been used.') keyid = key['keyid'] @@ -905,7 +905,7 @@ def load_signing_key(self, key): try: tuf.keydb.add_key(key, repository_name=self._repository_name) - except tuf.exceptions.KeyAlreadyExistsError: + except exceptions.KeyAlreadyExistsError: tuf.keydb.remove_key(key['keyid'], self._repository_name) tuf.keydb.add_key(key, repository_name=self._repository_name) @@ -1491,7 +1491,7 @@ def __init__(self, repository_name): try: tuf.roledb.add_role(self._rolename, roleinfo, self._repository_name) - except tuf.exceptions.RoleAlreadyExistsError: + except exceptions.RoleAlreadyExistsError: pass @@ -1560,7 +1560,7 @@ def __init__(self, repository_name): try: tuf.roledb.add_role(self.rolename, roleinfo, self._repository_name) - except tuf.exceptions.RoleAlreadyExistsError: + except exceptions.RoleAlreadyExistsError: pass @@ -1623,7 +1623,7 @@ def __init__(self, repository_name): try: tuf.roledb.add_role(self._rolename, roleinfo, self._repository_name) - except tuf.exceptions.RoleAlreadyExistsError: + except exceptions.RoleAlreadyExistsError: pass @@ -1731,7 +1731,7 @@ def __init__(self, targets_directory, rolename='targets', roleinfo=None, try: tuf.roledb.add_role(self.rolename, roleinfo, self._repository_name) - except tuf.exceptions.RoleAlreadyExistsError: + except exceptions.RoleAlreadyExistsError: 
pass @@ -1772,7 +1772,7 @@ def __call__(self, rolename): return self._delegated_roles[rolename] else: - raise tuf.exceptions.UnknownRoleError(repr(rolename) + ' has' + raise exceptions.UnknownRoleError(repr(rolename) + ' has' ' not been delegated by ' + repr(self.rolename)) @@ -2497,7 +2497,7 @@ def revoke(self, rolename): del self._delegated_roles[rolename] self._parent_targets_object.remove_delegated_role(rolename) - except (tuf.exceptions.UnknownRoleError, KeyError): + except (exceptions.UnknownRoleError, KeyError): pass @@ -2846,11 +2846,11 @@ def _check_path(self, pathname): tuf.formats.RELPATH_SCHEMA.check_match(pathname) if '\\' in pathname: - raise tuf.exceptions.InvalidNameError('Path ' + repr(pathname) + raise exceptions.InvalidNameError('Path ' + repr(pathname) + ' does not use the forward slash (/) as directory separator.') if pathname.startswith('/'): - raise tuf.exceptions.InvalidNameError('Path ' + repr(pathname) + raise exceptions.InvalidNameError('Path ' + repr(pathname) + ' starts with a directory separator. 
All paths should be relative' ' to targets directory.') @@ -3175,7 +3175,7 @@ def load_repository(repository_directory, repository_name='default', tuf.keydb.add_key(key_object, keyid=None, repository_name=repository_name) - except tuf.exceptions.KeyAlreadyExistsError: + except exceptions.KeyAlreadyExistsError: pass return repository diff --git a/tuf/roledb.py b/tuf/roledb.py index 37add72e3a..eccb4ddd92 100755 --- a/tuf/roledb.py +++ b/tuf/roledb.py @@ -53,6 +53,7 @@ import copy import tuf +from tuf import exceptions import tuf.log import tuf.formats @@ -311,7 +312,7 @@ def add_role(rolename, roleinfo, repository_name='default'): raise securesystemslib.exceptions.InvalidNameError('Repository name does not exist: ' + repository_name) if rolename in _roledb_dict[repository_name]: - raise tuf.exceptions.RoleAlreadyExistsError('Role already exists: ' + rolename) + raise exceptions.RoleAlreadyExistsError('Role already exists: ' + rolename) _roledb_dict[repository_name][rolename] = copy.deepcopy(roleinfo) @@ -396,7 +397,7 @@ def update_roleinfo(rolename, roleinfo, mark_role_as_dirty=True, repository_name repository_name) if rolename not in _roledb_dict[repository_name]: - raise tuf.exceptions.UnknownRoleError('Role does not exist: ' + rolename) + raise exceptions.UnknownRoleError('Role does not exist: ' + rolename) # Update the global _roledb_dict and _dirty_roles structures so that # the latest 'roleinfo' is available to other modules, and the repository @@ -577,7 +578,7 @@ def role_exists(rolename, repository_name='default'): try: _check_rolename(rolename, repository_name) - except tuf.exceptions.UnknownRoleError: + except exceptions.UnknownRoleError: return False return True @@ -1046,7 +1047,7 @@ def _check_rolename(rolename, repository_name='default'): ' exist: ' + repository_name) if rolename not in _roledb_dict[repository_name]: - raise tuf.exceptions.UnknownRoleError('Role name does not exist: ' + rolename) + raise exceptions.UnknownRoleError('Role name does not 
exist: ' + rolename) diff --git a/tuf/scripts/client.py b/tuf/scripts/client.py index f9d8c9dbaa..770309001b 100755 --- a/tuf/scripts/client.py +++ b/tuf/scripts/client.py @@ -72,6 +72,7 @@ import logging import tuf +from tuf import exceptions import tuf.client.updater import tuf.settings import tuf.log @@ -106,7 +107,7 @@ def update_client(parsed_arguments): """ if not isinstance(parsed_arguments, argparse.Namespace): - raise tuf.exceptions.Error('Invalid namespace object.') + raise exceptions.Error('Invalid namespace object.') else: logger.debug('We have a valid argparse Namespace object.') @@ -143,7 +144,7 @@ def update_client(parsed_arguments): try: updater.download_target(target, destination_directory) - except tuf.exceptions.DownloadError: + except exceptions.DownloadError: pass # Remove any files from the destination directory that are no longer being @@ -235,8 +236,8 @@ def parse_arguments(): try: update_client(arguments) - except (tuf.exceptions.NoWorkingMirrorError, tuf.exceptions.RepositoryError, - tuf.exceptions.FormatError, tuf.exceptions.Error) as e: + except (exceptions.NoWorkingMirrorError, exceptions.RepositoryError, + exceptions.FormatError, exceptions.Error) as e: sys.stderr.write('Error: ' + str(e) + '\n') sys.exit(1) diff --git a/tuf/scripts/repo.py b/tuf/scripts/repo.py index da1664a86e..a2a0ffef50 100755 --- a/tuf/scripts/repo.py +++ b/tuf/scripts/repo.py @@ -148,6 +148,7 @@ import fnmatch import tuf +from tuf import exceptions import tuf.log import tuf.formats import tuf.repository_tool as repo_tool @@ -218,7 +219,7 @@ def process_command_line_arguments(parsed_arguments): # Do we have a valid argparse Namespace? 
if not isinstance(parsed_arguments, argparse.Namespace): - raise tuf.exceptions.Error('Invalid namespace: ' + repr(parsed_arguments)) + raise exceptions.Error('Invalid namespace: ' + repr(parsed_arguments)) else: logger.debug('We have a valid argparse Namespace.') @@ -266,15 +267,15 @@ def process_command_line_arguments(parsed_arguments): def delegate(parsed_arguments): if not parsed_arguments.delegatee: - raise tuf.exceptions.Error( + raise exceptions.Error( '--delegatee must be set to perform the delegation.') if parsed_arguments.delegatee in ('root', 'snapshot', 'timestamp', 'targets'): - raise tuf.exceptions.Error( + raise exceptions.Error( 'Cannot delegate to the top-level role: ' + repr(parsed_arguments.delegatee)) if not parsed_arguments.pubkeys: - raise tuf.exceptions.Error( + raise exceptions.Error( '--pubkeys must be set to perform the delegation.') public_keys = [] @@ -388,7 +389,7 @@ def gen_key(parsed_arguments): } if parsed_arguments.key not in SUPPORTED_CLI_KEYTYPES: - tuf.exceptions.Error( + exceptions.Error( 'Invalid key type: ' + repr(parsed_arguments.key) + '. Supported' ' key types: ' + repr(SUPPORTED_CLI_KEYTYPES)) @@ -464,12 +465,12 @@ def import_privatekey_from_file(keypath, password=None): encrypted_key, 'rsassa-pss-sha256', password) except securesystemslib.exceptions.CryptoError as error: - six.raise_from(tuf.exceptions.Error(repr(keypath) + ' cannot be ' + six.raise_from(exceptions.Error(repr(keypath) + ' cannot be ' ' imported, possibly because an invalid key file is given or ' ' the decryption password is incorrect.'), error) if key_object['keytype'] not in SUPPORTED_KEY_TYPES: - raise tuf.exceptions.Error('Trying to import an unsupported key' + raise exceptions.Error('Trying to import an unsupported key' ' type: ' + repr(key_object['keytype'] + '.' 
' Supported key types: ' + repr(SUPPORTED_KEY_TYPES))) @@ -497,7 +498,7 @@ def import_publickey_from_file(keypath): key_object, junk = securesystemslib.keys.format_metadata_to_key(key_metadata) if key_object['keytype'] not in SUPPORTED_KEY_TYPES: - raise tuf.exceptions.Error('Trying to import an unsupported key' + raise exceptions.Error('Trying to import an unsupported key' ' type: ' + repr(key_object['keytype'] + '.' ' Supported key types: ' + repr(SUPPORTED_KEY_TYPES))) @@ -508,7 +509,7 @@ def import_publickey_from_file(keypath): def add_verification_key(parsed_arguments): if not parsed_arguments.pubkeys: - raise tuf.exceptions.Error('--pubkeys must be given with --trust.') + raise exceptions.Error('--pubkeys must be given with --trust.') repository = repo_tool.load_repository( os.path.join(parsed_arguments.path, REPO_DIR)) @@ -517,7 +518,7 @@ def add_verification_key(parsed_arguments): imported_pubkey = import_publickey_from_file(keypath) if parsed_arguments.role not in ('root', 'targets', 'snapshot', 'timestamp'): - raise tuf.exceptions.Error('The given --role is not a top-level role.') + raise exceptions.Error('The given --role is not a top-level role.') elif parsed_arguments.role == 'root': repository.root.add_verification_key(imported_pubkey) @@ -544,7 +545,7 @@ def add_verification_key(parsed_arguments): def remove_verification_key(parsed_arguments): if not parsed_arguments.pubkeys: - raise tuf.exceptions.Error('--pubkeys must be given with --distrust.') + raise exceptions.Error('--pubkeys must be given with --distrust.') repository = repo_tool.load_repository( os.path.join(parsed_arguments.path, REPO_DIR)) @@ -554,7 +555,7 @@ def remove_verification_key(parsed_arguments): try: if parsed_arguments.role not in ('root', 'targets', 'snapshot', 'timestamp'): - raise tuf.exceptions.Error('The given --role is not a top-level role.') + raise exceptions.Error('The given --role is not a top-level role.') elif parsed_arguments.role == 'root': 
repository.root.remove_verification_key(imported_pubkey) @@ -736,7 +737,7 @@ def add_target_to_repo(parsed_arguments, target_path, repo_targets_path, def remove_target_files_from_metadata(parsed_arguments, repository): if parsed_arguments.role in ('root', 'snapshot', 'timestamp'): - raise tuf.exceptions.Error( + raise exceptions.Error( 'Invalid rolename specified: ' + repr(parsed_arguments.role) + '.' ' It must be "targets" or a delegated rolename.') @@ -1146,7 +1147,7 @@ def parse_arguments(): try: process_command_line_arguments(arguments) - except (tuf.exceptions.Error) as e: + except (exceptions.Error) as e: sys.stderr.write('Error: ' + str(e) + '\n') sys.exit(1) diff --git a/tuf/sig.py b/tuf/sig.py index 2351e0e381..be1bee7d67 100755 --- a/tuf/sig.py +++ b/tuf/sig.py @@ -51,6 +51,7 @@ import logging import tuf +from tuf import exceptions import tuf.keydb import tuf.roledb import tuf.formats @@ -159,7 +160,7 @@ def get_signature_status(signable, role=None, repository_name='default', try: key = tuf.keydb.get_key(keyid, repository_name) - except tuf.exceptions.UnknownKeyError: + except exceptions.UnknownKeyError: unknown_sigs.append(keyid) continue From 4575637efd5dc2f0964fcb2a79af10d5ccbbd30f Mon Sep 17 00:00:00 2001 From: Jussi Kukkonen Date: Mon, 11 Jan 2021 19:34:13 +0200 Subject: [PATCH 03/25] imports: Make 'formats' imports vendoring-compatible Use "from tuf import <module>" instead of "import tuf.<module>": this makes it possible for vendoring tool to vendor tuf. Fix all references to <module> in the code. 
Signed-off-by: Jussi Kukkonen --- tuf/api/metadata.py | 4 +-- tuf/client/updater.py | 34 +++++++++--------- tuf/developer_tool.py | 8 ++--- tuf/download.py | 6 ++-- tuf/keydb.py | 4 +-- tuf/mirrors.py | 6 ++-- tuf/repository_lib.py | 72 ++++++++++++++++++------------------- tuf/repository_tool.py | 82 +++++++++++++++++++++--------------------- tuf/roledb.py | 14 ++++---- tuf/scripts/repo.py | 4 +-- tuf/sig.py | 14 ++++---- 11 files changed, 124 insertions(+), 124 deletions(-) diff --git a/tuf/api/metadata.py b/tuf/api/metadata.py index 0bbe15412d..725c44383a 100644 --- a/tuf/api/metadata.py +++ b/tuf/api/metadata.py @@ -25,7 +25,7 @@ from securesystemslib.util import persist_temp_file from tuf import exceptions -import tuf.formats +from tuf import formats from tuf.api.serialization import ( MetadataDeserializer, MetadataSerializer, @@ -337,7 +337,7 @@ def _common_fields_from_dict(signed_dict: Mapping[str, Any]) -> list: # Convert 'expires' TUF metadata string to a datetime object, which is # what the constructor expects and what we store. The inverse operation # is implemented in '_common_fields_to_dict'. - expires = tuf.formats.expiry_string_to_datetime(expires_str) + expires = formats.expiry_string_to_datetime(expires_str) return [_type, version, spec_version, expires] def _common_fields_to_dict(self) -> Dict[str, Any]: diff --git a/tuf/client/updater.py b/tuf/client/updater.py index dc5e625bdf..cd7e8a6525 100755 --- a/tuf/client/updater.py +++ b/tuf/client/updater.py @@ -133,8 +133,8 @@ import tuf from tuf import download from tuf import exceptions +from tuf import formats import tuf.requests_fetcher -import tuf.formats import tuf.settings import tuf.keydb import tuf.log @@ -212,7 +212,7 @@ def __init__(self, map_file): # Raise securesystemslib.exceptions.FormatError if the map file is # improperly formatted. 
- tuf.formats.MAPFILE_SCHEMA.check_match(self.map_file) + formats.MAPFILE_SCHEMA.check_match(self.map_file) # Save the "repositories" entry of the map file, with the following # example format: @@ -263,11 +263,11 @@ def get_valid_targetinfo(self, target_filename, match_custom_field=True): # Is the argument properly formatted? If not, raise # 'tuf.exceptions.FormatError'. - tuf.formats.RELPATH_SCHEMA.check_match(target_filename) + formats.RELPATH_SCHEMA.check_match(target_filename) # TAP 4 requires that the following attributes be present in mappings: # "paths", "repositories", "terminating", and "threshold". - tuf.formats.MAPPING_SCHEMA.check_match(self.map_file['mapping']) + formats.MAPPING_SCHEMA.check_match(self.map_file['mapping']) # Set the top-level directory containing the metadata for each repository. repositories_directory = tuf.settings.repositories_directory @@ -486,7 +486,7 @@ def get_updater(self, repository_name): # Are the arguments properly formatted? If not, raise # 'tuf.exceptions.FormatError'. - tuf.formats.NAME_SCHEMA.check_match(repository_name) + formats.NAME_SCHEMA.check_match(repository_name) updater = self.repository_names_to_updaters.get(repository_name) @@ -688,7 +688,7 @@ def __init__(self, repository_name, repository_mirrors, fetcher=None): # keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if there is a mistmatch. securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) - tuf.formats.MIRRORDICT_SCHEMA.check_match(repository_mirrors) + formats.MIRRORDICT_SCHEMA.check_match(repository_mirrors) # Save the validated arguments. self.repository_name = repository_name @@ -852,7 +852,7 @@ def _load_metadata_from_file(self, metadata_set, metadata_role): except securesystemslib.exceptions.Error: return - tuf.formats.check_signable_object_format(metadata_signable) + formats.check_signable_object_format(metadata_signable) # Extract the 'signed' role object from 'metadata_signable'. 
metadata_object = metadata_signable['signed'] @@ -1439,7 +1439,7 @@ def _verify_metadata_file(self, metadata_file_object, else: # Ensure the loaded 'metadata_signable' is properly formatted. Raise # 'securesystemslib.exceptions.FormatError' if not. - tuf.formats.check_signable_object_format(metadata_signable) + formats.check_signable_object_format(metadata_signable) # Is 'metadata_signable' expired? self._ensure_not_expired(metadata_signable['signed'], metadata_role) @@ -2009,7 +2009,7 @@ def _update_versioninfo(self, metadata_filename): # client's copy of snapshot.json. try: timestamp_version_number = self.metadata['current']['snapshot']['version'] - trusted_versioninfo = tuf.formats.make_versioninfo( + trusted_versioninfo = formats.make_versioninfo( timestamp_version_number) except KeyError: @@ -2025,7 +2025,7 @@ def _update_versioninfo(self, metadata_filename): targets_version_number = \ self.metadata['current'][metadata_filename[:-len('.json')]]['version'] trusted_versioninfo = \ - tuf.formats.make_versioninfo(targets_version_number) + formats.make_versioninfo(targets_version_number) except KeyError: trusted_versioninfo = \ @@ -2152,7 +2152,7 @@ def _update_fileinfo(self, metadata_filename): # to the fileinfo store. file_length, hashes = securesystemslib.util.get_file_details( current_filepath) - metadata_fileinfo = tuf.formats.make_targets_fileinfo(file_length, hashes) + metadata_fileinfo = formats.make_targets_fileinfo(file_length, hashes) self.fileinfo[metadata_filename] = metadata_fileinfo @@ -2273,9 +2273,9 @@ def _ensure_not_expired(self, metadata_object, metadata_rolename): # Extract the expiration time. Convert it to a unix timestamp and compare it # against the current time.time() (also in Unix/POSIX time format, although # with microseconds attached.) 
- expires_datetime = tuf.formats.expiry_string_to_datetime( + expires_datetime = formats.expiry_string_to_datetime( metadata_object['expires']) - expires_timestamp = tuf.formats.datetime_to_unix_timestamp(expires_datetime) + expires_timestamp = formats.datetime_to_unix_timestamp(expires_datetime) current_time = int(time.time()) if expires_timestamp <= current_time: @@ -2557,7 +2557,7 @@ def targets_of_role(self, rolename='targets'): # Does 'rolename' have the correct format? # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - tuf.formats.RELPATH_SCHEMA.check_match(rolename) + formats.RELPATH_SCHEMA.check_match(rolename) # If we've been given a delegated targets role, we don't know how to # validate it without knowing what the delegating role is -- there could @@ -2623,7 +2623,7 @@ def get_one_valid_targetinfo(self, target_filepath): # Does 'target_filepath' have the correct format? # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - tuf.formats.RELPATH_SCHEMA.check_match(target_filepath) + formats.RELPATH_SCHEMA.check_match(target_filepath) target_filepath = target_filepath.replace('\\', '/') @@ -3058,7 +3058,7 @@ def updated_targets(self, targets, destination_directory): # Do the arguments have the correct format? # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - tuf.formats.TARGETINFOS_SCHEMA.check_match(targets) + formats.TARGETINFOS_SCHEMA.check_match(targets) securesystemslib.formats.PATH_SCHEMA.check_match(destination_directory) # Keep track of the target objects and filepaths of updated targets. @@ -3156,7 +3156,7 @@ def download_target(self, target, destination_directory, # number of objects and object types, and that all dict # keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if the check fail. 
- tuf.formats.TARGETINFO_SCHEMA.check_match(target) + formats.TARGETINFO_SCHEMA.check_match(target) securesystemslib.formats.PATH_SCHEMA.check_match(destination_directory) # Extract the target file information. diff --git a/tuf/developer_tool.py b/tuf/developer_tool.py index c7c6d9e3fc..25cdff1a30 100755 --- a/tuf/developer_tool.py +++ b/tuf/developer_tool.py @@ -40,7 +40,7 @@ import tuf from tuf import exceptions -import tuf.formats +from tuf import formats import tuf.keydb import tuf.roledb import tuf.sig @@ -723,7 +723,7 @@ def _save_project_configuration(metadata_directory, targets_directory, securesystemslib.formats.PATH_SCHEMA.check_match(metadata_directory) securesystemslib.formats.PATH_SCHEMA.check_match(prefix) securesystemslib.formats.PATH_SCHEMA.check_match(targets_directory) - tuf.formats.RELPATH_SCHEMA.check_match(project_name) + formats.RELPATH_SCHEMA.check_match(project_name) cfg_file_directory = metadata_directory @@ -818,7 +818,7 @@ def load_project(project_directory, prefix='', new_targets_location=None, config_filename = os.path.join(project_directory, PROJECT_FILENAME) project_configuration = securesystemslib.util.load_json_file(config_filename) - tuf.formats.PROJECT_CFG_SCHEMA.check_match(project_configuration) + formats.PROJECT_CFG_SCHEMA.check_match(project_configuration) targets_directory = os.path.join(project_directory, project_configuration['targets_location']) @@ -864,7 +864,7 @@ def load_project(project_directory, prefix='', new_targets_location=None, project_filename) signable = securesystemslib.util.load_json_file(targets_metadata_path) try: - tuf.formats.check_signable_object_format(signable) + formats.check_signable_object_format(signable) except exceptions.UnsignedMetadataError: # Downgrade the error to a warning because a use case exists where # metadata may be generated unsigned on one machine and signed on another. 
diff --git a/tuf/download.py b/tuf/download.py index 5f93e4f57f..3156ed7b19 100755 --- a/tuf/download.py +++ b/tuf/download.py @@ -41,7 +41,7 @@ import tuf from tuf import exceptions -import tuf.formats +from tuf import formats # See 'log.py' to learn how logging is handled in TUF. logger = logging.getLogger(__name__) @@ -87,7 +87,7 @@ def safe_download(url, required_length, fetcher): # Do all of the arguments have the appropriate format? # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. securesystemslib.formats.URL_SCHEMA.check_match(url) - tuf.formats.LENGTH_SCHEMA.check_match(required_length) + formats.LENGTH_SCHEMA.check_match(required_length) return _download_file(url, required_length, fetcher, STRICT_REQUIRED_LENGTH=True) @@ -135,7 +135,7 @@ def unsafe_download(url, required_length, fetcher): # Do all of the arguments have the appropriate format? # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. securesystemslib.formats.URL_SCHEMA.check_match(url) - tuf.formats.LENGTH_SCHEMA.check_match(required_length) + formats.LENGTH_SCHEMA.check_match(required_length) return _download_file(url, required_length, fetcher, STRICT_REQUIRED_LENGTH=False) diff --git a/tuf/keydb.py b/tuf/keydb.py index c4a85cffb1..dc153d8841 100755 --- a/tuf/keydb.py +++ b/tuf/keydb.py @@ -45,7 +45,7 @@ import copy from tuf import exceptions -import tuf.formats +from tuf import formats import six import securesystemslib @@ -100,7 +100,7 @@ def create_keydb_from_root_metadata(root_metadata, repository_name='default'): # This check will ensure 'root_metadata' has the appropriate number of objects # and object types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if the check fails. - tuf.formats.ROOT_SCHEMA.check_match(root_metadata) + formats.ROOT_SCHEMA.check_match(root_metadata) # Does 'repository_name' have the correct format? 
securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) diff --git a/tuf/mirrors.py b/tuf/mirrors.py index 50d32a3b09..4ba90654b6 100755 --- a/tuf/mirrors.py +++ b/tuf/mirrors.py @@ -33,7 +33,7 @@ import os import tuf -import tuf.formats +from tuf import formats import securesystemslib import six @@ -84,8 +84,8 @@ def get_list_of_mirrors(file_type, file_path, mirrors_dict): """ # Checking if all the arguments have appropriate format. - tuf.formats.RELPATH_SCHEMA.check_match(file_path) - tuf.formats.MIRRORDICT_SCHEMA.check_match(mirrors_dict) + formats.RELPATH_SCHEMA.check_match(file_path) + formats.MIRRORDICT_SCHEMA.check_match(mirrors_dict) securesystemslib.formats.NAME_SCHEMA.check_match(file_type) # Verify 'file_type' is supported. diff --git a/tuf/repository_lib.py b/tuf/repository_lib.py index af2947bd5d..a54940c474 100644 --- a/tuf/repository_lib.py +++ b/tuf/repository_lib.py @@ -40,8 +40,8 @@ import tempfile import tuf -import tuf.formats from tuf import exceptions +from tuf import formats import tuf.keydb import tuf.roledb import tuf.sig @@ -500,7 +500,7 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): # Initialize the key and role metadata of the top-level roles. signable = securesystemslib.util.load_json_file(root_filename) try: - tuf.formats.check_signable_object_format(signable) + formats.check_signable_object_format(signable) except exceptions.UnsignedMetadataError: # Downgrade the error to a warning because a use case exists where # metadata may be generated unsigned on one machine and signed on another. 
@@ -587,7 +587,7 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): try: signable = securesystemslib.util.load_json_file(snapshot_filename) try: - tuf.formats.check_signable_object_format(signable) + formats.check_signable_object_format(signable) except exceptions.UnsignedMetadataError: # Downgrade the error to a warning because a use case exists where # metadata may be generated unsigned on one machine and signed on another. @@ -629,7 +629,7 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): try: signable = securesystemslib.util.load_json_file(targets_filename) try: - tuf.formats.check_signable_object_format(signable) + formats.check_signable_object_format(signable) except exceptions.UnsignedMetadataError: # Downgrade the error to a warning because a use case exists where # metadata may be generated unsigned on one machine and signed on another. @@ -698,10 +698,10 @@ def _log_warning_if_expires_soon(rolename, expires_iso8601_timestamp, # unix timestamp, subtract from current time.time() (also in POSIX time) # and compare against 'seconds_remaining_to_warn'. Log a warning message # to console if 'rolename' expires soon. - datetime_object = tuf.formats.expiry_string_to_datetime( + datetime_object = formats.expiry_string_to_datetime( expires_iso8601_timestamp) expires_unix_timestamp = \ - tuf.formats.datetime_to_unix_timestamp(datetime_object) + formats.datetime_to_unix_timestamp(datetime_object) seconds_until_expires = expires_unix_timestamp - int(time.time()) if seconds_until_expires <= seconds_remaining_to_warn: @@ -985,7 +985,7 @@ def get_targets_metadata_fileinfo(filename, storage_backend, custom=None): # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. 
securesystemslib.formats.PATH_SCHEMA.check_match(filename) if custom is not None: - tuf.formats.CUSTOM_SCHEMA.check_match(custom) + formats.CUSTOM_SCHEMA.check_match(custom) # Note: 'filehashes' is a dictionary of the form # {'sha256': 1233dfba312, ...}. 'custom' is an optional @@ -995,7 +995,7 @@ def get_targets_metadata_fileinfo(filename, storage_backend, custom=None): filesize, filehashes = securesystemslib.util.get_file_details(filename, tuf.settings.FILE_HASH_ALGORITHMS, storage_backend) - return tuf.formats.make_targets_fileinfo(filesize, filehashes, custom=custom) + return formats.make_targets_fileinfo(filesize, filehashes, custom=custom) @@ -1037,7 +1037,7 @@ def get_metadata_versioninfo(rolename, repository_name): # Does 'rolename' have the correct format? # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. - tuf.formats.ROLENAME_SCHEMA.check_match(rolename) + formats.ROLENAME_SCHEMA.check_match(rolename) roleinfo = tuf.roledb.get_roleinfo(rolename, repository_name) versioninfo = {'version': roleinfo['version']} @@ -1182,7 +1182,7 @@ def get_target_hash(target_filepath): The hash of 'target_filepath'. """ - tuf.formats.RELPATH_SCHEMA.check_match(target_filepath) + formats.RELPATH_SCHEMA.check_match(target_filepath) digest_object = securesystemslib.hash.digest(algorithm=HASH_FUNCTION) digest_object.update(target_filepath.encode('utf-8')) @@ -1238,7 +1238,7 @@ def generate_root_metadata(version, expiration_date, consistent_snapshot, # types, and that all dict keys are properly named. Raise # 'securesystemslib.exceptions.FormatError' if any of the arguments are # improperly formatted. 
- tuf.formats.METADATAVERSION_SCHEMA.check_match(version) + formats.METADATAVERSION_SCHEMA.check_match(version) securesystemslib.formats.ISO8601_DATETIME_SCHEMA.check_match(expiration_date) securesystemslib.formats.BOOLEAN_SCHEMA.check_match(consistent_snapshot) securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) @@ -1268,8 +1268,8 @@ def generate_root_metadata(version, expiration_date, consistent_snapshot, # Generate the authentication information Root establishes for each # top-level role. role_threshold = tuf.roledb.get_role_threshold(rolename, repository_name) - role_metadata = tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROLE_SCHEMA, + role_metadata = formats.build_dict_conforming_to_schema( + formats.ROLE_SCHEMA, keyids=keyids, threshold=role_threshold) roledict[rolename] = role_metadata @@ -1285,8 +1285,8 @@ def generate_root_metadata(version, expiration_date, consistent_snapshot, # generate_root_metadata, etc. with one function that generates # metadata, possibly rolling that upwards into the calling function. # There are very few things that really need to be done differently. - return tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROOT_SCHEMA, + return formats.build_dict_conforming_to_schema( + formats.ROOT_SCHEMA, version=version, expires=expiration_date, keys=keydict, @@ -1389,8 +1389,8 @@ def generate_targets_metadata(targets_directory, target_files, version, # types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. 
securesystemslib.formats.PATH_SCHEMA.check_match(targets_directory) - tuf.formats.PATH_FILEINFO_SCHEMA.check_match(target_files) - tuf.formats.METADATAVERSION_SCHEMA.check_match(version) + formats.PATH_FILEINFO_SCHEMA.check_match(target_files) + formats.METADATAVERSION_SCHEMA.check_match(version) securesystemslib.formats.ISO8601_DATETIME_SCHEMA.check_match(expiration_date) securesystemslib.formats.BOOLEAN_SCHEMA.check_match(write_consistent_targets) securesystemslib.formats.BOOLEAN_SCHEMA.check_match(use_existing_fileinfo) @@ -1400,7 +1400,7 @@ def generate_targets_metadata(targets_directory, target_files, version, ' targets and using existing fileinfo.') if delegations is not None: - tuf.formats.DELEGATIONS_SCHEMA.check_match(delegations) + formats.DELEGATIONS_SCHEMA.check_match(delegations) # If targets role has delegations, collect the up-to-date 'keyids' and # 'threshold' for each role. Update the delegations keys dictionary. delegations_keys = [] @@ -1460,15 +1460,15 @@ def generate_targets_metadata(targets_directory, target_files, version, # metadata, possibly rolling that upwards into the calling function. # There are very few things that really need to be done differently. if delegations is not None: - return tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TARGETS_SCHEMA, + return formats.build_dict_conforming_to_schema( + formats.TARGETS_SCHEMA, version=version, expires=expiration_date, targets=filedict, delegations=delegations) else: - return tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TARGETS_SCHEMA, + return formats.build_dict_conforming_to_schema( + formats.TARGETS_SCHEMA, version=version, expires=expiration_date, targets=filedict) @@ -1621,7 +1621,7 @@ def generate_snapshot_metadata(metadata_directory, version, expiration_date, # object types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if the check fails. 
securesystemslib.formats.PATH_SCHEMA.check_match(metadata_directory) - tuf.formats.METADATAVERSION_SCHEMA.check_match(version) + formats.METADATAVERSION_SCHEMA.check_match(version) securesystemslib.formats.ISO8601_DATETIME_SCHEMA.check_match(expiration_date) securesystemslib.formats.BOOLEAN_SCHEMA.check_match(consistent_snapshot) securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) @@ -1643,7 +1643,7 @@ def generate_snapshot_metadata(metadata_directory, version, expiration_date, # Make file info dictionary with make_metadata_fileinfo because # in the tuf spec length and hashes are optional for all # METAFILES in snapshot.json including the top-level targets file. - fileinfodict[TARGETS_FILENAME] = tuf.formats.make_metadata_fileinfo( + fileinfodict[TARGETS_FILENAME] = formats.make_metadata_fileinfo( targets_file_version['version'], length, hashes) # Search the metadata directory and generate the versioninfo of all the role @@ -1675,7 +1675,7 @@ def generate_snapshot_metadata(metadata_directory, version, expiration_date, file_version = get_metadata_versioninfo(rolename, repository_name) - fileinfodict[metadata_name] = tuf.formats.make_metadata_fileinfo( + fileinfodict[metadata_name] = formats.make_metadata_fileinfo( file_version['version'], length, hashes) else: @@ -1691,8 +1691,8 @@ def generate_snapshot_metadata(metadata_directory, version, expiration_date, # generate_root_metadata, etc. with one function that generates # metadata, possibly rolling that upwards into the calling function. # There are very few things that really need to be done differently. - return tuf.formats.build_dict_conforming_to_schema( - tuf.formats.SNAPSHOT_SCHEMA, + return formats.build_dict_conforming_to_schema( + formats.SNAPSHOT_SCHEMA, version=version, expires=expiration_date, meta=fileinfodict) @@ -1758,7 +1758,7 @@ def generate_timestamp_metadata(snapshot_file_path, version, expiration_date, # object types, and that all dict keys are properly named. 
# Raise 'securesystemslib.exceptions.FormatError' if the check fails. securesystemslib.formats.PATH_SCHEMA.check_match(snapshot_file_path) - tuf.formats.METADATAVERSION_SCHEMA.check_match(version) + formats.METADATAVERSION_SCHEMA.check_match(version) securesystemslib.formats.ISO8601_DATETIME_SCHEMA.check_match(expiration_date) securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) securesystemslib.formats.BOOLEAN_SCHEMA.check_match(use_length) @@ -1773,7 +1773,7 @@ def generate_timestamp_metadata(snapshot_file_path, version, expiration_date, # Retrieve the versioninfo of the Snapshot metadata file. snapshot_version = get_metadata_versioninfo('snapshot', repository_name) snapshot_fileinfo[snapshot_filename] = \ - tuf.formats.make_metadata_fileinfo(snapshot_version['version'], + formats.make_metadata_fileinfo(snapshot_version['version'], length, hashes) # Generate the timestamp metadata object. @@ -1785,8 +1785,8 @@ def generate_timestamp_metadata(snapshot_file_path, version, expiration_date, # generate_root_metadata, etc. with one function that generates # metadata, possibly rolling that upwards into the calling function. # There are very few things that really need to be done differently. - return tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TIMESTAMP_SCHEMA, + return formats.build_dict_conforming_to_schema( + formats.TIMESTAMP_SCHEMA, version=version, expires=expiration_date, meta=snapshot_fileinfo) @@ -1838,7 +1838,7 @@ def sign_metadata(metadata_object, keyids, filename, repository_name): # This check ensures arguments have the appropriate number of objects and # object types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if the check fails. 
- tuf.formats.ANYROLE_SCHEMA.check_match(metadata_object) + formats.ANYROLE_SCHEMA.check_match(metadata_object) securesystemslib.formats.KEYIDS_SCHEMA.check_match(keyids) securesystemslib.formats.PATH_SCHEMA.check_match(filename) securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) @@ -1847,7 +1847,7 @@ def sign_metadata(metadata_object, keyids, filename, repository_name): # it contains a 'signatures' field containing the result # of signing the 'signed' field of 'metadata' with each # keyid of 'keyids'. - signable = tuf.formats.make_signable(metadata_object) + signable = formats.make_signable(metadata_object) # Sign the metadata with each keyid in 'keyids'. 'signable' should have # zero signatures (metadata_object contained none). @@ -1876,7 +1876,7 @@ def sign_metadata(metadata_object, keyids, filename, repository_name): # Raise 'securesystemslib.exceptions.FormatError' if the resulting 'signable' # is not formatted correctly. try: - tuf.formats.check_signable_object_format(signable) + formats.check_signable_object_format(signable) except exceptions.UnsignedMetadataError: # Downgrade the error to a warning because a use case exists where # metadata may be generated unsigned on one machine and signed on another. @@ -1936,9 +1936,9 @@ def write_metadata_file(metadata, filename, version_number, consistent_snapshot, # This check ensures arguments have the appropriate number of objects and # object types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if the check fails. 
- tuf.formats.SIGNABLE_SCHEMA.check_match(metadata) + formats.SIGNABLE_SCHEMA.check_match(metadata) securesystemslib.formats.PATH_SCHEMA.check_match(filename) - tuf.formats.METADATAVERSION_SCHEMA.check_match(version_number) + formats.METADATAVERSION_SCHEMA.check_match(version_number) securesystemslib.formats.BOOLEAN_SCHEMA.check_match(consistent_snapshot) if storage_backend is None: diff --git a/tuf/repository_tool.py b/tuf/repository_tool.py index ab3a646076..987204ab92 100755 --- a/tuf/repository_tool.py +++ b/tuf/repository_tool.py @@ -43,11 +43,11 @@ from collections import deque import tuf -import tuf.formats +from tuf import exceptions +from tuf import formats import tuf.roledb import tuf.sig import tuf.log -from tuf import exceptions import tuf.repository_lib as repo_lib import securesystemslib.keys @@ -740,23 +740,23 @@ def add_verification_key(self, key, expires=None): if expires is None: if self.rolename == 'root': expires = \ - tuf.formats.unix_timestamp_to_datetime(int(time.time() + ROOT_EXPIRATION)) + formats.unix_timestamp_to_datetime(int(time.time() + ROOT_EXPIRATION)) elif self.rolename == 'Targets': expires = \ - tuf.formats.unix_timestamp_to_datetime(int(time.time() + TARGETS_EXPIRATION)) + formats.unix_timestamp_to_datetime(int(time.time() + TARGETS_EXPIRATION)) elif self.rolename == 'Snapshot': expires = \ - tuf.formats.unix_timestamp_to_datetime(int(time.time() + SNAPSHOT_EXPIRATION)) + formats.unix_timestamp_to_datetime(int(time.time() + SNAPSHOT_EXPIRATION)) elif self.rolename == 'Timestamp': expires = \ - tuf.formats.unix_timestamp_to_datetime(int(time.time() + TIMESTAMP_EXPIRATION)) + formats.unix_timestamp_to_datetime(int(time.time() + TIMESTAMP_EXPIRATION)) else: expires = \ - tuf.formats.unix_timestamp_to_datetime(int(time.time() + TIMESTAMP_EXPIRATION)) + formats.unix_timestamp_to_datetime(int(time.time() + TIMESTAMP_EXPIRATION)) # Is 'expires' a datetime.datetime() object? # Raise 'securesystemslib.exceptions.FormatError' if not. 
@@ -770,7 +770,7 @@ def add_verification_key(self, key, expires=None): # Ensure the expiration has not already passed. current_datetime = \ - tuf.formats.unix_timestamp_to_datetime(int(time.time())) + formats.unix_timestamp_to_datetime(int(time.time())) if expires < current_datetime: raise securesystemslib.exceptions.Error(repr(key) + ' has already' @@ -1234,7 +1234,7 @@ def version(self, version): # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. Raise # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - tuf.formats.METADATAVERSION_SCHEMA.check_match(version) + formats.METADATAVERSION_SCHEMA.check_match(version) roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) roleinfo['version'] = version @@ -1306,7 +1306,7 @@ def threshold(self, threshold): # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. Raise # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - tuf.formats.THRESHOLD_SCHEMA.check_match(threshold) + formats.THRESHOLD_SCHEMA.check_match(threshold) roleinfo = tuf.roledb.get_roleinfo(self._rolename, self._repository_name) roleinfo['previous_threshold'] = roleinfo['threshold'] @@ -1339,7 +1339,7 @@ def expiration(self): roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) expires = roleinfo['expires'] - return tuf.formats.expiry_string_to_datetime(expires) + return formats.expiry_string_to_datetime(expires) @@ -1386,7 +1386,7 @@ def expiration(self, datetime_object): # Ensure the expiration has not already passed. 
current_datetime_object = \ - tuf.formats.unix_timestamp_to_datetime(int(time.time())) + formats.unix_timestamp_to_datetime(int(time.time())) if datetime_object < current_datetime_object: raise securesystemslib.exceptions.Error(repr(self.rolename) + ' has' @@ -1477,11 +1477,11 @@ def __init__(self, repository_name): # Is 'repository_name' properly formatted? Otherwise, raise a # tuf.exceptions.FormatError exception. - tuf.formats.ROLENAME_SCHEMA.check_match(repository_name) + formats.ROLENAME_SCHEMA.check_match(repository_name) # By default, 'snapshot' metadata is set to expire 1 week from the current # time. The expiration may be modified. - expiration = tuf.formats.unix_timestamp_to_datetime( + expiration = formats.unix_timestamp_to_datetime( int(time.time() + ROOT_EXPIRATION)) expiration = expiration.isoformat() + 'Z' @@ -1549,7 +1549,7 @@ def __init__(self, repository_name): # By default, 'root' metadata is set to expire 1 year from the current # time. The expiration may be modified. - expiration = tuf.formats.unix_timestamp_to_datetime( + expiration = formats.unix_timestamp_to_datetime( int(time.time() + TIMESTAMP_EXPIRATION)) expiration = expiration.isoformat() + 'Z' @@ -1612,7 +1612,7 @@ def __init__(self, repository_name): # By default, 'snapshot' metadata is set to expire 1 week from the current # time. The expiration may be modified. - expiration = tuf.formats.unix_timestamp_to_datetime( + expiration = formats.unix_timestamp_to_datetime( int(time.time() + SNAPSHOT_EXPIRATION)) expiration = expiration.isoformat() + 'Z' @@ -1693,11 +1693,11 @@ def __init__(self, targets_directory, rolename='targets', roleinfo=None, # types, and that all dict keys are properly named. Raise # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. 
securesystemslib.formats.PATH_SCHEMA.check_match(targets_directory) - tuf.formats.ROLENAME_SCHEMA.check_match(rolename) + formats.ROLENAME_SCHEMA.check_match(rolename) securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) if roleinfo is not None: - tuf.formats.ROLEDB_SCHEMA.check_match(roleinfo) + formats.ROLEDB_SCHEMA.check_match(roleinfo) super(Targets, self).__init__() self._targets_directory = targets_directory @@ -1715,7 +1715,7 @@ def __init__(self, targets_directory, rolename='targets', roleinfo=None, # By default, Targets objects are set to expire 3 months from the current # time. May be later modified. - expiration = tuf.formats.unix_timestamp_to_datetime( + expiration = formats.unix_timestamp_to_datetime( int(time.time() + TARGETS_EXPIRATION)) expiration = expiration.isoformat() + 'Z' @@ -1766,7 +1766,7 @@ def __call__(self, rolename): # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. Raise # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - tuf.formats.ROLENAME_SCHEMA.check_match(rolename) + formats.ROLENAME_SCHEMA.check_match(rolename) if rolename in self._delegated_roles: return self._delegated_roles[rolename] @@ -1808,7 +1808,7 @@ def add_delegated_role(self, rolename, targets_object): # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. Raise # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - tuf.formats.ROLENAME_SCHEMA.check_match(rolename) + formats.ROLENAME_SCHEMA.check_match(rolename) if not isinstance(targets_object, Targets): raise securesystemslib.exceptions.FormatError(repr(targets_object) + ' is' @@ -1848,7 +1848,7 @@ def remove_delegated_role(self, rolename): # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. 
Raise # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - tuf.formats.ROLENAME_SCHEMA.check_match(rolename) + formats.ROLENAME_SCHEMA.check_match(rolename) if rolename not in self._delegated_roles: logger.debug(repr(rolename) + ' has not been delegated.') @@ -1936,7 +1936,7 @@ def add_paths(self, paths, child_rolename): # types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. securesystemslib.formats.PATHS_SCHEMA.check_match(paths) - tuf.formats.ROLENAME_SCHEMA.check_match(child_rolename) + formats.ROLENAME_SCHEMA.check_match(child_rolename) # Ensure that 'child_rolename' exists, otherwise it will not have an entry # in the parent role's delegations field. @@ -2024,19 +2024,19 @@ def add_target(self, filepath, custom=None, fileinfo=None): # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. Raise # 'securesystemslib.exceptions.FormatError' if there is a mismatch. - tuf.formats.RELPATH_SCHEMA.check_match(filepath) + formats.RELPATH_SCHEMA.check_match(filepath) if fileinfo and custom: raise securesystemslib.exceptions.Error("Can only take one of" " custom or fileinfo, not both.") if fileinfo: - tuf.formats.TARGETS_FILEINFO_SCHEMA.check_match(fileinfo) + formats.TARGETS_FILEINFO_SCHEMA.check_match(fileinfo) if custom is None: custom = {} else: - tuf.formats.CUSTOM_SCHEMA.check_match(custom) + formats.CUSTOM_SCHEMA.check_match(custom) # Add 'filepath' (i.e., relative to the targets directory) to the role's # list of targets. 'filepath' will not be verified as an allowed path @@ -2104,7 +2104,7 @@ def add_targets(self, list_of_targets): # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. 
- tuf.formats.RELPATHS_SCHEMA.check_match(list_of_targets) + formats.RELPATHS_SCHEMA.check_match(list_of_targets) # Ensure the paths in 'list_of_targets' are relative and use forward slash # as a separator or raise an exception. The paths of 'list_of_targets' @@ -2162,7 +2162,7 @@ def remove_target(self, filepath): # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. Raise # 'securesystemslib.exceptions.FormatError' if there is a mismatch. - tuf.formats.RELPATH_SCHEMA.check_match(filepath) + formats.RELPATH_SCHEMA.check_match(filepath) # Remove 'relative_filepath', if found, and update this Targets roleinfo. fileinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) @@ -2241,7 +2241,7 @@ def _create_delegated_target(self, rolename, keyids, threshold, paths): expiration is set (3 months from the current time). """ - expiration = tuf.formats.unix_timestamp_to_datetime( + expiration = formats.unix_timestamp_to_datetime( int(time.time() + TARGETS_EXPIRATION)) expiration = expiration.isoformat() + 'Z' @@ -2363,17 +2363,17 @@ def delegate(self, rolename, public_keys, paths, threshold=1, # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. 
- tuf.formats.ROLENAME_SCHEMA.check_match(rolename) + formats.ROLENAME_SCHEMA.check_match(rolename) securesystemslib.formats.ANYKEYLIST_SCHEMA.check_match(public_keys) - tuf.formats.RELPATHS_SCHEMA.check_match(paths) - tuf.formats.THRESHOLD_SCHEMA.check_match(threshold) + formats.RELPATHS_SCHEMA.check_match(paths) + formats.THRESHOLD_SCHEMA.check_match(threshold) securesystemslib.formats.BOOLEAN_SCHEMA.check_match(terminating) if list_of_targets is not None: - tuf.formats.RELPATHS_SCHEMA.check_match(list_of_targets) + formats.RELPATHS_SCHEMA.check_match(list_of_targets) if path_hash_prefixes is not None: - tuf.formats.PATH_HASH_PREFIXES_SCHEMA.check_match(path_hash_prefixes) + formats.PATH_HASH_PREFIXES_SCHEMA.check_match(path_hash_prefixes) # Keep track of the valid keyids (added to the new Targets object) and # their keydicts (added to this Targets delegations). @@ -2477,7 +2477,7 @@ def revoke(self, rolename): # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - tuf.formats.ROLENAME_SCHEMA.check_match(rolename) + formats.ROLENAME_SCHEMA.check_match(rolename) # Remove 'rolename' from this Target's delegations dict. roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) @@ -2569,7 +2569,7 @@ def delegate_hashed_bins(self, list_of_targets, keys_of_hashed_bins, # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. 
securesystemslib.formats.PATHS_SCHEMA.check_match(list_of_targets) securesystemslib.formats.ANYKEYLIST_SCHEMA.check_match(keys_of_hashed_bins) - tuf.formats.NUMBINS_SCHEMA.check_match(number_of_bins) + formats.NUMBINS_SCHEMA.check_match(number_of_bins) prefix_length, prefix_count, bin_size = repo_lib.get_bin_numbers(number_of_bins) @@ -2712,7 +2712,7 @@ def add_target_to_bin(self, target_filepath, number_of_bins=DEFAULT_NUM_BINS, # types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. securesystemslib.formats.PATH_SCHEMA.check_match(target_filepath) - tuf.formats.NUMBINS_SCHEMA.check_match(number_of_bins) + formats.NUMBINS_SCHEMA.check_match(number_of_bins) # TODO: check target_filepath is sane @@ -2774,7 +2774,7 @@ def remove_target_from_bin(self, target_filepath, # types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. securesystemslib.formats.PATH_SCHEMA.check_match(target_filepath) - tuf.formats.NUMBINS_SCHEMA.check_match(number_of_bins) + formats.NUMBINS_SCHEMA.check_match(number_of_bins) # TODO: check target_filepath is sane? @@ -2843,7 +2843,7 @@ def _check_path(self, pathname): None. """ - tuf.formats.RELPATH_SCHEMA.check_match(pathname) + formats.RELPATH_SCHEMA.check_match(pathname) if '\\' in pathname: raise exceptions.InvalidNameError('Path ' + repr(pathname) @@ -3222,7 +3222,7 @@ def dump_signable_metadata(metadata_filepath): signable = securesystemslib.util.load_json_file(metadata_filepath) # Is 'signable' a valid metadata file? - tuf.formats.SIGNABLE_SCHEMA.check_match(signable) + formats.SIGNABLE_SCHEMA.check_match(signable) return securesystemslib.formats.encode_canonical(signable['signed']) @@ -3278,7 +3278,7 @@ def append_signature(signature, metadata_filepath): signable = securesystemslib.util.load_json_file(metadata_filepath) # Is 'signable' a valid metadata file? 
- tuf.formats.SIGNABLE_SCHEMA.check_match(signable) + formats.SIGNABLE_SCHEMA.check_match(signable) signable['signatures'].append(signature) diff --git a/tuf/roledb.py b/tuf/roledb.py index eccb4ddd92..30a3822309 100755 --- a/tuf/roledb.py +++ b/tuf/roledb.py @@ -54,8 +54,8 @@ import tuf from tuf import exceptions +from tuf import formats import tuf.log -import tuf.formats import securesystemslib import six @@ -112,7 +112,7 @@ def create_roledb_from_root_metadata(root_metadata, repository_name='default'): # This check will ensure 'root_metadata' has the appropriate number of objects # and object types, and that all dict keys are properly named. # Raises securesystemslib.exceptions.FormatError. - tuf.formats.ROOT_SCHEMA.check_match(root_metadata) + formats.ROOT_SCHEMA.check_match(root_metadata) # Is 'repository_name' formatted correctly? securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) @@ -295,10 +295,10 @@ def add_role(rolename, roleinfo, repository_name='default'): # Does 'rolename' have the correct object format? # This check will ensure 'rolename' has the appropriate number of objects # and object types, and that all dict keys are properly named. - tuf.formats.ROLENAME_SCHEMA.check_match(rolename) + formats.ROLENAME_SCHEMA.check_match(rolename) # Does 'roleinfo' have the correct object format? - tuf.formats.ROLEDB_SCHEMA.check_match(roleinfo) + formats.ROLEDB_SCHEMA.check_match(roleinfo) # Is 'repository_name' correctly formatted? securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) @@ -379,12 +379,12 @@ def update_roleinfo(rolename, roleinfo, mark_role_as_dirty=True, repository_name # Does the arguments have the correct object format? # This check will ensure arguments have the appropriate number of objects # and object types, and that all dict keys are properly named. 
- tuf.formats.ROLENAME_SCHEMA.check_match(rolename) + formats.ROLENAME_SCHEMA.check_match(rolename) securesystemslib.formats.BOOLEAN_SCHEMA.check_match(mark_role_as_dirty) securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) # Does 'roleinfo' have the correct object format? - tuf.formats.ROLEDB_SCHEMA.check_match(roleinfo) + formats.ROLEDB_SCHEMA.check_match(roleinfo) # Raises securesystemslib.exceptions.InvalidNameError. _validate_rolename(rolename) @@ -1031,7 +1031,7 @@ def _check_rolename(rolename, repository_name='default'): # Does 'rolename' have the correct object format? # This check will ensure 'rolename' has the appropriate number of objects # and object types, and that all dict keys are properly named. - tuf.formats.ROLENAME_SCHEMA.check_match(rolename) + formats.ROLENAME_SCHEMA.check_match(rolename) # Does 'repository_name' have the correct format? securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) diff --git a/tuf/scripts/repo.py b/tuf/scripts/repo.py index a2a0ffef50..5557ca6e90 100755 --- a/tuf/scripts/repo.py +++ b/tuf/scripts/repo.py @@ -149,8 +149,8 @@ import tuf from tuf import exceptions +from tuf import formats import tuf.log -import tuf.formats import tuf.repository_tool as repo_tool # 'pip install securesystemslib[crypto,pynacl]' is required for the CLI, @@ -624,7 +624,7 @@ def sign_role(parsed_arguments): role_privatekey, repository_name = repository._repository_name) # Set the delegated metadata file to expire in 3 months. 
- expiration = tuf.formats.unix_timestamp_to_datetime( + expiration = formats.unix_timestamp_to_datetime( int(time.time() + 7889230)) expiration = expiration.isoformat() + 'Z' diff --git a/tuf/sig.py b/tuf/sig.py index be1bee7d67..221bdfa928 100755 --- a/tuf/sig.py +++ b/tuf/sig.py @@ -52,9 +52,9 @@ import tuf from tuf import exceptions +from tuf import formats import tuf.keydb import tuf.roledb -import tuf.formats import securesystemslib @@ -126,14 +126,14 @@ def get_signature_status(signable, role=None, repository_name='default', # arguments have the appropriate number of objects and object types, and that # all dict keys are properly named. Raise # 'securesystemslib.exceptions.FormatError' if the check fails. - tuf.formats.SIGNABLE_SCHEMA.check_match(signable) + formats.SIGNABLE_SCHEMA.check_match(signable) securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) if role is not None: - tuf.formats.ROLENAME_SCHEMA.check_match(role) + formats.ROLENAME_SCHEMA.check_match(role) if threshold is not None: - tuf.formats.THRESHOLD_SCHEMA.check_match(threshold) + formats.THRESHOLD_SCHEMA.check_match(threshold) if keyids is not None: securesystemslib.formats.KEYIDS_SCHEMA.check_match(keyids) @@ -280,8 +280,8 @@ def verify(signable, role, repository_name='default', threshold=None, role's threshold, False otherwise. """ - tuf.formats.SIGNABLE_SCHEMA.check_match(signable) - tuf.formats.ROLENAME_SCHEMA.check_match(role) + formats.SIGNABLE_SCHEMA.check_match(signable) + formats.ROLENAME_SCHEMA.check_match(role) securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) # Retrieve the signature status. tuf.sig.get_signature_status() raises: @@ -338,7 +338,7 @@ def may_need_new_keys(signature_status): # This check will ensure 'signature_status' has the appropriate number # of objects and object types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if the check fails. 
- tuf.formats.SIGNATURESTATUS_SCHEMA.check_match(signature_status) + formats.SIGNATURESTATUS_SCHEMA.check_match(signature_status) unknown = signature_status['unknown_sigs'] untrusted = signature_status['untrusted_sigs'] From 07b3aed03ce0c1cdde9e02956a752ae29ab10d1c Mon Sep 17 00:00:00 2001 From: Jussi Kukkonen Date: Mon, 11 Jan 2021 19:48:50 +0200 Subject: [PATCH 04/25] imports: Make 'log' imports vendoring-compatible Use "from tuf import <module>" instead of "import tuf.<module>": this makes it possible for vendoring tool to vendor tuf. Fix all references to <module> in the code. Signed-off-by: Jussi Kukkonen --- tuf/client/updater.py | 2 +- tuf/developer_tool.py | 2 +- tuf/repository_lib.py | 4 ++-- tuf/repository_tool.py | 6 +++--- tuf/roledb.py | 2 +- tuf/scripts/client.py | 14 +++++++------- tuf/scripts/repo.py | 4 ++-- 7 files changed, 17 insertions(+), 17 deletions(-) diff --git a/tuf/client/updater.py b/tuf/client/updater.py index cd7e8a6525..094afd8608 100755 --- a/tuf/client/updater.py +++ b/tuf/client/updater.py @@ -134,10 +134,10 @@ from tuf import download from tuf import exceptions from tuf import formats +from tuf import log import tuf.requests_fetcher import tuf.settings import tuf.keydb -import tuf.log import tuf.mirrors import tuf.roledb import tuf.sig diff --git a/tuf/developer_tool.py b/tuf/developer_tool.py index 25cdff1a30..14d50f4cdb 100755 --- a/tuf/developer_tool.py +++ b/tuf/developer_tool.py @@ -41,10 +41,10 @@ import tuf from tuf import exceptions from tuf import formats +from tuf import log import tuf.keydb import tuf.roledb import tuf.sig -import tuf.log import tuf.repository_lib as repo_lib import tuf.repository_tool diff --git a/tuf/repository_lib.py b/tuf/repository_lib.py index a54940c474..47f6c046d6 100644 --- a/tuf/repository_lib.py +++ b/tuf/repository_lib.py @@ -42,10 +42,10 @@ import tuf from tuf import exceptions from tuf import formats +from tuf import log import tuf.keydb import tuf.roledb import tuf.sig -import tuf.log import tuf.settings import 
securesystemslib @@ -2269,7 +2269,7 @@ def disable_console_log_messages(): None. """ - tuf.log.remove_console_handler() + log.remove_console_handler() diff --git a/tuf/repository_tool.py b/tuf/repository_tool.py index 987204ab92..498652ab18 100755 --- a/tuf/repository_tool.py +++ b/tuf/repository_tool.py @@ -45,9 +45,9 @@ import tuf from tuf import exceptions from tuf import formats +from tuf import log import tuf.roledb import tuf.sig -import tuf.log import tuf.repository_lib as repo_lib import securesystemslib.keys @@ -100,8 +100,8 @@ # Add a console handler so that users are aware of potentially unintended # states, such as multiple roles that share keys. -tuf.log.add_console_handler() -tuf.log.set_console_log_level(logging.INFO) +log.add_console_handler() +log.set_console_log_level(logging.INFO) # Recommended RSA key sizes: # https://en.wikipedia.org/wiki/Key_size#Asymmetric_algorithm_key_lengths diff --git a/tuf/roledb.py b/tuf/roledb.py index 30a3822309..0b69537ae1 100755 --- a/tuf/roledb.py +++ b/tuf/roledb.py @@ -55,7 +55,7 @@ import tuf from tuf import exceptions from tuf import formats -import tuf.log +from tuf import log import securesystemslib import six diff --git a/tuf/scripts/client.py b/tuf/scripts/client.py index 770309001b..ebc93e97ab 100755 --- a/tuf/scripts/client.py +++ b/tuf/scripts/client.py @@ -73,9 +73,9 @@ import tuf from tuf import exceptions +from tuf import log import tuf.client.updater import tuf.settings -import tuf.log # See 'log.py' to learn how logging is handled in TUF. logger = logging.getLogger(__name__) @@ -204,22 +204,22 @@ def parse_arguments(): # Set the logging level. 
if parsed_arguments.verbose == 5: - tuf.log.set_log_level(logging.CRITICAL) + log.set_log_level(logging.CRITICAL) elif parsed_arguments.verbose == 4: - tuf.log.set_log_level(logging.ERROR) + log.set_log_level(logging.ERROR) elif parsed_arguments.verbose == 3: - tuf.log.set_log_level(logging.WARNING) + log.set_log_level(logging.WARNING) elif parsed_arguments.verbose == 2: - tuf.log.set_log_level(logging.INFO) + log.set_log_level(logging.INFO) elif parsed_arguments.verbose == 1: - tuf.log.set_log_level(logging.DEBUG) + log.set_log_level(logging.DEBUG) else: - tuf.log.set_log_level(logging.NOTSET) + log.set_log_level(logging.NOTSET) # Return the repository mirror containing the metadata and target files. return parsed_arguments diff --git a/tuf/scripts/repo.py b/tuf/scripts/repo.py index 5557ca6e90..c1bef95f8e 100755 --- a/tuf/scripts/repo.py +++ b/tuf/scripts/repo.py @@ -150,7 +150,7 @@ import tuf from tuf import exceptions from tuf import formats -import tuf.log +from tuf import log import tuf.repository_tool as repo_tool # 'pip install securesystemslib[crypto,pynacl]' is required for the CLI, @@ -1127,7 +1127,7 @@ def parse_arguments(): logging_levels = [logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING, logging.ERROR, logging.CRITICAL] - tuf.log.set_log_level(logging_levels[parsed_args.verbose]) + log.set_log_level(logging_levels[parsed_args.verbose]) return parsed_args From 696b92902ea86148327e1e368cc24fe947b89f90 Mon Sep 17 00:00:00 2001 From: Jussi Kukkonen Date: Mon, 11 Jan 2021 20:04:04 +0200 Subject: [PATCH 05/25] updater: rename mirrors variables Make sure mirrors is not used as variable name (so it can be used for the module import name later). 
Signed-off-by: Jussi Kukkonen --- tuf/client/updater.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tuf/client/updater.py b/tuf/client/updater.py index 094afd8608..eaedea3c34 100755 --- a/tuf/client/updater.py +++ b/tuf/client/updater.py @@ -499,10 +499,10 @@ def get_updater(self, repository_name): # Create repository mirrors object needed by the # tuf.client.updater.Updater(). Each 'repository_name' can have more # than one mirror. - mirrors = {} + repo_mirrors = {} for url in self.repository_names_to_mirrors[repository_name]: - mirrors[url] = { + repo_mirrors[url] = { 'url_prefix': url, 'metadata_path': 'metadata', 'targets_path': 'targets'} @@ -511,7 +511,7 @@ # NOTE: State (e.g., keys) should NOT be shared across different # updater instances. logger.debug('Adding updater for ' + repr(repository_name)) - updater = tuf.client.updater.Updater(repository_name, mirrors) + updater = tuf.client.updater.Updater(repository_name, repo_mirrors) except Exception: return None From 02046c0f49af09cb8236cc599706da549c93b9db Mon Sep 17 00:00:00 2001 From: Jussi Kukkonen Date: Mon, 11 Jan 2021 20:08:54 +0200 Subject: [PATCH 06/25] imports: Make 'mirrors' imports vendoring compatible Use "from tuf import <module>" instead of "import tuf.<module>": this makes it possible for vendoring tool to vendor tuf. Fix all references to <module> in the code. 
Signed-off-by: Jussi Kukkonen --- tuf/client/updater.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tuf/client/updater.py b/tuf/client/updater.py index eaedea3c34..b6b45a0460 100755 --- a/tuf/client/updater.py +++ b/tuf/client/updater.py @@ -135,10 +135,10 @@ from tuf import exceptions from tuf import formats from tuf import log +from tuf import mirrors import tuf.requests_fetcher import tuf.settings import tuf.keydb -import tuf.mirrors import tuf.roledb import tuf.sig @@ -1313,7 +1313,7 @@ dirname, basename = os.path.split(target_filepath) target_filepath = os.path.join(dirname, target_digest + '.' + basename) - file_mirrors = tuf.mirrors.get_list_of_mirrors('target', target_filepath, + file_mirrors = mirrors.get_list_of_mirrors('target', target_filepath, self.mirrors) # file_mirror (URL): error (Exception) @@ -1511,7 +1511,7 @@ A file object containing the metadata. """ - file_mirrors = tuf.mirrors.get_list_of_mirrors('meta', remote_filename, + file_mirrors = mirrors.get_list_of_mirrors('meta', remote_filename, self.mirrors) # file_mirror (URL): error (Exception) From 9550b1470dd3cb259b669aabc559c256901afff2 Mon Sep 17 00:00:00 2001 From: Jussi Kukkonen Date: Mon, 11 Jan 2021 20:29:43 +0200 Subject: [PATCH 07/25] imports: Make 'roledb' imports vendoring-compatible Use "from tuf import <module>" instead of "import tuf.<module>": this makes it possible for vendoring tool to vendor tuf. Fix all references to <module> in the code. 
Signed-off-by: Jussi Kukkonen --- tuf/client/updater.py | 22 ++--- tuf/developer_tool.py | 26 +++--- tuf/repository_lib.py | 110 ++++++++++++------------- tuf/repository_tool.py | 177 ++++++++++++++++++++--------------------- tuf/scripts/repo.py | 29 +++---- tuf/sig.py | 20 ++--- 6 files changed, 192 insertions(+), 192 deletions(-) diff --git a/tuf/client/updater.py b/tuf/client/updater.py index b6b45a0460..c23c45b72d 100755 --- a/tuf/client/updater.py +++ b/tuf/client/updater.py @@ -136,10 +136,10 @@ from tuf import formats from tuf import log from tuf import mirrors +from tuf import roledb import tuf.requests_fetcher import tuf.settings import tuf.keydb -import tuf.roledb import tuf.sig import securesystemslib.exceptions @@ -767,7 +767,7 @@ def __init__(self, repository_name, repository_mirrors, fetcher=None): # Load current and previous metadata. for metadata_set in ['current', 'previous']: - for metadata_role in tuf.roledb.TOP_LEVEL_ROLES: + for metadata_role in roledb.TOP_LEVEL_ROLES: self._load_metadata_from_file(metadata_set, metadata_role) # Raise an exception if the repository is missing the required 'root' @@ -914,7 +914,7 @@ def _rebuild_key_and_role_db(self): tuf.keydb.create_keydb_from_root_metadata(self.metadata['current']['root'], self.repository_name) - tuf.roledb.create_roledb_from_root_metadata(self.metadata['current']['root'], + roledb.create_roledb_from_root_metadata(self.metadata['current']['root'], self.repository_name) @@ -983,11 +983,11 @@ def _import_delegations(self, parent_role): # Add the roles to the role database. for roleinfo in roles_info: try: - # NOTE: tuf.roledb.add_role will take care of the case where rolename + # NOTE: roledb.add_role will take care of the case where rolename # is None. 
rolename = roleinfo.get('name') logger.debug('Adding delegated role: ' + str(rolename) + '.') - tuf.roledb.add_role(rolename, roleinfo, self.repository_name) + roledb.add_role(rolename, roleinfo, self.repository_name) except exceptions.RoleAlreadyExistsError: logger.warning('Role already exists: ' + rolename) @@ -2237,7 +2237,7 @@ def _delete_metadata(self, metadata_role): # Remove knowledge of the role. if metadata_role in self.metadata['current']: del self.metadata['current'][metadata_role] - tuf.roledb.remove_role(metadata_role, self.repository_name) + roledb.remove_role(metadata_role, self.repository_name) @@ -2341,8 +2341,8 @@ def all_targets(self): # Fetch the targets of the delegated roles. get_rolenames returns # all roles available on the repository. delegated_targets = [] - for role in tuf.roledb.get_rolenames(self.repository_name): - if role in tuf.roledb.TOP_LEVEL_ROLES: + for role in roledb.get_rolenames(self.repository_name): + if role in roledb.TOP_LEVEL_ROLES: continue else: @@ -2477,7 +2477,7 @@ def _targets_of_role(self, rolename, targets=None, skip_refresh=False): targets_of_role = list(targets) logger.debug('Getting targets of role: ' + repr(rolename) + '.') - if not tuf.roledb.role_exists(rolename, self.repository_name): + if not roledb.role_exists(rolename, self.repository_name): raise exceptions.UnknownRoleError(rolename) # We do not need to worry about the target paths being trusted because @@ -2579,7 +2579,7 @@ def targets_of_role(self, rolename='targets'): self._refresh_targets_metadata(refresh_all_delegated_roles=True) - if not tuf.roledb.role_exists(rolename, self.repository_name): + if not roledb.role_exists(rolename, self.repository_name): raise exceptions.UnknownRoleError(rolename) return self._targets_of_role(rolename, skip_refresh=True) @@ -2973,7 +2973,7 @@ def remove_obsolete_targets(self, destination_directory): # Iterate the rolenames and verify whether the 'previous' directory # contains a target no longer found in 'current'. 
- for role in tuf.roledb.get_rolenames(self.repository_name): + for role in roledb.get_rolenames(self.repository_name): if role.startswith('targets'): if role in self.metadata['previous'] and self.metadata['previous'][role] != None: for target in self.metadata['previous'][role]['targets']: diff --git a/tuf/developer_tool.py b/tuf/developer_tool.py index 14d50f4cdb..256bf626a1 100755 --- a/tuf/developer_tool.py +++ b/tuf/developer_tool.py @@ -42,8 +42,8 @@ from tuf import exceptions from tuf import formats from tuf import log +from tuf import roledb import tuf.keydb -import tuf.roledb import tuf.sig import tuf.repository_lib as repo_lib import tuf.repository_tool @@ -252,12 +252,12 @@ def write(self, write_partial=False): # Raise 'securesystemslib.exceptions.FormatError' if any are improperly formatted. securesystemslib.formats.BOOLEAN_SCHEMA.check_match(write_partial) - # At this point the tuf.keydb and tuf.roledb stores must be fully + # At this point the tuf.keydb and roledb stores must be fully # populated, otherwise write() throwns a 'tuf.Repository' exception if # any of the project roles are missing signatures, keys, etc. # Write the metadata files of all the delegated roles of the project. - delegated_rolenames = tuf.roledb.get_delegated_rolenames(self.project_name, + delegated_rolenames = roledb.get_delegated_rolenames(self.project_name, self.repository_name) for delegated_rolename in delegated_rolenames: @@ -311,7 +311,7 @@ def add_verification_key(self, key, expires=None): securesystemslib.exceptions.Error, if the project already contains a key. - The role's entries in 'tuf.keydb.py' and 'tuf.roledb.py' are updated. + The role's entries in 'tuf.keydb.py' and 'roledb' are updated. None @@ -370,7 +370,7 @@ def status(self): filenames['targets'] = os.path.join(metadata_directory, self.project_name) # Delegated roles. 
- delegated_roles = tuf.roledb.get_delegated_rolenames(self.project_name, + delegated_roles = roledb.get_delegated_rolenames(self.project_name, self.repository_name) insufficient_keys = [] insufficient_signatures = [] @@ -464,7 +464,7 @@ def _generate_and_write_metadata(rolename, metadata_filename, write_partial, # Retrieve the roleinfo of 'rolename' to extract the needed metadata # attributes, such as version number, expiration, etc. - roleinfo = tuf.roledb.get_roleinfo(rolename, repository_name) + roleinfo = roledb.get_roleinfo(rolename, repository_name) metadata = generate_targets_metadata(targets_directory, roleinfo['paths'], roleinfo['version'], roleinfo['expires'], roleinfo['delegations'], @@ -808,7 +808,7 @@ def load_project(project_directory, prefix='', new_targets_location=None, securesystemslib.formats.ANY_STRING_SCHEMA.check_match(prefix) # Clear the role and key databases since we are loading in a new project. - tuf.roledb.clear_roledb(clear_all=True) + roledb.clear_roledb(clear_all=True) tuf.keydb.clear_keydb(clear_all=True) # Locate metadata filepaths and targets filepath. @@ -878,7 +878,7 @@ def load_project(project_directory, prefix='', new_targets_location=None, project.add_signature(signature) # Update roledb.py containing the loaded project attributes. 
- roleinfo = tuf.roledb.get_roleinfo(project_name, repository_name) + roleinfo = roledb.get_roleinfo(project_name, repository_name) roleinfo['signatures'].extend(signable['signatures']) roleinfo['version'] = targets_metadata['version'] roleinfo['paths'] = targets_metadata['targets'] @@ -891,7 +891,7 @@ def load_project(project_directory, prefix='', new_targets_location=None, repository_name=repository_name): roleinfo['partial_loaded'] = True - tuf.roledb.update_roleinfo(project_name, roleinfo, mark_role_as_dirty=False, + roledb.update_roleinfo(project_name, roleinfo, mark_role_as_dirty=False, repository_name=repository_name) for key_metadata in targets_metadata['delegations']['keys'].values(): @@ -905,7 +905,7 @@ def load_project(project_directory, prefix='', new_targets_location=None, 'signing_keyids': [], 'signatures': [], 'partial_loaded':False, 'delegations': {'keys':{}, 'roles':[]} } - tuf.roledb.add_role(rolename, roleinfo, repository_name=repository_name) + roledb.add_role(rolename, roleinfo, repository_name=repository_name) # Load the delegated metadata and generate their fileinfo. targets_objects = {} @@ -941,7 +941,7 @@ def load_project(project_directory, prefix='', new_targets_location=None, metadata_object = _strip_prefix_from_targets_metadata(metadata_object, prefix) - roleinfo = tuf.roledb.get_roleinfo(metadata_name, repository_name) + roleinfo = roledb.get_roleinfo(metadata_name, repository_name) roleinfo['signatures'].extend(signable['signatures']) roleinfo['version'] = metadata_object['version'] roleinfo['expires'] = metadata_object['expires'] @@ -958,7 +958,7 @@ def load_project(project_directory, prefix='', new_targets_location=None, roleinfo['partial_loaded'] = True - tuf.roledb.update_roleinfo(metadata_name, roleinfo, + roledb.update_roleinfo(metadata_name, roleinfo, mark_role_as_dirty=False, repository_name=repository_name) # Append to list of elements to avoid reloading repeated metadata. 
@@ -989,7 +989,7 @@ def load_project(project_directory, prefix='', new_targets_location=None, 'partial_loaded': False, 'delegations': {'keys': {}, 'roles': []}} - tuf.roledb.add_role(rolename, roleinfo, repository_name=repository_name) + roledb.add_role(rolename, roleinfo, repository_name=repository_name) if new_prefix: project.prefix = new_prefix diff --git a/tuf/repository_lib.py b/tuf/repository_lib.py index 47f6c046d6..76c61753d3 100644 --- a/tuf/repository_lib.py +++ b/tuf/repository_lib.py @@ -43,8 +43,8 @@ from tuf import exceptions from tuf import formats from tuf import log +from tuf import roledb import tuf.keydb -import tuf.roledb import tuf.sig import tuf.settings @@ -109,7 +109,7 @@ def _generate_and_write_metadata(rolename, metadata_filename, # Retrieve the roleinfo of 'rolename' to extract the needed metadata # attributes, such as version number, expiration, etc. - roleinfo = tuf.roledb.get_roleinfo(rolename, repository_name) + roleinfo = roledb.get_roleinfo(rolename, repository_name) previous_keyids = roleinfo.get('previous_keyids', []) previous_threshold = roleinfo.get('previous_threshold', 1) signing_keyids = sorted(set(roleinfo['signing_keyids'])) @@ -164,7 +164,7 @@ def _generate_and_write_metadata(rolename, metadata_filename, # Update roledb with the latest delegations info collected during # generate_targets_metadata() - tuf.roledb.update_roleinfo(rolename, roleinfo, + roledb.update_roleinfo(rolename, roleinfo, repository_name=repository_name) @@ -179,16 +179,16 @@ def _generate_and_write_metadata(rolename, metadata_filename, # properly signed). 
current_version = metadata['version'] if increment_version_number: - roleinfo = tuf.roledb.get_roleinfo(rolename, repository_name) + roleinfo = roledb.get_roleinfo(rolename, repository_name) metadata['version'] = metadata['version'] + 1 roleinfo['version'] = roleinfo['version'] + 1 - tuf.roledb.update_roleinfo(rolename, roleinfo, + roledb.update_roleinfo(rolename, roleinfo, repository_name=repository_name) else: logger.debug('Not incrementing ' + repr(rolename) + '\'s version number.') - if rolename in tuf.roledb.TOP_LEVEL_ROLES and not allow_partially_signed: + if rolename in roledb.TOP_LEVEL_ROLES and not allow_partially_signed: # Verify that the top-level 'rolename' is fully signed. Only a delegated # role should not be written to disk without full verification of its # signature(s), since it can only be considered fully signed depending on @@ -226,9 +226,9 @@ def should_write(): else: # Since new metadata cannot be successfully written, restore the current # version number. - roleinfo = tuf.roledb.get_roleinfo(rolename, repository_name) + roleinfo = roledb.get_roleinfo(rolename, repository_name) roleinfo['version'] = current_version - tuf.roledb.update_roleinfo(rolename, roleinfo, + roledb.update_roleinfo(rolename, roleinfo, repository_name=repository_name) # Note that 'signable' is an argument to tuf.UnsignedMetadataError(). @@ -269,7 +269,7 @@ def _metadata_is_partially_loaded(rolename, signable, repository_name): signatures. If 'rolename' is found to be partially loaded, mark it as partially loaded in - its 'tuf.roledb' roleinfo. This function exists to assist in deciding whether + its 'roledb' roleinfo. This function exists to assist in deciding whether a role's version number should be incremented when write() or write_parital() is called. Return True if 'rolename' was partially loaded, False otherwise. 
""" @@ -296,8 +296,8 @@ def _check_role_keys(rolename, repository_name): """ # Extract the total number of public and private keys of 'rolename' from its - # roleinfo in 'tuf.roledb'. - roleinfo = tuf.roledb.get_roleinfo(rolename, repository_name) + # roleinfo in 'roledb'. + roleinfo = roledb.get_roleinfo(rolename, repository_name) total_keyids = len(roleinfo['keyids']) threshold = roleinfo['threshold'] total_signatures = len(roleinfo['signatures']) @@ -415,14 +415,14 @@ def _delete_obsolete_metadata(metadata_directory, snapshot_metadata, logger.debug(repr(metadata_role) + ' does not match' ' supported extension ' + repr(METADATA_EXTENSION)) - if metadata_role in tuf.roledb.TOP_LEVEL_ROLES: + if metadata_role in roledb.TOP_LEVEL_ROLES: logger.debug('Not removing top-level metadata ' + repr(metadata_role)) return - # Delete the metadata file if it does not exist in 'tuf.roledb'. + # Delete the metadata file if it does not exist in 'roledb'. # 'repository_tool.py' might have removed 'metadata_name,' # but its metadata file is not actually deleted yet. Do it now. - if not tuf.roledb.role_exists(metadata_role, repository_name): + if not roledb.role_exists(metadata_role, repository_name): logger.info('Removing outdated metadata: ' + repr(metadata_path)) storage_backend.remove(metadata_path) @@ -508,10 +508,10 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): root_metadata = signable['signed'] tuf.keydb.create_keydb_from_root_metadata(root_metadata, repository_name) - tuf.roledb.create_roledb_from_root_metadata(root_metadata, repository_name) + roledb.create_roledb_from_root_metadata(root_metadata, repository_name) - # Load Root's roleinfo and update 'tuf.roledb'. - roleinfo = tuf.roledb.get_roleinfo('root', repository_name) + # Load Root's roleinfo and update 'roledb'. 
+ roleinfo = roledb.get_roleinfo('root', repository_name) roleinfo['consistent_snapshot'] = root_metadata['consistent_snapshot'] roleinfo['signatures'] = [] for signature in signable['signatures']: @@ -534,7 +534,7 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): _log_warning_if_expires_soon(ROOT_FILENAME, roleinfo['expires'], ROOT_EXPIRES_WARN_SECONDS) - tuf.roledb.update_roleinfo('root', roleinfo, mark_role_as_dirty=False, + roledb.update_roleinfo('root', roleinfo, mark_role_as_dirty=False, repository_name=repository_name) # Ensure the 'consistent_snapshot' field is extracted. @@ -552,8 +552,8 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): for signature in signable['signatures']: repository.timestamp.add_signature(signature, mark_role_as_dirty=False) - # Load Timestamp's roleinfo and update 'tuf.roledb'. - roleinfo = tuf.roledb.get_roleinfo('timestamp', repository_name) + # Load Timestamp's roleinfo and update 'roledb'. + roleinfo = roledb.get_roleinfo('timestamp', repository_name) roleinfo['expires'] = timestamp_metadata['expires'] roleinfo['version'] = timestamp_metadata['version'] @@ -566,7 +566,7 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): _log_warning_if_expires_soon(TIMESTAMP_FILENAME, roleinfo['expires'], TIMESTAMP_EXPIRES_WARN_SECONDS) - tuf.roledb.update_roleinfo('timestamp', roleinfo, mark_role_as_dirty=False, + roledb.update_roleinfo('timestamp', roleinfo, mark_role_as_dirty=False, repository_name=repository_name) except securesystemslib.exceptions.StorageError as error: @@ -598,8 +598,8 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): for signature in signable['signatures']: repository.snapshot.add_signature(signature, mark_role_as_dirty=False) - # Load Snapshot's roleinfo and update 'tuf.roledb'. - roleinfo = tuf.roledb.get_roleinfo('snapshot', repository_name) + # Load Snapshot's roleinfo and update 'roledb'. 
+ roleinfo = roledb.get_roleinfo('snapshot', repository_name) roleinfo['expires'] = snapshot_metadata['expires'] roleinfo['version'] = snapshot_metadata['version'] @@ -612,7 +612,7 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): _log_warning_if_expires_soon(SNAPSHOT_FILENAME, roleinfo['expires'], SNAPSHOT_EXPIRES_WARN_SECONDS) - tuf.roledb.update_roleinfo('snapshot', roleinfo, mark_role_as_dirty=False, + roledb.update_roleinfo('snapshot', roleinfo, mark_role_as_dirty=False, repository_name=repository_name) except securesystemslib.exceptions.StorageError as error: @@ -640,8 +640,8 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): for signature in signable['signatures']: repository.targets.add_signature(signature, mark_role_as_dirty=False) - # Update 'targets.json' in 'tuf.roledb.py' - roleinfo = tuf.roledb.get_roleinfo('targets', repository_name) + # Update 'targets.json' in 'roledb' + roleinfo = roledb.get_roleinfo('targets', repository_name) roleinfo['paths'] = targets_metadata['targets'] roleinfo['version'] = targets_metadata['version'] roleinfo['expires'] = targets_metadata['expires'] @@ -656,7 +656,7 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): _log_warning_if_expires_soon(TARGETS_FILENAME, roleinfo['expires'], TARGETS_EXPIRES_WARN_SECONDS) - tuf.roledb.update_roleinfo('targets', roleinfo, mark_role_as_dirty=False, + roledb.update_roleinfo('targets', roleinfo, mark_role_as_dirty=False, repository_name=repository_name) # Add the keys specified in the delegations field of the Targets role. @@ -872,7 +872,7 @@ def get_delegated_roles_metadata_filenames(metadata_directory, continue # Skip top-level roles, only interested in delegated roles. 
- if metadata_name in tuf.roledb.TOP_LEVEL_ROLES: + if metadata_name in roledb.TOP_LEVEL_ROLES: continue # Prevent reloading duplicate versions if consistent_snapshot is True @@ -1039,7 +1039,7 @@ def get_metadata_versioninfo(rolename, repository_name): # types, and that all dict keys are properly named. formats.ROLENAME_SCHEMA.check_match(rolename) - roleinfo = tuf.roledb.get_roleinfo(rolename, repository_name) + roleinfo = roledb.get_roleinfo(rolename, repository_name) versioninfo = {'version': roleinfo['version']} return versioninfo @@ -1195,7 +1195,7 @@ def generate_root_metadata(version, expiration_date, consistent_snapshot, repository_name='default'): """ - Create the root metadata. 'tuf.roledb.py' and 'tuf.keydb.py' + Create the root metadata. 'roledb' and 'tuf.keydb.py' are read and the information returned by these modules is used to generate the root metadata object. @@ -1224,10 +1224,10 @@ def generate_root_metadata(version, expiration_date, consistent_snapshot, securesystemslib.exceptions.Error, if an error is encountered while generating the root metadata object (e.g., a required top-level role not - found in 'tuf.roledb'.) + found in 'roledb'.) - The contents of 'tuf.keydb.py' and 'tuf.roledb.py' are read. + The contents of 'tuf.keydb.py' and 'roledb' are read. A root metadata object, conformant to 'tuf.formats.ROOT_SCHEMA'. @@ -1252,22 +1252,22 @@ def generate_root_metadata(version, expiration_date, consistent_snapshot, # Extract the role, threshold, and keyid information of the top-level roles, # which Root stores in its metadata. The necessary role metadata is generated # from this information. - for rolename in tuf.roledb.TOP_LEVEL_ROLES: + for rolename in roledb.TOP_LEVEL_ROLES: - # If a top-level role is missing from 'tuf.roledb.py', raise an exception. - if not tuf.roledb.role_exists(rolename, repository_name): + # If a top-level role is missing from 'roledb', raise an exception. 
+ if not roledb.role_exists(rolename, repository_name): raise securesystemslib.exceptions.Error(repr(rolename) + ' not in' - ' "tuf.roledb".') + ' "roledb".') # Collect keys from all roles in a list - keyids = tuf.roledb.get_role_keyids(rolename, repository_name) + keyids = roledb.get_role_keyids(rolename, repository_name) for keyid in keyids: key = tuf.keydb.get_key(keyid, repository_name=repository_name) keylist.append(key) # Generate the authentication information Root establishes for each # top-level role. - role_threshold = tuf.roledb.get_role_threshold(rolename, repository_name) + role_threshold = roledb.get_role_threshold(rolename, repository_name) role_metadata = formats.build_dict_conforming_to_schema( formats.ROLE_SCHEMA, keyids=keyids, @@ -1406,9 +1406,9 @@ def generate_targets_metadata(targets_directory, target_files, version, delegations_keys = [] # Update 'keyids' and 'threshold' for each delegated role for role in delegations['roles']: - role['keyids'] = tuf.roledb.get_role_keyids(role['name'], + role['keyids'] = roledb.get_role_keyids(role['name'], repository_name) - role['threshold'] = tuf.roledb.get_role_threshold(role['name'], + role['threshold'] = roledb.get_role_threshold(role['name'], repository_name) # Collect all delegations keys for generating the delegations keydict @@ -1666,8 +1666,8 @@ def generate_snapshot_metadata(metadata_directory, version, expiration_date, # in the roledb are included in the Snapshot metadata. Since the # snapshot and timestamp roles are not listed in snapshot.json, do not # list these roles found in the metadata directory. 
- if tuf.roledb.role_exists(rolename, repository_name) and \ - rolename not in tuf.roledb.TOP_LEVEL_ROLES: + if roledb.role_exists(rolename, repository_name) and \ + rolename not in roledb.TOP_LEVEL_ROLES: length, hashes = _get_hashes_and_length_if_needed(use_length, use_hashes, os.path.join(metadata_directory, metadata_filename), storage_backend) @@ -2034,9 +2034,9 @@ def _log_status_of_top_level_roles(targets_directory, metadata_directory, # Do the top-level roles contain a valid threshold of signatures? Top-level # metadata is verified in Root -> Targets -> Snapshot -> Timestamp order. # Verify the metadata of the Root role. - dirty_rolenames = tuf.roledb.get_dirty_roles(repository_name) + dirty_rolenames = roledb.get_dirty_roles(repository_name) - root_roleinfo = tuf.roledb.get_roleinfo('root', repository_name) + root_roleinfo = roledb.get_roleinfo('root', repository_name) root_is_dirty = None if 'root' in dirty_rolenames: root_is_dirty = True @@ -2058,12 +2058,12 @@ def _log_status_of_top_level_roles(targets_directory, metadata_directory, return finally: - tuf.roledb.unmark_dirty(['root'], repository_name) - tuf.roledb.update_roleinfo('root', root_roleinfo, + roledb.unmark_dirty(['root'], repository_name) + roledb.update_roleinfo('root', root_roleinfo, mark_role_as_dirty=root_is_dirty, repository_name=repository_name) # Verify the metadata of the Targets role. 
- targets_roleinfo = tuf.roledb.get_roleinfo('targets', repository_name) + targets_roleinfo = roledb.get_roleinfo('targets', repository_name) targets_is_dirty = None if 'targets' in dirty_rolenames: targets_is_dirty = True @@ -2083,12 +2083,12 @@ def _log_status_of_top_level_roles(targets_directory, metadata_directory, return finally: - tuf.roledb.unmark_dirty(['targets'], repository_name) - tuf.roledb.update_roleinfo('targets', targets_roleinfo, + roledb.unmark_dirty(['targets'], repository_name) + roledb.update_roleinfo('targets', targets_roleinfo, mark_role_as_dirty=targets_is_dirty, repository_name=repository_name) # Verify the metadata of the snapshot role. - snapshot_roleinfo = tuf.roledb.get_roleinfo('snapshot', repository_name) + snapshot_roleinfo = roledb.get_roleinfo('snapshot', repository_name) snapshot_is_dirty = None if 'snapshot' in dirty_rolenames: snapshot_is_dirty = True @@ -2109,12 +2109,12 @@ def _log_status_of_top_level_roles(targets_directory, metadata_directory, return finally: - tuf.roledb.unmark_dirty(['snapshot'], repository_name) - tuf.roledb.update_roleinfo('snapshot', snapshot_roleinfo, + roledb.unmark_dirty(['snapshot'], repository_name) + roledb.update_roleinfo('snapshot', snapshot_roleinfo, mark_role_as_dirty=snapshot_is_dirty, repository_name=repository_name) # Verify the metadata of the Timestamp role. 
- timestamp_roleinfo = tuf.roledb.get_roleinfo('timestamp', repository_name) + timestamp_roleinfo = roledb.get_roleinfo('timestamp', repository_name) timestamp_is_dirty = None if 'timestamp' in dirty_rolenames: timestamp_is_dirty = True @@ -2135,8 +2135,8 @@ def _log_status_of_top_level_roles(targets_directory, metadata_directory, return finally: - tuf.roledb.unmark_dirty(['timestamp'], repository_name) - tuf.roledb.update_roleinfo('timestamp', timestamp_roleinfo, + roledb.unmark_dirty(['timestamp'], repository_name) + roledb.update_roleinfo('timestamp', timestamp_roleinfo, mark_role_as_dirty=timestamp_is_dirty, repository_name=repository_name) diff --git a/tuf/repository_tool.py b/tuf/repository_tool.py index 498652ab18..6dca4790c2 100755 --- a/tuf/repository_tool.py +++ b/tuf/repository_tool.py @@ -46,7 +46,7 @@ from tuf import exceptions from tuf import formats from tuf import log -import tuf.roledb +from tuf import roledb import tuf.sig import tuf.repository_lib as repo_lib @@ -250,7 +250,7 @@ def __init__(self, repository_directory, metadata_directory, self._use_snapshot_hashes = use_snapshot_hashes try: - tuf.roledb.create_roledb(repository_name) + roledb.create_roledb(repository_name) tuf.keydb.create_keydb(repository_name) except securesystemslib.exceptions.InvalidNameError: @@ -314,7 +314,7 @@ def writeall(self, consistent_snapshot=False, use_existing_fileinfo=False): # formatted. securesystemslib.formats.BOOLEAN_SCHEMA.check_match(consistent_snapshot) - # At this point, tuf.keydb and tuf.roledb must be fully populated, + # At this point, tuf.keydb and roledb must be fully populated, # otherwise writeall() throws a 'tuf.exceptions.UnsignedMetadataError' for # the top-level roles. exception if any of the top-level roles are missing # signatures, keys, etc. 
@@ -328,12 +328,12 @@ def writeall(self, consistent_snapshot=False, use_existing_fileinfo=False): repo_lib.TIMESTAMP_FILENAME)} snapshot_signable = None - dirty_rolenames = tuf.roledb.get_dirty_roles(self._repository_name) + dirty_rolenames = roledb.get_dirty_roles(self._repository_name) for dirty_rolename in dirty_rolenames: # Ignore top-level roles, they will be generated later in this method. - if dirty_rolename in tuf.roledb.TOP_LEVEL_ROLES: + if dirty_rolename in roledb.TOP_LEVEL_ROLES: continue dirty_filename = os.path.join(self._metadata_directory, @@ -349,7 +349,7 @@ def writeall(self, consistent_snapshot=False, use_existing_fileinfo=False): # metadata file. _generate_and_write_metadata() raises a # 'securesystemslib.exceptions.Error' exception if the metadata cannot be # written. - root_roleinfo = tuf.roledb.get_roleinfo('root', self._repository_name) + root_roleinfo = roledb.get_roleinfo('root', self._repository_name) old_consistent_snapshot = root_roleinfo['consistent_snapshot'] if 'root' in dirty_rolenames or consistent_snapshot != old_consistent_snapshot: repo_lib._generate_and_write_metadata('root', filenames['root'], @@ -384,9 +384,9 @@ def writeall(self, consistent_snapshot=False, use_existing_fileinfo=False): use_timestamp_length=self._use_timestamp_length, use_timestamp_hashes=self._use_timestamp_hashes) - tuf.roledb.unmark_dirty(dirty_rolenames, self._repository_name) + roledb.unmark_dirty(dirty_rolenames, self._repository_name) - # Delete the metadata of roles no longer in 'tuf.roledb'. Obsolete roles + # Delete the metadata of roles no longer in 'roledb'. Obsolete roles # may have been revoked and should no longer have their metadata files # available on disk, otherwise loading a repository may unintentionally # load them. @@ -460,7 +460,7 @@ def write(self, rolename, consistent_snapshot=False, increment_version_number=Tr use_existing_fileinfo=use_existing_fileinfo) # Ensure 'rolename' is no longer marked as dirty after the successful write(). 
- tuf.roledb.unmark_dirty([rolename], self._repository_name) + roledb.unmark_dirty([rolename], self._repository_name) @@ -536,7 +536,7 @@ def dirty_roles(self): None. """ - logger.info('Dirty roles: ' + str(tuf.roledb.get_dirty_roles(self._repository_name))) + logger.info('Dirty roles: ' + str(roledb.get_dirty_roles(self._repository_name))) @@ -560,7 +560,7 @@ def mark_dirty(self, roles): None. """ - tuf.roledb.mark_dirty(roles, self._repository_name) + roledb.mark_dirty(roles, self._repository_name) @@ -584,7 +584,7 @@ def unmark_dirty(self, roles): None. """ - tuf.roledb.unmark_dirty(roles, self._repository_name) + roledb.unmark_dirty(roles, self._repository_name) @@ -721,8 +721,7 @@ def add_verification_key(self, key, expires=None): expired. - The role's entries in 'tuf.keydb.py' and 'tuf.roledb.py' are - updated. + The role's entries in 'tuf.keydb.py' and 'roledb' are updated. None. @@ -791,19 +790,19 @@ def add_verification_key(self, key, expires=None): logger.warning('Adding a verification key that has already been used.') keyid = key['keyid'] - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) + roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) # Save the keyids that are being replaced since certain roles will need to # re-sign metadata with these keys (e.g., root). Use list() to make a copy # of roleinfo['keyids'] to ensure we're modifying distinct lists. previous_keyids = list(roleinfo['keyids']) - # Add 'key' to the role's entry in 'tuf.roledb.py', and avoid duplicates. + # Add 'key' to the role's entry in 'roledb', and avoid duplicates. if keyid not in roleinfo['keyids']: roleinfo['keyids'].append(keyid) roleinfo['previous_keyids'] = previous_keyids - tuf.roledb.update_roleinfo(self._rolename, roleinfo, + roledb.update_roleinfo(self._rolename, roleinfo, repository_name=self._repository_name) @@ -833,7 +832,7 @@ def remove_verification_key(self, key): previously added. - Updates the role's 'tuf.roledb.py' entry. 
+ Updates the role's 'roledb' entry. None. @@ -846,12 +845,12 @@ def remove_verification_key(self, key): securesystemslib.formats.ANYKEY_SCHEMA.check_match(key) keyid = key['keyid'] - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) + roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) if keyid in roleinfo['keyids']: roleinfo['keyids'].remove(keyid) - tuf.roledb.update_roleinfo(self._rolename, roleinfo, + roledb.update_roleinfo(self._rolename, roleinfo, repository_name=self._repository_name) else: @@ -883,7 +882,7 @@ def load_signing_key(self, key): securesystemslib.exceptions.Error, if the private key is not found in 'key'. - Updates the role's 'tuf.keydb.py' and 'tuf.roledb.py' entries. + Updates the role's 'tuf.keydb.py' and 'roledb' entries. None. @@ -909,12 +908,12 @@ def load_signing_key(self, key): tuf.keydb.remove_key(key['keyid'], self._repository_name) tuf.keydb.add_key(key, repository_name=self._repository_name) - # Update the role's 'signing_keys' field in 'tuf.roledb.py'. - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) + # Update the role's 'signing_keys' field in 'roledb'. + roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) if key['keyid'] not in roleinfo['signing_keyids']: roleinfo['signing_keyids'].append(key['keyid']) - tuf.roledb.update_roleinfo(self.rolename, roleinfo, + roledb.update_roleinfo(self.rolename, roleinfo, repository_name=self._repository_name) @@ -943,7 +942,7 @@ def unload_signing_key(self, key): previously loaded. - Updates the signing keys of the role in 'tuf.roledb.py'. + Updates the signing keys of the role in 'roledb'. None. @@ -955,8 +954,8 @@ def unload_signing_key(self, key): # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. securesystemslib.formats.ANYKEY_SCHEMA.check_match(key) - # Update the role's 'signing_keys' field in 'tuf.roledb.py'. 
- roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) + # Update the role's 'signing_keys' field in 'roledb'. + roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) # TODO: Should we consider removing keys from keydb that are no longer # associated with any roles? There could be many no-longer-used keys @@ -964,7 +963,7 @@ def unload_signing_key(self, key): if key['keyid'] in roleinfo['signing_keyids']: roleinfo['signing_keyids'].remove(key['keyid']) - tuf.roledb.update_roleinfo(self.rolename, roleinfo, + roledb.update_roleinfo(self.rolename, roleinfo, repository_name=self._repository_name) else: @@ -1004,7 +1003,7 @@ def add_signature(self, signature, mark_role_as_dirty=True): Adds 'signature', if not already added, to the role's 'signatures' field - in 'tuf.roledb.py'. + in 'roledb'. None. @@ -1017,7 +1016,7 @@ def add_signature(self, signature, mark_role_as_dirty=True): securesystemslib.formats.SIGNATURE_SCHEMA.check_match(signature) securesystemslib.formats.BOOLEAN_SCHEMA.check_match(mark_role_as_dirty) - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) + roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) # Ensure the roleinfo contains a 'signatures' field. if 'signatures' not in roleinfo: @@ -1027,7 +1026,7 @@ def add_signature(self, signature, mark_role_as_dirty=True): # added. if signature not in roleinfo['signatures']: roleinfo['signatures'].append(signature) - tuf.roledb.update_roleinfo(self.rolename, roleinfo, mark_role_as_dirty, + roledb.update_roleinfo(self.rolename, roleinfo, mark_role_as_dirty, repository_name=self._repository_name) else: @@ -1058,7 +1057,7 @@ def remove_signature(self, signature): added to this role. - Updates the 'signatures' field of the role in 'tuf.roledb.py'. + Updates the 'signatures' field of the role in 'roledb'. None. 
@@ -1070,12 +1069,12 @@ def remove_signature(self, signature): # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. securesystemslib.formats.SIGNATURE_SCHEMA.check_match(signature) - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) + roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) if signature in roleinfo['signatures']: roleinfo['signatures'].remove(signature) - tuf.roledb.update_roleinfo(self.rolename, roleinfo, + roledb.update_roleinfo(self.rolename, roleinfo, repository_name=self._repository_name) else: @@ -1106,7 +1105,7 @@ def signatures(self): 'securesystemslib.formats.SIGNATURES_SCHEMA'. """ - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) + roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) signatures = roleinfo['signatures'] return signatures @@ -1134,7 +1133,7 @@ def keys(self): A list of the role's keyids (i.e., keyids of the keys). """ - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) + roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) keyids = roleinfo['keyids'] return keyids @@ -1187,7 +1186,7 @@ def version(self): 'tuf.formats.VERSION_SCHEMA'. """ - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) + roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) version = roleinfo['version'] return version @@ -1224,7 +1223,7 @@ def version(self, version): Modifies the 'version' attribute of the Repository object and updates the - role's version in 'tuf.roledb.py'. + role's version in 'roledb'. None. @@ -1236,10 +1235,10 @@ def version(self, version): # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. 
formats.METADATAVERSION_SCHEMA.check_match(version) - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) + roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) roleinfo['version'] = version - tuf.roledb.update_roleinfo(self._rolename, roleinfo, + roledb.update_roleinfo(self._rolename, roleinfo, repository_name=self._repository_name) @@ -1265,7 +1264,7 @@ def threshold(self): 'tuf.formats.THRESHOLD_SCHEMA'. """ - roleinfo = tuf.roledb.get_roleinfo(self._rolename, self._repository_name) + roleinfo = roledb.get_roleinfo(self._rolename, self._repository_name) threshold = roleinfo['threshold'] return threshold @@ -1296,7 +1295,7 @@ def threshold(self, threshold): Modifies the threshold attribute of the Repository object and updates - the roles threshold in 'tuf.roledb.py'. + the roles threshold in 'roledb'. None. @@ -1308,11 +1307,11 @@ def threshold(self, threshold): # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. formats.THRESHOLD_SCHEMA.check_match(threshold) - roleinfo = tuf.roledb.get_roleinfo(self._rolename, self._repository_name) + roleinfo = roledb.get_roleinfo(self._rolename, self._repository_name) roleinfo['previous_threshold'] = roleinfo['threshold'] roleinfo['threshold'] = threshold - tuf.roledb.update_roleinfo(self._rolename, roleinfo, + roledb.update_roleinfo(self._rolename, roleinfo, repository_name=self._repository_name) @@ -1336,7 +1335,7 @@ def expiration(self): The role's expiration datetime, a datetime.datetime() object. """ - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) + roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) expires = roleinfo['expires'] return formats.expiry_string_to_datetime(expires) @@ -1392,12 +1391,12 @@ def expiration(self, datetime_object): raise securesystemslib.exceptions.Error(repr(self.rolename) + ' has' ' already expired.') - # Update the role's 'expires' entry in 'tuf.roledb.py'. 
- roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) + # Update the role's 'expires' entry in 'roledb'. + roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) expires = datetime_object.isoformat() + 'Z' roleinfo['expires'] = expires - tuf.roledb.update_roleinfo(self.rolename, roleinfo, + roledb.update_roleinfo(self.rolename, roleinfo, repository_name=self._repository_name) @@ -1426,7 +1425,7 @@ def signing_keys(self): 'securesystemslib.formats.KEYIDS_SCHEMA'. """ - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) + roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) signing_keyids = roleinfo['signing_keyids'] return signing_keyids @@ -1462,7 +1461,7 @@ class Root(Metadata): tuf.exceptions.FormatError, if the argument is improperly formatted. - A 'root' role is added to 'tuf.roledb.py'. + A 'root' role is added to 'roledb'. None. @@ -1489,7 +1488,7 @@ def __init__(self, repository_name): 'signatures': [], 'version': 0, 'consistent_snapshot': False, 'expires': expiration, 'partial_loaded': False} try: - tuf.roledb.add_role(self._rolename, roleinfo, self._repository_name) + roledb.add_role(self._rolename, roleinfo, self._repository_name) except exceptions.RoleAlreadyExistsError: pass @@ -1530,7 +1529,7 @@ class Timestamp(Metadata): tuf.exceptions.FormatError, if the argument is improperly formatted. - A 'timestamp' role is added to 'tuf.roledb.py'. + A 'timestamp' role is added to 'roledb'. None. @@ -1558,7 +1557,7 @@ def __init__(self, repository_name): 'partial_loaded': False} try: - tuf.roledb.add_role(self.rolename, roleinfo, self._repository_name) + roledb.add_role(self.rolename, roleinfo, self._repository_name) except exceptions.RoleAlreadyExistsError: pass @@ -1593,7 +1592,7 @@ class Snapshot(Metadata): tuf.exceptions.FormatError, if the argument is improperly formatted. - A 'snapshot' role is added to 'tuf.roledb.py'. + A 'snapshot' role is added to 'roledb'. None. 
@@ -1621,7 +1620,7 @@ def __init__(self, repository_name): 'partial_loaded': False} try: - tuf.roledb.add_role(self._rolename, roleinfo, self._repository_name) + roledb.add_role(self._rolename, roleinfo, self._repository_name) except exceptions.RoleAlreadyExistsError: pass @@ -1678,7 +1677,7 @@ class Targets(Metadata): formatted. - Modifies the roleinfo of the targets role in 'tuf.roledb', or creates + Modifies the roleinfo of the targets role in 'roledb', or creates a default one named 'targets'. @@ -1727,9 +1726,9 @@ def __init__(self, targets_directory, rolename='targets', roleinfo=None, 'partial_loaded': False, 'delegations': {'keys': {}, 'roles': []}} - # Add the new role to the 'tuf.roledb'. + # Add the new role to the 'roledb'. try: - tuf.roledb.add_role(self.rolename, roleinfo, self._repository_name) + roledb.add_role(self.rolename, roleinfo, self._repository_name) except exceptions.RoleAlreadyExistsError: pass @@ -1756,7 +1755,7 @@ def __call__(self, rolename): delegated by this Targets object. - Modifies the roleinfo of the targets role in 'tuf.roledb'. + Modifies the roleinfo of the targets role in 'roledb'. The Targets object of 'rolename'. @@ -1826,7 +1825,7 @@ def add_delegated_role(self, rolename, targets_object): def remove_delegated_role(self, rolename): """ Remove 'rolename' from this Targets object's list of delegated roles. - This method does not update tuf.roledb and others. + This method does not update roledb and others. rolename: @@ -1883,7 +1882,7 @@ def target_files(self): None. """ - target_files = tuf.roledb.get_roleinfo(self._rolename, + target_files = roledb.get_roleinfo(self._rolename, self._repository_name)['paths'] return target_files @@ -1940,7 +1939,7 @@ def add_paths(self, paths, child_rolename): # Ensure that 'child_rolename' exists, otherwise it will not have an entry # in the parent role's delegations field. 
- if not tuf.roledb.role_exists(child_rolename, self._repository_name): + if not roledb.role_exists(child_rolename, self._repository_name): raise securesystemslib.exceptions.Error(repr(child_rolename) + ' does' ' not exist.') @@ -1953,7 +1952,7 @@ def add_paths(self, paths, child_rolename): # Get the current role's roleinfo, so that its delegations field can be # updated. - roleinfo = tuf.roledb.get_roleinfo(self._rolename, self._repository_name) + roleinfo = roledb.get_roleinfo(self._rolename, self._repository_name) # Update the delegated paths of 'child_rolename' to add relative paths. for role in roleinfo['delegations']['roles']: @@ -1967,7 +1966,7 @@ def add_paths(self, paths, child_rolename): else: logger.debug(repr(role['name']) + ' does not match child rolename.') - tuf.roledb.update_roleinfo(self._rolename, roleinfo, + roledb.update_roleinfo(self._rolename, roleinfo, repository_name=self._repository_name) @@ -2014,7 +2013,7 @@ def add_target(self, filepath, custom=None, fileinfo=None): Adds 'filepath' to this role's list of targets. This role's - 'tuf.roledb.py' entry is also updated. + 'roledb' entry is also updated. None. @@ -2051,8 +2050,8 @@ def add_target(self, filepath, custom=None, fileinfo=None): # later calls to write() will fail. self._check_path(filepath) - # Update the role's 'tuf.roledb.py' entry and avoid duplicates. - roleinfo = tuf.roledb.get_roleinfo(self._rolename, self._repository_name) + # Update the role's 'roledb' entry and avoid duplicates. 
+ roleinfo = roledb.get_roleinfo(self._rolename, self._repository_name) if filepath not in roleinfo['paths']: logger.debug('Adding new target: ' + repr(filepath)) @@ -2065,7 +2064,7 @@ def add_target(self, filepath, custom=None, fileinfo=None): else: roleinfo['paths'].update({filepath: {'custom': custom}}) - tuf.roledb.update_roleinfo(self._rolename, roleinfo, + roledb.update_roleinfo(self._rolename, roleinfo, repository_name=self._repository_name) @@ -2115,8 +2114,8 @@ def add_targets(self, list_of_targets): for target in list_of_targets: self._check_path(target) - # Update this Targets 'tuf.roledb.py' entry. - roleinfo = tuf.roledb.get_roleinfo(self._rolename, self._repository_name) + # Update this Targets 'roledb' entry. + roleinfo = roledb.get_roleinfo(self._rolename, self._repository_name) for relative_target in list_of_targets: if relative_target not in roleinfo['paths']: logger.debug('Adding new target: ' + repr(relative_target)) @@ -2124,7 +2123,7 @@ def add_targets(self, list_of_targets): logger.debug('Replacing target: ' + repr(relative_target)) roleinfo['paths'].update({relative_target: {}}) - tuf.roledb.update_roleinfo(self.rolename, roleinfo, + roledb.update_roleinfo(self.rolename, roleinfo, repository_name=self._repository_name) @@ -2152,7 +2151,7 @@ def remove_target(self, filepath): repository's targets directory, or not found. - Modifies this Targets 'tuf.roledb.py' entry. + Modifies this Targets 'roledb' entry. None. @@ -2165,10 +2164,10 @@ def remove_target(self, filepath): formats.RELPATH_SCHEMA.check_match(filepath) # Remove 'relative_filepath', if found, and update this Targets roleinfo. 
- fileinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) + fileinfo = roledb.get_roleinfo(self.rolename, self._repository_name) if filepath in fileinfo['paths']: del fileinfo['paths'][filepath] - tuf.roledb.update_roleinfo(self.rolename, fileinfo, + roledb.update_roleinfo(self.rolename, fileinfo, repository_name=self._repository_name) else: @@ -2192,16 +2191,16 @@ def clear_targets(self): None. - Modifies this Targets' 'tuf.roledb.py' entry. + Modifies this Targets' 'roledb' entry. None. """ - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) + roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) roleinfo['paths'] = {} - tuf.roledb.update_roleinfo(self.rolename, roleinfo, + roledb.update_roleinfo(self.rolename, roleinfo, repository_name=self._repository_name) @@ -2229,7 +2228,7 @@ def get_delegated_rolenames(self): A list of rolenames. """ - return tuf.roledb.get_delegated_rolenames(self.rolename, self._repository_name) + return roledb.get_delegated_rolenames(self.rolename, self._repository_name) @@ -2267,13 +2266,13 @@ def _update_roledb_delegations(self, keydict, delegations_roleinfo): roles in delegations_roleinfo """ - current_roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) + current_roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) current_roleinfo['delegations']['keys'].update(keydict) for roleinfo in delegations_roleinfo: current_roleinfo['delegations']['roles'].append(roleinfo) - tuf.roledb.update_roleinfo(self.rolename, current_roleinfo, + roledb.update_roleinfo(self.rolename, current_roleinfo, repository_name=self._repository_name) @@ -2353,7 +2352,7 @@ def delegate(self, rolename, public_keys, paths, threshold=1, A new Target object is created for 'rolename' that is accessible to the caller (i.e., targets.). The 'tuf.keydb.py' and - 'tuf.roledb.py' stores are updated with 'public_keys'. + 'roledb' stores are updated with 'public_keys'. None. 
@@ -2446,7 +2445,7 @@ def revoke(self, rolename): Revoke this Targets' 'rolename' delegation. Its 'rolename' attribute is deleted, including the entries in its 'delegations' field and in - 'tuf.roledb'. + 'roledb'. Actual metadata files are not updated, only when repository.write() or repository.write() is called. @@ -2465,7 +2464,7 @@ def revoke(self, rolename): formatted. - The delegations dictionary of 'rolename' is modified, and its 'tuf.roledb' + The delegations dictionary of 'rolename' is modified, and its 'roledb' entry is updated. This Targets' 'rolename' delegation attribute is also deleted. @@ -2480,18 +2479,18 @@ def revoke(self, rolename): formats.ROLENAME_SCHEMA.check_match(rolename) # Remove 'rolename' from this Target's delegations dict. - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) + roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) for role in roleinfo['delegations']['roles']: if role['name'] == rolename: roleinfo['delegations']['roles'].remove(role) - tuf.roledb.update_roleinfo(self.rolename, roleinfo, + roledb.update_roleinfo(self.rolename, roleinfo, repository_name=self._repository_name) - # Remove 'rolename' from 'tuf.roledb.py'. + # Remove 'rolename' from 'roledb'. try: - tuf.roledb.remove_role(rolename, self._repository_name) + roledb.remove_role(rolename, self._repository_name) # Remove the rolename delegation from the current role. For example, the # 'django' role is removed from repository.targets('django'). del self._delegated_roles[rolename] @@ -2806,7 +2805,7 @@ def delegations(self): tuf.exceptions.UnknownRoleError, if this Targets' rolename - does not exist in 'tuf.roledb'. + does not exist in 'roledb'. None. @@ -3085,7 +3084,7 @@ def load_repository(repository_directory, repository_name='default', # Store the delegations in the form of delegated-delegating role tuples, # starting from the top-level targets: # [('role1', 'targets'), ('role2', 'targets'), ... 
] - roleinfo = tuf.roledb.get_roleinfo('targets', repository_name) + roleinfo = roledb.get_roleinfo('targets', repository_name) for role in roleinfo['delegations']['roles']: delegations.append((role, 'targets')) diff --git a/tuf/scripts/repo.py b/tuf/scripts/repo.py index c1bef95f8e..3b1c279736 100755 --- a/tuf/scripts/repo.py +++ b/tuf/scripts/repo.py @@ -151,6 +151,7 @@ from tuf import exceptions from tuf import formats from tuf import log +from tuf import roledb import tuf.repository_tool as repo_tool # 'pip install securesystemslib[crypto,pynacl]' is required for the CLI, @@ -320,7 +321,7 @@ def delegate(parsed_arguments): repository.snapshot.load_signing_key(snapshot_private) repository.timestamp.load_signing_key(timestamp_private) - consistent_snapshot = tuf.roledb.get_roleinfo('root', + consistent_snapshot = roledb.get_roleinfo('root', repository._repository_name)['consistent_snapshot'] repository.writeall(consistent_snapshot=consistent_snapshot) @@ -365,7 +366,7 @@ def revoke(parsed_arguments): repository.snapshot.load_signing_key(snapshot_private) repository.timestamp.load_signing_key(timestamp_private) - consistent_snapshot = tuf.roledb.get_roleinfo('root', + consistent_snapshot = roledb.get_roleinfo('root', repository._repository_name)['consistent_snapshot'] repository.writeall(consistent_snapshot=consistent_snapshot) @@ -533,7 +534,7 @@ def add_verification_key(parsed_arguments): else: repository.timestamp.add_verification_key(imported_pubkey) - consistent_snapshot = tuf.roledb.get_roleinfo('root', + consistent_snapshot = roledb.get_roleinfo('root', repository._repository_name)['consistent_snapshot'] repository.write('root', consistent_snapshot=consistent_snapshot, increment_version_number=False) @@ -578,7 +579,7 @@ def remove_verification_key(parsed_arguments): except securesystemslib.exceptions.Error: print(repr(keypath) + ' is not a trusted key. 
Skipping.') - consistent_snapshot = tuf.roledb.get_roleinfo('root', + consistent_snapshot = roledb.get_roleinfo('root', repository._repository_name)['consistent_snapshot'] repository.write('root', consistent_snapshot=consistent_snapshot, increment_version_number=False) @@ -592,7 +593,7 @@ def sign_role(parsed_arguments): repository = repo_tool.load_repository( os.path.join(parsed_arguments.path, REPO_DIR)) - consistent_snapshot = tuf.roledb.get_roleinfo('root', + consistent_snapshot = roledb.get_roleinfo('root', repository._repository_name)['consistent_snapshot'] for keypath in parsed_arguments.sign: @@ -614,7 +615,7 @@ def sign_role(parsed_arguments): else: # TODO: repository_tool.py will be refactored to clean up the following # code, which adds and signs for a non-existent role. - if not tuf.roledb.role_exists(parsed_arguments.role): + if not roledb.role_exists(parsed_arguments.role): # Load the private key keydb and set the roleinfo in roledb so that # metadata can be written with repository.write(). 
@@ -635,7 +636,7 @@ def sign_role(parsed_arguments): 'signatures': [], 'version': 1, 'expires': expiration, 'delegations': {'keys': {}, 'roles': []}} - tuf.roledb.add_role(parsed_arguments.role, roleinfo, + roledb.add_role(parsed_arguments.role, roleinfo, repository_name=repository._repository_name) # Generate the Targets object of --role, and add it to the top-level @@ -716,7 +717,7 @@ def add_target_to_repo(parsed_arguments, target_path, repo_targets_path, shutil.copy(target_path, os.path.join(repo_targets_path, target_path)) - roleinfo = tuf.roledb.get_roleinfo( + roleinfo = roledb.get_roleinfo( parsed_arguments.role, repository_name=repository._repository_name) # It is assumed we have a delegated role, and that the caller has made @@ -729,7 +730,7 @@ def add_target_to_repo(parsed_arguments, target_path, repo_targets_path, logger.debug('Replacing target: ' + repr(target_path)) roleinfo['paths'].update({target_path: custom}) - tuf.roledb.update_roleinfo(parsed_arguments.role, roleinfo, + roledb.update_roleinfo(parsed_arguments.role, roleinfo, mark_role_as_dirty=True, repository_name=repository._repository_name) @@ -742,10 +743,10 @@ def remove_target_files_from_metadata(parsed_arguments, repository): ' It must be "targets" or a delegated rolename.') else: - # NOTE: The following approach of using tuf.roledb to update the target + # NOTE: The following approach of using roledb to update the target # files will be modified in the future when the repository tool's API is # refactored. 
- roleinfo = tuf.roledb.get_roleinfo( + roleinfo = roledb.get_roleinfo( parsed_arguments.role, repository._repository_name) for glob_pattern in parsed_arguments.remove: @@ -758,7 +759,7 @@ def remove_target_files_from_metadata(parsed_arguments, repository): ' given path/glob pattern ' + repr(glob_pattern)) continue - tuf.roledb.update_roleinfo( + roledb.update_roleinfo( parsed_arguments.role, roleinfo, mark_role_as_dirty=True, repository_name=repository._repository_name) @@ -783,7 +784,7 @@ def add_targets(parsed_arguments): add_target_to_repo(parsed_arguments, target_path, repo_targets_path, repository) - consistent_snapshot = tuf.roledb.get_roleinfo('root', + consistent_snapshot = roledb.get_roleinfo('root', repository._repository_name)['consistent_snapshot'] if parsed_arguments.role == 'targets': @@ -853,7 +854,7 @@ def remove_targets(parsed_arguments): repository.snapshot.load_signing_key(snapshot_private) repository.timestamp.load_signing_key(timestamp_private) - consistent_snapshot = tuf.roledb.get_roleinfo('root', + consistent_snapshot = roledb.get_roleinfo('root', repository._repository_name)['consistent_snapshot'] repository.writeall(consistent_snapshot=consistent_snapshot) diff --git a/tuf/sig.py b/tuf/sig.py index 221bdfa928..8a69bae7c0 100755 --- a/tuf/sig.py +++ b/tuf/sig.py @@ -53,8 +53,8 @@ import tuf from tuf import exceptions from tuf import formats +from tuf import roledb import tuf.keydb -import tuf.roledb import securesystemslib @@ -71,14 +71,14 @@ def get_signature_status(signable, role=None, repository_name='default', * bad -- Invalid signature * good -- Valid signature from key that is available in 'tuf.keydb', and is - authorized for the passed role as per 'tuf.roledb' (authorization may be + authorized for the passed role as per 'roledb' (authorization may be overwritten by passed 'keyids'). * unknown -- Signature from key that is not available in 'tuf.keydb', or if 'role' is None. 
* unknown signing schemes -- Signature from key with unknown signing scheme. * untrusted -- Valid signature from key that is available in 'tuf.keydb', - but is not trusted for the passed role as per 'tuf.roledb' or the passed + but is not trusted for the passed role as per 'roledb' or the passed 'keyids'. NOTE: The result may contain duplicate keyids or keyids that reference the @@ -97,7 +97,7 @@ def get_signature_status(signable, role=None, repository_name='default', TUF role string (e.g. 'root', 'targets', 'snapshot' or timestamp). threshold: - Rather than reference the role's threshold as set in tuf.roledb.py, use + Rather than reference the role's threshold as set in roledb, use the given 'threshold' to calculate the signature status of 'signable'. 'threshold' is an integer value that sets the role's threshold value, or the minimum number of signatures needed for metadata to be considered @@ -106,7 +106,7 @@ def get_signature_status(signable, role=None, repository_name='default', keyids: Similar to the 'threshold' argument, use the supplied list of 'keyids' to calculate the signature status, instead of referencing the keyids - in tuf.roledb.py for 'role'. + in roledb for 'role'. securesystemslib.exceptions.FormatError, if 'signable' does not have the @@ -180,7 +180,7 @@ def get_signature_status(signable, role=None, repository_name='default', # Note that if the role is not known, tuf.exceptions.UnknownRoleError # is raised here. if keyids is None: - keyids = tuf.roledb.get_role_keyids(role, repository_name) + keyids = roledb.get_role_keyids(role, repository_name) if keyid not in keyids: untrusted_sigs.append(keyid) @@ -204,7 +204,7 @@ def get_signature_status(signable, role=None, repository_name='default', if threshold is None: # Note that if the role is not known, tuf.exceptions.UnknownRoleError is # raised here. 
- threshold = tuf.roledb.get_role_threshold( + threshold = roledb.get_role_threshold( role, repository_name=repository_name) else: @@ -234,7 +234,7 @@ def verify(signable, role, repository_name='default', threshold=None, Verify that 'signable' has a valid threshold of authorized signatures identified by unique keyids. The threshold and whether a keyid is authorized is determined by querying the 'threshold' and 'keyids' info for - the passed 'role' in 'tuf.roledb'. Both values can be overwritten by + the passed 'role' in 'roledb'. Both values can be overwritten by passing the 'threshold' or 'keyids' arguments. NOTE: @@ -252,7 +252,7 @@ def verify(signable, role, repository_name='default', threshold=None, TUF role string (e.g. 'root', 'targets', 'snapshot' or timestamp). threshold: - Rather than reference the role's threshold as set in tuf.roledb.py, use + Rather than reference the role's threshold as set in roledb, use the given 'threshold' to calculate the signature status of 'signable'. 'threshold' is an integer value that sets the role's threshold value, or the minimum number of signatures needed for metadata to be considered @@ -261,7 +261,7 @@ def verify(signable, role, repository_name='default', threshold=None, keyids: Similar to the 'threshold' argument, use the supplied list of 'keyids' to calculate the signature status, instead of referencing the keyids - in tuf.roledb.py for 'role'. + in roledb for 'role'. tuf.exceptions.UnknownRoleError, if 'role' is not recognized. From c66c61f1d01368f8d228e1c4934f09bea8251dd4 Mon Sep 17 00:00:00 2001 From: Jussi Kukkonen Date: Mon, 11 Jan 2021 20:38:36 +0200 Subject: [PATCH 08/25] imports: Make 'settings' imports vendoring-compatible Use "from tuf import " instead of "import tuf.": this makes it possible for vendoring tool to vendor tuf. Fix all references to in the code. 
Signed-off-by: Jussi Kukkonen --- tuf/client/updater.py | 22 +++++++++++----------- tuf/download.py | 16 ++++++++-------- tuf/log.py | 8 ++++---- tuf/repository_lib.py | 10 +++++----- tuf/scripts/client.py | 4 ++-- 5 files changed, 30 insertions(+), 30 deletions(-) diff --git a/tuf/client/updater.py b/tuf/client/updater.py index c23c45b72d..65446e1ed7 100755 --- a/tuf/client/updater.py +++ b/tuf/client/updater.py @@ -137,8 +137,8 @@ from tuf import log from tuf import mirrors from tuf import roledb +from tuf import settings import tuf.requests_fetcher -import tuf.settings import tuf.keydb import tuf.sig @@ -151,14 +151,14 @@ # The Timestamp role does not have signed metadata about it; otherwise we # would need an infinite regress of metadata. Therefore, we use some # default, but sane, upper file length for its metadata. -DEFAULT_TIMESTAMP_UPPERLENGTH = tuf.settings.DEFAULT_TIMESTAMP_REQUIRED_LENGTH +DEFAULT_TIMESTAMP_UPPERLENGTH = settings.DEFAULT_TIMESTAMP_REQUIRED_LENGTH # The Root role may be updated without knowing its version number if # top-level metadata cannot be safely downloaded (e.g., keys may have been # revoked, thus requiring a new Root file that includes the updated keys) # and 'unsafely_update_root_if_necessary' is True. # We use some default, but sane, upper file length for its metadata. -DEFAULT_ROOT_UPPERLENGTH = tuf.settings.DEFAULT_ROOT_REQUIRED_LENGTH +DEFAULT_ROOT_UPPERLENGTH = settings.DEFAULT_ROOT_REQUIRED_LENGTH # See 'log.py' to learn how logging is handled in TUF. logger = logging.getLogger(__name__) @@ -270,7 +270,7 @@ def get_valid_targetinfo(self, target_filename, match_custom_field=True): formats.MAPPING_SCHEMA.check_match(self.map_file['mapping']) # Set the top-level directory containing the metadata for each repository. - repositories_directory = tuf.settings.repositories_directory + repositories_directory = settings.repositories_directory # Verify that the required local directories exist for each repository. 
self._verify_metadata_directories(repositories_directory) @@ -731,13 +731,13 @@ def __init__(self, repository_name, repository_mirrors, fetcher=None): self.consistent_snapshot = False # Ensure the repository metadata directory has been set. - if tuf.settings.repositories_directory is None: + if settings.repositories_directory is None: raise exceptions.RepositoryError('The TUF update client' ' module must specify the directory containing the local repository' ' files. "tuf.settings.repositories_directory" MUST be set.') # Set the path for the current set of metadata files. - repositories_directory = tuf.settings.repositories_directory + repositories_directory = settings.repositories_directory repository_directory = os.path.join(repositories_directory, self.repository_name) # raise MissingLocalRepository if the repo does not exist at all. @@ -1138,7 +1138,7 @@ def neither_403_nor_404(mirror_error): # Following the spec, try downloading the N+1th root for a certain maximum # number of times. lower_bound = current_root_metadata['version'] + 1 - upper_bound = lower_bound + tuf.settings.MAX_NUMBER_ROOT_ROTATIONS + upper_bound = lower_bound + settings.MAX_NUMBER_ROOT_ROTATIONS # Try downloading the next root. for next_version in range(lower_bound, upper_bound): @@ -1852,11 +1852,11 @@ def _update_metadata_if_changed(self, metadata_role, # expected role. Note: The Timestamp role is not updated via this # function. if metadata_role == 'snapshot': - upperbound_filelength = tuf.settings.DEFAULT_SNAPSHOT_REQUIRED_LENGTH + upperbound_filelength = settings.DEFAULT_SNAPSHOT_REQUIRED_LENGTH # The metadata is considered Targets (or delegated Targets metadata). 
else: - upperbound_filelength = tuf.settings.DEFAULT_TARGETS_REQUIRED_LENGTH + upperbound_filelength = settings.DEFAULT_TARGETS_REQUIRED_LENGTH try: self._update_metadata(metadata_role, upperbound_filelength, @@ -2681,7 +2681,7 @@ def _preorder_depth_first_walk(self, target_filepath): current_metadata = self.metadata['current'] role_names = ['targets'] visited_role_names = set() - number_of_delegations = tuf.settings.MAX_NUMBER_OF_DELEGATIONS + number_of_delegations = settings.MAX_NUMBER_OF_DELEGATIONS # Ensure the client has the most up-to-date version of 'targets.json'. # Raise 'tuf.exceptions.NoWorkingMirrorError' if the changed metadata @@ -2755,7 +2755,7 @@ def _preorder_depth_first_walk(self, target_filepath): if target is None and number_of_delegations == 0 and len(role_names) > 0: logger.debug(repr(len(role_names)) + ' roles left to visit, ' + 'but allowed to visit at most ' + - repr(tuf.settings.MAX_NUMBER_OF_DELEGATIONS) + ' delegations.') + repr(settings.MAX_NUMBER_OF_DELEGATIONS) + ' delegations.') return target diff --git a/tuf/download.py b/tuf/download.py index 3156ed7b19..18f41da4f2 100755 --- a/tuf/download.py +++ b/tuf/download.py @@ -42,6 +42,7 @@ import tuf from tuf import exceptions from tuf import formats +from tuf import settings # See 'log.py' to learn how logging is handled in TUF. logger = logging.getLogger(__name__) @@ -208,15 +209,14 @@ def _download_file(url, required_length, fetcher, STRICT_REQUIRED_LENGTH=True): seconds_spent_receiving = stop_time - start_time average_download_speed = number_of_bytes_received / seconds_spent_receiving - if average_download_speed < tuf.settings.MIN_AVERAGE_DOWNLOAD_SPEED: + if average_download_speed < settings.MIN_AVERAGE_DOWNLOAD_SPEED: logger.debug('The average download speed dropped below the minimum' - ' average download speed set in tuf.settings.py. Stopping the' - ' download!') + ' average download speed set in settings. 
Stopping the download!.') break else: logger.debug('The average download speed has not dipped below the' - ' minimum average download speed set in tuf.settings.py.') + ' minimum average download speed set in settings.') # Does the total number of downloaded bytes match the required length? _check_downloaded_length(number_of_bytes_received, required_length, @@ -273,7 +273,7 @@ def _check_downloaded_length(total_downloaded, required_length, tuf.exceptions.SlowRetrievalError, if the total downloaded was done in less than the acceptable download speed (as set in - tuf.settings.py). + tuf.settings). None. @@ -296,9 +296,9 @@ def _check_downloaded_length(total_downloaded, required_length, # If the average download speed is below a certain threshold, we flag # this as a possible slow-retrieval attack. logger.debug('Average download speed: ' + repr(average_download_speed)) - logger.debug('Minimum average download speed: ' + repr(tuf.settings.MIN_AVERAGE_DOWNLOAD_SPEED)) + logger.debug('Minimum average download speed: ' + repr(settings.MIN_AVERAGE_DOWNLOAD_SPEED)) - if average_download_speed < tuf.settings.MIN_AVERAGE_DOWNLOAD_SPEED: + if average_download_speed < settings.MIN_AVERAGE_DOWNLOAD_SPEED: raise exceptions.SlowRetrievalError(average_download_speed) else: @@ -312,7 +312,7 @@ def _check_downloaded_length(total_downloaded, required_length, # will log a warning anyway. This is useful when we wish to download the # Timestamp or Root metadata, for which we have no signed metadata; so, # we must guess a reasonable required_length for it. 
- if average_download_speed < tuf.settings.MIN_AVERAGE_DOWNLOAD_SPEED: + if average_download_speed < settings.MIN_AVERAGE_DOWNLOAD_SPEED: raise exceptions.SlowRetrievalError(average_download_speed) else: diff --git a/tuf/log.py b/tuf/log.py index 88182bcd91..b55896f4fa 100755 --- a/tuf/log.py +++ b/tuf/log.py @@ -73,8 +73,8 @@ import time import tuf -import tuf.settings from tuf import exceptions +from tuf import settings import securesystemslib.formats @@ -125,8 +125,8 @@ # '_DEFAULT_LOG_LEVEL'. The log level of messages handled by 'file_handler' # may be modified with 'set_filehandler_log_level()'. 'settings.LOG_FILENAME' # will be opened in append mode. -if tuf.settings.ENABLE_FILE_LOGGING: - file_handler = logging.FileHandler(tuf.settings.LOG_FILENAME) +if settings.ENABLE_FILE_LOGGING: + file_handler = logging.FileHandler(settings.LOG_FILENAME) file_handler.setLevel(_DEFAULT_FILE_LOG_LEVEL) file_handler.setFormatter(formatter) logger.addHandler(file_handler) @@ -381,7 +381,7 @@ def remove_console_handler(): -def enable_file_logging(log_filename=tuf.settings.LOG_FILENAME): +def enable_file_logging(log_filename=settings.LOG_FILENAME): """ Log messages to a file (i.e., 'log_filename'). The log level for the file diff --git a/tuf/repository_lib.py b/tuf/repository_lib.py index 76c61753d3..0271cddd4b 100644 --- a/tuf/repository_lib.py +++ b/tuf/repository_lib.py @@ -44,9 +44,9 @@ from tuf import formats from tuf import log from tuf import roledb +from tuf import settings import tuf.keydb import tuf.sig -import tuf.settings import securesystemslib import securesystemslib.hash @@ -87,7 +87,7 @@ # The algorithm used by the repository to generate the path hash prefixes # of hashed bin delegations. 
Please see delegate_hashed_bins() -HASH_FUNCTION = tuf.settings.DEFAULT_HASH_ALGORITHM +HASH_FUNCTION = settings.DEFAULT_HASH_ALGORITHM @@ -803,7 +803,7 @@ def import_ed25519_privatekey_from_file(filepath, password=None): securesystemslib.exceptions.UnsupportedLibraryError, if 'filepath' cannot be decrypted due to an invalid configuration setting (i.e., invalid - 'tuf.settings.py' setting). + 'tuf.settings' setting). 'password' is used to decrypt the 'filepath' key file. @@ -993,7 +993,7 @@ def get_targets_metadata_fileinfo(filename, storage_backend, custom=None): # file information, such as the file's author, version/revision # numbers, etc. filesize, filehashes = securesystemslib.util.get_file_details(filename, - tuf.settings.FILE_HASH_ALGORITHMS, storage_backend) + settings.FILE_HASH_ALGORITHMS, storage_backend) return formats.make_targets_fileinfo(filesize, filehashes, custom=custom) @@ -1543,7 +1543,7 @@ def _get_hashes_and_length_if_needed(use_length, use_hashes, full_file_path, if use_hashes: hashes = securesystemslib.util.get_file_hashes(full_file_path, - tuf.settings.FILE_HASH_ALGORITHMS, storage_backend) + settings.FILE_HASH_ALGORITHMS, storage_backend) return length, hashes diff --git a/tuf/scripts/client.py b/tuf/scripts/client.py index ebc93e97ab..9433987ed8 100755 --- a/tuf/scripts/client.py +++ b/tuf/scripts/client.py @@ -74,8 +74,8 @@ import tuf from tuf import exceptions from tuf import log +from tuf import settings import tuf.client.updater -import tuf.settings # See 'log.py' to learn how logging is handled in TUF. logger = logging.getLogger(__name__) @@ -113,7 +113,7 @@ def update_client(parsed_arguments): logger.debug('We have a valid argparse Namespace object.') # Set the local repositories directory containing all of the metadata files. - tuf.settings.repositories_directory = '.' + settings.repositories_directory = '.' # Set the repository mirrors. This dictionary is needed by the Updater # class of updater.py. 
From 9d7047ffc9039932716a2eec0565da86038ff917 Mon Sep 17 00:00:00 2001 From: Jussi Kukkonen Date: Mon, 11 Jan 2021 20:51:29 +0200 Subject: [PATCH 09/25] imports: Make 'sig' imports vendoring-compatible Use "from tuf import " instead of "import tuf.": this makes it possible for vendoring tool to vendor tuf. Fix all references to in the code. Remove one unused import. Signed-off-by: Jussi Kukkonen --- tuf/client/updater.py | 4 ++-- tuf/developer_tool.py | 10 +++++----- tuf/repository_lib.py | 10 +++++----- tuf/repository_tool.py | 1 - 4 files changed, 12 insertions(+), 13 deletions(-) diff --git a/tuf/client/updater.py b/tuf/client/updater.py index 65446e1ed7..54baecdf84 100755 --- a/tuf/client/updater.py +++ b/tuf/client/updater.py @@ -138,9 +138,9 @@ from tuf import mirrors from tuf import roledb from tuf import settings +from tuf import sig import tuf.requests_fetcher import tuf.keydb -import tuf.sig import securesystemslib.exceptions import securesystemslib.hash @@ -1449,7 +1449,7 @@ def _verify_metadata_file(self, metadata_file_object, # metadata. # Verify the signature on the downloaded metadata object. - valid = tuf.sig.verify(metadata_signable, metadata_role, + valid = sig.verify(metadata_signable, metadata_role, self.repository_name) if not valid: diff --git a/tuf/developer_tool.py b/tuf/developer_tool.py index 256bf626a1..7547b523d2 100755 --- a/tuf/developer_tool.py +++ b/tuf/developer_tool.py @@ -43,8 +43,8 @@ from tuf import formats from tuf import log from tuf import roledb +from tuf import sig import tuf.keydb -import tuf.sig import tuf.repository_lib as repo_lib import tuf.repository_tool @@ -437,7 +437,7 @@ def _log_status(self, rolename, signable, repository_name): 'rolename'. 
""" - status = tuf.sig.get_signature_status(signable, rolename, repository_name) + status = sig.get_signature_status(signable, rolename, repository_name) message = repr(rolename) + ' role contains ' +\ repr(len(status['good_sigs'])) + ' / ' + repr(status['threshold']) +\ @@ -490,7 +490,7 @@ def _generate_and_write_metadata(rolename, metadata_filename, write_partial, temp_signable = repo_lib.sign_metadata(metadata, [], metadata_filename, repository_name) temp_signable['signatures'].extend(roleinfo['signatures']) - status = tuf.sig.get_signature_status(temp_signable, rolename, + status = sig.get_signature_status(temp_signable, rolename, repository_name) if len(status['good_sigs']) == 0: metadata['version'] = metadata['version'] + 1 @@ -499,7 +499,7 @@ def _generate_and_write_metadata(rolename, metadata_filename, write_partial, # non-partial write() else: - if tuf.sig.verify(signable, rolename, repository_name): + if sig.verify(signable, rolename, repository_name): metadata['version'] = metadata['version'] + 1 signable = repo_lib.sign_metadata(metadata, roleinfo['signing_keyids'], metadata_filename, repository_name) @@ -507,7 +507,7 @@ def _generate_and_write_metadata(rolename, metadata_filename, write_partial, # Write the metadata to file if contains a threshold of signatures. 
signable['signatures'].extend(roleinfo['signatures']) - if tuf.sig.verify(signable, rolename, repository_name) or write_partial: + if sig.verify(signable, rolename, repository_name) or write_partial: repo_lib._remove_invalid_and_duplicate_signatures(signable, repository_name) storage_backend = securesystemslib.storage.FilesystemBackend() filename = repo_lib.write_metadata_file(signable, metadata_filename, diff --git a/tuf/repository_lib.py b/tuf/repository_lib.py index 0271cddd4b..2af1abd708 100644 --- a/tuf/repository_lib.py +++ b/tuf/repository_lib.py @@ -45,8 +45,8 @@ from tuf import log from tuf import roledb from tuf import settings +from tuf import sig import tuf.keydb -import tuf.sig import securesystemslib import securesystemslib.hash @@ -200,7 +200,7 @@ def _generate_and_write_metadata(rolename, metadata_filename, def should_write(): # Root must be signed by its previous keys and threshold. if rolename == 'root' and len(previous_keyids) > 0: - if not tuf.sig.verify(signable, rolename, repository_name, + if not sig.verify(signable, rolename, repository_name, previous_threshold, previous_keyids): return False @@ -208,7 +208,7 @@ def should_write(): logger.debug('Root is signed by a threshold of its previous keyids.') # In the normal case, we should write metadata if the threshold is met. - return tuf.sig.verify(signable, rolename, repository_name, + return sig.verify(signable, rolename, repository_name, roleinfo['threshold'], roleinfo['signing_keyids']) @@ -276,7 +276,7 @@ def _metadata_is_partially_loaded(rolename, signable, repository_name): # The signature status lists the number of good signatures, including # bad, untrusted, unknown, etc. 
- status = tuf.sig.get_signature_status(signable, rolename, repository_name) + status = sig.get_signature_status(signable, rolename, repository_name) if len(status['good_sigs']) < status['threshold'] and \ len(status['good_sigs']) >= 0: @@ -2147,7 +2147,7 @@ def _log_status(rolename, signable, repository_name): 'rolename'. """ - status = tuf.sig.get_signature_status(signable, rolename, repository_name) + status = sig.get_signature_status(signable, rolename, repository_name) logger.info(repr(rolename) + ' role contains ' + \ repr(len(status['good_sigs'])) + ' / ' + repr(status['threshold']) + \ diff --git a/tuf/repository_tool.py b/tuf/repository_tool.py index 6dca4790c2..5ef35b4a4b 100755 --- a/tuf/repository_tool.py +++ b/tuf/repository_tool.py @@ -47,7 +47,6 @@ from tuf import formats from tuf import log from tuf import roledb -import tuf.sig import tuf.repository_lib as repo_lib import securesystemslib.keys From 6faed27e0ac2ee0a59d4870095e39d2e6ad02b75 Mon Sep 17 00:00:00 2001 From: Jussi Kukkonen Date: Mon, 11 Jan 2021 21:35:57 +0200 Subject: [PATCH 10/25] imports: Fix securesystemslib.exceptions imports Make the import compatible with vendoring tool and alias the import so it does not clash with the local module. Fix all references to the module in the code. Remove a related repo.py comment that was badly duplicated from module docstring. 
Signed-off-by: Jussi Kukkonen --- tuf/client/updater.py | 23 ++++++++++++----------- tuf/developer_tool.py | 8 +++++--- tuf/formats.py | 15 ++++++++------- tuf/keydb.py | 18 ++++++++++-------- tuf/log.py | 6 ++++-- tuf/mirrors.py | 6 ++++-- tuf/repository_lib.py | 26 ++++++++++++++------------ tuf/repository_tool.py | 36 +++++++++++++++++++----------------- tuf/requests_fetcher.py | 8 ++++---- tuf/roledb.py | 30 ++++++++++++++++-------------- tuf/scripts/repo.py | 16 ++++++++-------- tuf/sig.py | 9 +++++---- 12 files changed, 109 insertions(+), 92 deletions(-) diff --git a/tuf/client/updater.py b/tuf/client/updater.py index 54baecdf84..ddb4c6bbdc 100755 --- a/tuf/client/updater.py +++ b/tuf/client/updater.py @@ -130,6 +130,8 @@ import warnings import io +from securesystemslib import exceptions as sslib_exceptions + import tuf from tuf import download from tuf import exceptions @@ -142,7 +144,6 @@ import tuf.requests_fetcher import tuf.keydb -import securesystemslib.exceptions import securesystemslib.hash import securesystemslib.keys import securesystemslib.util @@ -207,7 +208,7 @@ def __init__(self, map_file): # The map file dictionary that associates targets with repositories. self.map_file = securesystemslib.util.load_json_file(map_file) - except (securesystemslib.exceptions.Error) as e: + except (sslib_exceptions.Error) as e: raise exceptions.Error('Cannot load the map file: ' + str(e)) # Raise securesystemslib.exceptions.FormatError if the map file is @@ -829,7 +830,7 @@ def _load_metadata_from_file(self, metadata_set, metadata_role): # Ensure we have a valid metadata set. if metadata_set not in ['current', 'previous']: - raise securesystemslib.exceptions.Error( + raise sslib_exceptions.Error( 'Invalid metadata set: ' + repr(metadata_set)) # Save and construct the full metadata path. @@ -849,7 +850,7 @@ def _load_metadata_from_file(self, metadata_set, metadata_role): # be a valid json file. On the next refresh cycle, it will be # updated as required. 
If Root if cannot be loaded from disk # successfully, an exception should be raised by the caller. - except securesystemslib.exceptions.Error: + except sslib_exceptions.Error: return formats.check_signable_object_format(metadata_signable) @@ -971,7 +972,7 @@ def _import_delegations(self, parent_role): except exceptions.KeyAlreadyExistsError: pass - except (securesystemslib.exceptions.FormatError, securesystemslib.exceptions.Error): + except (sslib_exceptions.FormatError, sslib_exceptions.Error): logger.warning('Invalid key: ' + repr(keyid) + '. Aborting role ' + 'delegation for parent role \'' + parent_role + '\'.') raise @@ -1209,7 +1210,7 @@ def _check_hashes(self, file_object, trusted_hashes): computed_hash = digest_object.hexdigest() if trusted_hash != computed_hash: - raise securesystemslib.exceptions.BadHashError(trusted_hash, + raise sslib_exceptions.BadHashError(trusted_hash, computed_hash) else: @@ -1453,7 +1454,7 @@ def _verify_metadata_file(self, metadata_file_object, self.repository_name) if not valid: - raise securesystemslib.exceptions.BadSignatureError(metadata_role) + raise sslib_exceptions.BadSignatureError(metadata_role) # For root metadata, verify the downloaded root metadata object with the # new threshold of new signatures contained within the downloaded root @@ -1467,7 +1468,7 @@ def _verify_metadata_file(self, metadata_file_object, if valid and metadata_role == 'root': valid = self._verify_root_self_signed(metadata_signable) if not valid: - raise securesystemslib.exceptions.BadSignatureError(metadata_role) + raise sslib_exceptions.BadSignatureError(metadata_role) @@ -1563,7 +1564,7 @@ def _get_metadata_file(self, metadata_role, remote_filename, ". 
The update will continue as the major versions match.") except (ValueError, TypeError) as error: - six.raise_from(securesystemslib.exceptions.FormatError('Improperly' + six.raise_from(sslib_exceptions.FormatError('Improperly' ' formatted spec_version, which must be in major.minor.fix format'), error) @@ -2890,7 +2891,7 @@ def _visit_child_role(self, child_role, target_filepath): # 'role_name' should have been validated when it was downloaded. # The 'paths' or 'path_hash_prefixes' fields should not be missing, # so we raise a format error here in case they are both missing. - raise securesystemslib.exceptions.FormatError(repr(child_role_name) + ' ' + raise sslib_exceptions.FormatError(repr(child_role_name) + ' ' 'has neither a "paths" nor "path_hash_prefixes". At least' ' one of these attributes must be present.') @@ -3090,7 +3091,7 @@ def updated_targets(self, targets, destination_directory): algorithm=algorithm) # This exception would occur if the target does not exist locally. - except securesystemslib.exceptions.StorageError: + except sslib_exceptions.StorageError: updated_targets.append(target) updated_targetpaths.append(target_filepath) break diff --git a/tuf/developer_tool.py b/tuf/developer_tool.py index 7547b523d2..0d728693c2 100755 --- a/tuf/developer_tool.py +++ b/tuf/developer_tool.py @@ -38,6 +38,8 @@ import tempfile import json +from securesystemslib import exceptions as sslib_exceptions + import tuf from tuf import exceptions from tuf import formats @@ -322,7 +324,7 @@ def add_verification_key(self, key, expires=None): # more than one key. # TODO: Add condition check for the requirement stated above. 
if len(self.keys) > 0: - raise securesystemslib.exceptions.Error("This project already contains a key.") + raise sslib_exceptions.Error("This project already contains a key.") super(Project, self).add_verification_key(key, expires) @@ -389,7 +391,7 @@ def status(self): repository_name=self.repository_name) self._log_status(delegated_role, signable[0], self.repository_name) - except securesystemslib.exceptions.Error: + except sslib_exceptions.Error: insufficient_signatures.append(delegated_role) if len(insufficient_keys): @@ -516,7 +518,7 @@ def _generate_and_write_metadata(rolename, metadata_filename, write_partial, # 'signable' contains an invalid threshold of signatures. else: message = 'Not enough signatures for ' + repr(metadata_filename) - raise securesystemslib.exceptions.Error(message, signable) + raise sslib_exceptions.Error(message, signable) return signable, filename diff --git a/tuf/formats.py b/tuf/formats.py index 0c59168469..cfc1a8f053 100755 --- a/tuf/formats.py +++ b/tuf/formats.py @@ -69,6 +69,7 @@ import time import copy +from securesystemslib import exceptions as sslib_exceptions import securesystemslib.formats import securesystemslib.schema as SCHEMA @@ -637,7 +638,7 @@ def expiry_string_to_datetime(expires): try: return datetime.datetime.strptime(expires, "%Y-%m-%dT%H:%M:%SZ") except ValueError as error: - six.raise_from(securesystemslib.exceptions.FormatError( + six.raise_from(sslib_exceptions.FormatError( 'Failed to parse ' + repr(expires) + ' as an expiry time'), error) @@ -675,7 +676,7 @@ def datetime_to_unix_timestamp(datetime_object): # Raise 'securesystemslib.exceptions.FormatError' if not. if not isinstance(datetime_object, datetime.datetime): message = repr(datetime_object) + ' is not a datetime.datetime() object.' 
- raise securesystemslib.exceptions.FormatError(message) + raise sslib_exceptions.FormatError(message) unix_timestamp = calendar.timegm(datetime_object.timetuple()) @@ -753,7 +754,7 @@ def format_base64(data): return binascii.b2a_base64(data).decode('utf-8').rstrip('=\n ') except (TypeError, binascii.Error) as e: - raise securesystemslib.exceptions.FormatError('Invalid base64' + raise sslib_exceptions.FormatError('Invalid base64' ' encoding: ' + str(e)) @@ -782,7 +783,7 @@ def parse_base64(base64_string): if not isinstance(base64_string, six.string_types): message = 'Invalid argument: '+repr(base64_string) - raise securesystemslib.exceptions.FormatError(message) + raise sslib_exceptions.FormatError(message) extra = len(base64_string) % 4 if extra: @@ -793,7 +794,7 @@ def parse_base64(base64_string): return binascii.a2b_base64(base64_string.encode('utf-8')) except (TypeError, binascii.Error) as e: - raise securesystemslib.exceptions.FormatError('Invalid base64' + raise sslib_exceptions.FormatError('Invalid base64' ' encoding: ' + str(e)) @@ -990,14 +991,14 @@ def check_signable_object_format(signable): role_type = signable['signed']['_type'] except (KeyError, TypeError) as error: - six.raise_from(securesystemslib.exceptions.FormatError( + six.raise_from(sslib_exceptions.FormatError( 'Untyped signable object.'), error) try: schema = SCHEMAS_BY_TYPE[role_type] except KeyError as error: - six.raise_from(securesystemslib.exceptions.FormatError( + six.raise_from(sslib_exceptions.FormatError( 'Unrecognized type ' + repr(role_type)), error) if not signable['signatures']: diff --git a/tuf/keydb.py b/tuf/keydb.py index dc153d8841..6a57739462 100755 --- a/tuf/keydb.py +++ b/tuf/keydb.py @@ -44,11 +44,13 @@ import logging import copy +import securesystemslib +from securesystemslib import exceptions as sslib_exceptions + from tuf import exceptions from tuf import formats import six -import securesystemslib # List of strings representing the key types supported by TUF. 
_SUPPORTED_KEY_TYPES = ['rsa', 'ed25519', 'ecdsa-sha2-nistp256'] @@ -171,7 +173,7 @@ def create_keydb(repository_name): securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) if repository_name in _keydb_dict: - raise securesystemslib.exceptions.InvalidNameError('Repository name already exists:' + raise sslib_exceptions.InvalidNameError('Repository name already exists:' ' ' + repr(repository_name)) _keydb_dict[repository_name] = {} @@ -211,7 +213,7 @@ def remove_keydb(repository_name): return if repository_name == 'default': - raise securesystemslib.exceptions.InvalidNameError('Cannot remove the default repository:' + raise sslib_exceptions.InvalidNameError('Cannot remove the default repository:' ' ' + repr(repository_name)) del _keydb_dict[repository_name] @@ -277,11 +279,11 @@ def add_key(key_dict, keyid=None, repository_name='default'): # Check if each keyid found in 'key_dict' matches 'keyid'. if keyid != key_dict['keyid']: - raise securesystemslib.exceptions.Error('Incorrect keyid. Got ' + key_dict['keyid'] + ' but expected ' + keyid) + raise sslib_exceptions.Error('Incorrect keyid. Got ' + key_dict['keyid'] + ' but expected ' + keyid) # Ensure 'repository_name' is actually set in the key database. if repository_name not in _keydb_dict: - raise securesystemslib.exceptions.InvalidNameError('Repository name does not exist:' + raise sslib_exceptions.InvalidNameError('Repository name does not exist:' ' ' + repr(repository_name)) # Check if the keyid belonging to 'key_dict' is not already @@ -336,7 +338,7 @@ def get_key(keyid, repository_name='default'): securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) if repository_name not in _keydb_dict: - raise securesystemslib.exceptions.InvalidNameError('Repository name does not exist:' + raise sslib_exceptions.InvalidNameError('Repository name does not exist:' ' ' + repr(repository_name)) # Return the key belonging to 'keyid', if found in the key database. 
@@ -389,7 +391,7 @@ def remove_key(keyid, repository_name='default'): securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) if repository_name not in _keydb_dict: - raise securesystemslib.exceptions.InvalidNameError('Repository name does not exist:' + raise sslib_exceptions.InvalidNameError('Repository name does not exist:' ' ' + repr(repository_name)) # Remove the key belonging to 'keyid' if found in the key database. @@ -442,7 +444,7 @@ def clear_keydb(repository_name='default', clear_all=False): _keydb_dict['default'] = {} if repository_name not in _keydb_dict: - raise securesystemslib.exceptions.InvalidNameError('Repository name does not exist:' + raise sslib_exceptions.InvalidNameError('Repository name does not exist:' ' ' + repr(repository_name)) _keydb_dict[repository_name] = {} diff --git a/tuf/log.py b/tuf/log.py index b55896f4fa..318695c821 100755 --- a/tuf/log.py +++ b/tuf/log.py @@ -72,6 +72,8 @@ import logging import time +from securesystemslib import exceptions as sslib_exceptions + import tuf from tuf import exceptions from tuf import settings @@ -269,7 +271,7 @@ def set_console_log_level(log_level=_DEFAULT_CONSOLE_LOG_LEVEL): 'log_level' examples: logging.INFO; logging.CRITICAL. - securesystems.exceptions.Error, if the 'log.py' console handler has not + securesystemslib.exceptions.Error, if the 'log.py' console handler has not been set yet with add_console_handler(). @@ -291,7 +293,7 @@ def set_console_log_level(log_level=_DEFAULT_CONSOLE_LOG_LEVEL): else: message = 'The console handler has not been set with add_console_handler().' 
- raise securesystemslib.exceptions.Error(message) + raise sslib_exceptions.Error(message) diff --git a/tuf/mirrors.py b/tuf/mirrors.py index 4ba90654b6..913c5eb717 100755 --- a/tuf/mirrors.py +++ b/tuf/mirrors.py @@ -32,10 +32,12 @@ import os +import securesystemslib +from securesystemslib import exceptions as sslib_exceptions + import tuf from tuf import formats -import securesystemslib import six # The type of file to be downloaded from a repository. The @@ -90,7 +92,7 @@ def get_list_of_mirrors(file_type, file_path, mirrors_dict): # Verify 'file_type' is supported. if file_type not in _SUPPORTED_FILE_TYPES: - raise securesystemslib.exceptions.Error('Invalid file_type argument.' + raise sslib_exceptions.Error('Invalid file_type argument.' ' Supported file types: ' + repr(_SUPPORTED_FILE_TYPES)) path_key = 'metadata_path' if file_type == 'meta' else 'targets_path' diff --git a/tuf/repository_lib.py b/tuf/repository_lib.py index 2af1abd708..66399271ae 100644 --- a/tuf/repository_lib.py +++ b/tuf/repository_lib.py @@ -39,6 +39,8 @@ import json import tempfile +from securesystemslib import exceptions as sslib_exceptions + import tuf from tuf import exceptions from tuf import formats @@ -540,7 +542,7 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): # Ensure the 'consistent_snapshot' field is extracted. 
consistent_snapshot = root_metadata['consistent_snapshot'] - except securesystemslib.exceptions.StorageError as error: + except sslib_exceptions.StorageError as error: six.raise_from(exceptions.RepositoryError('Cannot load the required' ' root file: ' + repr(root_filename)), error) @@ -569,7 +571,7 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): roledb.update_roleinfo('timestamp', roleinfo, mark_role_as_dirty=False, repository_name=repository_name) - except securesystemslib.exceptions.StorageError as error: + except sslib_exceptions.StorageError as error: six.raise_from(exceptions.RepositoryError('Cannot load the Timestamp ' 'file: ' + repr(timestamp_filename)), error) @@ -615,7 +617,7 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): roledb.update_roleinfo('snapshot', roleinfo, mark_role_as_dirty=False, repository_name=repository_name) - except securesystemslib.exceptions.StorageError as error: + except sslib_exceptions.StorageError as error: six.raise_from(exceptions.RepositoryError('The Snapshot file ' 'cannot be loaded: '+ repr(snapshot_filename)), error) @@ -678,7 +680,7 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): except exceptions.KeyAlreadyExistsError: pass - except securesystemslib.exceptions.StorageError as error: + except sslib_exceptions.StorageError as error: six.raise_from(exceptions.RepositoryError('The Targets file ' 'can not be loaded: ' + repr(targets_filename)), error) @@ -759,7 +761,7 @@ def import_rsa_privatekey_from_file(filepath, password=None): # The user might not have given a password for an encrypted private key. # Prompt for a password for convenience. 
- except securesystemslib.exceptions.CryptoError: + except sslib_exceptions.CryptoError: if password is None: private_key = securesystemslib.interface.import_rsa_privatekey_from_file( filepath, password, prompt=True) @@ -822,7 +824,7 @@ def import_ed25519_privatekey_from_file(filepath, password=None): # The user might not have given a password for an encrypted private key. # Prompt for a password for convenience. - except securesystemslib.exceptions.CryptoError: + except sslib_exceptions.CryptoError: if password is None: private_key = securesystemslib.interface.import_ed25519_privatekey_from_file( filepath, password, prompt=True) @@ -1116,7 +1118,7 @@ def get_bin_numbers(number_of_bins): # Note: x % y != 0 does not guarantee that y is not a power of 2 for # arbitrary x and y values. However, due to the relationship between # number_of_bins and prefix_count, it is true for them. - raise securesystemslib.exceptions.Error('The "number_of_bins" argument' + raise sslib_exceptions.Error('The "number_of_bins" argument' ' must be a power of 2.') return prefix_length, prefix_count, bin_size @@ -1256,7 +1258,7 @@ def generate_root_metadata(version, expiration_date, consistent_snapshot, # If a top-level role is missing from 'roledb', raise an exception. 
if not roledb.role_exists(rolename, repository_name): - raise securesystemslib.exceptions.Error(repr(rolename) + ' not in' + raise sslib_exceptions.Error(repr(rolename) + ' not in' ' "roledb".') # Collect keys from all roles in a list @@ -1396,7 +1398,7 @@ def generate_targets_metadata(targets_directory, target_files, version, securesystemslib.formats.BOOLEAN_SCHEMA.check_match(use_existing_fileinfo) if write_consistent_targets and use_existing_fileinfo: - raise securesystemslib.exceptions.Error('Cannot support writing consistent' + raise sslib_exceptions.Error('Cannot support writing consistent' ' targets and using existing fileinfo.') if delegations is not None: @@ -1431,12 +1433,12 @@ def generate_targets_metadata(targets_directory, target_files, version, # Ensure all fileinfo entries in target_files have a non-empty hashes dict if not fileinfo.get('hashes', None): - raise securesystemslib.exceptions.Error('use_existing_fileinfo option' + raise sslib_exceptions.Error('use_existing_fileinfo option' ' set but no hashes exist in fileinfo for ' + repr(target)) # and a non-empty length if fileinfo.get('length', -1) < 0: - raise securesystemslib.exceptions.Error('use_existing_fileinfo option' + raise sslib_exceptions.Error('use_existing_fileinfo option' ' set but no length exists in fileinfo for ' + repr(target)) filedict[target] = fileinfo @@ -1870,7 +1872,7 @@ def sign_metadata(metadata_object, keyids, filename, repository_name): logger.debug('Private key unset. Skipping: ' + repr(keyid)) else: - raise securesystemslib.exceptions.Error('The keydb contains a key with' + raise sslib_exceptions.Error('The keydb contains a key with' ' an invalid key type.' 
+ repr(key['keytype'])) # Raise 'securesystemslib.exceptions.FormatError' if the resulting 'signable' diff --git a/tuf/repository_tool.py b/tuf/repository_tool.py index 5ef35b4a4b..add03ebfe1 100755 --- a/tuf/repository_tool.py +++ b/tuf/repository_tool.py @@ -42,6 +42,8 @@ from collections import deque +from securesystemslib import exceptions as sslib_exceptions + import tuf from tuf import exceptions from tuf import formats @@ -252,7 +254,7 @@ def __init__(self, repository_directory, metadata_directory, roledb.create_roledb(repository_name) tuf.keydb.create_keydb(repository_name) - except securesystemslib.exceptions.InvalidNameError: + except sslib_exceptions.InvalidNameError: logger.debug(repr(repository_name) + ' already exists. Overwriting' ' its contents.') @@ -630,7 +632,7 @@ def get_filepaths_in_directory(files_directory, recursive_walk=False, # Ensure a valid directory is given. if not os.path.isdir(files_directory): - raise securesystemslib.exceptions.Error(repr(files_directory) + ' is not' + raise sslib_exceptions.Error(repr(files_directory) + ' is not' ' a directory.') # A list of the target filepaths found in 'files_directory'. @@ -759,7 +761,7 @@ def add_verification_key(self, key, expires=None): # Is 'expires' a datetime.datetime() object? # Raise 'securesystemslib.exceptions.FormatError' if not. if not isinstance(expires, datetime.datetime): - raise securesystemslib.exceptions.FormatError(repr(expires) + ' is not a' + raise sslib_exceptions.FormatError(repr(expires) + ' is not a' ' datetime.datetime() object.') # Truncate the microseconds value to produce a correct schema string @@ -771,7 +773,7 @@ def add_verification_key(self, key, expires=None): formats.unix_timestamp_to_datetime(int(time.time())) if expires < current_datetime: - raise securesystemslib.exceptions.Error(repr(key) + ' has already' + raise sslib_exceptions.Error(repr(key) + ' has already' ' expired.') # Update the key's 'expires' entry. 
@@ -853,7 +855,7 @@ def remove_verification_key(self, key): repository_name=self._repository_name) else: - raise securesystemslib.exceptions.Error('Verification key not found.') + raise sslib_exceptions.Error('Verification key not found.') @@ -896,7 +898,7 @@ def load_signing_key(self, key): # Ensure the private portion of the key is available, otherwise signatures # cannot be generated when the metadata file is written to disk. if 'private' not in key['keyval'] or not len(key['keyval']['private']): - raise securesystemslib.exceptions.Error('This is not a private key.') + raise sslib_exceptions.Error('This is not a private key.') # Has the key, with the private portion included, been added to the keydb? # The public version of the key may have been previously added. @@ -966,7 +968,7 @@ def unload_signing_key(self, key): repository_name=self._repository_name) else: - raise securesystemslib.exceptions.Error('Signing key not found.') + raise sslib_exceptions.Error('Signing key not found.') @@ -1077,7 +1079,7 @@ def remove_signature(self, signature): repository_name=self._repository_name) else: - raise securesystemslib.exceptions.Error('Signature not found.') + raise sslib_exceptions.Error('Signature not found.') @@ -1375,7 +1377,7 @@ def expiration(self, datetime_object): # Is 'datetime_object' a datetime.datetime() object? # Raise 'securesystemslib.exceptions.FormatError' if not. 
if not isinstance(datetime_object, datetime.datetime): - raise securesystemslib.exceptions.FormatError( + raise sslib_exceptions.FormatError( repr(datetime_object) + ' is not a datetime.datetime() object.') # truncate the microseconds value to produce a correct schema string @@ -1387,7 +1389,7 @@ def expiration(self, datetime_object): formats.unix_timestamp_to_datetime(int(time.time())) if datetime_object < current_datetime_object: - raise securesystemslib.exceptions.Error(repr(self.rolename) + ' has' + raise sslib_exceptions.Error(repr(self.rolename) + ' has' ' already expired.') # Update the role's 'expires' entry in 'roledb'. @@ -1809,7 +1811,7 @@ def add_delegated_role(self, rolename, targets_object): formats.ROLENAME_SCHEMA.check_match(rolename) if not isinstance(targets_object, Targets): - raise securesystemslib.exceptions.FormatError(repr(targets_object) + ' is' + raise sslib_exceptions.FormatError(repr(targets_object) + ' is' ' not a Targets object.') @@ -1939,7 +1941,7 @@ def add_paths(self, paths, child_rolename): # Ensure that 'child_rolename' exists, otherwise it will not have an entry # in the parent role's delegations field. 
if not roledb.role_exists(child_rolename, self._repository_name): - raise securesystemslib.exceptions.Error(repr(child_rolename) + ' does' + raise sslib_exceptions.Error(repr(child_rolename) + ' does' ' not exist.') for path in paths: @@ -2025,7 +2027,7 @@ def add_target(self, filepath, custom=None, fileinfo=None): formats.RELPATH_SCHEMA.check_match(filepath) if fileinfo and custom: - raise securesystemslib.exceptions.Error("Can only take one of" + raise sslib_exceptions.Error("Can only take one of" " custom or fileinfo, not both.") if fileinfo: @@ -2170,7 +2172,7 @@ def remove_target(self, filepath): repository_name=self._repository_name) else: - raise securesystemslib.exceptions.Error('Target file path not found.') + raise sslib_exceptions.Error('Target file path not found.') @@ -2719,7 +2721,7 @@ def add_target_to_bin(self, target_filepath, number_of_bins=DEFAULT_NUM_BINS, # Ensure the Targets object has delegated to hashed bins if not self._delegated_roles.get(bin_name, None): - raise securesystemslib.exceptions.Error(self.rolename + ' does not have' + raise sslib_exceptions.Error(self.rolename + ' does not have' ' a delegated role ' + bin_name) self._delegated_roles[bin_name].add_target(target_filepath, @@ -2781,7 +2783,7 @@ def remove_target_from_bin(self, target_filepath, # Ensure the Targets object has delegated to hashed bins if not self._delegated_roles.get(bin_name, None): - raise securesystemslib.exceptions.Error(self.rolename + ' does not have' + raise sslib_exceptions.Error(self.rolename + ' does not have' ' a delegated role ' + bin_name) self._delegated_roles[bin_name].remove_target(target_filepath) @@ -3112,7 +3114,7 @@ def load_repository(repository_directory, repository_name='default', try: signable = securesystemslib.util.load_json_file(metadata_path) - except (securesystemslib.exceptions.Error, ValueError, IOError): + except (sslib_exceptions.Error, ValueError, IOError): logger.debug('Tried to load metadata with invalid JSON' ' content: ' + 
repr(metadata_path)) continue diff --git a/tuf/requests_fetcher.py b/tuf/requests_fetcher.py index e867e6b82b..6ac8437dfd 100644 --- a/tuf/requests_fetcher.py +++ b/tuf/requests_fetcher.py @@ -13,7 +13,7 @@ import urllib3.exceptions -import tuf.exceptions +from tuf import exceptions import tuf.settings from tuf.client.fetcher import FetcherInterface @@ -81,7 +81,7 @@ def fetch(self, url, required_length): except requests.HTTPError as e: response.close() status = e.response.status_code - raise tuf.exceptions.FetcherHTTPError(str(e), status) + raise exceptions.FetcherHTTPError(str(e), status) # Define a generator function to be returned by fetch. This way the caller @@ -122,7 +122,7 @@ def chunks(): break except urllib3.exceptions.ReadTimeoutError as e: - raise tuf.exceptions.SlowRetrievalError(str(e)) + raise exceptions.SlowRetrievalError(str(e)) finally: response.close() @@ -140,7 +140,7 @@ def _get_session(self, url): parsed_url = six.moves.urllib.parse.urlparse(url) if not parsed_url.scheme or not parsed_url.hostname: - raise tuf.exceptions.URLParsingError( + raise exceptions.URLParsingError( 'Could not get scheme and hostname from URL: ' + url) session_index = parsed_url.scheme + '+' + parsed_url.hostname diff --git a/tuf/roledb.py b/tuf/roledb.py index 0b69537ae1..6377076e33 100755 --- a/tuf/roledb.py +++ b/tuf/roledb.py @@ -52,12 +52,14 @@ import logging import copy +import securesystemslib +from securesystemslib import exceptions as sslib_exceptions + import tuf from tuf import exceptions from tuf import formats from tuf import log -import securesystemslib import six # See 'tuf.log' to learn how logging is handled in TUF. 
@@ -189,7 +191,7 @@ def create_roledb(repository_name): global _dirty_roles if repository_name in _roledb_dict or repository_name in _dirty_roles: - raise securesystemslib.exceptions.InvalidNameError('Repository name' + raise sslib_exceptions.InvalidNameError('Repository name' ' already exists: ' + repr(repository_name)) _roledb_dict[repository_name] = {} @@ -237,7 +239,7 @@ def remove_roledb(repository_name): return if repository_name == 'default': - raise securesystemslib.exceptions.InvalidNameError('Cannot remove the' + raise sslib_exceptions.InvalidNameError('Cannot remove the' ' default repository: ' + repr(repository_name)) del _roledb_dict[repository_name] @@ -309,7 +311,7 @@ def add_role(rolename, roleinfo, repository_name='default'): _validate_rolename(rolename) if repository_name not in _roledb_dict: - raise securesystemslib.exceptions.InvalidNameError('Repository name does not exist: ' + repository_name) + raise sslib_exceptions.InvalidNameError('Repository name does not exist: ' + repository_name) if rolename in _roledb_dict[repository_name]: raise exceptions.RoleAlreadyExistsError('Role already exists: ' + rolename) @@ -393,7 +395,7 @@ def update_roleinfo(rolename, roleinfo, mark_role_as_dirty=True, repository_name global _dirty_roles if repository_name not in _roledb_dict or repository_name not in _dirty_roles: - raise securesystemslib.exceptions.InvalidNameError('Repository name does not' ' exist: ' + + raise sslib_exceptions.InvalidNameError('Repository name does not' ' exist: ' + repository_name) if rolename not in _roledb_dict[repository_name]: @@ -445,7 +447,7 @@ def get_dirty_roles(repository_name='default'): global _dirty_roles if repository_name not in _roledb_dict or repository_name not in _dirty_roles: - raise securesystemslib.exceptions.InvalidNameError('Repository name does' + raise sslib_exceptions.InvalidNameError('Repository name does' ' not' ' exist: ' + repository_name) return sorted(list(_dirty_roles[repository_name])) @@ -488,7 
+490,7 @@ def mark_dirty(roles, repository_name='default'): global _dirty_roles if repository_name not in _roledb_dict or repository_name not in _dirty_roles: - raise securesystemslib.exceptions.InvalidNameError('Repository name does' + raise sslib_exceptions.InvalidNameError('Repository name does' ' not' ' exist: ' + repository_name) _dirty_roles[repository_name].update(roles) @@ -531,7 +533,7 @@ def unmark_dirty(roles, repository_name='default'): global _dirty_roles if repository_name not in _roledb_dict or repository_name not in _dirty_roles: - raise securesystemslib.exceptions.InvalidNameError('Repository name does' + raise sslib_exceptions.InvalidNameError('Repository name does' ' not exist: ' + repository_name) for role in roles: @@ -675,7 +677,7 @@ def get_rolenames(repository_name='default'): global _dirty_roles if repository_name not in _roledb_dict or repository_name not in _dirty_roles: - raise securesystemslib.exceptions.InvalidNameError('Repository name does' + raise sslib_exceptions.InvalidNameError('Repository name does' ' not' ' exist: ' + repository_name) return list(_roledb_dict[repository_name].keys()) @@ -1003,7 +1005,7 @@ def clear_roledb(repository_name='default', clear_all=False): global _dirty_roles if repository_name not in _roledb_dict or repository_name not in _dirty_roles: - raise securesystemslib.exceptions.InvalidNameError('Repository name does not' + raise sslib_exceptions.InvalidNameError('Repository name does not' ' exist: ' + repository_name) if clear_all: @@ -1043,7 +1045,7 @@ def _check_rolename(rolename, repository_name='default'): global _dirty_roles if repository_name not in _roledb_dict or repository_name not in _dirty_roles: - raise securesystemslib.exceptions.InvalidNameError('Repository name does not' + raise sslib_exceptions.InvalidNameError('Repository name does not' ' exist: ' + repository_name) if rolename not in _roledb_dict[repository_name]: @@ -1060,13 +1062,13 @@ def _validate_rolename(rolename): 'ROLENAME_SCHEMA' 
prior to calling this function. """ if rolename == '': - raise securesystemslib.exceptions.InvalidNameError('Rolename must *not* be' + raise sslib_exceptions.InvalidNameError('Rolename must *not* be' ' an empty string.') if rolename != rolename.strip(): - raise securesystemslib.exceptions.InvalidNameError('Invalid rolename.' + raise sslib_exceptions.InvalidNameError('Invalid rolename.' ' Cannot start or end with whitespace: ' + rolename) if rolename.startswith('/') or rolename.endswith('/'): - raise securesystemslib.exceptions.InvalidNameError('Invalid rolename.' + raise sslib_exceptions.InvalidNameError('Invalid rolename.' ' Cannot start or end with a "/": ' + rolename) diff --git a/tuf/scripts/repo.py b/tuf/scripts/repo.py index 3b1c279736..6fb18f426f 100755 --- a/tuf/scripts/repo.py +++ b/tuf/scripts/repo.py @@ -147,6 +147,10 @@ import time import fnmatch +import securesystemslib +from securesystemslib import exceptions as sslib_exceptions +from securesystemslib import interface + import tuf from tuf import exceptions from tuf import formats @@ -154,10 +158,6 @@ from tuf import roledb import tuf.repository_tool as repo_tool -# 'pip install securesystemslib[crypto,pynacl]' is required for the CLI, -# which installs the cryptography and pynacl. -import securesystemslib -from securesystemslib import interface import six @@ -458,14 +458,14 @@ def import_privatekey_from_file(keypath, password=None): try: key_object = securesystemslib.keys.decrypt_key(encrypted_key, password) - except securesystemslib.exceptions.CryptoError: + except sslib_exceptions.CryptoError: try: logger.debug( 'Decryption failed. 
Attempting to import a private PEM instead.') key_object = securesystemslib.keys.import_rsakey_from_private_pem( encrypted_key, 'rsassa-pss-sha256', password) - except securesystemslib.exceptions.CryptoError as error: + except sslib_exceptions.CryptoError as error: six.raise_from(exceptions.Error(repr(keypath) + ' cannot be ' ' imported, possibly because an invalid key file is given or ' ' the decryption password is incorrect.'), error) @@ -492,7 +492,7 @@ def import_publickey_from_file(keypath): # An RSA public key is saved to disk in PEM format (not JSON), so the # load_json_file() call above can fail for this reason. Try to potentially # load the PEM string in keypath if an exception is raised. - except securesystemslib.exceptions.Error: + except sslib_exceptions.Error: key_metadata = securesystemslib.interface.import_rsa_publickey_from_file( keypath) @@ -576,7 +576,7 @@ def remove_verification_key(parsed_arguments): # securesystemslib.exceptions.FormatError, and the latter is not raised # because a valid key should have been returned by # import_publickey_from_file(). - except securesystemslib.exceptions.Error: + except sslib_exceptions.Error: print(repr(keypath) + ' is not a trusted key. Skipping.') consistent_snapshot = roledb.get_roleinfo('root', diff --git a/tuf/sig.py b/tuf/sig.py index 8a69bae7c0..8107b4d790 100755 --- a/tuf/sig.py +++ b/tuf/sig.py @@ -50,14 +50,15 @@ import logging +import securesystemslib +from securesystemslib import exceptions as sslib_exceptions + import tuf from tuf import exceptions from tuf import formats from tuf import roledb import tuf.keydb -import securesystemslib - # See 'log.py' to learn how logging is handled in TUF. 
logger = logging.getLogger(__name__) @@ -168,7 +169,7 @@ def get_signature_status(signable, role=None, repository_name='default', try: valid_sig = securesystemslib.keys.verify_signature(key, signature, signed) - except securesystemslib.exceptions.UnsupportedAlgorithmError: + except sslib_exceptions.UnsupportedAlgorithmError: unknown_signing_schemes.append(keyid) continue @@ -299,7 +300,7 @@ def verify(signable, role, repository_name='default', threshold=None, # Note: get_signature_status() is expected to verify that 'threshold' is # not None or <= 0. if threshold is None or threshold <= 0: #pragma: no cover - raise securesystemslib.exceptions.Error("Invalid threshold: " + repr(threshold)) + raise sslib_exceptions.Error("Invalid threshold: " + repr(threshold)) unique_keys = set() for keyid in good_sigs: From fe3daccdb509703d802f5428111d16f230cb1f18 Mon Sep 17 00:00:00 2001 From: Jussi Kukkonen Date: Mon, 11 Jan 2021 21:49:14 +0200 Subject: [PATCH 11/25] imports: Make 'updater' import vendoring-compatible Make the updater imports compatible with vendoring tool by importing the Updater class directly (don't import the whole module to avoid the clash with the obvious variable name 'updater'). Also update the example: This is not required in the clients but tuf source code will be vendored and this import line (even though in a comment) might trigger an error in future vendoring tool releases. Signed-off-by: Jussi Kukkonen --- tuf/client/updater.py | 9 +++++---- tuf/scripts/client.py | 4 ++-- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/tuf/client/updater.py b/tuf/client/updater.py index ddb4c6bbdc..27f0977613 100755 --- a/tuf/client/updater.py +++ b/tuf/client/updater.py @@ -54,12 +54,13 @@ # The client first imports the 'updater.py' module, the only module the # client is required to import. The client will utilize a single class # from this module. 
- import tuf.client.updater + from tuf.client.updater import Updater # The only other module the client interacts with is 'tuf.settings'. The # client accesses this module solely to set the repository directory. # This directory will hold the files downloaded from a remote repository. - tuf.settings.repositories_directory = 'local-repository' + from tuf import settings + settings.repositories_directory = 'local-repository' # Next, the client creates a dictionary object containing the repository # mirrors. The client may download content from any one of these mirrors. @@ -82,7 +83,7 @@ # is called with two arguments. The first argument assigns a name to this # particular updater and the second argument the repository mirrors defined # above. - updater = tuf.client.updater.Updater('updater', repository_mirrors) + updater = Updater('updater', repository_mirrors) # The client next calls the refresh() method to ensure it has the latest # copies of the metadata files. @@ -512,7 +513,7 @@ def get_updater(self, repository_name): # NOTE: State (e.g., keys) should NOT be shared across different # updater instances. logger.debug('Adding updater for ' + repr(repository_name)) - updater = tuf.client.updater.Updater(repository_name, repo_mirrors) + updater = Updater(repository_name, repo_mirrors) except Exception: return None diff --git a/tuf/scripts/client.py b/tuf/scripts/client.py index 9433987ed8..0f4ffea3fc 100755 --- a/tuf/scripts/client.py +++ b/tuf/scripts/client.py @@ -75,7 +75,7 @@ from tuf import exceptions from tuf import log from tuf import settings -import tuf.client.updater +from tuf.client.updater import Updater # See 'log.py' to learn how logging is handled in TUF. logger = logging.getLogger(__name__) @@ -122,7 +122,7 @@ def update_client(parsed_arguments): # Create the repository object using the repository name 'repository' # and the repository mirrors defined above. 
- updater = tuf.client.updater.Updater('tufrepo', repository_mirrors) + updater = Updater('tufrepo', repository_mirrors) # The local destination directory to save the target files. destination_directory = './tuftargets' From 79385cc10f27d814fa47a8870fff077d1ea7c0c4 Mon Sep 17 00:00:00 2001 From: Jussi Kukkonen Date: Tue, 12 Jan 2021 10:21:12 +0200 Subject: [PATCH 12/25] imports: Fix securesystemslib.formats imports Make the import compatible with vendoring tool and alias the import so it does not clash with the local module. Fix all references to the module in the code. Signed-off-by: Jussi Kukkonen --- tuf/client/updater.py | 15 ++++---- tuf/developer_tool.py | 37 ++++++++++---------- tuf/download.py | 5 +-- tuf/formats.py | 62 ++++++++++++++++----------------- tuf/keydb.py | 25 +++++++------- tuf/log.py | 12 +++---- tuf/mirrors.py | 3 +- tuf/repository_lib.py | 59 ++++++++++++++++---------------- tuf/repository_tool.py | 78 +++++++++++++++++++++--------------------- tuf/roledb.py | 43 +++++++++++------------ tuf/scripts/repo.py | 3 +- tuf/sig.py | 11 +++--- 12 files changed, 181 insertions(+), 172 deletions(-) diff --git a/tuf/client/updater.py b/tuf/client/updater.py index 27f0977613..fdbd2d8b03 100755 --- a/tuf/client/updater.py +++ b/tuf/client/updater.py @@ -132,6 +132,7 @@ import io from securesystemslib import exceptions as sslib_exceptions +from securesystemslib import formats as sslib_formats import tuf from tuf import download @@ -200,7 +201,7 @@ def __init__(self, map_file): # Is 'map_file' a path? If not, raise # 'securesystemslib.exceptions.FormatError'. The actual content of the map # file is validated later on in this method. - securesystemslib.formats.PATH_SCHEMA.check_match(map_file) + sslib_formats.PATH_SCHEMA.check_match(map_file) # A dictionary mapping repositories to TUF updaters. 
self.repository_names_to_updaters = {} @@ -689,7 +690,7 @@ def __init__(self, repository_name, repository_mirrors, fetcher=None): # number of objects and object types and that all dict # keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if there is a mistmatch. - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) formats.MIRRORDICT_SCHEMA.check_match(repository_mirrors) # Save the validated arguments. @@ -1052,7 +1053,7 @@ def refresh(self, unsafely_update_root_if_necessary=True): # number of objects and object types, and that all dict # keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if the check fail. - securesystemslib.formats.BOOLEAN_SCHEMA.check_match( + sslib_formats.BOOLEAN_SCHEMA.check_match( unsafely_update_root_if_necessary) # Update the top-level metadata. The _update_metadata_if_changed() and @@ -1358,7 +1359,7 @@ def _verify_root_self_signed(self, signable): keyids = signable['signed']['roles']['root']['keyids'] keys = signable['signed']['keys'] signatures = signable['signatures'] - signed = securesystemslib.formats.encode_canonical( + signed = sslib_formats.encode_canonical( signable['signed']).encode('utf-8') verified_sig_keyids = set() @@ -2971,7 +2972,7 @@ def remove_obsolete_targets(self, destination_directory): # Does 'destination_directory' have the correct format? # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - securesystemslib.formats.PATH_SCHEMA.check_match(destination_directory) + sslib_formats.PATH_SCHEMA.check_match(destination_directory) # Iterate the rolenames and verify whether the 'previous' directory # contains a target no longer found in 'current'. @@ -3061,7 +3062,7 @@ def updated_targets(self, targets, destination_directory): # Do the arguments have the correct format? # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. 
formats.TARGETINFOS_SCHEMA.check_match(targets) - securesystemslib.formats.PATH_SCHEMA.check_match(destination_directory) + sslib_formats.PATH_SCHEMA.check_match(destination_directory) # Keep track of the target objects and filepaths of updated targets. # Return 'updated_targets' and use 'updated_targetpaths' to avoid @@ -3159,7 +3160,7 @@ def download_target(self, target, destination_directory, # keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if the check fail. formats.TARGETINFO_SCHEMA.check_match(target) - securesystemslib.formats.PATH_SCHEMA.check_match(destination_directory) + sslib_formats.PATH_SCHEMA.check_match(destination_directory) # Extract the target file information. target_filepath = target['filepath'] diff --git a/tuf/developer_tool.py b/tuf/developer_tool.py index 0d728693c2..b527ff8f0a 100755 --- a/tuf/developer_tool.py +++ b/tuf/developer_tool.py @@ -39,6 +39,7 @@ import json from securesystemslib import exceptions as sslib_exceptions +from securesystemslib import formats as sslib_formats import tuf from tuf import exceptions @@ -197,11 +198,11 @@ def __init__(self, project_name, metadata_directory, targets_directory, # types, and that all dict keys are properly named. Raise # 'securesystemslib.exceptions.FormatError' if any are improperly # formatted. 
- securesystemslib.formats.NAME_SCHEMA.check_match(project_name) - securesystemslib.formats.PATH_SCHEMA.check_match(metadata_directory) - securesystemslib.formats.PATH_SCHEMA.check_match(targets_directory) - securesystemslib.formats.ANY_STRING_SCHEMA.check_match(file_prefix) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(project_name) + sslib_formats.PATH_SCHEMA.check_match(metadata_directory) + sslib_formats.PATH_SCHEMA.check_match(targets_directory) + sslib_formats.ANY_STRING_SCHEMA.check_match(file_prefix) + sslib_formats.NAME_SCHEMA.check_match(repository_name) self.metadata_directory = metadata_directory self.targets_directory = targets_directory @@ -252,7 +253,7 @@ def write(self, write_partial=False): # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(write_partial) + sslib_formats.BOOLEAN_SCHEMA.check_match(write_partial) # At this point the tuf.keydb and roledb stores must be fully # populated, otherwise write() throwns a 'tuf.Repository' exception if @@ -589,13 +590,13 @@ def create_new_project(project_name, metadata_directory, # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - securesystemslib.formats.PATH_SCHEMA.check_match(metadata_directory) + sslib_formats.PATH_SCHEMA.check_match(metadata_directory) # Do the same for the location in the repo and the project name, we must # ensure they are valid pathnames. 
- securesystemslib.formats.NAME_SCHEMA.check_match(project_name) - securesystemslib.formats.ANY_STRING_SCHEMA.check_match(location_in_repository) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(project_name) + sslib_formats.ANY_STRING_SCHEMA.check_match(location_in_repository) + sslib_formats.NAME_SCHEMA.check_match(repository_name) # for the targets directory we do the same, but first, let's find out what # layout the user needs, layout_type is a variable that is usually set to @@ -611,10 +612,10 @@ def create_new_project(project_name, metadata_directory, layout_type = 'repo-like' if targets_directory is not None: - securesystemslib.formats.PATH_SCHEMA.check_match(targets_directory) + sslib_formats.PATH_SCHEMA.check_match(targets_directory) if key is not None: - securesystemslib.formats.KEY_SCHEMA.check_match(key) + sslib_formats.KEY_SCHEMA.check_match(key) # Set the metadata and targets directories. These directories # are created if they do not exist. @@ -722,9 +723,9 @@ def _save_project_configuration(metadata_directory, targets_directory, """ # Schema check for the arguments. - securesystemslib.formats.PATH_SCHEMA.check_match(metadata_directory) - securesystemslib.formats.PATH_SCHEMA.check_match(prefix) - securesystemslib.formats.PATH_SCHEMA.check_match(targets_directory) + sslib_formats.PATH_SCHEMA.check_match(metadata_directory) + sslib_formats.PATH_SCHEMA.check_match(prefix) + sslib_formats.PATH_SCHEMA.check_match(targets_directory) formats.RELPATH_SCHEMA.check_match(project_name) cfg_file_directory = metadata_directory @@ -803,11 +804,11 @@ def load_project(project_directory, prefix='', new_targets_location=None, # Does 'repository_directory' have the correct format? # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. 
- securesystemslib.formats.PATH_SCHEMA.check_match(project_directory) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.PATH_SCHEMA.check_match(project_directory) + sslib_formats.NAME_SCHEMA.check_match(repository_name) # Do the same for the prefix - securesystemslib.formats.ANY_STRING_SCHEMA.check_match(prefix) + sslib_formats.ANY_STRING_SCHEMA.check_match(prefix) # Clear the role and key databases since we are loading in a new project. roledb.clear_roledb(clear_all=True) diff --git a/tuf/download.py b/tuf/download.py index 18f41da4f2..1b22d0a309 100755 --- a/tuf/download.py +++ b/tuf/download.py @@ -36,6 +36,7 @@ import tempfile import securesystemslib +from securesystemslib import formats as sslib_formats import securesystemslib.util import six @@ -87,7 +88,7 @@ def safe_download(url, required_length, fetcher): # Do all of the arguments have the appropriate format? # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - securesystemslib.formats.URL_SCHEMA.check_match(url) + sslib_formats.URL_SCHEMA.check_match(url) formats.LENGTH_SCHEMA.check_match(required_length) return _download_file(url, required_length, fetcher, STRICT_REQUIRED_LENGTH=True) @@ -135,7 +136,7 @@ def unsafe_download(url, required_length, fetcher): # Do all of the arguments have the appropriate format? # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. 
- securesystemslib.formats.URL_SCHEMA.check_match(url) + sslib_formats.URL_SCHEMA.check_match(url) formats.LENGTH_SCHEMA.check_match(required_length) return _download_file(url, required_length, fetcher, STRICT_REQUIRED_LENGTH=False) diff --git a/tuf/formats.py b/tuf/formats.py index cfc1a8f053..9527b51223 100755 --- a/tuf/formats.py +++ b/tuf/formats.py @@ -70,8 +70,8 @@ import copy from securesystemslib import exceptions as sslib_exceptions -import securesystemslib.formats -import securesystemslib.schema as SCHEMA +from securesystemslib import formats as sslib_formats +from securesystemslib import schema as SCHEMA import tuf from tuf import exceptions @@ -135,9 +135,9 @@ ROLE_SCHEMA = SCHEMA.Object( object_name = 'ROLE_SCHEMA', name = SCHEMA.Optional(ROLENAME_SCHEMA), - keyids = securesystemslib.formats.KEYIDS_SCHEMA, + keyids = sslib_formats.KEYIDS_SCHEMA, threshold = THRESHOLD_SCHEMA, - terminating = SCHEMA.Optional(securesystemslib.formats.BOOLEAN_SCHEMA), + terminating = SCHEMA.Optional(sslib_formats.BOOLEAN_SCHEMA), paths = SCHEMA.Optional(RELPATHS_SCHEMA), path_hash_prefixes = SCHEMA.Optional(PATH_HASH_PREFIXES_SCHEMA)) @@ -152,13 +152,13 @@ # repository (corresponding to the repository belonging to named repository in # the dictionary key) ROLEDICTDB_SCHEMA = SCHEMA.DictOf( - key_schema = securesystemslib.formats.NAME_SCHEMA, + key_schema = sslib_formats.NAME_SCHEMA, value_schema = ROLEDICT_SCHEMA) # Command argument list, as used by the CLI tool. # Example: {'keytype': ed25519, 'expires': 365,} COMMAND_SCHEMA = SCHEMA.DictOf( - key_schema = securesystemslib.formats.NAME_SCHEMA, + key_schema = sslib_formats.NAME_SCHEMA, value_schema = SCHEMA.Any()) # A dictionary holding version information. @@ -276,7 +276,7 @@ # A dict of repository names to mirrors. 
REPO_NAMES_TO_MIRRORS_SCHEMA = SCHEMA.DictOf( key_schema = NAME_SCHEMA, - value_schema = SCHEMA.ListOf(securesystemslib.formats.URL_SCHEMA)) + value_schema = SCHEMA.ListOf(sslib_formats.URL_SCHEMA)) # An object containing the map file's "mapping" attribute. MAPPING_SCHEMA = SCHEMA.ListOf(SCHEMA.Object( @@ -335,7 +335,7 @@ previous_threshold = SCHEMA.Optional(THRESHOLD_SCHEMA), version = SCHEMA.Optional(METADATAVERSION_SCHEMA), expires = SCHEMA.Optional(ISO8601_DATETIME_SCHEMA), - signatures = SCHEMA.Optional(securesystemslib.formats.SIGNATURES_SCHEMA), + signatures = SCHEMA.Optional(sslib_formats.SIGNATURES_SCHEMA), paths = SCHEMA.Optional(SCHEMA.OneOf([RELPATHS_SCHEMA, PATH_FILEINFO_SCHEMA])), path_hash_prefixes = SCHEMA.Optional(PATH_HASH_PREFIXES_SCHEMA), delegations = SCHEMA.Optional(DELEGATIONS_SCHEMA), @@ -345,7 +345,7 @@ SIGNABLE_SCHEMA = SCHEMA.Object( object_name = 'SIGNABLE_SCHEMA', signed = SCHEMA.Any(), - signatures = SCHEMA.ListOf(securesystemslib.formats.SIGNATURE_SCHEMA)) + signatures = SCHEMA.ListOf(sslib_formats.SIGNATURE_SCHEMA)) # Root role: indicates root keys and top-level roles. 
ROOT_SCHEMA = SCHEMA.Object( @@ -374,7 +374,7 @@ object_name = 'SNAPSHOT_SCHEMA', _type = SCHEMA.String('snapshot'), version = METADATAVERSION_SCHEMA, - expires = securesystemslib.formats.ISO8601_DATETIME_SCHEMA, + expires = sslib_formats.ISO8601_DATETIME_SCHEMA, spec_version = SPECIFICATION_VERSION_SCHEMA, meta = FILEINFODICT_SCHEMA) @@ -384,7 +384,7 @@ _type = SCHEMA.String('timestamp'), spec_version = SPECIFICATION_VERSION_SCHEMA, version = METADATAVERSION_SCHEMA, - expires = securesystemslib.formats.ISO8601_DATETIME_SCHEMA, + expires = sslib_formats.ISO8601_DATETIME_SCHEMA, meta = FILEINFODICT_SCHEMA) @@ -393,10 +393,10 @@ object_name = 'PROJECT_CFG_SCHEMA', project_name = SCHEMA.AnyString(), layout_type = SCHEMA.OneOf([SCHEMA.String('repo-like'), SCHEMA.String('flat')]), - targets_location = securesystemslib.formats.PATH_SCHEMA, - metadata_location = securesystemslib.formats.PATH_SCHEMA, - prefix = securesystemslib.formats.PATH_SCHEMA, - public_keys = securesystemslib.formats.KEYDICT_SCHEMA, + targets_location = sslib_formats.PATH_SCHEMA, + metadata_location = sslib_formats.PATH_SCHEMA, + prefix = sslib_formats.PATH_SCHEMA, + public_keys = sslib_formats.KEYDICT_SCHEMA, threshold = SCHEMA.Integer(lo = 0, hi = 2) ) @@ -404,7 +404,7 @@ # such as a url, the path of the directory metadata files, etc. MIRROR_SCHEMA = SCHEMA.Object( object_name = 'MIRROR_SCHEMA', - url_prefix = securesystemslib.formats.URL_SCHEMA, + url_prefix = sslib_formats.URL_SCHEMA, metadata_path = SCHEMA.Optional(RELPATH_SCHEMA), targets_path = SCHEMA.Optional(RELPATH_SCHEMA), confined_target_dirs = SCHEMA.Optional(RELPATHS_SCHEMA), @@ -424,7 +424,7 @@ object_name = 'MIRRORLIST_SCHEMA', _type = SCHEMA.String('mirrors'), version = METADATAVERSION_SCHEMA, - expires = securesystemslib.formats.ISO8601_DATETIME_SCHEMA, + expires = sslib_formats.ISO8601_DATETIME_SCHEMA, mirrors = SCHEMA.ListOf(MIRROR_SCHEMA)) # Any of the role schemas (e.g., TIMESTAMP_SCHEMA, SNAPSHOT_SCHEMA, etc.) 
@@ -442,14 +442,14 @@ general = SCHEMA.Object( object_name = '[general]', transfer_module = SCHEMA.String('scp'), - metadata_path = securesystemslib.formats.PATH_SCHEMA, - targets_directory = securesystemslib.formats.PATH_SCHEMA), + metadata_path = sslib_formats.PATH_SCHEMA, + targets_directory = sslib_formats.PATH_SCHEMA), scp=SCHEMA.Object( object_name = '[scp]', - host = securesystemslib.formats.URL_SCHEMA, - user = securesystemslib.formats.NAME_SCHEMA, - identity_file = securesystemslib.formats.PATH_SCHEMA, - remote_directory = securesystemslib.formats.PATH_SCHEMA)) + host = sslib_formats.URL_SCHEMA, + user = sslib_formats.NAME_SCHEMA, + identity_file = sslib_formats.PATH_SCHEMA, + remote_directory = sslib_formats.PATH_SCHEMA)) # The format of the resulting "receive config dict" after extraction from the # receive configuration file (i.e., receive.cfg). The receive config file @@ -459,11 +459,11 @@ RECEIVECONFIG_SCHEMA = SCHEMA.Object( object_name = 'RECEIVECONFIG_SCHEMA', general=SCHEMA.Object( object_name = '[general]', - pushroots = SCHEMA.ListOf(securesystemslib.formats.PATH_SCHEMA), - repository_directory = securesystemslib.formats.PATH_SCHEMA, - metadata_directory = securesystemslib.formats.PATH_SCHEMA, - targets_directory = securesystemslib.formats.PATH_SCHEMA, - backup_directory = securesystemslib.formats.PATH_SCHEMA)) + pushroots = SCHEMA.ListOf(sslib_formats.PATH_SCHEMA), + repository_directory = sslib_formats.PATH_SCHEMA, + metadata_directory = sslib_formats.PATH_SCHEMA, + targets_directory = sslib_formats.PATH_SCHEMA, + backup_directory = sslib_formats.PATH_SCHEMA)) @@ -633,7 +633,7 @@ def expiry_string_to_datetime(expires): """ # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. 
- securesystemslib.formats.ISO8601_DATETIME_SCHEMA.check_match(expires) + sslib_formats.ISO8601_DATETIME_SCHEMA.check_match(expires) try: return datetime.datetime.strptime(expires, "%Y-%m-%dT%H:%M:%SZ") @@ -715,7 +715,7 @@ def unix_timestamp_to_datetime(unix_timestamp): # Is 'unix_timestamp' properly formatted? # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - securesystemslib.formats.UNIX_TIMESTAMP_SCHEMA.check_match(unix_timestamp) + sslib_formats.UNIX_TIMESTAMP_SCHEMA.check_match(unix_timestamp) # Convert 'unix_timestamp' to a 'time.struct_time', in UTC. The Daylight # Savings Time (DST) flag is set to zero. datetime.fromtimestamp() is not @@ -946,7 +946,7 @@ def expected_meta_rolename(meta_rolename): # This check ensures 'meta_rolename' conforms to # 'securesystemslib.formats.NAME_SCHEMA'. # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - securesystemslib.formats.NAME_SCHEMA.check_match(meta_rolename) + sslib_formats.NAME_SCHEMA.check_match(meta_rolename) return meta_rolename.lower() diff --git a/tuf/keydb.py b/tuf/keydb.py index 6a57739462..57b6769173 100755 --- a/tuf/keydb.py +++ b/tuf/keydb.py @@ -46,6 +46,7 @@ import securesystemslib from securesystemslib import exceptions as sslib_exceptions +from securesystemslib import formats as sslib_formats from tuf import exceptions from tuf import formats @@ -105,7 +106,7 @@ def create_keydb_from_root_metadata(root_metadata, repository_name='default'): formats.ROOT_SCHEMA.check_match(root_metadata) # Does 'repository_name' have the correct format? - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) # Clear the key database for 'repository_name', or create it if non-existent. if repository_name in _keydb_dict: @@ -170,7 +171,7 @@ def create_keydb(repository_name): """ # Is 'repository_name' properly formatted? Raise 'securesystemslib.exceptions.FormatError' if not. 
- securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) if repository_name in _keydb_dict: raise sslib_exceptions.InvalidNameError('Repository name already exists:' @@ -206,7 +207,7 @@ def remove_keydb(repository_name): """ # Is 'repository_name' properly formatted? Raise 'securesystemslib.exceptions.FormatError' if not. - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) if repository_name not in _keydb_dict: logger.warning('Repository name does not exist: ' + repr(repository_name)) @@ -267,15 +268,15 @@ def add_key(key_dict, keyid=None, repository_name='default'): # This check will ensure 'key_dict' has the appropriate number of objects # and object types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError if the check fails. - securesystemslib.formats.ANYKEY_SCHEMA.check_match(key_dict) + sslib_formats.ANYKEY_SCHEMA.check_match(key_dict) # Does 'repository_name' have the correct format? - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) # Does 'keyid' have the correct format? if keyid is not None: # Raise 'securesystemslib.exceptions.FormatError' if the check fails. - securesystemslib.formats.KEYID_SCHEMA.check_match(keyid) + sslib_formats.KEYID_SCHEMA.check_match(keyid) # Check if each keyid found in 'key_dict' matches 'keyid'. if keyid != key_dict['keyid']: @@ -332,10 +333,10 @@ def get_key(keyid, repository_name='default'): # This check will ensure 'keyid' has the appropriate number of objects # and object types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' is the match fails. - securesystemslib.formats.KEYID_SCHEMA.check_match(keyid) + sslib_formats.KEYID_SCHEMA.check_match(keyid) # Does 'repository_name' have the correct format? 
- securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) if repository_name not in _keydb_dict: raise sslib_exceptions.InvalidNameError('Repository name does not exist:' @@ -385,10 +386,10 @@ def remove_key(keyid, repository_name='default'): # This check will ensure 'keyid' has the appropriate number of objects # and object types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' is the match fails. - securesystemslib.formats.KEYID_SCHEMA.check_match(keyid) + sslib_formats.KEYID_SCHEMA.check_match(keyid) # Does 'repository_name' have the correct format? - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) if repository_name not in _keydb_dict: raise sslib_exceptions.InvalidNameError('Repository name does not exist:' @@ -434,8 +435,8 @@ def clear_keydb(repository_name='default', clear_all=False): # Do the arguments have the correct format? Raise 'securesystemslib.exceptions.FormatError' if # 'repository_name' is improperly formatted. - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(clear_all) + sslib_formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.BOOLEAN_SCHEMA.check_match(clear_all) global _keydb_dict diff --git a/tuf/log.py b/tuf/log.py index 318695c821..cdeb98f542 100755 --- a/tuf/log.py +++ b/tuf/log.py @@ -73,12 +73,12 @@ import time from securesystemslib import exceptions as sslib_exceptions +from securesystemslib import formats as sslib_formats import tuf from tuf import exceptions from tuf import settings -import securesystemslib.formats # Setting a handler's log level filters only logging messages of that level # (and above). 
For example, setting the built-in StreamHandler's log level to @@ -214,7 +214,7 @@ def set_log_level(log_level=_DEFAULT_LOG_LEVEL): # Does 'log_level' have the correct format? # Raise 'securesystems.exceptions.FormatError' if there is a mismatch. - securesystemslib.formats.LOGLEVEL_SCHEMA.check_match(log_level) + sslib_formats.LOGLEVEL_SCHEMA.check_match(log_level) logger.setLevel(log_level) @@ -245,7 +245,7 @@ def set_filehandler_log_level(log_level=_DEFAULT_FILE_LOG_LEVEL): # Does 'log_level' have the correct format? # Raise 'securesystems.exceptions.FormatError' if there is a mismatch. - securesystemslib.formats.LOGLEVEL_SCHEMA.check_match(log_level) + sslib_formats.LOGLEVEL_SCHEMA.check_match(log_level) if file_handler: file_handler.setLevel(log_level) @@ -283,7 +283,7 @@ def set_console_log_level(log_level=_DEFAULT_CONSOLE_LOG_LEVEL): # Does 'log_level' have the correct format? # Raise 'securesystems.exceptions.FormatError' if there is a mismatch. - securesystemslib.formats.LOGLEVEL_SCHEMA.check_match(log_level) + sslib_formats.LOGLEVEL_SCHEMA.check_match(log_level) # Assign to the global console_handler object. global console_handler @@ -322,7 +322,7 @@ def add_console_handler(log_level=_DEFAULT_CONSOLE_LOG_LEVEL): # Does 'log_level' have the correct format? # Raise 'securesystems.exceptions.FormatError' if there is a mismatch. - securesystemslib.formats.LOGLEVEL_SCHEMA.check_match(log_level) + sslib_formats.LOGLEVEL_SCHEMA.check_match(log_level) # Assign to the global console_handler object. global console_handler @@ -408,7 +408,7 @@ def enable_file_logging(log_filename=settings.LOG_FILENAME): """ # Are the arguments properly formatted? 
- securesystemslib.formats.PATH_SCHEMA.check_match(log_filename) + sslib_formats.PATH_SCHEMA.check_match(log_filename) global file_handler diff --git a/tuf/mirrors.py b/tuf/mirrors.py index 913c5eb717..632d823fcc 100755 --- a/tuf/mirrors.py +++ b/tuf/mirrors.py @@ -34,6 +34,7 @@ import securesystemslib from securesystemslib import exceptions as sslib_exceptions +from securesystemslib import formats as sslib_formats import tuf from tuf import formats @@ -88,7 +89,7 @@ def get_list_of_mirrors(file_type, file_path, mirrors_dict): # Checking if all the arguments have appropriate format. formats.RELPATH_SCHEMA.check_match(file_path) formats.MIRRORDICT_SCHEMA.check_match(mirrors_dict) - securesystemslib.formats.NAME_SCHEMA.check_match(file_type) + sslib_formats.NAME_SCHEMA.check_match(file_type) # Verify 'file_type' is supported. if file_type not in _SUPPORTED_FILE_TYPES: diff --git a/tuf/repository_lib.py b/tuf/repository_lib.py index 66399271ae..8e0ad5ea67 100644 --- a/tuf/repository_lib.py +++ b/tuf/repository_lib.py @@ -40,6 +40,7 @@ import tempfile from securesystemslib import exceptions as sslib_exceptions +from securesystemslib import formats as sslib_formats import tuf from tuf import exceptions @@ -334,7 +335,7 @@ def _remove_invalid_and_duplicate_signatures(signable, repository_name): signature_keyids = [] for signature in signable['signatures']: - signed = securesystemslib.formats.encode_canonical(signable['signed']).encode('utf-8') + signed = sslib_formats.encode_canonical(signable['signed']).encode('utf-8') keyid = signature['keyid'] key = None @@ -920,7 +921,7 @@ def get_top_level_metadata_filenames(metadata_directory): # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. 
- securesystemslib.formats.PATH_SCHEMA.check_match(metadata_directory) + sslib_formats.PATH_SCHEMA.check_match(metadata_directory) # Store the filepaths of the top-level roles, including the # 'metadata_directory' for each one. @@ -985,7 +986,7 @@ def get_targets_metadata_fileinfo(filename, storage_backend, custom=None): # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - securesystemslib.formats.PATH_SCHEMA.check_match(filename) + sslib_formats.PATH_SCHEMA.check_match(filename) if custom is not None: formats.CUSTOM_SCHEMA.check_match(custom) @@ -1241,9 +1242,9 @@ def generate_root_metadata(version, expiration_date, consistent_snapshot, # 'securesystemslib.exceptions.FormatError' if any of the arguments are # improperly formatted. formats.METADATAVERSION_SCHEMA.check_match(version) - securesystemslib.formats.ISO8601_DATETIME_SCHEMA.check_match(expiration_date) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(consistent_snapshot) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.ISO8601_DATETIME_SCHEMA.check_match(expiration_date) + sslib_formats.BOOLEAN_SCHEMA.check_match(consistent_snapshot) + sslib_formats.NAME_SCHEMA.check_match(repository_name) # The role and key dictionaries to be saved in the root metadata object. # Conformant to 'ROLEDICT_SCHEMA' and 'KEYDICT_SCHEMA', respectively. @@ -1390,12 +1391,12 @@ def generate_targets_metadata(targets_directory, target_files, version, # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. 
- securesystemslib.formats.PATH_SCHEMA.check_match(targets_directory) + sslib_formats.PATH_SCHEMA.check_match(targets_directory) formats.PATH_FILEINFO_SCHEMA.check_match(target_files) formats.METADATAVERSION_SCHEMA.check_match(version) - securesystemslib.formats.ISO8601_DATETIME_SCHEMA.check_match(expiration_date) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(write_consistent_targets) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(use_existing_fileinfo) + sslib_formats.ISO8601_DATETIME_SCHEMA.check_match(expiration_date) + sslib_formats.BOOLEAN_SCHEMA.check_match(write_consistent_targets) + sslib_formats.BOOLEAN_SCHEMA.check_match(use_existing_fileinfo) if write_consistent_targets and use_existing_fileinfo: raise sslib_exceptions.Error('Cannot support writing consistent' @@ -1622,13 +1623,13 @@ def generate_snapshot_metadata(metadata_directory, version, expiration_date, # This check ensures arguments have the appropriate number of objects and # object types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if the check fails. 
- securesystemslib.formats.PATH_SCHEMA.check_match(metadata_directory) + sslib_formats.PATH_SCHEMA.check_match(metadata_directory) formats.METADATAVERSION_SCHEMA.check_match(version) - securesystemslib.formats.ISO8601_DATETIME_SCHEMA.check_match(expiration_date) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(consistent_snapshot) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(use_length) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(use_hashes) + sslib_formats.ISO8601_DATETIME_SCHEMA.check_match(expiration_date) + sslib_formats.BOOLEAN_SCHEMA.check_match(consistent_snapshot) + sslib_formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.BOOLEAN_SCHEMA.check_match(use_length) + sslib_formats.BOOLEAN_SCHEMA.check_match(use_hashes) # Snapshot's 'fileinfodict' shall contain the version number of Root, # Targets, and all delegated roles of the repository. @@ -1759,12 +1760,12 @@ def generate_timestamp_metadata(snapshot_file_path, version, expiration_date, # This check ensures arguments have the appropriate number of objects and # object types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if the check fails. 
- securesystemslib.formats.PATH_SCHEMA.check_match(snapshot_file_path) + sslib_formats.PATH_SCHEMA.check_match(snapshot_file_path) formats.METADATAVERSION_SCHEMA.check_match(version) - securesystemslib.formats.ISO8601_DATETIME_SCHEMA.check_match(expiration_date) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(use_length) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(use_hashes) + sslib_formats.ISO8601_DATETIME_SCHEMA.check_match(expiration_date) + sslib_formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.BOOLEAN_SCHEMA.check_match(use_length) + sslib_formats.BOOLEAN_SCHEMA.check_match(use_hashes) snapshot_fileinfo = {} @@ -1841,9 +1842,9 @@ def sign_metadata(metadata_object, keyids, filename, repository_name): # object types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if the check fails. formats.ANYROLE_SCHEMA.check_match(metadata_object) - securesystemslib.formats.KEYIDS_SCHEMA.check_match(keyids) - securesystemslib.formats.PATH_SCHEMA.check_match(filename) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.KEYIDS_SCHEMA.check_match(keyids) + sslib_formats.PATH_SCHEMA.check_match(filename) + sslib_formats.NAME_SCHEMA.check_match(repository_name) # Make sure the metadata is in 'signable' format. That is, # it contains a 'signatures' field containing the result @@ -1860,7 +1861,7 @@ def sign_metadata(metadata_object, keyids, filename, repository_name): # Generate the signature using the appropriate signing method. 
if key['keytype'] in SUPPORTED_KEY_TYPES: if 'private' in key['keyval']: - signed = securesystemslib.formats.encode_canonical(signable['signed']).encode('utf-8') + signed = sslib_formats.encode_canonical(signable['signed']).encode('utf-8') try: signature = securesystemslib.keys.create_signature(key, signed) signable['signatures'].append(signature) @@ -1939,9 +1940,9 @@ def write_metadata_file(metadata, filename, version_number, consistent_snapshot, # object types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if the check fails. formats.SIGNABLE_SCHEMA.check_match(metadata) - securesystemslib.formats.PATH_SCHEMA.check_match(filename) + sslib_formats.PATH_SCHEMA.check_match(filename) formats.METADATAVERSION_SCHEMA.check_match(version_number) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(consistent_snapshot) + sslib_formats.BOOLEAN_SCHEMA.check_match(consistent_snapshot) if storage_backend is None: storage_backend = securesystemslib.storage.FilesystemBackend() @@ -2203,8 +2204,8 @@ def create_tuf_client_directory(repository_directory, client_directory): # This check ensures arguments have the appropriate number of objects and # object types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if the check fails. - securesystemslib.formats.PATH_SCHEMA.check_match(repository_directory) - securesystemslib.formats.PATH_SCHEMA.check_match(client_directory) + sslib_formats.PATH_SCHEMA.check_match(repository_directory) + sslib_formats.PATH_SCHEMA.check_match(client_directory) # Set the absolute path of the Repository's metadata directory. The metadata # directory should be the one served by the Live repository. 
At a minimum, diff --git a/tuf/repository_tool.py b/tuf/repository_tool.py index add03ebfe1..ee12b76604 100755 --- a/tuf/repository_tool.py +++ b/tuf/repository_tool.py @@ -43,6 +43,7 @@ from collections import deque from securesystemslib import exceptions as sslib_exceptions +from securesystemslib import formats as sslib_formats import tuf from tuf import exceptions @@ -52,7 +53,6 @@ import tuf.repository_lib as repo_lib import securesystemslib.keys -import securesystemslib.formats import securesystemslib.util import six @@ -231,14 +231,14 @@ def __init__(self, repository_directory, metadata_directory, # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. Raise # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - securesystemslib.formats.PATH_SCHEMA.check_match(repository_directory) - securesystemslib.formats.PATH_SCHEMA.check_match(metadata_directory) - securesystemslib.formats.PATH_SCHEMA.check_match(targets_directory) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(use_timestamp_length) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(use_timestamp_hashes) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(use_snapshot_length) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(use_snapshot_hashes) + sslib_formats.PATH_SCHEMA.check_match(repository_directory) + sslib_formats.PATH_SCHEMA.check_match(metadata_directory) + sslib_formats.PATH_SCHEMA.check_match(targets_directory) + sslib_formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.BOOLEAN_SCHEMA.check_match(use_timestamp_length) + sslib_formats.BOOLEAN_SCHEMA.check_match(use_timestamp_hashes) + sslib_formats.BOOLEAN_SCHEMA.check_match(use_snapshot_length) + sslib_formats.BOOLEAN_SCHEMA.check_match(use_snapshot_hashes) self._repository_directory = repository_directory self._metadata_directory = metadata_directory @@ 
-313,7 +313,7 @@ def writeall(self, consistent_snapshot=False, use_existing_fileinfo=False): # types, and that all dict keys are properly named. Raise # 'securesystemslib.exceptions.FormatError' if any are improperly # formatted. - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(consistent_snapshot) + sslib_formats.BOOLEAN_SCHEMA.check_match(consistent_snapshot) # At this point, tuf.keydb and roledb must be fully populated, # otherwise writeall() throws a 'tuf.exceptions.UnsignedMetadataError' for @@ -626,9 +626,9 @@ def get_filepaths_in_directory(files_directory, recursive_walk=False, # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. Raise # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - securesystemslib.formats.PATH_SCHEMA.check_match(files_directory) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(recursive_walk) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(followlinks) + sslib_formats.PATH_SCHEMA.check_match(files_directory) + sslib_formats.BOOLEAN_SCHEMA.check_match(recursive_walk) + sslib_formats.BOOLEAN_SCHEMA.check_match(followlinks) # Ensure a valid directory is given. if not os.path.isdir(files_directory): @@ -732,7 +732,7 @@ def add_verification_key(self, key, expires=None): # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. Raise # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - securesystemslib.formats.ANYKEY_SCHEMA.check_match(key) + sslib_formats.ANYKEY_SCHEMA.check_match(key) # If 'expires' is unset, choose a default expiration for 'key'. By # default, Root, Targets, Snapshot, and Timestamp keys are set to expire @@ -843,7 +843,7 @@ def remove_verification_key(self, key): # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. 
Raise # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - securesystemslib.formats.ANYKEY_SCHEMA.check_match(key) + sslib_formats.ANYKEY_SCHEMA.check_match(key) keyid = key['keyid'] roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) @@ -893,7 +893,7 @@ def load_signing_key(self, key): # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. Raise # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - securesystemslib.formats.ANYKEY_SCHEMA.check_match(key) + sslib_formats.ANYKEY_SCHEMA.check_match(key) # Ensure the private portion of the key is available, otherwise signatures # cannot be generated when the metadata file is written to disk. @@ -953,7 +953,7 @@ def unload_signing_key(self, key): # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. Raise # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - securesystemslib.formats.ANYKEY_SCHEMA.check_match(key) + sslib_formats.ANYKEY_SCHEMA.check_match(key) # Update the role's 'signing_keys' field in 'roledb'. roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) @@ -1014,8 +1014,8 @@ def add_signature(self, signature, mark_role_as_dirty=True): # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. Raise # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. 
- securesystemslib.formats.SIGNATURE_SCHEMA.check_match(signature) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(mark_role_as_dirty) + sslib_formats.SIGNATURE_SCHEMA.check_match(signature) + sslib_formats.BOOLEAN_SCHEMA.check_match(mark_role_as_dirty) roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) @@ -1068,7 +1068,7 @@ def remove_signature(self, signature): # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. Raise # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - securesystemslib.formats.SIGNATURE_SCHEMA.check_match(signature) + sslib_formats.SIGNATURE_SCHEMA.check_match(signature) roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) @@ -1545,7 +1545,7 @@ def __init__(self, repository_name): # Is 'repository_name' properly formatted? Otherwise, raise a # tuf.exceptions.FormatError exception. - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) # By default, 'root' metadata is set to expire 1 year from the current # time. The expiration may be modified. @@ -1608,7 +1608,7 @@ def __init__(self, repository_name): # Is 'repository_name' properly formatted? Otherwise, raise a # tuf.exceptions.FormatError exception. - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) # By default, 'snapshot' metadata is set to expire 1 week from the current # time. The expiration may be modified. @@ -1692,9 +1692,9 @@ def __init__(self, targets_directory, rolename='targets', roleinfo=None, # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. Raise # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. 
- securesystemslib.formats.PATH_SCHEMA.check_match(targets_directory) + sslib_formats.PATH_SCHEMA.check_match(targets_directory) formats.ROLENAME_SCHEMA.check_match(rolename) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) if roleinfo is not None: formats.ROLEDB_SCHEMA.check_match(roleinfo) @@ -1935,7 +1935,7 @@ def add_paths(self, paths, child_rolename): # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - securesystemslib.formats.PATHS_SCHEMA.check_match(paths) + sslib_formats.PATHS_SCHEMA.check_match(paths) formats.ROLENAME_SCHEMA.check_match(child_rolename) # Ensure that 'child_rolename' exists, otherwise it will not have an entry @@ -2364,10 +2364,10 @@ def delegate(self, rolename, public_keys, paths, threshold=1, # types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. formats.ROLENAME_SCHEMA.check_match(rolename) - securesystemslib.formats.ANYKEYLIST_SCHEMA.check_match(public_keys) + sslib_formats.ANYKEYLIST_SCHEMA.check_match(public_keys) formats.RELPATHS_SCHEMA.check_match(paths) formats.THRESHOLD_SCHEMA.check_match(threshold) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(terminating) + sslib_formats.BOOLEAN_SCHEMA.check_match(terminating) if list_of_targets is not None: formats.RELPATHS_SCHEMA.check_match(list_of_targets) @@ -2567,8 +2567,8 @@ def delegate_hashed_bins(self, list_of_targets, keys_of_hashed_bins, # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. 
- securesystemslib.formats.PATHS_SCHEMA.check_match(list_of_targets) - securesystemslib.formats.ANYKEYLIST_SCHEMA.check_match(keys_of_hashed_bins) + sslib_formats.PATHS_SCHEMA.check_match(list_of_targets) + sslib_formats.ANYKEYLIST_SCHEMA.check_match(keys_of_hashed_bins) formats.NUMBINS_SCHEMA.check_match(number_of_bins) prefix_length, prefix_count, bin_size = repo_lib.get_bin_numbers(number_of_bins) @@ -2711,7 +2711,7 @@ def add_target_to_bin(self, target_filepath, number_of_bins=DEFAULT_NUM_BINS, # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - securesystemslib.formats.PATH_SCHEMA.check_match(target_filepath) + sslib_formats.PATH_SCHEMA.check_match(target_filepath) formats.NUMBINS_SCHEMA.check_match(number_of_bins) # TODO: check target_filepath is sane @@ -2773,7 +2773,7 @@ def remove_target_from_bin(self, target_filepath, # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - securesystemslib.formats.PATH_SCHEMA.check_match(target_filepath) + sslib_formats.PATH_SCHEMA.check_match(target_filepath) formats.NUMBINS_SCHEMA.check_match(number_of_bins) # TODO: check target_filepath is sane? @@ -2925,8 +2925,8 @@ def create_new_repository(repository_directory, repository_name='default', # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. 
- securesystemslib.formats.PATH_SCHEMA.check_match(repository_directory) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.PATH_SCHEMA.check_match(repository_directory) + sslib_formats.NAME_SCHEMA.check_match(repository_name) if storage_backend is None: storage_backend = securesystemslib.storage.FilesystemBackend() @@ -3035,8 +3035,8 @@ def load_repository(repository_directory, repository_name='default', # Does 'repository_directory' have the correct format? # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - securesystemslib.formats.PATH_SCHEMA.check_match(repository_directory) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.PATH_SCHEMA.check_match(repository_directory) + sslib_formats.NAME_SCHEMA.check_match(repository_name) if storage_backend is None: storage_backend = securesystemslib.storage.FilesystemBackend() @@ -3217,14 +3217,14 @@ def dump_signable_metadata(metadata_filepath): """ # Are the argument properly formatted? - securesystemslib.formats.PATH_SCHEMA.check_match(metadata_filepath) + sslib_formats.PATH_SCHEMA.check_match(metadata_filepath) signable = securesystemslib.util.load_json_file(metadata_filepath) # Is 'signable' a valid metadata file? formats.SIGNABLE_SCHEMA.check_match(signable) - return securesystemslib.formats.encode_canonical(signable['signed']) + return sslib_formats.encode_canonical(signable['signed']) @@ -3272,8 +3272,8 @@ def append_signature(signature, metadata_filepath): """ # Are the arguments properly formatted? 
- securesystemslib.formats.SIGNATURE_SCHEMA.check_match(signature) - securesystemslib.formats.PATH_SCHEMA.check_match(metadata_filepath) + sslib_formats.SIGNATURE_SCHEMA.check_match(signature) + sslib_formats.PATH_SCHEMA.check_match(metadata_filepath) signable = securesystemslib.util.load_json_file(metadata_filepath) diff --git a/tuf/roledb.py b/tuf/roledb.py index 6377076e33..e5914af9be 100755 --- a/tuf/roledb.py +++ b/tuf/roledb.py @@ -54,6 +54,7 @@ import securesystemslib from securesystemslib import exceptions as sslib_exceptions +from securesystemslib import formats as sslib_formats import tuf from tuf import exceptions @@ -117,7 +118,7 @@ def create_roledb_from_root_metadata(root_metadata, repository_name='default'): formats.ROOT_SCHEMA.check_match(root_metadata) # Is 'repository_name' formatted correctly? - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) global _roledb_dict global _dirty_roles @@ -185,7 +186,7 @@ def create_roledb(repository_name): # Is 'repository_name' properly formatted? If not, raise # 'securesystemslib.exceptions.FormatError'. - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) global _roledb_dict global _dirty_roles @@ -228,7 +229,7 @@ def remove_roledb(repository_name): # Is 'repository_name' properly formatted? If not, raise # 'securesystemslib.exceptions.FormatError'. - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) global _roledb_dict global _dirty_roles @@ -303,7 +304,7 @@ def add_role(rolename, roleinfo, repository_name='default'): formats.ROLEDB_SCHEMA.check_match(roleinfo) # Is 'repository_name' correctly formatted? 
- securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) global _roledb_dict @@ -382,8 +383,8 @@ def update_roleinfo(rolename, roleinfo, mark_role_as_dirty=True, repository_name # This check will ensure arguments have the appropriate number of objects # and object types, and that all dict keys are properly named. formats.ROLENAME_SCHEMA.check_match(rolename) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(mark_role_as_dirty) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.BOOLEAN_SCHEMA.check_match(mark_role_as_dirty) + sslib_formats.NAME_SCHEMA.check_match(repository_name) # Does 'roleinfo' have the correct object format? formats.ROLEDB_SCHEMA.check_match(roleinfo) @@ -441,7 +442,7 @@ def get_dirty_roles(repository_name='default'): # Does 'repository_name' have the correct format? Raise # 'securesystemslib.exceptions.FormatError' if not. - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) global _roledb_dict global _dirty_roles @@ -483,8 +484,8 @@ def mark_dirty(roles, repository_name='default'): # Are the arguments properly formatted? If not, raise # securesystemslib.exceptions.FormatError. - securesystemslib.formats.NAMES_SCHEMA.check_match(roles) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAMES_SCHEMA.check_match(roles) + sslib_formats.NAME_SCHEMA.check_match(repository_name) global _roledb_dict global _dirty_roles @@ -526,8 +527,8 @@ def unmark_dirty(roles, repository_name='default'): # Are the arguments properly formatted? If not, raise # securesystemslib.exceptions.FormatError. 
- securesystemslib.formats.NAMES_SCHEMA.check_match(roles) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAMES_SCHEMA.check_match(roles) + sslib_formats.NAME_SCHEMA.check_match(repository_name) global _roledb_dict global _dirty_roles @@ -627,7 +628,7 @@ def remove_role(rolename, repository_name='default'): # Does 'repository_name' have the correct format? Raise # 'securesystemslib.exceptions.FormatError' if it is improperly formatted. - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) # Raises securesystemslib.exceptions.FormatError, # tuf.exceptions.UnknownRoleError, or @@ -671,7 +672,7 @@ def get_rolenames(repository_name='default'): # Does 'repository_name' have the correct format? Raise # 'securesystemslib.exceptions.FormatError' if it is improperly formatted. - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) global _roledb_dict global _dirty_roles @@ -729,7 +730,7 @@ def get_roleinfo(rolename, repository_name='default'): # Is 'repository_name' properly formatted? If not, raise # 'securesystemslib.exceptions.FormatError'. - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) # Raises securesystemslib.exceptions.FormatError, # tuf.exceptions.UnknownRoleError, or @@ -782,7 +783,7 @@ def get_role_keyids(rolename, repository_name='default'): # Raise 'securesystemslib.exceptions.FormatError' if 'repository_name' is # improperly formatted. 
- securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) # Raises securesystemslib.exceptions.FormatError, # tuf.exceptions.UnknownRoleError, or @@ -834,7 +835,7 @@ def get_role_threshold(rolename, repository_name='default'): # Raise 'securesystemslib.exceptions.FormatError' if 'repository_name' is # improperly formatted. - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) # Raises securesystemslib.exceptions.FormatError, # tuf.exceptions.UnknownRoleError, or @@ -885,7 +886,7 @@ def get_role_paths(rolename, repository_name='default'): # Raise 'securesystemslib.exceptions.FormatError' if 'repository_name' is # improperly formatted. - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) # Raises securesystemslib.exceptions.FormatError, # tuf.exceptions.UnknownRoleError, or @@ -945,7 +946,7 @@ def get_delegated_rolenames(rolename, repository_name='default'): # Does 'repository_name' have the correct format? Raise # 'securesystemslib.exceptions.FormatError' if it does not. - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) # Raises securesystemslib.exceptions.FormatError, # tuf.exceptions.UnknownRoleError, or @@ -998,8 +999,8 @@ def clear_roledb(repository_name='default', clear_all=False): # Do the arguments have the correct format? 
If not, raise # 'securesystemslib.exceptions.FormatError' - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(clear_all) + sslib_formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.BOOLEAN_SCHEMA.check_match(clear_all) global _roledb_dict global _dirty_roles @@ -1036,7 +1037,7 @@ def _check_rolename(rolename, repository_name='default'): formats.ROLENAME_SCHEMA.check_match(rolename) # Does 'repository_name' have the correct format? - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) # Raises securesystemslib.exceptions.InvalidNameError. _validate_rolename(rolename) diff --git a/tuf/scripts/repo.py b/tuf/scripts/repo.py index 6fb18f426f..fa6d9c52fa 100755 --- a/tuf/scripts/repo.py +++ b/tuf/scripts/repo.py @@ -149,6 +149,7 @@ import securesystemslib from securesystemslib import exceptions as sslib_exceptions +from securesystemslib import formats as sslib_formats from securesystemslib import interface import tuf @@ -443,7 +444,7 @@ def import_privatekey_from_file(keypath, password=None): confirm=False) # Does 'password' have the correct format? - securesystemslib.formats.PASSWORD_SCHEMA.check_match(password) + sslib_formats.PASSWORD_SCHEMA.check_match(password) # Store the encrypted contents of 'filepath' prior to calling the decryption # routine. diff --git a/tuf/sig.py b/tuf/sig.py index 8107b4d790..7bb1f4ade1 100755 --- a/tuf/sig.py +++ b/tuf/sig.py @@ -52,6 +52,7 @@ import securesystemslib from securesystemslib import exceptions as sslib_exceptions +from securesystemslib import formats as sslib_formats import tuf from tuf import exceptions @@ -128,7 +129,7 @@ def get_signature_status(signable, role=None, repository_name='default', # all dict keys are properly named. Raise # 'securesystemslib.exceptions.FormatError' if the check fails. 
formats.SIGNABLE_SCHEMA.check_match(signable) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) if role is not None: formats.ROLENAME_SCHEMA.check_match(role) @@ -137,7 +138,7 @@ def get_signature_status(signable, role=None, repository_name='default', formats.THRESHOLD_SCHEMA.check_match(threshold) if keyids is not None: - securesystemslib.formats.KEYIDS_SCHEMA.check_match(keyids) + sslib_formats.KEYIDS_SCHEMA.check_match(keyids) # The signature status dictionary returned. signature_status = {} @@ -149,7 +150,7 @@ def get_signature_status(signable, role=None, repository_name='default', # Extract the relevant fields from 'signable' that will allow us to identify # the different classes of keys (i.e., good_sigs, bad_sigs, etc.). - signed = securesystemslib.formats.encode_canonical(signable['signed']).encode('utf-8') + signed = sslib_formats.encode_canonical(signable['signed']).encode('utf-8') signatures = signable['signatures'] # Iterate the signatures and enumerate the signature_status fields. @@ -283,7 +284,7 @@ def verify(signable, role, repository_name='default', threshold=None, formats.SIGNABLE_SCHEMA.check_match(signable) formats.ROLENAME_SCHEMA.check_match(role) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) # Retrieve the signature status. tuf.sig.get_signature_status() raises: # tuf.exceptions.UnknownRoleError @@ -393,7 +394,7 @@ def generate_rsa_signature(signed, rsakey_dict): # We need 'signed' in canonical JSON format to generate # the 'method' and 'sig' fields of the signature. - signed = securesystemslib.formats.encode_canonical(signed).encode('utf-8') + signed = sslib_formats.encode_canonical(signed).encode('utf-8') # Generate the RSA signature. # Raises securesystemslib.exceptions.FormatError and TypeError. 
From f702fdfd0cc240f2f304cab6c7b46db6e0fc0662 Mon Sep 17 00:00:00 2001 From: Jussi Kukkonen Date: Tue, 12 Jan 2021 11:04:59 +0200 Subject: [PATCH 13/25] imports: Fix securesystemslib.keys imports Make them compatible with vendoring, use from securesystemslib import keys as sslib_keys to have the same style as other securesystemslib imports. Note that developer_tool already used a from securesystemslib.keys import ... for some functions so that style was used consistently there. Signed-off-by: Jussi Kukkonen --- tuf/client/updater.py | 6 +++--- tuf/developer_tool.py | 10 +++++----- tuf/keydb.py | 3 ++- tuf/repository_lib.py | 9 +++++---- tuf/repository_tool.py | 4 ++-- tuf/scripts/repo.py | 7 ++++--- tuf/sig.py | 5 +++-- 7 files changed, 24 insertions(+), 20 deletions(-) diff --git a/tuf/client/updater.py b/tuf/client/updater.py index fdbd2d8b03..79eade8d27 100755 --- a/tuf/client/updater.py +++ b/tuf/client/updater.py @@ -133,6 +133,7 @@ from securesystemslib import exceptions as sslib_exceptions from securesystemslib import formats as sslib_formats +from securesystemslib import keys as sslib_keys import tuf from tuf import download @@ -147,7 +148,6 @@ import tuf.keydb import securesystemslib.hash -import securesystemslib.keys import securesystemslib.util import six @@ -967,7 +967,7 @@ def _import_delegations(self, parent_role): # We specify the keyid to ensure that it's the correct keyid # for the key. 
try: - key, _ = securesystemslib.keys.format_metadata_to_key(keyinfo, keyid) + key, _ = sslib_keys.format_metadata_to_key(keyinfo, keyid) tuf.keydb.add_key(key, repository_name=self.repository_name) @@ -1376,7 +1376,7 @@ def _verify_root_self_signed(self, signable): # The ANYKEY_SCHEMA check in verify_signature expects the keydict to # include a keyid key['keyid'] = keyid - valid_sig = securesystemslib.keys.verify_signature(key, signature, signed) + valid_sig = sslib_keys.verify_signature(key, signature, signed) if valid_sig: verified_sig_keyids.add(keyid) diff --git a/tuf/developer_tool.py b/tuf/developer_tool.py index b527ff8f0a..77dba0b565 100755 --- a/tuf/developer_tool.py +++ b/tuf/developer_tool.py @@ -53,7 +53,6 @@ import securesystemslib import securesystemslib.util -import securesystemslib.keys import six @@ -76,7 +75,8 @@ import_rsa_privatekey_from_file) from securesystemslib.keys import ( - format_keyval_to_metadata) + format_keyval_to_metadata, + format_metadata_to_key) from securesystemslib.interface import ( generate_and_write_rsa_keypair, @@ -859,7 +859,7 @@ def load_project(project_directory, prefix='', new_targets_location=None, keydict = project_configuration['public_keys'] for keyid in keydict: - key, junk = securesystemslib.keys.format_metadata_to_key(keydict[keyid]) + key, junk = format_metadata_to_key(keydict[keyid]) project.add_verification_key(key) # Load the project's metadata. 
@@ -898,7 +898,7 @@ def load_project(project_directory, prefix='', new_targets_location=None, repository_name=repository_name) for key_metadata in targets_metadata['delegations']['keys'].values(): - key_object, junk = securesystemslib.keys.format_metadata_to_key(key_metadata) + key_object, junk = format_metadata_to_key(key_metadata) tuf.keydb.add_key(key_object, repository_name=repository_name) for role in targets_metadata['delegations']['roles']: @@ -976,7 +976,7 @@ def load_project(project_directory, prefix='', new_targets_location=None, # Add the keys specified in the delegations field of the Targets role. for key_metadata in metadata_object['delegations']['keys'].values(): - key_object, junk = securesystemslib.keys.format_metadata_to_key(key_metadata) + key_object, junk = format_metadata_to_key(key_metadata) try: tuf.keydb.add_key(key_object, repository_name=repository_name) diff --git a/tuf/keydb.py b/tuf/keydb.py index 57b6769173..e261bfa7ba 100755 --- a/tuf/keydb.py +++ b/tuf/keydb.py @@ -47,6 +47,7 @@ import securesystemslib from securesystemslib import exceptions as sslib_exceptions from securesystemslib import formats as sslib_formats +from securesystemslib import keys as sslib_keys from tuf import exceptions from tuf import formats @@ -126,7 +127,7 @@ def create_keydb_from_root_metadata(root_metadata, repository_name='default'): # format_metadata_to_key() uses the provided keyid as the default keyid. # All other keyids returned are ignored. 
- key_dict, _ = securesystemslib.keys.format_metadata_to_key(key_metadata, + key_dict, _ = sslib_keys.format_metadata_to_key(key_metadata, keyid) # Make sure to update key_dict['keyid'] to use one of the other valid diff --git a/tuf/repository_lib.py b/tuf/repository_lib.py index 8e0ad5ea67..7ea6a2c8f0 100644 --- a/tuf/repository_lib.py +++ b/tuf/repository_lib.py @@ -41,6 +41,7 @@ from securesystemslib import exceptions as sslib_exceptions from securesystemslib import formats as sslib_formats +from securesystemslib import keys as sslib_keys import tuf from tuf import exceptions @@ -349,7 +350,7 @@ def _remove_invalid_and_duplicate_signatures(signable, repository_name): continue # Remove 'signature' from 'signable' if it is an invalid signature. - if not securesystemslib.keys.verify_signature(key, signature, signed): + if not sslib_keys.verify_signature(key, signature, signed): logger.debug('Removing invalid signature for ' + repr(keyid)) signable['signatures'].remove(signature) @@ -666,7 +667,7 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): for keyid, key_metadata in six.iteritems(targets_metadata['delegations']['keys']): # Use the keyid found in the delegation - key_object, _ = securesystemslib.keys.format_metadata_to_key(key_metadata, + key_object, _ = sslib_keys.format_metadata_to_key(key_metadata, keyid) # Add 'key_object' to the list of recognized keys. 
Keys may be shared, @@ -1863,7 +1864,7 @@ def sign_metadata(metadata_object, keyids, filename, repository_name): if 'private' in key['keyval']: signed = sslib_formats.encode_canonical(signable['signed']).encode('utf-8') try: - signature = securesystemslib.keys.create_signature(key, signed) + signature = sslib_keys.create_signature(key, signed) signable['signatures'].append(signature) except Exception: @@ -2298,7 +2299,7 @@ def keys_to_keydict(keys): for key in keys: keyid = key['keyid'] - key_metadata_format = securesystemslib.keys.format_keyval_to_metadata( + key_metadata_format = sslib_keys.format_keyval_to_metadata( key['keytype'], key['scheme'], key['keyval']) new_keydict = {keyid: key_metadata_format} diff --git a/tuf/repository_tool.py b/tuf/repository_tool.py index ee12b76604..cd6920b548 100755 --- a/tuf/repository_tool.py +++ b/tuf/repository_tool.py @@ -52,7 +52,6 @@ from tuf import roledb import tuf.repository_lib as repo_lib -import securesystemslib.keys import securesystemslib.util import six @@ -89,6 +88,7 @@ import_ecdsa_privatekey_from_file) from securesystemslib.keys import ( + format_metadata_to_key, generate_rsa_key, generate_ecdsa_key, generate_ed25519_key, @@ -3167,7 +3167,7 @@ def load_repository(repository_directory, repository_name='default', # The repo may have used hashing algorithms for the generated keyids # that doesn't match the client's set of hash algorithms. Make sure # to only used the repo's selected hashing algorithms. 
- key_object, keyids = securesystemslib.keys.format_metadata_to_key(key_metadata, + key_object, keyids = format_metadata_to_key(key_metadata, keyid_hash_algorithms=key_metadata['keyid_hash_algorithms']) try: for keyid in keyids: # pragma: no branch diff --git a/tuf/scripts/repo.py b/tuf/scripts/repo.py index fa6d9c52fa..794a1009cf 100755 --- a/tuf/scripts/repo.py +++ b/tuf/scripts/repo.py @@ -150,6 +150,7 @@ import securesystemslib from securesystemslib import exceptions as sslib_exceptions from securesystemslib import formats as sslib_formats +from securesystemslib import keys as sslib_keys from securesystemslib import interface import tuf @@ -457,13 +458,13 @@ def import_privatekey_from_file(keypath, password=None): # the derived encryption key from 'password'. Raise # 'securesystemslib.exceptions.CryptoError' if the decryption fails. try: - key_object = securesystemslib.keys.decrypt_key(encrypted_key, password) + key_object = sslib_keys.decrypt_key(encrypted_key, password) except sslib_exceptions.CryptoError: try: logger.debug( 'Decryption failed. 
Attempting to import a private PEM instead.') - key_object = securesystemslib.keys.import_rsakey_from_private_pem( + key_object = sslib_keys.import_rsakey_from_private_pem( encrypted_key, 'rsassa-pss-sha256', password) except sslib_exceptions.CryptoError as error: @@ -497,7 +498,7 @@ def import_publickey_from_file(keypath): key_metadata = securesystemslib.interface.import_rsa_publickey_from_file( keypath) - key_object, junk = securesystemslib.keys.format_metadata_to_key(key_metadata) + key_object, junk = sslib_keys.format_metadata_to_key(key_metadata) if key_object['keytype'] not in SUPPORTED_KEY_TYPES: raise exceptions.Error('Trying to import an unsupported key' diff --git a/tuf/sig.py b/tuf/sig.py index 7bb1f4ade1..b67d6c797d 100755 --- a/tuf/sig.py +++ b/tuf/sig.py @@ -53,6 +53,7 @@ import securesystemslib from securesystemslib import exceptions as sslib_exceptions from securesystemslib import formats as sslib_formats +from securesystemslib import keys as sslib_keys import tuf from tuf import exceptions @@ -168,7 +169,7 @@ def get_signature_status(signable, role=None, repository_name='default', # Does the signature use an unknown/unsupported signing scheme? try: - valid_sig = securesystemslib.keys.verify_signature(key, signature, signed) + valid_sig = sslib_keys.verify_signature(key, signature, signed) except sslib_exceptions.UnsupportedAlgorithmError: unknown_signing_schemes.append(keyid) @@ -398,6 +399,6 @@ def generate_rsa_signature(signed, rsakey_dict): # Generate the RSA signature. # Raises securesystemslib.exceptions.FormatError and TypeError. 
- signature = securesystemslib.keys.create_signature(rsakey_dict, signed) + signature = sslib_keys.create_signature(rsakey_dict, signed) return signature From a7c2b8d7d8c386453e2315bd31a3f7b26d9af04b Mon Sep 17 00:00:00 2001 From: Jussi Kukkonen Date: Tue, 12 Jan 2021 11:16:14 +0200 Subject: [PATCH 14/25] imports: Fix securesystemslib.interface imports Make them compatible with vendoring, use from securesystemslib import interface as sslib_interface to have the same style as other securesystemslib imports. Signed-off-by: Jussi Kukkonen --- tuf/repository_lib.py | 10 +++++----- tuf/scripts/repo.py | 24 ++++++++++++------------ 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/tuf/repository_lib.py b/tuf/repository_lib.py index 7ea6a2c8f0..5ea095015d 100644 --- a/tuf/repository_lib.py +++ b/tuf/repository_lib.py @@ -41,6 +41,7 @@ from securesystemslib import exceptions as sslib_exceptions from securesystemslib import formats as sslib_formats +from securesystemslib import interface as sslib_interface from securesystemslib import keys as sslib_keys import tuf @@ -54,7 +55,6 @@ import securesystemslib import securesystemslib.hash -import securesystemslib.interface import securesystemslib.util import six @@ -758,14 +758,14 @@ def import_rsa_privatekey_from_file(filepath, password=None): # prompt for a password if the key file is encrypted and a password isn't # given. try: - private_key = securesystemslib.interface.import_rsa_privatekey_from_file( + private_key = sslib_interface.import_rsa_privatekey_from_file( filepath, password) # The user might not have given a password for an encrypted private key. # Prompt for a password for convenience. 
except sslib_exceptions.CryptoError: if password is None: - private_key = securesystemslib.interface.import_rsa_privatekey_from_file( + private_key = sslib_interface.import_rsa_privatekey_from_file( filepath, password, prompt=True) else: @@ -821,14 +821,14 @@ def import_ed25519_privatekey_from_file(filepath, password=None): # automatically prompt for a password if the key file is encrypted and a # password isn't given. try: - private_key = securesystemslib.interface.import_ed25519_privatekey_from_file( + private_key = sslib_interface.import_ed25519_privatekey_from_file( filepath, password) # The user might not have given a password for an encrypted private key. # Prompt for a password for convenience. except sslib_exceptions.CryptoError: if password is None: - private_key = securesystemslib.interface.import_ed25519_privatekey_from_file( + private_key = sslib_interface.import_ed25519_privatekey_from_file( filepath, password, prompt=True) else: diff --git a/tuf/scripts/repo.py b/tuf/scripts/repo.py index 794a1009cf..c2ca0ae961 100755 --- a/tuf/scripts/repo.py +++ b/tuf/scripts/repo.py @@ -151,7 +151,7 @@ from securesystemslib import exceptions as sslib_exceptions from securesystemslib import formats as sslib_formats from securesystemslib import keys as sslib_keys -from securesystemslib import interface +from securesystemslib import interface as sslib_interface import tuf from tuf import exceptions @@ -397,16 +397,16 @@ def gen_key(parsed_arguments): ' key types: ' + repr(SUPPORTED_CLI_KEYTYPES)) elif parsed_arguments.key == ECDSA_KEYTYPE: - keypath = securesystemslib.interface._generate_and_write_ecdsa_keypair( + keypath = sslib_interface._generate_and_write_ecdsa_keypair( **keygen_kwargs) elif parsed_arguments.key == ED25519_KEYTYPE: - keypath = securesystemslib.interface._generate_and_write_ed25519_keypair( + keypath = sslib_interface._generate_and_write_ed25519_keypair( **keygen_kwargs) # RSA key.. 
else: - keypath = securesystemslib.interface._generate_and_write_rsa_keypair( + keypath = sslib_interface._generate_and_write_rsa_keypair( **keygen_kwargs) @@ -440,8 +440,8 @@ def import_privatekey_from_file(keypath, password=None): # worry about leaking sensitive information about the key's location. # However, care should be taken when including the full path in exceptions # and log files. - password = securesystemslib.interface.get_password('Enter a password for' - ' the encrypted key (' + interface.TERM_RED + repr(keypath) + interface.TERM_RED + '): ', + password = sslib_interface.get_password('Enter a password for' + ' the encrypted key (' + sslib_interface.TERM_RED + repr(keypath) + sslib_interface.TERM_RED + '): ', confirm=False) # Does 'password' have the correct format? @@ -495,7 +495,7 @@ def import_publickey_from_file(keypath): # load_json_file() call above can fail for this reason. Try to potentially # load the PEM string in keypath if an exception is raised. except sslib_exceptions.Error: - key_metadata = securesystemslib.interface.import_rsa_publickey_from_file( + key_metadata = sslib_interface.import_rsa_publickey_from_file( keypath) key_object, junk = sslib_keys.format_metadata_to_key(key_metadata) @@ -835,7 +835,7 @@ def remove_targets(parsed_arguments): # repo.py --init --pw my_password: parsed_arguments.pw = 'my_password' # repo.py --init --pw: The user is prompted for a password, as follows: if not parsed_arguments.pw: - parsed_arguments.pw = securesystemslib.interface.get_password( + parsed_arguments.pw = sslib_interface.get_password( prompt='Enter a password for the top-level role keys: ', confirm=True) targets_private = import_privatekey_from_file( @@ -907,19 +907,19 @@ def set_top_level_keys(repository, parsed_arguments): # repo.py --init --*_pw my_pw: parsed_arguments.*_pw = 'my_pw' # repo.py --init --*_pw: The user is prompted for a password. 
- securesystemslib.interface._generate_and_write_ed25519_keypair( + sslib_interface._generate_and_write_ed25519_keypair( password=parsed_arguments.root_pw, filepath=os.path.join(parsed_arguments.path, KEYSTORE_DIR, ROOT_KEY_NAME), prompt=(not parsed_arguments.root_pw)) - securesystemslib.interface._generate_and_write_ed25519_keypair( + sslib_interface._generate_and_write_ed25519_keypair( password=parsed_arguments.targets_pw, filepath=os.path.join(parsed_arguments.path, KEYSTORE_DIR, TARGETS_KEY_NAME), prompt=(not parsed_arguments.targets_pw)) - securesystemslib.interface._generate_and_write_ed25519_keypair( + sslib_interface._generate_and_write_ed25519_keypair( password=parsed_arguments.snapshot_pw, filepath=os.path.join(parsed_arguments.path, KEYSTORE_DIR, SNAPSHOT_KEY_NAME), prompt=(not parsed_arguments.snapshot_pw)) - securesystemslib.interface._generate_and_write_ed25519_keypair( + sslib_interface._generate_and_write_ed25519_keypair( password=parsed_arguments.timestamp_pw, filepath=os.path.join(parsed_arguments.path, KEYSTORE_DIR, TIMESTAMP_KEY_NAME), prompt=(not parsed_arguments.timestamp_pw)) From 46ebfd04617a6910822f6579ff841a92b5945283 Mon Sep 17 00:00:00 2001 From: Jussi Kukkonen Date: Tue, 12 Jan 2021 11:45:26 +0200 Subject: [PATCH 15/25] imports: Fix securesystemslib.util imports Make the import compatible with vendoring tool and alias the import so it does not clash with the local module. Fix all references to the module in the code. In one instance import a specific function to avoid a more complex redirection in the code. 
Signed-off-by: Jussi Kukkonen --- tuf/client/updater.py | 25 ++++++++++++------------- tuf/developer_tool.py | 10 +++++----- tuf/download.py | 1 - tuf/mirrors.py | 14 ++++---------- tuf/repository_lib.py | 18 +++++++++--------- tuf/repository_tool.py | 10 +++++----- tuf/scripts/repo.py | 12 ++++++------ 7 files changed, 41 insertions(+), 49 deletions(-) diff --git a/tuf/client/updater.py b/tuf/client/updater.py index 79eade8d27..629f8e214a 100755 --- a/tuf/client/updater.py +++ b/tuf/client/updater.py @@ -134,6 +134,7 @@ from securesystemslib import exceptions as sslib_exceptions from securesystemslib import formats as sslib_formats from securesystemslib import keys as sslib_keys +from securesystemslib import util as sslib_util import tuf from tuf import download @@ -148,7 +149,6 @@ import tuf.keydb import securesystemslib.hash -import securesystemslib.util import six # The Timestamp role does not have signed metadata about it; otherwise we @@ -208,7 +208,7 @@ def __init__(self, map_file): try: # The map file dictionary that associates targets with repositories. - self.map_file = securesystemslib.util.load_json_file(map_file) + self.map_file = sslib_util.load_json_file(map_file) except (sslib_exceptions.Error) as e: raise exceptions.Error('Cannot load the map file: ' + str(e)) @@ -845,7 +845,7 @@ def _load_metadata_from_file(self, metadata_set, metadata_role): # Load the file. The loaded object should conform to # 'tuf.formats.SIGNABLE_SCHEMA'. 
try: - metadata_signable = securesystemslib.util.load_json_file( + metadata_signable = sslib_util.load_json_file( metadata_filepath) # Although the metadata file may exist locally, it may not @@ -1434,7 +1434,7 @@ def _verify_metadata_file(self, metadata_file_object, metadata = metadata_file_object.read().decode('utf-8') try: - metadata_signable = securesystemslib.util.load_json_string(metadata) + metadata_signable = sslib_util.load_json_string(metadata) except Exception as exception: raise exceptions.InvalidMetadataJSONError(exception) @@ -1531,7 +1531,7 @@ def _get_metadata_file(self, metadata_role, remote_filename, # 'file_object' is also verified if decompressed above (i.e., the # uncompressed version). metadata_signable = \ - securesystemslib.util.load_json_string(file_object.read().decode('utf-8')) + sslib_util.load_json_string(file_object.read().decode('utf-8')) # Determine if the specification version number is supported. It is # assumed that "spec_version" is in (major.minor.fix) format, (for @@ -1706,7 +1706,7 @@ def _update_metadata(self, metadata_role, upperbound_filelength, version=None): current_filepath = os.path.join(self.metadata_directory['current'], metadata_filename) current_filepath = os.path.abspath(current_filepath) - securesystemslib.util.ensure_parent_dir(current_filepath) + sslib_util.ensure_parent_dir(current_filepath) previous_filepath = os.path.join(self.metadata_directory['previous'], metadata_filename) @@ -1714,15 +1714,15 @@ def _update_metadata(self, metadata_role, upperbound_filelength, version=None): if os.path.exists(current_filepath): # Previous metadata might not exist, say when delegations are added. - securesystemslib.util.ensure_parent_dir(previous_filepath) + sslib_util.ensure_parent_dir(previous_filepath) shutil.move(current_filepath, previous_filepath) # Next, move the verified updated metadata file to the 'current' directory. 
metadata_file_object.seek(0) metadata_signable = \ - securesystemslib.util.load_json_string(metadata_file_object.read().decode('utf-8')) + sslib_util.load_json_string(metadata_file_object.read().decode('utf-8')) - securesystemslib.util.persist_temp_file(metadata_file_object, current_filepath) + sslib_util.persist_temp_file(metadata_file_object, current_filepath) # Extract the metadata object so we can store it to the metadata store. # 'current_metadata_object' set to 'None' if there is not an object @@ -2153,8 +2153,7 @@ def _update_fileinfo(self, metadata_filename): # Extract the file information from the actual file and save it # to the fileinfo store. - file_length, hashes = securesystemslib.util.get_file_details( - current_filepath) + file_length, hashes = sslib_util.get_file_details(current_filepath) metadata_fileinfo = formats.make_targets_fileinfo(file_length, hashes) self.fileinfo[metadata_filename] = metadata_fileinfo @@ -2199,7 +2198,7 @@ def _move_current_to_previous(self, metadata_role): # Move the current path to the previous path. 
if os.path.exists(current_filepath): - securesystemslib.util.ensure_parent_dir(previous_filepath) + sslib_util.ensure_parent_dir(previous_filepath) os.rename(current_filepath, previous_filepath) @@ -3195,4 +3194,4 @@ def download_target(self, target, destination_directory, target_file_object = self._get_target_file(target_filepath, trusted_length, trusted_hashes, prefix_filename_with_hash) - securesystemslib.util.persist_temp_file(target_file_object, destination) + sslib_util.persist_temp_file(target_file_object, destination) diff --git a/tuf/developer_tool.py b/tuf/developer_tool.py index 77dba0b565..5d589452d5 100755 --- a/tuf/developer_tool.py +++ b/tuf/developer_tool.py @@ -52,7 +52,7 @@ import tuf.repository_tool import securesystemslib -import securesystemslib.util +from securesystemslib import util as sslib_util import six @@ -270,7 +270,7 @@ def write(self, write_partial=False): # Ensure the parent directories of 'metadata_filepath' exist, otherwise an # IO exception is raised if 'metadata_filepath' is written to a # sub-directory. - securesystemslib.util.ensure_parent_dir(delegated_filename) + sslib_util.ensure_parent_dir(delegated_filename) _generate_and_write_metadata(delegated_rolename, delegated_filename, write_partial, self.targets_directory, prefix=self.prefix, @@ -820,7 +820,7 @@ def load_project(project_directory, prefix='', new_targets_location=None, # Load the cfg file and the project. config_filename = os.path.join(project_directory, PROJECT_FILENAME) - project_configuration = securesystemslib.util.load_json_file(config_filename) + project_configuration = sslib_util.load_json_file(config_filename) formats.PROJECT_CFG_SCHEMA.check_match(project_configuration) targets_directory = os.path.join(project_directory, @@ -865,7 +865,7 @@ def load_project(project_directory, prefix='', new_targets_location=None, # Load the project's metadata. 
targets_metadata_path = os.path.join(project_directory, metadata_directory, project_filename) - signable = securesystemslib.util.load_json_file(targets_metadata_path) + signable = sslib_util.load_json_file(targets_metadata_path) try: formats.check_signable_object_format(signable) except exceptions.UnsignedMetadataError: @@ -936,7 +936,7 @@ def load_project(project_directory, prefix='', new_targets_location=None, continue signable = None - signable = securesystemslib.util.load_json_file(metadata_path) + signable = sslib_util.load_json_file(metadata_path) # Strip the prefix from the local working copy, it will be added again # when the targets metadata is written to disk. diff --git a/tuf/download.py b/tuf/download.py index 1b22d0a309..c6ddeec36b 100755 --- a/tuf/download.py +++ b/tuf/download.py @@ -37,7 +37,6 @@ import securesystemslib from securesystemslib import formats as sslib_formats -import securesystemslib.util import six import tuf diff --git a/tuf/mirrors.py b/tuf/mirrors.py index 632d823fcc..f01166a233 100755 --- a/tuf/mirrors.py +++ b/tuf/mirrors.py @@ -35,6 +35,7 @@ import securesystemslib from securesystemslib import exceptions as sslib_exceptions from securesystemslib import formats as sslib_formats +from securesystemslib.util import file_in_confined_directories import tuf from tuf import formats @@ -97,14 +98,6 @@ def get_list_of_mirrors(file_type, file_path, mirrors_dict): ' Supported file types: ' + repr(_SUPPORTED_FILE_TYPES)) path_key = 'metadata_path' if file_type == 'meta' else 'targets_path' - # Reference to 'securesystemslib.util.file_in_confined_directories()' (improve - # readability). This function checks whether a mirror should serve a file to - # the client. A client may be confined to certain paths on a repository - # mirror when fetching target files. This field may be set by the client - # when the repository mirror is added to the 'tuf.client.updater.Updater' - # object. 
- in_confined_directory = securesystemslib.util.file_in_confined_directories - list_of_mirrors = [] for junk, mirror_info in six.iteritems(mirrors_dict): # Does mirror serve this file type at all? @@ -116,8 +109,9 @@ def get_list_of_mirrors(file_type, file_path, mirrors_dict): if path_key == 'targets_path': full_filepath = os.path.join(path, file_path) confined_target_dirs = mirror_info.get('confined_target_dirs') - # confined_target_dirs is an optional field - if confined_target_dirs and not in_confined_directory(full_filepath, + # confined_target_dirs is optional and can be used to confine the client to + # certain paths on a repository mirror when fetching target files. + if confined_target_dirs and not file_in_confined_directories(full_filepath, confined_target_dirs): continue diff --git a/tuf/repository_lib.py b/tuf/repository_lib.py index 5ea095015d..f33a0ce838 100644 --- a/tuf/repository_lib.py +++ b/tuf/repository_lib.py @@ -43,6 +43,7 @@ from securesystemslib import formats as sslib_formats from securesystemslib import interface as sslib_interface from securesystemslib import keys as sslib_keys +from securesystemslib import util as sslib_util import tuf from tuf import exceptions @@ -55,7 +56,6 @@ import securesystemslib import securesystemslib.hash -import securesystemslib.util import six import securesystemslib.storage @@ -502,7 +502,7 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): # written. try: # Initialize the key and role metadata of the top-level roles. - signable = securesystemslib.util.load_json_file(root_filename) + signable = sslib_util.load_json_file(root_filename) try: formats.check_signable_object_format(signable) except exceptions.UnsignedMetadataError: @@ -551,7 +551,7 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): # Load 'timestamp.json'. A Timestamp role file without a version number is # always written. 
try: - signable = securesystemslib.util.load_json_file(timestamp_filename) + signable = sslib_util.load_json_file(timestamp_filename) timestamp_metadata = signable['signed'] for signature in signable['signatures']: repository.timestamp.add_signature(signature, mark_role_as_dirty=False) @@ -589,7 +589,7 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): str(snapshot_version) + '.' + basename + METADATA_EXTENSION) try: - signable = securesystemslib.util.load_json_file(snapshot_filename) + signable = sslib_util.load_json_file(snapshot_filename) try: formats.check_signable_object_format(signable) except exceptions.UnsignedMetadataError: @@ -631,7 +631,7 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): targets_filename = os.path.join(dirname, str(targets_version) + '.' + basename) try: - signable = securesystemslib.util.load_json_file(targets_filename) + signable = sslib_util.load_json_file(targets_filename) try: formats.check_signable_object_format(signable) except exceptions.UnsignedMetadataError: @@ -996,7 +996,7 @@ def get_targets_metadata_fileinfo(filename, storage_backend, custom=None): # dictionary that a client might define to include additional # file information, such as the file's author, version/revision # numbers, etc. 
- filesize, filehashes = securesystemslib.util.get_file_details(filename, + filesize, filehashes = sslib_util.get_file_details(filename, settings.FILE_HASH_ALGORITHMS, storage_backend) return formats.make_targets_fileinfo(filesize, filehashes, custom=custom) @@ -1542,11 +1542,11 @@ def _get_hashes_and_length_if_needed(use_length, use_hashes, full_file_path, length = None hashes = None if use_length: - length = securesystemslib.util.get_file_length(full_file_path, + length = sslib_util.get_file_length(full_file_path, storage_backend) if use_hashes: - hashes = securesystemslib.util.get_file_hashes(full_file_path, + hashes = sslib_util.get_file_hashes(full_file_path, settings.FILE_HASH_ALGORITHMS, storage_backend) return length, hashes @@ -1980,7 +1980,7 @@ def write_metadata_file(metadata, filename, version_number, consistent_snapshot, # the consistent snapshot and point 'written_filename' to it. logger.debug('Creating a consistent file for ' + repr(filename)) logger.debug('Saving ' + repr(written_consistent_filename)) - securesystemslib.util.persist_temp_file(file_object, + sslib_util.persist_temp_file(file_object, written_consistent_filename, should_close=False) else: diff --git a/tuf/repository_tool.py b/tuf/repository_tool.py index cd6920b548..0912384ef5 100755 --- a/tuf/repository_tool.py +++ b/tuf/repository_tool.py @@ -44,6 +44,7 @@ from securesystemslib import exceptions as sslib_exceptions from securesystemslib import formats as sslib_formats +from securesystemslib import util as sslib_util import tuf from tuf import exceptions @@ -52,7 +53,6 @@ from tuf import roledb import tuf.repository_lib as repo_lib -import securesystemslib.util import six import securesystemslib.storage @@ -3112,7 +3112,7 @@ def load_repository(repository_directory, repository_name='default', signable = None try: - signable = securesystemslib.util.load_json_file(metadata_path) + signable = sslib_util.load_json_file(metadata_path) except (sslib_exceptions.Error, ValueError, IOError): 
logger.debug('Tried to load metadata with invalid JSON' @@ -3219,7 +3219,7 @@ def dump_signable_metadata(metadata_filepath): # Are the argument properly formatted? sslib_formats.PATH_SCHEMA.check_match(metadata_filepath) - signable = securesystemslib.util.load_json_file(metadata_filepath) + signable = sslib_util.load_json_file(metadata_filepath) # Is 'signable' a valid metadata file? formats.SIGNABLE_SCHEMA.check_match(signable) @@ -3275,7 +3275,7 @@ def append_signature(signature, metadata_filepath): sslib_formats.SIGNATURE_SCHEMA.check_match(signature) sslib_formats.PATH_SCHEMA.check_match(metadata_filepath) - signable = securesystemslib.util.load_json_file(metadata_filepath) + signable = sslib_util.load_json_file(metadata_filepath) # Is 'signable' a valid metadata file? formats.SIGNABLE_SCHEMA.check_match(signable) @@ -3288,7 +3288,7 @@ def append_signature(signature, metadata_filepath): separators=(',', ': '), sort_keys=True).encode('utf-8') file_object.write(written_metadata_content) - securesystemslib.util.persist_temp_file(file_object, metadata_filepath) + sslib_util.persist_temp_file(file_object, metadata_filepath) diff --git a/tuf/scripts/repo.py b/tuf/scripts/repo.py index c2ca0ae961..4f0e47302c 100755 --- a/tuf/scripts/repo.py +++ b/tuf/scripts/repo.py @@ -150,8 +150,9 @@ import securesystemslib from securesystemslib import exceptions as sslib_exceptions from securesystemslib import formats as sslib_formats -from securesystemslib import keys as sslib_keys from securesystemslib import interface as sslib_interface +from securesystemslib import keys as sslib_keys +from securesystemslib import util as sslib_util import tuf from tuf import exceptions @@ -419,8 +420,8 @@ def gen_key(parsed_arguments): pubkey_repo_path = os.path.join(parsed_arguments.path, KEYSTORE_DIR, os.path.basename(keypath + '.pub')) - securesystemslib.util.ensure_parent_dir(privkey_repo_path) - securesystemslib.util.ensure_parent_dir(pubkey_repo_path) + 
sslib_util.ensure_parent_dir(privkey_repo_path) + sslib_util.ensure_parent_dir(pubkey_repo_path) # Move them from the CWD to the repo's keystore. shutil.move(keypath, privkey_repo_path) @@ -489,7 +490,7 @@ def import_privatekey_from_file(keypath, password=None): def import_publickey_from_file(keypath): try: - key_metadata = securesystemslib.util.load_json_file(keypath) + key_metadata = sslib_util.load_json_file(keypath) # An RSA public key is saved to disk in PEM format (not JSON), so the # load_json_file() call above can fail for this reason. Try to potentially @@ -714,8 +715,7 @@ def add_target_to_repo(parsed_arguments, target_path, repo_targets_path, logger.debug(repr(target_path) + ' does not exist. Skipping.') else: - securesystemslib.util.ensure_parent_dir( - os.path.join(repo_targets_path, target_path)) + sslib_util.ensure_parent_dir(os.path.join(repo_targets_path, target_path)) shutil.copy(target_path, os.path.join(repo_targets_path, target_path)) From 4b66c173d8ffc9979fbb156712f262d9bb57f498 Mon Sep 17 00:00:00 2001 From: Jussi Kukkonen Date: Tue, 12 Jan 2021 11:51:30 +0200 Subject: [PATCH 16/25] imports: Fix securesystemslib.storage imports Make them compatible with vendoring, use from securesystemslib import storage as sslib_storage to have the same style as other securesystemslib imports. 
Signed-off-by: Jussi Kukkonen --- tuf/developer_tool.py | 3 ++- tuf/repository_lib.py | 7 +++---- tuf/repository_tool.py | 7 +++---- 3 files changed, 8 insertions(+), 9 deletions(-) diff --git a/tuf/developer_tool.py b/tuf/developer_tool.py index 5d589452d5..f3cb788e96 100755 --- a/tuf/developer_tool.py +++ b/tuf/developer_tool.py @@ -40,6 +40,7 @@ from securesystemslib import exceptions as sslib_exceptions from securesystemslib import formats as sslib_formats +from securesystemslib import storage as sslib_storage import tuf from tuf import exceptions @@ -512,7 +513,7 @@ def _generate_and_write_metadata(rolename, metadata_filename, write_partial, if sig.verify(signable, rolename, repository_name) or write_partial: repo_lib._remove_invalid_and_duplicate_signatures(signable, repository_name) - storage_backend = securesystemslib.storage.FilesystemBackend() + storage_backend = sslib_storage.FilesystemBackend() filename = repo_lib.write_metadata_file(signable, metadata_filename, metadata['version'], False, storage_backend) diff --git a/tuf/repository_lib.py b/tuf/repository_lib.py index f33a0ce838..4a1b8e8d54 100644 --- a/tuf/repository_lib.py +++ b/tuf/repository_lib.py @@ -44,6 +44,7 @@ from securesystemslib import interface as sslib_interface from securesystemslib import keys as sslib_keys from securesystemslib import util as sslib_util +from securesystemslib import storage as sslib_storage import tuf from tuf import exceptions @@ -58,8 +59,6 @@ import securesystemslib.hash import six -import securesystemslib.storage - # See 'log.py' to learn how logging is handled in TUF. logger = logging.getLogger(__name__) @@ -1449,7 +1448,7 @@ def generate_targets_metadata(targets_directory, target_files, version, # Generate the fileinfo dicts by accessing the target files on storage. # Default to accessing files on local storage. 
if storage_backend is None: - storage_backend = securesystemslib.storage.FilesystemBackend() + storage_backend = sslib_storage.FilesystemBackend() filedict = _generate_targets_fileinfo(target_files, targets_directory, write_consistent_targets, storage_backend) @@ -1946,7 +1945,7 @@ def write_metadata_file(metadata, filename, version_number, consistent_snapshot, sslib_formats.BOOLEAN_SCHEMA.check_match(consistent_snapshot) if storage_backend is None: - storage_backend = securesystemslib.storage.FilesystemBackend() + storage_backend = sslib_storage.FilesystemBackend() # Generate the actual metadata file content of 'metadata'. Metadata is # saved as JSON and includes formatting, such as indentation and sorted diff --git a/tuf/repository_tool.py b/tuf/repository_tool.py index 0912384ef5..73b3007be1 100755 --- a/tuf/repository_tool.py +++ b/tuf/repository_tool.py @@ -45,6 +45,7 @@ from securesystemslib import exceptions as sslib_exceptions from securesystemslib import formats as sslib_formats from securesystemslib import util as sslib_util +from securesystemslib import storage as sslib_storage import tuf from tuf import exceptions @@ -55,8 +56,6 @@ import six -import securesystemslib.storage - # Copy API # pylint: disable=unused-import @@ -2929,7 +2928,7 @@ def create_new_repository(repository_directory, repository_name='default', sslib_formats.NAME_SCHEMA.check_match(repository_name) if storage_backend is None: - storage_backend = securesystemslib.storage.FilesystemBackend() + storage_backend = sslib_storage.FilesystemBackend() # Set the repository, metadata, and targets directories. These directories # are created if they do not exist. 
@@ -3039,7 +3038,7 @@ def load_repository(repository_directory, repository_name='default', sslib_formats.NAME_SCHEMA.check_match(repository_name) if storage_backend is None: - storage_backend = securesystemslib.storage.FilesystemBackend() + storage_backend = sslib_storage.FilesystemBackend() repository_directory = os.path.abspath(repository_directory) metadata_directory = os.path.join(repository_directory, From dd134a43c8455cc3f3acc543e7fe4e8400734af7 Mon Sep 17 00:00:00 2001 From: Jussi Kukkonen Date: Tue, 12 Jan 2021 11:54:17 +0200 Subject: [PATCH 17/25] imports: Fix securesystemslib.hash imports Make them compatible with vendoring, use from securesystemslib import hash as sslib_hash to have the same style as other securesystemslib imports (and to avoid potential conflict with system hash()). Signed-off-by: Jussi Kukkonen --- tuf/client/updater.py | 8 ++++---- tuf/repository_lib.py | 6 +++--- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/tuf/client/updater.py b/tuf/client/updater.py index 629f8e214a..99321978e3 100755 --- a/tuf/client/updater.py +++ b/tuf/client/updater.py @@ -133,6 +133,7 @@ from securesystemslib import exceptions as sslib_exceptions from securesystemslib import formats as sslib_formats +from securesystemslib import hash as sslib_hash from securesystemslib import keys as sslib_keys from securesystemslib import util as sslib_util @@ -148,7 +149,6 @@ import tuf.requests_fetcher import tuf.keydb -import securesystemslib.hash import six # The Timestamp role does not have signed metadata about it; otherwise we @@ -1207,7 +1207,7 @@ def _check_hashes(self, file_object, trusted_hashes): # Verify each hash, raise an exception if any hash fails to verify for algorithm, trusted_hash in six.iteritems(trusted_hashes): - digest_object = securesystemslib.hash.digest_fileobject(file_object, + digest_object = sslib_hash.digest_fileobject(file_object, algorithm) computed_hash = digest_object.hexdigest() @@ -2933,7 +2933,7 @@ def 
_get_target_hash(self, target_filepath, hash_function='sha256'): # Calculate the hash of the filepath to determine which bin to find the # target. The client currently assumes the repository (i.e., repository # tool) uses 'hash_function' to generate hashes and UTF-8. - digest_object = securesystemslib.hash.digest(hash_function) + digest_object = sslib_hash.digest(hash_function) encoded_target_filepath = target_filepath.encode('utf-8') digest_object.update(encoded_target_filepath) target_filepath_hash = digest_object.hexdigest() @@ -3088,7 +3088,7 @@ def updated_targets(self, targets, destination_directory): for algorithm, digest in six.iteritems(target['fileinfo']['hashes']): digest_object = None try: - digest_object = securesystemslib.hash.digest_filename(target_filepath, + digest_object = sslib_hash.digest_filename(target_filepath, algorithm=algorithm) # This exception would occur if the target does not exist locally. diff --git a/tuf/repository_lib.py b/tuf/repository_lib.py index 4a1b8e8d54..0384ad6688 100644 --- a/tuf/repository_lib.py +++ b/tuf/repository_lib.py @@ -39,8 +39,10 @@ import json import tempfile +import securesystemslib from securesystemslib import exceptions as sslib_exceptions from securesystemslib import formats as sslib_formats +from securesystemslib import hash as sslib_hash from securesystemslib import interface as sslib_interface from securesystemslib import keys as sslib_keys from securesystemslib import util as sslib_util @@ -55,8 +57,6 @@ from tuf import sig import tuf.keydb -import securesystemslib -import securesystemslib.hash import six @@ -1187,7 +1187,7 @@ def get_target_hash(target_filepath): """ formats.RELPATH_SCHEMA.check_match(target_filepath) - digest_object = securesystemslib.hash.digest(algorithm=HASH_FUNCTION) + digest_object = sslib_hash.digest(algorithm=HASH_FUNCTION) digest_object.update(target_filepath.encode('utf-8')) return digest_object.hexdigest() From 996b2a0f9151718fc31f1bc3b7985047ecee4a8a Mon Sep 17 00:00:00 
2001 From: Jussi Kukkonen Date: Tue, 12 Jan 2021 12:00:41 +0200 Subject: [PATCH 18/25] imports: Fix securesystemslib.settings imports Make the import compatible with vendoring tool and alias the import so it does not clash with the local module. Fix all references to the module in the code. Signed-off-by: Jussi Kukkonen --- tuf/requests_fetcher.py | 10 +++++----- tuf/scripts/repo.py | 3 ++- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/tuf/requests_fetcher.py b/tuf/requests_fetcher.py index 6ac8437dfd..34eee80692 100644 --- a/tuf/requests_fetcher.py +++ b/tuf/requests_fetcher.py @@ -14,7 +14,7 @@ import urllib3.exceptions from tuf import exceptions -import tuf.settings +from tuf import settings from tuf.client.fetcher import FetcherInterface @@ -74,7 +74,7 @@ def fetch(self, url, required_length): # - connect timeout (max delay before first byte is received) # - read (gap) timeout (max delay between bytes received) response = session.get(url, stream=True, - timeout=tuf.settings.SOCKET_TIMEOUT) + timeout=settings.SOCKET_TIMEOUT) # Check response status. try: response.raise_for_status() @@ -96,11 +96,11 @@ def chunks(): # wish to download an extremely large file in one shot. # Before beginning the round, sleep (if set) for a short amount of # time so that the CPU is not hogged in the while loop. 
- if tuf.settings.SLEEP_BEFORE_ROUND: - time.sleep(tuf.settings.SLEEP_BEFORE_ROUND) + if settings.SLEEP_BEFORE_ROUND: + time.sleep(settings.SLEEP_BEFORE_ROUND) read_amount = min( - tuf.settings.CHUNK_SIZE, required_length - bytes_received) + settings.CHUNK_SIZE, required_length - bytes_received) # NOTE: This may not handle some servers adding a Content-Encoding # header, which may cause urllib3 to misbehave: diff --git a/tuf/scripts/repo.py b/tuf/scripts/repo.py index 4f0e47302c..303077a1a7 100755 --- a/tuf/scripts/repo.py +++ b/tuf/scripts/repo.py @@ -152,6 +152,7 @@ from securesystemslib import formats as sslib_formats from securesystemslib import interface as sslib_interface from securesystemslib import keys as sslib_keys +from securesystemslib import settings as sslib_settings from securesystemslib import util as sslib_util import tuf @@ -481,7 +482,7 @@ def import_privatekey_from_file(keypath, password=None): else: # Add "keyid_hash_algorithms" so that equal keys with different keyids can # be associated using supported keyid_hash_algorithms. - key_object['keyid_hash_algorithms'] = securesystemslib.settings.HASH_ALGORITHMS + key_object['keyid_hash_algorithms'] = sslib_settings.HASH_ALGORITHMS return key_object From 538623b6eba66d2a8cb27c22aa328cf9e39a6b9a Mon Sep 17 00:00:00 2001 From: Jussi Kukkonen Date: Mon, 11 Jan 2021 19:40:29 +0200 Subject: [PATCH 19/25] imports: Make 'keydb' imports vendoring-compatible Use "from tuf import " instead of "import tuf.": this makes it possible for vendoring tool to vendor tuf. Fix all references to in the code. Also fix import orders so tuf internal imports are last. 
Signed-off-by: Jussi Kukkonen --- tuf/client/updater.py | 6 +++--- tuf/developer_tool.py | 24 ++++++++++++------------ tuf/repository_lib.py | 25 ++++++++++++------------- tuf/repository_tool.py | 28 ++++++++++++++-------------- tuf/scripts/repo.py | 10 +++++----- tuf/sig.py | 6 +++--- 6 files changed, 49 insertions(+), 50 deletions(-) diff --git a/tuf/client/updater.py b/tuf/client/updater.py index 99321978e3..85c86185d4 100755 --- a/tuf/client/updater.py +++ b/tuf/client/updater.py @@ -141,13 +141,13 @@ from tuf import download from tuf import exceptions from tuf import formats +from tuf import keydb from tuf import log from tuf import mirrors from tuf import roledb from tuf import settings from tuf import sig import tuf.requests_fetcher -import tuf.keydb import six @@ -914,7 +914,7 @@ def _rebuild_key_and_role_db(self): # metadata files for delegated roles are also not loaded when the # repository is first instantiated. Due to this setup, reloading delegated # roles is not required here. 
- tuf.keydb.create_keydb_from_root_metadata(self.metadata['current']['root'], + keydb.create_keydb_from_root_metadata(self.metadata['current']['root'], self.repository_name) roledb.create_roledb_from_root_metadata(self.metadata['current']['root'], @@ -969,7 +969,7 @@ def _import_delegations(self, parent_role): try: key, _ = sslib_keys.format_metadata_to_key(keyinfo, keyid) - tuf.keydb.add_key(key, repository_name=self.repository_name) + keydb.add_key(key, repository_name=self.repository_name) except exceptions.KeyAlreadyExistsError: pass diff --git a/tuf/developer_tool.py b/tuf/developer_tool.py index f3cb788e96..788240add0 100755 --- a/tuf/developer_tool.py +++ b/tuf/developer_tool.py @@ -37,26 +37,26 @@ import shutil import tempfile import json +import six + +import securesystemslib from securesystemslib import exceptions as sslib_exceptions from securesystemslib import formats as sslib_formats from securesystemslib import storage as sslib_storage +from securesystemslib import util as sslib_util import tuf from tuf import exceptions from tuf import formats +from tuf import keydb from tuf import log +from tuf import repository_lib as repo_lib from tuf import roledb from tuf import sig import tuf.keydb -import tuf.repository_lib as repo_lib import tuf.repository_tool -import securesystemslib -from securesystemslib import util as sslib_util - -import six - from tuf.repository_tool import Targets from tuf.repository_lib import _check_role_keys from tuf.repository_lib import _metadata_is_partially_loaded @@ -256,7 +256,7 @@ def write(self, write_partial=False): # Raise 'securesystemslib.exceptions.FormatError' if any are improperly formatted. sslib_formats.BOOLEAN_SCHEMA.check_match(write_partial) - # At this point the tuf.keydb and roledb stores must be fully + # At this point the keydb and roledb stores must be fully # populated, otherwise write() throwns a 'tuf.Repository' exception if # any of the project roles are missing signatures, keys, etc. 
@@ -315,7 +315,7 @@ def add_verification_key(self, key, expires=None): securesystemslib.exceptions.Error, if the project already contains a key. - The role's entries in 'tuf.keydb.py' and 'roledb' are updated. + The role's entries in 'keydb' and 'roledb' are updated. None @@ -754,7 +754,7 @@ def _save_project_configuration(metadata_directory, targets_directory, # Build a dictionary containing the actual keys. for key in public_keys: - key_info = tuf.keydb.get_key(key) + key_info = keydb.get_key(key) key_metadata = format_keyval_to_metadata(key_info['keytype'], key_info['scheme'], key_info['keyval']) project_config['public_keys'][key] = key_metadata @@ -813,7 +813,7 @@ def load_project(project_directory, prefix='', new_targets_location=None, # Clear the role and key databases since we are loading in a new project. roledb.clear_roledb(clear_all=True) - tuf.keydb.clear_keydb(clear_all=True) + keydb.clear_keydb(clear_all=True) # Locate metadata filepaths and targets filepath. project_directory = os.path.abspath(project_directory) @@ -900,7 +900,7 @@ def load_project(project_directory, prefix='', new_targets_location=None, for key_metadata in targets_metadata['delegations']['keys'].values(): key_object, junk = format_metadata_to_key(key_metadata) - tuf.keydb.add_key(key_object, repository_name=repository_name) + keydb.add_key(key_object, repository_name=repository_name) for role in targets_metadata['delegations']['roles']: rolename = role['name'] @@ -980,7 +980,7 @@ def load_project(project_directory, prefix='', new_targets_location=None, key_object, junk = format_metadata_to_key(key_metadata) try: - tuf.keydb.add_key(key_object, repository_name=repository_name) + keydb.add_key(key_object, repository_name=repository_name) except exceptions.KeyAlreadyExistsError: pass diff --git a/tuf/repository_lib.py b/tuf/repository_lib.py index 0384ad6688..f6cf9e961e 100644 --- a/tuf/repository_lib.py +++ b/tuf/repository_lib.py @@ -37,6 +37,7 @@ import logging import shutil import 
json +import six import tempfile import securesystemslib @@ -51,13 +52,11 @@ import tuf from tuf import exceptions from tuf import formats +from tuf import keydb from tuf import log from tuf import roledb from tuf import settings from tuf import sig -import tuf.keydb - -import six # See 'log.py' to learn how logging is handled in TUF. @@ -340,9 +339,9 @@ def _remove_invalid_and_duplicate_signatures(signable, repository_name): key = None # Remove 'signature' from 'signable' if the listed keyid does not exist - # in 'tuf.keydb'. + # in 'keydb'. try: - key = tuf.keydb.get_key(keyid, repository_name=repository_name) + key = keydb.get_key(keyid, repository_name=repository_name) except exceptions.UnknownKeyError: signable['signatures'].remove(signature) @@ -510,7 +509,7 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): logger.warning('Unsigned metadata object: ' + repr(signable)) root_metadata = signable['signed'] - tuf.keydb.create_keydb_from_root_metadata(root_metadata, repository_name) + keydb.create_keydb_from_root_metadata(root_metadata, repository_name) roledb.create_roledb_from_root_metadata(root_metadata, repository_name) # Load Root's roleinfo and update 'roledb'. @@ -676,7 +675,7 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): # repository maintainer should have also been made aware of the duplicate # key when it was added. try: - tuf.keydb.add_key(key_object, keyid=None, repository_name=repository_name) + keydb.add_key(key_object, keyid=None, repository_name=repository_name) except exceptions.KeyAlreadyExistsError: pass @@ -1198,7 +1197,7 @@ def generate_root_metadata(version, expiration_date, consistent_snapshot, repository_name='default'): """ - Create the root metadata. 'roledb' and 'tuf.keydb.py' + Create the root metadata. 'roledb' and 'keydb' are read and the information returned by these modules is used to generate the root metadata object. 
@@ -1230,7 +1229,7 @@ def generate_root_metadata(version, expiration_date, consistent_snapshot, found in 'roledb'.) - The contents of 'tuf.keydb.py' and 'roledb' are read. + The contents of 'keydb' and 'roledb' are read. A root metadata object, conformant to 'tuf.formats.ROOT_SCHEMA'. @@ -1265,7 +1264,7 @@ def generate_root_metadata(version, expiration_date, consistent_snapshot, # Collect keys from all roles in a list keyids = roledb.get_role_keyids(rolename, repository_name) for keyid in keyids: - key = tuf.keydb.get_key(keyid, repository_name=repository_name) + key = keydb.get_key(keyid, repository_name=repository_name) keylist.append(key) # Generate the authentication information Root establishes for each @@ -1416,7 +1415,7 @@ def generate_targets_metadata(targets_directory, target_files, version, # Collect all delegations keys for generating the delegations keydict for keyid in role['keyids']: - key = tuf.keydb.get_key(keyid, repository_name=repository_name) + key = keydb.get_key(keyid, repository_name=repository_name) delegations_keys.append(key) _, delegations['keys'] = keys_to_keydict(delegations_keys) @@ -1803,7 +1802,7 @@ def sign_metadata(metadata_object, keyids, filename, repository_name): Sign a metadata object. If any of the keyids have already signed the file, the old signature is replaced. The keys in 'keyids' must already be - loaded in 'tuf.keydb'. + loaded in 'keydb'. metadata_object: @@ -1857,7 +1856,7 @@ def sign_metadata(metadata_object, keyids, filename, repository_name): for keyid in keyids: # Load the signing key. - key = tuf.keydb.get_key(keyid, repository_name=repository_name) + key = keydb.get_key(keyid, repository_name=repository_name) # Generate the signature using the appropriate signing method. 
if key['keytype'] in SUPPORTED_KEY_TYPES: if 'private' in key['keyval']: diff --git a/tuf/repository_tool.py b/tuf/repository_tool.py index 73b3007be1..04681ac53e 100755 --- a/tuf/repository_tool.py +++ b/tuf/repository_tool.py @@ -39,6 +39,7 @@ import tempfile import shutil import json +import six from collections import deque @@ -50,11 +51,10 @@ import tuf from tuf import exceptions from tuf import formats +from tuf import keydb from tuf import log +from tuf import repository_lib as repo_lib from tuf import roledb -import tuf.repository_lib as repo_lib - -import six # Copy API @@ -251,7 +251,7 @@ def __init__(self, repository_directory, metadata_directory, try: roledb.create_roledb(repository_name) - tuf.keydb.create_keydb(repository_name) + keydb.create_keydb(repository_name) except sslib_exceptions.InvalidNameError: logger.debug(repr(repository_name) + ' already exists. Overwriting' @@ -314,7 +314,7 @@ def writeall(self, consistent_snapshot=False, use_existing_fileinfo=False): # formatted. sslib_formats.BOOLEAN_SCHEMA.check_match(consistent_snapshot) - # At this point, tuf.keydb and roledb must be fully populated, + # At this point, keydb and roledb must be fully populated, # otherwise writeall() throws a 'tuf.exceptions.UnsignedMetadataError' for # the top-level roles. exception if any of the top-level roles are missing # signatures, keys, etc. @@ -721,7 +721,7 @@ def add_verification_key(self, key, expires=None): expired. - The role's entries in 'tuf.keydb.py' and 'roledb' are updated. + The role's entries in 'keydb' and 'roledb' are updated. None. @@ -780,11 +780,11 @@ def add_verification_key(self, key, expires=None): key['expires'] = expires # Ensure 'key', which should contain the public portion, is added to - # 'tuf.keydb.py'. Add 'key' to the list of recognized keys. + # 'keydb'. Add 'key' to the list of recognized keys. # Keys may be shared, so do not raise an exception if 'key' has already # been loaded. 
try: - tuf.keydb.add_key(key, repository_name=self._repository_name) + keydb.add_key(key, repository_name=self._repository_name) except exceptions.KeyAlreadyExistsError: logger.warning('Adding a verification key that has already been used.') @@ -882,7 +882,7 @@ def load_signing_key(self, key): securesystemslib.exceptions.Error, if the private key is not found in 'key'. - Updates the role's 'tuf.keydb.py' and 'roledb' entries. + Updates the role's 'keydb' and 'roledb' entries. None. @@ -902,11 +902,11 @@ def load_signing_key(self, key): # Has the key, with the private portion included, been added to the keydb? # The public version of the key may have been previously added. try: - tuf.keydb.add_key(key, repository_name=self._repository_name) + keydb.add_key(key, repository_name=self._repository_name) except exceptions.KeyAlreadyExistsError: - tuf.keydb.remove_key(key['keyid'], self._repository_name) - tuf.keydb.add_key(key, repository_name=self._repository_name) + keydb.remove_key(key['keyid'], self._repository_name) + keydb.add_key(key, repository_name=self._repository_name) # Update the role's 'signing_keys' field in 'roledb'. roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) @@ -2351,7 +2351,7 @@ def delegate(self, rolename, public_keys, paths, threshold=1, A new Target object is created for 'rolename' that is accessible to the - caller (i.e., targets.). The 'tuf.keydb.py' and + caller (i.e., targets.). The 'keydb' and 'roledb' stores are updated with 'public_keys'. 
@@ -3171,7 +3171,7 @@ def load_repository(repository_directory, repository_name='default', try: for keyid in keyids: # pragma: no branch key_object['keyid'] = keyid - tuf.keydb.add_key(key_object, keyid=None, + keydb.add_key(key_object, keyid=None, repository_name=repository_name) except exceptions.KeyAlreadyExistsError: diff --git a/tuf/scripts/repo.py b/tuf/scripts/repo.py index 303077a1a7..9c1b6982d7 100755 --- a/tuf/scripts/repo.py +++ b/tuf/scripts/repo.py @@ -146,6 +146,7 @@ import shutil import time import fnmatch +import six import securesystemslib from securesystemslib import exceptions as sslib_exceptions @@ -158,11 +159,10 @@ import tuf from tuf import exceptions from tuf import formats +from tuf import keydb from tuf import log +from tuf import repository_tool as repo_tool from tuf import roledb -import tuf.repository_tool as repo_tool - -import six # See 'log.py' to learn how logging is handled in TUF. @@ -623,9 +623,9 @@ def sign_role(parsed_arguments): # Load the private key keydb and set the roleinfo in roledb so that # metadata can be written with repository.write(). - tuf.keydb.remove_key(role_privatekey['keyid'], + keydb.remove_key(role_privatekey['keyid'], repository_name = repository._repository_name) - tuf.keydb.add_key( + keydb.add_key( role_privatekey, repository_name = repository._repository_name) # Set the delegated metadata file to expire in 3 months. diff --git a/tuf/sig.py b/tuf/sig.py index b67d6c797d..0b04240fa3 100755 --- a/tuf/sig.py +++ b/tuf/sig.py @@ -58,8 +58,8 @@ import tuf from tuf import exceptions from tuf import formats +from tuf import keydb from tuf import roledb -import tuf.keydb # See 'log.py' to learn how logging is handled in TUF. logger = logging.getLogger(__name__) @@ -161,7 +161,7 @@ def get_signature_status(signable, role=None, repository_name='default', # Does the signature use an unrecognized key? 
try: - key = tuf.keydb.get_key(keyid, repository_name) + key = keydb.get_key(keyid, repository_name) except exceptions.UnknownKeyError: unknown_sigs.append(keyid) @@ -306,7 +306,7 @@ def verify(signable, role, repository_name='default', threshold=None, unique_keys = set() for keyid in good_sigs: - key = tuf.keydb.get_key(keyid, repository_name) + key = keydb.get_key(keyid, repository_name) unique_keys.add(key['keyval']['public']) return len(unique_keys) >= threshold From 0aabb82a802fa0f435a8b9339980b2a001a73c7e Mon Sep 17 00:00:00 2001 From: Jussi Kukkonen Date: Tue, 12 Jan 2021 14:41:31 +0200 Subject: [PATCH 20/25] imports: Move six imports to 3rd party section Signed-off-by: Jussi Kukkonen --- tuf/client/updater.py | 3 +-- tuf/download.py | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/tuf/client/updater.py b/tuf/client/updater.py index 85c86185d4..d22c0a19b1 100755 --- a/tuf/client/updater.py +++ b/tuf/client/updater.py @@ -128,6 +128,7 @@ import time import fnmatch import copy +import six import warnings import io @@ -149,8 +150,6 @@ from tuf import sig import tuf.requests_fetcher -import six - # The Timestamp role does not have signed metadata about it; otherwise we # would need an infinite regress of metadata. Therefore, we use some # default, but sane, upper file length for its metadata. 
diff --git a/tuf/download.py b/tuf/download.py index c6ddeec36b..2dfb159c16 100755 --- a/tuf/download.py +++ b/tuf/download.py @@ -32,12 +32,12 @@ from __future__ import unicode_literals import logging +import six import timeit import tempfile import securesystemslib from securesystemslib import formats as sslib_formats -import six import tuf from tuf import exceptions From d5b6f91f6fbb0bf4abc7c57446c89bfaa58783fa Mon Sep 17 00:00:00 2001 From: Jussi Kukkonen Date: Tue, 12 Jan 2021 17:38:50 +0200 Subject: [PATCH 21/25] imports: Remove unused imports The linter now understands our imports (yay), and complains a lot (boo): * Remove really unused imports * disable lints for tuf.log and securesystemslib imports: these imports have logging side-effects (they set default loggers for tuf and securesystemslib respectively) and I'm cautious about just removing them Signed-off-by: Jussi Kukkonen --- tuf/client/updater.py | 2 +- tuf/developer_tool.py | 7 ++----- tuf/download.py | 2 +- tuf/keydb.py | 2 +- tuf/log.py | 1 - tuf/mirrors.py | 3 +-- tuf/repository_lib.py | 3 +-- tuf/repository_tool.py | 1 - tuf/roledb.py | 4 +--- tuf/scripts/client.py | 1 - tuf/scripts/repo.py | 3 +-- tuf/sig.py | 3 +-- 12 files changed, 10 insertions(+), 22 deletions(-) diff --git a/tuf/client/updater.py b/tuf/client/updater.py index d22c0a19b1..80475a1485 100755 --- a/tuf/client/updater.py +++ b/tuf/client/updater.py @@ -143,7 +143,7 @@ from tuf import exceptions from tuf import formats from tuf import keydb -from tuf import log +from tuf import log # pylint: disable=unused-import from tuf import mirrors from tuf import roledb from tuf import settings diff --git a/tuf/developer_tool.py b/tuf/developer_tool.py index 788240add0..df1c17c212 100755 --- a/tuf/developer_tool.py +++ b/tuf/developer_tool.py @@ -40,22 +40,19 @@ import six -import securesystemslib +import securesystemslib # pylint: disable=unused-import from securesystemslib import exceptions as sslib_exceptions from securesystemslib 
import formats as sslib_formats from securesystemslib import storage as sslib_storage from securesystemslib import util as sslib_util -import tuf from tuf import exceptions from tuf import formats from tuf import keydb -from tuf import log +from tuf import log # pylint: disable=unused-import from tuf import repository_lib as repo_lib from tuf import roledb from tuf import sig -import tuf.keydb -import tuf.repository_tool from tuf.repository_tool import Targets from tuf.repository_lib import _check_role_keys diff --git a/tuf/download.py b/tuf/download.py index 2dfb159c16..2378e7cedb 100755 --- a/tuf/download.py +++ b/tuf/download.py @@ -36,7 +36,7 @@ import timeit import tempfile -import securesystemslib +import securesystemslib # pylint: disable=unused-import from securesystemslib import formats as sslib_formats import tuf diff --git a/tuf/keydb.py b/tuf/keydb.py index e261bfa7ba..71ef1058a4 100755 --- a/tuf/keydb.py +++ b/tuf/keydb.py @@ -44,7 +44,7 @@ import logging import copy -import securesystemslib +import securesystemslib # pylint: disable=unused-import from securesystemslib import exceptions as sslib_exceptions from securesystemslib import formats as sslib_formats from securesystemslib import keys as sslib_keys diff --git a/tuf/log.py b/tuf/log.py index cdeb98f542..368845333e 100755 --- a/tuf/log.py +++ b/tuf/log.py @@ -75,7 +75,6 @@ from securesystemslib import exceptions as sslib_exceptions from securesystemslib import formats as sslib_formats -import tuf from tuf import exceptions from tuf import settings diff --git a/tuf/mirrors.py b/tuf/mirrors.py index f01166a233..78d5053402 100755 --- a/tuf/mirrors.py +++ b/tuf/mirrors.py @@ -32,12 +32,11 @@ import os -import securesystemslib +import securesystemslib # pylint: disable=unused-import from securesystemslib import exceptions as sslib_exceptions from securesystemslib import formats as sslib_formats from securesystemslib.util import file_in_confined_directories -import tuf from tuf import formats import 
six diff --git a/tuf/repository_lib.py b/tuf/repository_lib.py index f6cf9e961e..d3158cb6c9 100644 --- a/tuf/repository_lib.py +++ b/tuf/repository_lib.py @@ -40,7 +40,7 @@ import six import tempfile -import securesystemslib +import securesystemslib # pylint: disable=unused-import from securesystemslib import exceptions as sslib_exceptions from securesystemslib import formats as sslib_formats from securesystemslib import hash as sslib_hash @@ -49,7 +49,6 @@ from securesystemslib import util as sslib_util from securesystemslib import storage as sslib_storage -import tuf from tuf import exceptions from tuf import formats from tuf import keydb diff --git a/tuf/repository_tool.py b/tuf/repository_tool.py index 04681ac53e..9d195da000 100755 --- a/tuf/repository_tool.py +++ b/tuf/repository_tool.py @@ -48,7 +48,6 @@ from securesystemslib import util as sslib_util from securesystemslib import storage as sslib_storage -import tuf from tuf import exceptions from tuf import formats from tuf import keydb diff --git a/tuf/roledb.py b/tuf/roledb.py index e5914af9be..62f83bb8ae 100755 --- a/tuf/roledb.py +++ b/tuf/roledb.py @@ -52,14 +52,12 @@ import logging import copy -import securesystemslib +import securesystemslib # pylint: disable=unused-import from securesystemslib import exceptions as sslib_exceptions from securesystemslib import formats as sslib_formats -import tuf from tuf import exceptions from tuf import formats -from tuf import log import six diff --git a/tuf/scripts/client.py b/tuf/scripts/client.py index 0f4ffea3fc..c1c7bd7cb2 100755 --- a/tuf/scripts/client.py +++ b/tuf/scripts/client.py @@ -71,7 +71,6 @@ import argparse import logging -import tuf from tuf import exceptions from tuf import log from tuf import settings diff --git a/tuf/scripts/repo.py b/tuf/scripts/repo.py index 9c1b6982d7..5866a8853c 100755 --- a/tuf/scripts/repo.py +++ b/tuf/scripts/repo.py @@ -148,7 +148,7 @@ import fnmatch import six -import securesystemslib +import securesystemslib # pylint: 
disable=unused-import from securesystemslib import exceptions as sslib_exceptions from securesystemslib import formats as sslib_formats from securesystemslib import interface as sslib_interface @@ -156,7 +156,6 @@ from securesystemslib import settings as sslib_settings from securesystemslib import util as sslib_util -import tuf from tuf import exceptions from tuf import formats from tuf import keydb diff --git a/tuf/sig.py b/tuf/sig.py index 0b04240fa3..cc572ae647 100755 --- a/tuf/sig.py +++ b/tuf/sig.py @@ -50,12 +50,11 @@ import logging -import securesystemslib +import securesystemslib # pylint: disable=unused-import from securesystemslib import exceptions as sslib_exceptions from securesystemslib import formats as sslib_formats from securesystemslib import keys as sslib_keys -import tuf from tuf import exceptions from tuf import formats from tuf import keydb From d0e5bd23112a75d007c1f88d6a6bb9e6a58aef95 Mon Sep 17 00:00:00 2001 From: Jussi Kukkonen Date: Mon, 1 Mar 2021 10:12:11 +0200 Subject: [PATCH 22/25] imports: Fix urllib3 exception import Make it compatible with vendoring: import the exception only to avoid having to rename the module locally. 
Signed-off-by: Jussi Kukkonen --- tuf/requests_fetcher.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/tuf/requests_fetcher.py b/tuf/requests_fetcher.py index 34eee80692..80a9f56191 100644 --- a/tuf/requests_fetcher.py +++ b/tuf/requests_fetcher.py @@ -10,8 +10,7 @@ import six import logging import time - -import urllib3.exceptions +from urllib3.exceptions import ReadTimeoutError from tuf import exceptions from tuf import settings @@ -121,7 +120,7 @@ def chunks(): if bytes_received >= required_length: break - except urllib3.exceptions.ReadTimeoutError as e: + except ReadTimeoutError as e: raise exceptions.SlowRetrievalError(str(e)) finally: From 7dcfb12f61ba6afa3664cc77e25f76306fd034b1 Mon Sep 17 00:00:00 2001 From: Jussi Kukkonen Date: Mon, 1 Mar 2021 10:17:18 +0200 Subject: [PATCH 23/25] requests_fetcher: Move 'tuf' import from download requests_fetcher uses tuf.__version__ for user-agent, move the import to the correct file. Signed-off-by: Jussi Kukkonen --- tuf/download.py | 1 - tuf/requests_fetcher.py | 1 + 2 files changed, 1 insertion(+), 1 deletion(-) diff --git a/tuf/download.py b/tuf/download.py index 2378e7cedb..6b56ba2569 100755 --- a/tuf/download.py +++ b/tuf/download.py @@ -39,7 +39,6 @@ import securesystemslib # pylint: disable=unused-import from securesystemslib import formats as sslib_formats -import tuf from tuf import exceptions from tuf import formats from tuf import settings diff --git a/tuf/requests_fetcher.py b/tuf/requests_fetcher.py index 80a9f56191..25a2f9d0db 100644 --- a/tuf/requests_fetcher.py +++ b/tuf/requests_fetcher.py @@ -12,6 +12,7 @@ import time from urllib3.exceptions import ReadTimeoutError +import tuf from tuf import exceptions from tuf import settings From 30ab8385753c8f6c261889ab803917a1c51cb266 Mon Sep 17 00:00:00 2001 From: Jussi Kukkonen Date: Fri, 19 Mar 2021 17:01:45 +0200 Subject: [PATCH 24/25] Make requests_fetcher import vendoring compatible Signed-off-by: Jussi Kukkonen --- 
tuf/client/updater.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tuf/client/updater.py b/tuf/client/updater.py index 80475a1485..8084dffa64 100755 --- a/tuf/client/updater.py +++ b/tuf/client/updater.py @@ -148,7 +148,7 @@ from tuf import roledb from tuf import settings from tuf import sig -import tuf.requests_fetcher +from tuf import requests_fetcher # The Timestamp role does not have signed metadata about it; otherwise we # would need an infinite regress of metadata. Therefore, we use some @@ -699,7 +699,7 @@ def __init__(self, repository_name, repository_mirrors, fetcher=None): # Initialize Updater with an externally provided 'fetcher' implementing # the network download. By default tuf.fetcher.RequestsFetcher is used. if fetcher is None: - self.fetcher = tuf.requests_fetcher.RequestsFetcher() + self.fetcher = requests_fetcher.RequestsFetcher() else: self.fetcher = fetcher From ab56344a533c5def9187570bacb3cd52d70ff64d Mon Sep 17 00:00:00 2001 From: Jussi Kukkonen Date: Fri, 19 Mar 2021 17:10:45 +0200 Subject: [PATCH 25/25] metadata: Make isort happy and bundle imports Signed-off-by: Jussi Kukkonen --- tuf/api/metadata.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tuf/api/metadata.py b/tuf/api/metadata.py index 725c44383a..4d236b576b 100644 --- a/tuf/api/metadata.py +++ b/tuf/api/metadata.py @@ -24,8 +24,7 @@ from securesystemslib.storage import FilesystemBackend, StorageBackendInterface from securesystemslib.util import persist_temp_file -from tuf import exceptions -from tuf import formats +from tuf import exceptions, formats from tuf.api.serialization import ( MetadataDeserializer, MetadataSerializer,