From b6d57d8b898a8370eba3a0cd1679034f241e91ae Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Thu, 23 Jun 2022 15:53:58 -0500 Subject: [PATCH 1/5] initial pass at type hinting and getting mypy to run --- netutils/asn.py | 6 +- netutils/bandwidth.py | 7 +- netutils/banner.py | 5 +- netutils/config/clean.py | 12 +-- netutils/config/compliance.py | 50 ++++++---- netutils/config/parser.py | 181 +++++++++++++++++----------------- netutils/dns.py | 4 +- netutils/interface.py | 81 +++++++++------ netutils/ip.py | 42 ++++---- netutils/lib_mapper.py | 3 +- netutils/mac.py | 28 +++--- netutils/password.py | 51 +++++----- netutils/ping.py | 7 +- netutils/protocol_mapper.py | 4 +- netutils/route.py | 7 +- netutils/time.py | 7 +- netutils/utils.py | 3 +- netutils/vlan.py | 25 +++-- poetry.lock | 46 ++++++++- pyproject.toml | 23 +++++ 20 files changed, 357 insertions(+), 235 deletions(-) diff --git a/netutils/asn.py b/netutils/asn.py index 522e616e..8965f1ba 100644 --- a/netutils/asn.py +++ b/netutils/asn.py @@ -1,7 +1,7 @@ """Functions for working with BGP ASNs.""" -def asn_to_int(asplain): +def asn_to_int(asplain: str) -> int: """Convert AS Number to standardized asplain notation as an integer. Args: @@ -20,7 +20,7 @@ def asn_to_int(asplain): """ # ASN is in asdot notation if "." 
in asplain: - asn = asplain.split(".") - asn = int(f"{int(asn[0]):016b}{int(asn[1]):016b}", 2) + asn_list = asplain.split(".") + asn = int(f"{int(asn_list[0]):016b}{int(asn_list[1]):016b}", 2) return asn return int(asplain) diff --git a/netutils/bandwidth.py b/netutils/bandwidth.py index 13eb6a67..63baeaad 100644 --- a/netutils/bandwidth.py +++ b/netutils/bandwidth.py @@ -1,8 +1,9 @@ """Functions for performing bandwidth calculations.""" import re +from typing import Dict -def _get_bits_mapping(): +def _get_bits_mapping() -> Dict[str, Dict[str, int]]: bits_value = 0 bits_mapping = {} for _bit in ["bps", "Kbps", "Mbps", "Gbps", "Tbps", "Pbps", "Ebps", "Zbps"]: @@ -18,7 +19,7 @@ def _get_bits_mapping(): BITS_MAPPING = _get_bits_mapping() -def _get_bytes_mapping(): +def _get_bytes_mapping() -> Dict[str, Dict[str, int]]: bytes_value = 0 bytes_mapping = {} for _byte in ["Bps", "KBps", "MBps", "GBps", "TBps", "PBps", "EBps", "ZBps"]: @@ -123,7 +124,7 @@ def bits_to_name( # pylint: disable=too-many-branches,too-many-return-statement for bit_type, val in BITS_MAPPING.items(): if val["low"] <= speed < val["high"]: if nbr_decimal == 0: - nbr_decimal = None + nbr_decimal = None # type: ignore if val["low"] == 0: return f"{round(speed, nbr_decimal)}{bit_type}" return f"{round(speed / val['low'], nbr_decimal)}{bit_type}" diff --git a/netutils/banner.py b/netutils/banner.py index 3e2ee213..ebe209ef 100644 --- a/netutils/banner.py +++ b/netutils/banner.py @@ -1,9 +1,10 @@ """Functions for working with the banner configuration.""" import re + from netutils.constants import CARET_C -def delimiter_change(config, from_delimiter, to_delimiter): +def delimiter_change(config: str, from_delimiter: str, to_delimiter: str) -> str: r"""Change the banner delimiter. 
Args: @@ -29,7 +30,7 @@ def delimiter_change(config, from_delimiter, to_delimiter): return config_line -def normalise_delimiter_caret_c(delimiter, config): +def normalise_delimiter_caret_c(delimiter: str, config: str) -> str: r"""Normalise delimiter to ^C. Args: diff --git a/netutils/config/clean.py b/netutils/config/clean.py index 8112ab5c..1a672d1e 100644 --- a/netutils/config/clean.py +++ b/netutils/config/clean.py @@ -1,11 +1,11 @@ """Functions for working with configuration to clean the config.""" # pylint: disable=anomalous-backslash-in-string - import re +import typing as t -def clean_config(config, filters): +def clean_config(config: str, filters: t.List[str]) -> str: r"""Given a list of regex patterns, delete those lines that match. Args: @@ -48,11 +48,11 @@ def clean_config(config, filters): >>> """ for item in filters: - config = re.sub(item["regex"], "", config, flags=re.MULTILINE) + config = re.sub(item["regex"], "", config, flags=re.MULTILINE) # type: ignore return config -def sanitize_config(config, filters): +def sanitize_config(config: str, filters: t.Optional[t.Dict[str, str]]) -> str: r"""Given a dictionary of filters, remove sensitive data from the provided config. Args: @@ -75,6 +75,6 @@ def sanitize_config(config, filters): 'enable secret 5 ' >>> """ - for item in filters: - config = re.sub(item["regex"], item["replace"], config, flags=re.MULTILINE) + for item in filters: # type: ignore + config = re.sub(item["regex"], item["replace"], config, flags=re.MULTILINE) # type: ignore return config diff --git a/netutils/config/compliance.py b/netutils/config/compliance.py index 7f6a6fe4..22aa30cb 100644 --- a/netutils/config/compliance.py +++ b/netutils/config/compliance.py @@ -1,8 +1,10 @@ """Filter Plugins for compliance checks.""" +import typing as t + from . 
import parser # pylint: disable=relative-beyond-top-level -parser_map = { +parser_map: t.Dict[str, t.Callable] = { # type: ignore "arista_eos": parser.EOSConfigParser, "cisco_ios": parser.IOSConfigParser, "cisco_nxos": parser.NXOSConfigParser, @@ -15,7 +17,7 @@ "nokia_sros": parser.NokiaConfigParser, } -default_feature = { +default_feature: t.Dict[str, t.Union[str, bool, None]] = { "compliant": None, "missing": None, "extra": None, @@ -27,7 +29,7 @@ } -def _check_configs_differences(intended_cfg, actual_cfg, network_os): +def _check_configs_differences(intended_cfg: str, actual_cfg: str, network_os: str) -> t.Dict[str, t.Union[str, bool]]: r"""Find differences between intended and actual config lines. Args: @@ -69,7 +71,7 @@ def _check_configs_differences(intended_cfg, actual_cfg, network_os): } -def _is_feature_ordered_compliant(feature_intended_cfg, feature_actual_cfg): +def _is_feature_ordered_compliant(feature_intended_cfg: str, feature_actual_cfg: str) -> bool: """Check if feature intended cfg is compliant with feature actual cfg. Args: @@ -97,17 +99,23 @@ def _is_feature_ordered_compliant(feature_intended_cfg, feature_actual_cfg): return False -def _open_file_config(cfg_path): +def _open_file_config(cfg_path: str) -> t.Union[str, bool]: """Open config file from local disk.""" try: with open(cfg_path, encoding="utf-8") as filehandler: device_cfg = filehandler.read() except IOError: - return False + return False # This should probably be changed to a exception raise. Causing mypy issues on L183, L184 return device_cfg.strip() -def compliance(features, backup, intended, network_os, cfg_type="file"): +def compliance( + features: t.List[t.Dict[str, t.Union[str, bool, t.List[str]]]], + backup: str, + intended: str, + network_os: str, + cfg_type: t.Optional[str] = "file", +) -> t.Dict[str, t.Dict[str, t.Union[str, bool]]]: r"""Report compliance for all features provided as input. 
Args: @@ -172,14 +180,16 @@ def compliance(features, backup, intended, network_os, cfg_type="file"): compliance_results = {} for feature in features: - backup_str = section_config(feature, backup_cfg, network_os) - intended_str = section_config(feature, intended_cfg, network_os) + backup_str = section_config(feature, backup_cfg, network_os) # type: ignore + intended_str = section_config(feature, intended_cfg, network_os) # type: ignore compliance_results.update({feature["name"]: feature_compliance(feature, backup_str, intended_str, network_os)}) - return compliance_results + return compliance_results # type: ignore -def config_section_not_parsed(features, device_cfg, network_os): +def config_section_not_parsed( + features: t.List[t.Dict[str, t.Union[str, bool, t.List[str]]]], device_cfg: str, network_os: str +) -> t.Dict[str, t.Union[str, t.List[str]]]: r"""Return device config section that is not checked by compliance. Args: @@ -216,11 +226,11 @@ def config_section_not_parsed(features, device_cfg, network_os): remaining_cfg = remaining_cfg.replace(feature_cfg, "") return { "remaining_cfg": remaining_cfg.strip(), - "section_not_found": section_not_found, + "section_not_found": section_not_found, # type: ignore } -def diff_network_config(compare_config, base_config, network_os): +def diff_network_config(compare_config: str, base_config: str, network_os: str) -> str: """Identify which lines in compare_config are not in base_config. Args: @@ -270,7 +280,9 @@ def diff_network_config(compare_config, base_config, network_os): return "\n".join(needed_lines) -def feature_compliance(feature, backup_cfg, intended_cfg, network_os): +def feature_compliance( + feature: t.Dict[str, t.Union[str, bool, t.List[str]]], backup_cfg: str, intended_cfg: str, network_os: str +) -> t.Dict[str, t.Union[str, bool]]: r"""Report compliance for all features provided as input. 
Args: @@ -328,10 +340,10 @@ def feature_compliance(feature, backup_cfg, intended_cfg, network_os): else: raise # pylint: disable=misplaced-bare-raise - return feature_data + return feature_data # type: ignore -def find_unordered_cfg_lines(intended_cfg, actual_cfg): +def find_unordered_cfg_lines(intended_cfg: str, actual_cfg: str) -> t.Tuple[bool, t.List[t.Tuple[str, str]]]: """Check if config lines are miss-ordered, i.e in ACL-s. Args: @@ -367,7 +379,7 @@ def find_unordered_cfg_lines(intended_cfg, actual_cfg): return (False, unordered_lines) -def section_config(feature, device_cfg, network_os): +def section_config(feature: t.Dict[str, t.Union[str, bool, t.List[str]]], device_cfg: str, network_os: str) -> str: """Parse feature section config from device cfg. In case section attribute of the the feature is not provided @@ -379,7 +391,7 @@ def section_config(feature, device_cfg, network_os): network_os (str): Device network operating system that is in parser_map keys. Returns: - list: The hash report data mapping file hashes to report data. + str: The hash report data mapping file hashes to report data. 
Example: >>> feature = { @@ -418,7 +430,7 @@ def section_config(feature, device_cfg, network_os): continue else: match = False - for line_start in section_starts_with: + for line_start in section_starts_with: # type: ignore if not match and line.config_line.startswith(line_start): section_config_list.append(line.config_line) match = True diff --git a/netutils/config/parser.py b/netutils/config/parser.py index 2c9f7e3b..5e42bb05 100644 --- a/netutils/config/parser.py +++ b/netutils/config/parser.py @@ -2,6 +2,7 @@ # pylint: disable=no-member,super-with-arguments,invalid-overridden-method,raise-missing-from,invalid-overridden-method,inconsistent-return-statements,super-with-arguments,redefined-argument-from-local,no-else-break,useless-super-delegation,too-many-lines import re +import typing as t from collections import namedtuple from netutils.banner import normalise_delimiter_caret_c @@ -12,23 +13,23 @@ class BaseConfigParser: # pylint: disable=too-few-public-methods """Base class for parsers.""" - comment_chars = ["!"] - banner_start = ["banner", "vacant-message"] + comment_chars: t.List[str] = ["!"] + banner_start: t.List[str] = ["banner", "vacant-message"] - def __init__(self, config): + def __init__(self, config: str): """Create ConfigParser Object. Args: config (str): The config text to parse. """ self.config = config - self._config = None - self._current_parents = () - self.generator_config = (line for line in self.config_lines_only.splitlines()) - self.config_lines = [] - self.build_config_relationship() + self._config: t.Optional[str] = None + self._current_parents: t.Tuple[str, ...] 
= () + self.generator_config: t.Generator[str, None, None] = (line for line in self.config_lines_only.splitlines()) # type: ignore + self.config_lines: t.List[ConfigLine] = [] + self.build_config_relationship() # type: ignore - def config_lines_only(self): + def config_lines_only(self): # type: ignore """Remove lines not related to config.""" raise NotImplementedError @@ -36,10 +37,10 @@ def config_lines_only(self): class BaseSpaceConfigParser(BaseConfigParser): """Base parser class for config syntax that demarcates using spaces/indentation.""" - comment_chars = ["!"] - banner_start = ["banner", "vacant-message"] + comment_chars: t.List[str] = ["!"] + banner_start: t.List[str] = ["banner", "vacant-message"] - def __init__(self, config): + def __init__(self, config: str): """Create ConfigParser Object. Args: @@ -49,15 +50,15 @@ def __init__(self, config): super(BaseSpaceConfigParser, self).__init__(config) @property - def indent_level(self): + def indent_level(self) -> int: """Count the number of spaces a config line is indented.""" return self._indent_level @indent_level.setter - def indent_level(self, value): + def indent_level(self, value: int) -> None: self._indent_level = value - def is_banner_end(self, line): + def is_banner_end(self, line: str) -> bool: """Determine if line ends the banner config. Args: @@ -66,11 +67,11 @@ def is_banner_end(self, line): Returns: bool: True if line ends banner, else False. """ - if self.banner_end in line: + if self.banner_end in line: # type: ignore return True return False - def is_banner_start(self, line): + def is_banner_start(self, line: str) -> bool: """Determine if the line starts a banner config. Args: @@ -84,7 +85,7 @@ def is_banner_start(self, line): return True return False - def is_comment(self, line): + def is_comment(self, line: str) -> bool: """Determine if line is a comment. 
Args: @@ -106,7 +107,7 @@ def is_comment(self, line): return False @property - def config_lines_only(self): + def config_lines_only(self) -> str: """Remove spaces and comments from config lines. Returns: @@ -137,7 +138,7 @@ def config_lines_only(self): return self._config @staticmethod - def get_leading_space_count(config_line): + def get_leading_space_count(config_line: str) -> int: r"""Determine how many spaces the ``config_line`` is indented. Args: @@ -156,7 +157,7 @@ def get_leading_space_count(config_line): """ return len(config_line) - len(config_line.lstrip()) - def _remove_parents(self, line, current_spaces): + def _remove_parents(self, line: str, current_spaces: int) -> t.Tuple: # type: ignore """Remove parents from ``self._curent_parents`` based on indent levels. Args: @@ -178,7 +179,7 @@ def _remove_parents(self, line, current_spaces): parents = self._current_parents[:-deindent_level] or (self._current_parents[0],) return parents - def _build_banner(self, config_line): + def _build_banner(self, config_line: str) -> t.Union[str, None]: """Handle banner config lines. Args: @@ -198,7 +199,7 @@ def _build_banner(self, config_line): if not self.is_banner_end(line): banner_config.append(line) else: - line = normalise_delimiter_caret_c(self.banner_end, line) + line = normalise_delimiter_caret_c(self.banner_end, line) # type: ignore banner_config.append(line) line = "\n".join(banner_config) if line.endswith("^C"): @@ -213,7 +214,7 @@ def _build_banner(self, config_line): raise ValueError("Unable to parse banner end.") - def _build_nested_config(self, line): + def _build_nested_config(self, line: str) -> t.Optional[str]: # type: ignore """Handle building child config sections. 
Args: @@ -246,7 +247,7 @@ def _build_nested_config(self, line): self.indent_level = spaces if self.is_banner_start(line): - line = self._build_banner(line) + line = self._build_banner(line) # type: ignore if line is None or not line[0].isspace(): self._current_parents = () self.indent_level = 0 @@ -254,7 +255,7 @@ def _build_nested_config(self, line): self._update_config_lines(line) - def _update_config_lines(self, config_line): + def _update_config_lines(self, config_line: str) -> None: """Add a ``ConfigLine`` object to ``self.config_lines``. Args: @@ -266,7 +267,7 @@ def _update_config_lines(self, config_line): entry = ConfigLine(config_line, self._current_parents) self.config_lines.append(entry) - def build_config_relationship(self): + def build_config_relationship(self) -> t.List[ConfigLine]: r"""Parse text tree of config lines and their parents. Example: @@ -291,24 +292,24 @@ def build_config_relationship(self): if not line[0].isspace(): self._current_parents = () if self.is_banner_start(line): - line = self._build_banner(line) + line = self._build_banner(line) # type: ignore else: previous_config = self.config_lines[-1] self._current_parents = (previous_config.config_line,) self.indent_level = self.get_leading_space_count(line) if not self.is_banner_start(line): - line = self._build_nested_config(line) + line = self._build_nested_config(line) # type: ignore else: - line = self._build_banner(line) + line = self._build_banner(line) # type: ignore if line is not None and line[0].isspace(): - line = self._build_nested_config(line) + line = self._build_nested_config(line) # type: ignore else: self._current_parents = () if line is None: break elif self.is_banner_start(line): - line = self._build_banner(line) + line = self._build_banner(line) # type: ignore self._update_config_lines(line) return self.config_lines @@ -317,9 +318,9 @@ def build_config_relationship(self): class BaseBraceConfigParser(BaseConfigParser): """Base parser class for config syntax that 
demarcates using braces.""" - multiline_delimiters = [] + multiline_delimiters: t.List[str] = [] - def __init__(self, config): + def __init__(self, config: str): """Create ConfigParser Object. Args: @@ -328,7 +329,7 @@ def __init__(self, config): super(BaseBraceConfigParser, self).__init__(config) @property - def config_lines_only(self): + def config_lines_only(self) -> str: """Remove trailing spaces and empty lines from config lines. Returns: @@ -337,7 +338,7 @@ def config_lines_only(self): config_lines = [line.rstrip() for line in self.config.splitlines() if line and not line.isspace()] return "\n".join(config_lines) - def build_config_relationship(self): + def build_config_relationship(self) -> t.List[ConfigLine]: r"""Parse text tree of config lines and their parents. Example: @@ -375,7 +376,7 @@ def build_config_relationship(self): return self.config_lines - def _build_multiline_config(self, delimiter): + def _build_multiline_config(self, delimiter: str): # type: ignore r"""Build config sections between characters demarcating multiline strings. Args: @@ -421,16 +422,16 @@ class CiscoConfigParser(BaseSpaceConfigParser): regex_banner = re.compile(r"^(banner\s+\S+|\s*vacant-message)\s+(?P\^C|.)") - def __init__(self, config): + def __init__(self, config: str): """Create ConfigParser Object. Args: config (str): The config text to parse. """ - self._banner_end = None + self._banner_end: t.Optional[str] = None super(CiscoConfigParser, self).__init__(config) - def _build_banner(self, config_line): + def _build_banner(self, config_line: str) -> t.Optional[str]: """Handle banner config lines. 
Args: @@ -452,7 +453,7 @@ def _build_banner(self, config_line): return super(CiscoConfigParser, self)._build_banner(config_line) @staticmethod - def is_banner_one_line(config_line): + def is_banner_one_line(config_line: str) -> bool: """Determine if all banner config is on one line.""" _, delimeter, banner = config_line.partition("^C") # Based on NXOS configs, the banner delimeter is ignored until another char is used @@ -461,7 +462,7 @@ def is_banner_one_line(config_line): return False return True - def is_banner_start(self, line): + def is_banner_start(self, line: str) -> bool: """Determine if the line starts a banner config.""" state = super(CiscoConfigParser, self).is_banner_start(line) if state: @@ -469,12 +470,12 @@ def is_banner_start(self, line): return state @property - def banner_end(self): + def banner_end(self) -> str: """Demarcate End of Banner char(s).""" - return self._banner_end + return self._banner_end # type: ignore @banner_end.setter - def banner_end(self, banner_start_line): + def banner_end(self, banner_start_line: str) -> None: banner_parsed = self.regex_banner.match(banner_start_line) if not banner_parsed: raise ValueError("There was an error parsing your banner, the end of the banner could not be found") @@ -484,17 +485,17 @@ def banner_end(self, banner_start_line): class IOSConfigParser(CiscoConfigParser, BaseSpaceConfigParser): """Cisco IOS implementation of ConfigParser Class.""" - def __init__(self, config): + def __init__(self, config: str): """Create ConfigParser Object. Args: config (str): The config text to parse. """ - self.unique_config_lines = set() - self.same_line_children = set() + self.unique_config_lines: t.Set[ConfigLine] = set() + self.same_line_children: t.Set[ConfigLine] = set() super(IOSConfigParser, self).__init__(config) - def _build_banner(self, config_line): + def _build_banner(self, config_line: str) -> t.Union[str, None]: """Handle banner config lines. 
Args: @@ -510,9 +511,9 @@ def _build_banner(self, config_line): config_line = normalise_delimiter_caret_c(self.banner_end, config_line) return super(IOSConfigParser, self)._build_banner(config_line) - def _update_same_line_children_configs(self): + def _update_same_line_children_configs(self) -> None: """Update parents in ``self.config_lines`` per ``self.same_line_children``.""" - new_config_lines = [] + new_config_lines: t.List[ConfigLine] = [] for line in self.config_lines: if line in self.same_line_children: previous_line = new_config_lines[-1] @@ -522,7 +523,7 @@ def _update_same_line_children_configs(self): new_config_lines.append(line) self.config_lines = new_config_lines - def _update_config_lines(self, config_line): + def _update_config_lines(self, config_line: str) -> None: """Add a ``ConfigLine`` object to ``self.config_lines``. In addition to adding entries to config_lines, this also updates: @@ -541,7 +542,7 @@ def _update_config_lines(self, config_line): self.same_line_children.add(entry) self.unique_config_lines.add(entry) - def build_config_relationship(self): + def build_config_relationship(self) -> t.List[ConfigLine]: r"""Parse text tree of config lines and their parents. Example: @@ -572,17 +573,17 @@ class NXOSConfigParser(CiscoConfigParser, BaseSpaceConfigParser): regex_banner = re.compile(r"^banner\s+\S+\s+(?P\S)") - def __init__(self, config): + def __init__(self, config: str): """Create ConfigParser Object. Args: config (str): The config text to parse. """ - self.unique_config_lines = set() - self.same_line_children = set() + self.unique_config_lines: t.Set[ConfigLine] = set() + self.same_line_children: t.Set[ConfigLine] = set() super(NXOSConfigParser, self).__init__(config) - def _build_banner(self, config_line): + def _build_banner(self, config_line: str) -> t.Union[str, None]: """Handle banner config lines. 
Args: @@ -608,24 +609,24 @@ class EOSConfigParser(BaseSpaceConfigParser): class AIREOSConfigParser(CiscoConfigParser, BaseSpaceConfigParser): """AireOSConfigParser implementation fo ConfigParser Class.""" - banner_start = [] + banner_start: t.List[str] = [] - def _build_banner(self, config_line): + def _build_banner(self, config_line: str) -> None: raise NotImplementedError() class LINUXConfigParser(BaseSpaceConfigParser): """Linux config parser.""" - comment_chars = ["#"] + comment_chars: t.List[str] = ["#"] class F5ConfigParser(BaseBraceConfigParser): """F5ConfigParser implementation fo ConfigParser Class.""" - multiline_delimiters = ['"'] + multiline_delimiters: t.List[str] = ['"'] - def __init__(self, config): + def __init__(self, config: str): """Create ConfigParser Object. Args: @@ -633,7 +634,7 @@ def __init__(self, config): """ super().__init__(self._clean_config_f5(config)) - def _clean_config_f5(self, config_text): # pylint: disable=no-self-use + def _clean_config_f5(self, config_text: str) -> str: # pylint: disable=no-self-use """Removes all configuration items with 'ltm rule'. iRules are essentially impossible to parse with the lack of uniformity, @@ -655,7 +656,7 @@ def _clean_config_f5(self, config_text): # pylint: disable=no-self-use final_config = config_text return final_config - def build_config_relationship(self): + def build_config_relationship(self) -> t.List[ConfigLine]: r"""Parse text tree of config lines and their parents. Example: @@ -700,7 +701,7 @@ def build_config_relationship(self): return self.config_lines - def _build_multiline_single_configuration_line(self, delimiter, prev_line): + def _build_multiline_single_configuration_line(self, delimiter: str, prev_line: str): # type: ignore r"""Concatenate Multiline strings between delimiter when newlines causes string to traverse multiple lines. 
Args: @@ -753,26 +754,26 @@ def _build_multiline_single_configuration_line(self, delimiter, prev_line): class JunosConfigParser(BaseSpaceConfigParser): """Junos config parser.""" - comment_chars = [] - banner_start = [] + comment_chars: t.List[str] = [] + banner_start: t.List[str] = [] class ASAConfigParser(CiscoConfigParser): """Cisco ASA implementation of ConfigParser Class.""" - comment_chars = ["!", ":"] + comment_chars: t.List[str] = ["!", ":"] - def __init__(self, config): + def __init__(self, config: str): """Create ConfigParser Object. Args: config (str): The config text to parse. """ - self.unique_config_lines = set() - self.same_line_children = set() + self.unique_config_lines: t.Set[ConfigLine] = set() + self.same_line_children: t.Set[ConfigLine] = set() super(ASAConfigParser, self).__init__(config) - def _update_config_lines(self, config_line): + def _update_config_lines(self, config_line: str) -> None: """Add a ``ConfigLine`` object to ``self.config_lines``. In addition to adding entries to config_lines, this also updates: @@ -791,7 +792,7 @@ def _update_config_lines(self, config_line): self.same_line_children.add(entry) self.unique_config_lines.add(entry) - def build_config_relationship(self): + def build_config_relationship(self) -> t.List[ConfigLine]: r"""Parse text tree of config lines and their parents. 
Example: @@ -820,7 +821,7 @@ def build_config_relationship(self): self._current_parents = (previous_config.config_line,) self.indent_level = self.get_leading_space_count(line) if line is not None and line[0].isspace(): - line = self._build_nested_config(line) + line = self._build_nested_config(line) # type: ignore else: self._current_parents = () @@ -835,10 +836,10 @@ def build_config_relationship(self): class FortinetConfigParser(BaseSpaceConfigParser): """Fortinet Fortios config parser.""" - comment_chars = [] - banner_start = [] + comment_chars: t.List[str] = [] + banner_start: t.List[str] = [] - def __init__(self, config): + def __init__(self, config: str): """Create ConfigParser Object. Args: @@ -847,7 +848,7 @@ def __init__(self, config): self.uncommon_data = self._get_uncommon_lines(config) super(FortinetConfigParser, self).__init__(config) - def is_end_next(self, line): # pylint: disable=no-self-use + def is_end_next(self, line: str) -> bool: # pylint: disable=no-self-use """Determine if line has 'end' or 'next' in it. Args: @@ -868,7 +869,7 @@ def is_end_next(self, line): # pylint: disable=no-self-use return True return False - def _parse_out_offending(self, config): # pylint: disable=no-self-use + def _parse_out_offending(self, config: str) -> str: # pylint: disable=no-self-use """Preprocess out strings that offend the normal spaced configuration syntax. Args: @@ -881,7 +882,7 @@ def _parse_out_offending(self, config): # pylint: disable=no-self-use return re.sub(pattern, r"\1 [\2]\n", config) @property - def config_lines_only(self): + def config_lines_only(self) -> str: """Remove spaces and comments from config lines. 
Returns: @@ -898,7 +899,7 @@ def config_lines_only(self): self._config = "\n".join(config_lines) return self._config - def _get_uncommon_lines(self, config): # pylint: disable=no-self-use + def _get_uncommon_lines(self, config: str) -> t.Dict[str, str]: # pylint: disable=no-self-use """Regex to find replacemsg lines which can contain html/css data. Args: @@ -914,7 +915,7 @@ def _get_uncommon_lines(self, config): # pylint: disable=no-self-use result.update({group_match[0].split('"')[1]: group_match[1]}) return result - def _build_nested_config(self, line): + def _build_nested_config(self, line: str) -> t.Optional[str]: # type: ignore """Handle building child config sections. Args: @@ -928,7 +929,7 @@ def _build_nested_config(self, line): IndexError: When the number of parents does not match the expected deindent level. """ if "[" in line: - line = self.uncommon_data.get(line.split('"')[1]) + line = self.uncommon_data.get(line.split('"')[1]) # type: ignore self._update_config_lines(line) for line in self.generator_config: if not line[0].isspace(): @@ -954,10 +955,10 @@ def _build_nested_config(self, line): class NokiaConfigParser(BaseSpaceConfigParser): """Nokia SrOS config parser.""" - comment_chars = ["#"] - banner_start = [] + comment_chars: t.List[str] = ["#"] + banner_start: t.List[str] = [] - def __init__(self, config): + def __init__(self, config: str): """Create ConfigParser Object. Args: @@ -965,7 +966,7 @@ def __init__(self, config): """ super(NokiaConfigParser, self).__init__(config) - def _is_section_title(self, line): # pylint: disable=no-self-use + def _is_section_title(self, line: str) -> bool: # pylint: disable=no-self-use """Determine if line is a section title in banner. 
Args: @@ -978,7 +979,7 @@ def _is_section_title(self, line): # pylint: disable=no-self-use return True return False - def _get_section_title(self, line): # pylint: disable=no-self-use + def _get_section_title(self, line: str) -> t.Union[str, bool]: # pylint: disable=no-self-use """Determine section title from banner. Args: @@ -993,7 +994,7 @@ def _get_section_title(self, line): # pylint: disable=no-self-use return False @property - def config_lines_only(self): + def config_lines_only(self) -> str: """Remove spaces and comments from config lines. Returns: @@ -1007,5 +1008,5 @@ def config_lines_only(self): config_lines.append(self._get_section_title(line)) else: config_lines.append(line.rstrip()) - self._config = "\n".join(config_lines) + self._config = "\n".join(config_lines) # type: ignore return self._config diff --git a/netutils/dns.py b/netutils/dns.py index a57ac16a..88baa814 100644 --- a/netutils/dns.py +++ b/netutils/dns.py @@ -2,7 +2,7 @@ import socket -def fqdn_to_ip(hostname): +def fqdn_to_ip(hostname: str) -> str: """Provides the IP address of a resolvable name on the machine it is running from. There are many reasons that a valid FQDN may not be resolvable, such as a network error @@ -28,7 +28,7 @@ def fqdn_to_ip(hostname): return socket.getaddrinfo(hostname, 0)[0][4][0] -def is_fqdn_resolvable(hostname): +def is_fqdn_resolvable(hostname: str) -> bool: """Verifies whether a hostname is resolvable on the machine it is running from. 
There are many reasons that a valid FQDN may not be resolvable, such as a network error diff --git a/netutils/interface.py b/netutils/interface.py index d0579261..5d44779a 100644 --- a/netutils/interface.py +++ b/netutils/interface.py @@ -5,10 +5,11 @@ from abc import ABC, abstractmethod from functools import total_ordering from operator import itemgetter + from .constants import BASE_INTERFACES, REVERSE_MAPPING -def interface_range_expansion(interface_pattern): +def interface_range_expansion(interface_pattern: str) -> t.List[str]: """Expand interface pattern into a list of interfaces. Args: @@ -25,8 +26,8 @@ def interface_range_expansion(interface_pattern): ['FastEthernet1/0/10', 'FastEthernet1/0/11', 'FastEthernet1/0/12', 'FastEthernet1/0/13', 'FastEthernet1/0/14', 'FastEthernet1/0/15', 'FastEthernet2/0/10', 'FastEthernet2/0/11', 'FastEthernet2/0/12', 'FastEthernet2/0/13', 'FastEthernet2/0/14', 'FastEthernet2/0/15'] """ - def _range_expand(regex_match): - number_range = [] + def _range_expand(regex_match: str) -> t.List[int]: + number_range: t.List[int] = [] for value in regex_match.split(","): if "-" in value[1:]: first_number, second_number = value[1:].split("-", 1) @@ -35,7 +36,7 @@ def _range_expand(regex_match): number_range.append(int(value)) return number_range - def _pairwise(interface_constant): + def _pairwise(interface_constant: t.List[int]) -> t.List[t.Iterable[t.Any]]: interface_constant_it = iter(interface_constant) return list(zip(interface_constant_it, interface_constant_it)) @@ -57,14 +58,14 @@ def _pairwise(interface_constant): for element in itertools.product(*cartesian_list): current_interface = "" for count, item in enumerate(interface_constant_out): - current_interface += interface_pattern[item[0] : item[1]] # noqa: E203 + current_interface += interface_pattern[item[0] : item[1]] # type: ignore # noqa: E203 current_interface += str(element[count]) expanded_interfaces.append(current_interface) return expanded_interfaces -def 
split_interface(interface): +def split_interface(interface: str) -> t.Tuple[str, str]: """Split an interface name based on first digit, slash, or space match. Args: @@ -86,7 +87,9 @@ def split_interface(interface): return (head, tail) -def canonical_interface_name(interface, addl_name_map=None, verify=False): +def canonical_interface_name( + interface: str, addl_name_map: t.Optional[t.Dict[str, str]] = None, verify: t.Optional[bool] = False +) -> str: """Function to return an interface's canonical name (fully expanded name). Use of explicit matches used to indicate a clear understanding on any potential @@ -119,7 +122,7 @@ def canonical_interface_name(interface, addl_name_map=None, verify=False): name_map.update(addl_name_map) # check in dict for mapping if name_map.get(interface_type): - long_int = name_map.get(interface_type) + long_int = name_map.get(interface_type, "") return long_int + str(interface_number) if verify: raise ValueError(f"Verify interface on and no match found for {interface}") @@ -127,7 +130,13 @@ def canonical_interface_name(interface, addl_name_map=None, verify=False): return interface -def canonical_interface_name_list(interfaces, addl_name_map=None, verify=False, order=None, reverse=None): +def canonical_interface_name_list( + interfaces: t.List[str], + addl_name_map: t.Optional[t.Dict[str, str]] = None, + verify: bool = False, + order: t.Optional[str] = None, + reverse: t.Optional[bool] = None, +) -> t.List[str]: """Function to return a list of interface's canonical name (fully expanded name). 
Use of explicit matches used to indicate a clear understanding on any potential @@ -178,7 +187,7 @@ def canonical_interface_name_list(interfaces, addl_name_map=None, verify=False, raise ValueError(f"Verify interface on and no match found for {no_match_string}") if order: - canonical_interface_list = INTERFACE_LIST_ORDERING_OPTIONS.get(order)(canonical_interface_list) + canonical_interface_list = INTERFACE_LIST_ORDERING_OPTIONS.get(order)(canonical_interface_list) # type: ignore if reverse: canonical_interface_list = _reverse_list(canonical_interface_list) @@ -186,7 +195,12 @@ def canonical_interface_name_list(interfaces, addl_name_map=None, verify=False, return canonical_interface_list -def abbreviated_interface_name(interface, addl_name_map=None, addl_reverse_map=None, verify=False): +def abbreviated_interface_name( + interface: str, + addl_name_map: t.Optional[t.Dict[str, str]] = None, + addl_reverse_map: t.Optional[t.Dict[str, str]] = None, + verify: t.Optional[bool] = False, +) -> str: """Function to return an abbreviated representation of the interface name. Args: @@ -225,7 +239,7 @@ def abbreviated_interface_name(interface, addl_name_map=None, addl_reverse_map=N canonical_type = interface_type try: - abbreviated_name = rev_name_map[canonical_type] + str(interface_number) + abbreviated_name = rev_name_map[canonical_type] + str(interface_number) # type: ignore return abbreviated_name except KeyError: @@ -237,7 +251,7 @@ def abbreviated_interface_name(interface, addl_name_map=None, addl_reverse_map=N return interface -@total_ordering +@total_ordering # type: ignore class CharacterClass(ABC): """CharacterClass embodies the state needed to sort interfaces.""" @@ -247,10 +261,10 @@ def __init__(self, val: str, terminal: bool = False) -> None: # noqa: D107 super().__init__() @abstractmethod - def __lt__(self, other) -> bool: # noqa: D105 + def __lt__(self, other) -> bool: # type: ignore # noqa: D105 ... 
- def __eq__(self, other) -> bool: # noqa: D105 + def __eq__(self, other) -> t.Any: # type: ignore # noqa: D105 return self.weight == other.weight and self.val == other.val @property @@ -260,7 +274,7 @@ def weight(self) -> int: ... @property - def terminal(self): + def terminal(self) -> bool: """Flag whether a node is terminal.""" return self._terminal @@ -280,8 +294,8 @@ def __hash__(self) -> int: # noqa: D105 class CCString(CharacterClass): """Strings are sorted lexicographically.""" - def __lt__(self, other) -> bool: # noqa: D105 - return self.weight < other.weight or self.val < other.val + def __lt__(self, other) -> bool: # type: ignore # noqa: D105 + return self.weight < other.weight or self.val < other.val # type: ignore def __repr__(self) -> str: # noqa: D105 return f'CCString("{self.val}", {self.terminal})' @@ -294,7 +308,7 @@ def weight(self) -> int: # noqa: D107,D102 class CCInt(CharacterClass): """Ints must be sorted canonically because '11' < '5'.""" - def __lt__(self, other) -> bool: # noqa: D105 + def __lt__(self, other) -> bool: # type: ignore # noqa: D105 return self.weight < other.weight or int(self.val) < int(other.val) def __repr__(self) -> str: # noqa: D105 @@ -310,7 +324,7 @@ class CCSeparator(CharacterClass): weights: t.Dict[str, int] = {".": 10, "/": 20} - def __lt__(self, other) -> bool: # noqa: D105 + def __lt__(self, other) -> bool: # type: ignore # noqa: D105 return self.weight < other.weight or self.weights.get(self.val, 0) < self.weights.get(other.val, 0) def __repr__(self) -> str: # noqa: D105 @@ -321,7 +335,7 @@ def weight(self) -> int: # noqa: D102 return 30 -def _CCfail(*args): # pylint: disable=C0103 +def _CCfail(*args) -> None: # type: ignore # pylint: disable=C0103 """Helper to raise an exception on a bad character match.""" raise ValueError(f"unknown character '{args[0][0]}'.") @@ -345,15 +359,15 @@ def _split_interface_tuple(interface: str) -> t.Tuple[CharacterClass, ...]: part += interface[idx] idx += 1 if part and idx == 
len(interface): - tail = (*tail, cls(part, True)) + tail = (*tail, cls(part, True)) # type: ignore break if part: - tail = (*tail, cls(part)) + tail = (*tail, cls(part)) # type: ignore break return tail -def _reverse_list(interface_list): +def _reverse_list(interface_list: t.List[str]) -> t.List[str]: """Reverses an alphabetical list of interfaces. Args: @@ -434,8 +448,13 @@ def sort_interface_list(interfaces: t.List[str]) -> t.List[str]: def abbreviated_interface_name_list( # pylint: disable=R0913, R0914 - interfaces, addl_name_map=None, addl_reverse_map=None, verify=False, order=None, reverse=None -): + interfaces: t.List[str], + addl_name_map: t.Optional[t.Dict[str, str]] = None, + addl_reverse_map: t.Optional[t.Dict[str, str]] = None, + verify: t.Optional[bool] = False, + order: t.Optional[str] = None, + reverse: t.Optional[bool] = None, +) -> t.List[str]: """Function to return a list of interface's abbreviated name. Args: @@ -489,7 +508,7 @@ def abbreviated_interface_name_list( # pylint: disable=R0913, R0914 canonical_type = interface_type try: - abbreviated_name = rev_name_map[canonical_type] + str(interface_number) + abbreviated_name = rev_name_map[canonical_type] + str(interface_number) # type: ignore abbreviated_interface_list.append(abbreviated_name) except KeyError: abbreviated_interface_list.append(interface) @@ -500,7 +519,7 @@ def abbreviated_interface_name_list( # pylint: disable=R0913, R0914 raise ValueError(f"Verify interface on and no match found for {no_match_string}") if order: - abbreviated_interface_list = INTERFACE_LIST_ORDERING_OPTIONS.get(order)(abbreviated_interface_list) + abbreviated_interface_list = INTERFACE_LIST_ORDERING_OPTIONS.get(order)(abbreviated_interface_list) # type: ignore if reverse: abbreviated_interface_list = _reverse_list(abbreviated_interface_list) @@ -508,7 +527,7 @@ def abbreviated_interface_name_list( # pylint: disable=R0913, R0914 return abbreviated_interface_list -def _check_order_option_exists(order): +def 
_check_order_option_exists(order: str) -> None: """Check if the given order for an interface list exists. Args: @@ -521,7 +540,7 @@ def _check_order_option_exists(order): raise ValueError(f"{order} is not one of the supported orderings") -def _ranges_in_list(numbers: t.List[int]): +def _ranges_in_list(numbers: t.List[int]) -> t.List[t.List[int]]: """Find contiguous ranges in a list of numbers. Example: @@ -556,7 +575,7 @@ def interface_range_compress(interface_list: t.List[str]) -> t.List[str]: Returns: list: list of interface ranges """ - result_dict = {} + result_dict = {} # type: ignore final_result_list = [] sorted_ints = [_split_interface_tuple(x) for x in sort_interface_list(interface_list)] if not sorted_ints: diff --git a/netutils/ip.py b/netutils/ip.py index 497797bd..bb7471d3 100644 --- a/netutils/ip.py +++ b/netutils/ip.py @@ -1,10 +1,12 @@ """Functions for working with IP addresses.""" import ipaddress +import typing as t from operator import attrgetter + from netutils.constants import IPV4_MASKS, IPV6_MASKS -def ipaddress_address(ip, attr): +def ipaddress_address(ip: str, attr: str) -> t.Any: """Convenience function primarily built to expose ipaddress.ip_address to Jinja. Args: @@ -31,7 +33,7 @@ def ipaddress_address(ip, attr): return retrieved_method -def ipaddress_interface(ip, attr): +def ipaddress_interface(ip: str, attr: str) -> t.Any: """Convenience function primarily built to expose ipaddress.ip_interface to Jinja. Args: @@ -55,7 +57,7 @@ def ipaddress_interface(ip, attr): return retrieved_method -def ipaddress_network(ip, attr): +def ipaddress_network(ip: str, attr: str) -> t.Any: """Convenience function primarily built to expose ipaddress.ip_network to Jinja. Args: @@ -80,7 +82,7 @@ def ipaddress_network(ip, attr): return retrieved_method -def ip_to_hex(ip): +def ip_to_hex(ip: str) -> str: """Converts an IP address in string format to a hex string. 
Args: @@ -99,7 +101,7 @@ def ip_to_hex(ip): return str(hex(int(ip_obj)))[2:].zfill(int(ip_obj.max_prefixlen / 4)) -def ip_addition(ip, val): +def ip_addition(ip: str, val: int) -> str: """Adds an integer to an IP address. Args: @@ -118,7 +120,7 @@ def ip_addition(ip, val): return str(ipaddress.ip_address(ip) + val) -def ip_to_bin(ip): +def ip_to_bin(ip: str) -> str: """Converts an IP address in string format to a binary string. Args: @@ -137,7 +139,7 @@ def ip_to_bin(ip): return bin(int(ip_obj))[2:].zfill(ip_obj.max_prefixlen) -def ip_subtract(ip, val): +def ip_subtract(ip: str, val: int) -> str: """Subtract an integer to an IP address. Args: @@ -156,7 +158,7 @@ def ip_subtract(ip, val): return str(ipaddress.ip_address(ip) - val) -def is_ip(ip): +def is_ip(ip: str) -> bool: """Verifies whether or not a string is a valid IP address. Args: @@ -174,13 +176,13 @@ def is_ip(ip): >>> """ try: - ip = ipaddress.ip_address(ip) + ip = ipaddress.ip_address(ip) # type: ignore return True except ValueError: return False -def is_netmask(netmask): +def is_netmask(netmask: str) -> bool: """Verifies whether or not a string is a valid subnet mask. Args: @@ -204,7 +206,7 @@ def is_netmask(netmask): return False -def netmask_to_cidr(netmask): +def netmask_to_cidr(netmask: str) -> str: """Creates a CIDR notation of a given subnet mask in decimal format. Args: @@ -221,11 +223,11 @@ def netmask_to_cidr(netmask): 23 """ if is_netmask(netmask): - return bin(int(ipaddress.ip_address(netmask))).count("1") + return bin(int(ipaddress.ip_address(netmask))).count("1") # type: ignore raise ValueError("Subnet mask is not valid.") -def cidr_to_netmask(cidr): +def cidr_to_netmask(cidr: int) -> str: """Creates a decimal format of a CIDR value. **IPv4** only. For IPv6, please use `cidr_to_netmaskv6`. 
@@ -248,7 +250,7 @@ def cidr_to_netmask(cidr): raise ValueError("Parameter must be an integer between 0 and 32.") -def cidr_to_netmaskv6(cidr): +def cidr_to_netmaskv6(cidr: int) -> str: """Creates a decimal format of a CIDR value. Args: @@ -269,7 +271,7 @@ def cidr_to_netmaskv6(cidr): raise ValueError("Parameter must be an integer between 0 and 128.") -def get_all_host(ip_network): +def get_all_host(ip_network: str) -> t.List[str]: """Given a network, return the list of usable IP addresses. Args: @@ -284,10 +286,10 @@ def get_all_host(ip_network): ['10.100.100.1', '10.100.100.2', '10.100.100.3', '10.100.100.4', '10.100.100.5', '10.100.100.6'] >>> """ - return (str(ip) for ip in ipaddress.ip_network(ip_network).hosts()) + return (str(ip) for ip in ipaddress.ip_network(ip_network).hosts()) # type: ignore -def get_broadcast_address(ip_network): +def get_broadcast_address(ip_network: str) -> str: """Given a network, determine the broadcast IP address. Args: @@ -305,7 +307,7 @@ def get_broadcast_address(ip_network): return str(ipaddress.ip_network(ip_network).broadcast_address) -def get_first_usable(ip_network): +def get_first_usable(ip_network: str) -> str: """Given a network, determine the first usable IP address. Args: @@ -326,7 +328,7 @@ def get_first_usable(ip_network): return str(net[1]) -def get_peer_ip(ip_interface): +def get_peer_ip(ip_interface: str) -> t.Any: """Given an IP interface (an ip address, with subnet mask) that is on a peer network, return the peer IP. Args: @@ -364,7 +366,7 @@ def get_peer_ip(ip_interface): return val[0] -def get_usable_range(ip_network): +def get_usable_range(ip_network: str) -> str: """Given a network, return the string of usable IP addresses. 
Args: diff --git a/netutils/lib_mapper.py b/netutils/lib_mapper.py index c55c7c9a..141dc6b7 100644 --- a/netutils/lib_mapper.py +++ b/netutils/lib_mapper.py @@ -1,8 +1,9 @@ """Variable definitions to map from network automation library to network automation library.""" import copy +import typing as t -_NETMIKO_LIB_MAPPER = { +_NETMIKO_LIB_MAPPER: t.Dict[str, t.Dict[str, str]] = { "a10": {}, "accedian": {}, "adtran_os": {}, diff --git a/netutils/mac.py b/netutils/mac.py index 73d5affd..c78dcb8e 100644 --- a/netutils/mac.py +++ b/netutils/mac.py @@ -1,27 +1,29 @@ """Functions for working with MAC addresses.""" import re +import typing as t from functools import wraps + from .constants import MAC_CREATE, MAC_REGEX -def _valid_mac(func): +def _valid_mac(func): # type: ignore """Decorator to validate a MAC address is valid.""" @wraps(func) - def decorated(*args, **kwargs): + def decorated(*args, **kwargs): # type: ignore if kwargs.get("mac"): mac = kwargs.get("mac") else: mac = args[0] - if not is_valid_mac(mac): + if not is_valid_mac(mac): # type: ignore raise ValueError(f"There was not a valid mac address in: `{mac}`") return func(*args, **kwargs) return decorated -def is_valid_mac(mac): +def is_valid_mac(mac: str) -> bool: """Verifies whether or not a string is a valid MAC address. Args: @@ -44,8 +46,8 @@ def is_valid_mac(mac): return False -@_valid_mac -def mac_to_format(mac, frmt="MAC_NO_SPECIAL"): +@_valid_mac # type: ignore +def mac_to_format(mac: str, frmt: str = "MAC_NO_SPECIAL") -> str: """Converts the MAC address to a specific format. 
Args: @@ -66,11 +68,11 @@ def mac_to_format(mac, frmt="MAC_NO_SPECIAL"): mac = mac_normalize(mac) count = MAC_CREATE[frmt]["count"] char = MAC_CREATE[frmt]["char"] - return char.join([mac[i : i + count] for i in range(0, len(mac), count)]) # noqa: E203 + return char.join([mac[i : i + count] for i in range(0, len(mac), count)]) # type: ignore # noqa: E203 -@_valid_mac -def mac_to_int(mac): +@_valid_mac # type: ignore +def mac_to_int(mac: str) -> int: """Converts the MAC address to an integer. Args: @@ -88,8 +90,8 @@ def mac_to_int(mac): return int(mac_normalize(mac), 16) -@_valid_mac -def mac_type(mac): # pylint: disable=inconsistent-return-statements +@_valid_mac # type: ignore +def mac_type(mac: str) -> str: # type: ignore # pylint: disable=inconsistent-return-statements """Retuns the "type" of MAC address, as defined by the regex pattern names. Args: @@ -111,8 +113,8 @@ def mac_type(mac): # pylint: disable=inconsistent-return-statements return name -@_valid_mac -def mac_normalize(mac): +@_valid_mac # type: ignore +def mac_normalize(mac: str) -> str: """Retuns the MAC address with only the address, and no special characters. 
Args: diff --git a/netutils/password.py b/netutils/password.py index 814eaf6c..4c042aa2 100644 --- a/netutils/password.py +++ b/netutils/password.py @@ -2,9 +2,10 @@ import crypt import random -import string import secrets +import string import sys +import typing as t from functools import wraps # Code example from Python docs @@ -69,11 +70,11 @@ ] -def _fail_on_mac(func): +def _fail_on_mac(func): # type: ignore """There is an issue with Macintosh for encryption.""" @wraps(func) - def decorated(*args, **kwargs): + def decorated(*args, **kwargs): # type: ignore if sys.platform == "darwin": raise ValueError("Macintosh is not supported, see https://bugs.python.org/issue33213 for upstream issue.") return func(*args, **kwargs) @@ -81,7 +82,9 @@ def decorated(*args, **kwargs): return decorated -def compare_type5(unencrypted_password, encrypted_password, return_original=False): +def compare_type5( + unencrypted_password: str, encrypted_password: str, return_original: t.Optional[bool] = False +) -> bool: """Given an encrypted and unencrypted password of Cisco Type 5 password, compare if they are a match. Args: @@ -103,12 +106,14 @@ def compare_type5(unencrypted_password, encrypted_password, return_original=Fals salt = get_hash_salt(encrypted_password) if encrypt_type5(unencrypted_password, salt) == encrypted_password: if return_original is True: - return encrypted_password + return encrypted_password # type: ignore return True return False -def compare_type7(unencrypted_password, encrypted_password, return_original=False): +def compare_type7( + unencrypted_password: str, encrypted_password: str, return_original: t.Optional[bool] = False +) -> bool: """Given an encrypted and unencrypted password of Cisco Type 7 password, compare if they are a match. 
Args: @@ -129,19 +134,19 @@ def compare_type7(unencrypted_password, encrypted_password, return_original=Fals """ if decrypt_type7(encrypted_password) == unencrypted_password: if return_original is True: - return encrypted_password + return encrypted_password # type: ignore return True return False -def decrypt_type7(encrypted_password): +def decrypt_type7(encrypted_password: str) -> str: """Given an unencrypted password of Cisco Type 7 password decrypt it. Args: encrypted_password (str): A password that has been encrypted, and will be decrypted. Returns: - string: The unencrypted_password password. + str: The unencrypted_password password. Example: >>> from netutils.password import decrypt_type7 @@ -164,8 +169,8 @@ def decrypt_type7(encrypted_password): ) -@_fail_on_mac -def encrypt_type5(unencrypted_password, salt=None, salt_len=4): +@_fail_on_mac # type: ignore +def encrypt_type5(unencrypted_password: str, salt: t.Optional[str] = None, salt_len: t.Optional[int] = 4) -> str: """Given an unencrypted password of Cisco Type 5 password, encrypt it. Args: @@ -174,7 +179,7 @@ def encrypt_type5(unencrypted_password, salt=None, salt_len=4): salt_len (int, optional): The number of random set of characters, when not manually set. Defaults to 4. Returns: - string: The encrypted password. + str: The encrypted password. 
Example: >>> from netutils.password import encrypt_type5 @@ -183,13 +188,13 @@ def encrypt_type5(unencrypted_password, salt=None, salt_len=4): >>> """ if not salt: - salt = "".join(secrets.choice(ALPHABET) for i in range(salt_len)) + salt = "".join(secrets.choice(ALPHABET) for i in range(salt_len)) # type: ignore elif not set(salt) <= set(ALPHABET): raise ValueError(f"type5_pw salt used inproper characters, must be one of {ALPHABET}") return crypt.crypt(unencrypted_password, f"$1${salt}$") -def encrypt_type7(unencrypted_password, salt=None): +def encrypt_type7(unencrypted_password: str, salt: t.Optional[str] = None) -> str: """Given an unencrypted password of Cisco Type 7 password, encypt it. Args: @@ -197,7 +202,7 @@ def encrypt_type7(unencrypted_password, salt=None): salt (str, optional): A random number between 0 and 15 that can be set by the operator. Defaults to random generated one. Returns: - string: The encrypted password. + str: The encrypted password. Example: >>> from netutils.password import encrypt_type7 @@ -206,25 +211,25 @@ def encrypt_type7(unencrypted_password, salt=None): >>> """ if not salt: - salt = random.randrange(0, 15) # nosec - encrypted_password = "%02x" % salt # pylint: disable=consider-using-f-string + salt = random.randrange(0, 15) # type: ignore # nosec + encrypted_password = "%02x" % salt # type: ignore # pylint: disable=consider-using-f-string for i, _ in enumerate(unencrypted_password): - hex_password = "%02x" % (ord(unencrypted_password[i]) ^ XLAT[salt]) # pylint: disable=consider-using-f-string + hex_password = "%02x" % (ord(unencrypted_password[i]) ^ XLAT[salt]) # type: ignore # pylint: disable=consider-using-f-string encrypted_password += hex_password - salt += 1 - if salt == 51: - salt = 0 + salt += 1 # type: ignore + if salt == 51: # type: ignore + salt = 0 # type: ignore return encrypted_password -def get_hash_salt(encrypted_password): +def get_hash_salt(encrypted_password: str) -> str: """Given an encrypted password obtain 
the salt value from it. Args: encrypted_password (str): A password that has been encrypted, which the salt will be taken from. Returns: - string: The encrypted password. + str: The encrypted password. Example: >>> from netutils.password import get_hash_salt diff --git a/netutils/ping.py b/netutils/ping.py index f5cc0976..39dc373c 100644 --- a/netutils/ping.py +++ b/netutils/ping.py @@ -1,8 +1,9 @@ """Functions to create a ping via pure Python.""" import socket +import typing as t -def tcp_ping(ip, port, timeout=1): # pylint: disable=invalid-name +def tcp_ping(ip: str, port: int, timeout: t.Optional[int] = 1) -> bool: # pylint: disable=invalid-name """Verifies whether a TCP port is open on a given IP address. Args: @@ -22,10 +23,10 @@ def tcp_ping(ip, port, timeout=1): # pylint: disable=invalid-name >>> """ sckt = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - sckt.settimeout(int(timeout)) + sckt.settimeout(int(timeout)) # type: ignore try: sckt.connect((ip, int(port))) # pylint: disable=invalid-name - sckt.shutdown(int(timeout)) + sckt.shutdown(int(timeout)) # type: ignore return True except socket.error: return False diff --git a/netutils/protocol_mapper.py b/netutils/protocol_mapper.py index ed48dc37..b06cd59c 100644 --- a/netutils/protocol_mapper.py +++ b/netutils/protocol_mapper.py @@ -1,8 +1,10 @@ """Protocol Mappers.""" +import typing as t + from netutils.constants import PROTOCOLS -def _number_to_name_mapper(proto: str) -> dict: +def _number_to_name_mapper(proto: str) -> t.Dict[int, str]: """Create a dictionary that maps protocol port number to a name. 
Args: diff --git a/netutils/route.py b/netutils/route.py index da11a918..8d899a5b 100644 --- a/netutils/route.py +++ b/netutils/route.py @@ -1,13 +1,14 @@ """Utilities to get best route from routing table.""" import ipaddress +import typing as t class NoRouteFound(BaseException): """Custom Exception for No Route Found.""" -def longest_prefix_match(ip_addr, routes): +def longest_prefix_match(ip_addr: str, routes: t.List[t.Dict[str, str]]) -> str: """From a list of networks and an IP address, find the most specific route. Args: @@ -29,7 +30,7 @@ def longest_prefix_match(ip_addr, routes): if not len(routes) > 0: raise IndexError(f"'routing_table' should have more than zero indexes. Got {len(routes)}") if isinstance(ip_addr, str): - ip_addr = ipaddress.ip_address(ip_addr) + ip_addr = ipaddress.ip_address(ip_addr) # type: ignore else: if not isinstance(ip_addr, (ipaddress.IPv4Address, ipaddress.IPv6Address)): raise TypeError(f"'ip_addr' should be a str, got {type(ip_addr)}") @@ -37,7 +38,7 @@ def longest_prefix_match(ip_addr, routes): networks = [ ipaddress.IPv4Network(f'{route["network"]}/{route["mask"]}') for route in routes - if ip_addr in ipaddress.IPv4Network(f'{route["network"]}/{route["mask"]}') + if ip_addr in ipaddress.IPv4Network(f'{route["network"]}/{route["mask"]}') # type: ignore ] try: return str(sorted(networks)[-1]) diff --git a/netutils/time.py b/netutils/time.py index 0545d55a..1db6c798 100644 --- a/netutils/time.py +++ b/netutils/time.py @@ -1,9 +1,10 @@ """Functions for working with time.""" import re + from .constants import TIME_MAPPINGS, UPTIME_REGEX_PATTERNS -def uptime_seconds_to_string(uptime_seconds): +def uptime_seconds_to_string(uptime_seconds: int) -> str: """Converts uptime in seconds to uptime in string format. 
Args: @@ -29,7 +30,7 @@ def uptime_seconds_to_string(uptime_seconds): return ", ".join(result) -def uptime_string_to_seconds(uptime_string): +def uptime_string_to_seconds(uptime_string: str) -> int: """Converts uptime string seconds. Args: @@ -65,5 +66,5 @@ def uptime_string_to_seconds(uptime_string): uptime_seconds = 0 for time_interval, value in TIME_MAPPINGS: if uptime_dict.get(time_interval): - uptime_seconds += int(uptime_dict.get(time_interval)) * value + uptime_seconds += int(uptime_dict.get(time_interval)) * value # type: ignore return uptime_seconds diff --git a/netutils/utils.py b/netutils/utils.py index 9d4c586e..8255f450 100644 --- a/netutils/utils.py +++ b/netutils/utils.py @@ -1,4 +1,5 @@ """Utilities for the netutils library.""" +import typing as t from importlib import import_module _JINJA2_FUNCTION_MAPPINGS = { @@ -65,7 +66,7 @@ } -def jinja2_convenience_function(): +def jinja2_convenience_function() -> t.Dict[str, str]: """Convenience function that allows netutils filter to be used easily with jinja2. Returns: diff --git a/netutils/vlan.py b/netutils/vlan.py index b7cfe476..812c2c5d 100644 --- a/netutils/vlan.py +++ b/netutils/vlan.py @@ -1,12 +1,17 @@ """Functions for working with VLANs.""" import re - -from operator import itemgetter +import typing as t from itertools import groupby +from operator import itemgetter -def vlanlist_to_config(vlan_list, first_line_len=48, other_line_len=44, min_grouping_size=3): +def vlanlist_to_config( + vlan_list: t.List[int], + first_line_len: t.Optional[int] = 48, + other_line_len: t.Optional[int] = 44, + min_grouping_size: t.Optional[int] = 3, +) -> t.List[str]: """Given a List of VLANs, build the IOS-like vlan list of configurations. 
Args: @@ -28,20 +33,20 @@ def vlanlist_to_config(vlan_list, first_line_len=48, other_line_len=44, min_grou ['1,3,5,6,100,101,102,103,104,105,107,109'] """ - def build_final_vlan_cfg(vlan_cfg): - if len(vlan_cfg) <= first_line_len: + def build_final_vlan_cfg(vlan_cfg: str) -> t.List[str]: + if len(vlan_cfg) <= first_line_len: # type: ignore return [vlan_cfg] # Split VLAN config if lines are too long first_line = re.match(f"^.{{0,{first_line_len}}}(?=,)", vlan_cfg) - vlan_cfg_lines = [first_line.group(0)] + vlan_cfg_lines = [first_line.group(0)] # type: ignore next_lines = next_lines = re.compile(f"(?<=,).{{0,{other_line_len}}}(?=,|$)") - for line in next_lines.findall(vlan_cfg, first_line.end()): + for line in next_lines.findall(vlan_cfg, first_line.end()): # type: ignore vlan_cfg_lines.append(line) return vlan_cfg_lines # Fail if min_grouping_size is less than 1. - if min_grouping_size < 1: + if min_grouping_size < 1: # type: ignore raise ValueError("Minimum grouping size must be equal to or greater than one.") # Sort and de-dup VLAN list @@ -66,7 +71,7 @@ def build_final_vlan_cfg(vlan_cfg): group_length = len(group) group_string = f"{group[0]}" # Compress based on grouping_size - if group_length >= min_grouping_size: + if group_length >= min_grouping_size: # type: ignore group_string += f"-{group[-1]}" # If it does not match grouping_size, and is greater than one elif group_length != 1: @@ -76,7 +81,7 @@ def build_final_vlan_cfg(vlan_cfg): return build_final_vlan_cfg(",".join(vlan_strings)) -def vlanconfig_to_list(vlan_config): +def vlanconfig_to_list(vlan_config: str) -> t.List[int]: """Given an IOS-like vlan list of configurations, return the list of VLANs. 
Args: diff --git a/poetry.lock b/poetry.lock index de573944..bc7c7b5e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -318,6 +318,25 @@ category = "dev" optional = false python-versions = "*" +[[package]] +name = "mypy" +version = "0.961" +description = "Optional static typing for Python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +mypy-extensions = ">=0.4.3" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typed-ast = {version = ">=1.4.0,<2", markers = "python_version < \"3.8\""} +typing-extensions = ">=3.10" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +python2 = ["typed-ast (>=1.4.0,<2)"] +reports = ["lxml"] + [[package]] name = "mypy-extensions" version = "0.4.3" @@ -743,7 +762,7 @@ testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytes [metadata] lock-version = "1.1" python-versions = "^3.6" -content-hash = "56454ebe211ab923d3e1ce70e867397ed84ba4d31766941daf3e85a1c2c036fa" +content-hash = "9d23f91405a5a3ea5cc425913d6c78c410cb941d131ee3bf36dad117cc421934" [metadata.files] alabaster = [ @@ -995,6 +1014,31 @@ mistune = [ {file = "mistune-0.8.4-py2.py3-none-any.whl", hash = "sha256:88a1051873018da288eee8538d476dffe1262495144b33ecb586c4ab266bb8d4"}, {file = "mistune-0.8.4.tar.gz", hash = "sha256:59a3429db53c50b5c6bcc8a07f8848cb00d7dc8bdb431a4ab41920d201d4756e"}, ] +mypy = [ + {file = "mypy-0.961-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:697540876638ce349b01b6786bc6094ccdaba88af446a9abb967293ce6eaa2b0"}, + {file = "mypy-0.961-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b117650592e1782819829605a193360a08aa99f1fc23d1d71e1a75a142dc7e15"}, + {file = "mypy-0.961-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bdd5ca340beffb8c44cb9dc26697628d1b88c6bddf5c2f6eb308c46f269bb6f3"}, + {file = "mypy-0.961-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:3e09f1f983a71d0672bbc97ae33ee3709d10c779beb613febc36805a6e28bb4e"}, + {file = "mypy-0.961-cp310-cp310-win_amd64.whl", hash = "sha256:e999229b9f3198c0c880d5e269f9f8129c8862451ce53a011326cad38b9ccd24"}, + {file = "mypy-0.961-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b24be97351084b11582fef18d79004b3e4db572219deee0212078f7cf6352723"}, + {file = "mypy-0.961-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f4a21d01fc0ba4e31d82f0fff195682e29f9401a8bdb7173891070eb260aeb3b"}, + {file = "mypy-0.961-cp36-cp36m-win_amd64.whl", hash = "sha256:439c726a3b3da7ca84a0199a8ab444cd8896d95012c4a6c4a0d808e3147abf5d"}, + {file = "mypy-0.961-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5a0b53747f713f490affdceef835d8f0cb7285187a6a44c33821b6d1f46ed813"}, + {file = "mypy-0.961-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0e9f70df36405c25cc530a86eeda1e0867863d9471fe76d1273c783df3d35c2e"}, + {file = "mypy-0.961-cp37-cp37m-win_amd64.whl", hash = "sha256:b88f784e9e35dcaa075519096dc947a388319cb86811b6af621e3523980f1c8a"}, + {file = "mypy-0.961-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d5aaf1edaa7692490f72bdb9fbd941fbf2e201713523bdb3f4038be0af8846c6"}, + {file = "mypy-0.961-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9f5f5a74085d9a81a1f9c78081d60a0040c3efb3f28e5c9912b900adf59a16e6"}, + {file = "mypy-0.961-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f4b794db44168a4fc886e3450201365c9526a522c46ba089b55e1f11c163750d"}, + {file = "mypy-0.961-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:64759a273d590040a592e0f4186539858c948302c653c2eac840c7a3cd29e51b"}, + {file = "mypy-0.961-cp38-cp38-win_amd64.whl", hash = "sha256:63e85a03770ebf403291ec50097954cc5caf2a9205c888ce3a61bd3f82e17569"}, + {file = "mypy-0.961-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:5f1332964963d4832a94bebc10f13d3279be3ce8f6c64da563d6ee6e2eeda932"}, + {file = "mypy-0.961-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:006be38474216b833eca29ff6b73e143386f352e10e9c2fbe76aa8549e5554f5"}, + {file = "mypy-0.961-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9940e6916ed9371809b35b2154baf1f684acba935cd09928952310fbddaba648"}, + {file = "mypy-0.961-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a5ea0875a049de1b63b972456542f04643daf320d27dc592d7c3d9cd5d9bf950"}, + {file = "mypy-0.961-cp39-cp39-win_amd64.whl", hash = "sha256:1ece702f29270ec6af25db8cf6185c04c02311c6bb21a69f423d40e527b75c56"}, + {file = "mypy-0.961-py3-none-any.whl", hash = "sha256:03c6cc893e7563e7b2949b969e63f02c000b32502a1b4d1314cabe391aa87d66"}, + {file = "mypy-0.961.tar.gz", hash = "sha256:f730d56cb924d371c26b8eaddeea3cc07d78ff51c521c6d04899ac6904b75492"}, +] mypy-extensions = [ {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, diff --git a/pyproject.toml b/pyproject.toml index 6db13547..4031f511 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -44,6 +44,7 @@ sphinx = "*" sphinx-rtd-theme = "*" toml = "*" yamllint = "*" +mypy = "^0.961" [tool.black] line-length = 120 @@ -94,6 +95,28 @@ python_paths = "./" testpaths = "tests/" addopts = "-vv --doctest-modules -p no:warnings --ignore-glob='*mock*'" +[tool.mypy] +python_version = 3.7 +ignore_errors = false +disallow_untyped_calls = true +disallow_untyped_defs = true +disallow_incomplete_defs = true +disallow_untyped_decorators = true +check_untyped_defs = true +disallow_any_generics = true +ignore_missing_imports = true +strict_optional = true +warn_unused_ignores = true +warn_return_any = true +warn_unused_configs = true +warn_redundant_casts = true 
+disallow_subclassing_any = true +no_implicit_optional = true +implicit_reexport = true +strict_equality = true +exclude = ["tests/", "tasks.py"] +show_error_codes = true + [build-system] requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" From a00dcd98c71647506217f8317a60c8e77d7f9739 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Fri, 24 Jun 2022 08:54:56 -0500 Subject: [PATCH 2/5] update tasks and ci to do mypy as well --- .github/workflows/ci.yml | 14 + README.md | 3 +- .../lib_mapping/ANSIBLE_reverse_table.rst | 78 ++++ .../netutils/lib_mapping/ANSIBLE_table.rst | 78 ++++ .../lib_mapping/NAPALM_reverse_table.rst | 42 +++ .../netutils/lib_mapping/NAPALM_table.rst | 42 +++ .../NTCTEMPLATES_reverse_table.rst | 339 ++++++++++++++++++ .../lib_mapping/NTCTEMPLATES_table.rst | 339 ++++++++++++++++++ .../lib_mapping/PYATS_reverse_table.rst | 36 ++ .../netutils/lib_mapping/PYATS_table.rst | 39 ++ .../lib_mapping/PYNTC_reverse_table.rst | 27 ++ .../netutils/lib_mapping/PYNTC_table.rst | 27 ++ .../lib_mapping/SCRAPLI_reverse_table.rst | 21 ++ .../netutils/lib_mapping/SCRAPLI_table.rst | 21 ++ tasks.py | 14 + 15 files changed, 1119 insertions(+), 1 deletion(-) create mode 100755 docs/source/netutils/lib_mapping/ANSIBLE_reverse_table.rst create mode 100755 docs/source/netutils/lib_mapping/ANSIBLE_table.rst create mode 100755 docs/source/netutils/lib_mapping/NAPALM_reverse_table.rst create mode 100755 docs/source/netutils/lib_mapping/NAPALM_table.rst create mode 100755 docs/source/netutils/lib_mapping/NTCTEMPLATES_reverse_table.rst create mode 100755 docs/source/netutils/lib_mapping/NTCTEMPLATES_table.rst create mode 100755 docs/source/netutils/lib_mapping/PYATS_reverse_table.rst create mode 100755 docs/source/netutils/lib_mapping/PYATS_table.rst create mode 100755 docs/source/netutils/lib_mapping/PYNTC_reverse_table.rst create mode 100755 docs/source/netutils/lib_mapping/PYNTC_table.rst create mode 100755 
docs/source/netutils/lib_mapping/SCRAPLI_reverse_table.rst create mode 100755 docs/source/netutils/lib_mapping/SCRAPLI_table.rst diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 88d06660..021f53d0 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -32,6 +32,19 @@ jobs: run: "poetry run invoke bandit" needs: - "black" + mypy: + runs-on: "ubuntu-20.04" + env: + INVOKE_LOCAL: "True" + steps: + - name: "Check out repository code" + uses: "actions/checkout@v2" + - name: "Setup environment" + uses: "networktocode/gh-action-setup-poetry-environment@v2" + - name: "Type-Hints: mypy" + run: "poetry run invoke mypy" + needs: + - "black" pydocstyle: runs-on: "ubuntu-20.04" env: @@ -103,6 +116,7 @@ jobs: PYTHON_VER=${{ env.PYTHON_VER }} needs: - "bandit" + - "mypy" - "pydocstyle" - "flake8" - "yamllint" diff --git a/README.md b/README.md index 5688826a..1d0cc596 100644 --- a/README.md +++ b/README.md @@ -152,7 +152,7 @@ Except for unit tests, testing is only supported on Python 3.7. The project is packaged with a light development environment based on `docker-compose` to help with the local development of the project and to run tests within TravisCI. The project is following Network to Code software development guidelines and are leveraging the following: -- Black, Pylint, Bandit, flake8, and pydocstyle for Python linting and formatting. +- Black, Pylint, Bandit, Mypy, flake8, and pydocstyle for Python linting and formatting. - pytest, coverage, and unittest for unit tests. There are a number of things that are required in order to have a successful PR. @@ -219,6 +219,7 @@ Each command can be executed with `invoke `. Each command also has its black Run black to check that Python files adhere to its style standards. coverage Run the coverage report against pytest. flake8 Run flake8 to check that Python files adhere to its style standards. + mypy Run mypy to validate typing-hints. pylint Run pylint code analysis. 
pydocstyle Run pydocstyle to validate docstring formatting adheres to NTC defined standards. pytest Run pytest for the specified name and Python version. diff --git a/docs/source/netutils/lib_mapping/ANSIBLE_reverse_table.rst b/docs/source/netutils/lib_mapping/ANSIBLE_reverse_table.rst new file mode 100755 index 00000000..01b2482c --- /dev/null +++ b/docs/source/netutils/lib_mapping/ANSIBLE_reverse_table.rst @@ -0,0 +1,78 @@ +.. raw:: html + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NORMALIZEDANSIBLE
arista_eosarista.eos.eos
ciena_saosciena.saos6.saos6
cisco_asacisco.asa.asa
cisco_ioscisco.ios.ios
cisco_xrcisco.iosxr.iosxr
cisco_nxoscisco.nxos.nxos
huaweicommunity.network.ce
dell_os6dellemc.os6.os6
dell_os9dellemc.os9.os9
dell_os10dellemc.os10.os10
ericsson_iposcommunity.network.eric_eccli
extreme_exoscommunity.network.exos
extreme_netironcommunity.network.ironware
extreme_noscommunity.network.nos
extreme_slxcommunity.network.slxos
extreme_vspcommunity.network.voss
juniper_junosjunipernetworks.junos.junos
lenovo_cnoscommunity.network.cnos
lenovo_enoscommunity.network.enos
mikrotik_routeroscommunity.network.routeros
nokia_sroscommunity.network.sros
pluribuscommunity.network.netvisor
ruckus_icxcommunity.network.icx
vyosvyos.vyos.vyos
\ No newline at end of file diff --git a/docs/source/netutils/lib_mapping/ANSIBLE_table.rst b/docs/source/netutils/lib_mapping/ANSIBLE_table.rst new file mode 100755 index 00000000..d5fb3069 --- /dev/null +++ b/docs/source/netutils/lib_mapping/ANSIBLE_table.rst @@ -0,0 +1,78 @@ +.. raw:: html + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ANSIBLENORMALIZED
arista.eos.eosarista_eos
ciena.saos6.saos6ciena_saos
cisco.asa.asacisco_asa
cisco.ios.ioscisco_ios
cisco.iosxr.iosxrcisco_xr
cisco.nxos.nxoscisco_nxos
community.network.cehuawei
dellemc.os6.os6dell_os6
dellemc.os9.os9dell_os9
dellemc.os10.os10dell_os10
community.network.eric_eccliericsson_ipos
community.network.exosextreme_exos
community.network.ironwareextreme_netiron
community.network.nosextreme_nos
community.network.slxosextreme_slx
community.network.vossextreme_vsp
junipernetworks.junos.junosjuniper_junos
community.network.cnoslenovo_cnos
community.network.enoslenovo_enos
community.network.routerosmikrotik_routeros
community.network.netvisorpluribus
community.network.icxruckus_icx
community.network.srosnokia_sros
vyos.vyos.vyosvyos
\ No newline at end of file diff --git a/docs/source/netutils/lib_mapping/NAPALM_reverse_table.rst b/docs/source/netutils/lib_mapping/NAPALM_reverse_table.rst new file mode 100755 index 00000000..b5513e2c --- /dev/null +++ b/docs/source/netutils/lib_mapping/NAPALM_reverse_table.rst @@ -0,0 +1,42 @@ +.. raw:: html + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NORMALIZEDNAPALM
arista_eoseos
brocade_vyosvyos
cisco_asaasa
cisco_iosios
cisco_nxosnxos
cisco_xriosxr
cisco_wlccisco_wlc_ssh
fortinetfortios
huawei_vrphuawei
juniper_junosjunos
paloalto_panospanos
nokia_srossros
\ No newline at end of file diff --git a/docs/source/netutils/lib_mapping/NAPALM_table.rst b/docs/source/netutils/lib_mapping/NAPALM_table.rst new file mode 100755 index 00000000..dfb5f475 --- /dev/null +++ b/docs/source/netutils/lib_mapping/NAPALM_table.rst @@ -0,0 +1,42 @@ +.. raw:: html + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NAPALMNORMALIZED
asacisco_asa
cisco_wlc_sshcisco_wlc
eosarista_eos
fortiosfortinet
huaweihuawei_vrp
ioscisco_ios
nxoscisco_nxos
iosxrcisco_xr
junosjuniper_junos
panospaloalto_panos
srosnokia_sros
vyosbrocade_vyos
\ No newline at end of file diff --git a/docs/source/netutils/lib_mapping/NTCTEMPLATES_reverse_table.rst b/docs/source/netutils/lib_mapping/NTCTEMPLATES_reverse_table.rst new file mode 100755 index 00000000..7da16de3 --- /dev/null +++ b/docs/source/netutils/lib_mapping/NTCTEMPLATES_reverse_table.rst @@ -0,0 +1,339 @@ +.. raw:: html + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NORMALIZEDNTCTEMPLATES
a10a10
accedianaccedian
adtran_osadtran_os
alcatel_aosalcatel_aos
alcatel_srosalcatel_sros
apresia_aeosapresia_aeos
arista_eosarista_eos
aruba_osaruba_os
aruba_osswitcharuba_osswitch
aruba_procurvearuba_procurve
avaya_ersavaya_ers
avaya_vspavaya_vsp
allied_telesis_awplusallied_telesis_awplus
broadcom_icosbroadcom_icos
brocade_fosbrocade_fos
brocade_fastironbrocade_fastiron
brocade_netironbrocade_netiron
brocade_nosbrocade_nos
brocade_vdxbrocade_vdx
brocade_vyosbrocade_vyos
checkpoint_gaiacheckpoint_gaia
calix_b6calix_b6
centec_oscentec_os
ciena_saosciena_saos
cisco_asacisco_asa
cisco_ftdcisco_ftd
cisco_ioscisco_ios
cisco_nxoscisco_nxos
cisco_s300cisco_s300
cisco_tpcisco_tp
cisco_wlccisco_wlc
cisco_xecisco_xe
cisco_xrcisco_xr
cloudgenix_ioncloudgenix_ion
coriantcoriant
dell_dnos9dell_dnos9
dell_force10dell_force10
dell_os6dell_os6
dell_os9dell_os9
dell_os10dell_os10
dell_powerconnectdell_powerconnect
dell_isilondell_isilon
dlink_dsdlink_ds
endaceendace
eltexeltex
eltex_esreltex_esr
enterasysenterasys
ericsson_iposericsson_ipos
extremeextreme
extreme_ersextreme_ers
extreme_exosextreme_exos
extreme_netironextreme_netiron
extreme_nosextreme_nos
extreme_slxextreme_slx
extreme_vdxextreme_vdx
extreme_vspextreme_vsp
extreme_wingextreme_wing
f5_ltmf5_ltm
f5_tmshf5_tmsh
f5_linuxf5_linux
flexvnfflexvnf
fortinetfortinet
genericgeneric
generic_termservergeneric_termserver
hp_comwarehp_comware
hp_procurvehp_procurve
huaweihuawei
huawei_smartaxhuawei_smartax
huawei_olthuawei_olt
huawei_vrpv8huawei_vrpv8
ipinfusion_ocnosipinfusion_ocnos
juniperjuniper
juniper_junosjuniper_junos
juniper_screenosjuniper_screenos
keymilekeymile
keymile_noskeymile_nos
linuxlinux
mikrotik_routerosmikrotik_routeros
mikrotik_switchosmikrotik_switchos
mellanoxmellanox
mellanox_mlnxosmellanox_mlnxos
mrv_lxmrv_lx
mrv_optiswitchmrv_optiswitch
netapp_cdotnetapp_cdot
netgear_prosafenetgear_prosafe
netscalernetscaler
nokia_srosnokia_sros
oneaccess_oneosoneaccess_oneos
ovs_linuxovs_linux
paloalto_panospaloalto_panos
pluribuspluribus
quanta_meshquanta_mesh
rad_etxrad_etx
raisecom_roapraisecom_roap
ruckus_fastironruckus_fastiron
ruijie_osruijie_os
sixwind_ossixwind_os
sophos_sfossophos_sfos
tplink_jetstreamtplink_jetstream
ubiquiti_edgeubiquiti_edge
ubiquiti_edgerouterubiquiti_edgerouter
ubiquiti_edgeswitchubiquiti_edgeswitch
ubiquiti_unifiswitchubiquiti_unifiswitch
vyatta_vyosvyatta_vyos
vyosvyos
watchguard_firewarewatchguard_fireware
zte_zxroszte_zxros
yamahayamaha
watchguard_fireboxwatchguard_firebox
huawei_vrphuawei_vrp
vmware_nsxvvmware_nsxv
\ No newline at end of file diff --git a/docs/source/netutils/lib_mapping/NTCTEMPLATES_table.rst b/docs/source/netutils/lib_mapping/NTCTEMPLATES_table.rst new file mode 100755 index 00000000..35ab51ff --- /dev/null +++ b/docs/source/netutils/lib_mapping/NTCTEMPLATES_table.rst @@ -0,0 +1,339 @@ +.. raw:: html + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NTCTEMPLATESNORMALIZED
a10a10
accedianaccedian
adtran_osadtran_os
alcatel_aosalcatel_aos
alcatel_srosalcatel_sros
apresia_aeosapresia_aeos
arista_eosarista_eos
aruba_osaruba_os
aruba_osswitcharuba_osswitch
aruba_procurvearuba_procurve
avaya_ersavaya_ers
avaya_vspavaya_vsp
allied_telesis_awplusallied_telesis_awplus
broadcom_icosbroadcom_icos
brocade_fosbrocade_fos
brocade_fastironbrocade_fastiron
brocade_netironbrocade_netiron
brocade_nosbrocade_nos
brocade_vdxbrocade_vdx
brocade_vyosbrocade_vyos
checkpoint_gaiacheckpoint_gaia
calix_b6calix_b6
centec_oscentec_os
ciena_saosciena_saos
cisco_asacisco_asa
cisco_ftdcisco_ftd
cisco_ioscisco_ios
cisco_nxoscisco_nxos
cisco_s300cisco_s300
cisco_tpcisco_tp
cisco_wlccisco_wlc
cisco_xecisco_xe
cisco_xrcisco_xr
cloudgenix_ioncloudgenix_ion
coriantcoriant
dell_dnos9dell_dnos9
dell_force10dell_force10
dell_os6dell_os6
dell_os9dell_os9
dell_os10dell_os10
dell_powerconnectdell_powerconnect
dell_isilondell_isilon
dlink_dsdlink_ds
endaceendace
eltexeltex
eltex_esreltex_esr
enterasysenterasys
ericsson_iposericsson_ipos
extremeextreme
extreme_ersextreme_ers
extreme_exosextreme_exos
extreme_netironextreme_netiron
extreme_nosextreme_nos
extreme_slxextreme_slx
extreme_vdxextreme_vdx
extreme_vspextreme_vsp
extreme_wingextreme_wing
f5_ltmf5_ltm
f5_tmshf5_tmsh
f5_linuxf5_linux
flexvnfflexvnf
fortinetfortinet
genericgeneric
generic_termservergeneric_termserver
hp_comwarehp_comware
hp_procurvehp_procurve
huaweihuawei
huawei_smartaxhuawei_smartax
huawei_olthuawei_olt
huawei_vrpv8huawei_vrpv8
ipinfusion_ocnosipinfusion_ocnos
juniperjuniper
juniper_junosjuniper_junos
juniper_screenosjuniper_screenos
keymilekeymile
keymile_noskeymile_nos
linuxlinux
mikrotik_routerosmikrotik_routeros
mikrotik_switchosmikrotik_switchos
mellanoxmellanox
mellanox_mlnxosmellanox_mlnxos
mrv_lxmrv_lx
mrv_optiswitchmrv_optiswitch
netapp_cdotnetapp_cdot
netgear_prosafenetgear_prosafe
netscalernetscaler
nokia_srosnokia_sros
oneaccess_oneosoneaccess_oneos
ovs_linuxovs_linux
paloalto_panospaloalto_panos
pluribuspluribus
quanta_meshquanta_mesh
rad_etxrad_etx
raisecom_roapraisecom_roap
ruckus_fastironruckus_fastiron
ruijie_osruijie_os
sixwind_ossixwind_os
sophos_sfossophos_sfos
tplink_jetstreamtplink_jetstream
ubiquiti_edgeubiquiti_edge
ubiquiti_edgerouterubiquiti_edgerouter
ubiquiti_edgeswitchubiquiti_edgeswitch
ubiquiti_unifiswitchubiquiti_unifiswitch
vyatta_vyosvyatta_vyos
vyosvyos
watchguard_firewarewatchguard_fireware
zte_zxroszte_zxros
yamahayamaha
watchguard_fireboxwatchguard_firebox
huawei_vrphuawei_vrp
vmware_nsxvvmware_nsxv
\ No newline at end of file diff --git a/docs/source/netutils/lib_mapping/PYATS_reverse_table.rst b/docs/source/netutils/lib_mapping/PYATS_reverse_table.rst new file mode 100755 index 00000000..405e1a43 --- /dev/null +++ b/docs/source/netutils/lib_mapping/PYATS_reverse_table.rst @@ -0,0 +1,36 @@ +.. raw:: html + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NORMALIZEDPYATS
cisco_asaasa
f5_tmshbigip
cisco_dnacdnac
cisco_iosiosxe
cisco_xriosxr
juniper_junosjunos
linuxlinux
cisco_nxosnxos
nokia_srossros
cisco_viptellaviptela
\ No newline at end of file diff --git a/docs/source/netutils/lib_mapping/PYATS_table.rst b/docs/source/netutils/lib_mapping/PYATS_table.rst new file mode 100755 index 00000000..1ffc20c7 --- /dev/null +++ b/docs/source/netutils/lib_mapping/PYATS_table.rst @@ -0,0 +1,39 @@ +.. raw:: html + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
PYATSNORMALIZED
asacisco_asa
bigipf5_tmsh
dnaccisco_dnac
ioscisco_ios
iosxecisco_ios
iosxrcisco_xr
junosjuniper_junos
linuxlinux
nxoscisco_nxos
srosnokia_sros
viptelacisco_viptella
\ No newline at end of file diff --git a/docs/source/netutils/lib_mapping/PYNTC_reverse_table.rst b/docs/source/netutils/lib_mapping/PYNTC_reverse_table.rst new file mode 100755 index 00000000..c6ce781d --- /dev/null +++ b/docs/source/netutils/lib_mapping/PYNTC_reverse_table.rst @@ -0,0 +1,27 @@ +.. raw:: html + + + + + + + + + + + + + + + + + + + + + + + + + +
NORMALIZEDPYNTC
cisco_asacisco_asa_ssh
arista_eosarista_eos_eapi
f5_tmshf5_tmos_icontrol
cisco_ioscisco_ios_ssh
juniper_junosjuniper_junos_netconf
cisco_nxoscisco_nxos_nxapi
cisco_wlccisco_aireos_ssh
\ No newline at end of file diff --git a/docs/source/netutils/lib_mapping/PYNTC_table.rst b/docs/source/netutils/lib_mapping/PYNTC_table.rst new file mode 100755 index 00000000..53470f21 --- /dev/null +++ b/docs/source/netutils/lib_mapping/PYNTC_table.rst @@ -0,0 +1,27 @@ +.. raw:: html + + + + + + + + + + + + + + + + + + + + + + + + + +
PYNTCNORMALIZED
cisco_asa_sshcisco_asa
arista_eos_eapiarista_eos
f5_tmos_icontrolf5_tmsh
cisco_ios_sshcisco_ios
juniper_junos_netconfjuniper_junos
cisco_nxos_nxapicisco_nxos
cisco_aireos_sshcisco_wlc
\ No newline at end of file diff --git a/docs/source/netutils/lib_mapping/SCRAPLI_reverse_table.rst b/docs/source/netutils/lib_mapping/SCRAPLI_reverse_table.rst new file mode 100755 index 00000000..cdd7f74f --- /dev/null +++ b/docs/source/netutils/lib_mapping/SCRAPLI_reverse_table.rst @@ -0,0 +1,21 @@ +.. raw:: html + + + + + + + + + + + + + + + + + + + +
NORMALIZEDSCRAPLI
cisco_ioscisco_iosxe
cisco_xrcisco_iosxr
cisco_nxoscisco_nxos
arista_eosarista_eos
juniper_junosjuniper_junos
\ No newline at end of file diff --git a/docs/source/netutils/lib_mapping/SCRAPLI_table.rst b/docs/source/netutils/lib_mapping/SCRAPLI_table.rst new file mode 100755 index 00000000..f420334b --- /dev/null +++ b/docs/source/netutils/lib_mapping/SCRAPLI_table.rst @@ -0,0 +1,21 @@ +.. raw:: html + + + + + + + + + + + + + + + + + + + +
SCRAPLINORMALIZED
cisco_iosxecisco_ios
cisco_iosxrcisco_xr
cisco_nxoscisco_nxos
arista_eosarista_eos
juniper_junosjuniper_junos
\ No newline at end of file diff --git a/tasks.py b/tasks.py index bf476002..7b2bb611 100644 --- a/tasks.py +++ b/tasks.py @@ -2,6 +2,7 @@ import os import sys from distutils.util import strtobool + from invoke import task try: @@ -210,6 +211,18 @@ def bandit(context, local=INVOKE_LOCAL): run_cmd(context, exec_cmd, local) +@task +def mypy(context, local=INVOKE_LOCAL): + """Run mypy to validate typing-hints. + + Args: + context (obj): Used to run specific commands + local (bool): Define as `True` to execute locally + """ + exec_cmd = "mypy ./netutils" + run_cmd(context, exec_cmd, local) + + @task def cli(context): """Enter the image to perform troubleshooting or dev work. @@ -235,6 +248,7 @@ def tests(context, local=INVOKE_LOCAL): yamllint(context, local) pydocstyle(context, local) bandit(context, local) + mypy(context, local) pytest(context, local) print("All tests have passed!") From 7e0b92428f58624f5a30119b78baef304adbf0a4 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Fri, 24 Jun 2022 09:00:17 -0500 Subject: [PATCH 3/5] rm unused import --- netutils/mac.py | 1 - 1 file changed, 1 deletion(-) diff --git a/netutils/mac.py b/netutils/mac.py index c78dcb8e..6ff8574d 100644 --- a/netutils/mac.py +++ b/netutils/mac.py @@ -1,7 +1,6 @@ """Functions for working with MAC addresses.""" import re -import typing as t from functools import wraps from .constants import MAC_CREATE, MAC_REGEX From 6327e1310a4cdf693494dfe00ca7c38003c90ab6 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Fri, 24 Jun 2022 09:15:18 -0500 Subject: [PATCH 4/5] fix contributing end-line number --- docs/source/contributing/index.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/contributing/index.rst b/docs/source/contributing/index.rst index 5c88e01d..83655f04 100644 --- a/docs/source/contributing/index.rst +++ b/docs/source/contributing/index.rst @@ -4,5 +4,5 @@ Contributing .. 
mdinclude:: ../../../README.md :start-line: 148 - :end-line: 233 + :end-line: 235 From eba1a0ab9d641ff2ae441015c2aad79fedc912b3 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Fri, 24 Jun 2022 09:24:46 -0500 Subject: [PATCH 5/5] fix contributing end-line number --- docs/source/contributing/index.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/contributing/index.rst b/docs/source/contributing/index.rst index 83655f04..3aec60de 100644 --- a/docs/source/contributing/index.rst +++ b/docs/source/contributing/index.rst @@ -4,5 +4,5 @@ Contributing .. mdinclude:: ../../../README.md :start-line: 148 - :end-line: 235 + :end-line: 234