diff --git a/.github/workflows/check.yaml b/.github/workflows/check.yaml
index 062fe6f..61939a9 100644
--- a/.github/workflows/check.yaml
+++ b/.github/workflows/check.yaml
@@ -25,7 +25,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["3.8", "3.10"]
+        python-version: ['3.8', '3.10']
     runs-on: ubuntu-22.04
     steps:
       - uses: actions/checkout@v4
@@ -52,7 +52,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["3.8", "3.10"]
+        python-version: ['3.8', '3.10']
     runs-on: ubuntu-22.04
     steps:
       - uses: actions/checkout@v4
diff --git a/actions/ack-reboot.py b/actions/ack-reboot.py
index 198f171..9175cbd 100755
--- a/actions/ack-reboot.py
+++ b/actions/ack-reboot.py
@@ -26,8 +26,6 @@
 if hookenv.config("reboot"):
     reboot_time = nrpe_helpers.set_known_reboot_time()
     services.get_manager().reconfigure_services("nrpe-config")
-    hookenv.action_set(
-        {"message": "known reboot time updated to {}".format(reboot_time)}
-    )
+    hookenv.action_set({"message": "known reboot time updated to {}".format(reboot_time)})
 else:
     hookenv.action_fail("reboot check is not enabled, this action has no effect")
diff --git a/files/plugins/check_arp_cache.py b/files/plugins/check_arp_cache.py
index be11e02..9d8c2bd 100755
--- a/files/plugins/check_arp_cache.py
+++ b/files/plugins/check_arp_cache.py
@@ -45,14 +45,10 @@ def check_arp_cache(warn, crit):
 
     crit_threshold = gc_thresh3 * crit / 100
     if arp_cache >= crit_threshold:
-        message = "CRITICAL: arp cache is more than {} of limit, {}".format(
-            crit, extra_info
-        )
+        message = "CRITICAL: arp cache is more than {} of limit, {}".format(crit, extra_info)
         raise CriticalError(message)
     if arp_cache >= warn_threshold:
-        message = "WARNING: arp cache is more than {} of limit, {}".format(
-            warn, extra_info
-        )
+        message = "WARNING: arp cache is more than {} of limit, {}".format(warn, extra_info)
         raise WarnError(message)
 
     print("OK: arp cache is healthy: {}".format(extra_info))
diff --git a/files/plugins/check_cis_audit.py b/files/plugins/check_cis_audit.py
index f33d648..e9ca137 100755
--- a/files/plugins/check_cis_audit.py
+++ b/files/plugins/check_cis_audit.py
@@ -125,9 +125,7 @@ def check_cis_audit(target_profile, max_age, tailoring, warning, critical):
             msg.format("CRITICAL", score, critical, warning, profile, results_filepath)
         )
     if score < warning:
-        raise WarnError(
-            msg.format("WARNING", score, critical, warning, profile, results_filepath)
-        )
+        raise WarnError(msg.format("WARNING", score, critical, warning, profile, results_filepath))
 
     if target_profile != "" and target_profile != profile:
         msg = (
@@ -170,7 +168,7 @@
         "-t",
         action="store_true",
         default=False,
-        help="Whether is using the default tailoring file or not."
+ help="Whether is using the default tailoring file or not.", ) parser.add_argument( "--warn", diff --git a/files/plugins/check_lacp_bond.py b/files/plugins/check_lacp_bond.py index ac64b4f..7b5f45d 100755 --- a/files/plugins/check_lacp_bond.py +++ b/files/plugins/check_lacp_bond.py @@ -115,10 +115,7 @@ def parse_args(): if not args.iface: ifaces = map(os.path.basename, glob.glob("/sys/class/net/bond?")) - print( - "UNKNOWN: Please specify one of these bond " - "ifaces: {}".format(",".join(ifaces)) - ) + print("UNKNOWN: Please specify one of these bond " "ifaces: {}".format(",".join(ifaces))) sys.exit(1) return args diff --git a/files/plugins/check_netlinks.py b/files/plugins/check_netlinks.py index 35c7755..a4a9a97 100755 --- a/files/plugins/check_netlinks.py +++ b/files/plugins/check_netlinks.py @@ -58,15 +58,12 @@ def check_iface(iface, skiperror, crit_thr): continue else: raise CriticalError( - "CRITICAL: {} ({} returns " - "invalid argument)".format(iface, metric_key) + "CRITICAL: {} ({} returns " "invalid argument)".format(iface, metric_key) ) if metric_key == "operstate" and metric_value != "up": if metric_value != crit_thr["operstate"]: - raise CriticalError( - "CRITICAL: {} link state is {}".format(iface, metric_value) - ) + raise CriticalError("CRITICAL: {} link state is {}".format(iface, metric_value)) if metric_value != crit_thr[metric_key]: raise CriticalError( @@ -79,8 +76,7 @@ def check_iface(iface, skiperror, crit_thr): crit_thr[metric] = "n/a" crit_thr["iface"] = iface print( - "OK: {iface} matches thresholds: " - "o:{operstate}, m:{mtu}, s:{speed}".format(**crit_thr) + "OK: {iface} matches thresholds: " "o:{operstate}, m:{mtu}, s:{speed}".format(**crit_thr) ) @@ -107,9 +103,7 @@ def parse_args(): type=str, help="operstate: up, down, unknown (default: up)", ) - parser.add_argument( - "--mtu", "-m", default="1500", type=str, help="mtu size (default: 1500)" - ) + parser.add_argument("--mtu", "-m", default="1500", type=str, help="mtu size (default: 1500)") parser.add_argument( "--speed", "-s", @@ -121,10 +115,7 @@ def parse_args(): if not args.iface: ifaces = map(os.path.basename, glob.glob("/sys/class/net/*")) - print( - "UNKNOWN: Please specify one of these " - "ifaces: {}".format(",".join(ifaces)) - ) + print("UNKNOWN: Please specify one of these " "ifaces: {}".format(",".join(ifaces))) sys.exit(1) return args diff --git a/files/plugins/check_reboot.py b/files/plugins/check_reboot.py index dd8e134..a453859 100755 --- a/files/plugins/check_reboot.py +++ b/files/plugins/check_reboot.py @@ -77,9 +77,7 @@ def main(): parser.add_argument( "known_reboot_time", type=convert_time, - help="in format {}, same as output from `uptime --since`".format( - UPTIME_FORMAT_HUMAN - ), + help="in format {}, same as output from `uptime --since`".format(UPTIME_FORMAT_HUMAN), ) args = parser.parse_args() @@ -90,13 +88,9 @@ def main(): # `uptime --since` output maybe flapping because ntp is changing sytem time # here we allow 5s gap to avoid fake alert if delta.total_seconds() > 5.0: - nagios_exit( - NAGIOS_STATUS_CRITICAL, "unknown reboot at {}".format(current_reboot_time) - ) + nagios_exit(NAGIOS_STATUS_CRITICAL, "unknown reboot at {}".format(current_reboot_time)) else: - nagios_exit( - NAGIOS_STATUS_OK, "system is up since {}".format(current_reboot_time) - ) + nagios_exit(NAGIOS_STATUS_OK, "system is up since {}".format(current_reboot_time)) if __name__ == "__main__": diff --git a/files/plugins/check_ro_filesystem.py b/files/plugins/check_ro_filesystem.py index d251b21..591e9c6 100755 --- 
+++ b/files/plugins/check_ro_filesystem.py
@@ -44,9 +44,7 @@ def check_ro_filesystem(excludes=""):
         # if current fs matches EXCLUDE_FS then next, else check it's not readonly
         if fs in EXCLUDE_FS:
             continue
-        if not any(
-            mount_point.startswith(exclusion.strip()) for exclusion in exclude_mounts
-        ):
+        if not any(mount_point.startswith(exclusion.strip()) for exclusion in exclude_mounts):
             mount_options = mount_options.split(",")
             if "ro" in mount_options:
                 ro_filesystems.append(mount_point)
diff --git a/files/plugins/check_status_file.py b/files/plugins/check_status_file.py
index 1e15bbe..13fb47e 100755
--- a/files/plugins/check_status_file.py
+++ b/files/plugins/check_status_file.py
@@ -36,9 +36,7 @@ def parse_args():
         default="WARNING",
         help="String indicating warning status",
     )
-    parser.add_argument(
-        "-o", "--ok-text", default="OK", help="String indicating OK status"
-    )
+    parser.add_argument("-o", "--ok-text", default="OK", help="String indicating OK status")
     parser.add_argument(
         "-u",
         "--unknown-text",
diff --git a/files/plugins/check_systemd.py b/files/plugins/check_systemd.py
index ae8b601..81e99a3 100755
--- a/files/plugins/check_systemd.py
+++ b/files/plugins/check_systemd.py
@@ -23,9 +23,7 @@
 try:
     service_unit = manager.LoadUnit(service_name)
     service_proxy = bus.get_object("org.freedesktop.systemd1", str(service_unit))
-    service = dbus.Interface(
-        service_proxy, dbus_interface="org.freedesktop.systemd1.Unit"
-    )
+    service = dbus.Interface(service_proxy, dbus_interface="org.freedesktop.systemd1.Unit")
     service_res = service_proxy.Get(
         "org.freedesktop.systemd1.Unit",
         "SubState",
diff --git a/files/plugins/check_systemd_scopes.py b/files/plugins/check_systemd_scopes.py
index 2763d7c..1efec85 100755
--- a/files/plugins/check_systemd_scopes.py
+++ b/files/plugins/check_systemd_scopes.py
@@ -51,9 +51,7 @@ def count_systemd_scopes_state(state):
         )
     except ValueError:
         # ideally, this should never occur
-        raise UnknownError(
-            "UNKNOWN: Counting systemd abandoned state scopes returns non-integer"
-        )
+        raise UnknownError("UNKNOWN: Counting systemd abandoned state scopes returns non-integer")
 
 
 def check_systemd_scopes(args):
@@ -65,23 +63,17 @@
             "CRITICAL: System has {} systemd scopes in error state".format(error_count)
         )
     elif error_count >= args.warn_error:
-        raise WarnError(
-            "WARNING: System has {} systemd scopes in error state".format(error_count)
-        )
+        raise WarnError("WARNING: System has {} systemd scopes in error state".format(error_count))
 
     # Check scopes in 'abandoned' state
     abandoned_count = count_systemd_scopes_state("abandoned")
     if error_count >= args.crit_abandoned:
         raise CriticalError(
-            "CRITICAL: System has {} systemd scopes in abandoned state".format(
-                error_count
-            )
+            "CRITICAL: System has {} systemd scopes in abandoned state".format(error_count)
         )
     elif error_count >= args.warn_abandoned:
         raise WarnError(
-            "WARNING: System has {} systemd scopes in abandoned state".format(
-                error_count
-            )
+            "WARNING: System has {} systemd scopes in abandoned state".format(error_count)
         )
 
     # With no nagios errors raised, we are in an "OK" state
@@ -107,9 +99,7 @@ def positive_int(value):
 
 def parse_args(args=None):
     """Parse command-line options."""
-    parser = ArgumentParser(
-        description=__doc__, formatter_class=ArgumentDefaultsHelpFormatter
-    )
+    parser = ArgumentParser(description=__doc__, formatter_class=ArgumentDefaultsHelpFormatter)
 
     # Thresholds for the scopes in 'error' state
     parser.add_argument(
diff --git a/files/plugins/check_xfs_errors.py b/files/plugins/check_xfs_errors.py
index 2450511..bb29043 100755
--- a/files/plugins/check_xfs_errors.py
+++ b/files/plugins/check_xfs_errors.py
@@ -35,9 +35,7 @@
 
 # dmesg -T formatted timestamps are inside [], so we need to add them
 datetime_delta = datetime.now() - timedelta(minutes=check_delta)
-recent_logs = [
-    i for i in err_results if datetime.strptime(i[1:25], "%c") >= datetime_delta
-]
+recent_logs = [i for i in err_results if datetime.strptime(i[1:25], "%c") >= datetime_delta]
 
 if recent_logs:
     print("CRITICAL: Recent XFS errors in kern.log." + "\n" + "{}".format(recent_logs))
diff --git a/files/plugins/cron_cis_audit.py b/files/plugins/cron_cis_audit.py
index f1d1f1a..51d872b 100755
--- a/files/plugins/cron_cis_audit.py
+++ b/files/plugins/cron_cis_audit.py
@@ -126,8 +126,7 @@ def run_audit(profile):
     try:
         print("Run cis-audit: {}".format(cmd_run_audit), flush=True)
         subprocess.run(
-            cmd_run_audit, stdout=subprocess.DEVNULL,
-            stderr=subprocess.DEVNULL, check=True
+            cmd_run_audit, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, check=True
         )
         _set_permissions()
     except subprocess.CalledProcessError as e:
@@ -170,7 +169,7 @@
         "-t",
         action="store_true",
         default=False,
-        help="Whether is using the default tailoring file or not."
+        help="Whether is using the default tailoring file or not.",
     )
 
     args = parser.parse_args(args)
@@ -188,9 +187,7 @@ def main():
     # folder does not exist - usg-cisbenchmark likely not installed
     if not os.path.exists(AUDIT_FOLDER) and DISTRO_VERSION < 20:
         raise FileNotFoundError(
-            "Folder {} does not exist, is usg-cisbenchmark installed?".format(
-                AUDIT_FOLDER
-            )
+            "Folder {} does not exist, is usg-cisbenchmark installed?".format(AUDIT_FOLDER)
         )
 
     # Ensure a single instance via a simple pidfile
diff --git a/hooks/nrpe_helpers.py b/hooks/nrpe_helpers.py
index 9504a27..97bc966 100644
--- a/hooks/nrpe_helpers.py
+++ b/hooks/nrpe_helpers.py
@@ -172,9 +172,7 @@ def get_ingress_address(binding, external=False):
         network_info = hookenv.network_get(binding)
         if network_info is not None and "ingress-addresses" in network_info:
             try:
-                ip_address = network_info["bind-addresses"][0]["addresses"][0][
-                    "address"
-                ]
+                ip_address = network_info["bind-addresses"][0]["addresses"][0]["address"]
                 hookenv.log("Using ingress-addresses, found %s" % ip_address)
             except KeyError:
                 hookenv.log("Using primary-addresses")
@@ -182,9 +180,7 @@
 
     except (NotImplementedError, FileNotFoundError) as e:
         hookenv.log(
-            "Unable to determine inbound IP address for binding {} with {}".format(
-                binding, e
-            ),
+            "Unable to determine inbound IP address for binding {} with {}".format(binding, e),
             level=hookenv.ERROR,
         )
 
@@ -392,10 +388,7 @@
     def __init__(self):
        """Set export_nagios_definitions."""
        self["export_nagios_definitions"] = hookenv.config("export_nagios_definitions")
-        if (
-            hookenv.config("nagios_master")
-            and hookenv.config("nagios_master") != "None"
-        ):
+        if hookenv.config("nagios_master") and hookenv.config("nagios_master") != "None":
            self["export_nagios_definitions"] = True
 
     def is_ready(self):
@@ -414,13 +407,9 @@ def __init__(self, checktype, check_opts, monitor_src):
         plugin_path = "/usr/lib/nagios/plugins"
         if checktype == "procrunning":
             self["cmd_exec"] = plugin_path + "/check_procs"
-            self["description"] = "Check process {executable} is running".format(
-                **check_opts
-            )
+            self["description"] = "Check process {executable} is running".format(**check_opts)
running".format(**check_opts) self["cmd_name"] = "check_proc_" + check_opts["executable"] - self["cmd_params"] = "-w {min} -c {max} -C {executable}".format( - **check_opts - ) + self["cmd_params"] = "-w {min} -c {max} -C {executable}".format(**check_opts) elif checktype == "processcount": self["cmd_exec"] = plugin_path + "/check_procs" self["description"] = "Check process count" @@ -431,9 +420,7 @@ def __init__(self, checktype, check_opts, monitor_src): self["cmd_params"] = "-c {max}".format(**check_opts) elif checktype == "disk": self["cmd_exec"] = plugin_path + "/check_disk" - self["description"] = "Check disk usage " + check_opts["path"].replace( - "/", "_" - ) + self["description"] = "Check disk usage " + check_opts["path"].replace("/", "_") self["cmd_name"] = "check_disk_principal" self["cmd_params"] = "-w 20 -c 10 -p " + check_opts["path"] elif checktype == "custom": @@ -546,17 +533,13 @@ def __init__(self): # noqa: C901 { "description": "ARP cache entries", "cmd_name": "check_arp_cache", - "cmd_exec": os.path.join( - local_plugin_dir, "check_arp_cache.py" - ), + "cmd_exec": os.path.join(local_plugin_dir, "check_arp_cache.py"), "cmd_params": hookenv.config("arp_cache"), }, { "description": "Readonly filesystems", "cmd_name": "check_ro_filesystem", - "cmd_exec": os.path.join( - local_plugin_dir, "check_ro_filesystem.py" - ), + "cmd_exec": os.path.join(local_plugin_dir, "check_ro_filesystem.py"), "cmd_params": ( "-e {}".format(hookenv.config("ro_filesystem_excludes")) if hookenv.config("ro_filesystem_excludes") @@ -591,9 +574,7 @@ def __init__(self): # noqa: C901 "cmd_name": "check_space{}".format(check_path), "cmd_exec": pkg_plugin_dir + "check_disk", "cmd_params": ( - cmd_params - if space_check["check"].strip() != "disabled" - else "" + cmd_params if space_check["check"].strip() != "disabled" else "" ), } ) @@ -632,9 +613,7 @@ def __init__(self): # noqa: C901 checks.append(netlink_check) # Checking if CPU governor is supported by the system and add nrpe check - cpu_governor_supported = glob.glob( - "/sys/devices/system/cpu/cpu*/cpufreq/scaling_governor" - ) + cpu_governor_supported = glob.glob("/sys/devices/system/cpu/cpu*/cpufreq/scaling_governor") cpu_governor_setting = hookenv.config("cpu_governor") if not cpu_governor_setting: principal_unit = hookenv.principal_unit() @@ -672,9 +651,7 @@ def __init__(self): # noqa: C901 if enable_check_reboot: # read from db if exist, or set current uptime in db and use it known_reboot_time = get_known_reboot_time() or set_known_reboot_time() - check_reboot_context = get_check_reboot_context( - known_reboot_time=known_reboot_time - ) + check_reboot_context = get_check_reboot_context(known_reboot_time=known_reboot_time) else: # set to None will disable/remove this check check_reboot_context = get_check_reboot_context(known_reboot_time=None) @@ -692,9 +669,7 @@ def __init__(self): # noqa: C901 md = hookenv._metadata_unit(principal_unit) if md and md.pop("name", None) == "nagios": sub_postfix = "sub" - nrpe_config_sub_tmpl = "/etc/nagios/nrpe.d/{}{}*.cfg".format( - "{}", sub_postfix_sep - ) + nrpe_config_sub_tmpl = "/etc/nagios/nrpe.d/{}{}*.cfg".format("{}", sub_postfix_sep) nrpe_config_tmpl = "/etc/nagios/nrpe.d/{}.cfg" disable_system_checks = hookenv.config("disable_system_checks") for check in checks: @@ -707,17 +682,13 @@ def __init__(self): # noqa: C901 check["description"] += " (sub)" if sub_postfix: check["cmd_name"] += sub_postfix_sep + sub_postfix - check["cmd_params"] = ( - check["cmd_params"] if not disable_system_checks else "" - ) + 
check["cmd_params"] = check["cmd_params"] if not disable_system_checks else "" self["checks"].append(check) def _get_proc_thresholds(self): """Return suitable processor thresholds.""" if hookenv.config("procs") == "auto": - proc_thresholds = "-k -w {} -c {}".format( - 25 * self.procs + 100, 50 * self.procs + 100 - ) + proc_thresholds = "-k -w {} -c {}".format(25 * self.procs + 100, 50 * self.procs + 100) else: proc_thresholds = hookenv.config("procs") return proc_thresholds @@ -786,9 +757,7 @@ def parse_netlinks(self, ifaces): matches = [target] iface_devs = [ - target - for target in matches - if os.path.exists(iface_path.format(target)) + target for target in matches if os.path.exists(iface_path.format(target)) ] # no ifaces found; SKIP if not iface_devs: @@ -872,10 +841,7 @@ def match_cidr_to_ifaces(cidr): matches = [] for adapter in netifaces.interfaces(): ipv4_addr_structs = netifaces.ifaddresses(adapter).get(netifaces.AF_INET, []) - addrs = [ - ipaddress.IPv4Address(addr_struct["addr"]) - for addr_struct in ipv4_addr_structs - ] + addrs = [ipaddress.IPv4Address(addr_struct["addr"]) for addr_struct in ipv4_addr_structs] if any(addr in network for addr in addrs): matches.append(adapter) return matches @@ -917,9 +883,7 @@ def is_cis_misconfigured(): Return True and a message if CIS config is invalid, otherwise return False with empty message. """ - if hookenv.config("cis_audit_profile") and hookenv.config( - "cis_audit_tailoring_file" - ): + if hookenv.config("cis_audit_profile") and hookenv.config("cis_audit_tailoring_file"): return ( True, "You cannot provide both cis_audit_profile and cis_audit_tailoring_file", diff --git a/hooks/nrpe_utils.py b/hooks/nrpe_utils.py index fb2aa17..4710276 100644 --- a/hooks/nrpe_utils.py +++ b/hooks/nrpe_utils.py @@ -64,7 +64,7 @@ def install_charm_files(service_name): """Install files shipped with charm.""" # The preinst script of nagios-nrpe-server deb package will add nagios user # and create this dir as home - # ref: https://git.launchpad.net/ubuntu/+source/nagios-nrpe/tree/debian/nagios-nrpe-server.preinst#n28 # NOQA: E501 + # ref: https://git.launchpad.net/ubuntu/+source/nagios-nrpe/tree/debian/nagios-nrpe-server.preinst#n28 # noqa: W505 nagios_home = "/var/lib/nagios" # it's possible dir owner be changed to root by other process, e.g.: LP1866382 @@ -98,9 +98,7 @@ def install_charm_files(service_name): os.chmod(pkg_plugin_dir + "/nagios_plugin3.py", 0o644) if hookenv.config("export_nagios_definitions"): - shutil.copy2( - os.path.join(charm_file_dir, "default_rsync"), "/etc/default/rsync" - ) + shutil.copy2(os.path.join(charm_file_dir, "default_rsync"), "/etc/default/rsync") shutil.copy2(os.path.join(charm_file_dir, "rsyncd.conf"), "/etc/rsyncd.conf") host.mkdir("/etc/rsync-juju.d", perms=0o755) @@ -200,9 +198,7 @@ def update_nrpe_external_master_relation(service_name): principal_relation = nrpe_helpers.PrincipalRelation() for rid in hookenv.relation_ids("nrpe-external-master"): - hookenv.relation_set( - relation_id=rid, relation_settings=principal_relation.provide_data() - ) + hookenv.relation_set(relation_id=rid, relation_settings=principal_relation.provide_data()) def update_monitor_relation(service_name): @@ -210,9 +206,7 @@ def update_monitor_relation(service_name): monitor_relation = nrpe_helpers.MonitorsRelation() for rid in hookenv.relation_ids("monitors"): - hookenv.relation_set( - relation_id=rid, relation_settings=monitor_relation.provide_data() - ) + hookenv.relation_set(relation_id=rid, 
 
 
 def has_consumer():
diff --git a/hooks/services.py b/hooks/services.py
index e6df411..cdb1816 100644
--- a/hooks/services.py
+++ b/hooks/services.py
@@ -42,9 +42,7 @@ def get_manager():
                 nrpe_utils.create_host_export_fragment,
                 nrpe_utils.render_nrped_files,
                 nrpe_utils.update_cis_audit_cronjob,
-                helpers.render_template(
-                    source="nrpe.tmpl", target="/etc/nagios/nrpe.cfg"
-                ),
+                helpers.render_template(source="nrpe.tmpl", target="/etc/nagios/nrpe.cfg"),
             ],
             "provided_data": [nrpe_helpers.PrincipalRelation()],
             "ports": [hookenv.config("server_port"), "ICMP"],
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..2bbcde6
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,104 @@
+# This file is centrally managed as a template file in https://github.com/canonical/solutions-engineering-automation
+# To update the file:
+# - Edit it in the canonical/solutions-engineering-automation repository.
+# - Open a PR with the changes.
+# - When the PR merges, the soleng-terraform bot will open a PR to the target repositories with the changes.
+
+[tool.flake8]
+max-line-length = 99
+max-doc-length = 99
+max-complexity = 10
+exclude = [
+    ".git",
+    "__pycache__",
+    ".tox",
+    ".build",
+    "build",
+    "dist",
+    ".eggs",
+    "*.egg_info",
+    "venv",
+    ".venv",
+    "report",
+    "docs",
+    "lib",
+    "mod",
+    "hooks/charmhelpers",
+    "tests/charmhelpers",
+]
+select = ["E", "W", "F", "C", "N", "R", "D", "H"]
+# Ignore W503, E501 because using black creates errors with this
+# Ignore D107 Missing docstring in __init__
+# Ignore D415 Docstring first line punctuation (doesn't make sense for properties)
+# Ignore N818 Exceptions end with "Error" (not all exceptions are errors)
+# D100, D101, D102, D103: Ignore missing docstrings in tests
+ignore = ["C901", "W503", "E501", "D107", "D415", "N818", "D100", "D101", "D102", "D103", "W504"]
+per-file-ignores = ["tests/*:D100,D101,D102,D103,D104"]
+# Check for properly formatted copyright header in each file
+copyright-check = "True"
+copyright-author = "Canonical Ltd."
+copyright-regexp = "Copyright\\s\\d{4}([-,]\\d{4})*\\s+%(author)s"
+
+[tool.black]
+line-length = 99
+exclude = '''
+/(
+    | .eggs
+    | .git
+    | .tox
+    | .venv
+    | .build
+    | build
+    | lib
+    | report
+    | docs
+    | mod
+    | hooks/charmhelpers
+    | tests/charmhelpers
+)/
+'''
+
+[tool.isort]
+profile = "black"
+line_length = 99
+skip_glob = [".eggs", ".git", ".tox", ".venv", ".build", "build", "lib", "report", "mod", "hooks/charmhelpers", "tests/charmhelpers"]
+
+[tool.pylint]
+max-line-length = 99
+disable = ["E1102"]
+ignore = ['.eggs', '.git', '.tox', '.venv', '.build', 'lib', 'report', 'tests', 'docs', "mod", "hooks/charmhelpers", "tests/charmhelpers"]
+
+[tool.mypy]
+warn_unused_ignores = true
+warn_unused_configs = true
+warn_unreachable = true
+disallow_untyped_defs = true
+ignore_missing_imports = true
+no_namespace_packages = true
+exclude = ['.eggs', '.git', '.tox', '.venv', '.build', 'lib', 'report', 'tests', 'docs', "mod", "hooks/charmhelpers", "tests/charmhelpers"]
+
+[tool.codespell]
+skip = ".eggs,.tox,.git,.venv,venv,build,.build,lib,report,docs,poetry.lock,htmlcov,mod,hooks/charmhelpers,tests/charmhelpers"
+quiet-level = 3
+check-filenames = true
+ignore-words-list = "assertIn"
+
+## Ignore unsupported imports
+[[tool.mypy.overrides]]
+module = ["charmhelpers.*", "setuptools"]
+ignore_missing_imports = true
+
+[tool.coverage.run]
+relative_files = true
+source = ["."]
+omit = ["tests/**", "docs/**", "lib/**", "snap/**", "build/**", "setup.py", "mod/**", "hooks/charmhelpers/**", "tests/charmhelpers/**"]
+
+[tool.coverage.report]
+fail_under = 60
+show_missing = true
+
+[tool.coverage.html]
+directory = "tests/report/html"
+
+[tool.coverage.xml]
+output = "tests/report/coverage.xml"
diff --git a/tests/functional/tests/nrpe_tests.py b/tests/functional/tests/nrpe_tests.py
index 3e57a5d..ffbd966 100644
--- a/tests/functional/tests/nrpe_tests.py
+++ b/tests/functional/tests/nrpe_tests.py
@@ -36,9 +36,7 @@ class TestNrpe(TestBase):
     @RETRY
     def test_01_nrpe_check(self):
         """Verify nrpe check exists."""
-        logging.debug(
-            "Verify the nrpe checks are created and have the required content..."
-        )
+        logging.debug("Verify the nrpe checks are created and have the required content...")
 
         nrpe_checks = {
             "check_conntrack.cfg": "command[check_conntrack]="
@@ -53,9 +51,7 @@ def test_01_nrpe_check(self):
 
             if code != "0":
                 logging.warning(
-                    "Unable to find nrpe check {} at /etc/nagios/nrpe.d/".format(
-                        nrpe_check
-                    )
+                    "Unable to find nrpe check {} at /etc/nagios/nrpe.d/".format(nrpe_check)
                 )
                 raise model.CommandRunFailed(cmd, result)
 
@@ -99,9 +95,7 @@ def test_03_user_monitor(self):
                 },
             },
         }
-        model.set_application_config(
-            self.application_name, {"monitors": yaml.dump(user_monitors)}
-        )
+        model.set_application_config(self.application_name, {"monitors": yaml.dump(user_monitors)})
         model.block_until_all_units_idle()
 
         local_nrpe_checks = {
@@ -121,9 +115,7 @@
 
             if code != "0":
                 logging.warning(
-                    "Unable to find nrpe check {} at /etc/nagios/nrpe.d/".format(
-                        nrpe_check
-                    )
+                    "Unable to find nrpe check {} at /etc/nagios/nrpe.d/".format(nrpe_check)
                 )
                 raise model.CommandRunFailed(cmd, result)
             content = result.get("Stdout")
@@ -136,15 +128,9 @@ def test_03_user_monitor(self):
         }
 
         for nrpe_check in remote_nrpe_checks:
-            logging.info(
-                "Checking content of '{}' nrpe command in nagios unit".format(
-                    nrpe_check
-                )
-            )
+            logging.info("Checking content of '{}' nrpe command in nagios unit".format(nrpe_check))
             cmd = "cat /etc/nagios3/conf.d/commands/" + nrpe_check
-            nagios_lead_unit_name = model.get_lead_unit_name(
-                "nagios", model_name=self.model_name
-            )
+            nagios_lead_unit_name = model.get_lead_unit_name("nagios", model_name=self.model_name)
             result = model.run_on_unit(nagios_lead_unit_name, cmd)
             code = result.get("Code")
 
@@ -257,11 +243,7 @@ class TestNrpeActions(TestBase):
     @RETRY
     def test_01_ack_reboot(self):
         """Test the ack-reboot action."""
-        uptime = (
-            model.run_on_leader(self.application_name, "uptime --since")
-            .get("Stdout")
-            .strip()
-        )
+        uptime = model.run_on_leader(self.application_name, "uptime --since").get("Stdout").strip()
         action = model.run_action_on_leader(self.application_name, "ack-reboot")
         message = action.data["results"].get("message")
         self.assertIsNotNone(message)
diff --git a/tests/unit/requirements.txt b/tests/unit/requirements.txt
index 4d8cb55..dd463bb 100644
--- a/tests/unit/requirements.txt
+++ b/tests/unit/requirements.txt
@@ -1,3 +1,4 @@
+setuptools
 coverage
 six
 PyYAML
diff --git a/tests/unit/test_nrpe_helpers.py b/tests/unit/test_nrpe_helpers.py
index cbb22cd..9564632 100644
--- a/tests/unit/test_nrpe_helpers.py
+++ b/tests/unit/test_nrpe_helpers.py
@@ -49,9 +49,7 @@ def test_iface_passed_in_as_cidr(self):
 
     def _run_mocked_test(self, cidr, matches, ifaces_mock, addrs_mock):
         iface_ip_tuples = list(self.mock_iface_ip_data.items())
         ifaces_mock.return_value = [t[0] for t in iface_ip_tuples]
-        addrs_mock.side_effect = [
-            {netifaces.AF_INET: [{"addr": t[1]}]} for t in iface_ip_tuples
-        ]
+        addrs_mock.side_effect = [{netifaces.AF_INET: [{"addr": t[1]}]} for t in iface_ip_tuples]
         self.assertEqual(match_cidr_to_ifaces(cidr), matches)
 
@@ -82,9 +80,7 @@ def test_get_bind_address(self, mock_network_get, mock_config):
             "egress-subnets": ["3.8.134.119/32"],
             "ingress-addresses": ["3.8.134.119"],
         }
-        self.assertEqual(
-            nrpe_helpers.get_ingress_address("mockbinding"), "172.31.29.247"
-        )
+        self.assertEqual(nrpe_helpers.get_ingress_address("mockbinding"), "172.31.29.247")
 
     @mock.patch("nrpe_helpers.hookenv.config")
     @mock.patch("nrpe_helpers.hookenv.network_get")
@@ -121,9 +117,7 @@ def test_get_public_address(self, mock_unit_get, mock_config):
         """Prove we get a public IP address for Nagios relation."""
         mock_config.return_value = "public"
         mock_unit_get.return_value = "1.2.3.4"
-        self.assertEqual(
-            nrpe_helpers.get_ingress_address("mockbinding", external=True), "1.2.3.4"
-        )
+        self.assertEqual(nrpe_helpers.get_ingress_address("mockbinding", external=True), "1.2.3.4")
 
 
 class TestCheckReboot(unittest.TestCase):
@@ -148,7 +142,7 @@ def test_get_check_reboot_context_add(self):
         """Test get_check_reboot_context will render time correctly."""
         t0 = nrpe_helpers.get_cmd_output(["uptime", "--since"])
         context = nrpe_helpers.get_check_reboot_context(known_reboot_time=t0)
-        self.assertEquals(context["cmd_params"], '"{}"'.format(t0))
+        self.assertEqual(context["cmd_params"], '"{}"'.format(t0))
 
     def test_get_check_reboot_context_remove(self):
         """Test get_check_reboot_context will render None correctly."""
@@ -310,9 +304,7 @@ def test_get_partitions_to_check(self, lock_lsblk_output):
             (mp, False),
         ]
         for partition, expected in params:
-            with self.subTest(
-                msg="Validate partition filtering", p1=partition, p2=expected
-            ):
+            with self.subTest(msg="Validate partition filtering", p1=partition, p2=expected):
                 if expected:
                     self.assertIn(partition, partitions)
                 else:
@@ -712,9 +704,7 @@ def test_cis_cmd_params_no_tailoring_file(self, mock_config):
             "-p level1_server -w 85 -c 80",
         )
         # doesn't include score
-        self.assertEqual(
-            nrpe_helpers.cis_cmd_params(include_score=False), "-p level1_server"
-        )
+        self.assertEqual(nrpe_helpers.cis_cmd_params(include_score=False), "-p level1_server")
 
     @mock.patch("nrpe_helpers.cis_tailoring_file_handler")
     @mock.patch("nrpe_helpers.hookenv.config")
@@ -727,9 +717,7 @@ def test_cis_cmd_params_no_profile(self, mock_config, mock_tailor_handler):
         }
         mock_config.side_effect = lambda key: config[key]
         # include score
-        self.assertEqual(
-            nrpe_helpers.cis_cmd_params(include_score=True), "-t -w 85 -c 80"
-        )
+        self.assertEqual(nrpe_helpers.cis_cmd_params(include_score=True), "-t -w 85 -c 80")
         # doesn't include score
         self.assertEqual(nrpe_helpers.cis_cmd_params(include_score=False), "-t")
         mock_tailor_handler.assert_called()
diff --git a/tests/unit/test_plugins_cis_audit.py b/tests/unit/test_plugins_cis_audit.py
index 207b90c..8686b92 100644
--- a/tests/unit/test_plugins_cis_audit.py
+++ b/tests/unit/test_plugins_cis_audit.py
@@ -146,9 +146,7 @@ def test_get_cis_result_age(self):
         self.assertFalse(cron_cis_audit._get_cis_result_age())
 
         # file was created when test initiated, should return 0
-        with mock.patch(
-            "files.plugins.cron_cis_audit.AUDIT_RESULT_GLOB", self.cloud_init_logfile
-        ):
+        with mock.patch("files.plugins.cron_cis_audit.AUDIT_RESULT_GLOB", self.cloud_init_logfile):
             age_in_hours = cron_cis_audit._get_cis_result_age()
             self.assertLess(
                 age_in_hours,
@@ -161,31 +159,23 @@ def test_parse_args(self, mock_stderr):
         """Test the default parsing behavior of the argument parser."""
         # test empty parameters
         args = cron_cis_audit.parse_args([])
-        self.assertEqual(
-            args, argparse.Namespace(cis_profile="", max_age=168, tailoring=False)
-        )
+        self.assertEqual(args, argparse.Namespace(cis_profile="", max_age=168, tailoring=False))
 
         # test setting parameters
         args = cron_cis_audit.parse_args(["-a 1", "-p=level2_workstation"])
         self.assertEqual(
             args,
-            argparse.Namespace(
-                cis_profile="level2_workstation", max_age=1, tailoring=False
-            ),
+            argparse.Namespace(cis_profile="level2_workstation", max_age=1, tailoring=False),
         )
 
         # test to use tailoring file
         args = cron_cis_audit.parse_args(["-t"])
-        self.assertEqual(
-            args, argparse.Namespace(cis_profile="", max_age=168, tailoring=True)
-        )
+        self.assertEqual(args, argparse.Namespace(cis_profile="", max_age=168, tailoring=True))
 
         # test setting invalid parameter
         with self.assertRaises(SystemExit):
             cron_cis_audit.parse_args(["-p=invalid-parameter-test"])
-        self.assertRegex(
-            mock_stderr.getvalue(), r"invalid choice: 'invalid-parameter-test'"
-        )
+        self.assertRegex(mock_stderr.getvalue(), r"invalid choice: 'invalid-parameter-test'")
 
         # test setting mutual exclusive parameters
         with self.assertRaises(SystemExit):
@@ -209,9 +199,7 @@ def test_main_raise_exception(self):
     @mock.patch("grp.getgrnam")
     def test_set_permissions(self, mock_grp, mock_chown, mock_chmod, mock_glob):
         """Test if _set_permissions changes the permissions as expected."""
-        mock_grp.return_value = grp.struct_group(
-            ("mockgroup", "mockpasswd", "1000", "mockuser")
-        )
+        mock_grp.return_value = grp.struct_group(("mockgroup", "mockpasswd", "1000", "mockuser"))
         cron_cis_audit._set_permissions()
         mock_chown.assert_has_calls(
             [
@@ -367,9 +355,7 @@ def test_get_audit_result_filepath_found(self):
             self.bionic_audit_result_glob,
         ):
             audit_result_filepath = check_cis_audit.get_audit_result_filepath()
-            expected = os.path.join(
-                self.audit_result_folder, "cis-testfile2-results.xml"
-            )
+            expected = os.path.join(self.audit_result_folder, "cis-testfile2-results.xml")
             self.assertEqual(audit_result_filepath, expected)
         # check focal
         with mock.patch(
@@ -377,9 +363,7 @@ def test_get_audit_result_filepath_found(self):
             self.focal_audit_result_glob,
         ):
             audit_result_filepath = check_cis_audit.get_audit_result_filepath()
-            expected = os.path.join(
-                self.audit_result_folder, "usg-results-testfile2.123.xml"
-            )
+            expected = os.path.join(self.audit_result_folder, "usg-results-testfile2.123.xml")
             self.assertEqual(audit_result_filepath, expected)
 
     def test_check_file_max_age(self):
@@ -394,9 +378,7 @@ def test_parse_profile_idref(self):
             check_cis_audit.parse_profile_idref("unknown_profile")
 
         profile_id = "xccdf_com.ubuntu.bionic.cis_profile_Level_2_Workstation"
-        self.assertEqual(
-            "level2_workstation", check_cis_audit.parse_profile_idref(profile_id)
-        )
+        self.assertEqual("level2_workstation", check_cis_audit.parse_profile_idref(profile_id))
 
     @mock.patch("files.plugins.check_cis_audit.PROFILE_MAP", bionic_profile_map)
     def test_get_audit_score_and_profile_bionic(self):
@@ -406,9 +388,7 @@ def test_get_audit_score_and_profile_bionic(self):
             check_cis_audit.get_audit_score_and_profile(self.bionic_testfile1, False)
 
         # score and profile correctly read from xml
-        score, profile = check_cis_audit.get_audit_score_and_profile(
-            self.bionic_testfile2, False
-        )
+        score, profile = check_cis_audit.get_audit_score_and_profile(self.bionic_testfile2, False)
         self.assertEqual(score, 89.444443)
         self.assertEqual(profile, "level1_server")
 
@@ -420,9 +400,7 @@ def test_get_audit_score_and_profile_focal(self):
             check_cis_audit.get_audit_score_and_profile(self.focal_testfile1, False)
 
         # score and profile correctly read from xml
-        score, profile = check_cis_audit.get_audit_score_and_profile(
-            self.focal_testfile2, False
-        )
+        score, profile = check_cis_audit.get_audit_score_and_profile(self.focal_testfile2, False)
         self.assertEqual(score, 66.160233)
         self.assertEqual(profile, "level1_server")
 
@@ -434,9 +412,7 @@ def test_get_audit_score_and_profile_tailoring(self):
             check_cis_audit.get_audit_score_and_profile(self.focal_testfile1, True)
 
         # score and profile correctly read from xml
-        score, profile = check_cis_audit.get_audit_score_and_profile(
-            self.focal_testfile2, True
-        )
+        score, profile = check_cis_audit.get_audit_score_and_profile(self.focal_testfile2, True)
         self.assertEqual(score, 66.160233)
         self.assertEqual(profile, "default-tailoring-file")
 
@@ -472,9 +448,7 @@ def test_parse_args(self):
         )
 
         # test setting tailoring
-        arguments = check_cis_audit.parse_args(
-            ["-a", "1", "-c", "99", "-w", "90", "-t"]
-        )
+        arguments = check_cis_audit.parse_args(["-a", "1", "-c", "99", "-w", "90", "-t"])
         self.assertEqual(
             arguments,
             argparse.Namespace(
diff --git a/tests/unit/test_plugins_systemd_scopes.py b/tests/unit/test_plugins_systemd_scopes.py
index 7b0181f..273d826 100644
--- a/tests/unit/test_plugins_systemd_scopes.py
+++ b/tests/unit/test_plugins_systemd_scopes.py
@@ -93,9 +93,7 @@ def test_check_systemd_scopes(self):
             ["-e", "1000", "-E", "2000", "-a", "1000", "-A", "2000", "-o", "Nominal"]
         )
 
-        self.assert_check_stdout(
-            args, "OK: Nominal; 4 in error state, 4 in abandoned state\n"
-        )
+        self.assert_check_stdout(args, "OK: Nominal; 4 in error state, 4 in abandoned state\n")
 
         # test arguments to invoke WARN on error state x4 scopes
         args = check_systemd_scopes.parse_args(
diff --git a/tox.ini b/tox.ini
index 7ee5026..b91ae34 100644
--- a/tox.ini
+++ b/tox.ini
@@ -36,13 +36,11 @@ passenv =
     CS_*
     TEST*
    TERM
-allowlist_externals =
-    charmcraft
 
 [testenv:lint]
 commands =
     flake8
-    black --check --exclude "/(\.eggs|\.git|\.tox|\.venv|\.build|dist|charmhelpers|mod|files)/" .
+    black --check .
 deps =
     black
     flake8
@@ -50,22 +48,11 @@ deps =
     flake8-import-order
     pep8-naming
     flake8-colors
+    flake8-pyproject
 
-[flake8]
-exclude =
-    .git,
-    __pycache__,
-    .tox,
-    charmhelpers,
-    mod,
-    .build
-
-max-line-length = 88
-max-complexity = 14
-
-[testenv:black]
+[testenv:reformat]
 commands =
-    black --exclude "/(\.eggs|\.git|\.tox|\.venv|\.build|dist|charmhelpers|mod|files)/" .
+    black .
 deps =
     black