From da226c808c582ca86e6a0e00efcb3a8046855ec1 Mon Sep 17 00:00:00 2001
From: Jakub Judiny <judiny@cesnet.cz>
Date: Wed, 10 May 2023 16:55:36 +0200
Subject: [PATCH] Fix some Exception warnings, missing encoding warning and
 other minor warnings (Redmine issue: #7652)

---
 lib/hawat/forms.py                        | 30 ++++++++++++------------
 lib/hawat/utils.py                        |  2 +-
 lib/mentat/daemon/component/filer.py      |  4 ++--
 lib/mentat/daemon/component/filter.py     |  4 ++--
 lib/mentat/daemon/component/sampler.py    |  2 +-
 lib/mentat/datatype/internal.py           |  4 ++--
 lib/mentat/dirq.py                        | 15 ++++++------
 lib/mentat/emails/test_event.py           |  2 +-
 lib/mentat/module/detmngr.py              |  7 +++---
 lib/mentat/module/ideagen.py              |  5 ++--
 lib/mentat/module/netmngr.py              | 25 ++++++++++----------
 lib/mentat/plugin/enricher/passivedns.py  | 12 +++++-----
 lib/mentat/reports/overview.py            |  5 ++--
 lib/mentat/services/nerd.py               |  4 ++--
 lib/mentat/services/pdnsr.py              |  8 +++---
 lib/mentat/services/test_eventstorage.py  |  2 +-
 lib/mentat/services/test_whois.py         |  4 ++--
 lib/mentat/stats/rrd.py                   | 28 ++++++++++++----------
 lib/mentat/system.py                      | 14 +++++------
 19 files changed, 89 insertions(+), 88 deletions(-)

diff --git a/lib/hawat/forms.py b/lib/hawat/forms.py
index 03a3e12d..e7f5af43 100644
--- a/lib/hawat/forms.py
+++ b/lib/hawat/forms.py
@@ -83,8 +83,8 @@ def str_to_int_with_none(value):
         return None
     try:
         return int(value)
-    except:
-        raise ValueError('Invalid string value {} to be converted to integer'.format(str(value)))
+    except Exception as exc:
+        raise ValueError('Invalid string value {} to be converted to integer'.format(str(value))) from exc


 #-------------------------------------------------------------------------------
@@ -296,13 +296,13 @@ def check_network_record_list(_form, field): # pylint: disable=locally-disabled
     for value in field.data:
         try:
             ipranges.from_str(value)
-        except ValueError:
+        except ValueError as exc:
             raise wtforms.validators.ValidationError(
                 gettext(
                     'The "%(val)s" value does not look like valid IPv4/IPv6 address/range/network.',
                     val=str(value)
                 )
-            )
+            ) from exc


 def check_port_list(_form, field): # pylint: disable=locally-disabled,unused-argument
@@ -318,13 +318,13 @@ def check_port_list(_form, field): # pylint: disable=locally-disabled,unused-ar
                         val=str(data)
                     )
                 )
-        except ValueError:
+        except ValueError as exc:
             raise wtforms.validators.ValidationError(
                 gettext(
                     'The "%(val)s" value does not look like valid port number.',
                     val=str(data)
                 )
-            )
+            ) from exc


 def check_int_list(_form, field): # pylint: disable=locally-disabled,unused-argument
@@ -340,13 +340,13 @@ def check_int_list(_form, field): # pylint: disable=locally-disabled,unused-arg
                         val=str(data)
                     )
                 )
-        except ValueError:
+        except ValueError as exc:
             raise wtforms.validators.ValidationError(
                 gettext(
                     'The "%(val)s" value does not look like valid positive integer.',
                     val=str(data)
                 )
-            )
+            ) from exc


 def check_null_character(_form, field): # pylint: disable=locally-disabled,unused-argument
@@ -468,9 +468,9 @@ class DateTimeLocalField(wtforms.DateTimeField):
             dt_local = localtz.localize(dt_naive, is_dst=None)
             self.data = dt_local.astimezone(
                 pytz.utc)  # pylint: disable=locally-disabled,attribute-defined-outside-init
-        except (pytz.exceptions.AmbiguousTimeError, pytz.exceptions.NonExistentTimeError, ValueError):
+        except (pytz.exceptions.AmbiguousTimeError, pytz.exceptions.NonExistentTimeError, ValueError) as exc:
             self.data = None  # pylint: disable=locally-disabled,attribute-defined-outside-init
-            raise ValueError(self.gettext('Not a valid datetime value'))
+            raise ValueError(self.gettext('Not a valid datetime value')) from exc


 class SmartDateTimeField(wtforms.Field):
@@ -485,7 +485,7 @@ class SmartDateTimeField(wtforms.Field):
     utcisoformat = '%Y-%m-%dT%H:%M:%SZ'

     def __init__(self, label=None, validators=None, formats=None, **kwargs):
-        super(SmartDateTimeField, self).__init__(label, validators, **kwargs)
+        super().__init__(label, validators, **kwargs)
         if formats is None:
             self.formats = [
                 '%Y-%m-%d %H:%M:%S', '%Y-%m-%d %H:%M',
@@ -588,8 +588,8 @@ class RadioFieldWithNone(wtforms.RadioField):
         if valuelist:
             try:
                 self.data = self.coerce(valuelist[0]) if valuelist[0] != 'None' else None  # pylint: disable=locally-disabled,attribute-defined-outside-init
-            except ValueError:
-                raise ValueError(self.gettext("Invalid Choice: could not coerce"))
+            except ValueError as exc:
+                raise ValueError(self.gettext("Invalid Choice: could not coerce")) from exc

     def pre_validate(self, form):
         for val, _ in self.choices:
@@ -609,8 +609,8 @@ class SelectFieldWithNone(wtforms.SelectField):
             try:
                 self.data = self.coerce(valuelist[0]) if valuelist[
                     0].lower() != 'none' else None  # pylint: disable=locally-disabled,attribute-defined-outside-init
-            except ValueError:
-                raise ValueError(self.gettext("Invalid Choice: could not coerce"))
+            except ValueError as exc:
+                raise ValueError(self.gettext("Invalid Choice: could not coerce")) from exc
         else:
             self.data = None  # pylint: disable=locally-disabled,attribute-defined-outside-init
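The hawat/forms.py hunks above all make the same change: every re-raise inside an except block now chains the original exception with `from exc`, which is what pylint's raise-missing-from check asks for. A minimal standalone sketch of the effect (simplified, not Mentat code):

    def to_int_or_none(value):
        # Simplified stand-in for the patched helper above.
        if value is None:
            return None
        try:
            return int(value)
        except Exception as exc:
            # 'from exc' stores the original error in __cause__, so the traceback
            # shows both exceptions instead of silently replacing the first one.
            raise ValueError('Invalid string value {} to be converted to integer'.format(value)) from exc

    try:
        to_int_or_none('abc')
    except ValueError as err:
        print(type(err.__cause__).__name__)   # ValueError raised by int()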
diff --git a/lib/hawat/utils.py b/lib/hawat/utils.py
index 3c84b2ce..7709aa9e 100644
--- a/lib/hawat/utils.py
+++ b/lib/hawat/utils.py
@@ -213,7 +213,7 @@ def load_json_from_file(filename):
     """
     Load JSON from given file.
     """
-    with open(filename) as fhnd:
+    with open(filename, encoding="utf8") as fhnd:
         res = json.load(fhnd)
     return res

diff --git a/lib/mentat/daemon/component/filer.py b/lib/mentat/daemon/component/filer.py
index 90816c8b..06e44737 100644
--- a/lib/mentat/daemon/component/filer.py
+++ b/lib/mentat/daemon/component/filer.py
@@ -194,12 +194,12 @@ class FilerDaemonComponent(pyzenkit.zendaemon.ZenDaemonComponent): # pylint: di
             os.chmod(dir_name, perms)
             if user or group:
                 shutil.chown(dir_name, user = user[0], group = group[0])
-        except:
+        except Exception as exc:
             msg = "Unable to create work directory '{}' with 'UID:{},GID:{},PERMS:{}' as current effective user 'EUID:{},EGID:{}': '{}'".format(
                 dir_name, str(user), str(group), str(oct(perms)), os.geteuid(), os.getegid(), traceback.format_exc()
             )
             daemon.logger.error("[STATUS] Component '{}': {}".format(self.cid, msg))
-            raise pyzenkit.zendaemon.ZenDaemonComponentException(msg)
+            raise pyzenkit.zendaemon.ZenDaemonComponentException(msg) from exc

     def _check_workdir_writability(self, daemon, dir_name):
         if not os.path.isdir(dir_name):
diff --git a/lib/mentat/daemon/component/filter.py b/lib/mentat/daemon/component/filter.py
index e7456361..e49f610a 100644
--- a/lib/mentat/daemon/component/filter.py
+++ b/lib/mentat/daemon/component/filter.py
@@ -97,7 +97,7 @@ class FilterDaemonComponent(pyzenkit.zendaemon.ZenDaemonComponent):
                     "filter": flt
                 })
                 daemon.logger.debug("[STATUS] Component '{}': Loaded filter rule '{}'".format(self.cid, nme))
-            except:
+            except Exception:
                 daemon.logger.debug("[STATUS] Component '{}': Unable to load filter rule '{}'".format(self.cid, rule))


@@ -118,7 +118,7 @@ class FilterDaemonComponent(pyzenkit.zendaemon.ZenDaemonComponent):
             else:
                 daemon.logger.debug("Message '{}' passed by filter '{}'".format(args['id'], rule["rule"]))
             return (daemon.FLAG_CONTINUE, args)
-        except:
+        except Exception:
             daemon.logger.debug("Message '{}' caused some trouble during processing: '{}'".format(args['id'], sys.exc_info()[1]))
             daemon.queue.schedule('message_banish', args)
             return (daemon.FLAG_STOP, args)
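The daemon components above also replace bare `except:` clauses with `except Exception:`. The difference is visible with signals and interpreter shutdown; an illustrative sketch (the helper and the lambda are made up for the example):

    import sys

    def run_guarded(job):
        try:
            return job()
        except Exception:
            # Catches ordinary errors (ValueError, KeyError, ...) but lets
            # KeyboardInterrupt and SystemExit, which derive only from
            # BaseException, propagate. A bare 'except:' would swallow those too.
            print("job failed: {}".format(sys.exc_info()[1]))
            return None

    run_guarded(lambda: 1 / 0)   # error is logged and suppressed
    # run_guarded(sys.exit)      # would still terminate the process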
diff --git a/lib/mentat/daemon/component/sampler.py b/lib/mentat/daemon/component/sampler.py
index 8bb83031..18652135 100644
--- a/lib/mentat/daemon/component/sampler.py
+++ b/lib/mentat/daemon/component/sampler.py
@@ -110,7 +110,7 @@ class SamplerDaemonComponent(pyzenkit.zendaemon.ZenDaemonComponent):
                 daemon.queue.schedule('message_cancel', args)
                 return (daemon.FLAG_STOP, args)

-        except:
+        except Exception:
             daemon.logger.debug("Message '{}' caused some trouble during sampling: '{}'".format(args['id'], sys.exc_info()[1]))
             self.inc_statistic(self.STATS_CNT_ERRORS)
             daemon.queue.schedule('message_banish', args)
diff --git a/lib/mentat/datatype/internal.py b/lib/mentat/datatype/internal.py
index 8b3ad251..b768a9f9 100644
--- a/lib/mentat/datatype/internal.py
+++ b/lib/mentat/datatype/internal.py
@@ -423,8 +423,8 @@ def t_detector_record(val, source):
             record['credibility'] = 1.0
         if 'registered' in val:
             record['registered'] = val['registered']
-    except:
-        raise ValueError('Unknown detector record {}'.format(pprint.pformat(val)))
+    except Exception as exc:
+        raise ValueError('Unknown detector record {}'.format(pprint.pformat(val))) from exc

     return Detector(record)

 #-------------------------------------------------------------------------------
diff --git a/lib/mentat/dirq.py b/lib/mentat/dirq.py
index 7a86f349..436d4330 100644
--- a/lib/mentat/dirq.py
+++ b/lib/mentat/dirq.py
@@ -147,12 +147,12 @@ class DirectoryQueue:
             os.chmod(dir_name, perms)
             if user or group:
                 shutil.chown(dir_name, user = user[0], group = group[0])
-        except:
+        except Exception as exc:
             raise DirectoryQueueException(
                 "Unable to create queue directory '{}' with 'UID:{},GID:{},PERMS:{}' as current effective user 'EUID:{},EGID:{}': '{}'".format(
                     dir_name, str(user), str(group), str(oct(perms)), os.geteuid(), os.getegid(), traceback.format_exc()
                 )
-            )
+            ) from exc

     def _check_writability(self, dir_name):
         if not os.access(dir_name, os.W_OK):
@@ -180,7 +180,7 @@ class DirectoryQueue:
         """
         Load and return contents of given file (helper method for testing).
         """
-        with open(filename, 'r') as tmpf:
+        with open(filename, 'r', encoding="utf8") as tmpf:
             return tmpf.read()

     def _append_metadata(self, file_tgt, metadata):
@@ -188,9 +188,8 @@ class DirectoryQueue:
         Append given metadata to given file.
        """
         try:
-            mf = open("{}.meta".format(file_tgt), 'w')
-            json.dump(metadata, mf, sort_keys = True, indent = 4)
-            mf.close()
+            with open("{}.meta".format(file_tgt), 'w', encoding="utf8") as mf:
+                json.dump(metadata, mf, sort_keys = True, indent = 4)
         except OSError:
             pass

@@ -239,7 +238,7 @@ class DirectoryQueue:
         (next_id, next_file) = self.next_file()
         if next_file:
             data = None
-            with open(next_file) as nf:
+            with open(next_file, encoding="utf8") as nf:
                 data = ''.join(nf.readlines())
             return (next_id, data)
         return (None, None)
@@ -296,7 +295,7 @@ class DirectoryQueue:
         """
         Reload given message from within pending queue.
         """
-            with open(os.path.join(self.dir_pending, itemid)) as mf:
+            with open(os.path.join(self.dir_pending, itemid), encoding="utf8") as mf:
                 data = ''.join(mf.readlines())
             return data
         return None
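dirq.py now opens files with an explicit encoding and writes metadata through a context manager instead of a manual open/write/close sequence. A small self-contained sketch of that pattern (the file name is only a demo path):

    import json

    metadata = {"id": "msg01", "retries": 0}

    # An explicit encoding makes reads and writes behave the same on every
    # platform instead of depending on the locale default; the 'with' block
    # guarantees the handle is flushed and closed even if json.dump raises.
    with open("example.meta", "w", encoding="utf8") as mf:
        json.dump(metadata, mf, sort_keys=True, indent=4)

    with open("example.meta", encoding="utf8") as mf:
        print(json.load(mf)["id"])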
""" - with open(os.path.join(self.dir_pending, itemid)) as mf: + with open(os.path.join(self.dir_pending, itemid), encoding="utf8") as mf: data = ''.join(mf.readlines()) return data return None diff --git a/lib/mentat/emails/test_event.py b/lib/mentat/emails/test_event.py index 58bca55b..25000841 100644 --- a/lib/mentat/emails/test_event.py +++ b/lib/mentat/emails/test_event.py @@ -75,7 +75,7 @@ class TestReportEmail(unittest.TestCase): ideas_obj = [mentat.idea.internal.Idea(x) for x in ideas_raw] def setUp(self): - with open(UTEST_JSON_RAW, 'w') as rawf: + with open(UTEST_JSON_RAW, 'w', encoding="utf8") as rawf: json.dump( self.ideas_obj, rawf, diff --git a/lib/mentat/module/detmngr.py b/lib/mentat/module/detmngr.py index 5cd00780..d7310ddf 100644 --- a/lib/mentat/module/detmngr.py +++ b/lib/mentat/module/detmngr.py @@ -246,11 +246,12 @@ class MentatDetmngrScript(mentat.script.fetcher.FetcherScript): :rtype: dict """ try: - with open(detectors_file, 'r') as jsf: + with open(detectors_file, 'r', encoding="utf8") as jsf: json_data = jsf.read() detectors_file_data = json.loads(json_data) - except: - raise pyzenkit.zenscript.ZenScriptException("Invalid detectors file '{}', expected JSON formated file".format(detectors_file)) + except Exception as exc: + raise pyzenkit.zenscript.ZenScriptException("Invalid detectors file '{}', expected JSON formated file" + .format(detectors_file)) from exc detectors_file_type = self.c(self.CONFIG_DETECTORS_SOURCE) self.logger.info("Loaded reference detectors file '%s :: %s'", detectors_file, detectors_file_type) diff --git a/lib/mentat/module/ideagen.py b/lib/mentat/module/ideagen.py index db80fcc5..05a31c0d 100644 --- a/lib/mentat/module/ideagen.py +++ b/lib/mentat/module/ideagen.py @@ -495,9 +495,8 @@ class MentatIdeagenScript(mentat.script.base.MentatBaseScript): tfn = os.path.join(self.c(self.CONFIG_TEMP_DIR), "{}.idea".format(msg_id)) ifn = os.path.join(self.c(self.CONFIG_QUEUE_DIR), "{}.idea".format(msg_id)) - imf = open(tfn, 'w') - imf.write(msg) - imf.close() + with open(tfn, 'w', encoding="utf8") as imf: + imf.write(msg) if tfn != ifn: os.rename(tfn, ifn) diff --git a/lib/mentat/module/netmngr.py b/lib/mentat/module/netmngr.py index 4cfec0ab..b79287d0 100644 --- a/lib/mentat/module/netmngr.py +++ b/lib/mentat/module/netmngr.py @@ -367,7 +367,7 @@ class MentatNetmngrScript(mentat.script.fetcher.FetcherScript): wi_file_data = self._process_whois_data(wi_file_data_raw, wi_file_type) self.logger.info("Number of abuse groups in reference whois file: %d", len(wi_file_data.keys())) - abuse_groups = self.sqlservice.session.query(GroupModel).filter(GroupModel.enabled == True).order_by(GroupModel.name).all() + abuse_groups = self.sqlservice.session.query(GroupModel).filter(bool(GroupModel.enabled)).order_by(GroupModel.name).all() self.sqlservice.session.commit() for abg in abuse_groups: @@ -395,11 +395,12 @@ class MentatNetmngrScript(mentat.script.fetcher.FetcherScript): :rtype: dict """ try: - with open(whois_file, 'r') as jsf: + with open(whois_file, 'r', encoding="utf8") as jsf: json_data = jsf.read() whois_file_data = json.loads(json_data) - except: - raise pyzenkit.zenscript.ZenScriptException("Invalid whois file '{}', expected JSON formated file".format(whois_file)) + except Exception as exc: + raise pyzenkit.zenscript.ZenScriptException("Invalid whois file '{}', expected JSON formated file" + .format(whois_file)) from exc whois_file_type = self.c(self.CONFIG_WHOIS_SOURCE) self.logger.info("Loaded reference whois file '%s :: %s'", whois_file, 
diff --git a/lib/mentat/plugin/enricher/passivedns.py b/lib/mentat/plugin/enricher/passivedns.py
index fda03759..0a8d0f34 100644
--- a/lib/mentat/plugin/enricher/passivedns.py
+++ b/lib/mentat/plugin/enricher/passivedns.py
@@ -293,9 +293,9 @@ class PassiveDNSConnectorEML(PassiveDNSConnectorBase):
                 new_domain = self._create_rec(name, ts_first, ts_last, TTL=ttl)
                 domains.append(new_domain)
         except json.decoder.JSONDecodeError as err:
-            raise PassiveDNSConnectorError("Failed to parse JSON response: " + str(err))
+            raise PassiveDNSConnectorError("Failed to parse JSON response: " + str(err)) from err
         except (KeyError, TypeError, ValueError) as err:
-            raise PassiveDNSConnectorError("Unexpected response structure: " + str(err))
+            raise PassiveDNSConnectorError("Unexpected response structure: " + str(err)) from err

         return domains

     def _query_fn(self, ip_addr, timeout):
@@ -317,7 +317,7 @@ class PassiveDNSConnectorEML(PassiveDNSConnectorBase):
             response = self._session.get(url, timeout=timeout)
             ret_code = response.status_code
         except requests.exceptions.RequestException as err:
-            raise PassiveDNSConnectorError("API request failed: " + str(err))
+            raise PassiveDNSConnectorError("API request failed: " + str(err)) from err

         if ret_code == 200:  # Success
             domains = self._query_parse(response.text)
@@ -410,9 +410,9 @@ class PassiveDNSConnectorCESNET(PassiveDNSConnectorBase):
                 new_domain = self._create_rec(name, ts_first, ts_last, Type=rec_type)
                 domains.append(new_domain)
         except json.decoder.JSONDecodeError as err:
-            raise PassiveDNSConnectorError("Failed to parse JSON response: " + str(err))
+            raise PassiveDNSConnectorError("Failed to parse JSON response: " + str(err)) from err
         except (KeyError, TypeError, ValueError) as err:
-            raise PassiveDNSConnectorError("Unexpected response structure: " + str(err))
+            raise PassiveDNSConnectorError("Unexpected response structure: " + str(err)) from err

         limit = self._cfg_api_limit
         if limit is not None and limit < len(domains):
@@ -440,7 +440,7 @@ class PassiveDNSConnectorCESNET(PassiveDNSConnectorBase):
             response = self._session.get(url, timeout=timeout)
             ret_code = response.status_code
         except requests.exceptions.RequestException as err:
-            raise PassiveDNSConnectorError("API request failed: " + str(err))
+            raise PassiveDNSConnectorError("API request failed: " + str(err)) from err

         if ret_code == 200:  # Success
             domains = self._query_parser(response.text)
diff --git a/lib/mentat/reports/overview.py b/lib/mentat/reports/overview.py
index 325116cb..62ae1c3b 100644
--- a/lib/mentat/reports/overview.py
+++ b/lib/mentat/reports/overview.py
@@ -234,8 +234,7 @@ class OverviewReporter(BaseReporter):

         filepath = os.path.join(self.reports_dir, filename)

-        imf = open(filepath, 'w')
-        imf.write(data_json)
-        imf.close()
+        with open(filepath, 'w', encoding="utf8") as imf:
+            imf.write(data_json)

         return filepath
diff --git a/lib/mentat/services/nerd.py b/lib/mentat/services/nerd.py
index b7270550..c5761db4 100644
--- a/lib/mentat/services/nerd.py
+++ b/lib/mentat/services/nerd.py
@@ -127,7 +127,7 @@ class NerdService:
         except Exception as exc:
             raise NerdRuntimeException(
                 "Can't get data from NERD service: {}".format(str(exc))
-            )
+            ) from exc

         if resp.status_code == requests.codes.not_found:
             return None
@@ -139,7 +139,7 @@ class NerdService:
         except Exception as exc:
             raise NerdRuntimeException(
                 "Invalid data received from NERD service: {}".format(str(exc))
-            )
+            ) from exc


 class NerdServiceManager:
diff --git a/lib/mentat/services/pdnsr.py b/lib/mentat/services/pdnsr.py
index b49ebf94..6773dca1 100644
--- a/lib/mentat/services/pdnsr.py
+++ b/lib/mentat/services/pdnsr.py
@@ -118,10 +118,10 @@ class PDNSRService:
         # Send request
         try:
             resp = requests.get(url)
-        except Exception:
+        except Exception as exc:
             raise PDNSRRuntimeException(
                 "Can't get data from PassiveDNS service: {}ip/{}".format(self.base_api_url, str(ipaddr))
-            )
+            ) from exc

         if resp.status_code == requests.codes.not_found:
             return None
@@ -132,7 +132,7 @@ class PDNSRService:
             result = resp.json()
             if sortby:
                 field, direction = sortby.split('.')
-                reverse = True if direction == 'desc' else False
+                reverse = direction == 'desc'
                 result = sorted(result, key = lambda x: x.get(field, None), reverse = reverse)
             if limit and int(limit):
                 result = result[:int(limit)]
@@ -140,7 +140,7 @@ class PDNSRService:
         except Exception as exc:
             raise PDNSRRuntimeException(
                 "Invalid data received from PassiveDNS service: {}".format(str(exc))
-            )
+            ) from exc


 class PDNSRServiceManager:
diff --git a/lib/mentat/services/test_eventstorage.py b/lib/mentat/services/test_eventstorage.py
index 5d6341f8..dc146fb2 100644
--- a/lib/mentat/services/test_eventstorage.py
+++ b/lib/mentat/services/test_eventstorage.py
@@ -323,7 +323,7 @@ class TestMentatStorage(unittest.TestCase):
         try:
             idea_into['ID'] = 'a1'
             storage.insert_event_bulkci(idea_into)
-        except:
+        except Exception:
             pass
         storage.commit_bulk()
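In pdnsr.py the ternary `True if direction == 'desc' else False` collapses to the comparison itself, since `==` already yields a bool (pylint's simplifiable-if-expression). A standalone toy check:

    def sort_by_count(records, direction):
        # The comparison already evaluates to True/False, so no ternary is needed.
        reverse = direction == 'desc'
        return sorted(records, key=lambda rec: rec.get('count', 0), reverse=reverse)

    print(sort_by_count([{'count': 1}, {'count': 3}], 'desc'))
    # [{'count': 3}, {'count': 1}]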
diff --git a/lib/mentat/services/test_whois.py b/lib/mentat/services/test_whois.py
index d9dc3fb0..07fe6715 100644
--- a/lib/mentat/services/test_whois.py
+++ b/lib/mentat/services/test_whois.py
@@ -156,9 +156,9 @@ class TestMentatWhois(unittest.TestCase):
         """
         Perform unit test case setup.
         """
-        with open(FILE_WHOIS_NEGISTRY, 'w') as fhnd:
+        with open(FILE_WHOIS_NEGISTRY, 'w', encoding="utf8") as fhnd:
             fhnd.write(CONTENT_WHOIS_NEGISTRY)
-        with open(FILE_WHOIS_EXCEPTIONS, 'w') as fhnd:
+        with open(FILE_WHOIS_EXCEPTIONS, 'w', encoding="utf8") as fhnd:
             fhnd.write(CONTENT_WHOIS_EXCEPTIONS)

     def tearDown(self):
diff --git a/lib/mentat/stats/rrd.py b/lib/mentat/stats/rrd.py
index 4e64204e..b404622a 100644
--- a/lib/mentat/stats/rrd.py
+++ b/lib/mentat/stats/rrd.py
@@ -496,7 +496,7 @@ class RrdStats:
         :param str fname: Name of the JSON file.
         :param dict data: Data to be stored.
         """
-        with open(fname, 'w') as expf:
+        with open(fname, 'w', encoding="utf8") as expf:
             json.dump(data, expf, indent=4, sort_keys=True)

     def prepare_db(self, ds_id, time_start = None):
@@ -549,7 +549,7 @@ class RrdStats:
             )

         except rrdtool.OperationalError as exc:
-            raise RrdsCreateException("Unable to create RRD database '{}' in file '{}': {}".format(ds_id, rrddb, str(exc)))
+            raise RrdsCreateException("Unable to create RRD database '{}' in file '{}': {}".format(ds_id, rrddb, str(exc))) from exc

         return (rrddb, True)

@@ -620,7 +620,8 @@ class RrdStats:
             return (rrddb, flag_new)

         except rrdtool.OperationalError as exc:
-            raise RrdsUpdateException("Unable to update RRD database '{}' in file '{}' with value '{}' and timestamp '{}': {}".format(ds_id, rrddb, value, str(tst), str(exc)))
+            raise RrdsUpdateException("Unable to update RRD database '{}' in file '{}' with value '{}' and timestamp '{}': {}"
+                                      .format(ds_id, rrddb, value, str(tst), str(exc))) from exc

     def update_all(self, value, tst = None, flt_type = None):
         """
@@ -658,14 +659,15 @@ class RrdStats:
                 # 'DEF:eventcnt_proc_a01=./spool/proc.a01.rrd:eventcnt:MAX'
                 'DEF:{}={}:{}:MAX'.format(FEED_ID, rrddb, DS_NAME),
                 # 'CDEF:eventcnt_proc_a01_r=eventcnt_proc_a01,300,*'
-                'CDEF:{}_r={},300,*'.format(FEED_ID, FEED_ID),
+                'CDEF:{feed_id}_r={feed_id},300,*'.format(feed_id=FEED_ID),
                 # 'XPORT:eventcnt_proc_a01_r:'
                 'XPORT:{}_r:{}'.format(FEED_ID, '# of messages from {}'.format(ds_id))
             )
             return (rrddb, flag_new, result)

         except rrdtool.OperationalError as exc:
-            raise RrdsExportException("Unable to export RRD database '{}' in file '{}': {}".format(ds_id, rrddb, str(exc)))
+            raise RrdsExportException("Unable to export RRD database '{}' in file '{}': {}"
+                                      .format(ds_id, rrddb, str(exc))) from exc

     def lookup(self, flt_type = None):
         """
@@ -820,19 +822,19 @@ class RrdStats:
                 # 'DEF:eventcnt_proc_a01=./spool/proc.a01.rrd:eventcnt:MIN'
                 'DEF:{}={}:{}:MAX'.format(idsid, rrd[3], DS_NAME),
                 # 'CDEF:eventcnt_proc_a01_r=eventcnt_proc_a01,300,*'
-                'CDEF:{}_r={},300,*'.format(idsid, idsid),
+                'CDEF:{id}_r={id},300,*'.format(id=idsid),
                 # Calculate current value.
                 # 'VDEF:cur_eventcnt_proc_a01=eventcnt_proc_a01_r,LAST'
-                'VDEF:cur_{}={}_r,LAST'.format(idsid, idsid),
+                'VDEF:cur_{id}={id}_r,LAST'.format(id=idsid),
                 # Calculate overall average value.
                 # 'VDEF:avg_eventcnt_proc_a01=eventcnt_proc_a01_r,AVERAGE'
-                'VDEF:avg_{}={}_r,AVERAGE'.format(idsid, idsid),
+                'VDEF:avg_{id}={id}_r,AVERAGE'.format(id=idsid),
                 # Calculate overall maximum value.
                 # 'VDEF:max_eventcnt_proc_a01=eventcnt_proc_a01_r,MAXIMUM'
-                'VDEF:max_{}={}_r,MAXIMUM'.format(idsid, idsid),
+                'VDEF:max_{id}={id}_r,MAXIMUM'.format(id=idsid),
                 # Calculate overall minimum value.
                 # 'VDEF:min_eventcnt_proc_a01=eventcnt_proc_a01_r,MINIMUM'
-                'VDEF:min_{}={}_r,MINIMUM'.format(idsid, idsid)
+                'VDEF:min_{id}={id}_r,MINIMUM'.format(id=idsid)
             ]

             # Generate RRD drawing definitions.
@@ -1007,7 +1009,7 @@ class RrdStats:
                 "Unable to export RRD chart data '{}:{}' into file '{}': {}".format(
                     chspec['fid'], chspec['title'], chspec['path_xport'], str(exc)
                 )
-            )
+            ) from exc

     def _rrd_generate_chart(self, chspec, time_end):
         """
@@ -1041,7 +1043,7 @@ class RrdStats:
                 "Unable to generate RRD chart '{}:{}' into file '{}': {}".format(
                     chspec['fid'], chspec['title'], chspec['path_chart'], str(exc)
                 )
-            )
+            ) from exc

     def _rrd_generate_sparkchart(self, chspec, time_end):
         """
@@ -1084,4 +1086,4 @@ class RrdStats:
                 "Unable to generate sparkline RRD chart '{}':'{}' into file '{}': {}".format(
                     chspec['fid'], chspec['title'], chspec['path_schart'], str(exc)
                 )
-            )
+            ) from exc
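The rrd.py format strings that repeated the same positional argument now use a named placeholder, so each value is passed to format() only once. A quick illustration with an example feed id (the value is made up, not taken from the module):

    FEED_ID = 'eventcnt_proc_a01'   # example value only

    # Positional placeholders force the argument to be repeated:
    old = 'CDEF:{}_r={},300,*'.format(FEED_ID, FEED_ID)

    # A named placeholder can be reused inside the template:
    new = 'CDEF:{feed_id}_r={feed_id},300,*'.format(feed_id=FEED_ID)

    assert old == new == 'CDEF:eventcnt_proc_a01_r=eventcnt_proc_a01,300,*'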
diff --git a/lib/mentat/system.py b/lib/mentat/system.py
index cf54768e..46122e46 100644
--- a/lib/mentat/system.py
+++ b/lib/mentat/system.py
@@ -295,7 +295,7 @@ def analyze_pid_file(pid_file, pid_file_path):
             return None

         pid = None
-        with open(pid_file_path, 'r') as pidf:
+        with open(pid_file_path, 'r', encoding="utf8") as pidf:
             pid = pidf.read()
             pid = int(pid)

@@ -305,12 +305,12 @@ def analyze_pid_file(pid_file, pid_file_path):
             'file': pid_file,
             'path': pid_file_path,
             'pid': int(pid),
-            'paralel': True if match.group(2) else False,
+            'paralel': bool(match.group(2)),
             'size': fsstat.st_size,
             'atime': datetime.datetime.utcfromtimestamp(fsstat.st_atime),
             'mtime': datetime.datetime.utcfromtimestamp(fsstat.st_mtime)
         }
-    except:
+    except Exception:
         return None

 def analyze_pid_files(pid_dir_path):
@@ -402,7 +402,7 @@ def analyze_cron_file(cron_file, cron_file_path, cron_links):
             'mtime': datetime.datetime.utcfromtimestamp(fsstat.st_mtime),
             'link': cron_links.get(cron_file_path, None)
         }
-    except:
+    except Exception:
         return None

 def analyze_cron_files(cfg_dir_path, cron_dir_path):
@@ -490,7 +490,7 @@ def analyze_log_file(log_file, log_file_path):
             'atime': datetime.datetime.utcfromtimestamp(fsstat.st_atime),
             'mtime': datetime.datetime.utcfromtimestamp(fsstat.st_mtime)
         }
-    except:
+    except Exception:
         return None

 def analyze_log_files(log_dir_path):
@@ -575,7 +575,7 @@ def analyze_runlog_file(runlog_file, runlog_file_path):
             'mtime': datetime.datetime.utcfromtimestamp(fsstat.st_mtime),
             'data': pyzenkit.jsonconf.json_load(runlog_file_path)
         }
-    except:
+    except Exception:
         return None

 def analyze_runlog_files(run_dir_path, limit = None):
@@ -647,7 +647,7 @@ def analyze_cache_file(cache_file, cache_file_path):
             'mtime': datetime.datetime.utcfromtimestamp(fsstat.st_mtime),
             'data': pyzenkit.jsonconf.json_load(cache_file_path)
         }
-    except:
+    except Exception:
         return None

 def analyze_cache_files(cache_dir_path):
-- 
GitLab