From c80240733812ff7f007bef01a9df526be12fa394 Mon Sep 17 00:00:00 2001
From: Jakub Judiny <judiny@cesnet.cz>
Date: Wed, 10 May 2023 16:49:16 +0200
Subject: [PATCH] Fix minor pylint issues (newlines, whitespaces, useless "object" inheritance, [] instead of list(), generators instead of [], ignore warnings...) (Redmine issue: #7652)
---
 lib/hawat/blueprints/reports/__init__.py | 2 +-
 lib/hawat/jsglue.py | 2 +-
 ...29_simplification_of_reporting_settings.py | 1 -
 ...c7_drop_reports_events_message_not_null.py | 1 -
 lib/hawat/test/fixtures.py | 2 +-
 lib/mentat/daemon/component/test_storage.py | 2 +-
 lib/mentat/daemon/component/testsuite.py | 2 +-
 lib/mentat/datatype/sqldb.py | 7 ++--
 lib/mentat/fixtures.py | 1 -
 lib/mentat/idea/internal.py | 4 +--
 lib/mentat/idea/sqldb.py | 6 ++--
 lib/mentat/idea/test_internal.py | 10 +++---
 lib/mentat/idea/test_jsondict.py | 4 +--
 lib/mentat/idea/test_sqldb.py | 2 +-
 lib/mentat/module/backup.py | 2 +-
 lib/mentat/module/dbmngr.py | 5 ++-
 lib/mentat/module/storage.py | 1 -
 lib/mentat/plugin/__init__.py | 1 -
 lib/mentat/reports/base.py | 8 ++---
 lib/mentat/reports/event.py | 26 +++++++-------
 lib/mentat/reports/utils.py | 2 +-
 lib/mentat/services/bench_whois.py | 2 +-
 lib/mentat/services/eventstorage.py | 16 ++++-----
 lib/mentat/services/sqlstorage.py | 6 ++--
 lib/mentat/stats/idea.py | 34 +++++++++----------
 lib/mentat/stats/test_idea.py | 2 +-
 lib/mentat/stats/test_rrd.py | 2 +-
 lib/mentat/test_idea.py | 4 +--
 28 files changed, 76 insertions(+), 81 deletions(-)

diff --git a/lib/hawat/blueprints/reports/__init__.py b/lib/hawat/blueprints/reports/__init__.py
index 37a35fa5..c79d77aa 100644
--- a/lib/hawat/blueprints/reports/__init__.py
+++ b/lib/hawat/blueprints/reports/__init__.py
@@ -153,7 +153,7 @@ class SearchView(HTMLMixin, SQLAlchemyMixin, BaseSearchView): # pylint: disable
     def do_after_search(self, items):
         if items:
             self.response_context.update(
-                max_evcount_rep=max([x.evcount_rep for x in items])
+                max_evcount_rep=max(x.evcount_rep for x in items)
             )


diff --git a/lib/hawat/jsglue.py b/lib/hawat/jsglue.py
index 955454ed..af0afc34 100644
--- a/lib/hawat/jsglue.py
+++ b/lib/hawat/jsglue.py
@@ -50,7 +50,7 @@ def get_routes(app):
     return sorted(output, key=lambda x: len(x[1]), reverse=True)


-class JSGlue(object):
+class JSGlue():
     def __init__(self, app=None, **kwargs):
         self.app = app
         if app is not None:
diff --git a/lib/hawat/migrations/versions/0df0d44a1429_simplification_of_reporting_settings.py b/lib/hawat/migrations/versions/0df0d44a1429_simplification_of_reporting_settings.py
index aa6c29ec..307c0901 100644
--- a/lib/hawat/migrations/versions/0df0d44a1429_simplification_of_reporting_settings.py
+++ b/lib/hawat/migrations/versions/0df0d44a1429_simplification_of_reporting_settings.py
@@ -56,4 +56,3 @@ def downgrade():
     op.add_column('settings_reporting', sa.Column('timing_thr_hi', sa.INTEGER(), autoincrement=False, nullable=True))
     op.add_column('settings_reporting', sa.Column('max_attachment_size', sa.INTEGER(), autoincrement=False, nullable=True))
     op.add_column('settings_reporting', sa.Column('mute', sa.BOOLEAN(), autoincrement=False, nullable=True))
-
diff --git a/lib/hawat/migrations/versions/29c1e46e89c7_drop_reports_events_message_not_null.py b/lib/hawat/migrations/versions/29c1e46e89c7_drop_reports_events_message_not_null.py
index 92840f75..e5f7d1db 100644
--- a/lib/hawat/migrations/versions/29c1e46e89c7_drop_reports_events_message_not_null.py
+++ b/lib/hawat/migrations/versions/29c1e46e89c7_drop_reports_events_message_not_null.py
@@ -26,4 +26,3 @@ def downgrade():
     op.alter_column('reports_events', 'message',
                existing_type=sa.VARCHAR(),
                nullable=False)
-
diff --git a/lib/hawat/test/fixtures.py b/lib/hawat/test/fixtures.py
index a8a035c5..2d7acf20 100644
--- a/lib/hawat/test/fixtures.py
+++ b/lib/hawat/test/fixtures.py
@@ -23,7 +23,7 @@ def get_fixtures_db(app):
     """
     Get general database object fixtures.
     """
-    fixture_list = list()
+    fixture_list = []

     user_model = app.get_model(hawat.const.MODEL_USER)
     group_model = app.get_model(hawat.const.MODEL_GROUP)
diff --git a/lib/mentat/daemon/component/test_storage.py b/lib/mentat/daemon/component/test_storage.py
index fc481b50..be7e1bcb 100644
--- a/lib/mentat/daemon/component/test_storage.py
+++ b/lib/mentat/daemon/component/test_storage.py
@@ -216,7 +216,7 @@ class TestMentatDaemonStorage(unittest.TestCase):
             print(json.dumps(idea_out, indent=4, sort_keys=True, default=idea_out.json_default))
         orig = json.dumps(idea_msg, indent=4, sort_keys=True, default=idea_msg.json_default)
         new = json.dumps(idea_out, indent=4, sort_keys=True, default=idea_out.json_default)
-        self.assertEqual(orig, new, "\n".join([l for l in difflib.context_diff(orig.split("\n"), new.split("\n"))]))
+        self.assertEqual(orig, new, list(difflib.context_diff(orig.split('\n'), new.split('\n'))))


 #-------------------------------------------------------------------------------
diff --git a/lib/mentat/daemon/component/testsuite.py b/lib/mentat/daemon/component/testsuite.py
index a1c39b67..2aadba7e 100644
--- a/lib/mentat/daemon/component/testsuite.py
+++ b/lib/mentat/daemon/component/testsuite.py
@@ -116,7 +116,7 @@ class DaemonComponentTestCase(unittest.TestCase):
             res = json.dumps(res, indent=4, sort_keys=True)
             exp = expected_results[result['idea_id']]
             exp = json.dumps(exp, indent=4, sort_keys=True)
-            self.assertEqual(res, exp, "\n".join([l for l in difflib.context_diff(res.split("\n"), exp.split("\n"))]))
+            self.assertEqual(res, exp, list(difflib.context_diff(res.split('\n'), exp.split('\n'))))

     def _build_daemon_mock(self, config_list, core_config_list = None):
         """
diff --git a/lib/mentat/datatype/sqldb.py b/lib/mentat/datatype/sqldb.py
index 1cf3583a..d1c3f4e3 100644
--- a/lib/mentat/datatype/sqldb.py
+++ b/lib/mentat/datatype/sqldb.py
@@ -70,7 +70,6 @@ from sqlalchemy.ext.declarative import declarative_base, declared_attr
 from sqlalchemy.schema import DropTable
 from sqlalchemy.ext.compiler import compiles
 from sqlalchemy.orm import validates
-import sqlalchemy.types as types

 from werkzeug.security import generate_password_hash, check_password_hash

@@ -459,7 +458,7 @@ def groupmodel_from_typeddict(structure, defaults = None):
     return sqlobj


-class iprange(types.UserDefinedType):
+class iprange(sqlalchemy.types.UserDefinedType):
     cache_ok = True

     def get_col_spec(self, **kw):
@@ -780,8 +779,8 @@ def eventstatsmodel_from_typeddict(structure, defaults = None):
     sqlobj.delta = delta.total_seconds()
     sqlobj.count = structure.get('count', structure['overall'].get('cnt_alerts'))
     sqlobj.stats_overall = structure['overall']
-    sqlobj.stats_internal = structure.get('internal', dict())
-    sqlobj.stats_external = structure.get('external', dict())
+    sqlobj.stats_internal = structure.get('internal', dict()) # pylint: disable=locally-disabled,use-dict-literal
+    sqlobj.stats_external = structure.get('external', dict()) # pylint: disable=locally-disabled,use-dict-literal

     return sqlobj

diff --git a/lib/mentat/fixtures.py b/lib/mentat/fixtures.py
index 18fc9af5..25a63abe 100644
--- a/lib/mentat/fixtures.py
+++ b/lib/mentat/fixtures.py
@@ -146,4 +146,3 @@ class MentatFixtures():
         except Exception as exc:
             self.sqlservice.session.rollback()
             self.logservice.info("Unable to remove demo object from database: '%s'", str(exc))
-
diff --git a/lib/mentat/idea/internal.py b/lib/mentat/idea/internal.py
index 733c0b38..a60b4bf1 100644
--- a/lib/mentat/idea/internal.py
+++ b/lib/mentat/idea/internal.py
@@ -250,7 +250,7 @@ class Idea(idea.lite.Idea): # pylint: disable=locally-disabled,too-many-ancesto
         :return: Value of message attribute ``idea['_Mentat'/'_CESNET']['ResolvedAbuses']``.
         :rtype: list of strings
         """
-        return list(self.get_custom_key().get('ResolvedAbuses', list()))
+        return list(self.get_custom_key().get('ResolvedAbuses', []))

     def get_categories(self):
         """
@@ -531,7 +531,7 @@ class IDEAFilterCompiler(pynspect.compilers.IDEAFilterCompiler):
     compiler should be used for proper rule compilations.
     """
     def __init__(self):
-        super(IDEAFilterCompiler, self).__init__()
+        super().__init__()

         self.register_variable_compilation(
             '_Mentat.StorageTime',
diff --git a/lib/mentat/idea/sqldb.py b/lib/mentat/idea/sqldb.py
index d8a503e1..8f8a2d39 100644
--- a/lib/mentat/idea/sqldb.py
+++ b/lib/mentat/idea/sqldb.py
@@ -93,7 +93,7 @@ class IPList(list):
     for in-depth explanation.
     """

-class Idea: # pylint: disable=locally-disabled,too-many-instance-attributes,too-few-public-methods
+class Idea: # pylint: disable=locally-disabled,too-many-instance-attributes,too-few-public-methods,use-list-literal
     """
     Performs conversion of IDEA messages into flat relational model.
     """
@@ -137,11 +137,11 @@ class Idea: # pylint: disable=locally-disabled,too-many-instance-attributes,t

         # Source IP (both v4 and v6 in single attribute).
         self.source_ip = IPList()
-        self.source_ip.extend([ip for ip in idea_event.get_addresses('Source')])
+        self.source_ip.extend(list(idea_event.get_addresses('Source')))

         # Target IP (both v4 and v6 in single attribute).
         self.target_ip = IPList()
-        self.target_ip.extend([ip for ip in idea_event.get_addresses('Target')])
+        self.target_ip.extend(list(idea_event.get_addresses('Target')))

         # Aggregated source and target IP4|6 ranges for search optimizations.
         self.source_ip_aggr_ip4 = self._aggr_iplist(
diff --git a/lib/mentat/idea/test_internal.py b/lib/mentat/idea/test_internal.py
index 6f77a033..06409625 100644
--- a/lib/mentat/idea/test_internal.py
+++ b/lib/mentat/idea/test_internal.py
@@ -163,7 +163,7 @@ class TestMentatIdeaInternal(unittest.TestCase):
         self.assertEqual(json.dumps(idea_internal_1, indent=4, sort_keys=True, default=idea_internal_1.json_default), idea_internal_1.to_json(indent=4))
         orig = json.dumps(self.idea_raw_1, indent=4, sort_keys=True)
         new = json.dumps(idea_internal_1, indent=4, sort_keys=True, default=idea_internal_1.json_default)
-        self.assertEqual(orig, new, "\n".join([l for l in difflib.context_diff(orig.split("\n"), new.split("\n"))]))
+        self.assertEqual(orig, new, list(difflib.context_diff(orig.split('\n'), new.split('\n'))))

         idea_internal_2 = mentat.idea.internal.Idea(self.idea_raw_2)
         if self.verbose:
@@ -172,7 +172,7 @@ class TestMentatIdeaInternal(unittest.TestCase):
         self.assertEqual(json.dumps(idea_internal_2, indent=4, sort_keys=True, default=idea_internal_2.json_default), idea_internal_2.to_json(indent=4))
         orig = json.dumps(self.idea_raw_2, indent=4, sort_keys=True)
         new = json.dumps(idea_internal_2, indent=4, sort_keys=True, default=idea_internal_2.json_default)
-        self.assertEqual(orig, new, "\n".join([l for l in difflib.context_diff(orig.split("\n"), new.split("\n"))]))
+        self.assertEqual(orig, new, list(difflib.context_diff(orig.split('\n'), new.split('\n'))))

     def test_02_idea_lite(self):
         """
@@ -188,7 +188,7 @@ class TestMentatIdeaInternal(unittest.TestCase):
             print(json.dumps(idea_lite_1, indent=4, sort_keys=True, default=idea_lite_1.json_default))
         orig = json.dumps(self.idea_raw_1, indent=4, sort_keys=True)
         new = json.dumps(idea_lite_1, indent=4, sort_keys=True, default=idea_lite_1.json_default)
-        self.assertEqual(orig, new, "\n".join([l for l in difflib.context_diff(orig.split("\n"), new.split("\n"))]))
+        self.assertEqual(orig, new, list(difflib.context_diff(orig.split('\n'), new.split('\n'))))

         # TODO: Following code ends with failure, fix it.
         #idea_internal_1 = mentat.idea.internal.Idea(idea_lite_1)
@@ -206,7 +206,7 @@ class TestMentatIdeaInternal(unittest.TestCase):
             print(json.dumps(idea_lite_2, indent=4, sort_keys=True, default=idea_lite_2.json_default))
         orig = json.dumps(self.idea_raw_2, indent=4, sort_keys=True)
         new = json.dumps(idea_lite_2, indent=4, sort_keys=True, default=idea_lite_2.json_default)
-        self.assertEqual(orig, new, "\n".join([l for l in difflib.context_diff(orig.split("\n"), new.split("\n"))]))
+        self.assertEqual(orig, new, list(difflib.context_diff(orig.split('\n'), new.split('\n'))))

         # TODO: Following code ends with failure, fix it.
         #idea_internal_2 = mentat.idea.internal.Idea(idea_lite_2)
@@ -269,7 +269,7 @@ class TestMentatIdeaInternal(unittest.TestCase):
         )
         orig = json.dumps(idea_internal_1, indent=4, sort_keys=True, default=idea_internal_1.json_default)
         new = json.dumps(idea_internal_2, indent=4, sort_keys=True, default=idea_internal_2.json_default)
-        self.assertEqual(orig, new, "\n".join([l for l in difflib.context_diff(orig.split("\n"), new.split("\n"))]))
+        self.assertEqual(orig, new, list(difflib.context_diff(orig.split('\n'), new.split('\n'))))

     def test_05_get_ranges(self):
         """
diff --git a/lib/mentat/idea/test_jsondict.py b/lib/mentat/idea/test_jsondict.py
index 8f220cc8..b2dfa996 100644
--- a/lib/mentat/idea/test_jsondict.py
+++ b/lib/mentat/idea/test_jsondict.py
@@ -203,7 +203,7 @@ class TestMentatIdeaJSON(unittest.TestCase):
             print(json.dumps(idea_internal, indent=4, sort_keys=True, default=idea_internal.json_default))
         orig = json.dumps(self.idea_raw, indent=4, sort_keys=True)
         new = json.dumps(idea_internal, indent=4, sort_keys=True, default=idea_internal.json_default)
-        self.assertEqual(orig, new, "\n".join([l for l in difflib.context_diff(orig.split("\n"), new.split("\n"))]))
+        self.assertEqual(orig, new, list(difflib.context_diff(orig.split('\n'), new.split('\n'))))

         #
         # Test conversions of 'mentat.idea.internal.Idea' into 'mentat.idea.jsondict.Idea'.
@@ -237,7 +237,7 @@ class TestMentatIdeaJSON(unittest.TestCase):
             print(json.dumps(idea_internal_out, indent=4, sort_keys=True, default=idea_internal_out.json_default))
         orig = json.dumps(idea_internal, indent=4, sort_keys=True, default=idea_internal.json_default)
         new = json.dumps(idea_internal_out, indent=4, sort_keys=True, default=idea_internal_out.json_default)
-        self.assertEqual(orig, new, "\n".join([l for l in difflib.context_diff(orig.split("\n"), new.split("\n"))]))
+        self.assertEqual(orig, new, list(difflib.context_diff(orig.split('\n'), new.split('\n'))))


 #-------------------------------------------------------------------------------
diff --git a/lib/mentat/idea/test_sqldb.py b/lib/mentat/idea/test_sqldb.py
index 6d9e133b..9335df77 100644
--- a/lib/mentat/idea/test_sqldb.py
+++ b/lib/mentat/idea/test_sqldb.py
@@ -191,7 +191,7 @@ class TestMentatIdeaJSON(unittest.TestCase):
             print(json.dumps(idea_internal, indent=4, sort_keys=True, default=idea_internal.json_default))
         orig = json.dumps(self.idea_raw_1, indent=4, sort_keys=True)
         new = json.dumps(idea_internal, indent=4, sort_keys=True, default=idea_internal.json_default)
-        self.assertEqual(orig, new, "\n".join([l for l in difflib.context_diff(orig.split("\n"), new.split("\n"))]))
+        self.assertEqual(orig, new, list(difflib.context_diff(orig.split('\n'), new.split('\n'))))

         #
         # Test conversions of 'mentat.idea.internal.Idea' into 'mentat.idea.sqldb.Idea'.
diff --git a/lib/mentat/module/backup.py b/lib/mentat/module/backup.py
index 92f3fc30..18b980e5 100644
--- a/lib/mentat/module/backup.py
+++ b/lib/mentat/module/backup.py
@@ -559,4 +559,4 @@ class MentatBackupScript(mentat.script.base.MentatBaseScript):
         """
         return "postgresql://{user}:{password}@{host}:{port}/{dbname}".format(
             **parameters
-        )
\ No newline at end of file
+        )
diff --git a/lib/mentat/module/dbmngr.py b/lib/mentat/module/dbmngr.py
index 3a4c889d..baa3f855 100644
--- a/lib/mentat/module/dbmngr.py
+++ b/lib/mentat/module/dbmngr.py
@@ -378,9 +378,8 @@ class MentatDbmngrScript(mentat.script.fetcher.FetcherScript):
         for attrname in ('login', 'fullname', 'email', 'organization', 'roles'):
             if not getattr(account_user, attrname, None):
                 raise pyzenkit.zenscript.ZenScriptException(
-                    "Please provide user`s {} as \"{}=value\" command line argument".format(
-                        attrname,
-                        attrname
+                    "Please provide user`s {attrname} as \"{attrname}=value\" command line argument".format(
+                        attrname=attrname,
                     )
                 )

diff --git a/lib/mentat/module/storage.py b/lib/mentat/module/storage.py
index a57366d1..ad5eea86 100644
--- a/lib/mentat/module/storage.py
+++ b/lib/mentat/module/storage.py
@@ -150,4 +150,3 @@ class MentatStorageDaemon(mentat.daemon.piper.PiperDaemon):
             (mentat.daemon.component.storage.CONFIG_COMMIT_BULKTHR, 500)
         ) + cfgs
         return super()._init_config(cfgs, **kwargs)
-
diff --git a/lib/mentat/plugin/__init__.py b/lib/mentat/plugin/__init__.py
index 8b137891..e69de29b 100644
--- a/lib/mentat/plugin/__init__.py
+++ b/lib/mentat/plugin/__init__.py
@@ -1 +0,0 @@
-
diff --git a/lib/mentat/reports/base.py b/lib/mentat/reports/base.py
index f997c92f..3b5f9904 100644
--- a/lib/mentat/reports/base.py
+++ b/lib/mentat/reports/base.py
@@ -184,7 +184,7 @@ class BaseReporter:
         if isinstance(val, str):
             try:
                 return dateutil.parser.parse(val)
-            except:
+            except Exception:
                 pass
         return val

@@ -229,15 +229,15 @@ class BaseReporter:
             return format_datetime(val, BABEL_RFC3339_FORMAT, tzinfo = self.tzinfo, locale = self.locale)
         return format_datetime(val, BABEL_RFC3339_FORMAT, locale = self.locale)

-    def format_custom_datetime(self, val, format):
+    def format_custom_datetime(self, val, custom_format):
         """
         Simple wrapper around :py:func:babel.dates.format_datetime` function that prints
         the datetime in custom format.
""" val = self.get_datetime(val) if self.timezone != 'UTC': - return format_datetime(val, format, tzinfo = self.tzinfo, locale = self.locale) - return format_datetime(val, format, locale = self.locale) + return format_datetime(val, custom_format, tzinfo = self.tzinfo, locale = self.locale) + return format_datetime(val, custom_format, locale = self.locale) def format_timedelta(self, val): """ diff --git a/lib/mentat/reports/event.py b/lib/mentat/reports/event.py index 0812845d..addd7c30 100644 --- a/lib/mentat/reports/event.py +++ b/lib/mentat/reports/event.py @@ -107,9 +107,11 @@ class EventReporter(BaseReporter): if event_class not in self.event_classes_data: self.event_classes_data[event_class] = data = {} if os.path.isfile(os.path.join(self.event_classes_dir, event_class, "info.json")): - with open(os.path.join(self.event_classes_dir, event_class, "info.json")) as f: - info = json.load(f) - [data.__setitem__(key, info[key]) for key in ["label", "reference"] if key in info] + with open(os.path.join(self.event_classes_dir, event_class, "info.json"), encoding="utf8") as file: + info = json.load(file) + for key in ["label", "reference"]: + if key in info: + data[key] = info[key] data["has_macro"] = os.path.isfile(os.path.join(self.event_classes_dir, event_class, "email.j2")) def _setup_renderer(self, templates_dir): @@ -255,7 +257,7 @@ class EventReporter(BaseReporter): result[str(group_chain)] = {} result[str(group_chain)]['evcount_all'] = 0 result[str(group_chain)]['evcount_rlp'] = len(events_rel[groups]) - result[str(group_chain)]['evcount_all'] += result[str(group_chain)]['evcount_rlp'] + result[str(group_chain)]['evcount_all'] += result[str(group_chain)]['evcount_rlp'] if groups not in events: events[groups] = {} events[groups]['relapsed'] = events_rel[groups] @@ -266,7 +268,7 @@ class EventReporter(BaseReporter): if not events: result['result'] = 'skipped-no-events' - for groups in events.keys(): + for groups in events: (group_chain, fallback_groups) = groups # Check, that there is anything to report (regular and/or relapsed events). if 'regular' not in events[groups] and 'relapsed' not in events[groups]: @@ -610,9 +612,9 @@ class EventReporter(BaseReporter): passed = False if len(jpath_values(event, 'Source.IP4') + jpath_values(event, 'Source.IP6')) > 1: event_copy = deepcopy(event) - for s in event_copy["Source"]: - s["IP4"] = [] - s["IP6"] = [] + for source in event_copy["Source"]: + source["IP4"] = [] + source["IP6"] = [] for src in set(jpath_values(event, 'Source.IP4')): event_copy["Source"][0]["IP4"] = [src] filtered_groups, fallback_groups, fltlog = self.filter_one_event(src, event_copy, main_group, fltlog) @@ -846,8 +848,8 @@ class EventReporter(BaseReporter): for st in ("Source", "Target"): for k in ("Hostname", "MAC", "Port", "Proto", "URL", "Email"): - for v in jpath_values(event, st + "." + k): - ip_result[st.lower()][k.lower()][v] = 1 + for value in jpath_values(event, st + "." 
+ k): + ip_result[st.lower()][k.lower()][value] = 1 for abuse_value in result.values(): for ip_value in abuse_value.values(): @@ -878,7 +880,7 @@ class EventReporter(BaseReporter): while True: try: - with open(filepath, 'w') as jsonf: + with open(filepath, 'w', encoding="utf8") as jsonf: json.dump( data, jsonf, @@ -911,7 +913,7 @@ class EventReporter(BaseReporter): while True: try: - with open(filepath, 'w') as imf: + with open(filepath, 'w', encoding="utf8") as imf: imf.write(data) break except FileNotFoundError: diff --git a/lib/mentat/reports/utils.py b/lib/mentat/reports/utils.py index 0753402f..95215ffb 100644 --- a/lib/mentat/reports/utils.py +++ b/lib/mentat/reports/utils.py @@ -18,8 +18,8 @@ __credits__ = "Pavel Kácha <pavel.kacha@cesnet.cz>, Andrea KropáÄová <andrea import datetime -import pytz import pprint +import pytz from pynspect.jpath import jpath_value, jpath_values diff --git a/lib/mentat/services/bench_whois.py b/lib/mentat/services/bench_whois.py index 21df1511..da61cf94 100644 --- a/lib/mentat/services/bench_whois.py +++ b/lib/mentat/services/bench_whois.py @@ -90,7 +90,7 @@ IPS = None def b001(): - global WHOIS + global WHOIS # pylint: disable=locally-disabled,global-statement WHOIS = mentat.services.whois.WhoisService([ mentat.services.whois.SqldbWhoisModule().setup() ]) diff --git a/lib/mentat/services/eventstorage.py b/lib/mentat/services/eventstorage.py index 6027c356..f672848f 100644 --- a/lib/mentat/services/eventstorage.py +++ b/lib/mentat/services/eventstorage.py @@ -395,11 +395,11 @@ def build_query(parameters = None, qtype = QTYPE_SELECT, qname = None): query, params_ext = _BQ_MAP[str(qtype)](parameters, qname) params.extend(params_ext) - except KeyError: + except KeyError as error: if isinstance(qtype, psycopg2.sql.Composed): query = qtype else: - raise ValueError("Received invalid value '{}' for SQL query type.".format(qtype)) + raise ValueError("Received invalid value '{}' for SQL query type.".format(qtype)) from error # Build WHERE section of the query. subquery, subparams = _bq_where(parameters) @@ -421,7 +421,7 @@ def build_query(parameters = None, qtype = QTYPE_SELECT, qname = None): if qtype in (QTYPE_SELECT, QTYPE_SELECT_GHOST) and parameters: if parameters.get('sortby', None): field, direction = parameters['sortby'].split('.') - if field != 'detecttime' and field != 'storagetime': + if field not in ['detecttime', 'storagetime']: if parameters.get('st_from', None) or parameters.get('st_to', None): field = 'storagetime' else: @@ -933,7 +933,7 @@ class EventStorageCursor: return count_orphaned + count_timeouted -class incstats_decorator: # pylint: disable=locally-disabled,too-fewpublic-methods,invalid-name +class incstats_decorator: # pylint: disable=locally-disabled,too-few-public-methods,invalid-name """ Decorator for calculating usage statistics. """ @@ -973,7 +973,7 @@ class EventStorageService: def __del__(self): self.close() - def handle_db_exceptions(func): + def handle_db_exceptions(func): # pylint: disable=locally-disabled,no-self-argument """ Handle exceptions raised during database interfacing operations. 
""" @@ -981,14 +981,14 @@ class EventStorageService: exc_store = None for _ in range(2): try: - return func(self, *args, **kwargs) + return func(self, *args, **kwargs) # pylint: disable=locally-disabled,not-callable except psycopg2.DataError as err: self.rollback() raise DataError(str(err)) from err except (psycopg2.OperationalError, psycopg2.InterfaceError) as err: - self.__init__() + self.__init__() # pylint: disable=locally-disabled,unnecessary-dunder-call exc_store = err continue @@ -1280,7 +1280,7 @@ class EventStorageService: raise DataError(str(err)) from err except (psycopg2.OperationalError, psycopg2.InterfaceError) as err: - self.__init__() + self.__init__() # pylint: disable=locally-disabled,unnecessary-dunder-call exc_store = err continue diff --git a/lib/mentat/services/sqlstorage.py b/lib/mentat/services/sqlstorage.py index c029f4eb..75518b0e 100644 --- a/lib/mentat/services/sqlstorage.py +++ b/lib/mentat/services/sqlstorage.py @@ -196,13 +196,13 @@ def init(core_config, updates = None): _MANAGER = StorageServiceManager(core_config, updates) -def set_manager(manager): +def set_manager(new_manager): """ - Set manager from outside of the module. This should be used only when you know + Set manager from outside the module. This should be used only when you know exactly what you are doing. """ global _MANAGER # pylint: disable=locally-disabled,global-statement - _MANAGER = manager + _MANAGER = new_manager def manager(): diff --git a/lib/mentat/stats/idea.py b/lib/mentat/stats/idea.py index 354118c6..9de21c75 100644 --- a/lib/mentat/stats/idea.py +++ b/lib/mentat/stats/idea.py @@ -251,7 +251,7 @@ def evaluate_events(events, stats = None): :rtype: dict """ if stats is None: - stats = dict() + stats = {} stats.setdefault(ST_SKEY_CNT_EVENTS, 0) stats.setdefault(ST_SKEY_CNT_RECURR, 0) @@ -292,7 +292,7 @@ def evaluate_timeline_events(events, dt_from, dt_to, max_count, timezone = None, :rtype: dict """ if stats is None: - stats = dict() + stats = {} stats.setdefault(ST_SKEY_CNT_EVENTS, 0) stats.setdefault(ST_SKEY_CNT_RECURR, 0) @@ -354,7 +354,7 @@ def evaluate_singlehost_events(host, events, dt_from, dt_to, max_count, timezone :rtype: dict """ if stats is None: - stats = dict() + stats = {} stats.setdefault(ST_SKEY_CNT_EVENTS, 0) stats.setdefault(ST_SKEY_CNT_RECURR, 0) @@ -413,7 +413,7 @@ def aggregate_stats_reports(report_list, dt_from, dt_to, result = None): :rtype: dict """ if result is None: - result = dict() + result = {} if not report_list: return result @@ -464,7 +464,7 @@ def aggregate_dbstats_events(aggr_type, aggr_name, aggr_data, default_val, timel """ if result is None: - result = dict() + result = {} if aggr_type == 'timeline': bucket_dict = { bucket: idx for idx, bucket in enumerate(timeline_cfg['buckets']) } @@ -491,7 +491,7 @@ def aggregate_dbstats_events(aggr_type, aggr_name, aggr_data, default_val, timel result[ST_SKEY_TIMELINE][idx][1].setdefault(aggr_name, {})[str(res.set) or KEY_UNKNOWN] = res.count else: result[ST_SKEY_TIMELINE][idx][1][aggr_name] = res.count - except KeyError: + except KeyError as error: raise ValueError( "Timeline bucket missmatch for '{}:{}:{}' [{}]".format( aggr_type, @@ -499,7 +499,7 @@ def aggregate_dbstats_events(aggr_type, aggr_name, aggr_data, default_val, timel res.bucket, str(res.set) ) - ) + ) from error elif aggr_type == 'aggregate': result[aggr_name] = default_val @@ -570,7 +570,7 @@ def evaluate_event_groups(events, stats = None): :rtype: dict """ if stats is None: - stats = dict() + stats = {} stats[ST_SKEY_COUNT] = len(events) 
     msg_groups = group_events(events)
@@ -591,7 +591,7 @@ def aggregate_stat_groups(stats_list, result = None):
     :rtype: dict
     """
     if result is None:
-        result = dict()
+        result = {}

     result[ST_SKEY_COUNT] = 0
     for stat in stats_list:
@@ -641,7 +641,7 @@ def aggregate_timeline_groups(stats_list, dt_from, dt_to, max_count, min_step =
     :rtype: dict
     """
     if result is None:
-        result = dict()
+        result = {}

     result[ST_SKEY_COUNT] = 0
     # Do not calculate anything for empty statistical list.
@@ -649,9 +649,9 @@ def aggregate_timeline_groups(stats_list, dt_from, dt_to, max_count, min_step =
         return result

     # Calculate some overall dataset statistics.
-    result[ST_SKEY_COUNT] = sum([x.count for x in stats_list])
-    result[ST_SKEY_DT_FROM] = min([x.dt_from for x in stats_list])
-    result[ST_SKEY_DT_TO] = max([x.dt_to for x in stats_list])
+    result[ST_SKEY_COUNT] = sum(x.count for x in stats_list)
+    result[ST_SKEY_DT_FROM] = min(x.dt_from for x in stats_list)
+    result[ST_SKEY_DT_TO] = max(x.dt_to for x in stats_list)

     if not result[ST_SKEY_COUNT]:
         return result
@@ -865,7 +865,7 @@ def _merge_stats(stats, result = None):
     :rtype: dict
     """
     if result is None:
-        result = dict()
+        result = {}

     result[ST_SKEY_CNT_ALERTS] = result.get(ST_SKEY_CNT_ALERTS, 0) + stats.get(ST_SKEY_CNT_ALERTS, 0)
     result[ST_SKEY_CNT_EVENTS] = result[ST_SKEY_CNT_ALERTS]
@@ -981,7 +981,7 @@ def _make_toplist(stats, dict_key, top_threshold, force = False):

     # Calculate and store the total for what was omitted into the __REST__ subkey.
     if sorted_key_list_throw:
-        tmp[ST_SKEY_REST] = sum([stats[dict_key][key] for key in sorted_key_list_throw])
+        tmp[ST_SKEY_REST] = sum(stats[dict_key][key] for key in sorted_key_list_throw)

     # Add previous value of the __REST__ subkey.
     if rest:
@@ -1031,7 +1031,7 @@ def _mask_toplist(stats, mask, dict_key, top_threshold, force = False):

     # Calculate and store the total for what was omitted.
     if stat_key_list_throw:
-        tmp[ST_SKEY_REST] = sum([stats[dict_key][key] for key in stat_key_list_throw])
+        tmp[ST_SKEY_REST] = sum(stats[dict_key][key] for key in stat_key_list_throw)

     # Add previous value of the __REST__ subkey.
     if rest:
@@ -1189,7 +1189,7 @@ def _init_daily_timeline(dt_from, dt_to):
     timeline_cfg = calculate_timeline_config_daily(dt_from, dt_to)
     dt_from = timeline_cfg[ST_SKEY_DT_FROM]

-    timeline = list()
+    timeline = []
     for i in range(timeline_cfg['count']): # pylint: disable=locally-disabled,unused-variable
         timeline.append([dt_from, {}])
         dt_from = dt_from + timeline_cfg['step']
diff --git a/lib/mentat/stats/test_idea.py b/lib/mentat/stats/test_idea.py
index b0892fae..d9676a95 100644
--- a/lib/mentat/stats/test_idea.py
+++ b/lib/mentat/stats/test_idea.py
@@ -8,10 +8,10 @@
 #-------------------------------------------------------------------------------


+import datetime
 import unittest
 from pprint import pprint
 import pytz
-import datetime

 import mentat.stats.idea
 import mentat.datatype.sqldb
diff --git a/lib/mentat/stats/test_rrd.py b/lib/mentat/stats/test_rrd.py
index 7281c174..5adc5115 100644
--- a/lib/mentat/stats/test_rrd.py
+++ b/lib/mentat/stats/test_rrd.py
@@ -330,7 +330,7 @@ class TestMentatStatsRrd(unittest.TestCase):
         """
         self.test_04_update()

-        time_end = (TIME_START + (mentat.stats.rrd.DFLT_STEP * TEST_DATA_SIZE))
+        time_end = TIME_START + (mentat.stats.rrd.DFLT_STEP * TEST_DATA_SIZE)

         result = self.stats.generate(time_end)
         for res in result:
diff --git a/lib/mentat/test_idea.py b/lib/mentat/test_idea.py
index e77c1deb..eef4af29 100644
--- a/lib/mentat/test_idea.py
+++ b/lib/mentat/test_idea.py
@@ -102,12 +102,12 @@ class TestIDEA(unittest.TestCase):
         idea = lite.Idea(RAW_IDEA)
         orig = json.dumps(RAW_IDEA, indent=4, sort_keys=True)
         new = json.dumps(idea, indent=4, sort_keys=True, default=idea.json_default)
-        self.assertEqual(orig, new, "\n".join([l for l in difflib.context_diff(orig.split("\n"), new.split("\n"))]))
+        self.assertEqual(orig, new, list(difflib.context_diff(orig.split('\n'), new.split('\n'))))

         idea = valid.Idea(RAW_IDEA)
         orig = json.dumps(RAW_IDEA, indent=4, sort_keys=True)
         new = json.dumps(idea, indent=4, sort_keys=True, default=idea.json_default)
-        self.assertEqual(orig, new, "\n".join([l for l in difflib.context_diff(orig.split("\n"), new.split("\n"))]))
+        self.assertEqual(orig, new, list(difflib.context_diff(orig.split('\n'), new.split('\n'))))


 #-------------------------------------------------------------------------------
--
GitLab