Commit 6c99c270 authored by Jan Mach

Refactoring: Renamed calculate_secondary_stats function to _calculate_secondary_stats.

(Redmine issue: #4321)
parent b79f27fa
@@ -311,7 +311,7 @@ class ShowView(HTMLViewMixin, SQLAlchemyViewMixin, HawatItemShowView):
         """
         if 'item' in self.response_context and self.response_context['item']:
             self.response_context.update(
-                statistics = mentat.stats.idea.calculate_secondary_stats(
+                statistics = mentat.stats.idea._calculate_secondary_stats(
                     self.response_context['item'].statistics
                 )
             )
@@ -206,35 +206,6 @@ def truncate_evaluations(stats, top_threshold = 20):
         stats[key] = truncate_stats(stats[key], top_threshold)
     return stats
 
 
-def calculate_secondary_stats(stats):
-    """
-    Calculate secondary statistics (cnt, min, max, sum, avg) for every statistical
-    aggregation subkey.
-
-    :param dict stats: Structure containing single statistic category.
-    :return: Updated structure containing statistics.
-    :rtype: dict
-    """
-    # Calculate unique and recurring events.
-    if ST_SKEY_CNT_EVENTS in stats:
-        if ST_SKEY_CNT_RECURR in stats:
-            stats[ST_SKEY_CNT_UNIQUE] = stats[ST_SKEY_CNT_EVENTS] - stats[ST_SKEY_CNT_RECURR]
-        else:
-            stats[ST_SKEY_CNT_UNIQUE] = stats[ST_SKEY_CNT_EVENTS]
-            stats[ST_SKEY_CNT_RECURR] = 0
-
-    # Calculate secondary statistics.
-    for key in LIST_CALCSTAT_KEYS:
-        if key in stats:
-            stats['cnt_{}'.format(key)] = len(stats[key])
-            stats['sum_{}'.format(key)] = sum(stats[key].values())
-            stats['min_{}'.format(key)] = min(stats[key].values())
-            stats['max_{}'.format(key)] = max(stats[key].values())
-            stats['avg_{}'.format(key)] = stats['sum_{}'.format(key)]/stats['cnt_{}'.format(key)]
-            stats['list_{}'.format(key)] = list(sorted(stats[key].keys()))
-
-    return stats
-
 #-------------------------------------------------------------------------------
@@ -271,7 +242,7 @@ def evaluate_events(events, stats = None):
         _include_event_to_stats(stats, event)
 
     # Calculate secondary statistics (cnt, min, max, sum, avg).
-    stats = calculate_secondary_stats(stats)
+    stats = _calculate_secondary_stats(stats)
 
     return stats
@@ -326,13 +297,13 @@ def evaluate_timeline_events(events, dt_from, dt_to, max_count, stats = None):
 
     # Calculate secondary statistics (cnt, min, max, sum, avg) and truncate result
     # to toplist of given size.
-    stats = calculate_secondary_stats(stats)
+    stats = _calculate_secondary_stats(stats)
     stats = truncate_stats(stats)
 
     # Calculate secondary statistics (cnt, min, max, sum, avg) and mask the result
     # to toplist of given size for all timeline time windows.
     for tl_stat in stats[ST_SKEY_TIMELINE]:
-        tl_stat[1] = calculate_secondary_stats(tl_stat[1])
+        tl_stat[1] = _calculate_secondary_stats(tl_stat[1])
         tl_stat[1] = truncate_stats_with_mask(tl_stat[1], stats)
 
     return stats
@@ -444,7 +415,7 @@ def aggregate_stat_groups(stats_list, result = None):
         )
 
     for grp_key in LIST_STAT_GROUPS:
-        result[grp_key] = calculate_secondary_stats(result[grp_key])
+        result[grp_key] = _calculate_secondary_stats(result[grp_key])
 
     return result
@@ -484,7 +455,7 @@ def aggregate_stats_reports(report_list, result = None):
             result
         )
 
-    result = calculate_secondary_stats(result)
+    result = _calculate_secondary_stats(result)
 
     return result
@@ -551,6 +522,35 @@ def _include_event_to_stats(stats, event, recurring = False):
             key = det
         _counter_inc(stats, ST_SKEY_DETECTORSWS, key)
 
+
+def _calculate_secondary_stats(stats):
+    """
+    Calculate secondary statistics (cnt, min, max, sum, avg) for every statistical
+    aggregation subkey.
+
+    :param dict stats: Structure containing single statistic category.
+    :return: Updated structure containing statistics.
+    :rtype: dict
+    """
+    # Calculate unique and recurring events.
+    if ST_SKEY_CNT_EVENTS in stats:
+        if ST_SKEY_CNT_RECURR in stats:
+            stats[ST_SKEY_CNT_UNIQUE] = stats[ST_SKEY_CNT_EVENTS] - stats[ST_SKEY_CNT_RECURR]
+        else:
+            stats[ST_SKEY_CNT_UNIQUE] = stats[ST_SKEY_CNT_EVENTS]
+            stats[ST_SKEY_CNT_RECURR] = 0
+
+    # Calculate secondary statistics.
+    for key in LIST_CALCSTAT_KEYS:
+        if key in stats:
+            stats['cnt_{}'.format(key)] = len(stats[key])
+            stats['sum_{}'.format(key)] = sum(stats[key].values())
+            stats['min_{}'.format(key)] = min(stats[key].values())
+            stats['max_{}'.format(key)] = max(stats[key].values())
+            stats['avg_{}'.format(key)] = stats['sum_{}'.format(key)]/stats['cnt_{}'.format(key)]
+            stats['list_{}'.format(key)] = list(sorted(stats[key].keys()))
+
+    return stats
+
 def _make_toplist(stats, dict_key, top_threshold):
     """
     Produce only toplist of given statistical keys.
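
For reference, a minimal standalone sketch of what the relocated helper computes, mirroring the logic visible in the diff above. The ST_SKEY_* string values, the 'ips' aggregation key, and the demo input are assumptions made for illustration only, not taken from the module:

# A minimal sketch (not part of this commit) of the secondary-statistics logic.
ST_SKEY_CNT_EVENTS = 'cnt_events'     # assumed value of the module constant
ST_SKEY_CNT_RECURR = 'cnt_recurring'  # assumed value of the module constant
ST_SKEY_CNT_UNIQUE = 'cnt_unique'     # assumed value of the module constant
LIST_CALCSTAT_KEYS = ('ips',)         # assumed subset of aggregation subkeys

def calculate_secondary_stats_demo(stats):
    """Standalone copy of the computation done by _calculate_secondary_stats()."""
    # Split the overall event count into unique and recurring events.
    if ST_SKEY_CNT_EVENTS in stats:
        if ST_SKEY_CNT_RECURR in stats:
            stats[ST_SKEY_CNT_UNIQUE] = stats[ST_SKEY_CNT_EVENTS] - stats[ST_SKEY_CNT_RECURR]
        else:
            stats[ST_SKEY_CNT_UNIQUE] = stats[ST_SKEY_CNT_EVENTS]
            stats[ST_SKEY_CNT_RECURR] = 0

    # Derive cnt/sum/min/max/avg/list for every configured aggregation subkey.
    for key in LIST_CALCSTAT_KEYS:
        if key in stats:
            stats['cnt_{}'.format(key)] = len(stats[key])
            stats['sum_{}'.format(key)] = sum(stats[key].values())
            stats['min_{}'.format(key)] = min(stats[key].values())
            stats['max_{}'.format(key)] = max(stats[key].values())
            stats['avg_{}'.format(key)] = stats['sum_{}'.format(key)] / stats['cnt_{}'.format(key)]
            stats['list_{}'.format(key)] = list(sorted(stats[key].keys()))
    return stats

demo = calculate_secondary_stats_demo({
    'cnt_events': 10,
    'cnt_recurring': 3,
    'ips': {'192.0.2.1': 6, '192.0.2.2': 4},
})
# demo now additionally contains: cnt_unique=7, cnt_ips=2, sum_ips=10,
# min_ips=4, max_ips=6, avg_ips=5.0, list_ips=['192.0.2.1', '192.0.2.2']

After this commit the helper is module-private: the hawat ShowView hunk above switches to the new underscored name, while the remaining call sites in the diff reach it through functions of the same module, such as evaluate_events() and aggregate_stat_groups().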