Newer
Older
# Copyright (C) 2011-2015 Cesnet z.s.p.o
# Use of this source is governed by a 3-clause BSD-style license, see LICENSE file.
from __future__ import print_function
import logging
import logging.handlers
import json
from traceback import format_tb

Pavel Kácha
committed
from collections import namedtuple
from time import sleep
from random import randint
import configparser as ConfigParser
from urllib.parse import parse_qs
unicode = str
def get_method_params(method):
return method.__code__.co_varnames[:method.__code__.co_argcount]
else:
import ConfigParser
from urlparse import parse_qs
def get_method_params(method):
return method.func_code.co_varnames[:method.func_code.co_argcount]
# for local version of up to date jsonschema
sys.path.append(path.join(path.dirname(__file__), "..", "lib"))

Pavel Kácha
committed
from jsonschema import Draft4Validator
class Encoder(json.JSONEncoder):
    """JSON encoder that knows how to serialize Error and ErrorMessage."""

    def default(self, o):
        # Error aggregates know how to dump themselves.
        if isinstance(o, Error):
            return o.to_dict()
        if isinstance(o, ErrorMessage):
            # Start from the auxiliary kwargs; the exception triple is not
            # JSON serializable, so it is dropped.
            serialized = o.other_args.copy()
            serialized.pop("exc", None)
            serialized["error"] = o.error
            serialized["message"] = o.message
            if o.events:
                serialized["events"] = list(o.events)
            return serialized
        # Anything else falls through (encoded as null), as before.
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
class ErrorMessage(Exception):
    """ One error message, usually collected within an Error aggregate.

        Core fields "error" (numeric code), "message" and "unique_id" are
        read-only after construction.  Any other keyword argument lands in
        self.other_args and is readable/writable as a plain attribute.
        "events" is a set of indices (within the client's request) of the
        events this error pertains to.
    """

    def __init__(self, error, message, events=None, unique_id=None, **kwargs):
        # Bypass our own __setattr__ for the read-only core fields.
        super(Exception, self).__setattr__("error", error)
        super(Exception, self).__setattr__("message", message)
        super(Exception, self).__setattr__("unique_id", unique_id)
        self.events = set() if events is None else set(events)
        self.other_args = kwargs

    def __repr__(self):
        return "%s(error=%d, message=%s)" % (
            type(self).__name__, self.error, repr(self.message)
        )

    def __str__(self):
        if sys.version_info[0] < 3:
            # FIX: the codec error handler is named "backslashreplace";
            # the original "backslashereplace" raised LookupError on Py2.
            return self.str_err().encode('ascii', 'backslashreplace')
        return self.str_err()

    def str_err(self):
        """One-line summary: code, message and the causing exception, if any."""
        exc = self.other_args.get("exc", None)
        if exc in (None, (None, None, None)):
            exc_cause = ""
        else:
            exc_cause = " (cause was %s: %s)" % (exc[0].__name__, str(exc[1]))
        return "Error(%s) %s%s" % (self.error, self.message, exc_cause)

    def str_info(self):
        """JSON dump of auxiliary arguments, minus bookkeeping keys."""
        arg_copy = self.other_args.copy()
        arg_copy.pop("req_id", None)
        arg_copy.pop("method", None)
        arg_copy.pop("exc", None)
        if arg_copy:
            return "Detail: %s" % json.dumps(arg_copy, cls=Encoder)
        return ""

    def str_debug(self):
        """Formatted traceback of the causing exception, or ""."""
        exc = self.other_args.get("exc", None)
        if exc in (None, (None, None, None)):
            return ""
        exc_tb = exc[2]
        if not exc_tb:
            return ""
        return "Traceback:\n" + "".join(format_tb(exc_tb))

    def __getattr__(self, name):
        # Expose auxiliary kwargs as attributes.
        if name in self.other_args:
            return self.other_args[name]
        raise AttributeError

    def __setattr__(self, name, value):
        if name in ("events", "exc", "other_args"):
            super(Exception, self).__setattr__(name, value)
            return
        if name in ("error", "message", "unique_id"):
            raise AttributeError("Cannot change the attribute %s" % name)
        self.other_args[name] = value

Pavel Kácha
committed
def __init__(self, method=None, req_id=None, errors=None, **kwargs):
    """ Aggregate exception collecting ErrorMessage instances.

        method -- name of the handler method the error happened in
        req_id -- id of the request being served
        errors -- optional iterable of pre-built ErrorMessage objects
        kwargs -- when "message" is present, a first ErrorMessage is
                  built from kwargs (error code defaults to 500)
    """
    self.method = method  # FIX: parameter was accepted but dropped; to_dict() reads self.method
    self.req_id = req_id
    if "message" in kwargs:
        kwargs.setdefault("error", 500)
        self.errors = [ErrorMessage(**kwargs)]
    else:
        self.errors = []
    if errors:
        self.errors.extend(errors)

Pavel Kácha
committed
def append(self, _events=None, **kwargs):
    """Attach one more ErrorMessage, with stock defaults for code/message."""
    kwargs.setdefault("message", "No further information")
    kwargs.setdefault("error", 500)
    self.errors.append(ErrorMessage(**kwargs))

Pavel Kácha
committed
def get_http_err_msg(self):
    """Reduce collected errors into one (status, reason) pair for HTTP."""
    try:
        err = self.errors[0].error
        msg = self.errors[0].message
    except (IndexError, AttributeError):
        # No usable error collected; answer with a distinctive canary.
        err = 500
        msg = "There's NO self-destruction button! Ah, you've just found it..."
        return err, msg
    if not all(msg == e.message for e in self.errors):
        # messages not the same, get Multiple errors
        msg = "Multiple errors"
    if not all(err == e.error for e in self.errors):
        # errors not same, round to basic err code (400, 500)
        # and use the highest one
        err = max(e.error for e in self.errors) // 100 * 100
    msg = "".join((c if '\x20' <= c != '\x7f' else r'\x{:02x}'.format(ord(c))) for c in msg)  # escape control characters
    return err, msg
return "\n".join(str(e) for e in self.errors)

Pavel Kácha
committed
def log(self, logger, prio=logging.ERROR):
    """Log every collected error: summary at prio, details at INFO,
    traceback (if any) at DEBUG."""
    for e in self.errors:
        logger.log(prio, e.str_err())
        info = e.str_info()
        if info:
            logger.info(info)
        debug = e.str_debug()
        if debug:
            logger.debug(debug)

Pavel Kácha
committed
def to_dict(self):
    # NOTE(review): the method header was lost in this copy; the name
    # "to_dict" is grounded by Encoder.default(), which calls o.to_dict()
    # on Error instances — verify against upstream.
    """Serializable summary of the whole error aggregate."""
    d = {
        "method": self.method,
        "req_id": self.req_id,
        "errors": self.errors
    }
    return d
def get_clean_root_logger(level=logging.INFO):
    """ Attempts to get logging module into clean slate state """

    # We want to be able to set up at least stderr logger before any
    # configuration is read, and then later get rid of it and set up
    # whatever administrator requires.
    # However, there can exist only one logger, but we want to get a clean
    # slate everytime we initialize StreamLogger or FileLogger... which
    # is not exactly supported by logging module.
    # So, we look directly inside logger class and clean up handlers/filters
    # manually.
    logger = logging.getLogger(__name__)
    logger.setLevel(level)
    while logger.handlers:
        logger.handlers[0].close()
        logger.removeHandler(logger.handlers[0])
    while logger.filters:
        logger.removeFilter(logger.filters[0])
    logger.propagate = False
    return logger  # FIX: StreamLogger/FileLogger/SysLogger use the return value
def StreamLogger(stream=sys.stderr, level=logging.DEBUG):
    """ Fallback handler just for setup, not meant to be used from
        configuration file because during wsgi query stdout/stderr
        is forbidden.
    """
    handler = logging.StreamHandler(stream)
    handler.setFormatter(logging.Formatter(
        '%(asctime)s %(filename)s[%(process)d]: (%(levelname)s) %(message)s'))
    logger = get_clean_root_logger(level)
    logger.addHandler(handler)
    return logger
class LogRequestFilter(logging.Filter):
    """ Filter class, instance of which is added to logger class to add
        info about request automatically into every logline, no matter
        how it came into existence.
    """

    def __init__(self, req):
        logging.Filter.__init__(self)
        self.req = req

    def filter(self, record):
        # Outside a request (req.env unset) the preamble stays empty.
        preamble = ""
        if self.req.env:
            preamble = "%08x/%s: " % (self.req.req_id or 0, self.req.path)
        record.req_preamble = preamble
        return True
def FileLogger(req, filename, level=logging.INFO):
    """File-backed logger with the per-request preamble filter attached."""
    fhand = logging.FileHandler(filename)
    fform = logging.Formatter('%(asctime)s %(filename)s[%(process)d]: (%(levelname)s) %(req_preamble)s%(message)s')
    fhand.setFormatter(fform)  # FIX: formatter was created but never attached (cf. StreamLogger)
    ffilt = LogRequestFilter(req)
    logger = get_clean_root_logger(level)
    logger.addFilter(ffilt)
    logger.addHandler(fhand)  # FIX: handler was never added, so nothing was written to the file
    logger.info("Initialized FileLogger(req=%r, filename=\"%s\", level=%s)" % (req, filename, level))
    return logger
def SysLogger(req, socket="/dev/log", facility=logging.handlers.SysLogHandler.LOG_DAEMON, level=logging.INFO):
    """Syslog-backed logger with the per-request preamble filter attached."""
    fhand = logging.handlers.SysLogHandler(address=socket, facility=facility)
    fform = logging.Formatter('%(filename)s[%(process)d]: (%(levelname)s) %(req_preamble)s%(message)s')
    fhand.setFormatter(fform)  # FIX: formatter was created but never attached (cf. StreamLogger)
    ffilt = LogRequestFilter(req)
    logger = get_clean_root_logger(level)
    logger.addFilter(ffilt)
    logger.addHandler(fhand)  # FIX: handler was never added, so nothing reached syslog
    logger.info("Initialized SysLogger(req=%r, socket=\"%s\", facility=\"%d\", level=%s)" % (req, socket, facility, level))
    return logger  # FIX: missing return; callers use the result (cf. StreamLogger)
# Row object for records of the "clients" table; instantiated as
# Client(**row) from database query results.
Client = namedtuple("Client", [
    "id", "registered", "requestor", "hostname", "name",
    "secret", "valid", "read", "debug", "write", "test", "note"])
class Object(object):
    """Base class giving a uniform "Name(arg=value, ...)" __str__ built
    from the subclass's __init__ parameters."""

    def __str__(self):
        attr_names = get_method_params(self.__init__)[1:]  # skip "self"
        described = ", ".join(
            "%s=%r" % (attr, getattr(self, attr, None)) for attr in attr_names)
        return "%s(%s)" % (type(self).__name__, described)

Pavel Kácha
committed
class Request(Object):
    """ Simple container for info about ongoing request.
        One instance gets created before server startup, and all other
        configured objects get it as parameter during instantiation.

        Server then takes care of populating this instance on the start
        of wsgi request (and resetting at the end). All other objects
        then can find this actual request info in their own self.req.

        However, only Server.wsgi_app, handler (WardenHandler) exposed
        methods and logging related objects should use self.req directly.
        All other objects should use self.req only as source of data for
        error/exception handling/logging, and should take/return
        necessary data as arguments/return values for clarity on
        which data their main codepaths work with.
    """

    def reset(self, env=None, client=None, path=None, req_id=None):
        """Populate (or clear, with no arguments) the per-request state."""
        self.env = env
        self.client = client
        self.path = path or ""
        if req_id is not None:
            self.req_id = req_id
        else:
            # Outside a wsgi request (env is None) the id is 0,
            # otherwise a random 32bit tag for correlating log lines.
            self.req_id = 0 if env is None else randint(0x00000000, 0xFFFFFFFF)

    def error(self, **kwargs):
        """Build an Error aggregate bound to this request's path and id."""
        return Error(self.path, self.req_id, **kwargs)
class ObjectBase(Object):
    """Base for all configured objects: carries the shared Request and logger."""

    def __init__(self, req, log):
        Object.__init__(self)
        self.req = req
        self.log = log
class PlainAuthenticator(ObjectBase):
    """Authenticates clients against the database by the "client" (and
    optionally "secret") query arguments."""

    def __init__(self, req, log, db):
        ObjectBase.__init__(self, req, log)
        self.db = db

    def authenticate(self, env, args, hostnames=None, check_secret=True):
        name = args.get("client", [None])[0]
        secret = args.get("secret", [None])[0] if check_secret else None

        client = self.db.get_client_by_name(hostnames, name, secret)
        if not client:
            self.log.info("authenticate: client not found by name: \"%s\", secret: %s, hostnames: %s" % (
                name, secret, str(hostnames)))
            return None

        # Clients with 'secret' set must get authenticated by it.
        # No secret turns secret auth off for this particular client.
        if client.secret is not None and secret is None and check_secret:
            self.log.info("authenticate: missing secret argument")
            return None

        self.log.info("authenticate: %s" % str(client))

        # These args are not for handler
        args.pop("client", None)
        args.pop("secret", None)
        return client

    def authorize(self, env, client, path, method):
        # Each capability is checked against the corresponding client flag;
        # a method with none of the flags set yields None (not authorized).
        if method.debug:
            if not client.debug:
                self.log.info("authorize: failed, client does not have debug enabled")
                return None
            return client

        if method.read:
            if not client.read:
                self.log.info("authorize: failed, client does not have read enabled")
                return None
            return client

        if method.write:
            if not (client.write or client.test):
                self.log.info("authorize: failed, client is not allowed to write or test")
                return None
            return client
class X509Authenticator(PlainAuthenticator):
    """Authenticates clients by hostnames extracted from their X509 cert."""

    def get_cert_dns_names(self, pem):
        cert = M2Crypto.X509.load_cert_string(pem)

        subj = cert.get_subject()
        commons = [n.get_data().as_text() for n in subj.get_entries_by_nid(subj.nid["CN"])]

        try:
            extstrs = cert.get_ext("subjectAltName").get_value().split(",")
        except LookupError:
            extstrs = []  # FIX: certificate without subjectAltName left extstrs undefined
        extstrs = [val.strip() for val in extstrs]
        altnames = [val[4:] for val in extstrs if val.startswith("DNS:")]

        # bit of mangling to get rid of duplicates and leave commonname first
        firstcommon = commons[0]
        return [firstcommon] + list(set(altnames+commons) - set([firstcommon]))

    def is_verified_by_apache(self, env, args):
        # Allows correct work while SSLVerifyClient both "optional" and "required"
        verify = env.get("SSL_CLIENT_VERIFY")
        if verify == "SUCCESS":
            return True
        exception = self.req.error(
            message="authenticate: certificate verification failed",
            error=403, args=args, ssl_client_verify=verify, cert=env.get("SSL_CLIENT_CERT"))
        exception.log(self.log)
        return False

    def authenticate(self, env, args):
        if not self.is_verified_by_apache(env, args):
            return None

        try:  # FIX: the try header was lost in this copy; narrowed from a bare except
            cert_names = self.get_cert_dns_names(env["SSL_CLIENT_CERT"])
        except Exception:
            exception = self.req.error(
                message="authenticate: cannot get or parse certificate from env",
                error=403, exc=sys.exc_info(), env=env)
            exception.log(self.log)  # FIX: consistent with is_verified_by_apache
            return None

        return PlainAuthenticator.authenticate(self, env, args, hostnames=cert_names)
class X509NameAuthenticator(X509Authenticator):
    """Authenticates by matching the certificate CN against the client name
    (secret check disabled)."""

    def authenticate(self, env, args):
        if not self.is_verified_by_apache(env, args):
            return None

        try:
            cert_name = env["SSL_CLIENT_S_DN_CN"]
        except Exception:  # narrowed from bare except
            exception = self.req.error(
                message="authenticate: cannot get or parse certificate from env",
                error=403, exc=sys.exc_info(), env=env)
            exception.log(self.log)  # FIX: error object was built but never logged
            return None

        if cert_name != args.setdefault("client", [cert_name])[0]:
            exception = self.req.error(
                message="authenticate: client name does not correspond with certificate",
                error=403, cn=cert_name, args=args)
            exception.log(self.log)  # FIX: error object was built but never logged
            return None

        return PlainAuthenticator.authenticate(self, env, args, check_secret=False)

Pavel Kácha
committed
class X509MixMatchAuthenticator(X509Authenticator):
    """Chooses between certificate-CN and hostname based authentication
    depending on what the client sent in the query."""

    def __init__(self, req, log, db):
        PlainAuthenticator.__init__(self, req, log, db)
        self.hostname_auth = X509Authenticator(req, log, db)
        self.name_auth = X509NameAuthenticator(req, log, db)

    def authenticate(self, env, args):
        if not self.is_verified_by_apache(env, args):
            return None

        try:
            cert_name = env["SSL_CLIENT_S_DN_CN"]
        except Exception:  # narrowed from bare except
            exception = self.req.error(
                message="authenticate: cannot get or parse certificate from env",
                error=403, exc=sys.exc_info(), env=env)
            exception.log(self.log)  # FIX: error object was built but never logged
            return None

        name = args.get("client", [None])[0]
        # FIX: "secret" was referenced below but never fetched from args.
        secret = args.get("secret", [None])[0]

        # Client names are in reverse notation than DNS, client name should
        # thus never be the same as machine hostname (if it is, client
        # admin does something very amiss).

        # So, if client sends the same name in query as in the certificate,
        # or sends no name or secret (which is necessary for hostname auth),
        # use X509NameAuthenticator. Otherwise (names are different and there
        # is name and/or secret in query) use (hostname) X509Authenticator.
        if name == cert_name or (name is None and secret is None):
            auth = self.name_auth
        else:
            auth = self.hostname_auth

        self.log.info("MixMatch is choosing %s (name: %s, cert_name: %s)" % (type(auth).__name__, name, cert_name))

        return auth.authenticate(env, args)

Pavel Kácha
committed
class NoValidator(ObjectBase):
    """Validator that accepts any event without checking."""

    def __init__(self, req, log):
        ObjectBase.__init__(self, req, log)

    def check(self, event):
        # No schema: never report validation errors.
        return []
class JSONSchemaValidator(NoValidator):
    """Validates events against a JSON (Draft 4) schema file."""

    def __init__(self, req, log, filename=None):
        NoValidator.__init__(self, req, log)
        self.path = filename or path.join(path.dirname(__file__), "idea.schema")
        with io.open(self.path, "r", encoding="utf-8") as f:
            self.schema = json.load(f)  # FIX: schema load was lost in this copy; Draft4Validator needs it
        self.validator = Draft4Validator(self.schema)

    def check(self, event):
        def sortkey(k):
            """ Treat keys as lowercase, prefer keys with less path segments """
            # FIX: join the path *segments*; joining str(k.path) interleaved
            # "/" between individual characters (cf. the map(str, ...) below).
            return (len(k.path), "/".join(map(str, k.path)).lower())

        res = []
        for error in sorted(self.validator.iter_errors(event), key=sortkey):
            res.append(
                ErrorMessage(
                    460, "Validation error: key \"%s\", value \"%s\"" % (
                        "/".join(map(str, error.path)),
                        error.instance
                    ),
                    expected=error.schema.get('description', 'no additional info')
                )
            )
        return res  # FIX: result list was built but never returned (cf. NoValidator.check)
class UnsafeQueryContext:
    """ Context manager to be used within a transaction for partial rollbacks

        Meant to be used as:
        with self as db:
            with self.unsafe_query_context(db):
                res = db.query_one(...)
    """

    def __init__(self, db, silence_exc=False):
        self.db = db
        self.silence_exc = silence_exc

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_traceback):
        # Swallow only integrity/data errors, and only when asked to.
        if not self.silence_exc or exc_type is None:
            return False
        return issubclass(exc_type, (self.db.db.IntegrityError, self.db.db.DataError))

unsafe_query_context = UnsafeQueryContext
def __init__(
        self, req, log, host, user, password, dbname, port, retry_count,
        retry_pause, event_size_limit, catmap_filename, tagmap_filename):
    """ Common database layer setup: connection parameters, retry policy,
        event size limit and category/tag mapping files.
    """
    ObjectBase.__init__(self, req, log)
    self.host = host
    self.user = user
    self.password = password
    self.dbname = dbname
    self.port = port
    self.retry_count = retry_count
    self.retry_pause = retry_pause
    self.retry_attempt = 0
    self.event_size_limit = event_size_limit
    self.catmap_filename = catmap_filename
    self.tagmap_filename = tagmap_filename

    with io.open(catmap_filename, "r", encoding="utf-8") as catmap_fd:
        self.catmap = json.load(catmap_fd)
        self.catmap_other = self.catmap["Other"]  # Catch error soon, avoid lookup later

    with io.open(tagmap_filename, "r", encoding="utf-8") as tagmap_fd:
        self.tagmap = json.load(tagmap_fd)
        self.tagmap_other = self.tagmap["Other"]  # Catch error soon, avoid lookup later

    self.con = None

Pavel Kácha
committed
def connect(self):

Pavel Kácha
committed
def close(self):
    """Best-effort close of the connection; always drop the reference."""
    try:
        if self.con:
            self.con.close()
    except Exception:
        pass
    self.con = None

Pavel Kácha
committed

Pavel Kácha
committed
def repeat(self):
    """ Allows for graceful repeating of transactions self.retry_count
        times. Unsuccessful attempts wait for self.retry_pause until
        next attempt.

        Meant for usage with context manager:

        for attempt in self.repeat():
            with attempt as db:
                ...

        Note that it's not reentrant (as is not underlying MySQL
        connection), so avoid nesting on the same MySQL object.
    """
    self.retry_attempt = self.retry_count
    first = True
    while self.retry_attempt:
        if not first:
            # Back off before every retry (__exit__ decides whether to retry).
            sleep(self.retry_pause)
        first = False
        self.retry_attempt -= 1
        yield self
def __enter__(self):
    """ Context manager protocol. Guarantees that transaction will
        get either commited or rolled back in case of database
        exception. Can be used with self.repeat(), or alone as:

        with self as db:
            ...

        Note that it's not reentrant (as is not underlying MySQL
        connection), so avoid nesting on the same MySQL object.
    """
    if not self.retry_attempt:
        # Standalone usage (outside repeat()): no retries remain.
        self.retry_attempt = 0
    return self
def __exit__(self, exc_type, exc_val, exc_tb):
    """ Context manager protocol. If db exception is fired and
        self.retry_attempt is not zero, it is only logged and
        does not propagate, otherwise it propagates up. Also
        open transaction is rolled back.
        In case of no exception, transaction gets commited.
    """
    if exc_type is None:
        self.con.commit()
        self.retry_attempt = 0
    else:
        try:
            if self.con is not None:
                self.con.rollback()
        except self.db.Error:
            pass
        try:
            self.close()
        except self.db.Error:
            pass
        if self.retry_attempt > 0:
            self.log.info("Database error (%d attempts left): %s %s" %
                          (self.retry_attempt, exc_type.__name__, exc_val))
            return True  # suppress the exception; repeat() will retry
def _query(self, *args, **kwargs):
    """Execute a single query on a new cursor, connecting lazily."""
    if not self.con:
        self.connect()
    cursor = self.con.cursor()
    self.log.debug("execute: %s %s" % (args, kwargs))
    cursor.execute(*args, **kwargs)
    return cursor
def _query_multiple(self, query, params, ret, fetch):
    """Run queries pairwise with params; fetch() the ret-th cursor (-1 = last)."""
    res = None
    for idx, (sql, args) in enumerate(zip(query, params)):
        cur = self._query(sql, args)
        if idx == ret:
            res = fetch(cur)
    if ret == -1:  # fetch the result of the last query
        res = fetch(cur)
    return res

Pavel Kácha
committed
def execute(self, query, params, ret=None):
    """Execute the provided queries; discard the result"""
    self._query_multiple(query, params, None, None)

def query_all(self, query, params, ret=-1):
    """Execute the provided queries; return list of all rows as dicts of the ret-th query (0 based)"""
    return self._query_multiple(query, params, ret, lambda c: c.fetchall())

def query_one(self, query, params, ret=-1):
    """Execute the provided queries; return the first result of the ret-th query (0 based)"""
    return self._query_multiple(query, params, ret, lambda c: c.fetchone())

def query_rowcount(self, query, params, ret=-1):
    """Execute provided query; return the number of affected rows or the number of returned rows of the ret-th query (0 based)"""
    return self._query_multiple(query, params, ret, lambda c: c.rowcount)
def _get_comma_perc(self, l):
    """Return "%s,%s,..." — one placeholder per item (or per count when l is int)."""
    count = l if isinstance(l, int) else len(l)
    return ",".join(repeat("%s", count))

def _get_comma_perc_n(self, n, l):
    """Return "(%s,...), (%s,...)" — len(l) groups of n placeholders each."""
    group = "(%s)" % self._get_comma_perc(n)
    return ", ".join(repeat(group, len(l)))

def _get_not(self, b):
    """SQL negation helper: "" for truthy b, "NOT" otherwise."""
    return "" if b else "NOT"
def _build_get_client_by_name(self, cert_names, name, secret):
    """Build query and params for client lookup"""

def get_client_by_name(self, cert_names=None, name=None, secret=None):
    """Return the single Client matching the credentials, or None.

    More than one match indicates a configuration problem and is logged.
    """
    query, params, ret = self._build_get_client_by_name(cert_names, name, secret)
    for attempt in self.repeat():
        with attempt as db:
            rows = db.query_all(query, params, ret=ret)  # FIX: the fetch was lost in this copy; "rows" was undefined
            if len(rows) > 1:
                self.log.warning(
                    "get_client_by_name: query returned more than one result (cert_names = %s, name = %s, secret = %s): %s" %
                    (cert_names, name, secret, ", ".join([str(Client(**row)) for row in rows]))
                )
                return None
            return Client(**rows[0]) if rows else None
def _build_get_clients(self, id):
    """Build query and params for client lookup by id"""

def get_clients(self, id=None):
    # NOTE(review): the method header was lost in this copy; the name is
    # reconstructed from the self._build_get_clients() call — verify.
    """Return all clients (or the one with the given id) as Client tuples."""
    query, params, ret = self._build_get_clients(id)
    for attempt in self.repeat():
        with attempt as db:
            rows = db.query_all(query, params, ret=ret)
            return [Client(**row) for row in rows]
def _build_add_modify_client(self, id, **kwargs):
    """Build query and params for adding/modifying client"""

def add_modify_client(self, id=None, **kwargs):
    """Insert a new client (id is None) or update an existing one; return its id."""
    if id is not None and all(kwargs.get(attr, None) is None for attr in set(Client._fields) - {"id", "registered"}):
        # Existing client and nothing to change — no query needed.
        return id  # FIX: this early return was lost in this copy (the "if" had no body)
    query, params, ret = self._build_add_modify_client(id, **kwargs)
    for attempt in self.repeat():
        with attempt as db:
            res_id = db.query_one(query, params, ret=ret)["id"]
            newid = res_id if id is None else id
            return newid
def _build_get_debug_version(self):
    """Build query and params for fetching the backend version string."""
    pass

def _build_get_debug_tablestat(self):
    """Build query and params for fetching per-table status info."""
    pass

def get_debug(self):
    # NOTE(review): the method header was lost in this copy; the name is
    # reconstructed from the debug-info dict returned below — verify.
    """Return backend name, version and table status for debugging."""
    vquery, vparams, vret = self._build_get_debug_version()
    tquery, tparams, tret = self._build_get_debug_tablestat()
    for attempt in self.repeat():
        with attempt as db:
            return {
                "db": type(self).__name__,
                "version": db.query_one(vquery, vparams, vret)["version"],
                "tables": db.query_all(tquery, tparams, tret)
            }
def getMaps(self, section, variables):
    """Map symbolic category/tag names to their ids using the section dict."""
    maps = []
    for v in variables:
        try:
            mapped = section[v]
        except KeyError:
            # NOTE(review): the original keyword arguments of this error were
            # lost in this copy; 422 matches the other query-validation
            # errors in this module — verify against upstream.
            raise self.req.error(
                message="Wrong tag or category used in query.",
                error=422, key=v)
        maps.append(mapped)
    return set(maps)  # unique
def _build_fetch_events(
        self, client, id, count,
        cat, nocat, tag, notag, group, nogroup):
    """Build query and params for fetching events based on id, count and category, tag and group filters"""

def _load_event_json(self, data):
    """Return decoded json from data loaded from database, if unable to decode, return None"""
cat=None, nocat=None,
tag=None, notag=None,
group=None, nogroup=None):
if cat and nocat:
raise self.req.error(
message="Unrealizable conditions. Choose cat or nocat option.",
error=422, cat=cat, nocat=nocat)
if tag and notag:
raise self.req.error(
message="Unrealizable conditions. Choose tag or notag option.",
error=422, tag=tag, notag=notag)
if group and nogroup:
raise self.req.error(
message="Unrealizable conditions. Choose group or nogroup option.",
error=422, group=group, nogroup=nogroup)
query, params, ret = self._build_fetch_events(
client, id, count,
cat, nocat,
tag, notag,
group, nogroup
)

Pavel Kácha
committed
row = None
for attempt in self.repeat():
with attempt as db:
if row:
maxid = max(r['id'] for r in row)
else:
maxid = self.getLastEventId()

Pavel Kácha
committed
events = []
for r in row:
e = self._load_event_json(r["data"])
if e is None: # null cannot be valid event JSON

Pavel Kácha
committed
# Note that we use Error object just for proper formatting,
# but do not raise it; from client perspective invalid
# events get skipped silently.
err = self.req.error(
message="Unable to deserialize JSON event from db, id=%s" % r["id"],
error=500, exc=sys.exc_info(), id=r["id"])
"events": events
def _build_store_events_event(self, client, event, raw_event):
    """Build query and params for event insertion"""

def _build_store_events_categories(self, event_id, cat_ids):
    """Build query and params for insertion of event-categories mapping"""

def _build_store_events_tags(self, event_id, tag_ids):
    """Build query and params for insertion of event-tags mapping"""

def store_events(self, client, events, events_raw, events_indexes):
    """Store events plus their category/tag mappings.

    Returns (errors, stored): a list of ErrorMessage for events that could
    not be stored and the count of successfully inserted events (the shape
    matches the except-path return below).
    """
    try:
        for attempt in self.repeat():
            with attempt as db:
                errors = []
                stored = 0
                for event, raw_event, event_indx in zip(events, events_raw, events_indexes):
                    equery, eparams, eret = self._build_store_events_event(client, event, raw_event)
                    try:
                        # Partial rollback scope: a duplicate id must not
                        # abort the whole transaction.
                        with self.unsafe_query_context(db):
                            lastid = db.query_one(equery, eparams, ret=eret)["id"]
                    except self.db.IntegrityError:
                        exception = self.req.error(message="IDEA event with this ID already exists", error=400, exc=sys.exc_info(), env=self.req.env)
                        exception.log(self.log)
                        errors.append(ErrorMessage(409, "IDEA event with this ID already exists", events={event_indx}))
                        continue
                    stored += 1

                    # Store both exact categories and their top-level prefixes.
                    catlist = event.get('Category', ["Other"])
                    cats = set(catlist) | {cat.split(".", 1)[0] for cat in catlist}
                    cat_ids = [self.catmap.get(cat, self.catmap_other) for cat in cats]
                    cquery, cparams, _ = self._build_store_events_categories(lastid, cat_ids)
                    db.execute(cquery, cparams)

                    nodes = event.get('Node', [])
                    tags = {tag for node in nodes for tag in node.get('Type', [])}
                    if tags:
                        tag_ids = [self.tagmap.get(tag, self.tagmap_other) for tag in tags]
                        tquery, tparams, _ = self._build_store_events_tags(lastid, tag_ids)
                        db.execute(tquery, tparams)
                return errors, stored  # FIX: the result return was lost in this copy
    except Exception as e:
        exception = self.req.error(message="DB error", error=500, exc=sys.exc_info(), env=self.req.env)
        exception.log(self.log)  # FIX: consistent with per-event error handling above
        return [ErrorMessage(500, "DB error %s" % type(e).__name__)], 0
def _build_insert_last_received_id(self, client, id):
    """Build query and params for insertion of the last event id received by client"""

def insertLastReceivedId(self, client, id):
    # NOTE(review): the method header was lost in this copy; the name is
    # reconstructed from the debug log string below — verify.
    """Remember the last event id delivered to the given client."""
    self.log.debug("insertLastReceivedId: id %i for client %i(%s)" % (id, client.id, client.hostname))
    query, params, _ = self._build_insert_last_received_id(client, id)
    for attempt in self.repeat():
        with attempt as db:
            db.execute(query, params)  # FIX: query execution was lost in this copy
def _build_get_last_event_id(self):
    """Build query and params for querying the id of the last inserted event"""

def getLastEventId(self):
    # NOTE(review): the method header was lost in this copy; the name is
    # grounded by the self.getLastEventId() call in the fetch path — verify.
    """Return the id of the last inserted event, or 1 for an empty table."""
    query, params, ret = self._build_get_last_event_id()
    for attempt in self.repeat():
        with attempt as db:
            id_ = db.query_one(query, params, ret=ret)["id"]
            return id_ or 1
def _build_get_last_received_id(self, client):
    """Build query and params for querying the last event id received by client"""

def getLastReceivedId(self, client):
    # NOTE(review): the method header was lost in this copy; the name is
    # reconstructed from the debug log strings below — verify.
    """Return the last event id recorded for client; 1 if recorded as 0/None,
    None if the client has no record yet."""
    query, params, ret = self._build_get_last_received_id(client)
    for attempt in self.repeat():
        with attempt as db:
            res = db.query_one(query, params, ret=ret)
            if res is None:
                id = None
                self.log.debug("getLastReceivedId: probably first access, unable to get id for client %i(%s)" %
                               (client.id, client.hostname))
            else:
                id = res["id"] or 1
                self.log.debug("getLastReceivedId: id %i for client %i(%s)" %
                               (id, client.id, client.hostname))
            return id
def _build_load_maps_tags(self):
    """Build query and params for updating the tag map"""

def _build_load_maps_cats(self):
    """Build query and params for updating the category map"""

def load_maps(self):
    """Refresh the tag and category maps stored in the database."""
    tquery, tparams, _ = self._build_load_maps_tags()
    cquery, cparams, _ = self._build_load_maps_cats()
    with self as db:
        db.execute(tquery, tparams)
        db.execute(cquery, cparams)
def _build_purge_lastlog(self, days):
    """Build query and params for purging stored client last event mapping older than days"""

def purge_lastlog(self, days):
    # NOTE(review): the method header was lost in this copy; the name is
    # reconstructed from the builder name above — verify.
    """Purge last-event records older than the given number of days;
    return the number of affected rows."""
    query, params, ret = self._build_purge_lastlog(days)
    with self as db:
        return db.query_rowcount(query, params, ret=ret)
def _build_purge_events_get_id(self, days):
    """Build query and params to get largest event id of events older than days"""

def _build_purge_events_events(self, id_):
    """Build query and params to remove events older then days and their mappings"""

def purge_events(self, days):
    """Delete events older than the given number of days; return the
    number of affected rows."""
    iquery, iparams, iret = self._build_purge_events_get_id(days)
    with self as db:
        boundary_id = db.query_one(iquery, iparams, ret=iret)["id"]
        if boundary_id is None:
            # Nothing is old enough to purge.
            return 0
        equery, eparams, eret = self._build_purge_events_events(boundary_id)
        return db.query_rowcount(equery, eparams, ret=eret)
# Recreate DataBase through ABCMeta so it acts as an abstract base class
# (a Python-2/3 portable alternative to declaring a metaclass inline).
DataBase = abc.ABCMeta("DataBase", (DataBase,), {})
def __init__(
        self, req, log, host, user, password, dbname, port, retry_count,
        retry_pause, event_size_limit, catmap_filename, tagmap_filename):
    """MySQL backend constructor; imports MySQLdb lazily so the module
    is only required when this backend is actually configured."""
    # NOTE(review): super(DataBase, self) relies on the ABCMeta rebind of
    # DataBase above — verify the intended MRO against upstream.
    super(DataBase, self).__init__(req, log, host, user, password, dbname, port, retry_count,
                                   retry_pause, event_size_limit, catmap_filename, tagmap_filename)

    import MySQLdb as db
    import MySQLdb.cursors as mycursors

    self.db = db
    self.mycursors = mycursors
def connect(self):
    """Open a MySQL connection that returns rows as dictionaries."""
    conn_args = dict(
        host=self.host, user=self.user, passwd=self.password,
        db=self.dbname, port=self.port,
        cursorclass=self.mycursors.DictCursor)
    self.con = self.db.connect(**conn_args)
def _build_get_client_by_name(self, cert_names=None, name=None, secret=None):
    """Build query and params for client lookup"""
    conditions = ["SELECT * FROM clients WHERE valid = 1"]
    params = []
    if name:
        conditions.append(" AND name = %s")
        params.append(name.lower())
    if secret:
        conditions.append(" AND secret = %s")
        params.append(secret)
    if cert_names:
        conditions.append(" AND hostname IN (%s)" % self._get_comma_perc(cert_names))
        params.extend(hostname.lower() for hostname in cert_names)
    return ["".join(conditions)], [params], 0
def _build_get_clients(self, id):
    """Build query and params for client lookup by id"""
    params = []
    pieces = ["SELECT * FROM clients"]
    if id:
        pieces.append("WHERE id = %s")
        params.append(id)
    pieces.append("ORDER BY id")
    return [" ".join(pieces)], [params], 0
def _build_add_modify_client(self, id, **kwargs):
    """Build query and params for adding/modifying client"""
    params = []
    assignments = []
    if id is None:
        head = "INSERT INTO clients SET"
        assignments.append("registered = now()")
    else:
        head = "UPDATE clients SET"
    for attr in set(Client._fields) - set(["id", "registered"]):
        val = kwargs.get(attr, None)
        if val is not None:  # guaranteed at least one is not None
            if attr == "secret" and val == "":  # disable secret
                val = None
            assignments.append("`%s` = %%s" % attr)
            params.append(val)
    pieces = [head, ", ".join(assignments)]
    if id is not None:
        pieces.append("WHERE id = %s")
        params.append(id)
    # Second query fetches the (possibly autogenerated) id (ret=1).
    return (
        [" ".join(pieces), 'SELECT LAST_INSERT_ID() AS id'],
        [params, []],
        1
    )
def _build_get_debug_version(self):
    """Query for the MySQL server version string."""
    return ["SELECT VERSION() AS version"], [()], 0

def _build_get_debug_tablestat(self):
    """Query for status info of all tables in the schema."""
    return ["SHOW TABLE STATUS"], [()], 0
def _load_event_json(self, data):
    """Return decoded json from data loaded from database, if unable to decode, return None"""
    try:
        decoded = json.loads(data)
    except Exception:
        return None
    return decoded
def _build_fetch_events(
        self, client, id, count,
        cat, nocat, tag, notag, group, nogroup):
    """Assemble the event-fetch SQL from id/count plus category, tag and
    group include/exclude filters (only one side of each pair may be set)."""
    parts = ["SELECT e.id, e.data FROM clients c RIGHT JOIN events e ON c.id = e.client_id WHERE e.id > %s"]
    params = [id or 0]

    if cat or nocat:
        cats = self.getMaps(self.catmap, (cat or nocat))
        parts.append(
            " AND e.id %s IN (SELECT event_id FROM event_category_mapping WHERE category_id IN (%s))" %
            (self._get_not(cat), self._get_comma_perc(cats))
        )
        params.extend(cats)

    if tag or notag:
        tags = self.getMaps(self.tagmap, (tag or notag))
        parts.append(
            " AND e.id %s IN (SELECT event_id FROM event_tag_mapping WHERE tag_id IN (%s))" %
            (self._get_not(tag), self._get_comma_perc(tags))
        )
        params.extend(tags)

    if group or nogroup:
        subquery = []
        for name in (group or nogroup):
            escaped_name = name.replace('&', '&&').replace("_", "&_").replace("%", "&%")  # escape for LIKE
            subquery.append("c.name = %s")  # exact client
            params.append(name)
            subquery.append("c.name LIKE CONCAT(%s, '.%%') ESCAPE '&'")  # whole subtree
            params.append(escaped_name)
        parts.append(" AND %s (%s)" % (self._get_not(group), " OR ".join(subquery)))

    parts.append(" AND e.valid = 1 LIMIT %s")
    params.append(count)
    return ["".join(parts)], [params], 0
def _build_store_events_event(self, client, event, raw_event):
"""Build query and params for event insertion"""
return (
[
"INSERT INTO events (idea_id,received,client_id,data) VALUES (%s, NOW(), %s, %s)",
[(event["ID"], client.id, raw_event), ()],
1107
1108
1109
1110
1111
1112
1113
1114
1115
1116
1117
1118
1119
1120
1121
1122
1123
1124
1125
1126
1127
1128
1129
1130
1131
1132
1133
1134
1135
1136
1137
1138
1139
1140
1141
1142
1143
1144
1145
1146
1147
1148
1149
1150
1151
1152
1153
1154
1155
1156
1157
1158
1159
1160
1161
1162
1163
1164
1165
1166
1167
1168
1169
1170
1171
1172
1173
1174
1175
1176
1177
1178
1179
1180
1181
1182
1183
1184
1185
1186
1187
1188
1
)
def _build_store_events_categories(self, event_id, cat_ids):
"""Build query and params for insertion of event-categories mapping"""
return (
["INSERT INTO event_category_mapping (event_id,category_id) VALUES " +
self._get_comma_perc_n(2, cat_ids)],
[tuple(param for cat_id in cat_ids for param in (event_id, cat_id))],
None
)
def _build_store_events_tags(self, event_id, tag_ids):
"""Build query and params for insertion of event-tags mapping"""
return (
["INSERT INTO event_tag_mapping (event_id,tag_id) VALUES " +
self._get_comma_perc_n(2, tag_ids)],
[tuple(param for tag_id in tag_ids for param in (event_id, tag_id))],
None
)
def _build_insert_last_received_id(self, client, id):
"""Build query and params for insertion of the last event id received by client"""
return (
["INSERT INTO last_events(client_id, event_id, timestamp) VALUES(%s, %s, NOW())"],
[(client.id, id)],
None
)
def _build_get_last_event_id(self):
"""Build query and params for querying the id of the last inserted event"""
return ["SELECT MAX(id) as id FROM events"], [()], 0
def _build_get_last_received_id(self, client):
"""Build query and params for querying the last event id received by client"""
return (
["SELECT event_id as id FROM last_events WHERE client_id = %s ORDER BY last_events.id DESC LIMIT 1"],
[(client.id,)],
0
)
def _build_load_maps_tags(self):
"""Build query and params for updating the tag map"""
return (
[
"DELETE FROM tags",
"INSERT INTO tags(id, tag) VALUES " +
self._get_comma_perc_n(2, self.tagmap)
],
[
(),
tuple(param for tag, num in self.tagmap.items() for param in (num, tag))
],
None
)
def _build_load_maps_cats(self):
"""Build query and params for updating the catetgory map"""
params = []
for cat_subcat, num in self.catmap.items():
catsplit = cat_subcat.split(".", 1)
category = catsplit[0]
subcategory = catsplit[1] if len(catsplit) > 1 else None
params.extend((num, category, subcategory, cat_subcat))
return (
[
"DELETE FROM categories",
"INSERT INTO categories(id, category, subcategory, cat_subcat) VALUES " +
self._get_comma_perc_n(4, self.catmap)
],
[
(),
tuple(params)
],
None
)
def _build_purge_lastlog(self, days):
"""Build query and params for purging stored client last event mapping older than days"""
return (
[
"DELETE FROM last_events "
" USING last_events LEFT JOIN ("
" SELECT MAX(id) AS last FROM last_events"
" GROUP BY client_id"
" ) AS maxids ON last=id"
" WHERE timestamp < DATE_SUB(CURDATE(), INTERVAL %s DAY) AND last IS NULL",
],
[(days,)],
0
)
def _build_purge_events_get_id(self, days):
"""Build query and params to get largest event id of events older than days"""
return (
[
"SELECT MAX(id) as id"
" FROM events"
" WHERE received < DATE_SUB(CURDATE(), INTERVAL %s DAY)"
],
[(days,)],
0
)
def _build_purge_events_events(self, id_):
"""Build query and params to remove events older then days and their mappings"""
return (
[
"DELETE FROM event_category_mapping WHERE event_id <= %s",
"DELETE FROM event_tag_mapping WHERE event_id <= %s",
"DELETE FROM events WHERE id <= %s",
],
[(id_,), (id_,), (id_,)],
2
)
class PostgresUnsafeQueryContext(UnsafeQueryContext):
SAVEPOINT = 'context_savepoint'
def __enter__(self):
self.db.execute([self.db.ppgsql.SQL('SAVEPOINT "context_savepoint"')], [()])
return self
def __exit__(self, exc_type, exc_value, exc_traceback):
if exc_type is not None:
self.db.execute([self.db.ppgsql.SQL('ROLLBACK TO SAVEPOINT "context_savepoint"')], [()])
return self.silence_exc and exc_type is not None \
and issubclass(exc_type, (self.db.db.IntegrityError, self.db.db.DataError))
unsafe_query_context = PostgresUnsafeQueryContext
    def __init__(
            self, req, log, host, user, password, dbname, port, retry_count,
            retry_pause, event_size_limit, catmap_filename, tagmap_filename):
        """Initialize the PostgreSQL backend; actual connection happens in connect().

        NOTE(review): super() is invoked with DataBase explicitly, which in the
        MRO skips DataBase's own position — confirm against the class hierarchy
        defined earlier in this file.
        """
        super(DataBase, self).__init__(req, log, host, user, password, dbname, port, retry_count,
            retry_pause, event_size_limit, catmap_filename, tagmap_filename)
        # psycopg2 is imported lazily so the module loads without it when the
        # MySQL backend is configured instead
        import psycopg2 as db
        from psycopg2 import sql as ppgsql
        import psycopg2.extras as ppgextra
        self.db = db
        self.ppgsql = ppgsql
        self.ppgextra = ppgextra
def connect(self):
self.con = self.db.connect(
host=self.host, user=self.user, password=self.password,
dbname=self.dbname, port=self.port, cursor_factory=self.ppgextra.RealDictCursor)
def _build_get_client_by_name(self, cert_names=None, name=None, secret=None):
"""Build query and params for client lookup"""
Jakub Maloštík
committed
query = ["SELECT * FROM clients WHERE valid"]
1267
1268
1269
1270
1271
1272
1273
1274
1275
1276
1277
1278
1279
1280
1281
1282
1283
1284
1285
1286
1287
1288
1289
1290
1291
1292
1293
1294
1295
1296
1297
1298
1299
1300
1301
1302
1303
1304
1305
1306
1307
1308
1309
1310
1311
1312
1313
1314
1315
1316
1317
1318
1319
1320
1321
1322
1323
1324
1325
1326
1327
1328
1329
1330
1331
1332
1333
1334
1335
1336
1337
1338
1339
1340
1341
1342
1343
1344
1345
1346
1347
1348
1349
1350
1351
1352
1353
1354
1355
1356
1357
1358
1359
1360
1361
1362
1363
1364
1365
1366
1367
1368
1369
1370
1371
1372
1373
1374
1375
1376
1377
1378
1379
1380
1381
1382
1383
1384
1385
1386
1387
1388
1389
1390
1391
1392
1393
1394
1395
1396
1397
1398
1399
1400
1401
1402
1403
1404
1405
1406
1407
1408
1409
1410
1411
1412
1413
1414
1415
params = []
if name:
query.append(" AND name = %s")
params.append(name.lower())
if secret:
query.append(" AND secret = %s")
params.append(secret)
if cert_names:
query.append(" AND hostname IN (%s)" % self._get_comma_perc(cert_names))
params.extend(n.lower() for n in cert_names)
return ["".join(query)], [params], 0
def _build_get_clients(self, id):
"""Build query and params for client lookup by id"""
query = ["SELECT * FROM clients"]
params = []
if id:
query.append("WHERE id = %s")
params.append(id)
query.append("ORDER BY id")
return [" ".join(query)], [params], 0
    def _build_add_modify_client(self, id, **kwargs):
        """Build query and params for adding/modifying client

        id: client id to modify, or None to insert a new client.
        kwargs: candidate column values; None values are ignored.
        Returns (queries, params, result-index) like the other builders.

        NOTE(review): zip(*generator) raises ValueError when no usable kwarg
        survives and id is None — callers presumably guarantee at least one
        value; confirm against the MySQL counterpart's comment.
        """
        # Updatable columns; id and registered are managed by the server itself
        fields = set(Client._fields) - {"id", "registered"}
        # Split the surviving (column, value) pairs into two parallel lists;
        # an empty-string secret is stored as NULL to disable it
        cols, params = map(
            list,
            zip(
                *(
                    (k, None)  # disable secret
                    if k == "secret" and v == "" else
                    (k, v)
                    for k, v in kwargs.items()
                    if v is not None and k in fields
                )
            )
        )
        if id is None:
            # INSERT: identifiers and placeholders composed via psycopg2.sql
            # to keep column names properly quoted
            query = self.ppgsql.SQL('INSERT INTO clients ("registered", {}) VALUES (NOW(), {}) RETURNING id').format(
                self.ppgsql.SQL(", ").join(map(self.ppgsql.Identifier, cols)),
                self.ppgsql.SQL(", ").join(self.ppgsql.Placeholder() * len(cols))
            )
        elif not cols:
            # Nothing to change: echo the id back so callers get a result row
            return ["SELECT %s AS id"], [(id,)], 0
        else:
            # UPDATE: one "col = %s" assignment per surviving column
            query = self.ppgsql.SQL("UPDATE clients SET {} WHERE id = {} RETURNING id").format(
                self.ppgsql.SQL(", ").join(
                    self.ppgsql.SQL("{} = {}").format(
                        self.ppgsql.Identifier(col),
                        self.ppgsql.Placeholder()
                    ) for col in cols
                ),
                self.ppgsql.Placeholder()
            )
            params.append(id)
        return [query], [params], 0
def _build_get_debug_version(self):
return ["SELECT setting AS version FROM pg_settings WHERE name = 'server_version'"], [()], 0
    def _build_get_debug_tablestat(self):
        """Build query and params for per-table diagnostics (name, columns,
        rows, table/index sizes and collations) of the current user's public
        tables; parameters are the database name and the table owner."""
        # One long implicitly-concatenated SQL literal; kept verbatim.
        return [
            "SELECT "
            'tablename AS "Name", '
            'relnatts AS "Columns", '
            'n_live_tup AS "Rows", '
            'pg_catalog.pg_size_pretty(pg_catalog.pg_table_size(oid)) AS "Table_size", '
            'pg_catalog.pg_size_pretty(pg_catalog.pg_indexes_size(oid)) AS "Index_size", '
            'coll.collations AS "Collations" '
            "FROM "
            "pg_catalog.pg_tables tbls "
            "LEFT OUTER JOIN pg_catalog.pg_class cls "
            "ON tbls.tablename=cls.relname "
            "LEFT OUTER JOIN pg_catalog.pg_stat_user_tables sut "
            "ON tbls.tablename=sut.relname "
            "LEFT OUTER JOIN ("
            "SELECT "
            "table_name, "
            "string_agg("
            "DISTINCT COALESCE("
            "collation_name, "
            "("
            "SELECT "
            "datcollate "
            "FROM "
            "pg_catalog.pg_database "
            "WHERE "
            "datname=%s"
            ")"
            "), "
            "','"
            ") AS collations "
            "FROM "
            "information_schema.columns "
            "GROUP BY "
            "table_name"
            ") coll "
            "ON tbls.tablename=coll.table_name "
            "WHERE "
            "tbls.schemaname='public' "
            "AND tbls.tableowner=%s"
        ], [(self.dbname, self.user)], 0
def _load_event_json(self, data):
"""Return decoded json from data loaded from database, if unable to decode, return None"""
try:
return json.loads(data.tobytes())
except Exception:
return None
def _build_fetch_events(
self, client, id, count,
cat, nocat, tag, notag, group, nogroup):
query = ["SELECT e.id, e.data FROM clients c RIGHT JOIN events e ON c.id = e.client_id WHERE e.id > %s"]
params = [id or 0]
if cat or nocat:
cats = self.getMaps(self.catmap, (cat or nocat))
query.append(
" AND e.id %s IN (SELECT event_id FROM event_category_mapping WHERE category_id IN (%s))" %
(self._get_not(cat), self._get_comma_perc(cats))
)
params.extend(cats)
if tag or notag:
tags = self.getMaps(self.tagmap, (tag or notag))
query.append(
" AND e.id %s IN (SELECT event_id FROM event_tag_mapping WHERE tag_id IN (%s))" %
(self._get_not(tag), self._get_comma_perc(tags))
)
params.extend(tags)
if group or nogroup:
subquery = []
for name in group or nogroup:
name = name.lower() # assumes only lowercase names
escaped_name = name.replace('&', '&&').replace("_", "&_").replace("%", "&%") # escape for LIKE
subquery.append("c.name = %s") # exact client
params.append(name)
subquery.append("c.name LIKE %s || '.%%' ESCAPE '&'") # whole subtree
params.append(escaped_name)
query.append(" AND %s (%s)" % (self._get_not(group), " OR ".join(subquery)))
Jakub Maloštík
committed
query.append(" AND e.valid LIMIT %s")
params.append(count)
return ["".join(query)], [params], 0
def _build_store_events_event(self, client, event, raw_event):
"""Build query and params for event insertion"""
return (
["INSERT INTO events (idea_id,received,client_id,data) VALUES (%s, NOW(), %s, %s) RETURNING id"],
[(event["ID"], client.id, self.db.Binary(raw_event.encode('utf8')))],
1426
1427
1428
1429
1430
1431
1432
1433
1434
1435
1436
1437
1438
1439
1440
1441
1442
1443
1444
1445
1446
1447
1448
1449
1450
1451
1452
1453
1454
1455
1456
1457
1458
1459
1460
1461
1462
1463
1464
1465
1466
1467
1468
1469
1470
1471
1472
1473
1474
1475
1476
1477
1478
1479
1480
1481
1482
1483
1484
1485
1486
1487
1488
1489
1490
1491
1492
1493
1494
1495
1496
1497
1498
1499
1500
1501
1502
1503
1504
1505
1506
1507
1508
1509
1510
1511
1512
1513
1514
1515
1516
1517
1518
1519
1520
1521
1522
1523
1524
1525
1526
1527
1528
1529
1530
1531
1532
1533
0
)
def _build_store_events_categories(self, event_id, cat_ids):
"""Build query and params for insertion of event-categories mapping"""
return (
["INSERT INTO event_category_mapping (event_id,category_id) VALUES " +
self._get_comma_perc_n(2, cat_ids)],
[tuple(param for cat_id in cat_ids for param in (event_id, cat_id))],
None
)
def _build_store_events_tags(self, event_id, tag_ids):
"""Build query and params for insertion of event-tags mapping"""
return (
["INSERT INTO event_tag_mapping (event_id,tag_id) VALUES " +
self._get_comma_perc_n(2, tag_ids)],
[tuple(param for tag_id in tag_ids for param in (event_id, tag_id))],
None
)
def _build_insert_last_received_id(self, client, id):
"""Build query and params for insertion of the last event id received by client"""
return (
["INSERT INTO last_events(client_id, event_id, timestamp) VALUES(%s, %s, NOW())"],
[(client.id, None if id == 1 else id)],
None
)
def _build_get_last_event_id(self):
"""Build query and params for querying the id of the last inserted event"""
return ["SELECT MAX(id) as id FROM events"], [()], 0
def _build_get_last_received_id(self, client):
"""Build query and params for querying the last event id received by client"""
return (
["SELECT event_id as id FROM last_events WHERE client_id = %s ORDER BY last_events.id DESC LIMIT 1"],
[(client.id,)],
0
)
def _build_load_maps_tags(self):
"""Build query and params for updating the tag map"""
return (
[
"ALTER TABLE event_tag_mapping DROP CONSTRAINT event_tag_mapping_tag_id_fk",
"DELETE FROM tags",
"INSERT INTO tags(id, tag) VALUES " +
self._get_comma_perc_n(2, self.tagmap),
'ALTER TABLE event_tag_mapping ADD CONSTRAINT "event_tag_mapping_tag_id_fk" FOREIGN KEY ("tag_id") REFERENCES "tags" ("id")'
],
[(), (), tuple(param for tag, num in self.tagmap.items() for param in (num, tag)), ()],
None
)
def _build_load_maps_cats(self):
"""Build query and params for updating the catetgory map"""
params = []
for cat_subcat, num in self.catmap.items():
catsplit = cat_subcat.split(".", 1)
category = catsplit[0]
subcategory = catsplit[1] if len(catsplit) > 1 else None
params.extend((num, category, subcategory, cat_subcat))
return (
[
"ALTER TABLE event_category_mapping DROP CONSTRAINT event_category_mapping_category_id_fk",
"DELETE FROM categories",
"INSERT INTO categories(id, category, subcategory, cat_subcat) VALUES " +
self._get_comma_perc_n(4, self.catmap),
'ALTER TABLE event_category_mapping ADD CONSTRAINT "event_category_mapping_category_id_fk" FOREIGN KEY ("category_id") REFERENCES "categories" ("id")'
],
[(), (), tuple(params), ()],
None
)
def _build_purge_lastlog(self, days):
"""Build query and params for purging stored client last event mapping older than days"""
return (
[
"DELETE FROM last_events "
" USING last_events le LEFT JOIN ("
" SELECT MAX(id) AS last FROM last_events"
" GROUP BY client_id"
" ) AS maxids ON maxids.last=le.id"
" WHERE le.timestamp < CURRENT_DATE - INTERVAL %s DAY AND maxids.last IS NULL"
],
[(str(days),)],
0
)
def _build_purge_events_get_id(self, days):
"""Build query and params to get largest event id of events older than days"""
return (
[
"SELECT MAX(id) as id"
" FROM events"
" WHERE received < CURRENT_DATE - INTERVAL %s DAY"
],
[(str(days),)],
0
)
def _build_purge_events_events(self, id_):
"""Build query and params to remove events older then days and their mappings"""
return ["DELETE FROM events WHERE id <= %s"], [(id_,)], 0
Jakub Maloštík
committed
def expose(read=True, write=False, debug=False):
    """Decorator factory: mark a handler method as an exposed API endpoint
    with the given read/write/debug authorization flags.

    NOTE(review): the 'arguments' fallback line was corrupted in the source
    listing and has been reconstructed using get_method_params().
    """
    def expose_deco(meth):
        meth.exposed = True
        meth.read = read
        meth.write = write
        meth.debug = debug
        if not hasattr(meth, "arguments"):
            meth.arguments = get_method_params(meth)
        return meth
    return expose_deco
class Server(ObjectBase):

    def __init__(self, req, log, auth, handler):
        """WSGI front object: authenticates requests and dispatches them to
        exposed methods of `handler` (a WardenHandler instance)."""
        ObjectBase.__init__(self, req, log)
        self.auth = auth
        self.handler = handler
def sanitize_args(self, path, func, args, exclude=["self", "post"]):
# silently remove internal args, these should never be used
# but if somebody does, we do not expose them by error message
intargs = set(args).intersection(exclude)
for a in intargs:
del args[a]
if intargs:
self.log.info("sanitize_args: Called with internal args: %s" % ", ".join(intargs))
# silently remove surplus arguments - potential forward
# compatibility (unknown args will get ignored)
badargs = set(args) - set(func.arguments)
for a in badargs:
del args[a]
if badargs:
self.log.info("sanitize_args: Called with superfluous args: %s" % ", ".join(badargs))
return args
def wsgi_app(self, environ, start_response, exc_info=None):
path = environ.get("PATH_INFO", "").lstrip("/")
self.req.reset(env=environ, path=path)
output = ""
status = "200 OK"
headers = [('Content-type', 'application/json')]
exception = None
try:
try:
method = getattr(self.handler, path)
method.exposed # dummy access to trigger AttributeError
except Exception:
raise self.req.error(message="You've fallen off the cliff.", error=404)

Pavel Kácha
committed
self.req.args = args = parse_qs(environ.get('QUERY_STRING', ""))
self.req.client = client = self.auth.authenticate(environ, args)

Pavel Kácha
committed
raise self.req.error(message="I'm watching. Authenticate.", error=403)

Pavel Kácha
committed
auth = self.auth.authorize(self.req.env, self.req.client, self.req.path, method)
if not auth:
raise self.req.error(message="I'm watching. Not authorized.", error=403, client=client.name)

Pavel Kácha
committed
args = self.sanitize_args(path, method, args)
# Based on RFC2616, section 4.4 we SHOULD respond with 400 (bad request) or 411
# (length required) if content length was not specified. We choose not to, to
# preserve compatibility with clients deployed in the wild, which use POST for
# all requests (even those without payload, with no specified content length).
# According to PEP3333, section "Input and Error Streams", the application SHOULD
Radko Krkoš
committed
# NOT attempt to read more data than specified by CONTENT_LENGTH. As stated in
# section "environ Variables", CONTENT_LENGTH may be empty (string) or absent.
Radko Krkoš
committed
content_length = int(environ.get('CONTENT_LENGTH', 0))
except ValueError:
content_length = 0
try:
post_data = environ['wsgi.input'].read(content_length)
except:
raise self.req.error(message="Data read error.", error=408, exc=sys.exc_info())
headers, output = method(post_data, **args)
except Error as e:
exception = e
except Exception as e:

Pavel Kácha
committed
exception = self.req.error(message="Server exception", error=500, exc=sys.exc_info())

Pavel Kácha
committed
status = "%d %s" % exception.get_http_err_msg()
output = json.dumps(exception, cls=Encoder)

Pavel Kácha
committed
# Make sure everything is properly encoded - JSON and various function
# may spit out unicode instead of str and it gets propagated up (str
# + unicode = unicode).
# For Python2 the right thing would be to be unicode correct among whole
# source and always decode on input (json module does that for us) and
# on output here.
# For Python3 strings are internally unicode so no decoding on input is
# necessary. For output, "status" must be unicode string, "output" must
# be encoded bytes array, what is done here. Important: for Python 3 we
# define: unicode = str
if isinstance(status, unicode) and sys.version_info[0] < 3:

Pavel Kácha
committed
status = status.encode("utf-8")
if isinstance(output, unicode):
output = output.encode("utf-8")
headers.append(('Content-Length', str(len(output))))
start_response(status, headers)
self.req.reset()
return [output]
__call__ = wsgi_app
def json_wrapper(method):
    """Decorator: deserialize the JSON POST body into an 'events' argument
    (when the wrapped method accepts one) and serialize the result to JSON.

    NOTE(review): the json.dumps serialization line was corrupted in the
    source listing and has been reconstructed.
    """
    def meth_deco(self, post, **args):
        if "events" in get_method_params(method):
            try:
                events = json.loads(post.decode('utf-8')) if post else None
            except Exception as e:
                raise self.req.error(
                    message="Deserialization error.", error=400,
                    exc=sys.exc_info(), args=post, parser=str(e))
            if events:
                args["events"] = events

        result = method(self, **args)  # call requested method

        try:
            output = json.dumps(result, cls=Encoder)
        except Exception as e:
            raise self.req.error(message="Serialization error", error=500, exc=sys.exc_info(), args=str(result))
        return [('Content-type', 'application/json')], output

    try:
        meth_deco.arguments = method.arguments
    except AttributeError:
        meth_deco.arguments = get_method_params(method)
    return meth_deco
class WardenHandler(ObjectBase):

    def __init__(
            self, req, log, validator, db, auth,
            send_events_limit=500, get_events_limit=1000,
            description=None):
        """API handler exposing the Warden methods (getEvents, sendEvents, ...).

        NOTE(review): the def line and part of the body were corrupted in the
        source listing; the signature is reconstructed from param_def (which
        configures req, log, validator, db, auth, limits and description) —
        confirm parameter order against upstream.
        """
        ObjectBase.__init__(self, req, log)
        self.auth = auth  # NOTE(review): reconstructed assignment - confirm
        self.db = db
        self.validator = validator
        self.send_events_limit = send_events_limit
        self.get_events_limit = get_events_limit
        self.description = description
@expose(read=True, debug=True)
@json_wrapper
def getDebug(self):
"environment": self.req.env,
"client": self.req.client._asdict(),
"database": self.db.get_debug(),
"system": {
"uname": os.uname()
},
"process": {
"cwd": unicode(os.getcwd()),
"pid": os.getpid(),
"ppid": os.getppid(),
"pgrp": os.getpgrp(),
"uid": os.getuid(),
"gid": os.getgid(),
"euid": os.geteuid(),
"egid": os.getegid(),
"groups": os.getgroups()
}
Jakub Maloštík
committed
@expose(read=True)
@json_wrapper
def getInfo(self):
info = {
"version": VERSION,
"send_events_limit": self.send_events_limit,
"get_events_limit": self.get_events_limit
}
if self.description:
info["description"] = self.description
return info
Jakub Maloštík
committed
@expose(read=True)
@json_wrapper
cat=None, nocat=None,
tag=None, notag=None,
group=None, nogroup=None):
try:
id = int(id[0])
except (ValueError, TypeError, IndexError):

Pavel Kácha
committed
# If client was already here, fetch server notion of his last id
id = self.db.getLastReceivedId(self.req.client)
except Exception as e:
self.log.info("cannot getLastReceivedId - " + type(e).__name__ + ": " + str(e))

Pavel Kácha
committed
# First access, remember the guy and get him last id
id = self.db.getLastEventId()
self.db.insertLastReceivedId(self.req.client, id)
return {
"lastid": id,
"events": []
}

Pavel Kácha
committed
# Client wants to get only last N events and reset server notion of last id
id += self.db.getLastEventId()

Pavel Kácha
committed
count = int(count[0])
except (ValueError, TypeError, IndexError):
count = self.get_events_limit
if self.get_events_limit:
count = min(count, self.get_events_limit)
res = self.db.fetch_events(self.req.client, id, count, cat, nocat, tag, notag, group, nogroup)
self.db.insertLastReceivedId(self.req.client, res['lastid'])
self.log.info("sending %d events, lastid is %i" % (len(res["events"]), res["lastid"]))
def check_node(self, event, event_indx, name):

Pavel Kácha
committed
try:
ev_id = event['Node'][0]['Name'].lower()

Pavel Kácha
committed
# Event does not bear valid Node attribute
return [
ErrorMessage(422, "Event does not bear valid Node attribute", {event_indx})
]
if ev_id != name:
return [
ErrorMessage(422, "Node does not correspond with saving client", {event_indx})
]

Pavel Kácha
committed
return []
1796
1797
1798
1799
1800
1801
1802
1803
1804
1805
1806
1807
1808
1809
1810
1811
1812
1813
1814
1815
1816
def check_idea_id(self, event, event_indx):
id_length_limit = 64
try:
id_ = event["ID"]
except (KeyError, TypeError, ValueError):
return [ErrorMessage(422, "Missing IDEA ID", {event_indx})]
if not isinstance(id_, unicode) or len(id_) == 0:
return [ErrorMessage(422, "The provided IDEA ID is invalid", {event_indx})]
errors = []
if len(id_) > id_length_limit:
errors.append(
ErrorMessage(
422, "The provided event ID is too long",
{event_indx}, id_length_limit=id_length_limit
)
)
if '\x00' in id_:
errors.append(ErrorMessage(422, "IDEA ID cannot contain null bytes", {event_indx}))
return errors
def add_errors(self, errs_to_add):
for err in errs_to_add:
self.errs.setdefault((err.error, err.message, err.unique_id), err).events.update(err.events)

Pavel Kácha
committed
Jakub Maloštík
committed
@expose(write=True)
@json_wrapper
def sendEvents(self, events=[]):
if not isinstance(events, list):

Pavel Kácha
committed
raise self.req.error(message="List of events expected.", error=400)
self.add_errors(
[
ErrorMessage(
507, "Too many events in one batch.",
set(range(self.send_events_limit, len(events))),
send_events_limit=self.send_events_limit
)
]
)
events_tosend = []
events_raw = []
events_nums = []

Pavel Kácha
committed
for i, event in enumerate(events[0:self.send_events_limit]):
v_errs = self.validator.check(event)
if v_errs:
self.add_errors(v_errs)
continue
idea_id_errs = self.check_idea_id(event, i)
if idea_id_errs:
self.add_errors(idea_id_errs)
continue
node_errs = self.check_node(event, i, self.req.client.name)

Pavel Kácha
committed
if node_errs:
self.add_errors(node_errs)

Pavel Kácha
committed
continue
if self.req.client.test and 'Test' not in event.get('Category', []):
self.add_errors(
[
ErrorMessage(
422, "You're allowed to send only messages containing \"Test\" among categories.", {i},
# Ensure that 1the error message is contained for every combination of categories
unique_id=tuple(event.get('Category', [])),
categories=event.get('Category', [])
)
]
)

Pavel Kácha
committed
raw_event = json.dumps(event)
if len(raw_event) >= self.db.event_size_limit:
self.add_errors(
[
ErrorMessage(
413, "Event too long (>%i B)" % self.db.event_size_limit, {i},
event_size_limit = self.db.event_size_limit
)
]
)

Pavel Kácha
committed
continue
events_tosend.append(event)
events_raw.append(raw_event)
events_nums.append(i)
db_errs, saved = self.db.store_events(self.req.client, events_tosend, events_raw, events_nums)
self.add_errors(db_errs)
self.log.info("Saved %i events" % saved)
if self.errs:
raise self.req.error(errors=self.errs.values())

Pavel Kácha
committed

Pavel Kácha
committed
return {"saved": saved}
def read_ini(path):
    """Read an INI config file into {section_lowercased: {option: value}}.

    Raises Error when the file cannot be read.
    Fix: the per-section dict was re-created inside the option loop, so only
    the LAST option of each section survived; it is now created once per
    section, preserving all options.
    """
    c = ConfigParser.RawConfigParser()
    res = c.read(path)
    if not res:
        # We don't have logging yet, hopefully this will go into webserver log
        raise Error(message="Unable to read config: %s" % path)
    data = {}
    for sect in c.sections():
        lsect = sect.lower()
        data[lsect] = {}
        for opts in c.options(sect):
            data[lsect][opts] = c.get(sect, opts)
    return data
def read_cfg(path):
    """Read a JSON config file, ignoring lines starting with '#' or '//',
    and lowercase all section and option keys.

    NOTE(review): the head of the key-lowercasing expression was corrupted
    in the source listing and has been reconstructed around the visible
    inner generator.
    """
    with io.open(path, "r", encoding="utf-8") as f:
        stripcomments = "\n".join((l for l in f if not l.lstrip().startswith(("#", "//"))))
        conf = json.loads(stripcomments)

    # Lowercase keys
    conf = dict(
        (sect.lower(), dict(
            (subkey.lower(), val) for subkey, val in subsect.items())
        ) for sect, subsect in conf.items())

    return conf
def fallback_wsgi(environ, start_response, exc_info=None):
    """Minimal WSGI app used when the real server failed to initialize:
    always answers 503 with a Warden-style JSON error body.

    NOTE(review): the argument tuple of the output format string was
    corrupted in the source listing and has been reconstructed.
    """
    # If server does not start, set up simple server, returning
    # Warden JSON compliant error message
    error = 503
    message = "Server not running due to initialization error"
    headers = [('Content-type', 'application/json')]
    logline = "Error(%d): %s" % (error, message)
    status = "%d %s" % (error, message)
    output = '{"errors": [{"error": %d, "message": "%s"}]}' % (error, message)

    logging.getLogger(__name__).critical(logline)
    start_response(status, headers)
    return [output]

Pavel Kácha
committed
# Order in which the base objects must get initialized
section_order = ("log", "db", "auth", "validator", "handler", "server")

# List of sections and objects, configured by them
# First object in each object list is the default one, otherwise
# "type" keyword in section may be used to choose other
section_def = {
    "log": [FileLogger, SysLogger],
    # NOTE(review): "db" entry reconstructed (source was corrupted here);
    # both backends appear in section_order and param_def, MySQL first as
    # the default - confirm order against upstream.
    "db": [MySQL, PostgreSQL],
    "auth": [X509NameAuthenticator, PlainAuthenticator, X509Authenticator, X509MixMatchAuthenticator],
    "validator": [JSONSchemaValidator, NoValidator],
    "handler": [WardenHandler],
    "server": [Server]
}
# Object parameter conversions and defaults
# Each entry maps a constructible class to its config keys: the declared
# "type" selects the conversion/validation function, "default" is used when
# the key is absent from the config section.
# NOTE(review): scraping artifacts were removed from this literal; no key
# appears to be missing, but compare with upstream to be sure.
param_def = {
    FileLogger: {
        "req": {"type": "obj", "default": "req"},
        "filename": {"type": "filepath", "default": path.join(path.dirname(__file__), path.splitext(path.split(__file__)[1])[0] + ".log")},
        "level": {"type": "loglevel", "default": "info"},
    },
    SysLogger: {
        "req": {"type": "obj", "default": "req"},
        "socket": {"type": "filepath", "default": "/dev/log"},
        "facility": {"type": "facility", "default": "daemon"},
        "level": {"type": "loglevel", "default": "info"}
    },
    PlainAuthenticator: {
        "req": {"type": "obj", "default": "req"},
        "log": {"type": "obj", "default": "log"},
        "db": {"type": "obj", "default": "db"}
    },
    X509Authenticator: {
        "req": {"type": "obj", "default": "req"},
        "log": {"type": "obj", "default": "log"},
        "db": {"type": "obj", "default": "db"}
    },
    X509NameAuthenticator: {
        "req": {"type": "obj", "default": "req"},
        "log": {"type": "obj", "default": "log"},
        "db": {"type": "obj", "default": "db"}
    },
    X509MixMatchAuthenticator: {
        "req": {"type": "obj", "default": "req"},
        "log": {"type": "obj", "default": "log"},
        "db": {"type": "obj", "default": "db"}
    },
    NoValidator: {
        "req": {"type": "obj", "default": "req"},
        "log": {"type": "obj", "default": "log"},
    },
    JSONSchemaValidator: {
        "req": {"type": "obj", "default": "req"},
        "log": {"type": "obj", "default": "log"},
        "filename": {"type": "filepath", "default": path.join(path.dirname(__file__), "idea.schema")}
    },
    MySQL: {
        "req": {"type": "obj", "default": "req"},
        "log": {"type": "obj", "default": "log"},
        "host": {"type": "str", "default": "localhost"},
        "user": {"type": "str", "default": "warden"},
        "password": {"type": "str", "default": ""},
        "dbname": {"type": "str", "default": "warden3"},
        "port": {"type": "natural", "default": 3306},
        "retry_pause": {"type": "natural", "default": 3},
        "retry_count": {"type": "natural", "default": 3},
        "event_size_limit": {"type": "natural", "default": 5*1024*1024},
        "catmap_filename": {"type": "filepath", "default": path.join(path.dirname(__file__), "catmap_db.json")},
        "tagmap_filename": {"type": "filepath", "default": path.join(path.dirname(__file__), "tagmap_db.json")}
    },
    PostgreSQL: {
        "req": {"type": "obj", "default": "req"},
        "log": {"type": "obj", "default": "log"},
        "host": {"type": "str", "default": "localhost"},
        "user": {"type": "str", "default": "warden"},
        "password": {"type": "str", "default": ""},
        "dbname": {"type": "str", "default": "warden3"},
        "port": {"type": "natural", "default": 5432},
        "retry_pause": {"type": "natural", "default": 3},
        "retry_count": {"type": "natural", "default": 3},
        "event_size_limit": {"type": "natural", "default": 5*1024*1024},
        "catmap_filename": {"type": "filepath", "default": path.join(path.dirname(__file__), "catmap_db.json")},
        "tagmap_filename": {"type": "filepath", "default": path.join(path.dirname(__file__), "tagmap_db.json")}
    },
    WardenHandler: {
        "req": {"type": "obj", "default": "req"},
        "log": {"type": "obj", "default": "log"},
        "validator": {"type": "obj", "default": "validator"},
        "db": {"type": "obj", "default": "DB"},
        "auth": {"type": "obj", "default": "auth"},
        "send_events_limit": {"type": "natural", "default": 500},
        "get_events_limit": {"type": "natural", "default": 1000},
        "description": {"type": "str", "default": ""}
    },
    Server: {
        "req": {"type": "obj", "default": "req"},
        "log": {"type": "obj", "default": "log"},
        "auth": {"type": "obj", "default": "auth"},
        "handler": {"type": "obj", "default": "handler"}
    }
}
def build_server(conf, section_order=section_order, section_def=section_def, param_def=param_def):
    """Instantiate and wire all server objects from the parsed configuration;
    return the WSGI server object, or fallback_wsgi on any failure.

    NOTE(review): reconstructed from a corrupted source listing — the
    natural() guard, the KeyError format arguments, the initial stderr
    logger line and the fallback return were garbled; confirm against
    upstream history.
    """
    objects = {}    # Already initialized objects

    # Functions for validation and conversion of config values
    def facility(name):
        return int(getattr(logging.handlers.SysLogHandler, "LOG_" + name.upper()))

    def loglevel(name):
        return int(getattr(logging, name.upper()))

    def natural(name):
        num = int(name)
        if num < 1:
            raise ValueError("Not a natural number")
        return num

    def filepath(name):
        # Make paths relative to dir of this script
        return path.join(path.dirname(__file__), name)

    def obj(name):
        return objects[name.lower()]

    # Typedef dictionary
    conv_dict = {
        "facility": facility,
        "loglevel": loglevel,
        "natural": natural,
        "filepath": filepath,
        "obj": obj,
        "str": str
    }

    def init_obj(sect_name):
        config = dict(conf.get(sect_name, {}))
        sect_name = sect_name.lower()
        sect_def = section_def[sect_name]

        try:    # Object type defined?
            objtype = config["type"]
            del config["type"]
        except KeyError:    # No, fetch default object type for this section
            cls = sect_def[0]
        else:
            names = [o.__name__ for o in sect_def]
            try:
                idx = names.index(objtype)
            except ValueError:
                raise KeyError("Unknown type %s in section %s" % (objtype, sect_name))
            cls = sect_def[idx]

        params = param_def[cls]

        # No surplus parameters? Disallow also 'obj' attributes, these are only
        # to provide default referenced section
        for name in config:
            if name not in params or (name in params and params[name]["type"] == "obj"):
                raise KeyError("Unknown key %s in section %s" % (name, sect_name))

        # Process parameters
        kwargs = {}
        for name, definition in params.items():
            raw_val = config.get(name, definition["default"])
            try:
                type_callable = conv_dict[definition["type"]]
                val = type_callable(raw_val)
            except Exception:
                raise KeyError("Bad value \"%s\" for %s in section %s" % (raw_val, name, sect_name))
            kwargs[name] = val

        try:
            obj_inst = cls(**kwargs)         # run it
        except Exception as e:
            raise KeyError("Cannot initialize %s from section %s: %s" % (
                cls.__name__, sect_name, str(e)))

        objects[sect_name] = obj_inst
        if isinstance(obj_inst, Object):
            # Log only objects here, functions must take care of themselves
            objects["log"].info("Initialized %s" % str(obj_inst))

        return obj_inst

    # Init logging with at least simple stderr StreamLogger
    # Dunno if it's ok within wsgi, but we have no other choice, let's
    # hope it at least ends up in webserver error log
    objects["log"] = StreamLogger()  # NOTE(review): reconstructed - confirm class name

    # Shared container for common data of ongoing WSGI request
    objects["req"] = Request()

    try:
        # Now try to init required objects
        for o in section_order:
            init_obj(o)
    except Exception as e:
        objects["log"].critical(str(e))
        objects["log"].debug("", exc_info=sys.exc_info())
        return fallback_wsgi

    return objects["server"]
# Command line utilities
def check_config():
    """Confirm that the configuration was accepted and return shell status 0.

    Reaching this function at all means build_server() already set the
    server object up without errors, so there is nothing more to verify.
    """
    print("Looks clear.", file=sys.stderr)
    return 0
def list_clients(id=None):
    """Print a padded table of registered clients and return shell status 0.

    With id=None all clients are listed, otherwise only the matching entry.
    Column set and order come from the Client namedtuple fields.
    """
    clients = server.handler.db.get_clients(id)
    rows = [[str(getattr(client, field)) for field in Client._fields] for client in clients]
    # Width of each column is the widest cell in it, header included.
    widths = [max(len(cell) for cell in column) for column in zip(*(rows + [Client._fields]))]
    separator = ["-" * width for width in widths]
    for row in [Client._fields, separator] + rows:
        print(" ".join(cell.ljust(width) for cell, width in zip(row, widths)))
    return 0
def register_client(**kwargs):
    """Register a new client, filling in defaults for unspecified flags.

    argparse _always_ supplies every option (absent flags arrive as None),
    so missing arguments cannot be detected there; substitute the documented
    defaults here before delegating to modify_client with id=None.
    """
    defaults = {"valid": True, "read": True, "write": False, "debug": False, "test": True}
    for key, fallback in defaults.items():
        if kwargs[key] is None:
            kwargs[key] = fallback
    return modify_client(id=None, **kwargs)
def modify_client(**kwargs):
def isValidHostname(hostname):
if len(hostname) > 255:
return False
if hostname.endswith("."): # A single trailing dot is legal
hostname = hostname[:-1] # strip exactly one dot from the right, if present
disallowed = re.compile(r"[^A-Z\d-]", re.IGNORECASE)
return all( # Split by labels and verify individually
(label and len(label) <= 63 # length is within proper range
and not label.startswith("-") and not label.endswith("-") # no bordering hyphens
and not disallowed.search(label)) # contains only legal characters
for label in hostname.split("."))
def isValidNSID(nsid):
allowed = re.compile(r"^(?:[a-zA-Z_][a-zA-Z0-9_]*\.)*[a-zA-Z_][a-zA-Z0-9_]*$")
allowed = re.compile(r"(^[a-zA-Z0-9_ .%!+-]*(?=<.*>))?(^|(<(?=.*(>))))[a-zA-Z0-9_.%!+-]+@[a-zA-Z0-9-.]+\4?$") # just basic check
valid = (allowed.match(ms.strip())for ms in mail.split(','))
def isValidID(id):
client = server.handler.db.get_clients(id)
return client and True or False
if kwargs["name"] is not None:
kwargs["name"] = kwargs["name"].lower()
if not isValidNSID(kwargs["name"]):
print("Invalid client name \"%s\"." % kwargs["name"], file=sys.stderr)
if kwargs["hostname"] is not None:
kwargs["hostname"] = kwargs["hostname"].lower()
if not isValidHostname(kwargs["hostname"]):
print("Invalid hostname \"%s\"." % kwargs["hostname"], file=sys.stderr)
return 253
if kwargs["requestor"] is not None and not isValidEmail(kwargs["requestor"]):
print("Invalid requestor email \"%s\"." % kwargs["requestor"], file=sys.stderr)
return 252
if kwargs["id"] is not None and not isValidID(kwargs["id"]):
print("Invalid id \"%s\"." % kwargs["id"], file=sys.stderr)
return 251
for c in server.handler.db.get_clients():
if kwargs["name"] is not None and kwargs["name"].lower() == c.name:
print("Clash with existing name: %s" % str(c), file=sys.stderr)
return 250
if kwargs["secret"] is not None and kwargs["secret"] == c.secret:
print("Clash with existing secret: %s" % str(c), file=sys.stderr)
return 249
newid = server.handler.db.add_modify_client(**kwargs)
return list_clients(id=newid)
def load_maps():
    """Reload the category/tag mapping tables via the db backend; return 0."""
    db = server.handler.db
    db.load_maps()
    return 0
def purge(days=30, lastlog=None, events=None):
    """Purge lastlog entries and/or events older than 'days'; return 0.

    When neither --lastlog nor --events was requested, purge both.
    Fix: the events branch computed the purge count but never reported it
    (the lastlog branch did) — report both symmetrically.
    """
    if lastlog is None and events is None:
        lastlog = events = True
    if lastlog:
        count = server.handler.db.purge_lastlog(days)
        print("Purged %d lastlog entries." % count)
    if events:
        count = server.handler.db.purge_events(days)
        print("Purged %d events." % count)
    return 0
def add_client_args(subargp, mod=False):
    """Attach client registration/modification options to a subparser.

    With mod=True an existing entry is addressed by the mandatory --id and
    the identifying options (-n/-h/-r) become optional; with mod=False
    (registration) they are required.  All boolean pairs default to None so
    callers can distinguish "not specified" from an explicit choice.
    """
    subargp.add_argument("--help", action="help", help="show this help message and exit")
    if mod:
        subargp.add_argument(
            "-i", "--id", required=True, type=int,
            help="client id")
    subargp.add_argument(
        "-n", "--name", required=not mod,
        help="client name (in dotted reverse path notation)")
    subargp.add_argument(
        "-h", "--hostname", required=not mod,
        help="client hostname")  # NOTE(review): original help text not visible here
    subargp.add_argument(
        "-r", "--requestor", required=not mod,
        help="requestor email")  # NOTE(review): original help text not visible here
    subargp.add_argument(
        "-s", "--secret",
        help="authentication token (use explicit empty string to disable)")
    subargp.add_argument(
        "--note",
        help="client freetext description")
    # Each flag below is a tri-state: None (unset), True, False.
    reg_valid = subargp.add_mutually_exclusive_group(required=False)
    reg_valid.add_argument(
        "--valid", action="store_const", const=True, default=None,
        help="valid client (default)")
    reg_valid.add_argument("--novalid", action="store_const", const=False, dest="valid", default=None)
    reg_read = subargp.add_mutually_exclusive_group(required=False)
    reg_read.add_argument(
        "--read", action="store_const", const=True, default=None,
        help="client is allowed to read (default)")
    reg_read.add_argument("--noread", action="store_const", const=False, dest="read", default=None)
    reg_write = subargp.add_mutually_exclusive_group(required=False)
    reg_write.add_argument(
        "--nowrite", action="store_const", const=False, dest="write", default=None,
        help="client is allowed to send (default - no)")
    reg_write.add_argument("--write", action="store_const", const=True, default=None)
    reg_debug = subargp.add_mutually_exclusive_group(required=False)
    reg_debug.add_argument(
        "--nodebug", action="store_const", const=False, dest="debug", default=None,
        help="client is allowed receive debug output (default - no)")
    reg_debug.add_argument("--debug", action="store_const", const=True, default=None)
    reg_test = subargp.add_mutually_exclusive_group(required=False)
    reg_test.add_argument(
        "--test", action="store_const", const=True, default=None,
        help="client is yet in testing phase (default - yes)")
    reg_test.add_argument("--notest", action="store_const", const=False, dest="test", default=None)
def get_args():
    """Build and parse the administration command line.

    Each subcommand stores the implementing function in "command"; the
    script tail pops it from the namespace and calls it with the remaining
    entries as keyword arguments.  Returns the parsed Namespace.
    """
    import argparse
    argp = argparse.ArgumentParser(
        description="Warden server " + VERSION, add_help=False)
    # add_help=False everywhere: "-h" is taken by --hostname in subcommands,
    # so each parser registers a long-only --help explicitly.
    argp.add_argument(
        "--help", action="help",
        help="show this help message and exit")
    argp.add_argument(
        "-c", "--config", action="store",
        help="path to configuration file")
    subargp = argp.add_subparsers(title="commands", dest="command")
    subargp.required = True

    subargp_check = subargp.add_parser(
        "check", add_help=False,
        description="Try to setup server based on configuration file.",
        help="check configuration")
    subargp_check.set_defaults(command=check_config)
    subargp_check.add_argument(
        "--help", action="help",
        help="show this help message and exit")

    subargp_reg = subargp.add_parser(
        "register", add_help=False,
        description="Add new client registration entry.",
        help="register new client")
    subargp_reg.set_defaults(command=register_client)
    add_client_args(subargp_reg)

    subargp_mod = subargp.add_parser(
        "modify", add_help=False,
        description="Modify details of client registration entry.",
        help="modify client registration")
    subargp_mod.set_defaults(command=modify_client)
    add_client_args(subargp_mod, mod=True)

    subargp_list = subargp.add_parser(
        "list", add_help=False,
        description="List details of client registration entries.",
        help="list registered clients")
    subargp_list.set_defaults(command=list_clients)
    subargp_list.add_argument(
        "--help", action="help",
        help="show this help message and exit")
    subargp_list.add_argument(
        "--id", action="store", type=int,
        help="client id", default=None)

    subargp_purge = subargp.add_parser(
        "purge", add_help=False,
        description=(
            "Purge old events or lastlog records."
            " Note that lastlog purge retains at least one newest record for each"
            " client, even if it is more than number of 'days' old."),
        help="purge old events or lastlog records")
    subargp_purge.set_defaults(command=purge)
    subargp_purge.add_argument(
        "--help", action="help",
        help="show this help message and exit")
    subargp_purge.add_argument(
        "-l", "--lastlog", action="store_true", dest="lastlog", default=None,
        help="purge lastlog records")
    subargp_purge.add_argument(
        "-e", "--events", action="store_true", dest="events", default=None,
        help="purge events")
    subargp_purge.add_argument(
        "-d", "--days", action="store", dest="days", type=int, default=30,
        help="records older than 'days' back from today will get purged")

    subargp_loadmaps = subargp.add_parser(
        "loadmaps", add_help=False,
        description=(
            "Load 'categories' and 'tags' table from 'catmap_db.json' and 'tagmap_db.json'."
            " Note also that previous content of both tables will be lost."),
        help="load catmap and tagmap into db")
    subargp_loadmaps.set_defaults(command=load_maps)
    subargp_loadmaps.add_argument(
        "--help", action="help",
        help="show this help message and exit")

    # Without this return the caller at module level would receive None.
    return argp.parse_args()
# --- Script entry point -------------------------------------------------
# Parse the command line, build the WSGI server from the configuration
# file, then dispatch to the selected administration command.
args = get_args()
# Config path is resolved relative to this script's directory unless
# overridden via -c/--config.
config = path.join(path.dirname(__file__), args.config or "warden_server.cfg")
server = build_server(read_cfg(config))
command = args.command
subargs = vars(args)
# Drop bookkeeping keys so the remaining namespace entries can be passed
# straight to the command function as keyword arguments.
del subargs["command"]
del subargs["config"]
if not server or server is fallback_wsgi:
    # build_server fell back to the stub WSGI app; details went to the logs.
    print("Failed initialization, check configured log targets for reasons.", file=sys.stderr)
    sys.exit(255)
# Command functions return a shell exit status (0 = success).
sys.exit(command(**subargs))