#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2011-2015 Cesnet z.s.p.o
# Use of this source is governed by a 3-clause BSD-style license, see LICENSE file.

from __future__ import print_function

import sys
import os
from os import path
import io
import abc
import logging
import logging.handlers
import json
import re
from traceback import format_tb
from collections import namedtuple
from time import sleep
from random import randint
from itertools import repeat

import M2Crypto.X509

if sys.version_info[0] >= 3:
    import configparser as ConfigParser
    from urllib.parse import parse_qs
    unicode = str

    def get_method_params(method):
        return method.__code__.co_varnames[:method.__code__.co_argcount]

else:
    import ConfigParser
    from urlparse import parse_qs
    def get_method_params(method):
        return method.func_code.co_varnames[:method.func_code.co_argcount]
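
# Illustrative example (hypothetical function, not defined in this file): for
#   def handle(self, post, events): ...
# get_method_params(handle) returns ('self', 'post', 'events') under both
# Python 2 and Python 3, i.e. only the positional parameter names.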


# for local version of up to date jsonschema
sys.path.append(path.join(path.dirname(__file__), "..", "lib"))

from jsonschema import Draft4Validator

VERSION = "3.0-beta3"

class Encoder(json.JSONEncoder):
    def default(self, o):
        if isinstance(o, Error):
            return o.to_dict()
        if isinstance(o, ErrorMessage):
            out = o.other_args.copy()
            out.pop("exc", None)
            out["error"] = o.error
            out["message"] = o.message
            if o.events:
                out["events"] = list(o.events)
            return out
        return str(o)


class ErrorMessage(Exception):

    def __init__(self, error, message, events=None, unique_id=None, **kwargs):
        super(Exception, self).__setattr__("error", error)
        super(Exception, self).__setattr__("message", message)
        super(Exception, self).__setattr__("unique_id", unique_id)
        self.events = set() if events is None else set(events)
        self.other_args = kwargs

    def __repr__(self):
        return "%s(error=%d, message=%s)" % (
            type(self).__name__, self.error, repr(self.message)
        )

    def __str__(self):
        if sys.version_info[0] < 3:
            return self.str_err().encode('ascii', 'backslashreplace')
        return self.str_err()

    def str_err(self):
        exc = self.other_args.get("exc", None)
        if exc in (None, (None, None, None)):
            exc_cause = ""
        else:
            exc_cause = " (cause was %s: %s)" % (exc[0].__name__, str(exc[1]))
        return "Error(%s) %s%s" % (self.error, self.message, exc_cause)

    def str_info(self):
        arg_copy = self.other_args.copy()
        arg_copy.pop("req_id", None)
        arg_copy.pop("method", None)
        arg_copy.pop("exc", None)
        if arg_copy:
            return "Detail: %s" % json.dumps(arg_copy, cls=Encoder)
        return ""

    def str_debug(self):
        exc = self.other_args.get("exc", None)
        if exc in (None, (None, None, None)):
            return ""
        exc_tb = exc[2]
        if not exc_tb:
            return ""

        return "Traceback:\n" + "".join(format_tb(exc_tb))

    def __getattr__(self, name):
        if name in self.other_args:
            return self.other_args[name]
        raise AttributeError

    def __setattr__(self, name, value):
        if name in ("events", "exc", "other_args"):
            super(Exception, self).__setattr__(name, value)
            return
        if name in ("error", "message", "unique_id"):
            raise AttributeError("Cannot change the attribute %s" % name)
        self.other_args[name] = value


class Error(Exception):

    def __init__(self, method=None, req_id=None, errors=None, **kwargs):
        self.method = method
        self.req_id = req_id
        if "message" in kwargs:
            kwargs.setdefault("error", 500)
            self.errors = [ErrorMessage(**kwargs)]
        else:
            self.errors = []

    def append(self, _events=None, **kwargs):
        kwargs.setdefault("message", "No further information")
        kwargs.setdefault("error", 500)
        self.errors.append(ErrorMessage(**kwargs))

    def get_http_err_msg(self):
        try:
            err = self.errors[0].error
            msg = self.errors[0].message
        except (IndexError, AttributeError):
            err = 500
            msg = "There's NO self-destruction button! Ah, you've just found it..."
            return err, msg

        if not all(msg == e.message for e in self.errors):
            # messages not the same, get Multiple errors
            msg = "Multiple errors"
        if not all(err == e.error for e in self.errors):
            # errors not same, round to basic err code (400, 500)
            # and use the highest one
            err = max(e.error for e in self.errors) // 100 * 100

        msg = "".join((c if '\x20' <= c != '\x7f' else r'\x{:02x}'.format(ord(c))) for c in msg) # escape control characters
        return err, msg


    def __str__(self):
        return "\n".join(str(e) for e in self.errors)

    def log(self, logger, prio=logging.ERROR):
        for e in self.errors:
            logger.log(prio, e.str_err())
            info = e.str_info()
            if info:
                logger.info(info)
            debug = e.str_debug()
            if debug:
                logger.debug(debug)

    def to_dict(self):
        d = {
            "method": self.method,
            "req_id": self.req_id,
            "errors": self.errors
        }
        return d


def get_clean_root_logger(level=logging.INFO):
    """ Attempts to get logging module into clean slate state """

    # We want to be able to set up at least stderr logger before any
    # configuration is read, and then later get rid of it and set up
    # whatever administrator requires.
    # However, there can exist only one logger, but we want to get a clean
    # slate everytime we initialize StreamLogger or FileLogger... which
    # is not exactly supported by logging module.
    # So, we look directly inside logger class and clean up handlers/filters
    # manually.
    logger = logging.getLogger()
    logger.setLevel(level)
    while logger.handlers:
        logger.removeHandler(logger.handlers[0])
    while logger.filters:
        logger.removeFilter(logger.filters[0])
    return logger


def StreamLogger(stream=sys.stderr, level=logging.DEBUG):
    """ Fallback handler just for setup, not meant to be used from
        configuration file because during wsgi query stdout/stderr
        is forbidden.
    """

    fhand = logging.StreamHandler(stream)
    fform = logging.Formatter('%(asctime)s %(filename)s[%(process)d]: (%(levelname)s) %(message)s')
    fhand.setFormatter(fform)
    logger = get_clean_root_logger(level)
    logger.addHandler(fhand)
    return logger


class LogRequestFilter(logging.Filter):
    """ Filter class, instance of which is added to logger class to add
        info about request automatically into every logline, no matter
        how it came into existence.
    """

    def __init__(self, req):
        logging.Filter.__init__(self)
        self.req = req

    def filter(self, record):
        if self.req.env:
            record.req_preamble = "%08x/%s: " % (self.req.req_id or 0, self.req.path)
        else:
            record.req_preamble = ""
        return True


def FileLogger(req, filename, level=logging.INFO):

    fhand = logging.FileHandler(filename)
    fform = logging.Formatter('%(asctime)s %(filename)s[%(process)d]: (%(levelname)s) %(req_preamble)s%(message)s')
    fhand.setFormatter(fform)
    logger = get_clean_root_logger(level)
    logger.addHandler(fhand)
    logger.info("Initialized FileLogger(req=%r, filename=\"%s\", level=%s)" % (req, filename, level))
    return logger


def SysLogger(req, socket="/dev/log", facility=logging.handlers.SysLogHandler.LOG_DAEMON, level=logging.INFO):

    fhand = logging.handlers.SysLogHandler(address=socket, facility=facility)
    fform = logging.Formatter('%(filename)s[%(process)d]: (%(levelname)s) %(req_preamble)s%(message)s')
    fhand.setFormatter(fform)
    logger = get_clean_root_logger(level)
    logger.addHandler(fhand)
    logger.info("Initialized SysLogger(req=%r, socket=\"%s\", facility=\"%d\", level=%s)" % (req, socket, facility, level))
    return logger


Client = namedtuple("Client", [
    "id", "registered", "requestor", "hostname", "name",
    "secret", "valid", "read", "debug", "write", "test", "note"])


class Object(object):

    def __str__(self):
        attrs = get_method_params(self.__init__)[1:]
        eq_str = ["%s=%r" % (attr, getattr(self, attr, None)) for attr in attrs]
        return "%s(%s)" % (type(self).__name__, ", ".join(eq_str))


class Request(Object):
    """ Simple container for info about ongoing request.
        One instance gets created before server startup, and all other
        configured objects get it as parameter during instantiation.

        Server then takes care of populating this instance on the start
        of wsgi request (and resetting at the end). All other objects
        then can find this actual request info in their own self.req.

        However, only Server.wsgi_app, handler (WardenHandler) exposed
        methods and logging related objects should use self.req directly.
        All other objects should use self.req only as source of data for
        error/exception handling/logging, and should take/return
        necessary data as arguments/return values for clarity on
        which data their main codepaths work with.
    """
    def reset(self, env=None, client=None, path=None, req_id=None):
        self.env = env
        self.client = client
        self.path = path or ""
        if req_id is not None:
            self.req_id = req_id
        else:
            self.req_id = 0 if env is None else randint(0x00000000, 0xFFFFFFFF)
    __init__ = reset

    def error(self, **kwargs):
        return Error(self.path, self.req_id, **kwargs)


class ObjectBase(Object):

    def __init__(self, req, log):
        self.req = req
        self.log = log


class PlainAuthenticator(ObjectBase):
    def __init__(self, req, log, db):
        ObjectBase.__init__(self, req, log)
        self.db = db

    def authenticate(self, env, args, hostnames=None, check_secret=True):
        name = args.get("client", [None])[0]
        secret = args.get("secret", [None])[0] if check_secret else None

        client = self.db.get_client_by_name(hostnames, name, secret)

        if not client:
            self.log.info("authenticate: client not found by name: \"%s\", secret: %s, hostnames: %s" % (
                name, secret, str(hostnames)))
            return None

        # Clients with 'secret' set must get authenticated by it.
        # No secret turns secret auth off for this particular client.
        if client.secret is not None and secret is None and check_secret:
            self.log.info("authenticate: missing secret argument")
            return None

        self.log.info("authenticate: %s" % str(client))
        # These args are not for handler
        args.pop("client", None)
        args.pop("secret", None)
        return client

    def authorize(self, env, client, path, method):
        if method.debug:
            if not client.debug:
                self.log.info("authorize: failed, client does not have debug enabled")
                return None
        if method.read:
            if not client.read:
                self.log.info("authorize: failed, client does not have read enabled")
                return None
        if method.write:
            if not (client.write or client.test):
                self.log.info("authorize: failed, client is not allowed to write or test")
                return None
        return client


class X509Authenticator(PlainAuthenticator):

    def get_cert_dns_names(self, pem):

        cert = M2Crypto.X509.load_cert_string(pem)

        subj = cert.get_subject()
        commons = [n.get_data().as_text() for n in subj.get_entries_by_nid(subj.nid["CN"])]

        try:
            extstrs = cert.get_ext("subjectAltName").get_value().split(",")
        except LookupError:
            extstrs = []
        extstrs = [val.strip() for val in extstrs]
        altnames = [val[4:] for val in extstrs if val.startswith("DNS:")]

        # bit of mangling to get rid of duplicates and leave commonname first
        firstcommon = commons[0]
        return [firstcommon] + list(set(altnames+commons) - set([firstcommon]))
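        # Illustrative example (hypothetical certificate values): with
        # CN = "warden.example.org" and
        # subjectAltName = "DNS:warden.example.org, DNS:alt.example.org",
        # get_cert_dns_names returns ["warden.example.org", "alt.example.org"]
        # (common name first, duplicates removed, order of the rest unspecified).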

    def is_verified_by_apache(self, env, args):
        # Allows correct work while SSLVerifyClient both "optional" and "required"
        verify = env.get("SSL_CLIENT_VERIFY")
        if verify == "SUCCESS":
            return True
        exception = self.req.error(
            message="authenticate: certificate verification failed",
            error=403, args=args, ssl_client_verify=verify, cert=env.get("SSL_CLIENT_CERT"))
        exception.log(self.log)
        return False
    def authenticate(self, env, args):
        if not self.is_verified_by_apache(env, args):
            return None

        try:
            cert_names = self.get_cert_dns_names(env["SSL_CLIENT_CERT"])
        except:
            exception = self.req.error(
                message="authenticate: cannot get or parse certificate from env",
                error=403, exc=sys.exc_info(), env=env)
            exception.log(self.log)
            return None
        return PlainAuthenticator.authenticate(self, env, args, hostnames=cert_names)

class X509NameAuthenticator(X509Authenticator):
    def authenticate(self, env, args):
        if not self.is_verified_by_apache(env, args):
            return None

        try:
            cert_name = env["SSL_CLIENT_S_DN_CN"]
        except:
            exception = self.req.error(
                message="authenticate: cannot get or parse certificate from env",
                error=403, exc=sys.exc_info(), env=env)
            exception.log(self.log)
            return None

        if cert_name != args.setdefault("client", [cert_name])[0]:
            exception = self.req.error(
                message="authenticate: client name does not correspond with certificate",
                error=403, cn=cert_name, args=args)
            exception.log(self.log)
            return None

        return PlainAuthenticator.authenticate(self, env, args, check_secret=False)


class X509MixMatchAuthenticator(X509Authenticator):
    def __init__(self, req, log, db):
        PlainAuthenticator.__init__(self, req, log, db)
        self.hostname_auth = X509Authenticator(req, log, db)
        self.name_auth = X509NameAuthenticator(req, log, db)

    def authenticate(self, env, args):
        if not self.is_verified_by_apache(env, args):
            return None

        try:
            cert_name = env["SSL_CLIENT_S_DN_CN"]
        except:
            exception = self.req.error(
                message="authenticate: cannot get or parse certificate from env",
                error=403, exc=sys.exc_info(), env=env)
            exception.log(self.log)
            return None

        name = args.get("client", [None])[0]
        secret = args.get("secret", [None])[0]
        # Client names are in reverse notation than DNS, client name should
        # thus never be the same as machine hostname (if it is, client
        # admin does something very amiss).
        # So, if client sends the same name in query as in the certificate,
        # or sends no name or secret (which is necessary for hostname auth),
        # use X509NameAuthenticator. Otherwise (names are different and there
        # is name and/or secret in query) use (hostname) X509Authenticator.
        if name == cert_name or (name is None and secret is None):
            auth = self.name_auth
        else:
            auth = self.hostname_auth
        self.log.info("MixMatch is choosing %s (name: %s, cert_name: %s)" % (type(auth).__name__, name, cert_name))

        return auth.authenticate(env, args)


class NoValidator(ObjectBase):

    def __init__(self, req, log):
        ObjectBase.__init__(self, req, log)
    def check(self, event):
        return []


class JSONSchemaValidator(NoValidator):

    def __init__(self, req, log, filename=None):
        NoValidator.__init__(self, req, log)
        self.path = filename or path.join(path.dirname(__file__), "idea.schema")
        with io.open(self.path, "r", encoding="utf-8") as f:
            self.schema = json.load(f)
        self.validator = Draft4Validator(self.schema)

    def check(self, event):

        def sortkey(k):
            """ Treat keys as lowercase, prefer keys with less path segments """
            return (len(k.path), "/".join(map(str, k.path)).lower())

        res = []
        for error in sorted(self.validator.iter_errors(event), key=sortkey):
            res.append(
                ErrorMessage(
                    460, "Validation error: key \"%s\", value \"%s\"" % (
                        "/".join(map(str, error.path)),
                        error.instance
                    ),
                    expected=error.schema.get('description', 'no additional info')
                )
            )
        return res


class UnsafeQueryContext:
    """ Context manager to be used within a transaction for partial rollbacks
        Meant to be used as:
        with self as db:
            with self.unsafe_query_context(db):
                res = db.query_one(...)
    """
    def __init__(self, db, silence_exc=False):
        self.db = db
        self.silence_exc = silence_exc

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_traceback):
        return self.silence_exc and exc_type is not None \
            and issubclass(exc_type, (self.db.db.IntegrityError, self.db.db.DataError))


class DataBase(ObjectBase):

    unsafe_query_context = UnsafeQueryContext

    def __init__(
            self, req, log, host, user, password, dbname, port, retry_count,
            retry_pause, event_size_limit, catmap_filename, tagmap_filename):
        ObjectBase.__init__(self, req, log)
        self.host = host
        self.user = user
        self.password = password
        self.dbname = dbname
        self.port = port
        self.retry_count = retry_count
        self.retry_pause = retry_pause
        self.event_size_limit = event_size_limit
        self.catmap_filename = catmap_filename
        self.tagmap_filename = tagmap_filename

        with io.open(catmap_filename, "r", encoding="utf-8") as catmap_fd:
            self.catmap = json.load(catmap_fd)
            self.catmap_other = self.catmap["Other"]    # Catch error soon, avoid lookup later

        with io.open(tagmap_filename, "r", encoding="utf-8") as tagmap_fd:
            self.tagmap = json.load(tagmap_fd)
            self.tagmap_other = self.tagmap["Other"]    # Catch error soon, avoid lookup later

        self.con = None

    def close(self):
        try:
            if self.con:
                self.con.close()
        except Exception:
            pass
        self.con = None
    def __del__(self):
        self.close()

    def repeat(self):
        """ Allows for graceful repeating of transactions self.retry_count
            times. Unsuccessful attempts wait for self.retry_pause until
            next attempt.

            Meant for usage with context manager:

            for attempt in self.repeat():
                with attempt as db:
                    res = db.query_all(...)

            Note that it's not reentrant (as is not underlying MySQL
            connection), so avoid nesting on the same MySQL object.
        """
        self.retry_attempt = self.retry_count
        while self.retry_attempt:
            if self.retry_attempt != self.retry_count:
                sleep(self.retry_pause)
            self.retry_attempt -= 1
            yield self

    def __enter__(self):
        """ Context manager protocol. Guarantees that transaction will
            get either commited or rolled back in case of database
            exception. Can be used with self.repeat(), or alone as:

            with self as db:
                res = db.query_all(...)

            Note that it's not reentrant (as is not underlying MySQL
            connection), so avoid nesting on the same MySQL object.
        """
        if not self.retry_attempt:
            self.retry_attempt = 0
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """ Context manager protocol. If db exception is fired and
            self.retry_attempt is not zero, it is only logged and
            does not propagate, otherwise it propagates up. Also
            open transaction is rolled back.
            In case of no exception, transaction gets commited.
        """
        if exc_type is None:
            self.con.commit()
            self.retry_attempt = 0
        else:
            try:
                if self.con is not None:
                    self.con.rollback()
            except self.db.Error:
                pass
            try:
                self.close()
            except self.db.Error:
                pass
            if self.retry_attempt > 0:
                self.log.info("Database error (%d attempts left): %s %s" %
                              (self.retry_attempt, exc_type.__name__, exc_val))
                return True

    def _query(self, *args, **kwargs):
        if not self.con:
            self.connect()
        crs = self.con.cursor()
        self.log.debug("execute: %s %s" % (args, kwargs))
        crs.execute(*args, **kwargs)
        return crs

    def _query_multiple(self, query, params, ret, fetch):
        res = None
        for n, (q, p) in enumerate(zip(query, params)):
            cur = self._query(q, p)
            if n == ret:
                res = fetch(cur)
        if ret == -1:  # fetch the result of the last query
            res = fetch(cur)
        return res
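    # Illustrative example (hypothetical queries): with
    #   query  = ["INSERT INTO t(x) VALUES (%s)", "SELECT LAST_INSERT_ID() AS id"]
    #   params = [(1,), ()]
    # calling self._query_multiple(query, params, 1, lambda cur: cur.fetchone())
    # executes both statements and returns the fetched row of the second one;
    # ret=-1 fetches from whichever query ran last and ret=None fetches nothing.
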
    def execute(self, query, params, ret=None):
        """Execute the provided queries; discard the result"""
        self._query_multiple(query, params, None, None)
    def query_all(self, query, params, ret=-1):
        """Execute the provided queries; return list of all rows as dicts of the ret-th query (0 based)"""
        return self._query_multiple(query, params, ret, lambda cur: cur.fetchall())
    def query_one(self, query, params, ret=-1):
        """Execute the provided queries; return the first result of the ret-th query (0 based)"""
        return self._query_multiple(query, params, ret, lambda cur: cur.fetchone())

    def query_rowcount(self, query, params, ret=-1):
        """Execute provided query; return the number of affected rows or the number of returned rows of the ret-th query (0 based)"""
        return self._query_multiple(query, params, ret, lambda cur: cur.rowcount)

    def _get_comma_perc(self, l):
        return ",".join(repeat("%s", l if isinstance(l, int) else len(l)))

    def _get_comma_perc_n(self, n, l):
        return ", ".join(repeat("(%s)" % self._get_comma_perc(n), len(l)))
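    # Illustrative examples: self._get_comma_perc(3) and
    # self._get_comma_perc(["a", "b", "c"]) both yield "%s,%s,%s", while
    # self._get_comma_perc_n(2, ["x", "y"]) yields "(%s,%s), (%s,%s)",
    # i.e. one parenthesised placeholder group per item of the second argument.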

    def _get_not(self, b):
        return "" if b else "NOT"
    def _build_get_client_by_name(self, cert_names, name, secret):
        """Build query and params for client lookup"""
    def get_client_by_name(self, cert_names=None, name=None, secret=None):
        query, params, ret = self._build_get_client_by_name(cert_names, name, secret)
        for attempt in self.repeat():
            with attempt as db:
                rows = db.query_all(query, params, ret)
                if len(rows) > 1:
                    self.log.warning(
                        "get_client_by_name: query returned more than one result (cert_names = %s, name = %s, secret = %s): %s" %
                        (cert_names, name, secret, ", ".join([str(Client(**row)) for row in rows]))
                    )
                    return None

                return Client(**rows[0]) if rows else None
    def _build_get_clients(self, id):
        """Build query and params for client lookup by id"""

    def get_clients(self, id=None):
        query, params, ret = self._build_get_clients(id)

        for attempt in self.repeat():
            with attempt as db:
                rows = db.query_all(query, params, ret=ret)
                return [Client(**row) for row in rows]

    def _build_add_modify_client(self, id, **kwargs):
        """Build query and params for adding/modifying client"""

    def add_modify_client(self, id=None, **kwargs):
        if id is not None and all(kwargs.get(attr, None) is None for attr in set(Client._fields) - {"id", "registered"}):
            # Nothing to modify, just return the existing id
            return id

        query, params, ret = self._build_add_modify_client(id, **kwargs)

        for attempt in self.repeat():
            with attempt as db:
                res_id = db.query_one(query, params, ret=ret)["id"]
                newid = res_id if id is None else id
                return newid

    def _build_get_debug_version(self):
        pass

    def _build_get_debug_tablestat(self):
        pass

    def get_debug(self):
        vquery, vparams, vret = self._build_get_debug_version()
        tquery, tparams, tret = self._build_get_debug_tablestat()
        for attempt in self.repeat():
            with attempt as db:
                return {
                    "db": type(self).__name__,
                    "version": db.query_one(vquery, vparams, vret)["version"],
                    "tables": db.query_all(tquery, tparams, tret)
                }

    def getMaps(self, section, variables):
        maps = []
        for v in variables:
            try:
                mapped = section[v]
            except KeyError:
                raise self.req.error(
                    message="Wrong tag or category used in query.",
                    error=422, exc=sys.exc_info(), key=v
                )
            maps.append(mapped)
        return set(maps)    # unique
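    # Illustrative example (hypothetical map contents): with
    # section = {"Malware": 5, "Recon.Scanning": 13, "Other": 99},
    # self.getMaps(section, ["Malware", "Recon.Scanning"]) returns {5, 13};
    # an unknown key raises a 422 error rather than being silently dropped.
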
    def _build_fetch_events(
            self, client, id, count,
            cat, nocat, tag, notag, group, nogroup):
        """Build query and params for fetching events based on id, count and category, tag and group filters"""

    def _load_event_json(self, data):
        """Return decoded json from data loaded from database, if unable to decode, return None"""

    def fetch_events(
            self, client, id, count,
            cat=None, nocat=None,
            tag=None, notag=None,
            group=None, nogroup=None):

        if cat and nocat:
            raise self.req.error(
                message="Unrealizable conditions. Choose cat or nocat option.",
                error=422, cat=cat, nocat=nocat)
        if tag and notag:
            raise self.req.error(
                message="Unrealizable conditions. Choose tag or notag option.",
                error=422, tag=tag, notag=notag)
        if group and nogroup:
            raise self.req.error(
                message="Unrealizable conditions. Choose group or nogroup option.",
                error=422, group=group, nogroup=nogroup)

        query, params, ret = self._build_fetch_events(
            client, id, count,
            cat, nocat,
            tag, notag,
            group, nogroup
        )
        row = None
        for attempt in self.repeat():
            with attempt as db:
                row = db.query_all(query, params, ret=ret)

        if row:
            maxid = max(r['id'] for r in row)
        else:
            maxid = self.getLastEventId()

        events = []
        for r in row:
            e = self._load_event_json(r["data"])
            if e is None:  # null cannot be valid event JSON
                # Note that we use Error object just for proper formatting,
                # but do not raise it; from client perspective invalid
                # events get skipped silently.
                err = self.req.error(
                    message="Unable to deserialize JSON event from db, id=%s" % r["id"],
                    error=500, exc=sys.exc_info(), id=r["id"])
                err.log(self.log, prio=logging.WARNING)
            else:
                events.append(e)

        return {
            "lastid": maxid,
            "events": events
        }

    def _build_store_events_event(self, client, event, raw_event):
        """Build query and params for event insertion"""

    def _build_store_events_categories(self, event_id, cat_ids):
        """Build query and params for insertion of event-categories mapping"""

    def _build_store_events_tags(self, event_id, tag_ids):
        """Build query and params for insertion of event-tags mapping"""

    def store_events(self, client, events, events_raw, events_indexes):
        try:
            for attempt in self.repeat():
                with attempt as db:
                    errors = []
                    stored = 0
                    for event, raw_event, event_indx in zip(events, events_raw, events_indexes):
                        equery, eparams, eret = self._build_store_events_event(client, event, raw_event)
                        try:
                            with self.unsafe_query_context(db):
                                lastid = db.query_one(equery, eparams, ret=eret)["id"]
                        except self.db.IntegrityError:
                            exception = self.req.error(message="IDEA event with this ID already exists", error=400, exc=sys.exc_info(), env=self.req.env)
                            exception.log(self.log)
                            errors.append(ErrorMessage(409, "IDEA event with this ID already exists", events={event_indx}))
                            continue

                        stored += 1
                        catlist = event.get('Category', [])
                        cats = set(catlist) | {cat.split(".", 1)[0] for cat in catlist}
                        cat_ids = [self.catmap.get(cat, self.catmap_other) for cat in cats]
                        cquery, cparams, _ = self._build_store_events_categories(lastid, cat_ids)
                        db.execute(cquery, cparams)
                        nodes = event.get('Node', [])
                        tags = {tag for node in nodes for tag in node.get('Type', [])}
                        if tags:
                            tag_ids = [self.tagmap.get(tag, self.tagmap_other) for tag in tags]
                            tquery, tparams, _ = self._build_store_events_tags(lastid, tag_ids)
                            db.execute(tquery, tparams)

                    return errors, stored
        except Exception as e:
            exception = self.req.error(message="DB error", error=500, exc=sys.exc_info(), env=self.req.env)
            exception.log(self.log)
            return [ErrorMessage(500, "DB error %s" % type(e).__name__)], 0
    def _build_insert_last_received_id(self, client, id):
        """Build query and params for insertion of the last event id received by client"""

    def insertLastReceivedId(self, client, id):
        self.log.debug("insertLastReceivedId: id %i for client %i(%s)" % (id, client.id, client.hostname))
        query, params, _ = self._build_insert_last_received_id(client, id)
        for attempt in self.repeat():
            with attempt as db:
                db.execute(query, params)

    def _build_get_last_event_id(self):
        """Build query and params for querying the id of the last inserted event"""

    def getLastEventId(self):
        query, params, ret = self._build_get_last_event_id()
        for attempt in self.repeat():
            with attempt as db:
                id_ = db.query_one(query, params, ret=ret)["id"]
                return id_ or 1

    def _build_get_last_received_id(self, client):
        """Build query and params for querying the last event id received by client"""

    def getLastReceivedId(self, client):
        query, params, ret = self._build_get_last_received_id(client)
        for attempt in self.repeat():
            with attempt as db:
                res = db.query_one(query, params, ret=ret)

                if res is None:
                    self.log.debug("getLastReceivedId: probably first access, unable to get id for client %i(%s)" %
                        (client.id, client.hostname))
                    id = 1
                else:
                    id = res["id"] or 1
                    self.log.debug("getLastReceivedId: id %i for client %i(%s)" %
                        (id, client.id, client.hostname))
                return id

    def _build_load_maps_tags(self):
        """Build query and params for updating the tag map"""

    def _build_load_maps_cats(self):
        """Build query and params for updating the catetgory map"""

    def load_maps(self):
        tquery, tparams, _ = self._build_load_maps_tags()
        cquery, cparams, _ = self._build_load_maps_cats()
        with self as db:
            db.execute(tquery, tparams)
            db.execute(cquery, cparams)

    def _build_purge_lastlog(self, days):
        """Build query and params for purging stored client last event mapping older than days"""
    def purge_lastlog(self, days):
        query, params, ret = self._build_purge_lastlog(days)
        with self as db:
            return db.query_rowcount(query, params, ret=ret)

    def _build_purge_events_get_id(self, days):
        """Build query and params to get largest event id of events older than days"""

    def _build_purge_events_events(self, id_):
        """Build query and params to remove events older then days and their mappings"""

    def purge_events(self, days):
        iquery, iparams, iret = self._build_purge_events_get_id(days)
        with self as db:
            id_ = db.query_one(iquery, iparams, ret=iret)["id"]
            if id_ is None:
                return 0
            equery, eparams, eret = self._build_purge_events_events(id_)
            affected = db.query_rowcount(equery, eparams, ret=eret)
            return affected


DataBase = abc.ABCMeta("DataBase", (DataBase,), {})


class MySQL(DataBase):

    def __init__(
            self, req, log, host, user, password, dbname, port, retry_count,
            retry_pause, event_size_limit, catmap_filename, tagmap_filename):

        super(DataBase, self).__init__(req, log, host, user, password, dbname, port, retry_count,
            retry_pause, event_size_limit, catmap_filename, tagmap_filename)

        import MySQLdb as db
        import MySQLdb.cursors as mycursors
        self.db = db
        self.mycursors = mycursors

    def connect(self):
        self.con = self.db.connect(
            host=self.host, user=self.user, passwd=self.password,
            db=self.dbname, port=self.port, cursorclass=self.mycursors.DictCursor)

    def _build_get_client_by_name(self, cert_names=None, name=None, secret=None):
        """Build query and params for client lookup"""
        query = ["SELECT * FROM clients WHERE valid = 1"]
        params = []
        if name:
            query.append(" AND name = %s")
            params.append(name.lower())
        if secret:
            query.append(" AND secret = %s")
            params.append(secret)
        if cert_names:
            query.append(" AND hostname IN (%s)" % self._get_comma_perc(cert_names))
            params.extend(n.lower() for n in cert_names)

        return ["".join(query)], [params], 0
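    # Illustrative example (hypothetical values):
    # _build_get_client_by_name(["warden.example.org"], "org.example.warden", "tok")
    # returns (["SELECT * FROM clients WHERE valid = 1 AND name = %s"
    #           " AND secret = %s AND hostname IN (%s)"],
    #          [["org.example.warden", "tok", "warden.example.org"]], 0).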

    def _build_get_clients(self, id):
        """Build query and params for client lookup by id"""
        query = ["SELECT * FROM clients"]
        params = []
        if id:
            query.append("WHERE id = %s")
            params.append(id)
        query.append("ORDER BY id")

        return [" ".join(query)], [params], 0

    def _build_add_modify_client(self, id, **kwargs):
        """Build query and params for adding/modifying client"""
        query = []
        params = []
        uquery = []
        if id is None:
            query.append("INSERT INTO clients SET")
            uquery.append("registered = now()")
        else:
            query.append("UPDATE clients SET")
        for attr in set(Client._fields) - set(["id", "registered"]):
            val = kwargs.get(attr, None)
            if val is not None:  # guaranteed at least one is not None
                if attr == "secret" and val == "":  # disable secret
                    val = None
                uquery.append("`%s` = %%s" % attr)
                params.append(val)

        query.append(", ".join(uquery))
        if id is not None:
            query.append("WHERE id = %s")
            params.append(id)
        return (
            [" ".join(query), 'SELECT LAST_INSERT_ID() AS id'],
            [params, []],
            1
        )
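    # Illustrative example (hypothetical values):
    # _build_add_modify_client(None, name="org.example.warden") builds an
    # "INSERT INTO clients SET registered = now(), `name` = %s" statement plus
    # "SELECT LAST_INSERT_ID() AS id", with ret=1 so the caller reads the new id;
    # with id given it builds "UPDATE clients SET ... WHERE id = %s" instead.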

    def _build_get_debug_version(self):
        return ["SELECT VERSION() AS version"], [()], 0

    def _build_get_debug_tablestat(self):
        return ["SHOW TABLE STATUS"], [()], 0

    def _load_event_json(self, data):
        """Return decoded json from data loaded from database, if unable to decode, return None"""
        try:
            return json.loads(data)
        except Exception:
            return None

    def _build_fetch_events(
            self, client, id, count,
            cat, nocat, tag, notag, group, nogroup):
        query = ["SELECT e.id, e.data FROM clients c RIGHT JOIN events e ON c.id = e.client_id WHERE e.id > %s"]
        params = [id or 0]

        if cat or nocat:
            cats = self.getMaps(self.catmap, (cat or nocat))
            query.append(
                " AND e.id %s IN (SELECT event_id FROM event_category_mapping WHERE category_id IN (%s))" %
                (self._get_not(cat), self._get_comma_perc(cats))
            )
            params.extend(cats)

        if tag or notag:
            tags = self.getMaps(self.tagmap, (tag or notag))
            query.append(
                " AND e.id %s IN (SELECT event_id FROM event_tag_mapping WHERE tag_id IN (%s))" %
                (self._get_not(tag), self._get_comma_perc(tags))
            )
            params.extend(tags)

        if group or nogroup:
            subquery = []
            for name in (group or nogroup):
                escaped_name = name.replace('&', '&&').replace("_", "&_").replace("%", "&%")  # escape for LIKE
                subquery.append("c.name = %s")                 # exact client
                params.append(name)
                subquery.append("c.name LIKE CONCAT(%s, '.%%') ESCAPE '&'")   # whole subtree
                params.append(escaped_name)

            query.append(" AND %s (%s)" %
                (self._get_not(group), " OR ".join(subquery)))

        query.append(" AND e.valid = 1 LIMIT %s")
        params.append(count)

        return ["".join(query)], [params], 0

    def _build_store_events_event(self, client, event, raw_event):
        """Build query and params for event insertion"""
        return (
            [
                "INSERT INTO events (idea_id,received,client_id,data) VALUES (%s, NOW(), %s, %s)",
                "SELECT LAST_INSERT_ID() AS id"
            ],
            [(event["ID"], client.id, raw_event), ()],
            1
        )

    def _build_store_events_categories(self, event_id, cat_ids):
        """Build query and params for insertion of event-categories mapping"""
        return (
            ["INSERT INTO event_category_mapping (event_id,category_id) VALUES " +
                self._get_comma_perc_n(2, cat_ids)],
            [tuple(param for cat_id in cat_ids for param in (event_id, cat_id))],
            None
        )

    def _build_store_events_tags(self, event_id, tag_ids):
        """Build query and params for insertion of event-tags mapping"""
        return (
            ["INSERT INTO event_tag_mapping (event_id,tag_id) VALUES " +
                self._get_comma_perc_n(2, tag_ids)],
            [tuple(param for tag_id in tag_ids for param in (event_id, tag_id))],
            None
        )

    def _build_insert_last_received_id(self, client, id):
        """Build query and params for insertion of the last event id received by client"""
        return (
            ["INSERT INTO last_events(client_id, event_id, timestamp) VALUES(%s, %s, NOW())"],
            [(client.id, id)],
            None
        )

    def _build_get_last_event_id(self):
        """Build query and params for querying the id of the last inserted event"""
        return ["SELECT MAX(id) as id FROM events"], [()], 0

    def _build_get_last_received_id(self, client):
        """Build query and params for querying the last event id received by client"""
        return (
            ["SELECT event_id as id FROM last_events WHERE client_id = %s ORDER BY last_events.id DESC LIMIT 1"],
            [(client.id,)],
            0
        )

    def _build_load_maps_tags(self):
        """Build query and params for updating the tag map"""
        return (
            [
                "DELETE FROM tags",
                "INSERT INTO tags(id, tag) VALUES " +
                    self._get_comma_perc_n(2, self.tagmap)
            ],
            [
                (),
                tuple(param for tag, num in self.tagmap.items() for param in (num, tag))
            ],
            None
        )

    def _build_load_maps_cats(self):
        """Build query and params for updating the catetgory map"""
        params = []
        for cat_subcat, num in self.catmap.items():
            catsplit = cat_subcat.split(".", 1)
            category = catsplit[0]
            subcategory = catsplit[1] if len(catsplit) > 1 else None
            params.extend((num, category, subcategory, cat_subcat))

        return (
            [
                "DELETE FROM categories",
                "INSERT INTO categories(id, category, subcategory, cat_subcat) VALUES " +
                    self._get_comma_perc_n(4, self.catmap)
            ],
            [
                (),
                tuple(params)
            ],
            None
        )

    def _build_purge_lastlog(self, days):
        """Build query and params for purging stored client last event mapping older than days"""
        return (
            [
                "DELETE FROM last_events "
                " USING last_events LEFT JOIN ("
                "    SELECT MAX(id) AS last FROM last_events"
                "    GROUP BY client_id"
                " ) AS maxids ON last=id"
                " WHERE timestamp < DATE_SUB(CURDATE(), INTERVAL %s DAY) AND last IS NULL",
            ],
            [(days,)],
            0
        )

    def _build_purge_events_get_id(self, days):
        """Build query and params to get largest event id of events older than days"""
        return (
            [
                "SELECT MAX(id) as id"
                "  FROM events"
                "  WHERE received < DATE_SUB(CURDATE(), INTERVAL %s DAY)"
            ],
            [(days,)],
            0
        )

    def _build_purge_events_events(self, id_):
        """Build query and params to remove events older then days and their mappings"""
        return (
            [
                "DELETE FROM event_category_mapping WHERE event_id <= %s",
                "DELETE FROM event_tag_mapping WHERE event_id <= %s",
                "DELETE FROM events WHERE id <= %s",
            ],
            [(id_,), (id_,), (id_,)],
            2
        )


class PostgresUnsafeQueryContext(UnsafeQueryContext):

    SAVEPOINT = 'context_savepoint'

    def __enter__(self):
        self.db.execute([self.db.ppgsql.SQL('SAVEPOINT "context_savepoint"')], [()])
        return self

    def __exit__(self, exc_type, exc_value, exc_traceback):
        if exc_type is not None:
            self.db.execute([self.db.ppgsql.SQL('ROLLBACK TO SAVEPOINT "context_savepoint"')], [()])

        return self.silence_exc and exc_type is not None \
            and issubclass(exc_type, (self.db.db.IntegrityError, self.db.db.DataError))


class PostgreSQL(DataBase):
    unsafe_query_context = PostgresUnsafeQueryContext

    def __init__(
            self, req, log, host, user, password, dbname, port, retry_count,
            retry_pause, event_size_limit, catmap_filename, tagmap_filename):

        super(DataBase, self).__init__(req, log, host, user, password, dbname, port, retry_count,
            retry_pause, event_size_limit, catmap_filename, tagmap_filename)

        import psycopg2 as db
        from psycopg2 import sql as ppgsql
        import psycopg2.extras as ppgextra
        self.db = db
        self.ppgsql = ppgsql
        self.ppgextra = ppgextra

    def connect(self):
        self.con = self.db.connect(
            host=self.host, user=self.user, password=self.password,
            dbname=self.dbname, port=self.port, cursor_factory=self.ppgextra.RealDictCursor)

    def _build_get_client_by_name(self, cert_names=None, name=None, secret=None):
        """Build query and params for client lookup"""
        query = ["SELECT * FROM clients WHERE valid"]
        params = []
        if name:
            query.append(" AND name = %s")
            params.append(name.lower())
        if secret:
            query.append(" AND secret = %s")
            params.append(secret)
        if cert_names:
            query.append(" AND hostname IN (%s)" % self._get_comma_perc(cert_names))
            params.extend(n.lower() for n in cert_names)

        return ["".join(query)], [params], 0

    def _build_get_clients(self, id):
        """Build query and params for client lookup by id"""
        query = ["SELECT * FROM clients"]
        params = []
        if id:
            query.append("WHERE id = %s")
            params.append(id)
        query.append("ORDER BY id")

        return [" ".join(query)], [params], 0

    def _build_add_modify_client(self, id, **kwargs):
        """Build query and params for adding/modifying client"""
        fields = set(Client._fields) - {"id", "registered"}
        cols, params = map(
            list,
            zip(
                *(
                    (k, None)  # disable secret
                    if k == "secret" and v == "" else
                    (k, v)
                    for k, v in kwargs.items()
                    if v is not None and k in fields
                )
            )
        )

        if id is None:
            query = self.ppgsql.SQL('INSERT INTO clients ("registered", {}) VALUES (NOW(), {}) RETURNING id').format(
                self.ppgsql.SQL(", ").join(map(self.ppgsql.Identifier, cols)),
                self.ppgsql.SQL(", ").join(self.ppgsql.Placeholder() * len(cols))
            )
        elif not cols:
            return ["SELECT %s AS id"], [(id,)], 0
        else:
            query = self.ppgsql.SQL("UPDATE clients SET {} WHERE id = {} RETURNING id").format(
                self.ppgsql.SQL(", ").join(
                    self.ppgsql.SQL("{} = {}").format(
                        self.ppgsql.Identifier(col),
                        self.ppgsql.Placeholder()
                    ) for col in cols
                ),
                self.ppgsql.Placeholder()
            )
            params.append(id)

        return [query], [params], 0
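    # The three branches above: for a new client (id is None) an INSERT with
    # "registered" set to NOW() is composed and the fresh id is read back via
    # RETURNING; a modification with no changed fields short-circuits to
    # "SELECT %s AS id"; otherwise an UPDATE ... RETURNING id is composed with
    # psycopg2.sql so column identifiers are quoted safely.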

    def _build_get_debug_version(self):
        return ["SELECT setting AS version FROM pg_settings WHERE name = 'server_version'"], [()], 0

    def _build_get_debug_tablestat(self):
        return [
            "SELECT "
                'tablename AS "Name", '
                'relnatts AS "Columns", '
                'n_live_tup AS "Rows", '
                'pg_catalog.pg_size_pretty(pg_catalog.pg_table_size(oid)) AS "Table_size", '
                'pg_catalog.pg_size_pretty(pg_catalog.pg_indexes_size(oid)) AS "Index_size", '
                'coll.collations AS "Collations" '
            "FROM "
                "pg_catalog.pg_tables tbls "
                "LEFT OUTER JOIN pg_catalog.pg_class cls "
                "ON tbls.tablename=cls.relname "
                "LEFT OUTER JOIN pg_catalog.pg_stat_user_tables sut "
                "ON tbls.tablename=sut.relname "
                "LEFT OUTER JOIN ("
                    "SELECT "
                        "table_name, "
                        "string_agg("
                            "DISTINCT COALESCE("
                                "collation_name, "
                                "("
                                    "SELECT "
                                        "datcollate "
                                    "FROM "
                                        "pg_catalog.pg_database "
                                    "WHERE "
                                        "datname=%s"
                                ")"
                            "), "
                            "','"
                        ") AS collations "
                    "FROM "
                        "information_schema.columns "
                    "GROUP BY "
                        "table_name"
                ") coll "
                "ON tbls.tablename=coll.table_name "
            "WHERE "
                "tbls.schemaname='public' "
                "AND tbls.tableowner=%s"
        ], [(self.dbname, self.user)], 0

    def _load_event_json(self, data):
        """Return decoded json from data loaded from database, if unable to decode, return None"""
        try:
            return json.loads(data.tobytes())
        except Exception:
            return None

    def _build_fetch_events(
            self, client, id, count,
            cat, nocat, tag, notag, group, nogroup):

        query = ["SELECT e.id, e.data FROM clients c RIGHT JOIN events e ON c.id = e.client_id WHERE e.id > %s"]
        params = [id or 0]

        if cat or nocat:
            cats = self.getMaps(self.catmap, (cat or nocat))
            query.append(
                " AND e.id %s IN (SELECT event_id FROM event_category_mapping WHERE category_id IN (%s))" %
                (self._get_not(cat), self._get_comma_perc(cats))
            )
            params.extend(cats)

        if tag or notag:
            tags = self.getMaps(self.tagmap, (tag or notag))
            query.append(
                " AND e.id %s IN (SELECT event_id FROM event_tag_mapping WHERE tag_id IN (%s))" %
                (self._get_not(tag), self._get_comma_perc(tags))
            )
            params.extend(tags)

        if group or nogroup:
            subquery = []
            for name in group or nogroup:
                name = name.lower()  # assumes only lowercase names
                escaped_name = name.replace('&', '&&').replace("_", "&_").replace("%", "&%")  # escape for LIKE
                subquery.append("c.name = %s")                          # exact client
                params.append(name)
                subquery.append("c.name LIKE %s || '.%%' ESCAPE '&'")   # whole subtree
                params.append(escaped_name)

            query.append(" AND %s (%s)" % (self._get_not(group), " OR ".join(subquery)))

        query.append(" AND e.valid LIMIT %s")
        params.append(count)

        return ["".join(query)], [params], 0

    def _build_store_events_event(self, client, event, raw_event):
        """Build query and params for event insertion"""
        return (
            ["INSERT INTO events (idea_id,received,client_id,data) VALUES (%s, NOW(), %s, %s) RETURNING id"],
            [(event["ID"], client.id, self.db.Binary(raw_event.encode('utf8')))],
            0
        )

    def _build_store_events_categories(self, event_id, cat_ids):
        """Build query and params for insertion of event-categories mapping"""
        return (
            ["INSERT INTO event_category_mapping (event_id,category_id) VALUES " +
                self._get_comma_perc_n(2, cat_ids)],
            [tuple(param for cat_id in cat_ids for param in (event_id, cat_id))],
            None
        )

    def _build_store_events_tags(self, event_id, tag_ids):
        """Build query and params for insertion of event-tags mapping"""
        return (
            ["INSERT INTO event_tag_mapping (event_id,tag_id) VALUES " +
                self._get_comma_perc_n(2, tag_ids)],
            [tuple(param for tag_id in tag_ids for param in (event_id, tag_id))],
            None
        )

    def _build_insert_last_received_id(self, client, id):
        """Build query and params for insertion of the last event id received by client"""
        return (
            ["INSERT INTO last_events(client_id, event_id, timestamp) VALUES(%s, %s, NOW())"],
            [(client.id, None if id == 1 else id)],
            None
        )

    def _build_get_last_event_id(self):
        """Build query and params for querying the id of the last inserted event"""
        return ["SELECT MAX(id) as id FROM events"], [()], 0

    def _build_get_last_received_id(self, client):
        """Build query and params for querying the last event id received by client"""
        return (
            ["SELECT event_id as id FROM last_events WHERE client_id = %s ORDER BY last_events.id DESC LIMIT 1"],
            [(client.id,)],
            0
        )

    def _build_load_maps_tags(self):
        """Build query and params for updating the tag map"""
        return (
            [
                "ALTER TABLE event_tag_mapping DROP CONSTRAINT event_tag_mapping_tag_id_fk",
                "DELETE FROM tags",
                "INSERT INTO tags(id, tag) VALUES " +
                    self._get_comma_perc_n(2, self.tagmap),
                'ALTER TABLE event_tag_mapping ADD CONSTRAINT "event_tag_mapping_tag_id_fk" FOREIGN KEY ("tag_id") REFERENCES "tags" ("id")'
            ],
            [(), (), tuple(param for tag, num in self.tagmap.items() for param in (num, tag)), ()],
            None
        )

    def _build_load_maps_cats(self):
        """Build query and params for updating the catetgory map"""
        params = []
        for cat_subcat, num in self.catmap.items():
            catsplit = cat_subcat.split(".", 1)
            category = catsplit[0]
            subcategory = catsplit[1] if len(catsplit) > 1 else None
            params.extend((num, category, subcategory, cat_subcat))

        return (
            [
                "ALTER TABLE event_category_mapping DROP CONSTRAINT event_category_mapping_category_id_fk",
                "DELETE FROM categories",
                "INSERT INTO categories(id, category, subcategory, cat_subcat) VALUES " +
                    self._get_comma_perc_n(4, self.catmap),
                'ALTER TABLE event_category_mapping ADD CONSTRAINT "event_category_mapping_category_id_fk" FOREIGN KEY ("category_id") REFERENCES "categories" ("id")'
            ],
            [(), (), tuple(params), ()],
            None
        )

    def _build_purge_lastlog(self, days):
        """Build query and params for purging stored client last event mapping older than days"""
        return (
            [
                "DELETE FROM last_events "
                " USING last_events le LEFT JOIN ("
                "    SELECT MAX(id) AS last FROM last_events"
                "    GROUP BY client_id"
                " ) AS maxids ON maxids.last=le.id"
                " WHERE le.timestamp < CURRENT_DATE - INTERVAL %s DAY AND maxids.last IS NULL"
            ],
            [(str(days),)],
            0
        )

    def _build_purge_events_get_id(self, days):
        """Build query and params to get largest event id of events older than days"""
        return (
            [
                "SELECT MAX(id) as id"
                "  FROM events"
                "  WHERE received < CURRENT_DATE - INTERVAL %s DAY"
            ],
            [(str(days),)],
            0
        )

    def _build_purge_events_events(self, id_):
        """Build query and params to remove events older then days and their mappings"""
        return ["DELETE FROM events WHERE id <= %s"], [(id_,)], 0
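    # Note: unlike the MySQL variant, only the events table is purged here;
    # presumably the PostgreSQL schema removes the category/tag mapping rows
    # itself (e.g. via ON DELETE CASCADE foreign keys). That is an assumption
    # read off this query alone, not verified against the schema.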



def expose(read=1, write=0, debug=0):

    def expose_deco(meth):
        meth.exposed = True
        meth.read = read
        meth.write = write
        meth.debug = debug
        if not hasattr(meth, "arguments"):
            meth.arguments = get_method_params(meth)
        return meth

    return expose_deco


class Server(ObjectBase):

    def __init__(self, req, log, auth, handler):
        ObjectBase.__init__(self, req, log)
        self.auth = auth
        self.handler = handler

    def sanitize_args(self, path, func, args, exclude=["self", "post"]):
        # silently remove internal args, these should never be used
        # but if somebody does, we do not expose them by error message
        intargs = set(args).intersection(exclude)
        for a in intargs:
            del args[a]
        if intargs:
            self.log.info("sanitize_args: Called with internal args: %s" % ", ".join(intargs))

        # silently remove surplus arguments - potential forward
        # compatibility (unknown args will get ignored)
        badargs = set(args) - set(func.arguments)
        for a in badargs:
            del args[a]
        if badargs:
            self.log.info("sanitize_args: Called with superfluous args: %s" % ", ".join(badargs))

        return args
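    # Illustrative example (hypothetical handler): for a method whose
    # func.arguments is ("self", "post", "events"), calling
    # sanitize_args(path, func, {"events": [...], "self": [...], "token": [...]})
    # logs and drops "self" (internal) and "token" (unknown to the handler)
    # and returns just {"events": [...]}.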

    def wsgi_app(self, environ, start_response, exc_info=None):
        path = environ.get("PATH_INFO", "").lstrip("/")
        self.req.reset(env=environ, path=path)
        output = ""
        status = "200 OK"
        headers = [('Content-type', 'application/json')]
        exception = None

        try:
            try:
                method = getattr(self.handler, path)
                method.exposed    # dummy access to trigger AttributeError
            except Exception:
                raise self.req.error(message="You've fallen off the cliff.", error=404)
            self.req.args = args = parse_qs(environ.get('QUERY_STRING', ""))

            self.req.client = client = self.auth.authenticate(environ, args)
            if not client:
                raise self.req.error(message="I'm watching. Authenticate.", error=403)
            auth = self.auth.authorize(self.req.env, self.req.client, self.req.path, method)
            if not auth:
                raise self.req.error(message="I'm watching. Not authorized.", error=403, client=client.name)
            args = self.sanitize_args(path, method, args)

            # Based on RFC2616, section 4.4 we SHOULD respond with 400 (bad request) or 411
            # (length required) if content length was not specified. We choose not to, to
            # preserve compatibility with clients deployed in the wild, which use POST for
            # all requests (even those without payload, with no specified content length).
            # According to PEP3333, section "Input and Error Streams", the application SHOULD
            # NOT attempt to read more data than specified by CONTENT_LENGTH. As stated in
            # section "environ Variables", CONTENT_LENGTH may be empty (string) or absent.
            try:
                content_length = int(environ.get('CONTENT_LENGTH', 0))
            except ValueError:
                content_length = 0

            try:
                post_data = environ['wsgi.input'].read(content_length)
            except Exception:
                raise self.req.error(message="Data read error.", error=408, exc=sys.exc_info())

            headers, output = method(post_data, **args)

        except Error as e:
            exception = e
        except Exception as e:
            exception = self.req.error(message="Server exception", error=500, exc=sys.exc_info())

        if exception:
            status = "%d %s" % exception.get_http_err_msg()
            output = json.dumps(exception, cls=Encoder)
            exception.log(self.log)

        # Make sure everything is properly encoded - JSON and various function
        # may spit out unicode instead of str and it gets propagated up (str
        # + unicode = unicode).
        # For Python2 the right thing would be to be unicode correct among whole
        # source and always decode on input (json module does that for us) and
        # on output here.
        # For Python3 strings are internally unicode so no decoding on input is
        # necessary. For output, "status" must be unicode string, "output" must
        # be encoded bytes array, what is done here. Important: for Python 3 we
        # define: unicode = str
        if isinstance(status, unicode) and sys.version_info[0] < 3:
            status = status.encode("utf-8")
        if isinstance(output, unicode):
            output = output.encode("utf-8")
        headers.append(('Content-Length', str(len(output))))
        start_response(status, headers)
        return [output]

    __call__ = wsgi_app
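
    # A Server instance is the WSGI application itself; the WSGI container
    # calls it once per request through __call__ (an alias of wsgi_app).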


def json_wrapper(method):
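    """Wrap a WardenHandler method: deserialize the JSON POST body into the
    "events" argument (when the method accepts one) and serialize the return
    value back to JSON, converting (de)serialization failures into Warden errors."""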
    def meth_deco(self, post, **args):
        if "events" in get_method_params(method):
            try:
                events = json.loads(post.decode('utf-8')) if post else None
            except Exception as e:
                raise self.req.error(
                    message="Deserialization error.", error=400,
                    exc=sys.exc_info(), args=post, parser=str(e))
            if events:
                args["events"] = events

        result = method(self, **args)   # call requested method

        try:
            output = json.dumps(result, cls=Encoder)
        except Exception:
            raise self.req.error(message="Serialization error", error=500, exc=sys.exc_info(), args=str(result))

        return [('Content-type', 'application/json')], output

    try:
        meth_deco.arguments = method.arguments
    except AttributeError:
        meth_deco.arguments = get_method_params(method)
    return meth_deco


class WardenHandler(ObjectBase):

    def __init__(
            self, req, log, validator, db, auth,
            send_events_limit=500, get_events_limit=1000,
            description=None):

        ObjectBase.__init__(self, req, log)

        self.auth = auth
        self.db = db
        self.validator = validator
        self.send_events_limit = send_events_limit
        self.get_events_limit = get_events_limit
        self.description = description

    @expose(read=1, debug=1)
    @json_wrapper
    def getDebug(self):
        return {
            "client": self.req.client._asdict(),
            "database": self.db.get_debug(),
            "system": {
                "python": sys.version,
                "uname": os.uname()
            },
            "process": {
                "cwd": unicode(os.getcwd()),
                "pid": os.getpid(),
                "ppid": os.getppid(),
                "pgrp": os.getpgrp(),
                "uid": os.getuid(),
                "gid": os.getgid(),
                "euid": os.geteuid(),
                "egid": os.getegid(),
                "groups": os.getgroups()
            }
        }

    @expose(read=1)
    @json_wrapper
    def getInfo(self):
        info = {
            "version": VERSION,
            "send_events_limit": self.send_events_limit,
            "get_events_limit": self.get_events_limit
        }
        if self.description:
            info["description"] = self.description
        return info

    @expose(read=1)
    @json_wrapper
    def getEvents(
            self, id=None, count=None,
            cat=None, nocat=None,
            tag=None, notag=None,
            group=None, nogroup=None):
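        """Return new events for the calling client.

        Without an explicit 'id', the client's stored last-received id is used
        (created on first access); an 'id' <= 0 is interpreted relative to the
        newest stored event. 'count' defaults to and is capped by get_events_limit.
        """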

        try:
            id = int(id[0])
        except (ValueError, TypeError, IndexError):
            # If client was already here, fetch server notion of his last id
            try:
                id = self.db.getLastReceivedId(self.req.client)
            except Exception as e:
                self.log.info("cannot getLastReceivedId - " + type(e).__name__ + ": " + str(e))
                id = None

        if id is None:
            # First access, remember the guy and get him last id
            id = self.db.getLastEventId()
            self.db.insertLastReceivedId(self.req.client, id)
            return {
                "lastid": id,
                "events": []
            }

        if id <= 0:
            # Client wants to get only last N events and reset server notion of last id
            id += self.db.getLastEventId()
            if id < 0: id = 0

        try:
            count = int(count[0])
        except (ValueError, TypeError, IndexError):
            count = self.get_events_limit

        if self.get_events_limit:
            count = min(count, self.get_events_limit)
        count = max(0, count)
        res = self.db.fetch_events(self.req.client, id, count, cat, nocat, tag, notag, group, nogroup)
        self.db.insertLastReceivedId(self.req.client, res['lastid'])
        self.log.info("sending %d events, lastid is %i" % (len(res["events"]), res["lastid"]))
        return res

    def check_node(self, event, event_indx, name):
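        """Check that the event's Node[0].Name matches the name of the sending client."""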
        try:
            ev_id = event['Node'][0]['Name'].lower()
        except (KeyError, TypeError, IndexError):
            # Event does not bear valid Node attribute
            return [
                ErrorMessage(422, "Event does not bear valid Node attribute", {event_indx})
            ]
        if ev_id != name:
            return [
                ErrorMessage(422, "Node does not correspond with saving client", {event_indx})
            ]
        return []

    def check_idea_id(self, event, event_indx):
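        """Check that the event carries a usable 'ID': a non-empty string of at
        most 64 characters that does not contain NUL bytes."""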
        id_length_limit = 64
        try:
            id_ = event["ID"]
        except (KeyError, TypeError, ValueError):
            return [ErrorMessage(422, "Missing IDEA ID", {event_indx})]
        if not isinstance(id_, unicode) or len(id_) == 0:
            return [ErrorMessage(422, "The provided IDEA ID is invalid", {event_indx})]

        errors = []
        if len(id_) > id_length_limit:
            errors.append(
                ErrorMessage(
                    422, "The provided event ID is too long",
                    {event_indx}, id_length_limit=id_length_limit
                )
            )
        if '\x00' in id_:
            errors.append(ErrorMessage(422, "IDEA ID cannot contain null bytes", {event_indx}))
        return errors

    def add_errors(self, errs_to_add):
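        """Merge new errors into self.errs, deduplicating by (error, message, unique_id)."""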
        for err in errs_to_add:
            self.errs.setdefault((err.error, err.message, err.unique_id), err).events.update(err.events)

    @expose(write=1)
    @json_wrapper
    def sendEvents(self, events=[]):
        self.errs = {}

        if not isinstance(events, list):
            raise self.req.error(message="List of events expected.", error=400)
        if len(events) > self.send_events_limit:
            self.add_errors(
                [
                    ErrorMessage(
                        507, "Too many events in one batch.",
                        set(range(self.send_events_limit, len(events))),
                        send_events_limit=self.send_events_limit
                    )
                ]
            )
        events_tosend = []
        events_raw = []
        events_nums = []
        for i, event in enumerate(events[0:self.send_events_limit]):
            v_errs = self.validator.check(event)
            if v_errs:
                self.add_errors(v_errs)
                continue

            idea_id_errs = self.check_idea_id(event, i)
            if idea_id_errs:
                self.add_errors(idea_id_errs)
                continue

            node_errs = self.check_node(event, i, self.req.client.name)
            if node_errs:
                self.add_errors(node_errs)
                continue

            if self.req.client.test and 'Test' not in event.get('Category', []):
                self.add_errors(
                    [
                        ErrorMessage(
                            422, "You're allowed to send only messages containing \"Test\" among categories.", {i},
                            # Ensure the error message is reported separately for each distinct combination of categories
                            unique_id=tuple(event.get('Category', [])),
                            categories=event.get('Category', [])
                        )
                    ]
                )
                continue
            raw_event = json.dumps(event)
            if len(raw_event) >= self.db.event_size_limit:
                self.add_errors(
                    [
                        ErrorMessage(
                            413, "Event too long (>%i B)" % self.db.event_size_limit, {i},
                            event_size_limit=self.db.event_size_limit
                        )
                    ]
                )
                continue
            events_tosend.append(event)
            events_raw.append(raw_event)
            events_nums.append(i)

        db_errs, saved = self.db.store_events(self.req.client, events_tosend, events_raw, events_nums)
        self.add_errors(db_errs)
        self.log.info("Saved %i events" % saved)
        if self.errs:
            raise self.req.error(errors=self.errs.values())

        return {"saved": saved}


def read_ini(path):
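    """Read an INI style configuration file into a dict of dicts keyed by lowercased section names."""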
    c = ConfigParser.RawConfigParser()
    res = c.read(path)
    if not res or path not in res:
        # We don't have logging yet, hopefully this will go into webserver log
        raise Error(message="Unable to read config: %s" % path)
    data = {}
    for sect in c.sections():
        for opts in c.options(sect):
            lsect = sect.lower()
            if lsect not in data:
                data[lsect] = {}
            data[lsect][opts] = c.get(sect, opts)
    return data


def read_cfg(path):
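    """Read a JSON configuration file, skipping comment lines starting with '#' or '//';
    section and option names are lowercased."""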
    with io.open(path, "r", encoding="utf-8") as f:
        stripcomments = "\n".join((l for l in f if not l.lstrip().startswith(("#", "//"))))
        conf = json.loads(stripcomments)

    # Lowercase keys
    conf = dict((
        sect.lower(), dict(
            (subkey.lower(), val) for subkey, val in subsect.items())
    ) for sect, subsect in conf.items())

    return conf


def fallback_wsgi(environ, start_response, exc_info=None):

    # If server does not start, set up simple server, returning
    # Warden JSON compliant error message
    error = 503
    message = "Server not running due to initialization error"
    headers = [('Content-type', 'application/json')]

    logline = "Error(%d): %s" % (error, message)
    status = "%d %s" % (error, message)
    output = '{"errors": [{"error": %d, "message": "%s"}]}' % (
    logging.getLogger(__name__).critical(logline)
    start_response(status, headers)
    return [output.encode("utf-8")]


# Order in which the base objects must get initialized
section_order = ("log", "db", "auth", "validator", "handler", "server")

# List of sections and objects, configured by them
# First object in each object list is the default one, otherwise
# "type" keyword in section may be used to choose other
section_def = {
    "log": [FileLogger, SysLogger],
    "db": [MySQL, PostgreSQL],
    "auth": [X509NameAuthenticator, PlainAuthenticator, X509Authenticator, X509MixMatchAuthenticator],
    "validator": [JSONSchemaValidator, NoValidator],
    "handler": [WardenHandler],
    "server": [Server]
}

# Object parameter conversions and defaults
param_def = {
    FileLogger: {
        "req": {"type": "obj", "default": "req"},
        "filename": {"type": "filepath", "default": path.join(path.dirname(__file__), path.splitext(path.split(__file__)[1])[0] + ".log")},
        "level": {"type": "loglevel", "default": "info"},
    },
    SysLogger: {
        "req": {"type": "obj", "default": "req"},
        "socket": {"type": "filepath", "default": "/dev/log"},
        "facility": {"type": "facility", "default": "daemon"},
        "level": {"type": "loglevel", "default": "info"}
    },
    PlainAuthenticator: {
        "req": {"type": "obj", "default": "req"},
        "log": {"type": "obj", "default": "log"},
        "db": {"type": "obj", "default": "db"}
    },
    X509Authenticator: {
        "req": {"type": "obj", "default": "req"},
        "log": {"type": "obj", "default": "log"},
        "db": {"type": "obj", "default": "db"}
    },
    X509NameAuthenticator: {
        "req": {"type": "obj", "default": "req"},
        "log": {"type": "obj", "default": "log"},
        "db": {"type": "obj", "default": "db"}
    },
    X509MixMatchAuthenticator: {
        "req": {"type": "obj", "default": "req"},
        "log": {"type": "obj", "default": "log"},
        "db": {"type": "obj", "default": "db"}
    },
    NoValidator: {
        "req": {"type": "obj", "default": "req"},
        "log": {"type": "obj", "default": "log"},
    },
    JSONSchemaValidator: {
        "req": {"type": "obj", "default": "req"},
        "log": {"type": "obj", "default": "log"},
        "filename": {"type": "filepath", "default": path.join(path.dirname(__file__), "idea.schema")}
    },
    MySQL: {
        "req": {"type": "obj", "default": "req"},
        "log": {"type": "obj", "default": "log"},
        "host": {"type": "str", "default": "localhost"},
        "user": {"type": "str", "default": "warden"},
        "password": {"type": "str", "default": ""},
        "dbname": {"type": "str", "default": "warden3"},
        "port": {"type": "natural", "default": 3306},
        "retry_pause": {"type": "natural", "default": 3},
        "retry_count": {"type": "natural", "default": 3},
        "event_size_limit": {"type": "natural", "default": 5*1024*1024},
        "catmap_filename": {"type": "filepath", "default": path.join(path.dirname(__file__), "catmap_db.json")},
        "tagmap_filename": {"type": "filepath", "default": path.join(path.dirname(__file__), "tagmap_db.json")}
    },
    PostgreSQL: {
        "req": {"type": "obj", "default": "req"},
        "log": {"type": "obj", "default": "log"},
        "host": {"type": "str", "default": "localhost"},
        "user": {"type": "str", "default": "warden"},
        "password": {"type": "str", "default": ""},
        "dbname": {"type": "str", "default": "warden3"},
        "port": {"type": "natural", "default": 5432},
        "retry_pause": {"type": "natural", "default": 3},
        "retry_count": {"type": "natural", "default": 3},
        "event_size_limit": {"type": "natural", "default": 5*1024*1024},
        "catmap_filename": {"type": "filepath", "default": path.join(path.dirname(__file__), "catmap_db.json")},
        "tagmap_filename": {"type": "filepath", "default": path.join(path.dirname(__file__), "tagmap_db.json")}
    },
    WardenHandler: {
        "req": {"type": "obj", "default": "req"},
        "log": {"type": "obj", "default": "log"},
        "validator": {"type": "obj", "default": "validator"},
        "db": {"type": "obj", "default": "DB"},
        "auth": {"type": "obj", "default": "auth"},
        "send_events_limit": {"type": "natural", "default": 500},
        "get_events_limit": {"type": "natural", "default": 1000},
        "description": {"type": "str", "default": ""}
    },
    Server: {
        "req": {"type": "obj", "default": "req"},
        "log": {"type": "obj", "default": "log"},
        "auth": {"type": "obj", "default": "auth"},
        "handler": {"type": "obj", "default": "handler"}
    }
}
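
# Example of a matching JSON configuration (illustrative values only; any key
# listed in param_def may be set, and "type" selects a non-default class for
# the section):
#
# {
#     "log": {"type": "FileLogger", "level": "debug"},
#     "db": {"type": "PostgreSQL", "user": "warden", "dbname": "warden3"},
#     "auth": {"type": "X509NameAuthenticator"},
#     "validator": {"type": "JSONSchemaValidator"},
#     "handler": {"send_events_limit": 500, "description": "Example Warden server"},
#     "server": {}
# }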


def build_server(conf, section_order=section_order, section_def=section_def, param_def=param_def):
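    """Instantiate and wire together the objects described by conf, in section_order;
    return the resulting WSGI application, or fallback_wsgi if initialization fails."""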

    objects = {}    # Already initialized objects

    # Functions for validation and conversion of config values
    def facility(name):
        return int(getattr(logging.handlers.SysLogHandler, "LOG_" + name.upper()))

    def loglevel(name):
        return int(getattr(logging, name.upper()))

    def natural(name):
        num = int(name)
        if num < 1:
            raise ValueError("Not a natural number")
        return num

    def filepath(name):
        # Make paths relative to dir of this script
        return path.join(path.dirname(__file__), name)

    def obj(name):
        return objects[name.lower()]

    # Typedef dictionary
    conv_dict = {
        "facility": facility,
        "loglevel": loglevel,
        "natural": natural,
        "filepath": filepath,
        "obj": obj,
        "str": str
    }

    def init_obj(sect_name):
        config = dict(conf.get(sect_name, {}))
        sect_name = sect_name.lower()
        sect_def = section_def[sect_name]

        try:    # Object type defined?
            objtype = config["type"]
            del config["type"]
        except KeyError:    # No, fetch default object type for this section
            cls = sect_def[0]
        else:  # Yes, get corresponding class/callable
            names = [o.__name__ for o in sect_def]
            try:
                idx = names.index(objtype)
            except ValueError:
                raise KeyError("Unknown type %s in section %s" % (objtype, sect_name))

        # No surplus parameters? Disallow also 'obj' attributes, these are only
        # to provide default referenced section
        for name in config:
            if name not in params or (name in params and params[name]["type"] == "obj"):
                raise KeyError("Unknown key %s in section %s" % (name, sect_name))

        # Process parameters
        kwargs = {}
        for name, definition in params.items():
            raw_val = config.get(name, definition["default"])
            try:
                type_callable = conv_dict[definition["type"]]
                val = type_callable(raw_val)
            except Exception:
                raise KeyError("Bad value \"%s\" for %s in section %s" % (raw_val, name, sect_name))
            kwargs[name] = val

        try:
            obj_inst = cls(**kwargs)
        except Exception as e:
            raise KeyError("Cannot initialize %s from section %s: %s" % (
                cls.__name__, sect_name, str(e)))

        objects[sect_name] = obj_inst

        if isinstance(obj_inst, ObjectBase):
            # Log only objects here, functions must take care of themselves
            objects["log"].info("Initialized %s" % str(obj_inst))

    # Init logging with at least simple stderr StreamLogger
    # Dunno if it's ok within wsgi, but we have no other choice, let's
    # hope it at least ends up in webserver error log
    objects["log"] = StreamLogger()
    # Shared container for common data of ongoing WSGI request
    objects["req"] = Request()

    try:
        # Now try to init required objects
        for o in section_order:
            init_obj(o)
    except Exception as e:
        objects["log"].critical(str(e))
        objects["log"].debug("", exc_info=sys.exc_info())
        return fallback_wsgi

    objects["log"].info("Server ready")
# Command line utilities

def check_config():
    # If we got so far, server object got set up fine
    print("Looks clear.", file=sys.stderr)
    return 0


def list_clients(id=None):
    clients = server.handler.db.get_clients(id)
    lines = [[str(getattr(client, col)) for col in Client._fields] for client in clients]
    col_width = [max(len(val) for val in col) for col in zip(*(lines+[Client._fields]))]
    divider = ["-" * l for l in col_width]
    for line in [Client._fields, divider] + lines:
        print(" ".join([val.ljust(width) for val, width in zip(line, col_width)]))


def register_client(**kwargs):
    # argparse does _always_ return something, so we cannot rely on missing arguments
    if kwargs["valid"] is None: kwargs["valid"] = True
    if kwargs["read"] is None: kwargs["read"] = True
    if kwargs["write"] is None: kwargs["write"] = False
    if kwargs["debug"] is None: kwargs["debug"] = False
    if kwargs["test"] is None: kwargs["test"] = True
    return modify_client(id=None, **kwargs)


def modify_client(**kwargs):

    def isValidHostname(hostname):
        if len(hostname) > 255:
            return False
        if hostname.endswith("."):  # A single trailing dot is legal
            hostname = hostname[:-1]  # strip exactly one dot from the right, if present
        disallowed = re.compile(r"[^A-Z\d-]", re.IGNORECASE)
        return all(  # Split by labels and verify individually
            (label and len(label) <= 63  # length is within proper range
             and not label.startswith("-") and not label.endswith("-")  # no bordering hyphens
             and not disallowed.search(label))  # contains only legal characters
            for label in hostname.split("."))

    def isValidNSID(nsid):
        allowed = re.compile(r"^(?:[a-zA-Z_][a-zA-Z0-9_]*\.)*[a-zA-Z_][a-zA-Z0-9_]*$")
        return allowed.match(nsid)

    def isValidEmail(mail):
        allowed = re.compile(r"(^[a-zA-Z0-9_ .%!+-]*(?=<.*>))?(^|(<(?=.*(>))))[a-zA-Z0-9_.%!+-]+@[a-zA-Z0-9-.]+\4?$")   # just basic check
        valid = (allowed.match(ms.strip()) for ms in mail.split(','))
        return all(valid)

    def isValidID(id):
        client = server.handler.db.get_clients(id)
        return client and True or False

    if kwargs["name"] is not None:
        kwargs["name"] = kwargs["name"].lower()
        if not isValidNSID(kwargs["name"]):
            print("Invalid client name \"%s\"." % kwargs["name"], file=sys.stderr)
    if kwargs["hostname"] is not None:
        kwargs["hostname"] = kwargs["hostname"].lower()
        if not isValidHostname(kwargs["hostname"]):
            print("Invalid hostname \"%s\"." % kwargs["hostname"], file=sys.stderr)
    if kwargs["requestor"] is not None and not isValidEmail(kwargs["requestor"]):
        print("Invalid requestor email \"%s\"." % kwargs["requestor"], file=sys.stderr)
    if kwargs["id"] is not None and not isValidID(kwargs["id"]):
        print("Invalid id \"%s\"." % kwargs["id"], file=sys.stderr)
    for c in server.handler.db.get_clients():
        if kwargs["name"] is not None and kwargs["name"].lower() == c.name:
            print("Clash with existing name: %s" % str(c), file=sys.stderr)
        if kwargs["secret"] is not None and kwargs["secret"] == c.secret:
            print("Clash with existing secret: %s" % str(c), file=sys.stderr)
    newid = server.handler.db.add_modify_client(**kwargs)

    list_clients(id=newid)


def load_maps():
    server.handler.db.load_maps()


def purge(days=30, lastlog=None, events=None):
    if lastlog is None and events is None:
        lastlog = events = True
    if lastlog:
        count = server.handler.db.purge_lastlog(days)
        print("Purged %d lastlog entries." % count)
    if events:
        count = server.handler.db.purge_events(days)
        print("Purged %d events." % count)


def add_client_args(subargp, mod=False):
    subargp.add_argument("--help", action="help", help="show this help message and exit")
    if mod:
        subargp.add_argument(
            "-i", "--id", required=True, type=int,
            help="client id")
    subargp.add_argument(
        "-n", "--name", required=not mod,
        help="client name (in dotted reverse path notation)")
    subargp.add_argument(
        "-h", "--hostname", required=not mod,
        help="client FQDN hostname")
    subargp.add_argument(
        "-r", "--requestor", required=not mod,
        help="requestor email")
    subargp.add_argument(
        "-s", "--secret",
        help="authentication token (use explicit empty string to disable)")
    subargp.add_argument(
        "--note",
        help="client freetext description")

    reg_valid = subargp.add_mutually_exclusive_group(required=False)
    reg_valid.add_argument(
        "--valid", action="store_const", const=True, default=None,
        help="valid client (default)")
    reg_valid.add_argument("--novalid", action="store_const", const=False, dest="valid", default=None)

    reg_read = subargp.add_mutually_exclusive_group(required=False)
    reg_read.add_argument(
        "--read", action="store_const", const=True, default=None,
        help="client is allowed to read (default)")
    reg_read.add_argument("--noread", action="store_const", const=False, dest="read", default=None)

    reg_write = subargp.add_mutually_exclusive_group(required=False)
    reg_write.add_argument(
        "--nowrite", action="store_const", const=False, dest="write", default=None,
        help="client is allowed to send (default - no)")
    reg_write.add_argument("--write", action="store_const", const=True, default=None)

    reg_debug = subargp.add_mutually_exclusive_group(required=False)
    reg_debug.add_argument(
        "--nodebug", action="store_const", const=False, dest="debug", default=None,
        help="client is allowed receive debug output (default - no)")
    reg_debug.add_argument("--debug", action="store_const", const=True, default=None)

    reg_test = subargp.add_mutually_exclusive_group(required=False)
    reg_test.add_argument(
        "--test", action="store_const", const=True, default=None,
        help="client is yet in testing phase (default - yes)")
    reg_test.add_argument("--notest", action="store_const", const=False, dest="test", default=None)


def get_args():
    import argparse
    argp = argparse.ArgumentParser(
        description="Warden server " + VERSION, add_help=False)
    argp.add_argument(
        "--help", action="help",
        help="show this help message and exit")
    argp.add_argument(
        "-c", "--config",
        help="path to configuration file")
    subargp = argp.add_subparsers(title="commands", dest="command")
    subargp.required = True
    subargp_check = subargp.add_parser(
        "check", add_help=False,
        description="Try to setup server based on configuration file.",
        help="check configuration")
    subargp_check.set_defaults(command=check_config)
    subargp_check.add_argument(
        "--help", action="help",
        help="show this help message and exit")

    subargp_reg = subargp.add_parser(
        "register", add_help=False,
        description="Add new client registration entry.",
        help="register new client")
    subargp_reg.set_defaults(command=register_client)
    add_client_args(subargp_reg)

    subargp_mod = subargp.add_parser(
        "modify", add_help=False,
        description="Modify details of client registration entry.",
        help="modify client registration")
    subargp_mod.set_defaults(command=modify_client)
    add_client_args(subargp_mod, mod=True)

    subargp_list = subargp.add_parser(
        "list", add_help=False,
        description="List details of client registration entries.",
        help="list registered clients")
    subargp_list.set_defaults(command=list_clients)
    subargp_list.add_argument(
        "--help", action="help",
        help="show this help message and exit")
    subargp_list.add_argument(
        "--id", action="store", type=int,
        help="client id", default=None)

    subargp_purge = subargp.add_parser(
        "purge", add_help=False,
        description=(
            "Purge old events or lastlog records."
            " Note that lastlog purge retains at least one newest record for each"
            " client, even if it is older than 'days' days."),
        help="purge old events or lastlog records")
    subargp_purge.set_defaults(command=purge)
    subargp_purge.add_argument(
        "--help", action="help",
        help="show this help message and exit")
    subargp_purge.add_argument(
        "-l", "--lastlog", action="store_true", dest="lastlog", default=None,
        help="purge lastlog records")
    subargp_purge.add_argument(
        "-e", "--events", action="store_true", dest="events", default=None,
        help="purge events")
    subargp_purge.add_argument(
        "-d", "--days", action="store", dest="days", type=int, default=30,
        help="records older than 'days' back from today will get purged")

    subargp_loadmaps = subargp.add_parser(
        "loadmaps", add_help=False,
        description=(
            "Load 'categories' and 'tags' table from 'catmap_db.json' and 'tagmap_db.json'."
Pavel Kácha's avatar
Pavel Kácha committed
            " Note also that previous content of both tables will be lost."),
        help="load catmap and tagmap into db")
    subargp_loadmaps.set_defaults(command=load_maps)
    subargp_loadmaps.add_argument(
        "--help", action="help",
    return argp.parse_args()


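# Example invocations (illustrative paths and names):
#   warden_server.py -c warden_server.cfg check
#   warden_server.py -c warden_server.cfg register -n org.example.warden.client \
#       -h client.example.org -r admin@example.org --write
#   warden_server.py -c warden_server.cfg purge -e -d 90
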
if __name__ == "__main__":
    args = get_args()
    config = path.join(path.dirname(__file__), args.config or "warden_server.cfg")
    server = build_server(read_cfg(config))
    command = args.command
    subargs = vars(args)
    del subargs["command"]
    del subargs["config"]
    if not server or server is fallback_wsgi:
        print("Failed initialization, check configured log targets for reasons.", file=sys.stderr)
        sys.exit(255)
    sys.exit(command(**subargs))