warden_server.py
    #!/usr/bin/python
    # -*- coding: utf-8 -*-
    #
    # Copyright (C) 2011-2013 Cesnet z.s.p.o
    # Use of this source is governed by a 3-clause BSD-style license, see LICENSE file.
    
    import sys
    import os
    import logging
    import logging.handlers
    import ConfigParser
    from traceback import format_tb
    import M2Crypto.X509
    import json
    import MySQLdb as my
    import MySQLdb.cursors as mycursors
    from collections import namedtuple
    from uuid import uuid4
    from time import time, gmtime, sleep
    from math import trunc
    from io import BytesIO
    from urlparse import parse_qs
    from os import path
    from random import randint
    
    # for local version of up to date jsonschema
    sys.path.append(path.join(path.dirname(__file__), "..", "lib"))
    
    from jsonschema import Draft4Validator
    
    
    VERSION = "3.0-not-even-alpha"
    
    class Error(Exception):
    
        def __init__(self, method=None, req_id=None, errors=None, **kwargs):
            self.method = method
            self.req_id = req_id
            self.errors = [kwargs] if kwargs else []
            if errors:
                self.errors.extend(errors)
    
    
        def append(self, _events=None, **kwargs):
            self.errors.append(kwargs)
    
    
        def get_http_err_msg(self):
            try:
                err = self.errors[0]["error"]
                msg = self.errors[0]["message"]
            except (IndexError, KeyError):
                err = 500
                msg = "There's NO self-destruction button! Ah, you've just found it..."
            for e in self.errors:
                next_err = e.get("error", 500)
                if err != next_err:
                    # errors not same, round to basic err code (400, 500)
                    # and use the highest one
                    err = max(err//100, next_err//100)*100
                next_msg = e.get("message", "Unknown error")
                if msg != next_msg:
                    msg = "Multiple errors"
            return err, msg
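        # Illustrative sketch (not part of the original code) of how the
        # aggregation above behaves for a couple of assumed error lists:
        #
        #   Error(errors=[{"error": 400, "message": "a"},
        #                 {"error": 404, "message": "b"}]).get_http_err_msg()
        #   # -> (400, "Multiple errors")   both codes round to the 4xx class
        #
        #   Error(errors=[{"error": 400, "message": "a"},
        #                 {"error": 500, "message": "b"}]).get_http_err_msg()
        #   # -> (500, "Multiple errors")   mixed classes round up to 500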
    
    
        def __str__(self):
            return "\n".join(self.str_err(e) for e in self.errors)
    
    
        def log(self, logger, prio=logging.ERROR):
            for e in self.errors:
                logger.log(prio, self.str_err(e))
                info = self.str_info(e)
                if info:
                    logger.info(info)
                debug = self.str_debug(e)
                if debug:
                    logger.debug(debug)
    
    
        def str_err(self, e):
            out = []
            out.append("Error(%s) %s " % (e.get("error", 0), e.get("message", "Unknown error")))
            if "exc" in e and e["exc"]:
                out.append("(cause was %s: %s)" % (e["exc"][0].__name__, str(e["exc"][1])))
            return "".join(out)
    
    
        def str_info(self, e):
            ecopy = dict(e)    # shallow copy
            ecopy.pop("req_id", None)
            ecopy.pop("method", None)
            ecopy.pop("error", None)
            ecopy.pop("message", None)
            ecopy.pop("exc", None)
            if ecopy:
                out = "Detail: %s" % (json.dumps(ecopy, default=lambda v: str(v)))
            else:
                out = ""
            return out
    
    
        def str_debug(self, e):
            out = []
            if not "exc" in e or not e["exc"]:
                return ""
            exc_tb = e["exc"][2]
            if exc_tb:
                out.append("Traceback:\n")
                out.extend(format_tb(exc_tb))
            return "".join(out)
    
    
        def to_dict(self):
            errlist = []
            for e in self.errors:
                ecopy = dict(e)
                ecopy.pop("exc", None)
                errlist.append(ecopy)
            d = {
                "method": self.method,
                "req_id": self.req_id,
                "errors": errlist
            }
            return d
    
    
    
    def get_clean_root_logger(level=logging.INFO):
        """ Attempts to get logging module into clean slate state """
    
        # We want to be able to set up at least stderr logger before any
        # configuration is read, and then later get rid of it and set up
        # whatever administrator requires.
        # However, there can be only one root logger, and we want a clean
        # slate every time we initialize StreamLogger or FileLogger... which
        # is not exactly supported by the logging module.
        # So, we look directly inside logger class and clean up handlers/filters
        # manually.
        logger = logging.getLogger()  # no need to create new
        logger.setLevel(level)
        while logger.handlers:
            logger.removeHandler(logger.handlers[0])
        while logger.filters:
            logger.removeFilter(logger.filters[0])
        return logger
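
    # Usage sketch (not part of the original code): every logger factory below
    # calls get_clean_root_logger(), so re-initialization replaces handlers
    # instead of stacking them (the log file path here is hypothetical):
    #
    #   StreamLogger()                          # bootstrap logging to stderr
    #   FileLogger(req, "/tmp/warden.log")      # later, from config; the stderr handler is dropped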
    
    
    
    def StreamLogger(stream=sys.stderr, level=logging.INFO):
        """ Fallback handler just for setup, not meant to be used from
            configuration file because during wsgi query stdout/stderr
            is forbidden.
        """
    
        fhand = logging.StreamHandler(stream)
        fform = logging.Formatter('%(asctime)s %(filename)s[%(process)d]: (%(levelname)s) %(message)s')
        fhand.setFormatter(fform)
        logger = get_clean_root_logger(level)
        logger.addHandler(fhand)
    
    
    
    class LogRequestFilter(logging.Filter):
        """ Filter class, instance of which is added to logger class to add
            info about request automatically into every logline, no matter
            how it came into existence.
        """
    
        def __init__(self, req):
            logging.Filter.__init__(self)
            self.req = req
    
    
        def filter(self, record):
            if self.req.env:
                record.req_preamble = "%08x/%s: " % (self.req.req_id or 0, self.req.path)
            else:
                record.req_preamble = ""
            return True
    
    
    
    def FileLogger(req, filename, level=logging.INFO):
    
        fhand = logging.FileHandler(filename)
        fform = logging.Formatter('%(asctime)s %(filename)s[%(process)d]: (%(levelname)s) %(req_preamble)s%(message)s')
        fhand.setFormatter(fform)
        ffilt = LogRequestFilter(req)
        logger = get_clean_root_logger(level)
        logger.addFilter(ffilt)
        logger.addHandler(fhand)
        logging.info("Initialized FileLogger(req=%s, filename=\"%s\", \"%s\")" % (type(req).__name__, filename, level))
    
    
    
    def SysLogger(req, socket="/dev/log", facility=logging.handlers.SysLogHandler.LOG_DAEMON, level=logging.INFO):
    
        fhand = logging.handlers.SysLogHandler(address=socket, facility=facility)
        fform = logging.Formatter('%(filename)s[%(process)d]: (%(levelname)s) %(message)s')
        fhand.setFormatter(fform)
        ffilt = LogRequestFilter(req)
        logger = get_clean_root_logger(level)
        logger.addFilter(ffilt)
        logger.addHandler(fhand)
        logging.info("Initialized SysLogger(req=%s, socket=\"%s\", facility=\"%s\", level=\"%s\")" % (type(req).__name__, socket, facility, level))
    
    
    
    class Client(namedtuple("ClientTuple",
        ["id", "registered", "requestor", "hostname", "service", "note",
        "identity", "secret", "read", "debug", "write", "test"])):
    
        def __str__(self):
            return (
                "%s(id=%i, registered=%s, requestor=\"%s\", hostname=\"%s\", "
                "service=\"%s\", note=\"%s\", identity=\"%s\", secret=%s, "
                "read=%i, debug=%i, write=%i, test=%i)") % (
                type(self).__name__, self.id, self.registered,
                self.requestor, self.hostname, self.service, self.note,
                self.identity, "..." if self.secret is not None else "None",
                self.read, self.debug, self.write, self.test)
    
    
    
    class Object(object):
    
        def __str__(self):
            return "%s()" % type(self).__name__
    
    
    
    class Request(Object):
        """ Simple container for info about ongoing request.
            One instance gets created before server startup, and all other
            configured objects get it as parameter during instantiation.
    
            The Server then takes care of populating this instance at the
            start of each wsgi request (and resetting it at the end). All
            other objects can then find the current request info in their
            own self.req.
    
            However, only Server.wsgi_app, handler (WardenHandler) exposed
            methods and logging related objects should use self.req directly.
            All other objects should use self.req only as source of data for
            error/exception handling/logging, and should take/return
            necessary data as arguments/return values for clarity on
            which data their main codepaths work with.
        """
    
        def __init__(self):
            Object.__init__(self)
            self.reset()
    
    
        def __str__(self):
            return "%s()" % (type(self).__name__, str(self.env), str(self.client))
    
    
        def reset(self, env=None, client=None, path=None, req_id=None):
            self.env = env
            self.client = client
            self.path = path or ""
            if req_id is not None:
                self.req_id = req_id
            else:
                self.req_id = 0 if env is None else randint(0x00000000, 0xFFFFFFFF)
    
    
        def error(self, **kwargs):
            return Error(self.path, self.req_id, **kwargs)
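
        # Lifecycle sketch (not part of the original code), as used by
        # Server.wsgi_app below:
        #
        #   req = Request()                              # created once, before serving
        #   req.reset(env=environ, path="getEvents")     # at the start of each wsgi request
        #   ...
        #   raise req.error(message="...", error=400)    # Error carrying path and req_id
        #   ...
        #   req.reset()                                  # at the end of the request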
    
    
    
    class ObjectReq(Object):
    
        def __init__(self, req):
            Object.__init__(self)
            self.req = req
    
    
        def __str__(self):
            return "%s(req=%s)" % (type(self).__name__, type(self.req).__name__)
    
    
    
    class NoAuthenticator(ObjectReq):
    
        def __init__(self, req):
            ObjectReq.__init__(self, req)
    
    
        def authenticate(self, env, args):
            return "anybody"    # or None
    
    
        def authorize(self, env, client, path, method):
            return (client is not None)
    
    
    
    class X509Authenticator(NoAuthenticator):
    
        def __init__(self, req, db):
            NoAuthenticator.__init__(self, req)
            self.db = db
    
    
        def __str__(self):
            return "%s(req=%s, db=%s)" % (type(self).__name__, type(self.req).__name__, type(self.db).__name__)
    
    
        def get_cert_dns_names(self, pem):
    
            cert = M2Crypto.X509.load_cert_string(pem)
    
            subj = cert.get_subject()
            commons = [n.get_data().as_text() for n in subj.get_entries_by_nid(subj.nid["CN"])]
    
            ext = cert.get_ext("subjectAltName")
            extstrs = [val.strip() for val in ext.get_value().split(",")]
            altnames = [val[4:] for val in extstrs if val.startswith("DNS:")]
    
            # bit of mangling to get rid of duplicates and leave commonname first
            firstcommon = commons[0]
            return [firstcommon] + list(set(altnames+commons) - set([firstcommon]))
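
        # Worked example (not part of the original code, hostnames are made up):
        # for a certificate with CN "warden.example.org" and subjectAltName
        # "DNS:warden.example.org, DNS:warden-dev.example.org" this returns
        # ["warden.example.org", "warden-dev.example.org"] - the CN first,
        # the remaining names deduplicated and in no particular order.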
    
    
        def authenticate(self, env, args):
            try:
                cert_names = self.get_cert_dns_names(env["SSL_CLIENT_CERT"])
            except:
                logging.info("authenticate: cannot get or parse certificate from env")
                return None
    
            identity = args.get("client", [None])[0]
            secret =  args.get("secret", [None])[0]
    
            client = self.db.get_client_by_name(cert_names, identity, secret)
    
            if not client:
                logging.info("authenticate: client not found by identity: \"%s\", secret: %s, cert_names: %s" % (
                    identity, "..." if secret else "None", str(cert_names)))
                return None
            
            # Clients with 'secret' set must get authorized by it.
            # No secret turns auth off for this particular client.
            if client.secret is not None and secret is None:
                logging.info("authenticate: missing secret argument")
                return None
    
            logging.info("authenticate: %s" % str(client))
    
            return client
    
    
        def authorize(self, env, client, path, method):
            if method.debug:
                if not client.debug:
                    logging.info("authorize: failed, client does not have debug enabled")
                    return None
                return client
    
            if method.read:
                if not client.read:
                    logging.info("authorize: failed, client does not have read enabled")
                    return None
                return client
    
            if method.write:
                if not (client.write or client.test):
                    logging.info("authorize: failed, client is not allowed to write or test")
                    return None
    
            return client
            
    
    class NoValidator(ObjectReq):
    
        def __init__(self, req):
            ObjectReq.__init__(self, req)
    
    
        def __str__(self):
            return "%s(req=%s)" % (type(self).__name__, type(self.req).__name__)
    
    
        def check(self, event):
            return []
    
    
    class JSONSchemaValidator(NoValidator):
    
        def __init__(self, req, filename=None):
            NoValidator.__init__(self, req)
            self.path = filename or path.join(path.dirname(__file__), "idea.schema")
            with open(self.path) as f:
                self.schema = json.load(f)
            self.validator = Draft4Validator(self.schema)
    
    
        def __str__(self):
            return "%s(req=%s, filename=\"%s\")" % (type(self).__name__, type(self.req).__name__, self.path)
    
    
        def check(self, event):
    
            def sortkey(k):
                """ Treat keys as lowercase, prefer keys with less path segments """
                return (len(k.path), "/".join(str(k.path)).lower())
    
            res = []
            for error in sorted(self.validator.iter_errors(event), key=sortkey):
                res.append({"error": 460,
                    "message": "Validation error: key \"%s\", value \"%s\", expected - %s" % (
                        "/".join(str(v) for v in error.path),
                        error.instance,
                        error.schema.get('description', 'no additional info'))})
    
            return res
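
        # Illustrative sketch (not part of the original code): for an event
        # failing schema validation this might return something like
        #   [{"error": 460,
        #     "message": "Validation error: key \"DetectTime\", value \"yesterday\", expected - no additional info"}]
        # depending on the loaded schema's "description" fields.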
    
    
    
    class MySQL(ObjectReq):
    
        def __init__(self, req, host, user, password, dbname, port, retry_count, retry_pause, catmap_filename, tagmap_filename):
            ObjectReq.__init__(self, req)
            self.host = host
            self.user = user
            self.password = password
            self.dbname = dbname
            self.port = port
            self.retry_count = retry_count
            self.retry_pause = retry_pause
            self.catmap_filename = catmap_filename
            self.tagmap_filename = tagmap_filename
    
            with open(catmap_filename, "r") as catmap_fd:
                self.catmap = json.load(catmap_fd)
                self.catmap_other = self.catmap["Other"]    # Catch error soon, avoid lookup later
    
            with open(tagmap_filename, "r") as tagmap_fd:
                self.tagmap = json.load(tagmap_fd)
                self.tagmap_other = self.catmap["Other"]    # Catch error soon, avoid lookup later
    
            self.con = self.crs = None
    
            self.connect()
    
    
        def __str__(self):
            return "%s(req=%s, host='%s', user='%s', dbname='%s', port=%d, retry_count=%d, retry_pause=%d, catmap_filename=\"%s\", tagmap_filename=\"%s\")" % (
                type(self).__name__, type(self.req).__name__, self.host, self.user, self.dbname, self.port, self.retry_count, self.retry_pause, self.catmap_filename, self.tagmap_filename)
    
    
        def connect(self):
            self.con = my.connect(host=self.host, user=self.user, passwd=self.password,
                db=self.dbname, port=self.port, cursorclass=mycursors.DictCursor)
            self.crs = self.con.cursor()
    
    
        def close(self):
            try:
                if self.crs:
                    self.crs.close()
                if self.con:
                    self.con.close()
            except Exception:
                pass
    
    
        __del__ = close
    
    
        def execute(self, *args, **kwargs):
            """ Execute query on self.con, reconnecting if necessary """
            success = False
            countdown = self.retry_count
            while not success:
                try:
                    self.crs.execute(*args, **kwargs)
                    success = True
                except my.OperationalError:
                    if not countdown:
                        raise
                    logging.info("execute: Database down, trying to reconnect (%d attempts left)..." % countdown)
                    if countdown<self.retry_count:
                        sleep(self.retry_pause)    # no need to melt down server on longer outage
                    self.close()
                    self.connect()
                    countdown -= 1
    
    
        def _get_comma_perc(self, l):
            return ','.join(['%s'] * len(l))
    
    
        def _get_not(self, b):
            return "" if b else "NOT"
    
    
        def get_client_by_name(self, cert_names, identity=None, secret=None):
            query = ["SELECT id, registered, requestor, hostname, service, note, identity, secret, `read`, debug, `write`, test FROM clients WHERE valid = 1"]
            params = []
            if identity:
                query.append(" AND identity = %s")
                params.append(identity)
            if secret:
                query.append(" AND secret = %s")
                params.append(secret)
            query.append(" AND hostname IN (%s)" % self._get_comma_perc(cert_names))
            params.extend(cert_names)
            self.execute("".join(query), params)
            rows = self.crs.fetchall()
    
            if len(rows)>1:
                logging.warn("get_client_by_name: query returned more than one result: %s" % ", ".join(
                    [str(Client(**row)) for row in rows]))
                return None
    
            return Client(**rows[0]) if rows else None
    
    
        def get_debug(self):
            self.execute("SELECT VERSION() AS VER")
            row = self.crs.fetchone()
            self.execute("SHOW TABLE STATUS")
            tablestat = self.crs.fetchall()
            return {
                "db": "MySQL",
                "version": row["VER"],
                "tables": tablestat
            }
    
    
        def getMaps(self, section, variables):
            maps = []
            for v in variables:
                try:
                    mapped = section[v]
                except KeyError:
                    raise self.req.error(message="Wrong tag or category used in query.", error=422,
                        exc=sys.exc_info(), key=v)
                maps.append(mapped)
            return set(maps)    # unique
    
    
        def fetch_events(self, client, id, count,
                cat=None, nocat=None,
                tag=None, notag=None,
                group=None, nogroup=None):
           
            logging.debug("fetch_events: id=%i, count=%i, cat=%s, nocat=%s, tag=%s, notag=%s, group=%s, nogroup=%s" % (id, count, str(cat), str(nocat), str(tag), str(notag), str(group), str(nogroup)))
    
            if cat and nocat:
                raise self.req.error(message="Unrealizable conditions. Choose cat or nocat option.", error=422,
                            cat=cat, nocat=nocat)
            if tag and notag:
                raise self.req.error(message="Unrealizable conditions. Choose tag or notag option.", error=422,
                            tag=tag, notag=notag)
            if group and nogroup:
                raise self.req.error(message="Unrealizable conditions. Choose group or nogroup option.", error=422,
                            group=group, nogroup=nogroup)
    
            query = ["SELECT e.id, e.data FROM clients c RIGHT JOIN events e ON c.id = e.client_id WHERE e.id > %s"]
            params = [id or 0]
    
            if cat or nocat:
                cats = self.getMaps(self.catmap, (cat or nocat))
                query.append(
                    " AND e.id %s IN (SELECT event_id FROM event_category_mapping WHERE category_id IN (%s))" % (
                        self._get_not(cat), self._get_comma_perc(cats)))
                params.extend(cats)
    
            if tag or notag:
                tags = self.getMaps(self.tagmap, (tag or notag))
                query.append(
                    " AND e.id %s IN (SELECT event_id FROM event_tag_mapping WHERE tag_id IN (%s))" % (
                        self._get_not(tag), self._get_comma_perc(tags)))
                params.extend(tags)
    
            if group or nogroup:
                subquery = []
                for identity in (group or nogroup):
                    subquery.append("c.identity = %s")      # exact client
                    params.append(identity)
                    subquery.append("c.identity LIKE %s")   # whole subtree
                    params.append(identity + ".%")
    
                query.append(" AND %s (%s)" % (self._get_not(group), " OR ".join(subquery)))
    
            query.append(" AND e.valid = 1 LIMIT %s")
            params.append(count)
    
            query_string = "".join(query)
            logging.debug("fetch_events: query - %s" % query_string)
            logging.debug("fetch_events: params - %s", str(params))
    
            self.execute(query_string, params)
            rows = self.crs.fetchall()

            if rows:
                maxid = max(r['id'] for r in rows)
            else:
                maxid = self.getLastEventId()

            events = [json.loads(r["data"]) for r in rows]
    
            return {
                "lastid": maxid,
                "events": events
            }
    
    
        def store_event(self, client, event):
            try:
                self.execute("INSERT INTO events (received,client_id,data) VALUES (NOW(), %s, %s)", (client.id, json.dumps(event)))
                lastid = self.crs.lastrowid
    
                catlist = event.get('Category', ["Other"])
                cats = set(catlist) | set(cat.split(".", 1)[0] for cat in catlist)
                for cat in cats:
                    cat_id = self.catmap.get(cat, self.catmap_other)
                    self.execute("INSERT INTO event_category_mapping (event_id,category_id) VALUES (%s, %s)", (lastid, cat_id))
                    
                try:
                    tags = event['Node'][0]['Tags']
                except (KeyError, IndexError):
                    tags = []
                    
                for tag in tags:
                    tag_id = self.tagmap.get(tag, self.tagmap_other)
                    self.execute("INSERT INTO event_tag_mapping (event_id,tag_id) VALUES (%s, %s)", (lastid, tag_id))
    
                self.con.commit()
                return []
            except Exception as e:
                self.con.rollback()
                return [{"error": 500, "message": type(e).__name__}]
    
    
        def insertLastReceivedId(self, client, id):
            logging.debug("insertLastReceivedId: id %i for client %i(%s)" % (id, client.id, client.hostname))
            self.execute("INSERT INTO last_events(client_id, event_id, timestamp) VALUES(%s, %s, NOW())", (client.id, id))
            self.con.commit()
    
        def getLastEventId(self):
            self.execute("SELECT MAX(id) as id FROM events")
            row = self.crs.fetchone()
    
            return row['id'] or 0
    
        def getLastReceivedId(self, client):
            self.execute("SELECT MAX(event_id) as id FROM last_events WHERE client_id = %s", client.id)
            row = self.crs.fetchone()
    
            id = row['id'] if row is not None else 0
            logging.debug("getLastReceivedId: id %i for client %i(%s)" % (id, client.id, client.hostname))
    
            return id
    
    
    
    def expose(read=1, write=0, debug=0):
    
        def expose_deco(meth):
            meth.exposed = True
            meth.read = read
            meth.write = write
            meth.debug = debug
            return meth
    
        return expose_deco
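
    # Usage sketch (mirrors the WardenHandler methods below): the decorator only
    # marks handler methods; Server.wsgi_app checks 'exposed' and the
    # authenticator's authorize() checks the read/write/debug flags.
    #
    #   @expose(read=1, debug=1)
    #   def getDebug(self): ...
    #
    #   @expose(write=1)
    #   def sendEvents(self, events=[]): ...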
    
    
    class Server(ObjectReq):
    
        def __init__(self, req, auth, handler):
            ObjectReq.__init__(self, req)
            self.auth = auth
            self.handler = handler
    
    
        def __str__(self):
            return "%s(req=%s, auth=%s, handler=%s)" % (type(self).__name__, type(self.req).__name__, type(self.auth).__name__, type(self.handler).__name__)
    
    
        def sanitize_args(self, path, func, args, exclude=["self"]):
            # silently remove internal args, these should never be used
            # but if somebody does, we do not expose them by error message
            intargs = set(args).intersection(exclude)
            for a in intargs:
                del args[a]
            if intargs:
                logging.info("sanitize_args: Called with internal args: %s" % ", ".join(intargs))
    
            # silently remove surplus arguments - potential forward
            # compatibility (unknown args will get ignored)
            badargs = set(args) - set(func.func_code.co_varnames[0:func.func_code.co_argcount])
            for a in badargs:
                del args[a]
            if badargs:
                logging.info("sanitize_args: Called with superfluous args: %s" % ", ".join(badargs))
    
            return args
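
        # Illustrative sketch (not part of the original code): with a handler
        # like getEvents(self, id=None, count=None, ...), a query-string dict
        #   {"id": ["42"], "count": ["5"], "self": ["x"], "bogus": ["y"]}
        # comes back as {"id": ["42"], "count": ["5"]} - "self" is internal and
        # "bogus" is unknown, so both are dropped (and logged).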
    
    
        def wsgi_app(self, environ, start_response, exc_info=None):
            path = environ.get("PATH_INFO", "").lstrip("/")
            self.req.reset(env=environ, path=path)
            output = ""
            status = "200 OK"
            headers = [('Content-type', 'application/json')]
            exception = None
    
            try:
                try:
                    injson = environ['wsgi.input'].read()
                except:
                    raise self.req.error(message="Data read error.", error=408, exc=sys.exc_info())
    
                try:
                    method = getattr(self.handler, path)
                    method.exposed    # dummy access to trigger AttributeError
                except Exception:
                    raise self.req.error(message="You've fallen of the cliff.", error=404)
    
                self.req.args = args = parse_qs(environ.get('QUERY_STRING', ""))
    
                self.req.client = client = self.auth.authenticate(environ, args)
                if not client:
                    raise self.req.error(message="I'm watching. Authenticate.", error=403)
    
                try:
                    events = json.loads(injson) if injson else None
                except Exception as e:
                    raise self.req.error(message="Deserialization error.", error=400,
                        exc=sys.exc_info(), args=injson, parser=str(e))
                if events:
                    args["events"] = events
    
                auth = self.auth.authorize(self.req.env, self.req.client, self.req.path, method)
                if not auth:
                    raise self.req.error(message="I'm watching. Not authorized.", error=403, client=client.identity)
    
                # These args are not for handler
                args.pop("client", None)
                args.pop("secret", None)
    
                args = self.sanitize_args(path, method, args)
                result = method(**args)   # call requested method
    
                try:
                    # 'default': takes care of non JSON serializable objects,
                    # which could (although shouldn't) appear in handler code
                    output = json.dumps(result, default=lambda v: str(v))
                except Exception as e:
                    raise self.req.error(message="Serialization error", error=500,
                        exc=sys.exc_info(), args=str(result))
    
            except Error as e:
                exception = e
            except Exception as e:
                exception = self.req.error(message="Server exception", error=500, exc=sys.exc_info())
    
            if exception:
                status = "%d %s" % exception.get_http_err_msg()
                output = json.dumps(exception.to_dict(), default=lambda v: str(v))
                exception.log(logging.getLogger())
    
            # Make sure everything is properly encoded - JSON and various function
            # may spit out unicode instead of str and it gets propagated up (str
            # + unicode = unicode). However, the right thing would be to be unicode
            # correct among whole source and always decode on input (json module
            # does that for us) and on output here.
            if isinstance(status, unicode):
                status = status.encode("utf-8")
            if isinstance(output, unicode):
                output = output.encode("utf-8")
            headers.append(('Content-Length', str(len(output))))
            start_response(status, headers)
            self.req.reset()
            return [output]
    
    
        __call__ = wsgi_app
    
    
    
    class WardenHandler(ObjectReq):
    
        def __init__(self, req, validator, db, auth,
                send_events_limit=100000, get_events_limit=100000,
                description=None):
    
            ObjectReq.__init__(self, req)
            self.auth = auth
            self.db = db
            self.validator = validator
            self.send_events_limit = send_events_limit
            self.get_events_limit = get_events_limit
            self.description = description
    
    
        def __str__(self):
            return "%s(req=%s, validator=%s, db=%s, send_events_limit=%s, get_events_limit=%s, description=\"%s\")" % (
                type(self).__name__, type(self.req).__name__, type(self.validator).__name__, type(self.db).__name__,
                self.send_events_limit, self.get_events_limit, self.description)
    
    
        @expose(read=1, debug=1)
        def getDebug(self):
            return {
                "environment": self.req.env,
                "client": self.req.client.__dict__,
                "database": self.db.get_debug(),
                "system": {
                    "uname": os.uname()
                },
                "process": {
                    "cwd": os.getcwdu(),
                    "pid": os.getpid(),
                    "ppid": os.getppid(),
                    "pgrp": os.getpgrp(),
                    "uid": os.getuid(),
                    "gid": os.getgid(),
                    "euid": os.geteuid(),
                    "egid": os.getegid(),
                    "groups": os.getgroups()
                }
            }
    
    
        @expose(read=1)
        def getInfo(self):
            info = {
                "version": VERSION,
                "send_events_limit": self.send_events_limit,
                "get_events_limit": self.get_events_limit
            }
            if self.description:
                info["description"] = self.description
            return info
    
    
        @expose(read=1)
        def getEvents(self, id=None, count=None,
                cat=None, nocat=None,
                tag=None, notag=None,
                group=None, nogroup=None):
    
            try:
                id = int(id[0])
            except (ValueError, TypeError, IndexError):
                id = None
    
            if id is None:
                try:
                    id = self.db.getLastReceivedId(self.req.client)
                except Exception as e:
                    logging.info("cannot getLastReceivedId - " + type(e).__name__ + ": " + str(e))
                    
            if id is None:
                # First access: remember the guy and return the current last event id (no events yet)
                id = self.db.getLastEventId()
                self.db.insertLastReceivedId(self.req.client, id)
                return {
                    "lastid": id,
                    "events": []
                }
    
            try:
                count = int(count[0])
            except (ValueError, TypeError, IndexError):
                count = self.get_events_limit
    
            if self.get_events_limit:
                count = min(count, self.get_events_limit)
    
            res = self.db.fetch_events(self.req.client, id, count, cat, nocat, tag, notag, group, nogroup)
    
            self.db.insertLastReceivedId(self.req.client, res['lastid'])
    
            logging.info("sending %d events, lastid is %i" % (len(res["events"]), res["lastid"]))
    
            return res
    
    
        def check_node(self, event, identity):
            try:
                ev_id = event['Node'][0]['Name'].lower()
            except (KeyError, TypeError):
                # Event does not bear valid Node attribute
                return [{"error": 422, "message": "Event does not bear valid Node attribute"}]
            if ev_id != identity:
                return [{"error": 422, "message": "Node does not correspond with saving client"}]
            return []
    
    
        def add_event_nums(self, ilist, events, errlist):
            for err in errlist:
                err.setdefault("events", []).extend(ilist)
                ev_ids = err.setdefault("events_id", [])
                for i in ilist:
                    event = events[i]
                    id = event.get("ID", None)
                    ev_ids.append(id)
            return errlist
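
        # Illustrative sketch (not part of the original code): for events with
        # indices [1, 3] and an error list [{"error": 460, "message": "..."}],
        # each error dict gains "events": [1, 3] and "events_id" with the "ID"
        # values of those events (None where an event has no "ID").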
    
    
        @expose(write=1)
        def sendEvents(self, events=[]):
            if not isinstance(events, list):
                raise self.req.error(message="List of events expected.", error=400)
    
            errs = []
            if len(events)>self.send_events_limit:
                errs.extend(
                    self.add_event_nums(range(self.send_events_limit, len(events)), events,
                        [{"error": 507, "message": "Too much events in one batch.",
                          "send_events_limit": self.send_events_limit}]))
    
            saved = 0
            for i, event in enumerate(events[0:self.send_events_limit]):
                v_errs = self.validator.check(event)
                if v_errs:
                    errs.extend(self.add_event_nums([i], events, v_errs))
                    continue
    
                node_errs = self.check_node(event, self.req.client.identity)
                if node_errs:
                    errs.extend(self.add_event_nums([i], events, node_errs))
                    continue
    
                if self.req.client.test and 'Test' not in event.get('Category', []):
                    errs.extend(self.add_event_nums([i], events, [{"error": 422,
                        "message": "You're allowed to send only messages containing \"Test\" among categories.",
                        "categories": event.get('Category', [])}]))
                    continue
    
                db_errs = self.db.store_event(self.req.client, event)
                if db_errs:
                    errs.extend(self.add_event_nums([i], events, db_errs))
                    continue
    
                saved += 1
    
            logging.info("Saved %i events" % saved)
            if errs:
                raise self.req.error(errors=errs)
    
            return {}
    
    
    
    def read_ini(path):
        c = ConfigParser.RawConfigParser()
        res = c.read(path)
    if not res or path not in res:
        # We don't have logging yet, hopefully this will go into the webserver log
            raise Error(message="Unable to read config: %s" % path)
        data = {}
        for sect in c.sections():
            for opts in c.options(sect):
                lsect = sect.lower()
                if not lsect in data:
                    data[lsect] = {}
                data[lsect][opts] = c.get(sect, opts)
        return data
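
    # Illustrative sketch (not part of the original code, values are made up):
    # an INI file such as
    #
    #   [Log]
    #   type = FileLogger
    #   filename = /var/log/warden.log
    #
    # is returned as {"log": {"type": "FileLogger", "filename": "/var/log/warden.log"}}
    # (section names lowercased, option names as ConfigParser returns them).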
    
    
    def read_cfg(path):
        with open(path, "r") as f:
            stripcomments = "\n".join((l for l in f if not l.lstrip().startswith(("#", "//"))))
            conf = json.loads(stripcomments)
    
        # Lowercase keys
        conf = dict((sect.lower(), dict(
            (subkey.lower(), val) for subkey, val in subsect.iteritems())
        ) for sect, subsect in conf.iteritems())
    
        return conf
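
    # Illustrative sketch (not part of the original code): read_cfg() accepts
    # JSON with full-line "#" or "//" comments, e.g.
    #
    #   {
    #       // pick the authenticator
    #       "auth": {"type": "NoAuthenticator"},
    #       "handler": {"description": "Test Warden server"}
    #   }
    #
    # and returns the same structure with section and key names lowercased.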
    
    
    def fallback_wsgi(environ, start_response, exc_info=None):
    
    # If the server fails to initialize, this simple WSGI app is served
    # instead, returning a Warden JSON compliant error message
        error=503
        message="Server not running due to initialization error"
        headers = [('Content-type', 'application/json')]
    
        logline = "Error(%d): %s" % (error, message)
        status = "%d %s" % (error, message)
        output = '{"errors": [{"error": %d, "message": "%s"}]}' % (
            error, message)
    
        logging.critical(logline)
        start_response(status, headers)
        return [output]
    
    
    def build_server(conf):
    
        # Functions for validation and conversion of config values
        def facility(name):
            return int(getattr(logging.handlers.SysLogHandler, "LOG_" + name.upper()))
    
        def loglevel(name):
            return int(getattr(logging, name.upper()))
    
        def natural(name):
            num = int(name)
            if num<1:
                raise ValueError("Not a natural number")
            return num
    
        def filepath(name):
            # Make paths relative to dir of this script
            return path.join(path.dirname(__file__), name)
    
        def objdef(name):
            return objects[name.lower()]
    
        obj = objdef    # Draw into local namespace for init_obj
    
        objects = {}    # Already initialized objects
    
    # List of sections and the objects they may configure.
    # The first object in each list is the default one; the "type" keyword
    # in a section may be used to choose another (see the example below).
        section_def = {
            "log": ["FileLogger", "SysLogger"],
            "db": ["MySQL"],
            "auth": ["X509Authenticator", "NoAuthenticator"],
            "validator": ["JSONSchemaValidator", "NoValidator"],
            "handler": ["WardenHandler"],
            "server": ["Server"]
        }
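
    # Configuration example (not part of the original code): an [auth] section
    # without "type" gets the default X509Authenticator; to pick another
    # allowed type, the section might read:
    #
    #   [auth]
    #   type = NoAuthenticator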
    
        # Object parameter conversions and defaults
        param_def = {
            "FileLogger": {
                "req": {"type": obj, "default": "req"},
                "filename": {"type": filepath, "default": path.join(path.dirname(__file__), path.splitext(path.split(__file__)[1])[0] + ".log")},
                "level": {"type": loglevel, "default": "info"},
            },
            "SysLogger": {
                "req": {"type": obj, "default": "req"},
                "socket": {"type": filepath, "default": "/dev/log"},
                "facility": {"type": facility, "default": "daemon"},
                "level": {"type": loglevel, "default": "info"}
            },
            "NoAuthenticator": {
                "req": {"type": obj, "default": "req"}
            },
            "X509Authenticator": {
                "req": {"type": obj, "default": "req"},
                "db": {"type": obj, "default": "db"}
            },
            "NoValidator": {
                "req": {"type": obj, "default": "req"},
            },
            "JSONSchemaValidator": {
                "req": {"type": obj, "default": "req"},
                "filename": {"type": filepath, "default": path.join(path.dirname(__file__), "idea.schema")}
            },
            "MySQL": {
                "req": {"type": obj, "default": "req"},
                "host": {"type": str, "default": "localhost"},
                "user": {"type": str, "default": "warden"},
                "password": {"type": str, "default": ""},
                "dbname": {"type": str, "default": "warden3"},
                "port": {"type": natural, "default": 3306},
                "retry_pause": {"type": natural, "default": 5},
                "retry_count": {"type": natural, "default": 3},
                "catmap_filename": {"type": filepath, "default": path.join(path.dirname(__file__), "catmap_mysql.json")},
                "tagmap_filename": {"type": filepath, "default": path.join(path.dirname(__file__), "tagmap_mysql.json")}
            },
            "WardenHandler": {
                "req": {"type": obj, "default": "req"},
                "validator": {"type": obj, "default": "validator"},
                "db": {"type": obj, "default": "DB"},
                "auth": {"type": obj, "default": "auth"},
                "send_events_limit": {"type": natural, "default": 10000},
                "get_events_limit": {"type": natural, "default": 10000},
                "description": {"type": str, "default": ""}
            },
            "Server": {
                "req": {"type": obj, "default": "req"},
                "auth": {"type": obj, "default": "auth"},
                "handler": {"type": obj, "default": "handler"}
            }
        }
    
        def init_obj(sect_name):
            config = conf.get(sect_name, {})
            sect_name = sect_name.lower()
            sect_def = section_def[sect_name]
    
            try:    # Object type defined?
                objtype = config["type"]
                del config["type"]
            except KeyError:    # No, fetch default object type for this section
                objtype = sect_def[0]
            else:
                if not objtype in sect_def:
                    raise KeyError("Unknown type %s in section %s" % (objtype, sect_name))
    
            params = param_def[objtype]
    
        # Reject surplus parameters. Also disallow explicit 'obj' parameters,
        # which only serve to name the default referenced section.
            for name in config:
                if name not in params or (name in params and params[name]["type"] is objdef):
                    raise KeyError("Unknown key %s in section %s" % (name, sect_name))
    
            # Process parameters
            kwargs = {}
            for name, definition in params.iteritems():
                raw_val = config.get(name, definition["default"])
                try:
                    val = definition["type"](raw_val)
                except Exception:
                    raise KeyError("Bad value \"%s\" for %s in section %s" % (raw_val, name, sect_name))
                kwargs[name] = val
    
            cls = globals()[objtype]   # get class/function type
            try:
                obj = cls(**kwargs)         # run it
            except Exception as e:
                raise KeyError("Cannot initialize %s from section %s: %s" % (
                    objtype, sect_name, str(e)))
    
            if isinstance(obj, Object):
                # Log only objects here, functions must take care of themselves
                logging.info("Initialized %s" % str(obj))
    
            objects[sect_name] = obj
            return obj
    
        # Init logging with at least simple stderr StreamLogger
        # Dunno if it's ok within wsgi, but we have no other choice, let's
        # hope it at least ends up in webserver error log
        StreamLogger()
    
        # Shared container for common data of ongoing WSGI request
        objects["req"] = Request()
    
        try:
            # Now try to init required objects
            for o in ("log", "db", "auth", "validator", "handler", "server"):
                init_obj(o)
        except Exception as e:
            logging.critical(str(e))
            logging.debug("", exc_info=sys.exc_info())
            return fallback_wsgi
    
        logging.info("Ready to serve")
    
        return objects["server"]
    
    
    if __name__=="__main__":
        # FIXME: just development stuff
        srv = build_server(read_ini("warden3.cfg.wheezy-warden3"))