#!/usr/bin/python # -*- coding: utf-8 -*- # # Copyright (C) 2011-2013 Cesnet z.s.p.o # Use of this source is governed by a 3-clause BSD-style license, see LICENSE file. import sys import logging import logging.handlers import ConfigParser from traceback import format_tb import M2Crypto.X509 import json import MySQLdb as my import MySQLdb.cursors as mycursors from uuid import uuid4 from time import time, gmtime from math import trunc from io import BytesIO from urlparse import parse_qs from os import path # for local version of up to date jsonschema sys.path.append(path.join(path.dirname(__file__), "..", "lib")) from jsonschema import Draft4Validator, FormatChecker VERSION = "3.0-not-even-alpha" class Error(Exception): def __init__(self, message, error=500, method=None, detail=None, exc=(None, None, None)): self.error = int(error) self.method = method self.message = message self.detail = detail (self.exctype, self.excval, self.exctb) = exc or sys.exc_info() self.cause = self.excval # compatibility with other exceptions def __str__(self): out = [] out.append("Error(%s)" % (self.error)) if self.method is not None: out.append(" in \"%s\"" % self.method) if self.message is not None: out.append(": %s" % self.message) if self.excval is not None: out.append(" - cause was %s: %s" % (type(self.excval).__name__, str(self.excval))) return "".join(out) def info_str(self): return ("Detail: %s" % self.detail) or "" def debug_str(self): out = [] if self.excval is not None: out.append("Exception %s: %s\n" % (type(self.excval).__name__, str(self.excval))) if self.exctb is not None: out.append("Traceback:\n%s" % "".join(format_tb(self.exctb))) return "".join(out) def to_dict(self): d = {} if self.error is not None: d["error"] = self.error if self.method is not None: d["method"] = self.method if self.message is not None: d["message"] = self.message if self.detail is not None: d["detail"] = self.detail if self.excval is not None: d["message"] = d["message"] + ", cause was %s: %s" % (type(self.excval).__name__, str(self.excval)) return d def get_clean_root_logger(level=logging.INFO): """ Attempts to get logging module into clean slate state """ # We want to be able to set up at least stderr logger before any # configuration is read, and then later get rid of it and set up # whatever administrator requires. # However, there can exist only one logger, but we want to get a clean # slate everytime we initialize StreamLogger or FileLogger... which # is not exactly supported by logging module. # So, we look directly inside logger class and clean up handlers/filters # manually. logger = logging.getLogger() # no need to create new logger.setLevel(level) while logger.handlers: logger.removeHandler(logger.handlers[0]) while logger.filters: logger.removeFilter(logger.filters[0]) return logger def StreamLogger(stream=sys.stderr, level=logging.INFO): """ Fallback handler just for setup, not meant to be used from configuration file because during wsgi query stdout/stderr is forbidden. """ fhand = logging.StreamHandler(stream) fform = logging.Formatter('%(asctime)s %(filename)s[%(process)d]: (%(levelname)s) %(message)s') fhand.setFormatter(fform) logger = get_clean_root_logger(level) logger.addHandler(fhand) def FileLogger(filename, level=logging.INFO): fhand = logging.FileHandler(filename) fform = logging.Formatter('%(asctime)s %(filename)s[%(process)d]: (%(levelname)s) %(message)s') fhand.setFormatter(fform) logger = get_clean_root_logger(level) logger.addHandler(fhand) logging.info("Initialized FileLogger(filename=\"%s\", \"%s\")" % (filename, level)) def SysLogger(socket="/dev/log", facility=logging.handlers.SysLogHandler.LOG_DAEMON, level=logging.INFO): fhand = logging.handlers.SysLogHandler(address=socket, facility=facility) fform = logging.Formatter('%(filename)s[%(process)d]: (%(levelname)s) %(message)s') fhand.setFormatter(fform) logger = get_clean_root_logger(level) logger.addHandler(fhand) logging.info("Initialized SysLogger(socket=\"%s\", facility=\"%s\", level=\"%s\")" % (socket, facility, level)) class Object(object): def __str__(self): return "%s()" % type(self).__name__ class NoAuthenticator(Object): def __init__(self): Object.__init__(self) def authenticate (self, env): return "anybody" # or None def authorize(self, env, client, method, event, args): return (client is not None) class X509Authenticator(NoAuthenticator): def __init__(self, db): self.db = db NoAuthenticator.__init__(self) def __str__(self): return "%s(db=%s)" % (type(self).__name__, type(self.db).__name__) def get_cert_dns_names(self, pem): cert = M2Crypto.X509.load_cert_string(pem) subj = cert.get_subject() commons = [n.get_data().as_text() for n in subj.get_entries_by_nid(subj.nid["CN"])] ext = cert.get_ext("subjectAltName") extstrs = [val.strip() for val in ext.get_value().split(",")] altnames = [val[4:] for val in extstrs if val.startswith("DNS:")] # bit of mangling to get rid of duplicates and leave commonname first firstcommon = commons[0] return [firstcommon] + list(set(altnames+commons) - set([firstcommon])) def authenticate (self, env): names = self.get_cert_dns_names(env["SSL_CLIENT_CERT"]) return self.db.get_client_by_name(names) def authorize(self, env, client, method, event, args): # Authorize for debug if (method == 'getDebug'): if not client["debug"]: logging.info("Auth failed: client does not have debug enabled") return None return client try: identity = event['Node'][0]['Name'].lower() except KeyError: # Event does not bear valid Node attribute logging.info("Auth failed: event does not bear valid Node attribute") return None try: service = client["services"][identity] except KeyError: # We are unable to pair service in message to service in db logging.info("Auth failed: '%s' from event not found in services for client %i" % (identity, client["id"])) return None client["service"] = service # Authorize for sending events if (method == "sendEvents"): if not (service["write"] or service["test"]): logging.info("Auth failed: service %i (%s) is not allowed to write or test" % (service["service_id"], identity)) return None test = 'Test' in event.get('Category', []) if not test: logging.info("Auth failed: service %i (%s) does not send Test category in event" % (service["service_id"], identity)) return None return client class NoValidator(Object): def check(self, event): return [] class JSONSchemaValidator(NoValidator): def __init__(self, filename=None): self.path = filename or path.join(path.dirname(__file__), "idea.schema") with open(self.path) as f: self.schema = json.load(f) self.validator = Draft4Validator(self.schema, format_checker=FormatChecker()) def __str__(self): return "%s(filename=\"%s\")" % (type(self).__name__, self.path) def check(self, event): def sortkey(k): """ Treat keys as lowercase, prefer keys with less path segments """ return (len(k.path), "/".join(str(k.path)).lower()) res = [] for error in sorted(self.validator.iter_errors(event), key=sortkey): res.append( "Validation error: key \"%s\", value \"%s\", expected - %s, error message - %s\n" % ( u"/".join(str(v) for v in error.path), error.instance, error.schema.get('description', 'no additional info'), error.message)) return res class MySQL(Object): def __init__(self, host, user, password, dbname, port): self.host = host self.user = user self.password = password self.dbname = dbname self.port = port self.con = my.connect(host=self.host, user=self.user, passwd=self.password, db=self.dbname, port=self.port, cursorclass=mycursors.DictCursor) self.crs = self.con.cursor() def __str__(self): return "%s(host='%s', user='%s', dbname='%s', port=%d)" % ( type(self).__name__, self.host, self.user, self.dbname, self.port) def get_client_by_name(self, name): format_strings = ','.join(['%s'] * len(name)) self.crs.execute("SELECT cl.`id`, cl.`hostname`, s.`service`, s.`service_id`, s.`identity`, cl.`read`, s.`write`, s.`test`, cl.`debug` FROM `clients` cl LEFT JOIN `services` s ON cl.`id` = s.`client_id` WHERE cl.`valid` = 1 AND s.`valid` = 1 AND `hostname` IN (%s)" % format_strings, tuple(name)) rows = self.crs.fetchall() logging.debug("Client/service info: " + str(rows)) if not rows: return None client = {} for n in ["id", "hostname", "read", "debug"]: client[n] = rows[0][n] services = {} for row in rows: service = {} for n in ["service", "service_id", "identity", "write", "test"]: service[n] = row[n] services[row["identity"]] = service client["services"] = services logging.debug("Client/service formatted info: " + str(client)) return client def get_debug(self): self.crs.execute("SELECT VERSION() AS VER") row = self.crs.fetchone() return { "db": "MySQL", "version": row["VER"] } def get_status(self): return {} def gen_random_idea(self): def get_precise_timestamp(): t = time() us = trunc((t-trunc(t))*1000000) g = gmtime(t) iso = '%04d-%02d-%02dT%02d:%02d:%02d.%0dZ' % (g[0:6]+(us,)) return iso return { "Format": "IDEA0", "ID": str(uuid4()), "DetectTime": get_precise_timestamp(), "Category": ["Test"], } def fetch_events(self, client, id, count, cat=None, nocat=None, tag=None, notag=None, group=None, nogroup=None): sqlwhere = [] sqltemp = {} if cat is not None and nocat is not None: raise Error("Unrealizable conditions. Choose cat or nocat option.", 500, method='getEvents', exc=sys.exc_info(), detail={'cat': cat, 'nocat' : nocat}) if cat is not None or nocat is not None: if cat is not None: parent_cats = [] sqltemp['cat'] = self.generateDynamicQuery("Category", "category_id IN (%s)", json.loads(cat), parent_cats) for pcats in parent_cats: sqltemp['cat'] += " %s category_id DIV %s = 1 " % (("OR" if sqltemp['cat'] else ""), pcats) if nocat is not None: parent_cats = [] sqltemp['cat'] = self.generateDynamicQuery("Category", "category_id NOT IN (%s)", json.loads(nocat), parent_cats) for pcats in parent_cats: sqltemp['cat'] += " %s category_id DIV %s = 1 " % (("OR" if sqltemp['cat'] else ""), pcats) sqlwhere.append("e.id IN (SELECT event_id FROM event_category_mapping WHERE %s)" % sqltemp['cat']) if tag is not None and notag is not None: raise Error("Unrealizable conditions. Choose tag or notag option.", 500, method='getEvents', exc=sys.exc_info(), detail={'tag': cat, 'notag' : nocat}) if tag is not None or notag is not None: if tag is not None: sqltemp['tag'] = self.generateDynamicQuery("Tag", "tag_id IN (%s)", json.loads(tag)) if notag is not None: sqltemp['tag'] = self.generateDynamicQuery("Tag", "tag_id NOT IN (%s)", json.loads(notag)) sqlwhere.append("e.id IN (SELECT event_id FROM event_tag_mapping WHERE %s)" % sqltemp['tag']) if group is not None and nogroup is not None: raise Error("Unrealizable conditions. Choose group or nogroup option.", 500, method='getEvents', exc=sys.exc_info(), detail={'tag': cat, 'notag' : nocat}) if group is not None or nogroup is not None: sqltemp['group'] = "" if group is not None: for identity in json.loads(group): sqltemp['group'] += ("s.identity LIKE '%s' AND " % (identity)) if nogroup is not None: for identity in json.loads(nogroup): sqltemp['group'] += ("s.identity NOT LIKE '%s' AND " % (identity)) # logging.debug(sqltemp['group'][:-4]) sqlwhere.append(sqltemp['group'][:-4]) sqlwhere_string = (" AND " . join(sqlwhere)) # logging.debug(sqlwhere_string) # logging.debug(' AND ' . join(sqlwhere)) #sqlwhere = sqlwhere[:-4] and_op = "" if not sqlwhere_string else "AND" logging.debug("SELECT e.id, e.data FROM services s RIGHT JOIN events e ON s.id = e.service_id WHERE e.id > %s AND %s %s e.valid = 1 LIMIT %s" % (str(id or 0), sqlwhere_string, and_op, str(count))) self.crs.execute("SELECT e.id, e.data FROM services s RIGHT JOIN events e ON s.service_id = e.service_id WHERE e.id > %s AND %s %s e.valid = 1 LIMIT %s" % (str(id or 0), sqlwhere_string, and_op, str(count))) row = self.crs.fetchall() if row: maxid = max(r['id'] for r in row) else: maxid = self.getLastEventId() # logging.debug("MAX ID = %s", str(maxid)) return { "lastid": maxid, # "lastid": row[-1]['id'] if row else str(id), "events": [row[i]['data'] for i in range(len(row))] } def store_event(self, client, event): try: # logging.debug("INSERT INTO events (detected,received,service_id,data) VALUES ('%s', NOW(), '%s', '%s')" % (event['DetectTime'], client["service"]["service_id"], self.con.escape_string(str(event)))) self.crs.execute("INSERT INTO events (detected,received,service_id,data) VALUES ('%s', NOW(), '%s', '%s')" % (event['DetectTime'], client["service"]["service_id"], self.con.escape_string(str(event)))) lastid = self.crs.lastrowid # logging.debug(str(lastid)) for cat in event['Category']: # logging.debug({'cat': cat}) cat_id = self.map_id('Category', cat) if self.map_id('Category', cat) else self.map_id('Category', 'Other.Other') # logging.debug({'cat_id': cat_id}) # logging.debug("INSERT INTO event_category_mapping (event_id,category_id) VALUES ('%s', '%s')" % (str(lastid), str(cat_id))) self.crs.execute("INSERT INTO event_category_mapping (event_id,category_id) VALUES ('%s', '%s')" % (str(lastid), str(cat_id))) for tag in event['Node'][0]['Tags']: tag_id = self.map_id('Tag', tag) if self.map_id('Tag', tag) else self.map_id('Tag', 'Other') # logging.debug({'tag_id': tag_id}) # logging.debug("INSERT INTO event_tag_mapping (event_id,tag_id) VALUES ('%s', '%s')" % (str(lastid), tag_id)) self.crs.execute("INSERT INTO event_tag_mapping (event_id,tag_id) VALUES ('%s', '%s')" % (str(lastid), str(tag_id))) self.con.commit() return [] except Exception as e: self.con.rollback() return [{"event": event, "error": type(e).__name__ + ": " + str(e)}] def insertLastReceivedId(self, client, id): logging.debug("INSERT INTO last_events(client_id, event_id, timestamp) VALUES(%s, %s, NOW())" % (str(client["id"]), id)) self.crs.execute("INSERT INTO last_events(client_id, event_id, timestamp) VALUES(%s, %s, NOW())" % (str(client["id"]), id)) def getLastEventId(self): self.crs.execute("SELECT MAX(id) as id FROM events") row = self.crs.fetchone() return row['id'] if row['id'] is not None else 0 def getLastReceivedId(self, client): client_id = client["id"] logging.debug("getLastReceivedId (client_id) = %s", str(client_id)) logging.debug("SELECT MAX(event_id) as id FROM last_events WHERE client_id = %s" % (str(client_id))) self.crs.execute("SELECT MAX(event_id) as id FROM last_events WHERE client_id = %s" % (str(client_id))) row = self.crs.fetchone() logging.debug("getLastReceivedId - %s" % str(row['id'])) return row['id'] if row is not None else 0 def map_id (self, section, key, strict = False): # Should by placed in config file data = {} data['Tag'] = { "Connection" : 1, "Datagram" : 2, "Content" : 3, "Data" : 4, "File" : 5, "Flow" : 6, "Log": 7, "Protocol" : 8, "Host" : 9, "Network" : 10, "Correlation" : 11, "External" : 12, "Reporting" : 13, "Other" : 99 } data['Category'] = { "Abusive" : 100, "Abusive.Spam" : 101, "Abusive.Harassment" : 102, "Abusive.Child" : 103, "Abusive.Sexual" : 104, "Abusive.Violence" : 105, "Malware" : 200, "Malware.Virus" : 201, "Malware.Worm" : 202, "Malware.Trojan" : 203, "Malware.Spyware" : 204, "Malware.Dialer" : 205, "Malware.Rootkit" : 206, "Recon.Scanning" : 3, "Recon.Scanning" : 301, "Recon.Sniffing" : 302, "Recon.SocialEngineering" : 303, "Recon.Searching" : 304, "Attempt" : 400, "Attempt.Exploit" : 401, "Attempt.Login" : 402, "Attempt.NewSignature" : 403, "Intrusion" : 500, "Intrusion.AdminCompromise" : 501, "Intrusion.UserCompromise" : 502, "Intrusion.AppCompromise" : 503, "Intrusion.Botnet" : 504, "Availability" : 600, "Availability.DoS" : 601, "Availability.DDoS" : 602, "Availability.Sabotage" : 603, "Availability.Outage" : 604, "Information" : 700, "Information.UnauthorizedAccess" : 701, "Information.UnauthorizedModification" : 702, "Fraud" : 800, "Fraud.UnauthorizedUsage" : 801, "Fraud.Copyright" : 802, "Fraud.Masquerade" : 803, "Fraud.Phishing" : 804, "Fraud.Scam" : 805, "Vulnerable" : 900, "Vulnerable.Open" : 901, "Anomaly" : 1000, "Anomaly.Traffic" : 1001, "Anomaly.Connection" : 1002, "Anomaly.Protocol" : 1003, "Anomaly.System" : 1004, "Anomaly.Application" : 1005, "Anomaly.Behaviour" : 1006, "Other" : 9998, "Test" : 9999, } try: return data[section][key] except: #Return 0 for strict mode (searching), otherwise map everything else to 'Other' return 0 if strict else data[section]['Other'] def generateDynamicQuery(self, section, query_string, variables, parent_cats = []): variables_id = [] # parent_cats = [] for v in variables: mapped_id = self.map_id(section, v, True) if mapped_id % 100 != 0: variables_id.append(mapped_id) else: parent_cats.append(mapped_id) # variables_id = [self.map_id(section, v) for v in variables if self.map_id(section, v) % 100 != 0] format_strings = ','.join(['\'%s\''] * len(variables_id)) temp_string = query_string % format_strings return temp_string % tuple(variables_id) def expose(meth): meth.exposed = True return meth class Server(Object): def __init__(self, auth, handler): self.auth = auth self.handler = handler def __str__(self): return "%s(auth=%s, handler=%s)" % (type(self).__name__, type(self.auth).__name__, type(self.handler).__name__) def sanitize_args(self, path, func, args, exclude=["self", "_env", "_client"]): # silently remove internal args, these should never be used # but if somebody does, we do not expose them by error message intargs = set(args).intersection(exclude) for a in intargs: del args[a] if intargs: logging.info("%s called with internal args: %s" % (path, ", ".join(intargs))) # silently remove surplus arguments - potential forward # compatibility (unknown args will get ignored) badargs = set(args)-set(func.func_code.co_varnames[0:func.func_code.co_argcount]) for a in badargs: del args[a] if badargs: logging.info("%s called with superfluous args: %s" % (path, ", ".join(badargs))) return args def wsgi_app(self, environ, start_response, exc_info=None): path = environ.get("PATH_INFO", "").lstrip("/") output = "" status = "200 OK" headers = [('Content-type', 'application/json')] exception = None try: try: injson = environ['wsgi.input'].read() except: raise Error("Data read error", 400, method=path, exc=sys.exc_info()) try: method = getattr(self.handler, path) method.exposed # dummy access to trigger AttributeError except Exception: raise Error("You've fallen of the cliff.", 404, method=path) client = self.auth.authenticate(environ) if not client: raise Error("I'm watching YOU. (Authenticate)", 403, method=path) try: events = json.loads(injson) if injson else None except Exception: raise Error("Deserialization error", 400, method=path, exc=sys.exc_info(), detail={"args": injson}) args = parse_qs(environ.get('QUERY_STRING', "")) for k, v in args.iteritems(): args[k] = v[0] logging.debug("%s called with %s" % (path, str(args))) if events: args["events"] = events # if not self.auth.authorize(environ, client, path, args): # raise Error("I'm watching YOU.", 403, method=path, detail={"client": client}) args = self.sanitize_args(path, method, args) result = method(_env=environ, _client=client, **args) # call requested method try: # 'default': takes care of non JSON serializable objects, # which could (although shouldn't) appear in handler code output = json.dumps(result, default=lambda v: str(v)) except Exception as e: raise Error("Serialization error", 500, method=path, exc=sys.exc_info(), detail={"args": str(result)}) except Error as e: exception = e except Exception as e: exception = Error("Server exception", 500, method=path, exc=sys.exc_info()) if exception: status = "%d %s" % (exception.error, exception.message) result = exception.to_dict() try: output = json.dumps(result, default=lambda v: str(v)) except Exception as e: # Here all bets are off, generate at least sane output output = '{"error": %d, "message": "%s"}' % ( exception.error, exception.message) logging.error(str(exception)) i = exception.info_str() if i: logging.info(i) d = exception.debug_str() if d: logging.debug(d) headers.append(('Content-Length', str(len(output)))) start_response(status, headers) return [output] __call__ = wsgi_app class WardenHandler(Object): def __init__(self, validator, db, auth, send_events_limit=100000, get_events_limit=100000, description=None): self.auth = auth self.db = db self.validator = validator self.send_events_limit = send_events_limit self.get_events_limit = get_events_limit self.description = description def __str__(self): return "%s(validator=%s, db=%s, send_events_limit=%s, get_events_limit=%s, description=\"%s\")" % ( type(self).__name__, type(self.validator).__name__, type(self.db).__name__, self.get_events_limit, self.send_events_limit, self.description) @expose def getDebug(self, _env, _client): auth = self.auth.authorize(_env, _client, 'getDebug', None, None) if not auth: raise Error("I'm watching YOU. (Authorization)", 403, method='getDebug', detail={"client": _client}) return { "environment": _env, "database": self.db.get_debug() } @expose def getInfo(self, _env, _client): info = { "version": VERSION, "send_events_limit": self.send_events_limit, "get_events_limit": self.get_events_limit } if self.description: info["description"] = self.description return info @expose def getEvents(self, _env, _client, id=None, count=None, cat=None, nocat=None, tag=None, notag=None, group=None, nogroup=None): try: id = int(id) except (ValueError, TypeError): id=0 if id == 0: try: id = self.db.getLastReceivedId(_client) # logging.debug("Last received ID for %s is %s" % (_client['hostname'], str(id))) except Exception, e: logging.error(e) id = 0 if id == 0: try: id = self.db.getLastEventId() except Exception as e: raise Error("Last event id receiving error", 500, detail={"client": _client}) self.db.insertLastReceivedId(_client, id) return { "lastid": id, "events": [] } try: count = int(count) except (ValueError, TypeError): count = 1 if self.get_events_limit: count = min(count, self.get_events_limit) logging.debug("getEvents - count: %s" % count) res = self.db.fetch_events(_client, id or 0, count, cat, nocat, tag, notag, group, nogroup) logging.info("getEvents(%d, %d, %s, %s, %s, %s, %s, %s): sending %d events" % ( id or 0, count, cat, nocat, tag, notag, group, nogroup, len(res["events"]))) self.db.insertLastReceivedId(_client, res['lastid']) logging.debug("lastid inserting: %s" % {'lastid': res['lastid'], 'client' : _client}) return res @expose def sendEvents(self, _env, _client, events=[]): if not isinstance(events, list): raise Error("List of events expected", 400, method="sendEvents") if len(events)>self.send_events_limit: raise Error("Too much events in one batch", 400, method="sendEvents", detail={"limit": self.send_events_limit}) saved = 0 errs = {} for i, event in enumerate(events): ev_errs = [] auth_cl = self.auth.authorize(_env, _client, 'sendEvents', event, None) if not auth_cl: errs[i] = ["Client %i(%s) does not correspond with event Node info or is not allowed to write" % (_client["service"]["service_id"], _client["service"]["identity"])] continue v_errs = self.validator.check(event) if v_errs: errs[i] = v_errs continue db_errs = self.db.store_event(auth_cl, event) if db_errs: errs[i] = db_errs saved += 1 logging.info("sendEvents(...): Saved %i events" % saved) return errs def read_ini(path): c = ConfigParser.RawConfigParser() res = c.read(path) if not res or not path in res: # We don't have loggin yet, hopefully this will go into webserver log raise Error("Unable to read config: %s" % path) data = {} for sect in c.sections(): for opts in c.options(sect): lsect = sect.lower() if not lsect in data: data[lsect] = {} data[lsect][opts] = c.get(sect, opts) return data def read_cfg(path): with open(path, "r") as f: stripcomments = "\n".join((l for l in f if not l.lstrip().startswith("#"))) conf = json.loads(stripcomments) # Lowercase keys conf = dict((sect.lower(), dict( (subkey.lower(), val) for subkey, val in subsect.iteritems()) ) for sect, subsect in conf.iteritems()) return conf def fallback_wsgi(environ, start_response, exc_info=None): # If server does not start, set up simple server, returning # Warden JSON compliant error message error=503 message="Server not running due to initialization error" headers = [('Content-type', 'application/json')] logline = "Error(%d): %s" % (error, message) status = "%d %s" % (error, message) output = '{"error": %d, "message": "%s"}' % ( error, message) logging.critical(logline) start_response(status, headers) return [output] def build_server(conf): # Functions for validation and conversion of config values def facility(name): return int(getattr(logging.handlers.SysLogHandler, "LOG_" + name.upper())) def loglevel(name): return int(getattr(logging, name.upper())) def natural(name): num = int(name) if num<1: raise ValueError("Not a natural number") return num def filepath(name): # Make paths relative to dir of this script return path.join(path.dirname(__file__), name) def objdef(name): return objects[name.lower()] obj = objdef # Draw into local namespace for init_obj objects = {} # Already initialized objects # List of sections and objects, configured by them # First object in each object list is the default one, otherwise # "type" keyword in section may be used to choose other section_def = { "log": ["FileLogger", "SysLogger"], "db": ["MySQL"], "auth": ["X509Authenticator", "NoAuthenticator"], "validator": ["JSONSchemaValidator", "NoValidator"], "handler": ["WardenHandler"], "server": ["Server"] } # Object parameter conversions and defaults param_def = { "FileLogger": { "filename": {"type": filepath, "default": path.join(path.dirname(__file__), path.splitext(path.split(__file__)[1])[0] + ".log")}, "level": {"type": loglevel, "default": "info"}, }, "SysLogger": { "socket": {"type": filepath, "default": "/dev/log"}, "facility": {"type": facility, "default": "daemon"}, "level": {"type": loglevel, "default": "info"} }, "NoAuthenticator": {}, "X509Authenticator": { "db": {"type": obj, "default": "db"} }, "NoValidator": {}, "JSONSchemaValidator": { "filename": {"type": filepath, "default": path.join(path.dirname(__file__), "idea.schema")} }, "MySQL": { "host": {"type": str, "default": "localhost"}, "user": {"type": str, "default": "warden"}, "password": {"type": str, "default": ""}, "dbname": {"type": str, "default": "warden3"}, "port": {"type": natural, "default": 3306} }, "WardenHandler": { "validator": {"type": obj, "default": "validator"}, "db": {"type": obj, "default": "DB"}, "auth": {"type": obj, "default": "auth"}, "send_events_limit": {"type": natural, "default": 10000}, "get_events_limit": {"type": natural, "default": 10000}, "description": {"type": str, "default": ""} }, "Server": { "auth": {"type": obj, "default": "auth"}, "handler": {"type": obj, "default": "handler"} } } def init_obj(sect_name): config = conf.get(sect_name, {}) sect_name = sect_name.lower() sect_def = section_def[sect_name] #logging.debug("Testing %s" % sect_name) try: # Object type defined? objtype = config["type"] del config["type"] except KeyError: # No, fetch default object type for this section objtype = sect_def[0] else: if not objtype in sect_def: raise KeyError("Unknown type %s in section %s" % (objtype, sect_name)) params = param_def[objtype] # No surplus parameters? Disallow also 'obj' attributes, these are only # to provide default referenced section for name in config: if name not in params or (name in params and params[name]["type"] is objdef): raise KeyError("Unknown key %s in section %s" % (name, sect_name)) # Process parameters kwargs = {} for name, definition in params.iteritems(): raw_val = config.get(name, definition["default"]) try: val = definition["type"](raw_val) except Exception: raise KeyError("Bad value \"%s\" for %s in section %s" % (raw_val, name, sect_name)) kwargs[name] = val cls = globals()[objtype] # get class/function type try: obj = cls(**kwargs) # run it except Exception as e: raise KeyError("Cannot initialize %s from section %s: %s" % ( objtype, sect_name, str(e))) if isinstance(obj, Object): # Log only objects here, functions must take care of themselves logging.info("Initialized %s" % str(obj)) objects[sect_name] = obj return obj # Init logging with at least simple stderr StreamLogger # Dunno if it's ok within wsgi, but we have no other choice, let's # hope it at least ends up in webserver error log StreamLogger() try: # Now try to init required objects for o in ("log", "db", "auth", "validator", "handler", "server"): init_obj(o) except Exception as e: logging.critical(str(e)) logging.debug("", exc_info=sys.exc_info()) return fallback_wsgi logging.info("Ready to serve") return objects["server"] if __name__=="__main__": # FIXME: just development stuff srv = build_server(read_ini("warden3.cfg.wheezy-warden3"))