#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2011-2013 Cesnet z.s.p.o
# Use of this source is governed by a 3-clause BSD-style license, see LICENSE file.
import os
import sys
import logging
import logging.handlers
import ConfigParser
from traceback import format_tb
import M2Crypto.X509
import json
import MySQLdb as my
import MySQLdb.cursors as mycursors
from uuid import uuid4
from time import time, gmtime
from math import trunc
from io import BytesIO
from urlparse import parse_qs
from os import path
# for local version of up to date jsonschema
sys.path.append(path.join(path.dirname(__file__), "..", "lib"))
from jsonschema import Draft4Validator, FormatChecker
VERSION = "3.0-not-even-alpha"
class Error(Exception):
def __init__(self, message, error=500, method=None,
detail=None, exc=(None, None, None)):
self.error = int(error)
self.method = method
self.message = message
self.detail = detail
(self.exctype, self.excval, self.exctb) = exc or sys.exc_info()
self.cause = self.excval # compatibility with other exceptions
def __str__(self):
out = []
out.append("Error(%s)" % (self.error))
if self.method is not None:
out.append(" in \"%s\"" % self.method)
if self.message is not None:
out.append(": %s" % self.message)
if self.excval is not None:
out.append(" - cause was %s: %s" % (type(self.excval).__name__, str(self.excval)))
return "".join(out)
def info_str(self):
return ("Detail: %s" % self.detail) or ""
def debug_str(self):
out = []
if self.excval is not None:
out.append("Exception %s: %s\n" % (type(self.excval).__name__, str(self.excval)))
if self.exctb is not None:
out.append("Traceback:\n%s" % "".join(format_tb(self.exctb)))
return "".join(out)
def to_dict(self):
d = {}
if self.error is not None:
d["error"] = self.error
if self.method is not None:
d["method"] = self.method
if self.message is not None:
d["message"] = self.message
if self.detail is not None:
d["detail"] = self.detail
return d
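
# Usage sketch (not part of the original source; the values are made up): an Error
# carries both the status code and a structured payload, so one object can be logged
# via __str__()/debug_str() and returned to the client via to_dict().
#
#   try:
#       raise Error("Deserialization error", 400, method="sendEvents",
#                   detail={"args": "<raw body>"})
#   except Error as e:
#       str(e)        # 'Error(400) in "sendEvents": Deserialization error'
#       e.to_dict()   # dict with "error", "method", "message" and "detail" keys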
def get_clean_root_logger(level=logging.INFO):
""" Attempts to get logging module into clean slate state """
# We want to be able to set up at least stderr logger before any
# configuration is read, and then later get rid of it and set up
# whatever administrator requires.
# However, there can exist only one logger, but we want to get a clean
# slate everytime we initialize StreamLogger or FileLogger... which
# is not exactly supported by logging module.
# So, we look directly inside logger class and clean up handlers/filters
# manually.
logger = logging.getLogger() # no need to create new
logger.setLevel(level)
while logger.handlers:
logger.removeHandler(logger.handlers[0])
while logger.filters:
logger.removeFilter(logger.filters[0])
return logger
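
# Intended bootstrap flow (illustrative, not part of the original source; the log
# file name is made up): an early StreamLogger makes pre-configuration errors
# visible, and the configured logger later replaces it, because each *Logger() call
# starts from the clean slate provided by get_clean_root_logger().
#
#   StreamLogger(level=logging.DEBUG)          # before the config is read
#   FileLogger("/var/log/warden/server.log")   # afterwards; drops the stderr handler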
def StreamLogger(stream=sys.stderr, level=logging.INFO):
""" Fallback handler just for setup, not meant to be used from
configuration file because during wsgi query stdout/stderr
is forbidden.
"""
fhand = logging.StreamHandler(stream)
fform = logging.Formatter('%(asctime)s %(filename)s[%(process)d]: (%(levelname)s) %(message)s')
fhand.setFormatter(fform)
logger = get_clean_root_logger(level)
logger.addHandler(fhand)
def FileLogger(filename, level=logging.INFO):
fhand = logging.FileHandler(filename)
fform = logging.Formatter('%(asctime)s %(filename)s[%(process)d]: (%(levelname)s) %(message)s')
fhand.setFormatter(fform)
logger = get_clean_root_logger(level)
logger.addHandler(fhand)
logging.info("Initialized FileLogger(filename=\"%s\", \"%s\")" % (filename, level))
def SysLogger(socket="/dev/log", facility=logging.handlers.SysLogHandler.LOG_DAEMON, level=logging.INFO):
fhand = logging.handlers.SysLogHandler(address=socket, facility=facility)
fform = logging.Formatter('%(filename)s[%(process)d]: (%(levelname)s) %(message)s')
fhand.setFormatter(fform)
logger = get_clean_root_logger(level)
logger.addHandler(fhand)
logging.info("Initialized SysLogger(socket=\"%s\", facility=\"%s\", level=\"%s\")" % (socket, facility, level))
class Object(object):
def __str__(self):
return "%s()" % type(self).__name__
class NoAuthenticator(Object):
def __init__(self):
Object.__init__(self)
    def authenticate(self, env):
return "anybody" # or None
    # Assumption: the parameter list below is inferred from the authorize() call
    # sites in WardenHandler (env, client, method, event, args).
    def authorize(self, env, client, method, event, args):
        return (client is not None)
class X509Authenticator(NoAuthenticator):
def __init__(self, db):
self.db = db
NoAuthenticator.__init__(self)
def __str__(self):
return "%s(db=%s)" % (type(self).__name__, type(self.db).__name__)
def get_cert_dns_names(self, pem):
cert = M2Crypto.X509.load_cert_string(pem)
subj = cert.get_subject()
commons = [n.get_data().as_text() for n in subj.get_entries_by_nid(subj.nid["CN"])]
ext = cert.get_ext("subjectAltName")
extstrs = [val.strip() for val in ext.get_value().split(",")]
altnames = [val[4:] for val in extstrs if val.startswith("DNS:")]
# bit of mangling to get rid of duplicates and leave commonname first
firstcommon = commons[0]
return [firstcommon] + list(set(altnames+commons) - set([firstcommon]))
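
    # Example (hypothetical certificate): for CN=warden.example.org and
    # subjectAltName "DNS:warden.example.org, DNS:alt.example.org" this returns
    # ["warden.example.org", "alt.example.org"] - the CN always first, duplicates
    # removed, the order of the remaining names undefined (set arithmetic).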
    def authenticate(self, env):
        names = self.get_cert_dns_names(env["SSL_CLIENT_CERT"])
        # Assumption: the client record is looked up in the DB by the certificate names
        return self.db.get_client_by_name(names)

    # Assumption: the parameter list is inferred from the authorize() call sites in WardenHandler
    def authorize(self, env, client, method, event, args):
        # Authorize for debug
if (method == 'getDebug'):
if not client["debug"]:
logging.info("Auth failed: client does not have debug enabled")
return None
return client

if method in ['getInfo', 'getEvents']:
return client
try:
identity = event['Node'][0]['Name'].lower()

except (KeyError, TypeError):
# Event does not bear valid Node attribute
logging.info("Auth failed: event does not bear valid Node attribute")
return None
try:
service = client["services"][identity]
except KeyError:
# We are unable to pair service in message to service in db
logging.info("Auth failed: '%s' from event not found in services for client %i" % (identity, client["id"]))
if (method == "sendEvents"):
if not (service["write"] or service["test"]):
logging.info("Auth failed: service %i (%s) is not allowed to write or test" % (service["service_id"], identity))
return None
test = 'Test' in event.get('Category', [])
if not test:
logging.info("Auth failed: service %i (%s) does not send Test category in event" % (service["service_id"], identity))
return None
return client
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
class NoValidator(Object):
def check(self, event):
return []
class JSONSchemaValidator(NoValidator):
def __init__(self, filename=None):
self.path = filename or path.join(path.dirname(__file__), "idea.schema")
with open(self.path) as f:
self.schema = json.load(f)
self.validator = Draft4Validator(self.schema, format_checker=FormatChecker())
def __str__(self):
return "%s(filename=\"%s\")" % (type(self).__name__, self.path)
def check(self, event):
def sortkey(k):
""" Treat keys as lowercase, prefer keys with less path segments """
return (len(k.path), "/".join(str(k.path)).lower())
res = []
for error in sorted(self.validator.iter_errors(event), key=sortkey):
res.append(
"Validation error: key \"%s\", value \"%s\", expected - %s, error message - %s\n" % (
u"/".join(str(v) for v in error.path),
error.instance,
error.schema.get('description', 'no additional info'),
error.message))
return res
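
    # Usage sketch (not part of the original source; the event is made up):
    #
    #   validator = JSONSchemaValidator()             # loads ./idea.schema by default
    #   errors = validator.check({"Format": "IDEA0"})
    #   if errors:                                    # empty list means the event validates
    #       logging.info("\n".join(errors))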

class MySQL(Object):
def __init__(self, host, user, password, dbname, port, catmap_filename, tagmap_filename):
self.host = host
self.user = user
self.password = password
self.dbname = dbname
self.port = port

self.catmap_filename = catmap_filename
self.tagmap_filename = tagmap_filename
with open(catmap_filename, "r") as catmap_fd:
self.catmap = json.load(catmap_fd)
self.catmap_other = self.catmap["Other"] # Catch error soon, avoid lookup later
with open(tagmap_filename, "r") as tagmap_fd:
self.tagmap = json.load(tagmap_fd)
            self.tagmap_other = self.tagmap["Other"] # Catch error soon, avoid lookup later
self.con = my.connect(host=self.host, user=self.user, passwd=self.password,
db=self.dbname, port=self.port, cursorclass=mycursors.DictCursor)
self.crs = self.con.cursor()
def __str__(self):

return "%s(host='%s', user='%s', dbname='%s', port=%d, catmap_filename=\"%s\", tagmap_filename=\"%s\")" % (
type(self).__name__, self.host, self.user, self.dbname, self.port, self.catmap_filename, self.tagmap_filename)
self.crs.execute("SELECT cl.`id`, cl.`hostname`, s.`service`, s.`service_id`, s.`identity`, cl.`read`, s.`write`, s.`test`, cl.`debug` FROM `clients` cl LEFT JOIN `services` s ON cl.`id` = s.`client_id` WHERE cl.`valid` = 1 AND s.`valid` = 1 AND `hostname` IN (%s)" % format_strings, tuple(name))
rows = self.crs.fetchall()
if not rows:
return None
client = {}
for n in ["id", "hostname", "read", "debug"]:
client[n] = rows[0][n]
services = {}
for row in rows:
service = {}
for n in ["service", "service_id", "identity", "write", "test"]:
service[n] = row[n]
services[row["identity"]] = service
client["services"] = services
logging.debug("get_client_by_name: %s", str(client))
self.crs.execute("SHOW TABLE STATUS")
tablestat = self.crs.fetchall()
"version": row["VER"],
"tables": tablestat
    def generateDynamicQuery(self, section, query_string, variables, parent_cats=None):
        # A shared mutable default ([]) would leak parent ids between calls, so the
        # list is created per call when the caller does not supply one.
        if parent_cats is None:
            parent_cats = []
        variables_id = []
for v in variables:

try:
mapped_id = section[v]
except KeyError:
raise Error("Wrong tag or category used in query.", 422, method='getEvents',
exc=sys.exc_info(), detail={"key": v})
if mapped_id % 100:
variables_id.append(mapped_id)
else:
parent_cats.append(mapped_id)
format_strings = ','.join(['%s'] * len(variables_id))
temp_string = query_string % format_strings
return temp_string, variables_id
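
    # Worked example (the mapping values are hypothetical; real ones come from
    # catmap_mysql.json): with section = {"Recon": 100, "Recon.Scanning": 101} and
    # query_string = "category_id IN (%s)", a query for ["Recon.Scanning", "Recon"]
    # returns ("category_id IN (%s)", [101]) - 101 is a leaf id (101 % 100 != 0),
    # while 100 is a whole hundred, i.e. a parent category, so it is appended to
    # parent_cats and matched by the caller with "category_id DIV 100 = 1".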
def fetch_events(self, client, id, count,
cat=None, nocat=None,
tag=None, notag=None,
group=None, nogroup=None):
logging.debug("fetch_events: id=%i, count=%i, cat=%s, nocat=%s, tag=%s, notag=%s, group=%s, nogroup=%s" % (id, count, str(cat), str(nocat), str(tag), str(notag), str(group), str(nogroup)))
if cat and nocat:

raise Error("Unrealizable conditions. Choose cat or nocat option.", 422, method='getEvents',
exc=sys.exc_info(), detail={'cat': cat, 'nocat' : nocat})
if tag and notag:

raise Error("Unrealizable conditions. Choose tag or notag option.", 422, method='getEvents',
                exc=sys.exc_info(), detail={'tag': tag, 'notag': notag})
if group and nogroup:

raise Error("Unrealizable conditions. Choose group or nogroup option.", 422, method='getEvents',
                exc=sys.exc_info(), detail={'group': group, 'nogroup': nogroup})
sqlwhere = []
sqlparams = []
sqlwhere.append("SELECT e.id, e.data FROM services s RIGHT JOIN events e ON s.service_id = e.service_id WHERE e.id > %s")
sqlparams.append(id or 0)
if cat or nocat:
not_op = "" if cat else "NOT"
parent_cats = []

sqltemp, sqlpar = self.generateDynamicQuery(self.catmap, "category_id %s IN (%%s)" % not_op, (cat or nocat), parent_cats)
for pcats in parent_cats:
sqltemp += " %s category_id DIV %s = 1 " % (("OR" if sqltemp else ""), pcats)
sqlwhere.append(" AND e.id IN (SELECT event_id FROM event_category_mapping WHERE %s)" % sqltemp)
sqlparams.extend(sqlpar)
if tag or notag:
not_op = "" if tag else "NOT"

sqltemp, sqlpar = self.generateDynamicQuery(self.tagmap, "tag_id %s IN (%%s)" % not_op, (tag or notag))
sqlwhere.append(" AND e.id IN (SELECT event_id FROM event_tag_mapping WHERE %s)" % sqltemp)
sqlparams.extend(sqlpar)
if group or nogroup:
not_op = "" if group else "NOT"
for identity in (group or nogroup):
sqlwhere.append(" AND s.identity %s LIKE %%s" % not_op)
sqlparams.append(identity + "%")
sqlwhere.append(" AND e.valid = 1 LIMIT %s")
sqlparams.append(count)
sqlwhere_string = "".join(sqlwhere)
logging.debug("fetch_events: query - %s" % sqlwhere_string)
logging.debug("fetch_events: params - %s", str(sqlparams))
        self.crs.execute(sqlwhere_string, sqlparams)
        row = self.crs.fetchall()
        if row:
            maxid = max(r['id'] for r in row)
        else:
            maxid = self.getLastEventId()
        events = [json.loads(r["data"]) for r in row]
        return {
            "lastid": maxid,
            "events": events
        }
def store_event(self, client, event):
try:
self.crs.execute("INSERT INTO events (received,service_id,data) VALUES (NOW(), %s, %s)", (client["service"]["service_id"], json.dumps(event)))
lastid = self.crs.lastrowid
logging.debug("store_event: Last ID in events - %i" % lastid)
for cat in event.get('Category', ["Other"]):

cat_id = self.catmap.get(cat, self.catmap_other)
logging.debug("store_event: Category \"%s\" translated to %i" % (cat, cat_id))
self.crs.execute("INSERT INTO event_category_mapping (event_id,category_id) VALUES (%s, %s)", (lastid, cat_id))
try:
tags = event['Node'][0]['Tags']
except (KeyError, IndexError):
tags = []
for tag in tags:

tag_id = self.tagmap.get(tag, self.tagmap_other)
logging.debug("store_event: Tag \"%s\" translated to %i" % (tag, tag_id))
self.crs.execute("INSERT INTO event_tag_mapping (event_id,tag_id) VALUES (%s, %s)", (lastid, tag_id))
self.con.commit()
return []
except Exception as e:
self.con.rollback()
return [type(e).__name__ + ": " + str(e)]
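
    # Minimal shape of an event as store_event() uses it (IDEA-like; all values are
    # made up): only 'Category' and Node[0]['Tags'] feed the mapping tables, the
    # whole JSON document is stored verbatim in events.data.
    #
    #   {
    #       "Format": "IDEA0",
    #       "ID": "4390fc3f-c753-4a3e-bc83-1b44f24baf75",
    #       "Category": ["Recon.Scanning"],
    #       "Node": [{"Name": "cz.example.warden.test", "Tags": ["Connection"]}]
    #   }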
logging.debug("insertLastReceivedId: id %i for client %i(%s)" % (id, client["id"], client["hostname"]))
self.crs.execute("INSERT INTO last_events(client_id, event_id, timestamp) VALUES(%s, %s, NOW())", (client["id"], id))
self.crs.execute("SELECT MAX(id) as id FROM events")
row = self.crs.fetchone()
return row['id'] if row['id'] is not None else 0
def getLastReceivedId(self, client):
self.crs.execute("SELECT MAX(event_id) as id FROM last_events WHERE client_id = %s", client["id"])
id = row['id'] if row is not None else 0
logging.debug("getLastReceivedId: id %i for client %i(%s)" % (id, client["id"], client["hostname"]))
def expose(meth):
meth.exposed = True
return meth
class Server(Object):
def __init__(self, auth, handler):
self.auth = auth
self.handler = handler
def __str__(self):
return "%s(auth=%s, handler=%s)" % (type(self).__name__, type(self.auth).__name__, type(self.handler).__name__)
def sanitize_args(self, path, func, args, exclude=["self", "_env", "_client"]):
# silently remove internal args, these should never be used
# but if somebody does, we do not expose them by error message
intargs = set(args).intersection(exclude)
for a in intargs:
del args[a]
if intargs:
logging.info("%s called with internal args: %s" % (path, ", ".join(intargs)))
# silently remove surplus arguments - potential forward
# compatibility (unknown args will get ignored)
badargs = set(args)-set(func.func_code.co_varnames[0:func.func_code.co_argcount])
for a in badargs:
del args[a]
if badargs:
logging.info("%s called with superfluous args: %s" % (path, ", ".join(badargs)))
return args
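
    # Behaviour sketch (made-up values): for a handler method with the signature
    # getEvents(self, _env, _client, id=None, count=None, ...), a query string parsed
    # into {"id": ["42"], "count": ["5"], "_client": ["x"], "bogus": ["1"]} comes back
    # as {"id": ["42"], "count": ["5"]} - internal and unknown names are dropped and
    # only logged, never echoed back to the caller.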
def wsgi_app(self, environ, start_response, exc_info=None):
path = environ.get("PATH_INFO", "").lstrip("/")
output = ""
status = "200 OK"
headers = [('Content-type', 'application/json')]
exception = None
try:
try:
injson = environ['wsgi.input'].read()
except:

raise Error("Data read error.", 408, method=path, exc=sys.exc_info())
try:
method = getattr(self.handler, path)
method.exposed # dummy access to trigger AttributeError
except Exception:
raise Error("You've fallen of the cliff.", 404, method=path)
client = self.auth.authenticate(environ)
if not client:

raise Error("I'm watching. Authenticate.", 403, method=path)
try:
events = json.loads(injson) if injson else None
except Exception:
raise Error("Deserialization error", 400, method=path,
exc=sys.exc_info(), detail={"args": injson})
args = parse_qs(environ.get('QUERY_STRING', ""))
logging.debug("%s called with %s" % (path, str(args)))
if events:
args["events"] = events
args = self.sanitize_args(path, method, args)
result = method(_env=environ, _client=client, **args) # call requested method
try:
# 'default': takes care of non JSON serializable objects,
# which could (although shouldn't) appear in handler code
output = json.dumps(result, default=lambda v: str(v))
except Exception as e:
raise Error("Serialization error", 500, method=path,
exc=sys.exc_info(), detail={"args": str(result)})
except Error as e:
exception = e
except Exception as e:

exception = Error("Server exception", 500, method=path, exc=sys.exc_info())
if exception:
status = "%d %s" % (exception.error, exception.message)
result = exception.to_dict()
try:
output = json.dumps(result, default=lambda v: str(v))
except Exception as e:
# Here all bets are off, generate at least sane output
output = '{"error": %d, "message": "%s"}' % (
exception.error, exception.message)
logging.error(str(exception))
i = exception.info_str()
if i:
logging.info(i)
d = exception.debug_str()
if d:
logging.debug(d)
headers.append(('Content-Length', str(len(output))))
start_response(status, headers)
return [output]
__call__ = wsgi_app
class WardenHandler(Object):
    # Assumption: the constructor parameter list is inferred from the attributes the
    # methods below rely on (self.validator, self.db, self.auth) and from
    # param_def["WardenHandler"] in build_server().
    def __init__(self, validator, db, auth,
            send_events_limit=100000, get_events_limit=100000,
            description=None):
        self.auth = auth
self.db = db
self.validator = validator
self.send_events_limit = send_events_limit
self.get_events_limit = get_events_limit
self.description = description
def __str__(self):
return "%s(validator=%s, db=%s, send_events_limit=%s, get_events_limit=%s, description=\"%s\")" % (
type(self).__name__, type(self.validator).__name__, type(self.db).__name__,
            self.send_events_limit, self.get_events_limit, self.description)
@expose
def getDebug(self, _env, _client):
auth = self.auth.authorize(_env, _client, 'getDebug', None, None)
if not auth:

raise Error("I'm watching. Authorize.", 403, method='getDebug', detail={"client": _client})
"database": self.db.get_debug(),
"system": {
"uname": os.uname()
},
"process": {
"cwd": os.getcwdu(),
"pid": os.getpid(),
"ppid": os.getppid(),
"pgrp": os.getpgrp(),
"uid": os.getuid(),
"gid": os.getgid(),
"euid": os.geteuid(),
"egid": os.getegid(),
"groups": os.getgroups()
            }
        }
@expose
def getInfo(self, _env, _client):

auth = self.auth.authorize(_env, _client, 'getInfo', None, None)
if not auth:
raise Error("I'm watching. Authorize.", 403, method='getDebug', detail={"client": _client})
info = {
"version": VERSION,
"send_events_limit": self.send_events_limit,
"get_events_limit": self.get_events_limit
}
if self.description:
info["description"] = self.description
return info
@expose
def getEvents(self, _env, _client, id=None, count=None,
cat=None, nocat=None,
tag=None, notag=None,
group=None, nogroup=None):

auth = self.auth.authorize(_env, _client, 'getEvents', None, None)
if not auth:
raise Error("I'm watching. Authorize.", 403, method='getDebug', detail={"client": _client})
        try:
            id = int(id[0])
except (ValueError, TypeError, IndexError):
try:
id = self.db.getLastReceivedId(_client)
            except Exception as e:
                logging.info("getEvents: cannot getLastReceivedId - " + type(e).__name__ + ": " + str(e))
if id is None:
# First access, remember the guy and get him last event
id = self.db.getLastEventId()
self.db.insertLastReceivedId(_client, id)
return {
"lastid": id,
"events": []
}
        try:
            count = int(count[0])
except (ValueError, TypeError, IndexError):
count = 1
if self.get_events_limit:
count = min(count, self.get_events_limit)
res = self.db.fetch_events(_client, id, count, cat, nocat, tag, notag, group, nogroup)
logging.info("getEvents(%d, %d, %s, %s, %s, %s, %s, %s): sending %d events" % (
id, count, cat, nocat, tag, notag, group, nogroup, len(res["events"])))
self.db.insertLastReceivedId(_client, res['lastid'])
return res
@expose
def sendEvents(self, _env, _client, events=[]):
if not isinstance(events, list):

raise Error("List of events expected.", 400, method="sendEvents")
if len(events)>self.send_events_limit:

raise Error("Too much events in one batch.", 413, method="sendEvents",
detail={"limit": self.send_events_limit})
saved = 0
errs = {}
for i, event in enumerate(events):
ev_errs = []
auth_cl = self.auth.authorize(_env, _client, 'sendEvents', event, None)
if not auth_cl:
errs[i] = ["Client %i(%s) does not correspond with event Node info or is not allowed to write" % (_client["service"]["service_id"], _client["service"]["identity"])]
continue
v_errs = self.validator.check(event)
if v_errs:
errs[i] = v_errs
continue
db_errs = self.db.store_event(auth_cl, event)
if db_errs:
errs[i] = db_errs

continue
saved += 1

logging.info("sendEvents: Saved %i events" % saved)
if errs:
logging.info("sendEvents errors: \n%s\n" % str(errs))
return errs
def read_ini(path):
c = ConfigParser.RawConfigParser()
res = c.read(path)
    if not res or path not in res:
        # We don't have logging yet; hopefully this message ends up in the web server log
        raise Error("Unable to read config: %s" % path)
data = {}
for sect in c.sections():
for opts in c.options(sect):
lsect = sect.lower()
if not lsect in data:
data[lsect] = {}
data[lsect][opts] = c.get(sect, opts)
return data
def read_cfg(path):
with open(path, "r") as f:
stripcomments = "\n".join((l for l in f if not l.lstrip().startswith("#")))
conf = json.loads(stripcomments)
# Lowercase keys
conf = dict((sect.lower(), dict(
(subkey.lower(), val) for subkey, val in subsect.iteritems())
) for sect, subsect in conf.iteritems())
return conf
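
# Example configuration accepted by read_cfg() - JSON with '#' comment lines;
# section and key names follow section_def/param_def below, the concrete values
# here are only illustrative:
#
#   {
#       # "log": {"type": "SysLogger", "facility": "local7"},
#       "log": {"level": "info"},
#       "db": {"host": "localhost", "user": "warden", "password": "example"},
#       "auth": {},
#       "validator": {},
#       "handler": {"description": "Warden test server"},
#       "server": {}
#   }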
def fallback_wsgi(environ, start_response, exc_info=None):
# If server does not start, set up simple server, returning
# Warden JSON compliant error message
error=503
message="Server not running due to initialization error"
headers = [('Content-type', 'application/json')]
logline = "Error(%d): %s" % (error, message)
status = "%d %s" % (error, message)
output = '{"error": %d, "message": "%s"}' % (
error, message)
logging.critical(logline)
start_response(status, headers)
return [output]
def build_server(conf):
# Functions for validation and conversion of config values
def facility(name):
return int(getattr(logging.handlers.SysLogHandler, "LOG_" + name.upper()))
def loglevel(name):
return int(getattr(logging, name.upper()))
def natural(name):
num = int(name)
if num<1:
raise ValueError("Not a natural number")
return num
def filepath(name):
# Make paths relative to dir of this script
return path.join(path.dirname(__file__), name)
def objdef(name):
return objects[name.lower()]
obj = objdef # Draw into local namespace for init_obj
objects = {} # Already initialized objects
# List of sections and objects, configured by them
# First object in each object list is the default one, otherwise
# "type" keyword in section may be used to choose other
    section_def = {
        "log": ["FileLogger", "SysLogger"],
        "db": ["MySQL"],
        "auth": ["X509Authenticator", "NoAuthenticator"],
        "validator": ["JSONSchemaValidator", "NoValidator"],
        "handler": ["WardenHandler"],
        "server": ["Server"]
    }
# Object parameter conversions and defaults
param_def = {
"FileLogger": {
"filename": {"type": filepath, "default": path.join(path.dirname(__file__), path.splitext(path.split(__file__)[1])[0] + ".log")},
"level": {"type": loglevel, "default": "info"},
},
"SysLogger": {
"socket": {"type": filepath, "default": "/dev/log"},
"facility": {"type": facility, "default": "daemon"},
"level": {"type": loglevel, "default": "info"}
},
"NoAuthenticator": {},
"X509Authenticator": {
"db": {"type": obj, "default": "db"}
},
"NoValidator": {},
"JSONSchemaValidator": {
"filename": {"type": filepath, "default": path.join(path.dirname(__file__), "idea.schema")}
},
"MySQL": {
"host": {"type": str, "default": "localhost"},
"user": {"type": str, "default": "warden"},
"password": {"type": str, "default": ""},

            "dbname": {"type": str, "default": "warden3"},  # the default DB name is an assumption
"port": {"type": natural, "default": 3306},
"catmap_filename": {"type": filepath, "default": path.join(path.dirname(__file__), "catmap_mysql.json")},
"tagmap_filename": {"type": filepath, "default": path.join(path.dirname(__file__), "tagmap_mysql.json")}
"WardenHandler": {
"validator": {"type": obj, "default": "validator"},
"db": {"type": obj, "default": "DB"},
"send_events_limit": {"type": natural, "default": 10000},
"get_events_limit": {"type": natural, "default": 10000},
"description": {"type": str, "default": ""}
},
"Server": {
"auth": {"type": obj, "default": "auth"},
"handler": {"type": obj, "default": "handler"}
}
}
def init_obj(sect_name):
config = conf.get(sect_name, {})
sect_name = sect_name.lower()
sect_def = section_def[sect_name]
try: # Object type defined?
objtype = config["type"]
del config["type"]
except KeyError: # No, fetch default object type for this section
objtype = sect_def[0]
else:
if not objtype in sect_def:
raise KeyError("Unknown type %s in section %s" % (objtype, sect_name))
params = param_def[objtype]
# No surplus parameters? Disallow also 'obj' attributes, these are only
# to provide default referenced section
for name in config:
if name not in params or (name in params and params[name]["type"] is objdef):
raise KeyError("Unknown key %s in section %s" % (name, sect_name))
# Process parameters
kwargs = {}
for name, definition in params.iteritems():
raw_val = config.get(name, definition["default"])
try:
val = definition["type"](raw_val)
except Exception:
raise KeyError("Bad value \"%s\" for %s in section %s" % (raw_val, name, sect_name))
kwargs[name] = val
cls = globals()[objtype] # get class/function type
try:
obj = cls(**kwargs) # run it
except Exception as e:
raise KeyError("Cannot initialize %s from section %s: %s" % (
objtype, sect_name, str(e)))
if isinstance(obj, Object):
# Log only objects here, functions must take care of themselves
logging.info("Initialized %s" % str(obj))
objects[sect_name] = obj
return obj
# Init logging with at least simple stderr StreamLogger
# Dunno if it's ok within wsgi, but we have no other choice, let's
# hope it at least ends up in webserver error log
StreamLogger()
try:
# Now try to init required objects
for o in ("log", "db", "auth", "validator", "handler", "server"):
init_obj(o)
except Exception as e:
logging.critical(str(e))
logging.debug("", exc_info=sys.exc_info())
return fallback_wsgi
logging.info("Ready to serve")
return objects["server"]
if __name__=="__main__":
# FIXME: just development stuff
srv = build_server(read_ini("warden3.cfg.wheezy-warden3"))
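
# Deployment sketch (not part of the original source; the config file name is an
# assumption): a WSGI container such as mod_wsgi expects a module level
# "application" callable, which build_server() returns (or fallback_wsgi on error).
#
#   application = build_server(read_cfg("warden_server.cfg"))
#
# For quick local testing the standard library WSGI server works as well:
#
#   from wsgiref.simple_server import make_server
#   make_server("localhost", 8080, application).serve_forever()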